diff --git a/Dockerfile b/Dockerfile index 7f0a37ea5..0b2fdb403 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,10 @@ -FROM gradle:6.5.0-jdk8 +FROM gradle:6.7.1-jdk8 ENV ANDROID_SDK_URL https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip ENV ANDROID_API_LEVEL android-30 ENV ANDROID_BUILD_TOOLS_VERSION 30.0.3 ENV ANDROID_HOME /usr/local/android-sdk-linux -ENV ANDROID_NDK_VERSION 21.1.6352462 +ENV ANDROID_NDK_VERSION 21.4.7075529 ENV ANDROID_VERSION 30 ENV ANDROID_NDK_HOME ${ANDROID_HOME}/ndk/${ANDROID_NDK_VERSION}/ ENV PATH ${PATH}:${ANDROID_HOME}/tools:${ANDROID_HOME}/platform-tools diff --git a/TMessagesProj/build.gradle b/TMessagesProj/build.gradle index 5ea3733fd..fcbdaa243 100644 --- a/TMessagesProj/build.gradle +++ b/TMessagesProj/build.gradle @@ -3,7 +3,6 @@ apply plugin: 'com.android.application' repositories { mavenCentral() google() - jcenter() } configurations { @@ -16,7 +15,7 @@ configurations.all { } dependencies { - implementation 'androidx.core:core:1.3.2' + implementation 'androidx.core:core:1.5.0' implementation 'androidx.palette:palette:1.0.0' implementation 'androidx.exifinterface:exifinterface:1.3.2' implementation 'androidx.dynamicanimation:dynamicanimation:1.0.0' @@ -25,16 +24,16 @@ dependencies { compileOnly 'org.checkerframework:checker-qual:2.5.2' compileOnly 'org.checkerframework:checker-compat-qual:2.5.0' - implementation 'com.google.firebase:firebase-messaging:21.1.0' - implementation 'com.google.firebase:firebase-config:20.0.4' - implementation 'com.google.firebase:firebase-datatransport:17.0.11' - implementation 'com.google.firebase:firebase-appindexing:19.2.0' - implementation 'com.google.android.gms:play-services-maps:17.0.0' + implementation 'com.google.firebase:firebase-messaging:22.0.0' + implementation 'com.google.firebase:firebase-config:21.0.0' + implementation 'com.google.firebase:firebase-datatransport:18.0.0' + implementation 'com.google.firebase:firebase-appindexing:20.0.0' + implementation 
'com.google.android.gms:play-services-maps:17.0.1' implementation 'com.google.android.gms:play-services-auth:19.0.0' implementation 'com.google.android.gms:play-services-vision:16.2.0' - implementation 'com.google.android.gms:play-services-wearable:17.0.0' + implementation 'com.google.android.gms:play-services-wearable:17.1.0' implementation 'com.google.android.gms:play-services-location:18.0.0' - implementation 'com.google.android.gms:play-services-wallet:18.1.2' + implementation 'com.google.android.gms:play-services-wallet:18.1.3' implementation 'com.googlecode.mp4parser:isoparser:1.0.6' implementation 'com.stripe:stripe-android:2.0.2' implementation files('libs/libgsaverification-client.aar') @@ -45,7 +44,7 @@ dependencies { android { compileSdkVersion 30 buildToolsVersion '30.0.3' - ndkVersion "21.1.6352462" + ndkVersion "21.4.7075529" defaultConfig.applicationId = "org.telegram.messenger" @@ -300,7 +299,7 @@ android { } } - defaultConfig.versionCode = 2293 + defaultConfig.versionCode = 2359 applicationVariants.all { variant -> variant.outputs.all { output -> @@ -319,7 +318,7 @@ android { defaultConfig { minSdkVersion 16 targetSdkVersion 29 - versionName "7.7.2" + versionName "7.8.0" vectorDrawables.generatedDensities = ['mdpi', 'hdpi', 'xhdpi', 'xxhdpi'] diff --git a/TMessagesProj/jni/CMakeLists.txt b/TMessagesProj/jni/CMakeLists.txt index d955668c6..bde159f5e 100644 --- a/TMessagesProj/jni/CMakeLists.txt +++ b/TMessagesProj/jni/CMakeLists.txt @@ -399,7 +399,7 @@ target_compile_definitions(sqlite PUBLIC #voip include(${CMAKE_HOME_DIRECTORY}/voip/CMakeLists.txt) -set(NATIVE_LIB "tmessages.38") +set(NATIVE_LIB "tmessages.39") #tmessages add_library(${NATIVE_LIB} SHARED @@ -669,7 +669,7 @@ target_include_directories(${NATIVE_LIB} PUBLIC lz4) target_link_libraries(${NATIVE_LIB} - -Wl,--whole-archive voipandroid -Wl,--no-whole-archive + -Wl,--whole-archive rnnoise voipandroid -Wl,--no-whole-archive tgvoip tgcalls tgcalls_tp @@ -692,6 +692,7 @@ 
target_link_libraries(${NATIVE_LIB} log z GLESv2 + EGL android OpenSLES cpufeatures) diff --git a/TMessagesProj/jni/audio.c b/TMessagesProj/jni/audio.c index f77c92727..7dc36e45c 100644 --- a/TMessagesProj/jni/audio.c +++ b/TMessagesProj/jni/audio.c @@ -302,8 +302,8 @@ int initRecorder(const char *path, opus_int32 sampleRate) { inopt.gain = 0; inopt.endianness = 0; inopt.copy_comments = 0; - inopt.rawmode = 1; - inopt.ignorelength = 1; + inopt.rawmode = 0; + inopt.ignorelength = 0; inopt.samplesize = 16; inopt.channels = 1; inopt.skip = 0; @@ -332,7 +332,7 @@ int initRecorder(const char *path, opus_int32 sampleRate) { _packet = malloc(max_frame_bytes); result = opus_encoder_ctl(_encoder, OPUS_SET_BITRATE(bitrate)); - result = opus_encoder_ctl(_encoder, OPUS_SET_COMPLEXITY(10)); + //result = opus_encoder_ctl(_encoder, OPUS_SET_COMPLEXITY(10)); if (result != OPUS_OK) { LOGE("Error OPUS_SET_BITRATE returned: %s", opus_strerror(result)); return 0; diff --git a/TMessagesProj/jni/ffmpeg/include/libavcodec/bytestream.h b/TMessagesProj/jni/ffmpeg/include/libavcodec/bytestream.h new file mode 100644 index 000000000..0516a6e3d --- /dev/null +++ b/TMessagesProj/jni/ffmpeg/include/libavcodec/bytestream.h @@ -0,0 +1,376 @@ +/* + * Bytestream functions + * copyright (c) 2006 Baptiste Coudurier + * Copyright (c) 2012 Aneesh Dogra (lionaneesh) + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_BYTESTREAM_H +#define AVCODEC_BYTESTREAM_H + +#include +#include + +#include "libavutil/avassert.h" +#include "libavutil/common.h" +#include "libavutil/intreadwrite.h" + +typedef struct GetByteContext { + const uint8_t *buffer, *buffer_end, *buffer_start; +} GetByteContext; + +typedef struct PutByteContext { + uint8_t *buffer, *buffer_end, *buffer_start; + int eof; +} PutByteContext; + +#define DEF(type, name, bytes, read, write) \ +static av_always_inline type bytestream_get_ ## name(const uint8_t **b) \ +{ \ + (*b) += bytes; \ + return read(*b - bytes); \ +} \ +static av_always_inline void bytestream_put_ ## name(uint8_t **b, \ + const type value) \ +{ \ + write(*b, value); \ + (*b) += bytes; \ +} \ +static av_always_inline void bytestream2_put_ ## name ## u(PutByteContext *p, \ + const type value) \ +{ \ + bytestream_put_ ## name(&p->buffer, value); \ +} \ +static av_always_inline void bytestream2_put_ ## name(PutByteContext *p, \ + const type value) \ +{ \ + if (!p->eof && (p->buffer_end - p->buffer >= bytes)) { \ + write(p->buffer, value); \ + p->buffer += bytes; \ + } else \ + p->eof = 1; \ +} \ +static av_always_inline type bytestream2_get_ ## name ## u(GetByteContext *g) \ +{ \ + return bytestream_get_ ## name(&g->buffer); \ +} \ +static av_always_inline type bytestream2_get_ ## name(GetByteContext *g) \ +{ \ + if (g->buffer_end - g->buffer < bytes) { \ + g->buffer = g->buffer_end; \ + return 0; \ + } \ + return bytestream2_get_ ## name ## u(g); \ +} \ +static av_always_inline type bytestream2_peek_ ## name(GetByteContext *g) \ +{ \ + if (g->buffer_end - g->buffer < bytes) \ + return 0; \ + return read(g->buffer); \ +} + +DEF(uint64_t, le64, 8, AV_RL64, AV_WL64) +DEF(unsigned int, le32, 4, AV_RL32, AV_WL32) 
+DEF(unsigned int, le24, 3, AV_RL24, AV_WL24) +DEF(unsigned int, le16, 2, AV_RL16, AV_WL16) +DEF(uint64_t, be64, 8, AV_RB64, AV_WB64) +DEF(unsigned int, be32, 4, AV_RB32, AV_WB32) +DEF(unsigned int, be24, 3, AV_RB24, AV_WB24) +DEF(unsigned int, be16, 2, AV_RB16, AV_WB16) +DEF(unsigned int, byte, 1, AV_RB8 , AV_WB8) + +#if AV_HAVE_BIGENDIAN +# define bytestream2_get_ne16 bytestream2_get_be16 +# define bytestream2_get_ne24 bytestream2_get_be24 +# define bytestream2_get_ne32 bytestream2_get_be32 +# define bytestream2_get_ne64 bytestream2_get_be64 +# define bytestream2_get_ne16u bytestream2_get_be16u +# define bytestream2_get_ne24u bytestream2_get_be24u +# define bytestream2_get_ne32u bytestream2_get_be32u +# define bytestream2_get_ne64u bytestream2_get_be64u +# define bytestream2_put_ne16 bytestream2_put_be16 +# define bytestream2_put_ne24 bytestream2_put_be24 +# define bytestream2_put_ne32 bytestream2_put_be32 +# define bytestream2_put_ne64 bytestream2_put_be64 +# define bytestream2_peek_ne16 bytestream2_peek_be16 +# define bytestream2_peek_ne24 bytestream2_peek_be24 +# define bytestream2_peek_ne32 bytestream2_peek_be32 +# define bytestream2_peek_ne64 bytestream2_peek_be64 +#else +# define bytestream2_get_ne16 bytestream2_get_le16 +# define bytestream2_get_ne24 bytestream2_get_le24 +# define bytestream2_get_ne32 bytestream2_get_le32 +# define bytestream2_get_ne64 bytestream2_get_le64 +# define bytestream2_get_ne16u bytestream2_get_le16u +# define bytestream2_get_ne24u bytestream2_get_le24u +# define bytestream2_get_ne32u bytestream2_get_le32u +# define bytestream2_get_ne64u bytestream2_get_le64u +# define bytestream2_put_ne16 bytestream2_put_le16 +# define bytestream2_put_ne24 bytestream2_put_le24 +# define bytestream2_put_ne32 bytestream2_put_le32 +# define bytestream2_put_ne64 bytestream2_put_le64 +# define bytestream2_peek_ne16 bytestream2_peek_le16 +# define bytestream2_peek_ne24 bytestream2_peek_le24 +# define bytestream2_peek_ne32 bytestream2_peek_le32 +# 
define bytestream2_peek_ne64 bytestream2_peek_le64 +#endif + +static av_always_inline void bytestream2_init(GetByteContext *g, + const uint8_t *buf, + int buf_size) +{ + av_assert0(buf_size >= 0); + g->buffer = buf; + g->buffer_start = buf; + g->buffer_end = buf + buf_size; +} + +static av_always_inline void bytestream2_init_writer(PutByteContext *p, + uint8_t *buf, + int buf_size) +{ + av_assert0(buf_size >= 0); + p->buffer = buf; + p->buffer_start = buf; + p->buffer_end = buf + buf_size; + p->eof = 0; +} + +static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g) +{ + return g->buffer_end - g->buffer; +} + +static av_always_inline int bytestream2_get_bytes_left_p(PutByteContext *p) +{ + return p->buffer_end - p->buffer; +} + +static av_always_inline void bytestream2_skip(GetByteContext *g, + unsigned int size) +{ + g->buffer += FFMIN(g->buffer_end - g->buffer, size); +} + +static av_always_inline void bytestream2_skipu(GetByteContext *g, + unsigned int size) +{ + g->buffer += size; +} + +static av_always_inline void bytestream2_skip_p(PutByteContext *p, + unsigned int size) +{ + int size2; + if (p->eof) + return; + size2 = FFMIN(p->buffer_end - p->buffer, size); + if (size2 != size) + p->eof = 1; + p->buffer += size2; +} + +static av_always_inline int bytestream2_tell(GetByteContext *g) +{ + return (int)(g->buffer - g->buffer_start); +} + +static av_always_inline int bytestream2_tell_p(PutByteContext *p) +{ + return (int)(p->buffer - p->buffer_start); +} + +static av_always_inline int bytestream2_size(GetByteContext *g) +{ + return (int)(g->buffer_end - g->buffer_start); +} + +static av_always_inline int bytestream2_size_p(PutByteContext *p) +{ + return (int)(p->buffer_end - p->buffer_start); +} + +static av_always_inline int bytestream2_seek(GetByteContext *g, + int offset, + int whence) +{ + switch (whence) { + case SEEK_CUR: + offset = av_clip(offset, -(g->buffer - g->buffer_start), + g->buffer_end - g->buffer); + g->buffer += offset; + break; 
+ case SEEK_END: + offset = av_clip(offset, -(g->buffer_end - g->buffer_start), 0); + g->buffer = g->buffer_end + offset; + break; + case SEEK_SET: + offset = av_clip(offset, 0, g->buffer_end - g->buffer_start); + g->buffer = g->buffer_start + offset; + break; + default: + return AVERROR(EINVAL); + } + return bytestream2_tell(g); +} + +static av_always_inline int bytestream2_seek_p(PutByteContext *p, + int offset, + int whence) +{ + p->eof = 0; + switch (whence) { + case SEEK_CUR: + if (p->buffer_end - p->buffer < offset) + p->eof = 1; + offset = av_clip(offset, -(p->buffer - p->buffer_start), + p->buffer_end - p->buffer); + p->buffer += offset; + break; + case SEEK_END: + if (offset > 0) + p->eof = 1; + offset = av_clip(offset, -(p->buffer_end - p->buffer_start), 0); + p->buffer = p->buffer_end + offset; + break; + case SEEK_SET: + if (p->buffer_end - p->buffer_start < offset) + p->eof = 1; + offset = av_clip(offset, 0, p->buffer_end - p->buffer_start); + p->buffer = p->buffer_start + offset; + break; + default: + return AVERROR(EINVAL); + } + return bytestream2_tell_p(p); +} + +static av_always_inline unsigned int bytestream2_get_buffer(GetByteContext *g, + uint8_t *dst, + unsigned int size) +{ + int size2 = FFMIN(g->buffer_end - g->buffer, size); + memcpy(dst, g->buffer, size2); + g->buffer += size2; + return size2; +} + +static av_always_inline unsigned int bytestream2_get_bufferu(GetByteContext *g, + uint8_t *dst, + unsigned int size) +{ + memcpy(dst, g->buffer, size); + g->buffer += size; + return size; +} + +static av_always_inline unsigned int bytestream2_put_buffer(PutByteContext *p, + const uint8_t *src, + unsigned int size) +{ + int size2; + if (p->eof) + return 0; + size2 = FFMIN(p->buffer_end - p->buffer, size); + if (size2 != size) + p->eof = 1; + memcpy(p->buffer, src, size2); + p->buffer += size2; + return size2; +} + +static av_always_inline unsigned int bytestream2_put_bufferu(PutByteContext *p, + const uint8_t *src, + unsigned int size) +{ + 
memcpy(p->buffer, src, size); + p->buffer += size; + return size; +} + +static av_always_inline void bytestream2_set_buffer(PutByteContext *p, + const uint8_t c, + unsigned int size) +{ + int size2; + if (p->eof) + return; + size2 = FFMIN(p->buffer_end - p->buffer, size); + if (size2 != size) + p->eof = 1; + memset(p->buffer, c, size2); + p->buffer += size2; +} + +static av_always_inline void bytestream2_set_bufferu(PutByteContext *p, + const uint8_t c, + unsigned int size) +{ + memset(p->buffer, c, size); + p->buffer += size; +} + +static av_always_inline unsigned int bytestream2_get_eof(PutByteContext *p) +{ + return p->eof; +} + +static av_always_inline unsigned int bytestream2_copy_bufferu(PutByteContext *p, + GetByteContext *g, + unsigned int size) +{ + memcpy(p->buffer, g->buffer, size); + p->buffer += size; + g->buffer += size; + return size; +} + +static av_always_inline unsigned int bytestream2_copy_buffer(PutByteContext *p, + GetByteContext *g, + unsigned int size) +{ + int size2; + + if (p->eof) + return 0; + size = FFMIN(g->buffer_end - g->buffer, size); + size2 = FFMIN(p->buffer_end - p->buffer, size); + if (size2 != size) + p->eof = 1; + + return bytestream2_copy_bufferu(p, g, size2); +} + +static av_always_inline unsigned int bytestream_get_buffer(const uint8_t **b, + uint8_t *dst, + unsigned int size) +{ + memcpy(dst, *b, size); + (*b) += size; + return size; +} + +static av_always_inline void bytestream_put_buffer(uint8_t **b, + const uint8_t *src, + unsigned int size) +{ + memcpy(*b, src, size); + (*b) += size; +} + +#endif /* AVCODEC_BYTESTREAM_H */ diff --git a/TMessagesProj/jni/ffmpeg/include/libavcodec/get_bits.h b/TMessagesProj/jni/ffmpeg/include/libavcodec/get_bits.h new file mode 100644 index 000000000..a0695d318 --- /dev/null +++ b/TMessagesProj/jni/ffmpeg/include/libavcodec/get_bits.h @@ -0,0 +1,673 @@ +/* + * Copyright (c) 2004 Michael Niedermayer + * Copyright (c) 2016 Alexandra Hájková + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * bitstream reader API header. + */ + +#ifndef AVCODEC_GET_BITS_H +#define AVCODEC_GET_BITS_H + +#include + +#ifndef NEG_USR32 +# define NEG_USR32(a,s) (((uint32_t)(a))>>(32-(s))) +#endif + +/* + * Safe bitstream reading: + * optionally, the get_bits API can check to ensure that we + * don't read past input buffer boundaries. This is protected + * with CONFIG_SAFE_BITSTREAM_READER at the global level, and + * then below that with UNCHECKED_BITSTREAM_READER at the per- + * decoder level. This means that decoders that check internally + * can "#define UNCHECKED_BITSTREAM_READER 1" to disable + * overread checks. + * Boundary checking causes a minor performance penalty so for + * applications that won't want/need this, it can be disabled + * globally using "#define CONFIG_SAFE_BITSTREAM_READER 0". 
+ */ +#ifndef UNCHECKED_BITSTREAM_READER +#define UNCHECKED_BITSTREAM_READER !CONFIG_SAFE_BITSTREAM_READER +#endif + +#ifndef CACHED_BITSTREAM_READER +#define CACHED_BITSTREAM_READER 0 +#endif + +typedef struct GetBitContext { + const uint8_t *buffer, *buffer_end; +#if CACHED_BITSTREAM_READER + uint64_t cache; + unsigned bits_left; +#endif + int index; + int size_in_bits; + int size_in_bits_plus8; +} GetBitContext; + +static inline unsigned int get_bits(GetBitContext *s, int n); +static inline void skip_bits(GetBitContext *s, int n); +static inline unsigned int show_bits(GetBitContext *s, int n); + +/* Bitstream reader API docs: + * name + * arbitrary name which is used as prefix for the internal variables + * + * gb + * getbitcontext + * + * OPEN_READER(name, gb) + * load gb into local variables + * + * CLOSE_READER(name, gb) + * store local vars in gb + * + * UPDATE_CACHE(name, gb) + * Refill the internal cache from the bitstream. + * After this call at least MIN_CACHE_BITS will be available. + * + * GET_CACHE(name, gb) + * Will output the contents of the internal cache, + * next bit is MSB of 32 or 64 bits (FIXME 64 bits). + * + * SHOW_UBITS(name, gb, num) + * Will return the next num bits. + * + * SHOW_SBITS(name, gb, num) + * Will return the next num bits and do sign extension. + * + * SKIP_BITS(name, gb, num) + * Will skip over the next num bits. + * Note, this is equivalent to SKIP_CACHE; SKIP_COUNTER. + * + * SKIP_CACHE(name, gb, num) + * Will remove the next num bits from the cache (note SKIP_COUNTER + * MUST be called before UPDATE_CACHE / CLOSE_READER). + * + * SKIP_COUNTER(name, gb, num) + * Will increment the internal bit counter (see SKIP_CACHE & SKIP_BITS). + * + * LAST_SKIP_BITS(name, gb, num) + * Like SKIP_BITS, to be used if next call is UPDATE_CACHE or CLOSE_READER. + * + * BITS_LEFT(name, gb) + * Return the number of bits left + * + * For examples see get_bits, show_bits, skip_bits, get_vlc. 
+ */ + +#if CACHED_BITSTREAM_READER +# define MIN_CACHE_BITS 64 +#elif defined LONG_BITSTREAM_READER +# define MIN_CACHE_BITS 32 +#else +# define MIN_CACHE_BITS 25 +#endif + +#if !CACHED_BITSTREAM_READER + +#define OPEN_READER_NOSIZE(name, gb) \ + unsigned int name ## _index = (gb)->index; \ + unsigned int av_unused name ## _cache + +#if UNCHECKED_BITSTREAM_READER +#define OPEN_READER(name, gb) OPEN_READER_NOSIZE(name, gb) + +#define BITS_AVAILABLE(name, gb) 1 +#else +#define OPEN_READER(name, gb) \ + OPEN_READER_NOSIZE(name, gb); \ + unsigned int name ## _size_plus8 = (gb)->size_in_bits_plus8 + +#define BITS_AVAILABLE(name, gb) name ## _index < name ## _size_plus8 +#endif + +#define CLOSE_READER(name, gb) (gb)->index = name ## _index + +# ifdef LONG_BITSTREAM_READER + +# define UPDATE_CACHE_LE(name, gb) name ## _cache = \ + AV_RL64((gb)->buffer + (name ## _index >> 3)) >> (name ## _index & 7) + +# define UPDATE_CACHE_BE(name, gb) name ## _cache = \ + AV_RB64((gb)->buffer + (name ## _index >> 3)) >> (32 - (name ## _index & 7)) + +#else + +# define UPDATE_CACHE_LE(name, gb) name ## _cache = \ + AV_RL32((gb)->buffer + (name ## _index >> 3)) >> (name ## _index & 7) + +# define UPDATE_CACHE_BE(name, gb) name ## _cache = \ + AV_RB32((gb)->buffer + (name ## _index >> 3)) << (name ## _index & 7) + +#endif + + +#ifdef BITSTREAM_READER_LE + +# define UPDATE_CACHE(name, gb) UPDATE_CACHE_LE(name, gb) + +# define SKIP_CACHE(name, gb, num) name ## _cache >>= (num) + +#else + +# define UPDATE_CACHE(name, gb) UPDATE_CACHE_BE(name, gb) + +# define SKIP_CACHE(name, gb, num) name ## _cache <<= (num) + +#endif + +#if UNCHECKED_BITSTREAM_READER +# define SKIP_COUNTER(name, gb, num) name ## _index += (num) +#else +# define SKIP_COUNTER(name, gb, num) \ + name ## _index = FFMIN(name ## _size_plus8, name ## _index + (num)) +#endif + +#define BITS_LEFT(name, gb) ((int)((gb)->size_in_bits - name ## _index)) + +#define SKIP_BITS(name, gb, num) \ + do { \ + SKIP_CACHE(name, gb, num); \ + 
SKIP_COUNTER(name, gb, num); \ + } while (0) + +#define LAST_SKIP_BITS(name, gb, num) SKIP_COUNTER(name, gb, num) + +#define SHOW_UBITS_LE(name, gb, num) zero_extend(name ## _cache, num) +#define SHOW_SBITS_LE(name, gb, num) sign_extend(name ## _cache, num) + +#define SHOW_UBITS_BE(name, gb, num) NEG_USR32(name ## _cache, num) +#define SHOW_SBITS_BE(name, gb, num) NEG_SSR32(name ## _cache, num) + +#ifdef BITSTREAM_READER_LE +# define SHOW_UBITS(name, gb, num) SHOW_UBITS_LE(name, gb, num) +# define SHOW_SBITS(name, gb, num) SHOW_SBITS_LE(name, gb, num) +#else +# define SHOW_UBITS(name, gb, num) SHOW_UBITS_BE(name, gb, num) +# define SHOW_SBITS(name, gb, num) SHOW_SBITS_BE(name, gb, num) +#endif + +#define GET_CACHE(name, gb) ((uint32_t) name ## _cache) + +#endif + +static inline int get_bits_count(const GetBitContext *s) +{ +#if CACHED_BITSTREAM_READER + return s->index - s->bits_left; +#else + return s->index; +#endif +} + +#if CACHED_BITSTREAM_READER +static inline void refill_32(GetBitContext *s, int is_le) +{ +#if !UNCHECKED_BITSTREAM_READER + if (s->index >> 3 >= s->buffer_end - s->buffer) + return; +#endif + + if (is_le) + s->cache = (uint64_t)AV_RL32(s->buffer + (s->index >> 3)) << s->bits_left | s->cache; + else + s->cache = s->cache | (uint64_t)AV_RB32(s->buffer + (s->index >> 3)) << (32 - s->bits_left); + s->index += 32; + s->bits_left += 32; +} + +static inline void refill_64(GetBitContext *s, int is_le) +{ +#if !UNCHECKED_BITSTREAM_READER + if (s->index >> 3 >= s->buffer_end - s->buffer) + return; +#endif + + if (is_le) + s->cache = AV_RL64(s->buffer + (s->index >> 3)); + else + s->cache = AV_RB64(s->buffer + (s->index >> 3)); + s->index += 64; + s->bits_left = 64; +} + +static inline uint64_t get_val(GetBitContext *s, unsigned n, int is_le) +{ + uint64_t ret; + av_assert2(n>0 && n<=63); + if (is_le) { + ret = s->cache & ((UINT64_C(1) << n) - 1); + s->cache >>= n; + } else { + ret = s->cache >> (64 - n); + s->cache <<= n; + } + s->bits_left -= n; + 
return ret; +} + +static inline unsigned show_val(const GetBitContext *s, unsigned n) +{ +#ifdef BITSTREAM_READER_LE + return s->cache & ((UINT64_C(1) << n) - 1); +#else + return s->cache >> (64 - n); +#endif +} +#endif + +/** + * Skips the specified number of bits. + * @param n the number of bits to skip, + * For the UNCHECKED_BITSTREAM_READER this must not cause the distance + * from the start to overflow int32_t. Staying within the bitstream + padding + * is sufficient, too. + */ +static inline void skip_bits_long(GetBitContext *s, int n) +{ +#if CACHED_BITSTREAM_READER + skip_bits(s, n); +#else +#if UNCHECKED_BITSTREAM_READER + s->index += n; +#else + s->index += av_clip(n, -s->index, s->size_in_bits_plus8 - s->index); +#endif +#endif +} + +#if CACHED_BITSTREAM_READER +static inline void skip_remaining(GetBitContext *s, unsigned n) +{ +#ifdef BITSTREAM_READER_LE + s->cache >>= n; +#else + s->cache <<= n; +#endif + s->bits_left -= n; +} +#endif + +/** + * Read 1-25 bits. + */ +static inline unsigned int get_bits(GetBitContext *s, int n) +{ + register unsigned int tmp; +#if CACHED_BITSTREAM_READER + + av_assert2(n>0 && n<=32); + if (n > s->bits_left) { +#ifdef BITSTREAM_READER_LE + refill_32(s, 1); +#else + refill_32(s, 0); +#endif + if (s->bits_left < 32) + s->bits_left = n; + } + +#ifdef BITSTREAM_READER_LE + tmp = get_val(s, n, 1); +#else + tmp = get_val(s, n, 0); +#endif +#else + OPEN_READER(re, s); + av_assert2(n>0 && n<=25); + UPDATE_CACHE(re, s); + tmp = SHOW_UBITS(re, s, n); + LAST_SKIP_BITS(re, s, n); + CLOSE_READER(re, s); +#endif + av_assert2(tmp < UINT64_C(1) << n); + return tmp; +} + +static inline void skip_bits(GetBitContext *s, int n) +{ +#if CACHED_BITSTREAM_READER + if (n < s->bits_left) + skip_remaining(s, n); + else { + n -= s->bits_left; + s->cache = 0; + s->bits_left = 0; + + if (n >= 64) { + unsigned skip = (n / 8) * 8; + + n -= skip; + s->index += skip; + } +#ifdef BITSTREAM_READER_LE + refill_64(s, 1); +#else + refill_64(s, 0); +#endif + 
if (n) + skip_remaining(s, n); + } +#else + OPEN_READER(re, s); + LAST_SKIP_BITS(re, s, n); + CLOSE_READER(re, s); +#endif +} + +static inline unsigned int get_bits1(GetBitContext *s) +{ +#if CACHED_BITSTREAM_READER + if (!s->bits_left) +#ifdef BITSTREAM_READER_LE + refill_64(s, 1); +#else + refill_64(s, 0); +#endif + +#ifdef BITSTREAM_READER_LE + return get_val(s, 1, 1); +#else + return get_val(s, 1, 0); +#endif +#else + unsigned int index = s->index; + uint8_t result = s->buffer[index >> 3]; +#ifdef BITSTREAM_READER_LE + result >>= index & 7; + result &= 1; +#else + result <<= index & 7; + result >>= 8 - 1; +#endif +#if !UNCHECKED_BITSTREAM_READER + if (s->index < s->size_in_bits_plus8) +#endif + index++; + s->index = index; + + return result; +#endif +} + +static inline void skip_bits1(GetBitContext *s) +{ + skip_bits(s, 1); +} + +/** + * Read 0-32 bits. + */ +static inline unsigned int get_bits_long(GetBitContext *s, int n) +{ + av_assert2(n>=0 && n<=32); + if (!n) { + return 0; +#if CACHED_BITSTREAM_READER + } + return get_bits(s, n); +#else + } else if (n <= MIN_CACHE_BITS) { + return get_bits(s, n); + } else { +#ifdef BITSTREAM_READER_LE + unsigned ret = get_bits(s, 16); + return ret | (get_bits(s, n - 16) << 16); +#else + unsigned ret = get_bits(s, 16) << (n - 16); + return ret | get_bits(s, n - 16); +#endif + } +#endif +} + +/** + * Read 0-64 bits. 
+ */ +static inline uint64_t get_bits64(GetBitContext *s, int n) +{ + if (n <= 32) { + return get_bits_long(s, n); + } else { +#ifdef BITSTREAM_READER_LE + uint64_t ret = get_bits_long(s, 32); + return ret | (uint64_t) get_bits_long(s, n - 32) << 32; +#else + uint64_t ret = (uint64_t) get_bits_long(s, n - 32) << 32; + return ret | get_bits_long(s, 32); +#endif + } +} + +static inline int check_marker(void *logctx, GetBitContext *s, const char *msg) +{ + int bit = get_bits1(s); + if (!bit) + av_log(logctx, AV_LOG_INFO, "Marker bit missing at %d of %d %s\n", + get_bits_count(s) - 1, s->size_in_bits, msg); + + return bit; +} + +static inline int init_get_bits_xe(GetBitContext *s, const uint8_t *buffer, + int bit_size, int is_le) +{ + int buffer_size; + int ret = 0; + + if (bit_size >= INT_MAX - FFMAX(7, AV_INPUT_BUFFER_PADDING_SIZE*8) || bit_size < 0 || !buffer) { + bit_size = 0; + buffer = NULL; + ret = AVERROR_INVALIDDATA; + } + + buffer_size = (bit_size + 7) >> 3; + + s->buffer = buffer; + s->size_in_bits = bit_size; + s->size_in_bits_plus8 = bit_size + 8; + s->buffer_end = buffer + buffer_size; + s->index = 0; + +#if CACHED_BITSTREAM_READER + s->cache = 0; + s->bits_left = 0; + refill_64(s, is_le); +#endif + + return ret; +} + +/** + * Initialize GetBitContext. + * @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes + * larger than the actual read bits because some optimized bitstream + * readers read 32 or 64 bit at once and could read over the end + * @param bit_size the size of the buffer in bits + * @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow. + */ +static inline int init_get_bits(GetBitContext *s, const uint8_t *buffer, + int bit_size) +{ +#ifdef BITSTREAM_READER_LE + return init_get_bits_xe(s, buffer, bit_size, 1); +#else + return init_get_bits_xe(s, buffer, bit_size, 0); +#endif +} + +/** + * Initialize GetBitContext. 
+ * @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes + * larger than the actual read bits because some optimized bitstream + * readers read 32 or 64 bit at once and could read over the end + * @param byte_size the size of the buffer in bytes + * @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow. + */ +static inline int init_get_bits8(GetBitContext *s, const uint8_t *buffer, + int byte_size) +{ + if (byte_size > INT_MAX / 8 || byte_size < 0) + byte_size = -1; + return init_get_bits(s, buffer, byte_size * 8); +} + +static inline int init_get_bits8_le(GetBitContext *s, const uint8_t *buffer, + int byte_size) +{ + if (byte_size > INT_MAX / 8 || byte_size < 0) + byte_size = -1; + return init_get_bits_xe(s, buffer, byte_size * 8, 1); +} + +static inline const uint8_t *align_get_bits(GetBitContext *s) +{ + int n = -get_bits_count(s) & 7; + if (n) + skip_bits(s, n); + return s->buffer + (s->index >> 3); +} + +/** + * If the vlc code is invalid and max_depth=1, then no bits will be removed. + * If the vlc code is invalid and max_depth>1, then the number of bits removed + * is undefined. 
+ */ +#define GET_VLC(code, name, gb, table, bits, max_depth) \ + do { \ + int n, nb_bits; \ + unsigned int index; \ + \ + index = SHOW_UBITS(name, gb, bits); \ + code = table[index][0]; \ + n = table[index][1]; \ + \ + if (max_depth > 1 && n < 0) { \ + LAST_SKIP_BITS(name, gb, bits); \ + UPDATE_CACHE(name, gb); \ + \ + nb_bits = -n; \ + \ + index = SHOW_UBITS(name, gb, nb_bits) + code; \ + code = table[index][0]; \ + n = table[index][1]; \ + if (max_depth > 2 && n < 0) { \ + LAST_SKIP_BITS(name, gb, nb_bits); \ + UPDATE_CACHE(name, gb); \ + \ + nb_bits = -n; \ + \ + index = SHOW_UBITS(name, gb, nb_bits) + code; \ + code = table[index][0]; \ + n = table[index][1]; \ + } \ + } \ + SKIP_BITS(name, gb, n); \ + } while (0) + +#define GET_RL_VLC(level, run, name, gb, table, bits, \ + max_depth, need_update) \ + do { \ + int n, nb_bits; \ + unsigned int index; \ + \ + index = SHOW_UBITS(name, gb, bits); \ + level = table[index].level; \ + n = table[index].len; \ + \ + if (max_depth > 1 && n < 0) { \ + SKIP_BITS(name, gb, bits); \ + if (need_update) { \ + UPDATE_CACHE(name, gb); \ + } \ + \ + nb_bits = -n; \ + \ + index = SHOW_UBITS(name, gb, nb_bits) + level; \ + level = table[index].level; \ + n = table[index].len; \ + if (max_depth > 2 && n < 0) { \ + LAST_SKIP_BITS(name, gb, nb_bits); \ + if (need_update) { \ + UPDATE_CACHE(name, gb); \ + } \ + nb_bits = -n; \ + \ + index = SHOW_UBITS(name, gb, nb_bits) + level; \ + level = table[index].level; \ + n = table[index].len; \ + } \ + } \ + run = table[index].run; \ + SKIP_BITS(name, gb, n); \ + } while (0) + +static inline int decode012(GetBitContext *gb) +{ + int n; + n = get_bits1(gb); + if (n == 0) + return 0; + else + return get_bits1(gb) + 1; +} + +static inline int decode210(GetBitContext *gb) +{ + if (get_bits1(gb)) + return 0; + else + return 2 - get_bits1(gb); +} + +static inline int get_bits_left(GetBitContext *gb) +{ + return gb->size_in_bits - get_bits_count(gb); +} + +static inline int 
skip_1stop_8data_bits(GetBitContext *gb) +{ + if (get_bits_left(gb) <= 0) + return AVERROR_INVALIDDATA; + + while (get_bits1(gb)) { + skip_bits(gb, 8); + if (get_bits_left(gb) <= 0) + return AVERROR_INVALIDDATA; + } + + return 0; +} + +static inline unsigned int show_bits_long(GetBitContext *s, int n) +{ + if (n <= MIN_CACHE_BITS) { + return show_bits(s, n); + } else { + GetBitContext gb = *s; + return get_bits_long(&gb, n); + } +} + +#endif /* AVCODEC_GET_BITS_H */ diff --git a/TMessagesProj/jni/ffmpeg/include/libavcodec/golomb.h b/TMessagesProj/jni/ffmpeg/include/libavcodec/golomb.h new file mode 100644 index 000000000..637111af5 --- /dev/null +++ b/TMessagesProj/jni/ffmpeg/include/libavcodec/golomb.h @@ -0,0 +1,478 @@ +/* + * exp golomb vlc stuff + * Copyright (c) 2003 Michael Niedermayer + * Copyright (c) 2004 Alex Beregszaszi + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * @brief + * exp golomb vlc stuff + * @author Michael Niedermayer and Alex Beregszaszi + */ + +#ifndef AVCODEC_GOLOMB_H +#define AVCODEC_GOLOMB_H + +#include <stdint.h> + +#include "get_bits.h" + +#define INVALID_VLC 0x80000000 + +extern const uint8_t ff_golomb_vlc_len[512]; +extern const uint8_t ff_ue_golomb_vlc_code[512]; +extern const int8_t ff_se_golomb_vlc_code[512]; +extern const uint8_t ff_ue_golomb_len[256]; + +extern const uint8_t ff_interleaved_golomb_vlc_len[256]; +extern const uint8_t ff_interleaved_ue_golomb_vlc_code[256]; +extern const int8_t ff_interleaved_se_golomb_vlc_code[256]; +extern const uint8_t ff_interleaved_dirac_golomb_vlc_code[256]; + +/** + * Read an unsigned Exp-Golomb code in the range 0 to 8190. + * + * @returns the read value or a negative error code. 
+ */ +static inline int get_ue_golomb(GetBitContext *gb) +{ + unsigned int buf; + +#if CACHED_BITSTREAM_READER + buf = show_bits_long(gb, 32); + + if (buf >= (1 << 27)) { + buf >>= 32 - 9; + skip_bits_long(gb, ff_golomb_vlc_len[buf]); + + return ff_ue_golomb_vlc_code[buf]; + } else { + int log = 2 * av_log2(buf) - 31; + buf >>= log; + buf--; + skip_bits_long(gb, 32 - log); + + return buf; + } +#else + OPEN_READER(re, gb); + UPDATE_CACHE(re, gb); + buf = GET_CACHE(re, gb); + + if (buf >= (1 << 27)) { + buf >>= 32 - 9; + LAST_SKIP_BITS(re, gb, ff_golomb_vlc_len[buf]); + CLOSE_READER(re, gb); + + return ff_ue_golomb_vlc_code[buf]; + } else { + int log = 2 * av_log2(buf) - 31; + LAST_SKIP_BITS(re, gb, 32 - log); + CLOSE_READER(re, gb); + if (log < 7) { + av_log(NULL, AV_LOG_ERROR, "Invalid UE golomb code\n"); + return AVERROR_INVALIDDATA; + } + buf >>= log; + buf--; + + return buf; + } +#endif +} + +/** + * Read an unsigned Exp-Golomb code in the range 0 to UINT32_MAX-1. + */ +static inline unsigned get_ue_golomb_long(GetBitContext *gb) +{ + unsigned buf, log; + + buf = show_bits_long(gb, 32); + log = 31 - av_log2(buf); + skip_bits_long(gb, log); + + return get_bits_long(gb, log + 1) - 1; +} + +/** + * read unsigned exp golomb code, constraint to a max of 31. + * the return value is undefined if the stored value exceeds 31. 
+ */ +static inline int get_ue_golomb_31(GetBitContext *gb) +{ + unsigned int buf; + +#if CACHED_BITSTREAM_READER + buf = show_bits_long(gb, 32); + + buf >>= 32 - 9; + skip_bits_long(gb, ff_golomb_vlc_len[buf]); +#else + + OPEN_READER(re, gb); + UPDATE_CACHE(re, gb); + buf = GET_CACHE(re, gb); + + buf >>= 32 - 9; + LAST_SKIP_BITS(re, gb, ff_golomb_vlc_len[buf]); + CLOSE_READER(re, gb); +#endif + + return ff_ue_golomb_vlc_code[buf]; +} + +static inline unsigned get_interleaved_ue_golomb(GetBitContext *gb) +{ + uint32_t buf; + +#if CACHED_BITSTREAM_READER + buf = show_bits_long(gb, 32); + + if (buf & 0xAA800000) { + buf >>= 32 - 8; + skip_bits_long(gb, ff_interleaved_golomb_vlc_len[buf]); + + return ff_interleaved_ue_golomb_vlc_code[buf]; + } else { + unsigned ret = 1; + + do { + buf >>= 32 - 8; + skip_bits_long(gb, FFMIN(ff_interleaved_golomb_vlc_len[buf], 8)); + + if (ff_interleaved_golomb_vlc_len[buf] != 9) { + ret <<= (ff_interleaved_golomb_vlc_len[buf] - 1) >> 1; + ret |= ff_interleaved_dirac_golomb_vlc_code[buf]; + break; + } + ret = (ret << 4) | ff_interleaved_dirac_golomb_vlc_code[buf]; + buf = show_bits_long(gb, 32); + } while (get_bits_left(gb) > 0); + + return ret - 1; + } +#else + OPEN_READER(re, gb); + UPDATE_CACHE(re, gb); + buf = GET_CACHE(re, gb); + + if (buf & 0xAA800000) { + buf >>= 32 - 8; + LAST_SKIP_BITS(re, gb, ff_interleaved_golomb_vlc_len[buf]); + CLOSE_READER(re, gb); + + return ff_interleaved_ue_golomb_vlc_code[buf]; + } else { + unsigned ret = 1; + + do { + buf >>= 32 - 8; + LAST_SKIP_BITS(re, gb, + FFMIN(ff_interleaved_golomb_vlc_len[buf], 8)); + + if (ff_interleaved_golomb_vlc_len[buf] != 9) { + ret <<= (ff_interleaved_golomb_vlc_len[buf] - 1) >> 1; + ret |= ff_interleaved_dirac_golomb_vlc_code[buf]; + break; + } + ret = (ret << 4) | ff_interleaved_dirac_golomb_vlc_code[buf]; + UPDATE_CACHE(re, gb); + buf = GET_CACHE(re, gb); + } while (ret<0x8000000U && BITS_AVAILABLE(re, gb)); + + CLOSE_READER(re, gb); + return ret - 1; + } +#endif +} + 
+/** + * read unsigned truncated exp golomb code. + */ +static inline int get_te0_golomb(GetBitContext *gb, int range) +{ + av_assert2(range >= 1); + + if (range == 1) + return 0; + else if (range == 2) + return get_bits1(gb) ^ 1; + else + return get_ue_golomb(gb); +} + +/** + * read unsigned truncated exp golomb code. + */ +static inline int get_te_golomb(GetBitContext *gb, int range) +{ + av_assert2(range >= 1); + + if (range == 2) + return get_bits1(gb) ^ 1; + else + return get_ue_golomb(gb); +} + +/** + * read signed exp golomb code. + */ +static inline int get_se_golomb(GetBitContext *gb) +{ + unsigned int buf; + +#if CACHED_BITSTREAM_READER + buf = show_bits_long(gb, 32); + + if (buf >= (1 << 27)) { + buf >>= 32 - 9; + skip_bits_long(gb, ff_golomb_vlc_len[buf]); + + return ff_se_golomb_vlc_code[buf]; + } else { + int log = 2 * av_log2(buf) - 31; + buf >>= log; + + skip_bits_long(gb, 32 - log); + + if (buf & 1) + buf = -(buf >> 1); + else + buf = (buf >> 1); + + return buf; + } +#else + OPEN_READER(re, gb); + UPDATE_CACHE(re, gb); + buf = GET_CACHE(re, gb); + + if (buf >= (1 << 27)) { + buf >>= 32 - 9; + LAST_SKIP_BITS(re, gb, ff_golomb_vlc_len[buf]); + CLOSE_READER(re, gb); + + return ff_se_golomb_vlc_code[buf]; + } else { + int log = av_log2(buf), sign; + LAST_SKIP_BITS(re, gb, 31 - log); + UPDATE_CACHE(re, gb); + buf = GET_CACHE(re, gb); + + buf >>= log; + + LAST_SKIP_BITS(re, gb, 32 - log); + CLOSE_READER(re, gb); + + sign = -(buf & 1); + buf = ((buf >> 1) ^ sign) - sign; + + return buf; + } +#endif +} + +static inline int get_se_golomb_long(GetBitContext *gb) +{ + unsigned int buf = get_ue_golomb_long(gb); + int sign = (buf & 1) - 1; + return ((buf >> 1) ^ sign) + 1; +} + +static inline int get_interleaved_se_golomb(GetBitContext *gb) +{ + unsigned int buf; + +#if CACHED_BITSTREAM_READER + buf = show_bits_long(gb, 32); + + if (buf & 0xAA800000) { + buf >>= 32 - 8; + skip_bits_long(gb, ff_interleaved_golomb_vlc_len[buf]); + + return 
ff_interleaved_se_golomb_vlc_code[buf]; + } else { + int log; + skip_bits(gb, 8); + buf |= 1 | show_bits(gb, 24); + + if ((buf & 0xAAAAAAAA) == 0) + return INVALID_VLC; + + for (log = 31; (buf & 0x80000000) == 0; log--) + buf = (buf << 2) - ((buf << log) >> (log - 1)) + (buf >> 30); + + skip_bits_long(gb, 63 - 2 * log - 8); + + return (signed) (((((buf << log) >> log) - 1) ^ -(buf & 0x1)) + 1) >> 1; + } +#else + OPEN_READER(re, gb); + UPDATE_CACHE(re, gb); + buf = GET_CACHE(re, gb); + + if (buf & 0xAA800000) { + buf >>= 32 - 8; + LAST_SKIP_BITS(re, gb, ff_interleaved_golomb_vlc_len[buf]); + CLOSE_READER(re, gb); + + return ff_interleaved_se_golomb_vlc_code[buf]; + } else { + int log; + LAST_SKIP_BITS(re, gb, 8); + UPDATE_CACHE(re, gb); + buf |= 1 | (GET_CACHE(re, gb) >> 8); + + if ((buf & 0xAAAAAAAA) == 0) + return INVALID_VLC; + + for (log = 31; (buf & 0x80000000) == 0; log--) + buf = (buf << 2) - ((buf << log) >> (log - 1)) + (buf >> 30); + + LAST_SKIP_BITS(re, gb, 63 - 2 * log - 8); + CLOSE_READER(re, gb); + + return (signed) (((((buf << log) >> log) - 1) ^ -(buf & 0x1)) + 1) >> 1; + } +#endif +} + +static inline int dirac_get_se_golomb(GetBitContext *gb) +{ + uint32_t ret = get_interleaved_ue_golomb(gb); + + if (ret) { + int sign = -get_bits1(gb); + ret = (ret ^ sign) - sign; + } + + return ret; +} + +/** + * read unsigned golomb rice code (ffv1). 
+ */ +static inline int get_ur_golomb(GetBitContext *gb, int k, int limit, + int esc_len) +{ + unsigned int buf; + int log; + +#if CACHED_BITSTREAM_READER + buf = show_bits_long(gb, 32); + + log = av_log2(buf); + + if (log > 31 - limit) { + buf >>= log - k; + buf += (30 - log) << k; + skip_bits_long(gb, 32 + k - log); + + return buf; + } else { + skip_bits_long(gb, limit); + buf = get_bits_long(gb, esc_len); + + return buf + limit - 1; + } +#else + OPEN_READER(re, gb); + UPDATE_CACHE(re, gb); + buf = GET_CACHE(re, gb); + + log = av_log2(buf); + + if (log > 31 - limit) { + buf >>= log - k; + buf += (30U - log) << k; + LAST_SKIP_BITS(re, gb, 32 + k - log); + CLOSE_READER(re, gb); + + return buf; + } else { + LAST_SKIP_BITS(re, gb, limit); + UPDATE_CACHE(re, gb); + + buf = SHOW_UBITS(re, gb, esc_len); + + LAST_SKIP_BITS(re, gb, esc_len); + CLOSE_READER(re, gb); + + return buf + limit - 1; + } +#endif +} + +#ifdef TRACE + +static inline int get_ue(GetBitContext *s, const char *file, const char *func, + int line) +{ + int show = show_bits(s, 24); + int pos = get_bits_count(s); + int i = get_ue_golomb(s); + int len = get_bits_count(s) - pos; + int bits = show >> (24 - len); + + av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d ue @%5d in %s %s:%d\n", + bits, len, i, pos, file, func, line); + + return i; +} + +static inline int get_se(GetBitContext *s, const char *file, const char *func, + int line) +{ + int show = show_bits(s, 24); + int pos = get_bits_count(s); + int i = get_se_golomb(s); + int len = get_bits_count(s) - pos; + int bits = show >> (24 - len); + + av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d se @%5d in %s %s:%d\n", + bits, len, i, pos, file, func, line); + + return i; +} + +static inline int get_te(GetBitContext *s, int r, char *file, const char *func, + int line) +{ + int show = show_bits(s, 24); + int pos = get_bits_count(s); + int i = get_te0_golomb(s, r); + int len = get_bits_count(s) - pos; + int bits = show >> (24 - len); + + av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d 
te @%5d in %s %s:%d\n", + bits, len, i, pos, file, func, line); + + return i; +} + +#define get_ue_golomb(a) get_ue(a, __FILE__, __func__, __LINE__) +#define get_se_golomb(a) get_se(a, __FILE__, __func__, __LINE__) +#define get_te_golomb(a, r) get_te(a, r, __FILE__, __func__, __LINE__) +#define get_te0_golomb(a, r) get_te(a, r, __FILE__, __func__, __LINE__) + +#endif /* TRACE */ + +#endif /* AVCODEC_GOLOMB_H */ diff --git a/TMessagesProj/jni/ffmpeg/include/libavcodec/vlc.h b/TMessagesProj/jni/ffmpeg/include/libavcodec/vlc.h new file mode 100644 index 000000000..42ccddf3f --- /dev/null +++ b/TMessagesProj/jni/ffmpeg/include/libavcodec/vlc.h @@ -0,0 +1,81 @@ +/* + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_VLC_H +#define AVCODEC_VLC_H + +#include <stdint.h> + +#define VLC_TYPE int16_t + +typedef struct VLC { + int bits; + VLC_TYPE (*table)[2]; ///< code, bits + int table_size, table_allocated; +} VLC; + +typedef struct RL_VLC_ELEM { + int16_t level; + int8_t len; + uint8_t run; +} RL_VLC_ELEM; + +#define init_vlc(vlc, nb_bits, nb_codes, \ + bits, bits_wrap, bits_size, \ + codes, codes_wrap, codes_size, \ + flags) \ + ff_init_vlc_sparse(vlc, nb_bits, nb_codes, \ + bits, bits_wrap, bits_size, \ + codes, codes_wrap, codes_size, \ + NULL, 0, 0, flags) + +int ff_init_vlc_sparse(VLC *vlc, int nb_bits, int nb_codes, + const void *bits, int bits_wrap, int bits_size, + const void *codes, int codes_wrap, int codes_size, + const void *symbols, int symbols_wrap, int symbols_size, + int flags); +void ff_free_vlc(VLC *vlc); + +#define INIT_VLC_LE 2 +#define INIT_VLC_USE_NEW_STATIC 4 + +#define INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, h, i, j, static_size) \ + do { \ + static VLC_TYPE table[static_size][2]; \ + (vlc)->table = table; \ + (vlc)->table_allocated = static_size; \ + ff_init_vlc_sparse(vlc, bits, a, b, c, d, e, f, g, h, i, j, \ + INIT_VLC_USE_NEW_STATIC); \ + } while (0) + +#define INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, h, i, j, static_size) \ + do { \ + static VLC_TYPE table[static_size][2]; \ + (vlc)->table = table; \ + (vlc)->table_allocated = static_size; \ + ff_init_vlc_sparse(vlc, bits, a, b, c, d, e, f, g, h, i, j, \ + INIT_VLC_USE_NEW_STATIC | INIT_VLC_LE); \ + } while (0) + +#define INIT_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \ + INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size) + +#define INIT_LE_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \ 
+ INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size) + +#endif /* AVCODEC_VLC_H */ diff --git a/TMessagesProj/jni/ffmpeg/include/libavutil/intmath.h b/TMessagesProj/jni/ffmpeg/include/libavutil/intmath.h new file mode 100644 index 000000000..9573109e9 --- /dev/null +++ b/TMessagesProj/jni/ffmpeg/include/libavutil/intmath.h @@ -0,0 +1,165 @@ +/* + * Copyright (c) 2010 Mans Rullgard + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVUTIL_INTMATH_H +#define AVUTIL_INTMATH_H + +#include <stdint.h> + +#include "config.h" +#include "attributes.h" + +#if ARCH_ARM +# include "arm/intmath.h" +#endif +#if ARCH_X86 +# include "x86/intmath.h" +#endif + +#if HAVE_FAST_CLZ +#if AV_GCC_VERSION_AT_LEAST(3,4) +#ifndef ff_log2 +# define ff_log2(x) (31 - __builtin_clz((x)|1)) +# ifndef ff_log2_16bit +# define ff_log2_16bit av_log2 +# endif +#endif /* ff_log2 */ +#endif /* AV_GCC_VERSION_AT_LEAST(3,4) */ +#endif + +extern const uint8_t ff_log2_tab[256]; + +#ifndef ff_log2 +#define ff_log2 ff_log2_c +static av_always_inline av_const int ff_log2_c(unsigned int v) +{ + int n = 0; + if (v & 0xffff0000) { + v >>= 16; + n += 16; + } + if (v & 0xff00) { + v >>= 8; + n += 8; + } + n += ff_log2_tab[v]; + + return n; +} +#endif + +#ifndef 
ff_log2_16bit +#define ff_log2_16bit ff_log2_16bit_c +static av_always_inline av_const int ff_log2_16bit_c(unsigned int v) +{ + int n = 0; + if (v & 0xff00) { + v >>= 8; + n += 8; + } + n += ff_log2_tab[v]; + + return n; +} +#endif + +#define av_log2 ff_log2 +#define av_log2_16bit ff_log2_16bit + +/** + * @addtogroup lavu_math + * @{ + */ + +#if HAVE_FAST_CLZ +#if AV_GCC_VERSION_AT_LEAST(3,4) +#ifndef ff_ctz +#define ff_ctz(v) __builtin_ctz(v) +#endif +#ifndef ff_ctzll +#define ff_ctzll(v) __builtin_ctzll(v) +#endif +#ifndef ff_clz +#define ff_clz(v) __builtin_clz(v) +#endif +#endif +#endif + +#ifndef ff_ctz +#define ff_ctz ff_ctz_c +/** + * Trailing zero bit count. + * + * @param v input value. If v is 0, the result is undefined. + * @return the number of trailing 0-bits + */ +/* We use the De-Bruijn method outlined in: + * http://supertech.csail.mit.edu/papers/debruijn.pdf. */ +static av_always_inline av_const int ff_ctz_c(int v) +{ + static const uint8_t debruijn_ctz32[32] = { + 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8, + 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9 + }; + return debruijn_ctz32[(uint32_t)((v & -v) * 0x077CB531U) >> 27]; +} +#endif + +#ifndef ff_ctzll +#define ff_ctzll ff_ctzll_c +/* We use the De-Bruijn method outlined in: + * http://supertech.csail.mit.edu/papers/debruijn.pdf. 
*/ +static av_always_inline av_const int ff_ctzll_c(long long v) +{ + static const uint8_t debruijn_ctz64[64] = { + 0, 1, 2, 53, 3, 7, 54, 27, 4, 38, 41, 8, 34, 55, 48, 28, + 62, 5, 39, 46, 44, 42, 22, 9, 24, 35, 59, 56, 49, 18, 29, 11, + 63, 52, 6, 26, 37, 40, 33, 47, 61, 45, 43, 21, 23, 58, 17, 10, + 51, 25, 36, 32, 60, 20, 57, 16, 50, 31, 19, 15, 30, 14, 13, 12 + }; + return debruijn_ctz64[(uint64_t)((v & -v) * 0x022FDD63CC95386DU) >> 58]; +} +#endif + +#ifndef ff_clz +#define ff_clz ff_clz_c +static av_always_inline av_const unsigned ff_clz_c(unsigned x) +{ + unsigned i = sizeof(x) * 8; + + while (x) { + x >>= 1; + i--; + } + + return i; +} +#endif + +#if AV_GCC_VERSION_AT_LEAST(3,4) +#ifndef av_parity +#define av_parity __builtin_parity +#endif +#endif + +/** + * @} + */ +#endif /* AVUTIL_INTMATH_H */ diff --git a/TMessagesProj/jni/gifvideo.cpp b/TMessagesProj/jni/gifvideo.cpp index 9a7a51c5a..6d28f30ef 100644 --- a/TMessagesProj/jni/gifvideo.cpp +++ b/TMessagesProj/jni/gifvideo.cpp @@ -10,12 +10,18 @@ #include #include #include "tgnet/ConnectionsManager.h" +#include "voip/webrtc/common_video/h264/sps_parser.h" +#include "voip/webrtc/common_video/h264/h264_common.h" #include "c_utils.h" extern "C" { #include #include +#include +#include +#include #include +#include #include } @@ -35,6 +41,53 @@ jmethodID jclass_AnimatedFileDrawableStream_cancel; jmethodID jclass_AnimatedFileDrawableStream_isFinishedLoadingFile; jmethodID jclass_AnimatedFileDrawableStream_getFinishedFilePath; +typedef struct H2645NAL { + uint8_t *rbsp_buffer; + int size; + const uint8_t *data; + int size_bits; + int raw_size; + const uint8_t *raw_data; + int type; + int temporal_id; + int nuh_layer_id; + int skipped_bytes; + int skipped_bytes_pos_size; + int *skipped_bytes_pos; + int ref_idc; + GetBitContext gb; +} H2645NAL; + +typedef struct H2645RBSP { + uint8_t *rbsp_buffer; + AVBufferRef *rbsp_buffer_ref; + int rbsp_buffer_alloc_size; + int rbsp_buffer_size; +} H2645RBSP; + +typedef struct 
H2645Packet { + H2645NAL *nals; + H2645RBSP rbsp; + int nb_nals; + int nals_allocated; + unsigned nal_buffer_size; +} H2645Packet; + +void ff_h2645_packet_uninit(H2645Packet *pkt) { + int i; + for (i = 0; i < pkt->nals_allocated; i++) { + av_freep(&pkt->nals[i].skipped_bytes_pos); + } + av_freep(&pkt->nals); + pkt->nals_allocated = pkt->nal_buffer_size = 0; + if (pkt->rbsp.rbsp_buffer_ref) { + av_buffer_unref(&pkt->rbsp.rbsp_buffer_ref); + pkt->rbsp.rbsp_buffer = NULL; + } else + av_freep(&pkt->rbsp.rbsp_buffer); + pkt->rbsp.rbsp_buffer_alloc_size = pkt->rbsp.rbsp_buffer_size = 0; +} + typedef struct VideoInfo { ~VideoInfo() { @@ -88,6 +141,7 @@ typedef struct VideoInfo { fd = -1; } + ff_h2645_packet_uninit(&h2645Packet); av_packet_unref(&orig_pkt); video_stream_idx = -1; @@ -108,6 +162,13 @@ typedef struct VideoInfo { bool stopped = false; bool seeking = false; + int firstWidth = 0; + int firstHeight = 0; + + bool dropFrames = false; + + H2645Packet h2645Packet = {nullptr}; + int32_t dst_linesize[1]; struct SwsContext *sws_ctx = nullptr; @@ -121,12 +182,24 @@ typedef struct VideoInfo { int64_t last_seek_p = 0; }; +void custom_log(void *ptr, int level, const char* fmt, va_list vl){ + va_list vl2; + char line[1024]; + static int print_prefix = 1; + + va_copy(vl2, vl); + av_log_format_line(ptr, level, fmt, vl2, line, sizeof(line), &print_prefix); + va_end(vl2); + + LOGE(line); +} + int open_codec_context(int *stream_idx, AVCodecContext **dec_ctx, AVFormatContext *fmt_ctx, enum AVMediaType type) { int ret, stream_index; AVStream *st; AVCodec *dec = NULL; AVDictionary *opts = NULL; - + ret = av_find_best_stream(fmt_ctx, type, -1, -1, NULL, 0); if (ret < 0) { LOGE("can't find %s stream in input file", av_get_media_type_string(type)); @@ -159,19 +232,481 @@ int open_codec_context(int *stream_idx, AVCodecContext **dec_ctx, AVFormatContex } *stream_idx = stream_index; } - + return 0; } + +#define MAX_MBPAIR_SIZE (256*1024) + +int ff_h2645_extract_rbsp(const uint8_t *src, 
int length, H2645RBSP *rbsp, H2645NAL *nal) +{ + int i, si, di; + uint8_t *dst; + + nal->skipped_bytes = 0; +#define STARTCODE_TEST \ + if (i + 2 < length && src[i + 1] == 0 && src[i + 2] <= 3) { \ + if (src[i + 2] != 3 && src[i + 2] != 0) { \ + /* startcode, so we must be past the end */ \ + length = i; \ + } \ + break; \ + } + + for (i = 0; i + 1 < length; i += 2) { + if (src[i]) + continue; + if (i > 0 && src[i - 1] == 0) + i--; + STARTCODE_TEST; + } + + if (i > length) + i = length; + + nal->rbsp_buffer = &rbsp->rbsp_buffer[rbsp->rbsp_buffer_size]; + dst = nal->rbsp_buffer; + + memcpy(dst, src, i); + si = di = i; + while (si + 2 < length) { + if (src[si + 2] > 3) { + dst[di++] = src[si++]; + dst[di++] = src[si++]; + } else if (src[si] == 0 && src[si + 1] == 0 && src[si + 2] != 0) { + if (src[si + 2] == 3) { + dst[di++] = 0; + dst[di++] = 0; + si += 3; + + if (nal->skipped_bytes_pos) { + nal->skipped_bytes++; + if (nal->skipped_bytes_pos_size < nal->skipped_bytes) { + nal->skipped_bytes_pos_size *= 2; + av_reallocp_array(&nal->skipped_bytes_pos, + nal->skipped_bytes_pos_size, + sizeof(*nal->skipped_bytes_pos)); + if (!nal->skipped_bytes_pos) { + nal->skipped_bytes_pos_size = 0; + return AVERROR(ENOMEM); + } + } + if (nal->skipped_bytes_pos) + nal->skipped_bytes_pos[nal->skipped_bytes-1] = di - 1; + } + continue; + } else // next start code + goto nsc; + } + + dst[di++] = src[si++]; + } + while (si < length) + dst[di++] = src[si++]; + + nsc: + memset(dst + di, 0, AV_INPUT_BUFFER_PADDING_SIZE); + + nal->data = dst; + nal->size = di; + nal->raw_data = src; + nal->raw_size = si; + rbsp->rbsp_buffer_size += si; + + return si; +} + +static inline int get_nalsize(int nal_length_size, const uint8_t *buf, int buf_size, int *buf_index) { + int i, nalsize = 0; + if (*buf_index >= buf_size - nal_length_size) { + return AVERROR(EAGAIN); + } + for (i = 0; i < nal_length_size; i++) + nalsize = ((unsigned)nalsize << 8) | buf[(*buf_index)++]; + if (nalsize <= 0 || nalsize > 
buf_size - *buf_index) { + return AVERROR_INVALIDDATA; + } + return nalsize; +} + +static int find_next_start_code(const uint8_t *buf, const uint8_t *next_avc) { + int i = 0; + if (buf + 3 >= next_avc) + return next_avc - buf; + while (buf + i + 3 < next_avc) { + if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 1) + break; + i++; + } + return i + 3; +} + +static int get_bit_length(H2645NAL *nal, int skip_trailing_zeros) { + int size = nal->size; + int v; + + while (skip_trailing_zeros && size > 0 && nal->data[size - 1] == 0) + size--; + + if (!size) + return 0; + + v = nal->data[size - 1]; + + if (size > INT_MAX / 8) + return AVERROR(ERANGE); + size *= 8; + + /* remove the stop bit and following trailing zeros, + * or nothing for damaged bitstreams */ + if (v) + size -= ff_ctz(v) + 1; + + return size; +} + +static void alloc_rbsp_buffer(H2645RBSP *rbsp, unsigned int size) { + int min_size = size; + + if (size > INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE) + goto fail; + size += AV_INPUT_BUFFER_PADDING_SIZE; + + if (rbsp->rbsp_buffer_alloc_size >= size && + (!rbsp->rbsp_buffer_ref || av_buffer_is_writable(rbsp->rbsp_buffer_ref))) { + memset(rbsp->rbsp_buffer + min_size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + return; + } + + size = FFMIN(size + size / 16 + 32, INT_MAX); + + if (rbsp->rbsp_buffer_ref) + av_buffer_unref(&rbsp->rbsp_buffer_ref); + else + av_free(rbsp->rbsp_buffer); + + rbsp->rbsp_buffer = (uint8_t *) av_mallocz(size); + if (!rbsp->rbsp_buffer) + goto fail; + rbsp->rbsp_buffer_alloc_size = size; + + return; + + fail: + rbsp->rbsp_buffer_alloc_size = 0; + if (rbsp->rbsp_buffer_ref) { + av_buffer_unref(&rbsp->rbsp_buffer_ref); + rbsp->rbsp_buffer = NULL; + } else + av_freep(&rbsp->rbsp_buffer); + + return; +} + +static int h264_parse_nal_header(H2645NAL *nal) { + GetBitContext *gb = &nal->gb; + + if (get_bits1(gb) != 0) + return AVERROR_INVALIDDATA; + + nal->ref_idc = get_bits(gb, 2); + nal->type = get_bits(gb, 5); + + return 1; +} + +int 
ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, int is_nalff, int nal_length_size) { + GetByteContext bc; + int consumed, ret = 0; + int next_avc = is_nalff ? 0 : length; + int64_t padding = MAX_MBPAIR_SIZE; + + bytestream2_init(&bc, buf, length); + alloc_rbsp_buffer(&pkt->rbsp, length + padding); + + if (!pkt->rbsp.rbsp_buffer) + return AVERROR(ENOMEM); + + pkt->rbsp.rbsp_buffer_size = 0; + pkt->nb_nals = 0; + while (bytestream2_get_bytes_left(&bc) >= 4) { + H2645NAL *nal; + int extract_length = 0; + int skip_trailing_zeros = 1; + + if (bytestream2_tell(&bc) == next_avc) { + int i = 0; + extract_length = get_nalsize(nal_length_size, bc.buffer, bytestream2_get_bytes_left(&bc), &i); + if (extract_length < 0) + return extract_length; + + bytestream2_skip(&bc, nal_length_size); + + next_avc = bytestream2_tell(&bc) + extract_length; + } else { + int buf_index; + buf_index = find_next_start_code(bc.buffer, buf + next_avc); + bytestream2_skip(&bc, buf_index); + if (!bytestream2_get_bytes_left(&bc)) { + if (pkt->nb_nals > 0) { + return 0; + } else { + return AVERROR_INVALIDDATA; + } + } + extract_length = FFMIN(bytestream2_get_bytes_left(&bc), next_avc - bytestream2_tell(&bc)); + if (bytestream2_tell(&bc) >= next_avc) { + bytestream2_skip(&bc, next_avc - bytestream2_tell(&bc)); + continue; + } + } + + if (pkt->nals_allocated < pkt->nb_nals + 1) { + int new_size = pkt->nals_allocated + 1; + void *tmp; + + if (new_size >= INT_MAX / sizeof(*pkt->nals)) + return AVERROR(ENOMEM); + + tmp = av_fast_realloc(pkt->nals, &pkt->nal_buffer_size, new_size * sizeof(*pkt->nals)); + if (!tmp) + return AVERROR(ENOMEM); + + pkt->nals = (H2645NAL *) tmp; + memset(pkt->nals + pkt->nals_allocated, 0, sizeof(*pkt->nals)); + + nal = &pkt->nals[pkt->nb_nals]; + nal->skipped_bytes_pos_size = 1024; + nal->skipped_bytes_pos = (int *) av_malloc_array(nal->skipped_bytes_pos_size, sizeof(*nal->skipped_bytes_pos)); + if (!nal->skipped_bytes_pos) + return AVERROR(ENOMEM); + + 
pkt->nals_allocated = new_size; + } + nal = &pkt->nals[pkt->nb_nals]; + + consumed = ff_h2645_extract_rbsp(bc.buffer, extract_length, &pkt->rbsp, nal); + if (consumed < 0) + return consumed; + + pkt->nb_nals++; + + bytestream2_skip(&bc, consumed); + + /* see commit 3566042a0 */ + if (bytestream2_get_bytes_left(&bc) >= 4 && + bytestream2_peek_be32(&bc) == 0x000001E0) + skip_trailing_zeros = 0; + + nal->size_bits = get_bit_length(nal, skip_trailing_zeros); + + ret = init_get_bits(&nal->gb, nal->data, nal->size_bits); + if (ret < 0) + return ret; + + ret = h264_parse_nal_header(nal); + if (ret <= 0 || nal->size <= 0 || nal->size_bits <= 0) { + pkt->nb_nals--; + } + } + + return 0; +} + +#define MAX_SPS_COUNT 32 + +const uint8_t ff_zigzag_direct[64] = { + 0, 1, 8, 16, 9, 2, 3, 10, + 17, 24, 32, 25, 18, 11, 4, 5, + 12, 19, 26, 33, 40, 48, 41, 34, + 27, 20, 13, 6, 7, 14, 21, 28, + 35, 42, 49, 56, 57, 50, 43, 36, + 29, 22, 15, 23, 30, 37, 44, 51, + 58, 59, 52, 45, 38, 31, 39, 46, + 53, 60, 61, 54, 47, 55, 62, 63 +}; + +const uint8_t ff_zigzag_scan[16+1] = { + 0 + 0 * 4, 1 + 0 * 4, 0 + 1 * 4, 0 + 2 * 4, + 1 + 1 * 4, 2 + 0 * 4, 3 + 0 * 4, 2 + 1 * 4, + 1 + 2 * 4, 0 + 3 * 4, 1 + 3 * 4, 2 + 2 * 4, + 3 + 1 * 4, 3 + 2 * 4, 2 + 3 * 4, 3 + 3 * 4, +}; + +static int decode_scaling_list(GetBitContext *gb, uint8_t *factors, int size) { + int i, last = 8, next = 8; + const uint8_t *scan = size == 16 ? ff_zigzag_scan : ff_zigzag_direct; + if (!get_bits1(gb)) { + + } else { + for (i = 0; i < size; i++) { + if (next) { + int v = get_se_golomb(gb); + if (v < -128 || v > 127) { + return AVERROR_INVALIDDATA; + } + next = (last + v) & 0xff; + } + if (!i && !next) { /* matrix not written, we use the preset one */ + break; + } + last = factors[scan[i]] = next ? 
next : last; + } + } + return 0; +} + +static int decode_scaling_matrices(GetBitContext *gb, int chroma_format_idc, uint8_t(*scaling_matrix4)[16], uint8_t(*scaling_matrix8)[64]) { + int ret = 0; + if (get_bits1(gb)) { + ret |= decode_scaling_list(gb, scaling_matrix4[0], 16); // Intra, Y + ret |= decode_scaling_list(gb, scaling_matrix4[1], 16); // Intra, Cr + ret |= decode_scaling_list(gb, scaling_matrix4[2], 16); // Intra, Cb + ret |= decode_scaling_list(gb, scaling_matrix4[3], 16); // Inter, Y + ret |= decode_scaling_list(gb, scaling_matrix4[4], 16); // Inter, Cr + ret |= decode_scaling_list(gb, scaling_matrix4[5], 16); // Inter, Cb + + ret |= decode_scaling_list(gb, scaling_matrix8[0], 64); // Intra, Y + ret |= decode_scaling_list(gb, scaling_matrix8[3], 64); // Inter, Y + if (chroma_format_idc == 3) { + ret |= decode_scaling_list(gb, scaling_matrix8[1], 64); // Intra, Cr + ret |= decode_scaling_list(gb, scaling_matrix8[4], 64); // Inter, Cr + ret |= decode_scaling_list(gb, scaling_matrix8[2], 64); // Intra, Cb + ret |= decode_scaling_list(gb, scaling_matrix8[5], 64); // Inter, Cb + } + if (!ret) + ret = 1; + } + + return ret; +} + +int ff_h264_decode_seq_parameter_set(GetBitContext *gb, int &width, int &height) { + int profile_idc, level_idc, constraint_set_flags = 0; + unsigned int sps_id; + int i, log2_max_frame_num_minus4; + int ret; + + profile_idc = get_bits(gb, 8); + constraint_set_flags |= get_bits1(gb) << 0; + constraint_set_flags |= get_bits1(gb) << 1; + constraint_set_flags |= get_bits1(gb) << 2; + constraint_set_flags |= get_bits1(gb) << 3; + constraint_set_flags |= get_bits1(gb) << 4; + constraint_set_flags |= get_bits1(gb) << 5; + skip_bits(gb, 2); + level_idc = get_bits(gb, 8); + sps_id = get_ue_golomb_31(gb); + + if (sps_id >= MAX_SPS_COUNT) { + return false; + } + + if (profile_idc == 100 || // High profile + profile_idc == 110 || // High10 profile + profile_idc == 122 || // High422 profile + profile_idc == 244 || // High444 Predictive profile + 
profile_idc == 44 || // Cavlc444 profile + profile_idc == 83 || // Scalable Constrained High profile (SVC) + profile_idc == 86 || // Scalable High Intra profile (SVC) + profile_idc == 118 || // Stereo High profile (MVC) + profile_idc == 128 || // Multiview High profile (MVC) + profile_idc == 138 || // Multiview Depth High profile (MVCD) + profile_idc == 144) { // old High444 profile + int chroma_format_idc = get_ue_golomb_31(gb); + if (chroma_format_idc > 3U) { + return false; + } else if (chroma_format_idc == 3) { + int residual_color_transform_flag = get_bits1(gb); + if (residual_color_transform_flag) { + return false; + } + } + int bit_depth_luma = get_ue_golomb(gb) + 8; + int bit_depth_chroma = get_ue_golomb(gb) + 8; + if (bit_depth_chroma != bit_depth_luma) { + return false; + } + if (bit_depth_luma < 8 || bit_depth_luma > 14 || bit_depth_chroma < 8 || bit_depth_chroma > 14) { + return false; + } + get_bits1(gb); + uint8_t scaling_matrix4[6][16]; + uint8_t scaling_matrix8[6][64]; + ret = decode_scaling_matrices(gb, chroma_format_idc, scaling_matrix4, scaling_matrix8); + if (ret < 0) + return false; + } + + get_ue_golomb(gb); + + int poc_type = get_ue_golomb_31(gb); + + if (poc_type == 0) { + unsigned t = get_ue_golomb(gb); + if (t > 12) { + return false; + } + } else if (poc_type == 1) { + get_bits1(gb); + int offset_for_non_ref_pic = get_se_golomb_long(gb); + int offset_for_top_to_bottom_field = get_se_golomb_long(gb); + + if (offset_for_non_ref_pic == INT32_MIN || offset_for_top_to_bottom_field == INT32_MIN) { + return false; + } + + int poc_cycle_length = get_ue_golomb(gb); + + if ((unsigned) poc_cycle_length >= 256) { + return false; + } + + for (i = 0; i < poc_cycle_length; i++) { + int offset_for_ref_frame = get_se_golomb_long(gb); + if (offset_for_ref_frame == INT32_MIN) { + return false; + } + } + } else if (poc_type != 2) { + return false; + } + + get_ue_golomb_31(gb); + get_bits1(gb); + int mb_width = get_ue_golomb(gb) + 1; + int mb_height = 
get_ue_golomb(gb) + 1; + + if (width == 0 || height == 0) { + width = mb_width; + height = mb_height; + } + return mb_width != width || mb_height != height; +} int decode_packet(VideoInfo *info, int *got_frame) { int ret = 0; int decoded = info->pkt.size; *got_frame = 0; - + if (info->pkt.stream_index == info->video_stream_idx) { - ret = avcodec_decode_video2(info->video_dec_ctx, info->frame, got_frame, &info->pkt); - if (ret != 0) { - return ret; + if (info->video_stream->codecpar->codec_id == AV_CODEC_ID_H264 && decoded > 0) { + ff_h2645_packet_split(&info->h2645Packet, info->pkt.data, info->pkt.size, 1, 4); + for (int i = 0; i < info->h2645Packet.nb_nals; i++) { + H2645NAL *nal = &info->h2645Packet.nals[i]; + switch (nal->type) { + case 7: { + GetBitContext tmp_gb = nal->gb; + info->dropFrames = ff_h264_decode_seq_parameter_set(&tmp_gb, info->firstWidth, info->firstHeight); + } + } + } + } + if (!info->dropFrames) { + ret = avcodec_decode_video2(info->video_dec_ctx, info->frame, got_frame, &info->pkt); + if (ret != 0) { + return ret; + } } } @@ -585,7 +1120,7 @@ extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDr if (got_frame) { info->has_decoded_frames = true; bool finished = false; - if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) { + if (info->frame->format == AV_PIX_FMT_YUV444P || info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) { int64_t pkt_pts = info->frame->best_effort_timestamp; if (pkt_pts >= pts) { finished = true; @@ -620,9 +1155,9 @@ static inline void writeFrameToBitmap(JNIEnv *env, VideoInfo *info, jintArray da wantedHeight = bitmapHeight; } - void *pixels; - if (AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0) { - if (wantedWidth == info->frame->width && wantedHeight == info->frame->height || wantedWidth == info->frame->height && wantedHeight == 
info->frame->width) { + if (wantedWidth == info->frame->width && wantedHeight == info->frame->height || wantedWidth == info->frame->height && wantedHeight == info->frame->width) { + void *pixels; + if (AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0) { if (info->sws_ctx == nullptr) { if (info->frame->format > AV_PIX_FMT_NONE && info->frame->format < AV_PIX_FMT_NB) { info->sws_ctx = sws_getContext(info->frame->width, info->frame->height, (AVPixelFormat) info->frame->format, bitmapWidth, bitmapHeight, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL); @@ -631,7 +1166,9 @@ static inline void writeFrameToBitmap(JNIEnv *env, VideoInfo *info, jintArray da } } if (info->sws_ctx == nullptr || ((intptr_t) pixels) % 16 != 0) { - if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_YUVJ420P) { + if (info->frame->format == AV_PIX_FMT_YUV444P) { + libyuv::H444ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight); + } else if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_YUVJ420P) { if (info->frame->colorspace == AVColorSpace::AVCOL_SPC_BT709) { libyuv::H420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight); } else { @@ -706,7 +1243,7 @@ extern "C" JNIEXPORT int JNICALL Java_org_telegram_ui_Components_AnimatedFileDra } if (got_frame) { bool finished = false; - if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) { + if (info->frame->format == AV_PIX_FMT_YUV444P || info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) { int64_t pkt_pts = 
info->frame->best_effort_timestamp; bool isLastPacket = false; if (info->pkt.size == 0) { @@ -800,7 +1337,7 @@ extern "C" JNIEXPORT jint JNICALL Java_org_telegram_ui_Components_AnimatedFileDr } if (got_frame) { //LOGD("decoded frame with w = %d, h = %d, format = %d", info->frame->width, info->frame->height, info->frame->format); - if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) { + if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P || info->frame->format == AV_PIX_FMT_YUV444P) { writeFrameToBitmap(env, info, data, bitmap, stride); } info->has_decoded_frames = true; @@ -815,6 +1352,7 @@ extern "C" JNIEXPORT jint JNICALL Java_org_telegram_ui_Components_AnimatedFileDr } extern "C" jint videoOnJNILoad(JavaVM *vm, JNIEnv *env) { + //av_log_set_callback(custom_log); jclass_AnimatedFileDrawableStream = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/AnimatedFileDrawableStream")); if (jclass_AnimatedFileDrawableStream == 0) { return JNI_FALSE; diff --git a/TMessagesProj/jni/image.cpp b/TMessagesProj/jni/image.cpp index c88742fdc..72e0e2339 100644 --- a/TMessagesProj/jni/image.cpp +++ b/TMessagesProj/jni/image.cpp @@ -1,14 +1,17 @@ #include -#include -#include -#include -#include -#include +#include +#include +#include +#include +#include #include #include #include #include #include +#include +#include +#include #include "libwebp/webp/decode.h" #include "libwebp/webp/encode.h" #include "mozjpeg/turbojpeg.h" @@ -150,7 +153,7 @@ static void fastBlurMore(int32_t w, int32_t h, int32_t stride, uint8_t *pix, int } static void fastBlur(int32_t w, int32_t h, int32_t stride, uint8_t *pix, int32_t radius) { - if (pix == NULL) { + if (pix == nullptr) { return; } const int32_t r1 = radius + 1; @@ -173,7 +176,7 @@ static void fastBlur(int32_t w, int32_t h, int32_t stride, uint8_t *pix, int32_t } 
uint64_t *rgb = new uint64_t[w * h]; - if (rgb == NULL) { + if (rgb == nullptr) { return; } @@ -450,11 +453,11 @@ JNIEXPORT int Java_org_telegram_messenger_Utilities_needInvert(JNIEnv *env, jcla return 0; } - void *pixels = 0; + void *pixels = nullptr; if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) { return 0; } - if (pixels == NULL) { + if (pixels == nullptr) { return 0; } uint8_t *pix = (uint8_t *) pixels; @@ -516,7 +519,7 @@ JNIEXPORT void Java_org_telegram_messenger_Utilities_blurBitmap(JNIEnv *env, jcl return; } - void *pixels = 0; + void *pixels = nullptr; if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) { return; } @@ -642,7 +645,7 @@ JNIEXPORT jint Java_org_telegram_messenger_Utilities_pinBitmap(JNIEnv *env, jcla } JNIEXPORT void Java_org_telegram_messenger_Utilities_unpinBitmap(JNIEnv *env, jclass clazz, jobject bitmap) { - if (bitmap == NULL) { + if (bitmap == nullptr) { return; } AndroidBitmap_unlockPixels(env, bitmap); @@ -680,7 +683,7 @@ JNIEXPORT jboolean Java_org_telegram_messenger_Utilities_loadWebpImage(JNIEnv *e return 0; } - void *bitmapPixels = 0; + void *bitmapPixels = nullptr; if (AndroidBitmap_lockPixels(env, outputBitmap, &bitmapPixels) != ANDROID_BITMAP_RESUT_SUCCESS) { env->ThrowNew(jclass_RuntimeException, "Failed to lock Bitmap pixels"); return 0; @@ -723,7 +726,7 @@ JNIEXPORT void Java_org_telegram_messenger_Utilities_stackBlurBitmap(JNIEnv *env int h = info.height; int stride = info.stride; - unsigned char *pixels = 0; + unsigned char *pixels = nullptr; AndroidBitmap_lockPixels(env, bitmap, (void **) &pixels); if (!pixels) { return; @@ -1166,4 +1169,129 @@ JNIEXPORT jint Java_org_telegram_messenger_Utilities_saveProgressiveJpeg(JNIEnv return outSize;*/ } +std::vector> gatherPositions(std::vector> list, int phase) { + std::vector> result(4); + for (int i = 0; i < 4; i++) { + int pos = phase + i * 2; + while (pos >= 8) { + pos -= 8; + } + result[i] = list[pos]; + result[i].second = 1.0f - result[i].second; + } + return 
result; +} + +static float *pixelCache = nullptr; + +JNIEXPORT void Java_org_telegram_messenger_Utilities_generateGradient(JNIEnv *env, jclass clazz, jobject bitmap, jboolean unpin, jint phase, jfloat progress, jint width, jint height, jint stride, jintArray colors) { + if (!bitmap) { + return; + } + + if (!width || !height) { + return; + } + + uint8_t *pixels = nullptr; + if (AndroidBitmap_lockPixels(env, bitmap, (void **) &pixels) < 0) { + return; + } + + std::vector> positions{ + {0.80f, 0.10f}, + {0.60f, 0.20f}, + {0.35f, 0.25f}, + {0.25f, 0.60f}, + {0.20f, 0.90f}, + {0.40f, 0.80f}, + {0.65f, 0.75f}, + {0.75f, 0.40f} + }; + + int32_t previousPhase = phase + 1; + if (previousPhase > 7) { + previousPhase = 0; + } + std::vector> previous = gatherPositions(positions, previousPhase); + std::vector> current = gatherPositions(positions, phase); + + auto colorsArray = (uint8_t *) env->GetIntArrayElements(colors, nullptr); + /*float *newPixelCache = nullptr; + if (pixelCache == nullptr) { + newPixelCache = new float[width * height * 2]; + }*/ + float directPixelY; + float centerDistanceY; + float centerDistanceY2; + int32_t colorsCount = colorsArray[12] == 0 ? 
3 : 4; + + for (int y = 0; y < height; y++) { + //if (pixelCache == nullptr) { + directPixelY = (float) y / (float) height; + centerDistanceY = directPixelY - 0.5f; + centerDistanceY2 = centerDistanceY * centerDistanceY; + //} + uint32_t offset = y * stride; + for (int x = 0; x < width; x++) { + float pixelX; + float pixelY; + /*if (pixelCache != nullptr) { + pixelX = pixelCache[(y * width + x) * 2]; + pixelX = pixelCache[(y * width + x) * 2 + 1]; + } else {*/ + float directPixelX = (float) x / (float) width; + + float centerDistanceX = directPixelX - 0.5f; + float centerDistance = sqrtf(centerDistanceX * centerDistanceX + centerDistanceY2); + + float swirlFactor = 0.35f * centerDistance; + float theta = swirlFactor * swirlFactor * 0.8f * 8.0f; + float sinTheta = sinf(theta); + float cosTheta = cosf(theta); + + pixelX = /*newPixelCache[(y * width + x) * 2] =*/ std::max(0.0f, std::min(1.0f, 0.5f + centerDistanceX * cosTheta - centerDistanceY * sinTheta)); + pixelY = /*newPixelCache[(y * width + x) * 2 + 1] =*/ std::max(0.0f, std::min(1.0f, 0.5f + centerDistanceX * sinTheta + centerDistanceY * cosTheta)); + //} + + float distanceSum = 0.0f; + + float r = 0.0f; + float g = 0.0f; + float b = 0.0f; + + for (int i = 0; i < colorsCount; i++) { + float colorX = previous[i].first + (current[i].first - previous[i].first) * progress; + float colorY = previous[i].second + (current[i].second - previous[i].second) * progress; + + float distanceX = pixelX - colorX; + float distanceY = pixelY - colorY; + + float distance = std::max(0.0f, 0.9f - sqrtf(distanceX * distanceX + distanceY * distanceY)); + distance = distance * distance * distance * distance; + distanceSum += distance; + + r = r + distance * ((float) colorsArray[i * 4] / 255.0f); + g = g + distance * ((float) colorsArray[i * 4 + 1] / 255.0f); + b = b + distance * ((float) colorsArray[i * 4 + 2] / 255.0f); + } + + pixels[offset + x * 4] = (uint8_t) (b / distanceSum * 255.0f); + pixels[offset + x * 4 + 1] = (uint8_t) (g / 
distanceSum * 255.0f); + pixels[offset + x * 4 + 2] = (uint8_t) (r / distanceSum * 255.0f); + pixels[offset + x * 4 + 3] = 0xff; + } + } + /*if (newPixelCache != nullptr) { + delete [] pixelCache; + pixelCache = newPixelCache; + }*/ + + env->ReleaseIntArrayElements(colors, (jint *) colorsArray, JNI_ABORT); + + if (unpin) { + AndroidBitmap_unlockPixels(env, bitmap); + } +} + } \ No newline at end of file diff --git a/TMessagesProj/jni/lottie.cpp b/TMessagesProj/jni/lottie.cpp index 429b22d37..984c52847 100644 --- a/TMessagesProj/jni/lottie.cpp +++ b/TMessagesProj/jni/lottie.cpp @@ -45,19 +45,19 @@ typedef struct LottieInfo { char *compressBuffer = nullptr; const char *buffer = nullptr; bool firstFrame = false; - int bufferSize; - int compressBound; - int firstFrameSize; + int bufferSize = 0; + int compressBound = 0; + int firstFrameSize = 0; volatile uint32_t framesAvailableInCache = 0; }; -JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *env, jclass clazz, jstring src, jint w, jint h, jintArray data, jboolean precache, jintArray colorReplacement, jboolean limitFps) { - LottieInfo *info = new LottieInfo(); +JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *env, jclass clazz, jstring src, jstring json, jint w, jint h, jintArray data, jboolean precache, jintArray colorReplacement, jboolean limitFps) { + auto info = new LottieInfo(); std::map *colors = nullptr; int color = 0; if (colorReplacement != nullptr) { - jint *arr = env->GetIntArrayElements(colorReplacement, 0); + jint *arr = env->GetIntArrayElements(colorReplacement, nullptr); if (arr != nullptr) { jsize len = env->GetArrayLength(colorReplacement); colors = new std::map(); @@ -71,10 +71,18 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *e } } - char const *srcString = env->GetStringUTFChars(src, 0); + char const *srcString = env->GetStringUTFChars(src, nullptr); info->path = srcString; - info->animation = 
rlottie::Animation::loadFromFile(info->path, colors); - if (srcString != 0) { + if (json != nullptr) { + char const *jsonString = env->GetStringUTFChars(json, nullptr); + if (jsonString) { + info->animation = rlottie::Animation::loadFromData(jsonString, info->path, colors); + env->ReleaseStringUTFChars(json, jsonString); + } + } else { + info->animation = rlottie::Animation::loadFromFile(info->path, colors); + } + if (srcString) { env->ReleaseStringUTFChars(src, srcString); } if (info->animation == nullptr) { @@ -91,7 +99,7 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *e info->precache = precache; if (info->precache) { info->cacheFile = info->path; - std::string::size_type index = info->cacheFile.find_last_of("/"); + std::string::size_type index = info->cacheFile.find_last_of('/'); if (index != std::string::npos) { std::string dir = info->cacheFile.substr(0, index) + "/acache"; mkdir(dir.c_str(), 0777); @@ -119,13 +127,13 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *e info->maxFrameSize = maxFrameSize; fread(&(info->imageSize), sizeof(uint32_t), 1, precacheFile); info->fileOffset = 9; - utimensat(0, info->cacheFile.c_str(), NULL, 0); + utimensat(0, info->cacheFile.c_str(), nullptr, 0); } fclose(precacheFile); } } - jint *dataArr = env->GetIntArrayElements(data, 0); + jint *dataArr = env->GetIntArrayElements(data, nullptr); if (dataArr != nullptr) { dataArr[0] = (jint) info->frameCount; dataArr[1] = (jint) info->animation->frameRate(); @@ -138,7 +146,7 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *e JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_createWithJson(JNIEnv *env, jclass clazz, jstring json, jstring name, jintArray data, jintArray colorReplacement) { std::map *colors = nullptr; if (colorReplacement != nullptr) { - jint *arr = env->GetIntArrayElements(colorReplacement, 0); + jint *arr = env->GetIntArrayElements(colorReplacement, 
nullptr); if (arr != nullptr) { jsize len = env->GetArrayLength(colorReplacement); colors = new std::map(); @@ -149,15 +157,15 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_createWithJson(J } } - LottieInfo *info = new LottieInfo(); + auto info = new LottieInfo(); - char const *jsonString = env->GetStringUTFChars(json, 0); - char const *nameString = env->GetStringUTFChars(name, 0); + char const *jsonString = env->GetStringUTFChars(json, nullptr); + char const *nameString = env->GetStringUTFChars(name, nullptr); info->animation = rlottie::Animation::loadFromData(jsonString, nameString, colors); - if (jsonString != 0) { + if (jsonString) { env->ReleaseStringUTFChars(json, jsonString); } - if (nameString != 0) { + if (nameString) { env->ReleaseStringUTFChars(name, nameString); } if (info->animation == nullptr) { @@ -167,7 +175,7 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_createWithJson(J info->frameCount = info->animation->totalFrame(); info->fps = (int) info->animation->frameRate(); - jint *dataArr = env->GetIntArrayElements(data, 0); + jint *dataArr = env->GetIntArrayElements(data, nullptr); if (dataArr != nullptr) { dataArr[0] = (int) info->frameCount; dataArr[1] = (int) info->animation->frameRate(); @@ -181,7 +189,7 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_destroy(JNIEnv *e if (!ptr) { return; } - LottieInfo *info = (LottieInfo *) (intptr_t) ptr; + auto info = (LottieInfo *) (intptr_t) ptr; delete info; } @@ -189,10 +197,10 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_setLayerColor(JNI if (!ptr || layer == nullptr) { return; } - LottieInfo *info = (LottieInfo *) (intptr_t) ptr; - char const *layerString = env->GetStringUTFChars(layer, 0); + auto info = (LottieInfo *) (intptr_t) ptr; + char const *layerString = env->GetStringUTFChars(layer, nullptr); info->animation->setValue(layerString, Color(((color) & 0xff) / 255.0f, ((color >> 8) & 0xff) / 255.0f, ((color >> 16) & 0xff) 
/ 255.0f)); - if (layerString != 0) { + if (layerString) { env->ReleaseStringUTFChars(layer, layerString); } } @@ -201,9 +209,9 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_replaceColors(JNI if (!ptr || colorReplacement == nullptr) { return; } - LottieInfo *info = (LottieInfo *) (intptr_t) ptr; + auto info = (LottieInfo *) (intptr_t) ptr; - jint *arr = env->GetIntArrayElements(colorReplacement, 0); + jint *arr = env->GetIntArrayElements(colorReplacement, nullptr); if (arr != nullptr) { jsize len = env->GetArrayLength(colorReplacement); for (int32_t a = 0; a < len / 2; a++) { @@ -240,7 +248,7 @@ void CacheWriteThreadProc() { lk.unlock(); if (task != nullptr) { - uint32_t size = (uint32_t) LZ4_compress_default(task->buffer, task->compressBuffer, task->bufferSize, task->compressBound); + auto size = (uint32_t) LZ4_compress_default(task->buffer, task->compressBuffer, task->bufferSize, task->compressBound); if (task->firstFrame) { task->firstFrameSize = size; task->fileOffset = 9 + sizeof(uint32_t) + task->firstFrameSize; @@ -262,7 +270,7 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_createCache(JNIEn if (ptr == NULL) { return; } - LottieInfo *info = (LottieInfo *) (intptr_t) ptr; + auto info = (LottieInfo *) (intptr_t) ptr; FILE *cacheFile = fopen(info->cacheFile.c_str(), "r+"); if (cacheFile != nullptr) { @@ -288,8 +296,8 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_createCache(JNIEn info->imageSize = (uint32_t) w * h * 4; info->compressBound = LZ4_compressBound(info->bufferSize); info->compressBuffer = new char[info->compressBound]; - uint8_t *firstBuffer = new uint8_t[info->bufferSize]; - uint8_t *secondBuffer = new uint8_t[info->bufferSize]; + auto firstBuffer = new uint8_t[info->bufferSize]; + auto secondBuffer = new uint8_t[info->bufferSize]; //long time = ConnectionsManager::getInstance(0).getCurrentTimeMonotonicMillis(); Surface surface1((uint32_t *) firstBuffer, (size_t) w, (size_t) h, (size_t) w * 
4); @@ -337,7 +345,7 @@ JNIEXPORT jint Java_org_telegram_ui_Components_RLottieDrawable_getFrame(JNIEnv * if (!ptr || bitmap == nullptr) { return 0; } - LottieInfo *info = (LottieInfo *) (intptr_t) ptr; + auto info = (LottieInfo *) (intptr_t) ptr; int framesPerUpdate = !info->limitFps || info->fps < 60 ? 1 : 2; int framesAvailableInCache = info->framesAvailableInCache; diff --git a/TMessagesProj/jni/rlottie/src/lottie/lottieanimation.cpp b/TMessagesProj/jni/rlottie/src/lottie/lottieanimation.cpp index 743002b83..bd2880883 100755 --- a/TMessagesProj/jni/rlottie/src/lottie/lottieanimation.cpp +++ b/TMessagesProj/jni/rlottie/src/lottie/lottieanimation.cpp @@ -60,7 +60,7 @@ private: std::shared_ptr mModel; std::unique_ptr mCompItem; SharedRenderTask mTask; - std::atomic mRenderInProgress; + std::atomic mRenderInProgress{false}; }; void AnimationImpl::setValue(const std::string &keypath, LOTVariant &&value) @@ -141,9 +141,7 @@ std::unique_ptr Animation::loadFromData( animation->d->init(loader.model()); return animation; } - if (colorReplacement != nullptr) { - delete colorReplacement; - } + delete colorReplacement; return nullptr; } @@ -161,9 +159,7 @@ std::unique_ptr Animation::loadFromFile(const std::string &path, std: animation->d->init(loader.model()); return animation; } - if (colorReplacement != nullptr) { - delete colorReplacement; - } + delete colorReplacement; return nullptr; } diff --git a/TMessagesProj/jni/rlottie/src/vector/freetype/v_ft_raster.cpp b/TMessagesProj/jni/rlottie/src/vector/freetype/v_ft_raster.cpp index 0d6750dd5..9b22d64e7 100755 --- a/TMessagesProj/jni/rlottie/src/vector/freetype/v_ft_raster.cpp +++ b/TMessagesProj/jni/rlottie/src/vector/freetype/v_ft_raster.cpp @@ -679,63 +679,69 @@ static void gray_render_conic( RAS_ARG_ const SW_FT_Vector* control, const SW_FT_Vector* to ) { - SW_FT_Vector bez_stack[16 * 2 + 1]; /* enough to accommodate bisections */ - SW_FT_Vector* arc = bez_stack; - TPos dx, dy; - int draw, split; + TPos dx, dy; + TPos 
min, max, y; + int top, level; + int* levels; + SW_FT_Vector* arc; + levels = ras.lev_stack; - arc[0].x = UPSCALE( to->x ); - arc[0].y = UPSCALE( to->y ); - arc[1].x = UPSCALE( control->x ); - arc[1].y = UPSCALE( control->y ); + arc = ras.bez_stack; + arc[0].x = UPSCALE(to->x); + arc[0].y = UPSCALE(to->y); + arc[1].x = UPSCALE(control->x); + arc[1].y = UPSCALE(control->y); arc[2].x = ras.x; arc[2].y = ras.y; + top = 0; + + dx = SW_FT_ABS(arc[2].x + arc[0].x - 2 * arc[1].x); + dy = SW_FT_ABS(arc[2].y + arc[0].y - 2 * arc[1].y); + if (dx < dy) dx = dy; + + if (dx < ONE_PIXEL / 4) goto Draw; /* short-cut the arc that crosses the current band */ - if ( ( TRUNC( arc[0].y ) >= ras.max_ey && - TRUNC( arc[1].y ) >= ras.max_ey && - TRUNC( arc[2].y ) >= ras.max_ey ) || - ( TRUNC( arc[0].y ) < ras.min_ey && - TRUNC( arc[1].y ) < ras.min_ey && - TRUNC( arc[2].y ) < ras.min_ey ) ) - { - ras.x = arc[0].x; - ras.y = arc[0].y; - return; - } + min = max = arc[0].y; - dx = SW_FT_ABS( arc[2].x + arc[0].x - 2 * arc[1].x ); - dy = SW_FT_ABS( arc[2].y + arc[0].y - 2 * arc[1].y ); - if ( dx < dy ) - dx = dy; + y = arc[1].y; + if (y < min) min = y; + if (y > max) max = y; - /* We can calculate the number of necessary bisections because */ - /* each bisection predictably reduces deviation exactly 4-fold. */ - /* Even 32-bit deviation would vanish after 16 bisections. */ - draw = 1; - while ( dx > ONE_PIXEL / 4 ) - { - dx >>= 2; - draw <<= 1; - } + y = arc[2].y; + if (y < min) min = y; + if (y > max) max = y; - /* We use decrement counter to count the total number of segments */ - /* to draw starting from 2^level. Before each draw we split as */ - /* many times as there are trailing zeros in the counter. 
*/ - do - { - split = draw & ( -draw ); /* isolate the rightmost 1-bit */ - while ( ( split >>= 1 ) ) - { - gray_split_conic( arc ); + if (TRUNC(min) >= ras.max_ey || TRUNC(max) < ras.min_ey) goto Draw; + + level = 0; + do { + dx >>= 2; + level++; + } while (dx > ONE_PIXEL / 4); + + levels[0] = level; + + do { + level = levels[top]; + if (level > 0) { + gray_split_conic(arc); arc += 2; + top++; + + if (top + 1 > 32) return; + + levels[top] = levels[top - 1] = level - 1; + continue; } - gray_render_line( RAS_VAR_ arc[0].x, arc[0].y ); + Draw: + gray_render_line(RAS_VAR_ arc[0].x, arc[0].y); + top--; arc -= 2; - } while ( --draw ); + } while (top >= 0); } static void @@ -809,7 +815,7 @@ gray_render_cubic( RAS_ARG_ const SW_FT_Vector* control1, /* with each split, control points quickly converge towards */ /* chord trisection points and the vanishing distances below */ /* indicate when the segment is flat enough to draw */ - if (num < 0 || num >= count) { + if (num < 0 || num + 7 >= count) { return; } if ( SW_FT_ABS( 2 * arc[0].x - 3 * arc[1].x + arc[3].x ) > ONE_PIXEL / 2 || diff --git a/TMessagesProj/jni/tgnet/ApiScheme.cpp b/TMessagesProj/jni/tgnet/ApiScheme.cpp index e638324f9..bfd2de0fe 100644 --- a/TMessagesProj/jni/tgnet/ApiScheme.cpp +++ b/TMessagesProj/jni/tgnet/ApiScheme.cpp @@ -1172,7 +1172,7 @@ UserProfilePhoto *UserProfilePhoto::TLdeserialize(NativeByteBuffer *stream, uint case 0x4f11bae1: result = new TL_userProfilePhotoEmpty(); break; - case 0xcc656077: + case 0x82d1f706: result = new TL_userProfilePhoto(); break; default: @@ -1192,8 +1192,6 @@ void TL_userProfilePhoto::readParams(NativeByteBuffer *stream, int32_t instanceN flags = stream->readInt32(&error); has_video = (flags & 1) != 0; photo_id = stream->readInt64(&error); - photo_small = std::unique_ptr(FileLocation::TLdeserialize(stream, stream->readUint32(&error), instanceNum, error)); - photo_big = std::unique_ptr(FileLocation::TLdeserialize(stream, stream->readUint32(&error), instanceNum, 
error)); if ((flags & 2) != 0) { stripped_thumb = std::unique_ptr(stream->readByteArray(&error)); } @@ -1205,8 +1203,6 @@ void TL_userProfilePhoto::serializeToStream(NativeByteBuffer *stream) { flags = has_video ? (flags | 1) : (flags &~ 1); stream->writeInt32(flags); stream->writeInt64(photo_id); - photo_small->serializeToStream(stream); - photo_big->serializeToStream(stream); if ((flags & 2) != 0) { stream->writeByteArray(stripped_thumb.get()); } diff --git a/TMessagesProj/jni/tgnet/ApiScheme.h b/TMessagesProj/jni/tgnet/ApiScheme.h index 132441805..4f6c00807 100644 --- a/TMessagesProj/jni/tgnet/ApiScheme.h +++ b/TMessagesProj/jni/tgnet/ApiScheme.h @@ -255,8 +255,6 @@ public: int32_t flags; bool has_video; int64_t photo_id; - std::unique_ptr photo_small; - std::unique_ptr photo_big; std::unique_ptr stripped_thumb; int32_t dc_id; @@ -274,7 +272,7 @@ public: class TL_userProfilePhoto : public UserProfilePhoto { public: - static const uint32_t constructor = 0xcc656077; + static const uint32_t constructor = 0x82d1f706; void readParams(NativeByteBuffer *stream, int32_t instanceNum, bool &error); void serializeToStream(NativeByteBuffer *stream); diff --git a/TMessagesProj/jni/tgnet/ConnectionSocket.cpp b/TMessagesProj/jni/tgnet/ConnectionSocket.cpp index 1c53b34bb..8191f7d4c 100644 --- a/TMessagesProj/jni/tgnet/ConnectionSocket.cpp +++ b/TMessagesProj/jni/tgnet/ConnectionSocket.cpp @@ -6,6 +6,7 @@ * Copyright Nikolai Kudashov, 2015-2018. */ +#include #include #include #include diff --git a/TMessagesProj/jni/tgnet/ConnectionsManager.cpp b/TMessagesProj/jni/tgnet/ConnectionsManager.cpp index 7e0d94d93..0f10596dd 100644 --- a/TMessagesProj/jni/tgnet/ConnectionsManager.cpp +++ b/TMessagesProj/jni/tgnet/ConnectionsManager.cpp @@ -6,7 +6,8 @@ * Copyright Nikolai Kudashov, 2015-2018. 
*/ -#include +#include +#include #include #include #include @@ -16,7 +17,7 @@ #include #include #include -#include +#include #include "ConnectionsManager.h" #include "FileLog.h" #include "EventObject.h" @@ -3453,14 +3454,6 @@ void ConnectionsManager::setIpStrategy(uint8_t value) { }); } -void ConnectionsManager::setMtProtoVersion(int version) { - mtProtoVersion = version; -} - -int32_t ConnectionsManager::getMtProtoVersion() { - return mtProtoVersion; -} - int64_t ConnectionsManager::checkProxy(std::string address, uint16_t port, std::string username, std::string password, std::string secret, onRequestTimeFunc requestTimeFunc, jobject ptr1) { ProxyCheckInfo *proxyCheckInfo = new ProxyCheckInfo(); proxyCheckInfo->address = address; diff --git a/TMessagesProj/jni/tgnet/ConnectionsManager.h b/TMessagesProj/jni/tgnet/ConnectionsManager.h index 740e86830..2ae12f1f4 100644 --- a/TMessagesProj/jni/tgnet/ConnectionsManager.h +++ b/TMessagesProj/jni/tgnet/ConnectionsManager.h @@ -71,8 +71,6 @@ public: void updateDcSettings(uint32_t datacenterId, bool workaround); void setPushConnectionEnabled(bool value); void applyDnsConfig(NativeByteBuffer *buffer, std::string phone, int32_t date); - void setMtProtoVersion(int version); - int32_t getMtProtoVersion(); int64_t checkProxy(std::string address, uint16_t port, std::string username, std::string password, std::string secret, onRequestTimeFunc requestTimeFunc, jobject ptr1); #ifdef ANDROID @@ -227,7 +225,6 @@ private: int32_t currentUserId = 0; bool registeredForInternalPush = false; bool pushConnectionEnabled = true; - int32_t mtProtoVersion = 2; std::map>> genericMessagesToDatacenters; std::map>> genericMediaMessagesToDatacenters; diff --git a/TMessagesProj/jni/tgnet/Datacenter.cpp b/TMessagesProj/jni/tgnet/Datacenter.cpp index 6aa44acb5..80790b48d 100644 --- a/TMessagesProj/jni/tgnet/Datacenter.cpp +++ b/TMessagesProj/jni/tgnet/Datacenter.cpp @@ -1158,7 +1158,7 @@ NativeByteBuffer 
*Datacenter::createRequestsData(std::vectorgetObjectSize(); uint32_t additionalSize = (32 + messageSize) % 16; @@ -1235,43 +1235,29 @@ bool Datacenter::decryptServerResponse(int64_t keyId, uint8_t *key, uint8_t *dat if (authKey == nullptr) { return false; } - bool error = false; - if (authKeyId != keyId) { - error = true; - } + bool error = authKeyId != keyId; thread_local static uint8_t messageKey[96]; - int mtProtoVersion = ConnectionsManager::getInstance(instanceNum).getMtProtoVersion(); - generateMessageKey(instanceNum, authKey->bytes, key, messageKey + 32, true, mtProtoVersion); + generateMessageKey(instanceNum, authKey->bytes, key, messageKey + 32, true, 2); aesIgeEncryption(data, messageKey + 32, messageKey + 64, false, false, length); uint32_t messageLength; memcpy(&messageLength, data + 28, sizeof(uint32_t)); - uint32_t paddingLength = (int32_t) length - (messageLength + 32); - if (messageLength > length - 32) { - error = true; - } else if (paddingLength < 12 || paddingLength > 1024) { - error = true; - } - messageLength += 32; - if (messageLength > length) { - messageLength = length; + uint32_t paddingLength = length - (messageLength + 32); + + error |= (messageLength > length - 32); + error |= (paddingLength < 12); + error |= (paddingLength > 1024); + + SHA256_Init(&sha256Ctx); + SHA256_Update(&sha256Ctx, authKey->bytes + 88 + 8, 32); + SHA256_Update(&sha256Ctx, data, length); + SHA256_Final(messageKey, &sha256Ctx); + + for (uint32_t i = 0; i < 16; i++) { + error |= (messageKey[i + 8] != key[i]); } - switch (mtProtoVersion) { - case 2: { - SHA256_Init(&sha256Ctx); - SHA256_Update(&sha256Ctx, authKey->bytes + 88 + 8, 32); - SHA256_Update(&sha256Ctx, data, length); - SHA256_Final(messageKey, &sha256Ctx); - break; - } - default: { - SHA1(data, messageLength, messageKey + 4); - break; - } - } - - return memcmp(messageKey + 8, key, 16) == 0 && !error; + return !error; } bool Datacenter::hasPermanentAuthKey() { diff --git 
a/TMessagesProj/jni/tgnet/Handshake.cpp b/TMessagesProj/jni/tgnet/Handshake.cpp index fdb9198a5..63115b90e 100644 --- a/TMessagesProj/jni/tgnet/Handshake.cpp +++ b/TMessagesProj/jni/tgnet/Handshake.cpp @@ -849,7 +849,7 @@ void Handshake::processHandshakeResponse(TLObject *message, int64_t messageId) { inner->temp_session_id = connection->getSessionId(); NetworkMessage *networkMessage = new NetworkMessage(); - networkMessage->message = std::unique_ptr(new TL_message()); + networkMessage->message = std::make_unique(); networkMessage->message->msg_id = authKeyPendingMessageId = messageId; networkMessage->message->bytes = inner->getObjectSize(); networkMessage->message->body = std::unique_ptr(inner); diff --git a/TMessagesProj/jni/voip/CMakeLists.txt b/TMessagesProj/jni/voip/CMakeLists.txt index 22d15c470..b8b06a78f 100644 --- a/TMessagesProj/jni/voip/CMakeLists.txt +++ b/TMessagesProj/jni/voip/CMakeLists.txt @@ -47,7 +47,7 @@ set_target_properties(tgvoip PROPERTIES target_compile_definitions(tgvoip PUBLIC HAVE_PTHREAD __STDC_LIMIT_MACROS BSD=1 USE_KISS_FFT TGVOIP_NO_VIDEO NULL=0 SOCKLEN_T=socklen_t LOCALE_NOT_USED _LARGEFILE_SOURCE=1 _FILE_OFFSET_BITS=64 restrict= __EMX__ OPUS_BUILD FIXED_POINT USE_ALLOCA HAVE_LRINT HAVE_LRINTF) target_compile_definitions(tgvoip PUBLIC - RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux) + RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR 
SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux) target_include_directories(tgvoip PUBLIC ./ voip @@ -76,6 +76,50 @@ elseif(${ANDROID_ABI} STREQUAL "x86_64") HAVE_SSE2) endif() +#rnnoise +add_library(rnnoise STATIC + voip/rnnoise/src/celt_lpc.c + voip/rnnoise/src/denoise.c + voip/rnnoise/src/kiss_fft.c + voip/rnnoise/src/pitch.c + voip/rnnoise/src/rnn_data.c + voip/rnnoise/src/rnn_reader.c + voip/rnnoise/src/rnn_reader.c + voip/rnnoise/src/rnn.c) +target_compile_options(rnnoise PUBLIC + -Wall -finline-functions -fno-strict-aliasing -O3 -frtti -Wno-unknown-pragmas -funroll-loops -fexceptions -fno-math-errno) +set_target_properties(rnnoise PROPERTIES + ANDROID_ARM_MODE arm) +target_compile_definitions(rnnoise PRIVATE + HAVE_PTHREAD __STDC_LIMIT_MACROS BSD=1 USE_KISS_FFT NULL=0 SOCKLEN_T=socklen_t LOCALE_NOT_USED _LARGEFILE_SOURCE=1 _FILE_OFFSET_BITS=64 restrict= __EMX__ OPUS_BUILD USE_ALLOCA HAVE_LRINT HAVE_LRINTF + _celt_autocorr=rnnoise__celt_autocorr + celt_fir=rnnoise_celt_fir + celt_iir=rnnoise_celt_iir + _celt_lpc=rnnoise__celt_lpc + celt_pitch_xcorr=rnnoise_celt_pitch_xcorr + compute_band_corr=rnnoise_compute_band_corr + compute_band_energy=rnnoise_compute_band_energy + compute_dense=rnnoise_compute_dense + compute_gru=rnnoise_compute_gru + compute_rnn=rnnoise_compute_rnn + interp_band_gain=rnnoise_interp_band_gain + opus_fft_alloc=rnnoise_opus_fft_alloc + opus_fft_alloc_arch_c=rnnoise_opus_fft_alloc_arch_c + opus_fft_alloc_twiddles=rnnoise_opus_fft_alloc_twiddles + opus_fft_c=rnnoise_opus_fft_c + opus_fft_free=rnnoise_opus_fft_free + opus_fft_free_arch_c=rnnoise_opus_fft_free_arch_c + opus_fft_impl=rnnoise_opus_fft_impl + opus_ifft_c=rnnoise_opus_ifft_c + pitch_downsample=rnnoise_pitch_downsample + pitch_filter=rnnoise_pitch_filter + pitch_search=rnnoise_pitch_search + remove_doubling=rnnoise_remove_doubling) +target_compile_definitions(rnnoise PUBLIC + RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 
ABSL_ALLOCATOR_NOTHROW=1 RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux) +target_include_directories(rnnoise PUBLIC + voip/rnnoise/include) + #tgcalls_tp add_library(tgcalls_tp STATIC third_party/rnnoise/src/rnn_vad_weights.cc @@ -427,7 +471,7 @@ target_compile_options(tgcalls_tp PUBLIC set_target_properties(tgcalls_tp PROPERTIES ANDROID_ARM_MODE arm) target_compile_definitions(tgcalls_tp PUBLIC - HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux HAVE_WEBRTC_VIDEO __ANDROID__) + RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux HAVE_WEBRTC_VIDEO __ANDROID__) target_include_directories(tgcalls_tp PUBLIC ./ voip @@ -738,15 +782,18 @@ add_library(tgcalls STATIC voip/tgcalls/ThreadLocalObject.cpp voip/tgcalls/VideoCaptureInterface.cpp voip/tgcalls/VideoCaptureInterfaceImpl.cpp - voip/tgcalls/JsonConfig.cpp voip/tgcalls/AudioDeviceHelper.cpp voip/tgcalls/reference/InstanceImplReference.cpp voip/tgcalls/legacy/InstanceImplLegacy.cpp voip/tgcalls/group/GroupNetworkManager.cpp voip/tgcalls/group/GroupInstanceCustomImpl.cpp + 
voip/tgcalls/group/GroupJoinPayloadInternal.cpp voip/tgcalls/group/StreamingPart.cpp + voip/tgcalls/third-party/json11.cpp voip/webrtc/rtc_base/async_invoker.cc + voip/webrtc/rtc_base/system_time.cc + voip/webrtc/rtc_base/async_resolver.cc voip/webrtc/rtc_base/async_packet_socket.cc voip/webrtc/rtc_base/async_resolver_interface.cc voip/webrtc/rtc_base/async_socket.cc @@ -762,9 +809,12 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/data_rate_limiter.cc voip/webrtc/rtc_base/event.cc voip/webrtc/rtc_base/event_tracer.cc + voip/webrtc/rtc_base/boringssl_certificate.cc + voip/webrtc/rtc_base/boringssl_identity.cc voip/webrtc/rtc_base/experiments/alr_experiment.cc voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc + voip/webrtc/rtc_base/experiments/encoder_info_settings.cc voip/webrtc/rtc_base/experiments/field_trial_list.cc voip/webrtc/rtc_base/experiments/field_trial_parser.cc voip/webrtc/rtc_base/experiments/field_trial_units.cc @@ -812,6 +862,7 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/openssl_certificate.cc voip/webrtc/rtc_base/openssl_digest.cc voip/webrtc/rtc_base/openssl_identity.cc + voip/webrtc/rtc_base/openssl_key_pair.cc voip/webrtc/rtc_base/openssl_session_cache.cc voip/webrtc/rtc_base/openssl_stream_adapter.cc voip/webrtc/rtc_base/openssl_utility.cc @@ -845,7 +896,7 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/strings/string_format.cc voip/webrtc/rtc_base/synchronization/mutex.cc voip/webrtc/rtc_base/synchronization/yield.cc - voip/webrtc/rtc_base/synchronization/sequence_checker.cc + voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc voip/webrtc/rtc_base/synchronization/yield_policy.cc voip/webrtc/rtc_base/system/file_wrapper.cc voip/webrtc/rtc_base/system/thread_registry.cc @@ -866,7 +917,7 @@ add_library(tgcalls STATIC voip/webrtc/rtc_base/zero_memory.cc voip/webrtc/rtc_base/callback_list.cc 
voip/webrtc/rtc_base/deprecated/recursive_critical_section.cc - voip/webrtc/rtc_base/deprecated/signal_thread.cc + voip/webrtc/rtc_base/internal/default_socket_server.cc voip/webrtc/api/audio/audio_frame.cc voip/webrtc/api/audio/channel_layout.cc voip/webrtc/api/audio/echo_canceller3_config.cc @@ -973,14 +1024,12 @@ add_library(tgcalls STATIC voip/webrtc/api/video/nv12_buffer.cc voip/webrtc/api/video/video_source_interface.cc voip/webrtc/api/video/video_stream_decoder_create.cc - voip/webrtc/api/video/video_stream_encoder_create.cc voip/webrtc/api/video/video_timing.cc voip/webrtc/api/video_codecs/builtin_video_decoder_factory.cc voip/webrtc/api/video_codecs/builtin_video_encoder_factory.cc voip/webrtc/api/video_codecs/sdp_video_format.cc voip/webrtc/api/video_codecs/video_codec.cc voip/webrtc/api/video_codecs/video_decoder.cc - voip/webrtc/api/video_codecs/video_decoder_factory.cc voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc voip/webrtc/api/video_codecs/video_encoder.cc voip/webrtc/api/video_codecs/video_encoder_config.cc @@ -989,11 +1038,12 @@ add_library(tgcalls STATIC voip/webrtc/api/video_codecs/vp8_temporal_layers.cc voip/webrtc/api/video_codecs/vp8_temporal_layers_factory.cc voip/webrtc/api/video_codecs/spatial_layer.cc + voip/webrtc/api/video_codecs/h264_profile_level_id.cc + voip/webrtc/api/video_codecs/vp9_profile.cc voip/webrtc/pc/audio_rtp_receiver.cc voip/webrtc/pc/audio_track.cc voip/webrtc/pc/channel.cc voip/webrtc/pc/channel_manager.cc - voip/webrtc/pc/composite_rtp_transport.cc voip/webrtc/pc/data_channel_controller.cc voip/webrtc/pc/data_channel_utils.cc voip/webrtc/pc/dtls_srtp_transport.cc @@ -1021,7 +1071,6 @@ add_library(tgcalls STATIC voip/webrtc/pc/rtp_media_utils.cc voip/webrtc/pc/rtp_parameters_conversion.cc voip/webrtc/pc/rtp_receiver.cc - voip/webrtc/pc/rtp_data_channel.cc voip/webrtc/pc/rtp_sender.cc voip/webrtc/pc/rtp_transceiver.cc voip/webrtc/pc/rtp_transport.cc @@ -1051,6 +1100,7 @@ add_library(tgcalls 
STATIC voip/webrtc/pc/sdp_offer_answer.cc voip/webrtc/pc/transceiver_list.cc voip/webrtc/pc/usage_pattern.cc + voip/webrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc voip/webrtc/media/base/adapted_video_track_source.cc voip/webrtc/media/base/codec.cc voip/webrtc/media/base/h264_profile_level_id.cc @@ -1058,18 +1108,15 @@ add_library(tgcalls STATIC voip/webrtc/media/base/media_constants.cc voip/webrtc/media/base/media_engine.cc voip/webrtc/media/base/rid_description.cc - voip/webrtc/media/base/rtp_data_engine.cc voip/webrtc/media/base/rtp_utils.cc - voip/webrtc/media/base/sdp_fmtp_utils.cc + voip/webrtc/media/base/sdp_video_format_utils.cc voip/webrtc/media/base/stream_params.cc voip/webrtc/media/base/turn_utils.cc voip/webrtc/media/base/video_adapter.cc voip/webrtc/media/base/video_broadcaster.cc voip/webrtc/media/base/video_common.cc voip/webrtc/media/base/video_source_base.cc - voip/webrtc/media/base/vp9_profile.cc voip/webrtc/media/engine/adm_helpers.cc - voip/webrtc/media/engine/constants.cc voip/webrtc/media/engine/encoder_simulcast_proxy.cc voip/webrtc/media/engine/internal_decoder_factory.cc voip/webrtc/media/engine/internal_encoder_factory.cc @@ -1082,8 +1129,9 @@ add_library(tgcalls STATIC voip/webrtc/media/engine/webrtc_media_engine_defaults.cc voip/webrtc/media/engine/webrtc_video_engine.cc voip/webrtc/media/engine/webrtc_voice_engine.cc - voip/webrtc/media/sctp/noop.cc - voip/webrtc/media/sctp/sctp_transport.cc + voip/webrtc/media/sctp/dcsctp_transport.cc + voip/webrtc/media/sctp/sctp_transport_factory.cc + voip/webrtc/media/sctp/usrsctp_transport.cc voip/webrtc/system_wrappers/source/clock.cc voip/webrtc/system_wrappers/source/cpu_features.cc voip/webrtc/system_wrappers/source/cpu_info.cc @@ -1376,9 +1424,9 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/agc/loudness_histogram.cc voip/webrtc/modules/audio_processing/agc/utility.cc voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc + 
voip/webrtc/modules/audio_processing/agc2/cpu_features.cc voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc - voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.cc voip/webrtc/modules/audio_processing/agc2/biquad_filter.cc voip/webrtc/modules/audio_processing/agc2/compute_interpolated_gain_curve.cc @@ -1394,8 +1442,8 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc voip/webrtc/modules/audio_processing/agc2/vector_float_frame.cc + voip/webrtc/modules/audio_processing/agc2/saturation_protector_buffer.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc - voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc @@ -1403,6 +1451,8 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc + voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc + voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc voip/webrtc/modules/audio_processing/audio_buffer.cc voip/webrtc/modules/audio_processing/audio_processing_impl.cc voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc @@ -1459,6 +1509,9 @@ add_library(tgcalls STATIC voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc voip/webrtc/modules/audio_processing/voice_detection.cc voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc + 
voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc + voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc + voip/webrtc/modules/congestion_controller/remb_throttler.cc voip/webrtc/modules/congestion_controller/pcc/bitrate_controller.cc voip/webrtc/modules/congestion_controller/pcc/monitor_interval.cc voip/webrtc/modules/congestion_controller/pcc/pcc_factory.cc @@ -1483,6 +1536,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc + voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc voip/webrtc/modules/pacing/bitrate_prober.cc voip/webrtc/modules/pacing/interval_budget.cc voip/webrtc/modules/pacing/paced_sender.cc @@ -1492,6 +1546,8 @@ add_library(tgcalls STATIC voip/webrtc/modules/pacing/task_queue_paced_sender.cc voip/webrtc/modules/rtp_rtcp/include/report_block_data.cc voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.cc + voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc + voip/webrtc/modules/rtp_rtcp/source/capture_clock_offset_updater.cc voip/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.cc voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_sender.cc @@ -1586,6 +1642,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc + voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc voip/webrtc/modules/utility/source/helpers_android.cc voip/webrtc/modules/utility/source/jvm_android.cc voip/webrtc/modules/utility/source/process_thread_impl.cc @@ -1636,6 
+1693,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/video_coding/timing.cc voip/webrtc/modules/video_coding/unique_timestamp_counter.cc voip/webrtc/modules/video_coding/utility/decoded_frames_history.cc + voip/webrtc/modules/video_coding/utility/qp_parser.cc voip/webrtc/modules/video_coding/utility/frame_dropper.cc voip/webrtc/modules/video_coding/utility/framerate_controller.cc voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc @@ -1650,8 +1708,8 @@ add_library(tgcalls STATIC voip/webrtc/modules/video_coding/video_coding_impl.cc voip/webrtc/modules/video_coding/video_receiver.cc voip/webrtc/modules/video_coding/video_receiver2.cc + voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.cc voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc - voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc voip/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc @@ -1659,21 +1717,21 @@ add_library(tgcalls STATIC voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc - voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc + voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc + voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc - voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc - voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc - voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc - voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc - 
voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc - voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc - voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc - voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc + voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc + voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc + voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.cc + voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc + voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc + voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc + voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc voip/webrtc/modules/video_processing/util/denoiser_filter.cc voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc voip/webrtc/modules/video_processing/util/noise_estimation.cc @@ -1682,6 +1740,7 @@ add_library(tgcalls STATIC voip/webrtc/call/adaptation/resource_adaptation_processor_interface.cc voip/webrtc/call/adaptation/video_source_restrictions.cc voip/webrtc/call/audio_receive_stream.cc + voip/webrtc/call/version.cc voip/webrtc/call/audio_send_stream.cc voip/webrtc/call/audio_state.cc voip/webrtc/call/bitrate_allocator.cc @@ -1792,7 +1851,6 @@ add_library(tgcalls STATIC voip/webrtc/p2p/base/ice_controller_interface.cc voip/webrtc/p2p/base/ice_credentials_iterator.cc voip/webrtc/p2p/base/ice_transport_internal.cc - voip/webrtc/p2p/base/mdns_message.cc voip/webrtc/p2p/base/p2p_constants.cc voip/webrtc/p2p/base/p2p_transport_channel.cc voip/webrtc/p2p/base/packet_transport_internal.cc @@ -1857,6 +1915,7 @@ add_library(tgcalls STATIC voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc 
voip/webrtc/video/adaptation/balanced_constraint.cc voip/webrtc/video/adaptation/bitrate_constraint.cc + voip/webrtc/video/adaptation/pixel_limit_resource.cc voip/webrtc/video/buffered_frame_decryptor.cc voip/webrtc/video/call_stats.cc voip/webrtc/video/encoder_bitrate_adjuster.cc @@ -1916,6 +1975,7 @@ add_library(tgcalls STATIC voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc + voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc voip/webrtc/sdk/media_constraints.cc voip/webrtc/stats/rtc_stats_report.cc voip/webrtc/stats/rtc_stats.cc @@ -1939,12 +1999,13 @@ target_compile_options(tgcalls PUBLIC set_target_properties(tgcalls PROPERTIES ANDROID_ARM_MODE arm) target_compile_definitions(tgcalls PUBLIC - WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) + RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) target_include_directories(tgcalls PUBLIC ./ voip boringssl/include voip/tgcalls + voip/rnnoise/include voip/webrtc opus/include opus/silk @@ -2043,6 +2104,8 @@ add_library(voipandroid STATIC voip/webrtc/sdk/android/native_api/video/wrapper.cc voip/webrtc/sdk/android/native_api/network_monitor/network_monitor.cc voip/webrtc/sdk/android/src/jni/android_histogram.cc + 
voip/webrtc/sdk/android/src/jni/av1_codec.cc + voip/webrtc/sdk/android/src/jni/egl_base_10_impl.cc voip/webrtc/sdk/android/src/jni/android_metrics.cc voip/webrtc/sdk/android/src/jni/android_network_monitor.cc voip/webrtc/sdk/android/src/jni/android_video_track_source.cc @@ -2116,7 +2179,7 @@ target_compile_options(voipandroid PUBLIC set_target_properties(voipandroid PROPERTIES ANDROID_ARM_MODE arm) target_compile_definitions(voipandroid PUBLIC - WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) + RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) target_include_directories(voipandroid PUBLIC ./ voip diff --git a/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp b/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp index b09df13f0..aebc7b0dd 100644 --- a/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp +++ b/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp @@ -13,6 +13,8 @@ #include #include +#include +#include #include "pc/video_track.h" #include "legacy/InstanceImplLegacy.h" @@ -23,10 +25,6 @@ #include "libtgvoip/os/android/JNIUtilities.h" #include "tgcalls/VideoCaptureInterface.h" -#include "rapidjson/document.h" -#include "rapidjson/stringbuffer.h" -#include "rapidjson/writer.h" - using namespace tgcalls; const auto RegisterTag = Register(); @@ -39,13 +37,48 @@ jclass FinalStateClass; jclass NativeInstanceClass; jmethodID 
FinalStateInitMethod; +class RequestMediaChannelDescriptionTaskJava : public RequestMediaChannelDescriptionTask { +public: + RequestMediaChannelDescriptionTaskJava(std::shared_ptr platformContext, + std::function &&)> callback) : + _platformContext(std::move(platformContext)), + _callback(std::move(callback)) { + } + + void call(JNIEnv *env, jintArray audioSsrcs) { + std::vector descriptions; + + jint *ssrcsArr = env->GetIntArrayElements(audioSsrcs, nullptr); + jsize size = env->GetArrayLength(audioSsrcs); + for (int i = 0; i < size; i++) { + MediaChannelDescription description; + description.type = MediaChannelDescription::Type::Audio; + description.audioSsrc = ssrcsArr[i]; + descriptions.push_back(description); + } + env->ReleaseIntArrayElements(audioSsrcs, ssrcsArr, JNI_ABORT); + _callback(std::move<>(descriptions)); + } + +private: + void cancel() override { + /*tgvoip::jni::DoWithJNI([&](JNIEnv *env) { + jobject globalRef = ((AndroidContext *) _platformContext.get())->getJavaInstance(); + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onCancelRequestMediaChannelDescription", "(J)V"), _timestamp); + });*/ + } + + std::shared_ptr _platformContext; + std::function &&)> _callback; +}; + class BroadcastPartTaskJava : public BroadcastPartTask { public: BroadcastPartTaskJava(std::shared_ptr platformContext, std::function callback, int64_t timestamp) : - _platformContext(platformContext), - _callback(callback), + _platformContext(std::move(platformContext)), + _callback(std::move(callback)), _timestamp(timestamp) { } @@ -71,7 +104,6 @@ private: }); } -private: std::shared_ptr _platformContext; std::function _callback; int64_t _timestamp; @@ -113,9 +145,24 @@ public: return (jbyteArray) env->GetObjectField(obj, env->GetFieldID(clazz, name, "[B")); } + jintArray getIntArrayField(const char *name) { + return (jintArray) env->GetObjectField(obj, env->GetFieldID(clazz, name, "[I")); + } + jstring getStringField(const char *name) { return (jstring) 
env->GetObjectField(obj, env->GetFieldID(clazz, name, "Ljava/lang/String;")); } + + jobjectArray getObjectArrayField(const char *name) { + return (jobjectArray) env->GetObjectField(obj, env->GetFieldID(clazz, name, "[Ljava/lang/Object;")); + } +}; + +struct SetVideoSink { + std::shared_ptr> sink; + VideoChannelDescription::Quality quality; + std::string endpointId; + std::vector ssrcGroups; }; struct InstanceHolder { @@ -123,6 +170,7 @@ struct InstanceHolder { std::unique_ptr groupNativeInstance; std::shared_ptr _videoCapture; std::shared_ptr _platformContext; + std::map remoteGroupSinks; }; jlong getInstanceHolderId(JNIEnv *env, jobject obj) { @@ -262,7 +310,7 @@ jobject asJavaFinalState(JNIEnv *env, const FinalState &finalState) { return env->NewObject(FinalStateClass, FinalStateInitMethod, persistentState, debugLog, trafficStats, isRatingSuggested); } -jobject asJavaFingerprint(JNIEnv *env, std::string hash, std::string setup, std::string fingerprint) { +jobject asJavaFingerprint(JNIEnv *env, const std::string& hash, const std::string& setup, const std::string& fingerprint) { jstring hashStr = env->NewStringUTF(hash.c_str()); jstring setupStr = env->NewStringUTF(setup.c_str()); jstring fingerprintStr = env->NewStringUTF(fingerprint.c_str()); @@ -292,16 +340,24 @@ void initWebRTC(JNIEnv *env) { FinalStateInitMethod = env->GetMethodID(FinalStateClass, "", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V"); } -JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGroupNativeInstance(JNIEnv *env, jclass clazz, jobject instanceObj, jstring logFilePath, jboolean highQuality) { +JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGroupNativeInstance(JNIEnv *env, jclass clazz, jobject instanceObj, jstring logFilePath, jboolean highQuality, jlong videoCapturer, jboolean screencast, jboolean noiseSupression) { initWebRTC(env); - std::shared_ptr platformContext = std::make_shared(env, instanceObj); + 
std::shared_ptr videoCapture = videoCapturer ? std::shared_ptr(reinterpret_cast(videoCapturer)) : nullptr; + + std::shared_ptr platformContext; + if (videoCapture) { + platformContext = videoCapture->getPlatformContext(); + ((AndroidContext *) platformContext.get())->setJavaInstance(env, instanceObj); + } else { + platformContext = std::make_shared(env, instanceObj, screencast); + } GroupInstanceDescriptor descriptor = { .threads = StaticThreads::getThreads(), .config = { - .need_log = false, - //.logPath = tgvoip::jni::JavaStringToStdString(env, logFilePath), + .need_log = true, + .logPath = {tgvoip::jni::JavaStringToStdString(env, logFilePath)}, }, .networkStateUpdated = [platformContext](GroupNetworkState state) { tgvoip::jni::DoWithJNI([platformContext, state](JNIEnv *env) { @@ -321,8 +377,8 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGrou jboolean boolFill[size]; for (int a = 0; a < size; a++) { intFill[a] = update.updates[a].ssrc; - floatFill[a] = update.updates[a].value.level; - boolFill[a] = update.updates[a].value.voice; + floatFill[a] = update.updates[a].value.isMuted ? 
0 : update.updates[a].value.level; + boolFill[a] = !update.updates[a].value.isMuted && update.updates[a].value.voice; } env->SetIntArrayRegion(intArray, 0, size, intFill); env->SetFloatArrayRegion(floatArray, 0, size, floatFill); @@ -335,22 +391,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGrou env->DeleteLocalRef(boolArray); }); }, - .participantDescriptionsRequired = [platformContext](std::vector const &update) { - tgvoip::jni::DoWithJNI([platformContext, update](JNIEnv *env) { - unsigned int size = update.size(); - jintArray intArray = env->NewIntArray(size); - - jint intFill[size]; - for (int a = 0; a < size; a++) { - intFill[a] = update[a]; - } - env->SetIntArrayRegion(intArray, 0, size, intFill); - - jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance(); - env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onParticipantDescriptionsRequired", "([I)V"), intArray); - env->DeleteLocalRef(intArray); - }); - }, + .videoCapture = videoCapture, .requestBroadcastPart = [](std::shared_ptr platformContext, int64_t timestamp, int64_t duration, std::function callback) -> std::shared_ptr { std::shared_ptr task = std::make_shared(platformContext, callback, timestamp); ((AndroidContext *) platformContext.get())->streamTask = task; @@ -360,61 +401,44 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGrou }); return task; }, + .videoContentType = screencast ? 
VideoContentType::Screencast : VideoContentType::Generic, + .initialEnableNoiseSuppression = (bool) noiseSupression, + .requestMediaChannelDescriptions = [platformContext](std::vector const &ssrcs, std::function &&)> callback) -> std::shared_ptr { + std::shared_ptr task = std::make_shared(platformContext, callback); + ((AndroidContext *) platformContext.get())->descriptionTasks.push_back(task); + tgvoip::jni::DoWithJNI([platformContext, ssrcs, task](JNIEnv *env) { + unsigned int size = ssrcs.size(); + jintArray intArray = env->NewIntArray(size); + + jint intFill[size]; + for (int a = 0; a < size; a++) { + intFill[a] = ssrcs[a]; + } + env->SetIntArrayRegion(intArray, 0, size, intFill); + + jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance(); + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onParticipantDescriptionsRequired", "(J[I)V"), (jlong) task.get(), intArray); + env->DeleteLocalRef(intArray); + }); + return task; + }, .platformContext = platformContext }; auto *holder = new InstanceHolder; holder->groupNativeInstance = std::make_unique(std::move(descriptor)); holder->_platformContext = platformContext; + holder->_videoCapture = videoCapture; return reinterpret_cast(holder); } -JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setJoinResponsePayload(JNIEnv *env, jobject obj, jstring ufrag, jstring pwd, jobjectArray fingerprints, jobjectArray candidates) { +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setJoinResponsePayload(JNIEnv *env, jobject obj, jstring payload) { InstanceHolder *instance = getInstanceHolder(env, obj); if (instance->groupNativeInstance == nullptr) { return; } - std::vector fingerprintsArray; - std::vector candidatesArray; instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeRtc, true); - jsize size = env->GetArrayLength(fingerprints); - for (int i = 0; i < size; i++) { - JavaObject fingerprintObject(env, 
env->GetObjectArrayElement(fingerprints, i)); - fingerprintsArray.push_back( - { - .hash = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("hash")), - .setup = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("setup")), - .fingerprint = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("fingerprint")) - }); - } - size = env->GetArrayLength(candidates); - for (int i = 0; i < size; i++) { - JavaObject candidateObject(env, env->GetObjectArrayElement(candidates, i)); - candidatesArray.push_back( - { - .port = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("port")), - .protocol = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("protocol")), - .network = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("network")), - .generation = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("generation")), - .id = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("id")), - .component = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("component")), - .foundation = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("foundation")), - .priority = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("priority")), - .ip = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("ip")), - .type = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("type")), - .tcpType = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("tcpType")), - .relAddr = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("relAddr")), - .relPort = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("relPort")), - }); - } - std::vector participants; - instance->groupNativeInstance->setJoinResponsePayload( - { - .ufrag = tgvoip::jni::JavaStringToStdString(env, ufrag), - .pwd = 
tgvoip::jni::JavaStringToStdString(env, pwd), - .fingerprints = fingerprintsArray, - .candidates = candidatesArray, - }, std::move(participants)); + instance->groupNativeInstance->setJoinResponsePayload(tgvoip::jni::JavaStringToStdString(env, payload)); } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_prepareForStream(JNIEnv *env, jobject obj) { @@ -425,86 +449,108 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_prepareFo instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeBroadcast, true); } -JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_resetGroupInstance(JNIEnv *env, jobject obj, jboolean disconnect) { +void onEmitJoinPayload(const std::shared_ptr& platformContext, const GroupJoinPayload& payload) { + JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); + jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance(); + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onEmitJoinPayload", "(Ljava/lang/String;I)V"), env->NewStringUTF(payload.json.c_str()), (jint) payload.audioSsrc); +} + +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_resetGroupInstance(JNIEnv *env, jobject obj, jboolean set, jboolean disconnect) { InstanceHolder *instance = getInstanceHolder(env, obj); if (instance->groupNativeInstance == nullptr) { return; } - instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeNone, !disconnect); + if (set) { + instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeNone, !disconnect); + } std::shared_ptr platformContext = instance->_platformContext; instance->groupNativeInstance->emitJoinPayload([platformContext](const GroupJoinPayload& payload) { - JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - jobjectArray array = env->NewObjectArray(payload.fingerprints.size(), FingerprintClass, 0); - for (int a = 0; a < 
payload.fingerprints.size(); a++) { - env->SetObjectArrayElement(array, a, asJavaFingerprint(env, payload.fingerprints[a].hash, payload.fingerprints[a].setup, payload.fingerprints[a].fingerprint)); - } - jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance(); - env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onEmitJoinPayload", "(Ljava/lang/String;Ljava/lang/String;[Lorg/telegram/messenger/voip/Instance$Fingerprint;I)V"), env->NewStringUTF(payload.ufrag.c_str()), env->NewStringUTF(payload.pwd.c_str()), array, (jint) payload.ssrc); + onEmitJoinPayload(platformContext, payload); }); } -JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_addParticipants(JNIEnv *env, jobject obj, jintArray ssrcs, jobjectArray array) { +void broadcastRequestedSinks(InstanceHolder *instance) { + std::vector descriptions; + for (auto & remoteGroupSink : instance->remoteGroupSinks) { + VideoChannelDescription description; + description.endpointId = remoteGroupSink.second.endpointId; + description.ssrcGroups = remoteGroupSink.second.ssrcGroups; + description.maxQuality = remoteGroupSink.second.quality; + descriptions.push_back(std::move(description)); + } + instance->groupNativeInstance->setRequestedVideoChannels(std::move(descriptions)); +} + +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setNoiseSuppressionEnabled(JNIEnv *env, jobject obj, jboolean enabled) { InstanceHolder *instance = getInstanceHolder(env, obj); if (instance->groupNativeInstance == nullptr) { return; } + instance->groupNativeInstance->setIsNoiseSuppressionEnabled(enabled); +} - rapidjson::Document d; - jint *ssrcsArr = env->GetIntArrayElements(ssrcs, 0); - - jsize size = env->GetArrayLength(array); - std::vector participants; - for (int i = 0; i < size; i++) { - GroupParticipantDescription participantDescription; - participantDescription.audioSsrc = ssrcsArr[i]; - - jstring str = (jstring) env->GetObjectArrayElement(array, i); - 
if (str != nullptr) { - std::string json = tgvoip::jni::JavaStringToStdString(env, str); - d.Parse(json); - participantDescription.endpointId = d["endpoint"].GetString(); - - for (const auto &group : d["ssrc-groups"].GetArray()) { - tgcalls::GroupJoinPayloadVideoSourceGroup groupDesc; - groupDesc.semantics = group["semantics"].GetString(); - for (const auto &source : group["sources"].GetArray()) { - groupDesc.ssrcs.push_back(source.GetUint()); - } - participantDescription.videoSourceGroups.push_back(std::move(groupDesc)); +JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_addIncomingVideoOutput(JNIEnv *env, jobject obj, jint quality, jstring endpointId, jobjectArray ssrcGroups, jobject remoteSink) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->groupNativeInstance == nullptr) { + return 0; + } + SetVideoSink sink; + std::string endpointIdStr = tgvoip::jni::JavaStringToStdString(env, endpointId); + std::shared_ptr> ptr = webrtc::JavaToNativeVideoSink(env, remoteSink); + sink.sink = ptr; + sink.endpointId = endpointIdStr; + if (ssrcGroups) { + for (int i = 0, size = env->GetArrayLength(ssrcGroups); i < size; i++) { + JavaObject javaObject(env, env->GetObjectArrayElement(ssrcGroups, i)); + MediaSsrcGroup ssrcGroup; + ssrcGroup.semantics = tgvoip::jni::JavaStringToStdString(env, javaObject.getStringField("semantics")); + jintArray ssrcsArray = javaObject.getIntArrayField("ssrcs"); + jint *elements = env->GetIntArrayElements(ssrcsArray, nullptr); + for (int j = 0, size2 = env->GetArrayLength(ssrcsArray); j < size2; j++) { + ssrcGroup.ssrcs.push_back(elements[j]); } + env->ReleaseIntArrayElements(ssrcsArray, elements, JNI_ABORT); + sink.ssrcGroups.push_back(std::move(ssrcGroup)); + } + } + sink.quality = (VideoChannelDescription::Quality) quality; + instance->remoteGroupSinks[endpointIdStr] = std::move(sink); + broadcastRequestedSinks(instance); + instance->groupNativeInstance->addIncomingVideoOutput(endpointIdStr, ptr); 
+ return reinterpret_cast(ptr.get()); +} - for (const auto &extDict : d["rtp-hdrexts"].GetArray()) { - participantDescription.videoExtensionMap.emplace_back(extDict["id"].GetUint(), extDict["uri"].GetString()); - } - - for (const auto &payload : d["payload-types"].GetArray()) { - tgcalls::GroupJoinPayloadVideoPayloadType parsedPayload; - parsedPayload.id = payload["id"].GetUint(); - parsedPayload.clockrate = payload["clockrate"].GetUint(); - parsedPayload.channels = payload["channels"].GetUint(); - parsedPayload.name = payload["name"].GetString(); - - for (const auto &fb : payload["rtcp-fbs"].GetArray()) { - tgcalls::GroupJoinPayloadVideoPayloadFeedbackType parsedFeedback; - parsedFeedback.type = fb["type"].GetString(); - if (fb.HasMember("subtype")) { - parsedFeedback.subtype = fb["subtype"].GetString(); - } - parsedPayload.feedbackTypes.push_back(std::move(parsedFeedback)); - } - - for (const auto &fb : payload["parameters"].GetObject()) { - parsedPayload.parameters.emplace_back(fb.name.GetString(), fb.value.GetString()); - } - - participantDescription.videoPayloadTypes.push_back(std::move(parsedPayload)); +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_removeIncomingVideoOutput(JNIEnv *env, jobject obj, jlong nativeRemoteSink) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->groupNativeInstance == nullptr) { + return; + } + if (nativeRemoteSink == 0) { + instance->remoteGroupSinks.clear(); + } else { + for (auto iter = instance->remoteGroupSinks.begin(); iter != instance->remoteGroupSinks.end(); iter++) { + if (reinterpret_cast(iter->second.sink.get()) == nativeRemoteSink) { + instance->remoteGroupSinks.erase(iter); + break; } } - participants.push_back(std::move(participantDescription)); } - env->ReleaseIntArrayElements(ssrcs, ssrcsArr, JNI_ABORT); - instance->groupNativeInstance->addParticipants(std::move(participants)); + broadcastRequestedSinks(instance); +} + +JNIEXPORT void JNICALL 
Java_org_telegram_messenger_voip_NativeInstance_setVideoEndpointQuality(JNIEnv *env, jobject obj, jstring endpointId, jint quality) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->groupNativeInstance == nullptr) { + return; + } + broadcastRequestedSinks(instance); + auto sink = instance->remoteGroupSinks.find(tgvoip::jni::JavaStringToStdString(env, endpointId)); + if (sink == instance->remoteGroupSinks.end()) { + return; + } + sink->second.quality = (VideoChannelDescription::Quality) quality; + broadcastRequestedSinks(instance); } JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNativeInstance(JNIEnv *env, jclass clazz, jstring version, jobject instanceObj, jobject config, jstring persistentStateFilePath, jobjectArray endpoints, jobject proxyClass, jint networkType, jobject encryptionKey, jobject remoteSink, jlong videoCapturer, jfloat aspectRatio) { @@ -527,7 +573,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati platformContext = videoCapture->getPlatformContext(); ((AndroidContext *) platformContext.get())->setJavaInstance(env, instanceObj); } else { - platformContext = std::make_shared(env, instanceObj); + platformContext = std::make_shared(env, instanceObj, false); } Descriptor descriptor = { @@ -541,8 +587,8 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati .enableNS = configObject.getBooleanField("enableNs") == JNI_TRUE, .enableAGC = configObject.getBooleanField("enableAgc") == JNI_TRUE, .enableVolumeControl = true, - .logPath = tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("logPath")), - .statsLogPath = tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("statsLogPath")), + .logPath = {tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("logPath"))}, + .statsLogPath = {tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("statsLogPath"))}, .maxApiLayer = 
configObject.getIntField("maxApiLayer"), .enableHighBitrateVideo = true, .preferredVideoCodecs = {cricket::kVp9CodecName} @@ -625,7 +671,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati if (!env->IsSameObject(proxyClass, nullptr)) { JavaObject proxyObject(env, proxyClass); - descriptor.proxy = std::unique_ptr(new Proxy); + descriptor.proxy = std::make_unique(); descriptor.proxy->host = tgvoip::jni::JavaStringToStdString(env, proxyObject.getStringField("host")); descriptor.proxy->port = static_cast(proxyObject.getIntField("port")); descriptor.proxy->login = tgvoip::jni::JavaStringToStdString(env, proxyObject.getStringField("login")); @@ -742,7 +788,7 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNativ if (instance->nativeInstance == nullptr) { return; } - instance->nativeInstance->stop([instance](FinalState finalState) { + instance->nativeInstance->stop([instance](const FinalState& finalState) { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); jobject globalRef = ((AndroidContext *) instance->_platformContext.get())->getJavaInstance(); const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, globalRef).getStringField("persistentStateFilePath")); @@ -767,12 +813,12 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onStreamP if (instance->groupNativeInstance == nullptr) { return; } - AndroidContext *context = (AndroidContext *) instance->_platformContext.get(); + auto context = (AndroidContext *) instance->_platformContext.get(); std::shared_ptr streamTask = context->streamTask; - BroadcastPartTaskJava *task = (BroadcastPartTaskJava *) streamTask.get(); + auto task = (BroadcastPartTaskJava *) streamTask.get(); if (task != nullptr) { if (byteBuffer != nullptr) { - uint8_t *buf = (uint8_t *) env->GetDirectBufferAddress(byteBuffer); + auto buf = (uint8_t *) env->GetDirectBufferAddress(byteBuffer); task->call(ts, responseTs, 
BroadcastPart::Status::Success, buf, size); } else { task->call(ts, responseTs, size == 0 ? BroadcastPart::Status::NotReady : BroadcastPart::Status::ResyncNeeded, nullptr, 0); @@ -780,45 +826,79 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onStreamP } } -JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_createVideoCapturer(JNIEnv *env, jclass clazz, jobject localSink, jboolean front) { +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onMediaDescriptionAvailable(JNIEnv *env, jobject obj, jlong taskPtr, jintArray ssrcs) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->groupNativeInstance == nullptr) { + return; + } + auto task = reinterpret_cast(taskPtr); + task->call(env, ssrcs); + auto context = (AndroidContext *) instance->_platformContext.get(); + for (auto iter = context->descriptionTasks.begin(); iter != context->descriptionTasks.end(); iter++) { + if (reinterpret_cast(iter->get()) == taskPtr) { + context->descriptionTasks.erase(iter); + break; + } + } +} + +JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_createVideoCapturer(JNIEnv *env, jclass clazz, jobject localSink, jint type) { initWebRTC(env); - std::unique_ptr capture = tgcalls::VideoCaptureInterface::Create(StaticThreads::getThreads(), front ? "front" : "back", std::make_shared(env, nullptr)); + std::unique_ptr capture; + if (type == 0 || type == 1) { + capture = tgcalls::VideoCaptureInterface::Create(StaticThreads::getThreads(), type == 1 ? 
"front" : "back", std::make_shared(env, nullptr, false)); + } else { + capture = tgcalls::VideoCaptureInterface::Create(StaticThreads::getThreads(), "screen", std::make_shared(env, nullptr, true)); + } capture->setOutput(webrtc::JavaToNativeVideoSink(env, localSink)); capture->setState(VideoState::Active); return reinterpret_cast(capture.release()); } +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_activateVideoCapturer(JNIEnv *env, jobject obj, jlong videoCapturer) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->nativeInstance) { + instance->nativeInstance->setVideoCapture(nullptr); + } else if (instance->groupNativeInstance) { + instance->groupNativeInstance->setVideoSource(nullptr); + } + auto capturer = reinterpret_cast(videoCapturer); + capturer->setState(VideoState::Active); +} + JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_destroyVideoCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer) { - VideoCaptureInterface *capturer = reinterpret_cast(videoCapturer); + auto capturer = reinterpret_cast(videoCapturer); delete capturer; } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_switchCameraCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer, jboolean front) { - VideoCaptureInterface *capturer = reinterpret_cast(videoCapturer); + auto capturer = reinterpret_cast(videoCapturer); capturer->switchToDevice(front ? 
"front" : "back"); } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setVideoStateCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer, jint videoState) { - VideoCaptureInterface *capturer = reinterpret_cast(videoCapturer); + auto capturer = reinterpret_cast(videoCapturer); capturer->setState(static_cast(videoState)); } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_switchCamera(JNIEnv *env, jobject obj, jboolean front) { InstanceHolder *instance = getInstanceHolder(env, obj); - if (instance->nativeInstance == nullptr) { - return; - } if (instance->_videoCapture == nullptr) { return; } instance->_videoCapture->switchToDevice(front ? "front" : "back"); } +JNIEXPORT jboolean JNICALL Java_org_telegram_messenger_voip_NativeInstance_hasVideoCapturer(JNIEnv *env, jobject obj) { + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->_videoCapture == nullptr) { + return JNI_FALSE; + } + return JNI_TRUE; +} + JNIEXPORT void Java_org_telegram_messenger_voip_NativeInstance_setVideoState(JNIEnv *env, jobject obj, jint state) { InstanceHolder *instance = getInstanceHolder(env, obj); - if (instance->nativeInstance == nullptr) { - return; - } if (instance->_videoCapture == nullptr) { return; } @@ -827,16 +907,33 @@ JNIEXPORT void Java_org_telegram_messenger_voip_NativeInstance_setVideoState(JNI JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setupOutgoingVideo(JNIEnv *env, jobject obj, jobject localSink, jboolean front) { InstanceHolder *instance = getInstanceHolder(env, obj); - if (instance->nativeInstance == nullptr) { - return; - } if (instance->_videoCapture) { return; } instance->_videoCapture = tgcalls::VideoCaptureInterface::Create(StaticThreads::getThreads(), front ? 
"front" : "back", instance->_platformContext); instance->_videoCapture->setOutput(webrtc::JavaToNativeVideoSink(env, localSink)); instance->_videoCapture->setState(VideoState::Active); - instance->nativeInstance->setVideoCapture(instance->_videoCapture); + if (instance->nativeInstance) { + instance->nativeInstance->setVideoCapture(instance->_videoCapture); + } else if (instance->groupNativeInstance) { + instance->groupNativeInstance->setVideoCapture(instance->_videoCapture); + } +} + +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setupOutgoingVideoCreated(JNIEnv *env, jobject obj, jlong videoCapturer) { + if (videoCapturer == 0) { + return; + } + InstanceHolder *instance = getInstanceHolder(env, obj); + if (instance->_videoCapture == nullptr) { + instance->_videoCapture = std::shared_ptr(reinterpret_cast(videoCapturer)); + } + instance->_videoCapture->setState(VideoState::Active); + if (instance->nativeInstance) { + instance->nativeInstance->setVideoCapture(instance->_videoCapture); + } else if (instance->groupNativeInstance) { + instance->groupNativeInstance->setVideoCapture(instance->_videoCapture); + } } JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onSignalingDataReceive(JNIEnv *env, jobject obj, jbyteArray value) { @@ -849,7 +946,7 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onSignali const size_t size = env->GetArrayLength(value); auto array = std::vector(size); memcpy(&array[0], valueBytes, size); - instance->nativeInstance->receiveSignalingData(std::move(array)); + instance->nativeInstance->receiveSignalingData(array); env->ReleaseByteArrayElements(value, (jbyte *) valueBytes, JNI_ABORT); } diff --git a/TMessagesProj/jni/voip/rnnoise/AUTHORS b/TMessagesProj/jni/voip/rnnoise/AUTHORS new file mode 100644 index 000000000..7cd8b9be6 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/AUTHORS @@ -0,0 +1 @@ +Jean-Marc Valin diff --git a/TMessagesProj/jni/voip/rnnoise/COPYING 
b/TMessagesProj/jni/voip/rnnoise/COPYING new file mode 100644 index 000000000..01ea4b1a2 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/COPYING @@ -0,0 +1,31 @@ +Copyright (c) 2017, Mozilla +Copyright (c) 2007-2017, Jean-Marc Valin +Copyright (c) 2005-2017, Xiph.Org Foundation +Copyright (c) 2003-2004, Mark Borgerding + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +- Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +- Neither the name of the Xiph.Org Foundation nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION +OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/TMessagesProj/jni/voip/rnnoise/README b/TMessagesProj/jni/voip/rnnoise/README new file mode 100644 index 000000000..4158a9be0 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/README @@ -0,0 +1,21 @@ +RNNoise is a noise suppression library based on a recurrent neural network. + +To compile, just type: +% ./autogen.sh +% ./configure +% make + +Optionally: +% make install + +While it is meant to be used as a library, a simple command-line tool is +provided as an example. It operates on RAW 16-bit (machine endian) mono +PCM files sampled at 48 kHz. It can be used as: + +./examples/rnnoise_demo + +The output is also a 16-bit raw PCM file. + +The latest version of the source is available from +https://gitlab.xiph.org/xiph/rnnoise . The github repository +is a convenience copy. diff --git a/TMessagesProj/jni/voip/rnnoise/include/rnnoise.h b/TMessagesProj/jni/voip/rnnoise/include/rnnoise.h new file mode 100644 index 000000000..c4215d96d --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/include/rnnoise.h @@ -0,0 +1,114 @@ +/* Copyright (c) 2018 Gregor Richards + * Copyright (c) 2017 Mozilla */ +/* + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE FOUNDATION OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +#ifndef RNNOISE_H +#define RNNOISE_H 1 + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef RNNOISE_EXPORT +# if defined(WIN32) +# if defined(RNNOISE_BUILD) && defined(DLL_EXPORT) +# define RNNOISE_EXPORT __declspec(dllexport) +# else +# define RNNOISE_EXPORT +# endif +# elif defined(__GNUC__) && defined(RNNOISE_BUILD) +# define RNNOISE_EXPORT __attribute__ ((visibility ("default"))) +# else +# define RNNOISE_EXPORT +# endif +#endif + +typedef struct DenoiseState DenoiseState; +typedef struct RNNModel RNNModel; + +/** + * Return the size of DenoiseState + */ +RNNOISE_EXPORT int rnnoise_get_size(); + +/** + * Return the number of samples processed by rnnoise_process_frame at a time + */ +RNNOISE_EXPORT int rnnoise_get_frame_size(); + +/** + * Initializes a pre-allocated DenoiseState + * + * If model is NULL the default model is used. + * + * See: rnnoise_create() and rnnoise_model_from_file() + */ +RNNOISE_EXPORT int rnnoise_init(DenoiseState *st, RNNModel *model); + +/** + * Allocate and initialize a DenoiseState + * + * If model is NULL the default model is used. + * + * The returned pointer MUST be freed with rnnoise_destroy(). + */ +RNNOISE_EXPORT DenoiseState *rnnoise_create(RNNModel *model); + +/** + * Free a DenoiseState produced by rnnoise_create. + * + * The optional custom model must be freed by rnnoise_model_free() after. 
+ */ +RNNOISE_EXPORT void rnnoise_destroy(DenoiseState *st); + +/** + * Denoise a frame of samples + * + * in and out must be at least rnnoise_get_frame_size() large. + */ +RNNOISE_EXPORT float rnnoise_process_frame(DenoiseState *st, float *out, const float *in); + +/** + * Load a model from a file + * + * It must be deallocated with rnnoise_model_free() + */ +RNNOISE_EXPORT RNNModel *rnnoise_model_from_file(FILE *f); + +/** + * Free a custom model + * + * It must be called after all the DenoiseStates referring to it are freed. + */ +RNNOISE_EXPORT void rnnoise_model_free(RNNModel *model); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/TMessagesProj/jni/voip/rnnoise/src/_kiss_fft_guts.h b/TMessagesProj/jni/voip/rnnoise/src/_kiss_fft_guts.h new file mode 100644 index 000000000..17392b3e9 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/_kiss_fft_guts.h @@ -0,0 +1,182 @@ +/*Copyright (c) 2003-2004, Mark Borgerding + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE.*/ + +#ifndef KISS_FFT_GUTS_H +#define KISS_FFT_GUTS_H + +#define MIN(a,b) ((a)<(b) ? (a):(b)) +#define MAX(a,b) ((a)>(b) ? (a):(b)) + +/* kiss_fft.h + defines kiss_fft_scalar as either short or a float type + and defines + typedef struct { kiss_fft_scalar r; kiss_fft_scalar i; }kiss_fft_cpx; */ +#include "kiss_fft.h" + +/* + Explanation of macros dealing with complex math: + + C_MUL(m,a,b) : m = a*b + C_FIXDIV( c , div ) : if a fixed point impl., c /= div. noop otherwise + C_SUB( res, a,b) : res = a - b + C_SUBFROM( res , a) : res -= a + C_ADDTO( res , a) : res += a + * */ +#ifdef FIXED_POINT +#include "arch.h" + + +#define SAMP_MAX 2147483647 +#define TWID_MAX 32767 +#define TRIG_UPSCALE 1 + +#define SAMP_MIN -SAMP_MAX + + +# define S_MUL(a,b) MULT16_32_Q15(b, a) + +# define C_MUL(m,a,b) \ + do{ (m).r = SUB32_ovflw(S_MUL((a).r,(b).r) , S_MUL((a).i,(b).i)); \ + (m).i = ADD32_ovflw(S_MUL((a).r,(b).i) , S_MUL((a).i,(b).r)); }while(0) + +# define C_MULC(m,a,b) \ + do{ (m).r = ADD32_ovflw(S_MUL((a).r,(b).r) , S_MUL((a).i,(b).i)); \ + (m).i = SUB32_ovflw(S_MUL((a).i,(b).r) , S_MUL((a).r,(b).i)); }while(0) + +# define C_MULBYSCALAR( c, s ) \ + do{ (c).r = S_MUL( (c).r , s ) ;\ + (c).i = S_MUL( (c).i , s ) ; }while(0) + +# define DIVSCALAR(x,k) \ + (x) = S_MUL( x, (TWID_MAX-((k)>>1))/(k)+1 ) + +# define C_FIXDIV(c,div) \ + do { DIVSCALAR( (c).r , div); \ + DIVSCALAR( (c).i , div); }while (0) + +#define C_ADD( res, a,b)\ + do 
{(res).r=ADD32_ovflw((a).r,(b).r); (res).i=ADD32_ovflw((a).i,(b).i); \ + }while(0) +#define C_SUB( res, a,b)\ + do {(res).r=SUB32_ovflw((a).r,(b).r); (res).i=SUB32_ovflw((a).i,(b).i); \ + }while(0) +#define C_ADDTO( res , a)\ + do {(res).r = ADD32_ovflw((res).r, (a).r); (res).i = ADD32_ovflw((res).i,(a).i);\ + }while(0) + +#define C_SUBFROM( res , a)\ + do {(res).r = ADD32_ovflw((res).r,(a).r); (res).i = SUB32_ovflw((res).i,(a).i); \ + }while(0) + +#if defined(OPUS_ARM_INLINE_ASM) +#include "arm/kiss_fft_armv4.h" +#endif + +#if defined(OPUS_ARM_INLINE_EDSP) +#include "arm/kiss_fft_armv5e.h" +#endif +#if defined(MIPSr1_ASM) +#include "mips/kiss_fft_mipsr1.h" +#endif + +#else /* not FIXED_POINT*/ + +# define S_MUL(a,b) ( (a)*(b) ) +#define C_MUL(m,a,b) \ + do{ (m).r = (a).r*(b).r - (a).i*(b).i;\ + (m).i = (a).r*(b).i + (a).i*(b).r; }while(0) +#define C_MULC(m,a,b) \ + do{ (m).r = (a).r*(b).r + (a).i*(b).i;\ + (m).i = (a).i*(b).r - (a).r*(b).i; }while(0) + +#define C_MUL4(m,a,b) C_MUL(m,a,b) + +# define C_FIXDIV(c,div) /* NOOP */ +# define C_MULBYSCALAR( c, s ) \ + do{ (c).r *= (s);\ + (c).i *= (s); }while(0) +#endif + +#ifndef CHECK_OVERFLOW_OP +# define CHECK_OVERFLOW_OP(a,op,b) /* noop */ +#endif + +#ifndef C_ADD +#define C_ADD( res, a,b)\ + do { \ + CHECK_OVERFLOW_OP((a).r,+,(b).r)\ + CHECK_OVERFLOW_OP((a).i,+,(b).i)\ + (res).r=(a).r+(b).r; (res).i=(a).i+(b).i; \ + }while(0) +#define C_SUB( res, a,b)\ + do { \ + CHECK_OVERFLOW_OP((a).r,-,(b).r)\ + CHECK_OVERFLOW_OP((a).i,-,(b).i)\ + (res).r=(a).r-(b).r; (res).i=(a).i-(b).i; \ + }while(0) +#define C_ADDTO( res , a)\ + do { \ + CHECK_OVERFLOW_OP((res).r,+,(a).r)\ + CHECK_OVERFLOW_OP((res).i,+,(a).i)\ + (res).r += (a).r; (res).i += (a).i;\ + }while(0) + +#define C_SUBFROM( res , a)\ + do {\ + CHECK_OVERFLOW_OP((res).r,-,(a).r)\ + CHECK_OVERFLOW_OP((res).i,-,(a).i)\ + (res).r -= (a).r; (res).i -= (a).i; \ + }while(0) +#endif /* C_ADD defined */ + +#ifdef FIXED_POINT +/*# define KISS_FFT_COS(phase) 
TRIG_UPSCALE*floor(MIN(32767,MAX(-32767,.5+32768 * cos (phase)))) +# define KISS_FFT_SIN(phase) TRIG_UPSCALE*floor(MIN(32767,MAX(-32767,.5+32768 * sin (phase))))*/ +# define KISS_FFT_COS(phase) floor(.5+TWID_MAX*cos (phase)) +# define KISS_FFT_SIN(phase) floor(.5+TWID_MAX*sin (phase)) +# define HALF_OF(x) ((x)>>1) +#elif defined(USE_SIMD) +# define KISS_FFT_COS(phase) _mm_set1_ps( cos(phase) ) +# define KISS_FFT_SIN(phase) _mm_set1_ps( sin(phase) ) +# define HALF_OF(x) ((x)*_mm_set1_ps(.5f)) +#else +# define KISS_FFT_COS(phase) (kiss_fft_scalar) cos(phase) +# define KISS_FFT_SIN(phase) (kiss_fft_scalar) sin(phase) +# define HALF_OF(x) ((x)*.5f) +#endif + +#define kf_cexp(x,phase) \ + do{ \ + (x)->r = KISS_FFT_COS(phase);\ + (x)->i = KISS_FFT_SIN(phase);\ + }while(0) + +#define kf_cexp2(x,phase) \ + do{ \ + (x)->r = TRIG_UPSCALE*celt_cos_norm((phase));\ + (x)->i = TRIG_UPSCALE*celt_cos_norm((phase)-32768);\ +}while(0) + +#endif /* KISS_FFT_GUTS_H */ diff --git a/TMessagesProj/jni/voip/rnnoise/src/arch.h b/TMessagesProj/jni/voip/rnnoise/src/arch.h new file mode 100644 index 000000000..52de62334 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/arch.h @@ -0,0 +1,261 @@ +/* Copyright (c) 2003-2008 Jean-Marc Valin + Copyright (c) 2007-2008 CSIRO + Copyright (c) 2007-2009 Xiph.Org Foundation + Written by Jean-Marc Valin */ +/** + @file arch.h + @brief Various architecture definitions for CELT +*/ +/* + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER + OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +#ifndef ARCH_H +#define ARCH_H + +#include "opus_types.h" +#include "common.h" + +# if !defined(__GNUC_PREREQ) +# if defined(__GNUC__)&&defined(__GNUC_MINOR__) +# define __GNUC_PREREQ(_maj,_min) \ + ((__GNUC__<<16)+__GNUC_MINOR__>=((_maj)<<16)+(_min)) +# else +# define __GNUC_PREREQ(_maj,_min) 0 +# endif +# endif + +#define CELT_SIG_SCALE 32768.f + +#define celt_fatal(str) _celt_fatal(str, __FILE__, __LINE__); +#ifdef ENABLE_ASSERTIONS +#include +#include +#ifdef __GNUC__ +__attribute__((noreturn)) +#endif +static OPUS_INLINE void _celt_fatal(const char *str, const char *file, int line) +{ + fprintf (stderr, "Fatal (internal) error in %s, line %d: %s\n", file, line, str); + abort(); +} +#define celt_assert(cond) {if (!(cond)) {celt_fatal("assertion failed: " #cond);}} +#define celt_assert2(cond, message) {if (!(cond)) {celt_fatal("assertion failed: " #cond "\n" message);}} +#else +#define celt_assert(cond) +#define celt_assert2(cond, message) +#endif + +#define IMUL32(a,b) ((a)*(b)) + +#define MIN16(a,b) ((a) < (b) ? (a) : (b)) /**< Minimum 16-bit value. */ +#define MAX16(a,b) ((a) > (b) ? (a) : (b)) /**< Maximum 16-bit value. */ +#define MIN32(a,b) ((a) < (b) ? 
(a) : (b)) /**< Minimum 32-bit value. */ +#define MAX32(a,b) ((a) > (b) ? (a) : (b)) /**< Maximum 32-bit value. */ +#define IMIN(a,b) ((a) < (b) ? (a) : (b)) /**< Minimum int value. */ +#define IMAX(a,b) ((a) > (b) ? (a) : (b)) /**< Maximum int value. */ +#define UADD32(a,b) ((a)+(b)) +#define USUB32(a,b) ((a)-(b)) + +/* Set this if opus_int64 is a native type of the CPU. */ +/* Assume that all LP64 architectures have fast 64-bit types; also x86_64 + (which can be ILP32 for x32) and Win64 (which is LLP64). */ +#if defined(__x86_64__) || defined(__LP64__) || defined(_WIN64) +#define OPUS_FAST_INT64 1 +#else +#define OPUS_FAST_INT64 0 +#endif + +#define PRINT_MIPS(file) + +#ifdef FIXED_POINT + +typedef opus_int16 opus_val16; +typedef opus_int32 opus_val32; +typedef opus_int64 opus_val64; + +typedef opus_val32 celt_sig; +typedef opus_val16 celt_norm; +typedef opus_val32 celt_ener; + +#define Q15ONE 32767 + +#define SIG_SHIFT 12 +/* Safe saturation value for 32-bit signals. Should be less than + 2^31*(1-0.85) to avoid blowing up on DC at deemphasis.*/ +#define SIG_SAT (300000000) + +#define NORM_SCALING 16384 + +#define DB_SHIFT 10 + +#define EPSILON 1 +#define VERY_SMALL 0 +#define VERY_LARGE16 ((opus_val16)32767) +#define Q15_ONE ((opus_val16)32767) + +#define SCALEIN(a) (a) +#define SCALEOUT(a) (a) + +#define ABS16(x) ((x) < 0 ? (-(x)) : (x)) +#define ABS32(x) ((x) < 0 ? (-(x)) : (x)) + +static OPUS_INLINE opus_int16 SAT16(opus_int32 x) { + return x > 32767 ? 32767 : x < -32768 ? 
-32768 : (opus_int16)x; +} + +#ifdef FIXED_DEBUG +#include "fixed_debug.h" +#else + +#include "fixed_generic.h" + +#ifdef OPUS_ARM_PRESUME_AARCH64_NEON_INTR +#include "arm/fixed_arm64.h" +#elif OPUS_ARM_INLINE_EDSP +#include "arm/fixed_armv5e.h" +#elif defined (OPUS_ARM_INLINE_ASM) +#include "arm/fixed_armv4.h" +#elif defined (BFIN_ASM) +#include "fixed_bfin.h" +#elif defined (TI_C5X_ASM) +#include "fixed_c5x.h" +#elif defined (TI_C6X_ASM) +#include "fixed_c6x.h" +#endif + +#endif + +#else /* FIXED_POINT */ + +typedef float opus_val16; +typedef float opus_val32; +typedef float opus_val64; + +typedef float celt_sig; +typedef float celt_norm; +typedef float celt_ener; + +#ifdef FLOAT_APPROX +/* This code should reliably detect NaN/inf even when -ffast-math is used. + Assumes IEEE 754 format. */ +static OPUS_INLINE int celt_isnan(float x) +{ + union {float f; opus_uint32 i;} in; + in.f = x; + return ((in.i>>23)&0xFF)==0xFF && (in.i&0x007FFFFF)!=0; +} +#else +#ifdef __FAST_MATH__ +#error Cannot build libopus with -ffast-math unless FLOAT_APPROX is defined. This could result in crashes on extreme (e.g. NaN) input +#endif +#define celt_isnan(x) ((x)!=(x)) +#endif + +#define Q15ONE 1.0f + +#define NORM_SCALING 1.f + +#define EPSILON 1e-15f +#define VERY_SMALL 1e-30f +#define VERY_LARGE16 1e15f +#define Q15_ONE ((opus_val16)1.f) + +/* This appears to be the same speed as C99's fabsf() but it's more portable. 
*/ +#define ABS16(x) ((float)fabs(x)) +#define ABS32(x) ((float)fabs(x)) + +#define QCONST16(x,bits) (x) +#define QCONST32(x,bits) (x) + +#define NEG16(x) (-(x)) +#define NEG32(x) (-(x)) +#define NEG32_ovflw(x) (-(x)) +#define EXTRACT16(x) (x) +#define EXTEND32(x) (x) +#define SHR16(a,shift) (a) +#define SHL16(a,shift) (a) +#define SHR32(a,shift) (a) +#define SHL32(a,shift) (a) +#define PSHR32(a,shift) (a) +#define VSHR32(a,shift) (a) + +#define PSHR(a,shift) (a) +#define SHR(a,shift) (a) +#define SHL(a,shift) (a) +#define SATURATE(x,a) (x) +#define SATURATE16(x) (x) + +#define ROUND16(a,shift) (a) +#define SROUND16(a,shift) (a) +#define HALF16(x) (.5f*(x)) +#define HALF32(x) (.5f*(x)) + +#define ADD16(a,b) ((a)+(b)) +#define SUB16(a,b) ((a)-(b)) +#define ADD32(a,b) ((a)+(b)) +#define SUB32(a,b) ((a)-(b)) +#define ADD32_ovflw(a,b) ((a)+(b)) +#define SUB32_ovflw(a,b) ((a)-(b)) +#define MULT16_16_16(a,b) ((a)*(b)) +#define MULT16_16(a,b) ((opus_val32)(a)*(opus_val32)(b)) +#define MAC16_16(c,a,b) ((c)+(opus_val32)(a)*(opus_val32)(b)) + +#define MULT16_32_Q15(a,b) ((a)*(b)) +#define MULT16_32_Q16(a,b) ((a)*(b)) + +#define MULT32_32_Q31(a,b) ((a)*(b)) + +#define MAC16_32_Q15(c,a,b) ((c)+(a)*(b)) +#define MAC16_32_Q16(c,a,b) ((c)+(a)*(b)) + +#define MULT16_16_Q11_32(a,b) ((a)*(b)) +#define MULT16_16_Q11(a,b) ((a)*(b)) +#define MULT16_16_Q13(a,b) ((a)*(b)) +#define MULT16_16_Q14(a,b) ((a)*(b)) +#define MULT16_16_Q15(a,b) ((a)*(b)) +#define MULT16_16_P15(a,b) ((a)*(b)) +#define MULT16_16_P13(a,b) ((a)*(b)) +#define MULT16_16_P14(a,b) ((a)*(b)) +#define MULT16_32_P16(a,b) ((a)*(b)) + +#define DIV32_16(a,b) (((opus_val32)(a))/(opus_val16)(b)) +#define DIV32(a,b) (((opus_val32)(a))/(opus_val32)(b)) + +#define SCALEIN(a) ((a)*CELT_SIG_SCALE) +#define SCALEOUT(a) ((a)*(1/CELT_SIG_SCALE)) + +#define SIG2WORD16(x) (x) + +#endif /* !FIXED_POINT */ + +#ifndef GLOBAL_STACK_SIZE +#ifdef FIXED_POINT +#define GLOBAL_STACK_SIZE 120000 +#else +#define GLOBAL_STACK_SIZE 120000 +#endif 
+#endif + +#endif /* ARCH_H */ diff --git a/TMessagesProj/jni/voip/rnnoise/src/celt_lpc.c b/TMessagesProj/jni/voip/rnnoise/src/celt_lpc.c new file mode 100644 index 000000000..521351e9d --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/celt_lpc.c @@ -0,0 +1,279 @@ +/* Copyright (c) 2009-2010 Xiph.Org Foundation + Written by Jean-Marc Valin */ +/* + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER + OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*/ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "celt_lpc.h" +#include "arch.h" +#include "common.h" +#include "pitch.h" + +void _celt_lpc( + opus_val16 *_lpc, /* out: [0...p-1] LPC coefficients */ +const opus_val32 *ac, /* in: [0...p] autocorrelation values */ +int p +) +{ + int i, j; + opus_val32 r; + opus_val32 error = ac[0]; +#ifdef FIXED_POINT + opus_val32 lpc[LPC_ORDER]; +#else + float *lpc = _lpc; +#endif + + RNN_CLEAR(lpc, p); + if (ac[0] != 0) + { + for (i = 0; i < p; i++) { + /* Sum up this iteration's reflection coefficient */ + opus_val32 rr = 0; + for (j = 0; j < i; j++) + rr += MULT32_32_Q31(lpc[j],ac[i - j]); + rr += SHR32(ac[i + 1],3); + r = -SHL32(rr,3)/error; + /* Update LPC coefficients and total error */ + lpc[i] = SHR32(r,3); + for (j = 0; j < (i+1)>>1; j++) + { + opus_val32 tmp1, tmp2; + tmp1 = lpc[j]; + tmp2 = lpc[i-1-j]; + lpc[j] = tmp1 + MULT32_32_Q31(r,tmp2); + lpc[i-1-j] = tmp2 + MULT32_32_Q31(r,tmp1); + } + + error = error - MULT32_32_Q31(MULT32_32_Q31(r,r),error); + /* Bail out once we get 30 dB gain */ +#ifdef FIXED_POINT + if (error=1;j--) + { + mem[j]=mem[j-1]; + } + mem[0] = SROUND16(sum, SIG_SHIFT); + _y[i] = sum; + } +#else + int i,j; + celt_assert((ord&3)==0); + opus_val16 rden[ord]; + opus_val16 y[N+ord]; + for(i=0;i0); + celt_assert(overlap>=0); + if (overlap == 0) + { + xptr = x; + } else { + for (i=0;i0) + { + for(i=0;i= 536870912) + { + int shift2=1; + if (ac[0] >= 1073741824) + shift2++; + for (i=0;i<=lag;i++) + ac[i] = SHR32(ac[i], shift2); + shift += shift2; + } +#endif + + return shift; +} diff --git a/TMessagesProj/jni/voip/rnnoise/src/celt_lpc.h b/TMessagesProj/jni/voip/rnnoise/src/celt_lpc.h new file mode 100644 index 000000000..34e0ff993 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/celt_lpc.h @@ -0,0 +1,59 @@ +/* Copyright (c) 2009-2010 Xiph.Org Foundation + Written by Jean-Marc Valin */ +/* + Redistribution and use in source and binary forms, with or without + modification, are 
permitted provided that the following conditions + are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER + OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*/ + +#ifndef PLC_H +#define PLC_H + +#include "arch.h" +#include "common.h" + +#if defined(OPUS_X86_MAY_HAVE_SSE4_1) +#include "x86/celt_lpc_sse.h" +#endif + +#define LPC_ORDER 24 + +void _celt_lpc(opus_val16 *_lpc, const opus_val32 *ac, int p); + +void celt_fir( + const opus_val16 *x, + const opus_val16 *num, + opus_val16 *y, + int N, + int ord); + +void celt_iir(const opus_val32 *x, + const opus_val16 *den, + opus_val32 *y, + int N, + int ord, + opus_val16 *mem); + +int _celt_autocorr(const opus_val16 *x, opus_val32 *ac, + const opus_val16 *window, int overlap, int lag, int n); + +#endif /* PLC_H */ diff --git a/TMessagesProj/jni/voip/rnnoise/src/common.h b/TMessagesProj/jni/voip/rnnoise/src/common.h new file mode 100644 index 000000000..5005bfffc --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/common.h @@ -0,0 +1,48 @@ + + +#ifndef COMMON_H +#define COMMON_H + +#include "stdlib.h" +#include "string.h" + +#define RNN_INLINE inline +#define OPUS_INLINE inline + + +/** RNNoise wrapper for malloc(). To do your own dynamic allocation, all you need t +o do is replace this function and rnnoise_free */ +#ifndef OVERRIDE_RNNOISE_ALLOC +static RNN_INLINE void *rnnoise_alloc (size_t size) +{ + return malloc(size); +} +#endif + +/** RNNoise wrapper for free(). To do your own dynamic allocation, all you need to do is replace this function and rnnoise_alloc */ +#ifndef OVERRIDE_RNNOISE_FREE +static RNN_INLINE void rnnoise_free (void *ptr) +{ + free(ptr); +} +#endif + +/** Copy n elements from src to dst. The 0* term provides compile-time type checking */ +#ifndef OVERRIDE_RNN_COPY +#define RNN_COPY(dst, src, n) (memcpy((dst), (src), (n)*sizeof(*(dst)) + 0*((dst)-(src)) )) +#endif + +/** Copy n elements from src to dst, allowing overlapping regions. 
The 0* term + provides compile-time type checking */ +#ifndef OVERRIDE_RNN_MOVE +#define RNN_MOVE(dst, src, n) (memmove((dst), (src), (n)*sizeof(*(dst)) + 0*((dst)-(src)) )) +#endif + +/** Set n elements of dst to zero */ +#ifndef OVERRIDE_RNN_CLEAR +#define RNN_CLEAR(dst, n) (memset((dst), 0, (n)*sizeof(*(dst)))) +#endif + + + +#endif diff --git a/TMessagesProj/jni/voip/rnnoise/src/denoise.c b/TMessagesProj/jni/voip/rnnoise/src/denoise.c new file mode 100644 index 000000000..5a628440d --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/denoise.c @@ -0,0 +1,646 @@ +/* Copyright (c) 2018 Gregor Richards + * Copyright (c) 2017 Mozilla */ +/* + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*/ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include +#include "kiss_fft.h" +#include "common.h" +#include +#include "rnnoise.h" +#include "pitch.h" +#include "arch.h" +#include "rnn.h" +#include "rnn_data.h" + +#define FRAME_SIZE_SHIFT 2 +#define FRAME_SIZE (120<rnn.model = model; + else + st->rnn.model = &rnnoise_model_orig; + st->rnn.vad_gru_state = calloc(sizeof(float), st->rnn.model->vad_gru_size); + st->rnn.noise_gru_state = calloc(sizeof(float), st->rnn.model->noise_gru_size); + st->rnn.denoise_gru_state = calloc(sizeof(float), st->rnn.model->denoise_gru_size); + return 0; +} + +DenoiseState *rnnoise_create(RNNModel *model) { + DenoiseState *st; + st = malloc(rnnoise_get_size()); + rnnoise_init(st, model); + return st; +} + +void rnnoise_destroy(DenoiseState *st) { + free(st->rnn.vad_gru_state); + free(st->rnn.noise_gru_state); + free(st->rnn.denoise_gru_state); + free(st); +} + +#if TRAINING +int lowpass = FREQ_SIZE; +int band_lp = NB_BANDS; +#endif + +static void frame_analysis(DenoiseState *st, kiss_fft_cpx *X, float *Ex, const float *in) { + int i; + float x[WINDOW_SIZE]; + RNN_COPY(x, st->analysis_mem, FRAME_SIZE); + for (i=0;ianalysis_mem, in, FRAME_SIZE); + apply_window(x); + forward_transform(X, x); +#if TRAINING + for (i=lowpass;i>1]; + int pitch_index; + float gain; + float *(pre[1]); + float tmp[NB_BANDS]; + float follow, logMax; + frame_analysis(st, X, Ex, in); + RNN_MOVE(st->pitch_buf, &st->pitch_buf[FRAME_SIZE], PITCH_BUF_SIZE-FRAME_SIZE); + RNN_COPY(&st->pitch_buf[PITCH_BUF_SIZE-FRAME_SIZE], in, FRAME_SIZE); + pre[0] = &st->pitch_buf[0]; + pitch_downsample(pre, pitch_buf, PITCH_BUF_SIZE, 1); + pitch_search(pitch_buf+(PITCH_MAX_PERIOD>>1), pitch_buf, PITCH_FRAME_SIZE, + PITCH_MAX_PERIOD-3*PITCH_MIN_PERIOD, &pitch_index); + pitch_index = PITCH_MAX_PERIOD-pitch_index; + + gain = remove_doubling(pitch_buf, PITCH_MAX_PERIOD, PITCH_MIN_PERIOD, + PITCH_FRAME_SIZE, &pitch_index, st->last_period, st->last_gain); + 
st->last_period = pitch_index; + st->last_gain = gain; + for (i=0;ipitch_buf[PITCH_BUF_SIZE-WINDOW_SIZE-pitch_index+i]; + apply_window(p); + forward_transform(P, p); + compute_band_energy(Ep, P); + compute_band_corr(Exp, X, P); + for (i=0;icepstral_mem[st->memid]; + ceps_1 = (st->memid < 1) ? st->cepstral_mem[CEPS_MEM+st->memid-1] : st->cepstral_mem[st->memid-1]; + ceps_2 = (st->memid < 2) ? st->cepstral_mem[CEPS_MEM+st->memid-2] : st->cepstral_mem[st->memid-2]; + for (i=0;imemid++; + for (i=0;imemid == CEPS_MEM) st->memid = 0; + for (i=0;icepstral_mem[i][k] - st->cepstral_mem[j][k]; + dist += tmp*tmp; + } + if (j!=i) + mindist = MIN32(mindist, dist); + } + spec_variability += mindist; + } + features[NB_BANDS+3*NB_DELTA_CEPS+1] = spec_variability/CEPS_MEM-2.1; + return TRAINING && E < 0.1; +} + +static void frame_synthesis(DenoiseState *st, float *out, const kiss_fft_cpx *y) { + float x[WINDOW_SIZE]; + int i; + inverse_transform(x, y); + apply_window(x); + for (i=0;isynthesis_mem[i]; + RNN_COPY(st->synthesis_mem, &x[FRAME_SIZE], FRAME_SIZE); +} + +static void biquad(float *y, float mem[2], const float *x, const float *b, const float *a, int N) { + int i; + for (i=0;ig[i]) r[i] = 1; + else r[i] = Exp[i]*(1-g[i])/(.001 + g[i]*(1-Exp[i])); + r[i] = MIN16(1, MAX16(0, r[i])); +#else + if (Exp[i]>g[i]) r[i] = 1; + else r[i] = SQUARE(Exp[i])*(1-SQUARE(g[i]))/(.001 + SQUARE(g[i])*(1-SQUARE(Exp[i]))); + r[i] = sqrt(MIN16(1, MAX16(0, r[i]))); +#endif + r[i] *= sqrt(Ex[i]/(1e-8+Ep[i])); + } + interp_band_gain(rf, r); + for (i=0;imem_hp_x, in, b_hp, a_hp, FRAME_SIZE); + silence = compute_frame_features(st, X, P, Ex, Ep, Exp, features, x); + + if (!silence) { + compute_rnn(&st->rnn, g, &vad_prob, features); + pitch_filter(X, P, Ex, Ep, Exp, g); + for (i=0;ilastg[i]); + st->lastg[i] = g[i]; + } + interp_band_gain(gf, g); +#if 1 + for (i=0;i \n", argv[0]); + return 1; + } + f1 = fopen(argv[1], "r"); + f2 = fopen(argv[2], "r"); + maxCount = atoi(argv[3]); + for(i=0;i<150;i++) { + 
short tmp[FRAME_SIZE]; + fread(tmp, sizeof(short), FRAME_SIZE, f2); + } + while (1) { + kiss_fft_cpx X[FREQ_SIZE], Y[FREQ_SIZE], N[FREQ_SIZE], P[WINDOW_SIZE]; + float Ex[NB_BANDS], Ey[NB_BANDS], En[NB_BANDS], Ep[NB_BANDS]; + float Exp[NB_BANDS]; + float Ln[NB_BANDS]; + float features[NB_FEATURES]; + float g[NB_BANDS]; + short tmp[FRAME_SIZE]; + float vad=0; + float E=0; + if (count==maxCount) break; + if ((count%1000)==0) fprintf(stderr, "%d\r", count); + if (++gain_change_count > 2821) { + speech_gain = pow(10., (-40+(rand()%60))/20.); + noise_gain = pow(10., (-30+(rand()%50))/20.); + if (rand()%10==0) noise_gain = 0; + noise_gain *= speech_gain; + if (rand()%10==0) speech_gain = 0; + gain_change_count = 0; + rand_resp(a_noise, b_noise); + rand_resp(a_sig, b_sig); + lowpass = FREQ_SIZE * 3000./24000. * pow(50., rand()/(double)RAND_MAX); + for (i=0;i lowpass) { + band_lp = i; + break; + } + } + } + if (speech_gain != 0) { + fread(tmp, sizeof(short), FRAME_SIZE, f1); + if (feof(f1)) { + rewind(f1); + fread(tmp, sizeof(short), FRAME_SIZE, f1); + } + for (i=0;i 1e9f) { + vad_cnt=0; + } else if (E > 1e8f) { + vad_cnt -= 5; + } else if (E > 1e7f) { + vad_cnt++; + } else { + vad_cnt+=2; + } + if (vad_cnt < 0) vad_cnt = 0; + if (vad_cnt > 15) vad_cnt = 15; + + if (vad_cnt >= 10) vad = 0; + else if (vad_cnt > 0) vad = 0.5f; + else vad = 1.f; + + frame_analysis(st, Y, Ey, x); + frame_analysis(noise_state, N, En, n); + for (i=0;ilast_gain, noisy->last_period); + for (i=0;i 1) g[i] = 1; + if (silence || i > band_lp) g[i] = -1; + if (Ey[i] < 5e-2 && Ex[i] < 5e-2) g[i] = -1; + if (vad==0 && noise_gain==0) g[i] = -1; + } + count++; +#if 1 + fwrite(features, sizeof(float), NB_FEATURES, stdout); + fwrite(g, sizeof(float), NB_BANDS, stdout); + fwrite(Ln, sizeof(float), NB_BANDS, stdout); + fwrite(&vad, sizeof(float), 1, stdout); +#endif + } + fprintf(stderr, "matrix size: %d x %d\n", count, NB_FEATURES + 2*NB_BANDS + 1); + fclose(f1); + fclose(f2); + return 0; +} + +#endif diff 
--git a/TMessagesProj/jni/voip/rnnoise/src/kiss_fft.c b/TMessagesProj/jni/voip/rnnoise/src/kiss_fft.c new file mode 100644 index 000000000..d6b9f26d0 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/kiss_fft.c @@ -0,0 +1,601 @@ +/*Copyright (c) 2003-2004, Mark Borgerding + Lots of modifications by Jean-Marc Valin + Copyright (c) 2005-2007, Xiph.Org Foundation + Copyright (c) 2008, Xiph.Org Foundation, CSIRO + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE.*/ + +/* This code is originally from Mark Borgerding's KISS-FFT but has been + heavily modified to better suit Opus */ + +#ifndef SKIP_CONFIG_H +# ifdef HAVE_CONFIG_H +# include "config.h" +# endif +#endif + +#include "_kiss_fft_guts.h" +#define CUSTOM_MODES + +/* The guts header contains all the multiplication and addition macros that are defined for + complex numbers. It also declares the kf_ internal functions. +*/ + +static void kf_bfly2( + kiss_fft_cpx * Fout, + int m, + int N + ) +{ + kiss_fft_cpx * Fout2; + int i; + (void)m; +#ifdef CUSTOM_MODES + if (m==1) + { + celt_assert(m==1); + for (i=0;itwiddles; + /* m is guaranteed to be a multiple of 4. */ + for (j=0;jtwiddles[fstride*m]; +#endif + for (i=0;itwiddles; + /* For non-custom modes, m is guaranteed to be a multiple of 4. 
*/ + k=m; + do { + + C_MUL(scratch[1],Fout[m] , *tw1); + C_MUL(scratch[2],Fout[m2] , *tw2); + + C_ADD(scratch[3],scratch[1],scratch[2]); + C_SUB(scratch[0],scratch[1],scratch[2]); + tw1 += fstride; + tw2 += fstride*2; + + Fout[m].r = SUB32_ovflw(Fout->r, HALF_OF(scratch[3].r)); + Fout[m].i = SUB32_ovflw(Fout->i, HALF_OF(scratch[3].i)); + + C_MULBYSCALAR( scratch[0] , epi3.i ); + + C_ADDTO(*Fout,scratch[3]); + + Fout[m2].r = ADD32_ovflw(Fout[m].r, scratch[0].i); + Fout[m2].i = SUB32_ovflw(Fout[m].i, scratch[0].r); + + Fout[m].r = SUB32_ovflw(Fout[m].r, scratch[0].i); + Fout[m].i = ADD32_ovflw(Fout[m].i, scratch[0].r); + + ++Fout; + } while(--k); + } +} + + +#ifndef OVERRIDE_kf_bfly5 +static void kf_bfly5( + kiss_fft_cpx * Fout, + const size_t fstride, + const kiss_fft_state *st, + int m, + int N, + int mm + ) +{ + kiss_fft_cpx *Fout0,*Fout1,*Fout2,*Fout3,*Fout4; + int i, u; + kiss_fft_cpx scratch[13]; + const kiss_twiddle_cpx *tw; + kiss_twiddle_cpx ya,yb; + kiss_fft_cpx * Fout_beg = Fout; + +#ifdef FIXED_POINT + ya.r = 10126; + ya.i = -31164; + yb.r = -26510; + yb.i = -19261; +#else + ya = st->twiddles[fstride*m]; + yb = st->twiddles[fstride*2*m]; +#endif + tw=st->twiddles; + + for (i=0;ir = ADD32_ovflw(Fout0->r, ADD32_ovflw(scratch[7].r, scratch[8].r)); + Fout0->i = ADD32_ovflw(Fout0->i, ADD32_ovflw(scratch[7].i, scratch[8].i)); + + scratch[5].r = ADD32_ovflw(scratch[0].r, ADD32_ovflw(S_MUL(scratch[7].r,ya.r), S_MUL(scratch[8].r,yb.r))); + scratch[5].i = ADD32_ovflw(scratch[0].i, ADD32_ovflw(S_MUL(scratch[7].i,ya.r), S_MUL(scratch[8].i,yb.r))); + + scratch[6].r = ADD32_ovflw(S_MUL(scratch[10].i,ya.i), S_MUL(scratch[9].i,yb.i)); + scratch[6].i = NEG32_ovflw(ADD32_ovflw(S_MUL(scratch[10].r,ya.i), S_MUL(scratch[9].r,yb.i))); + + C_SUB(*Fout1,scratch[5],scratch[6]); + C_ADD(*Fout4,scratch[5],scratch[6]); + + scratch[11].r = ADD32_ovflw(scratch[0].r, ADD32_ovflw(S_MUL(scratch[7].r,yb.r), S_MUL(scratch[8].r,ya.r))); + scratch[11].i = ADD32_ovflw(scratch[0].i, 
ADD32_ovflw(S_MUL(scratch[7].i,yb.r), S_MUL(scratch[8].i,ya.r))); + scratch[12].r = SUB32_ovflw(S_MUL(scratch[9].i,ya.i), S_MUL(scratch[10].i,yb.i)); + scratch[12].i = SUB32_ovflw(S_MUL(scratch[10].r,yb.i), S_MUL(scratch[9].r,ya.i)); + + C_ADD(*Fout2,scratch[11],scratch[12]); + C_SUB(*Fout3,scratch[11],scratch[12]); + + ++Fout0;++Fout1;++Fout2;++Fout3;++Fout4; + } + } +} +#endif /* OVERRIDE_kf_bfly5 */ + + +#endif + + +#ifdef CUSTOM_MODES + +static +void compute_bitrev_table( + int Fout, + opus_int16 *f, + const size_t fstride, + int in_stride, + opus_int16 * factors, + const kiss_fft_state *st + ) +{ + const int p=*factors++; /* the radix */ + const int m=*factors++; /* stage's fft length/p */ + + /*printf ("fft %d %d %d %d %d %d\n", p*m, m, p, s2, fstride*in_stride, N);*/ + if (m==1) + { + int j; + for (j=0;j32000 || (opus_int32)p*(opus_int32)p > n) + p = n; /* no more factors, skip to end */ + } + n /= p; +#ifdef RADIX_TWO_ONLY + if (p!=2 && p != 4) +#else + if (p>5) +#endif + { + return 0; + } + facbuf[2*stages] = p; + if (p==2 && stages > 1) + { + facbuf[2*stages] = 4; + facbuf[2] = 2; + } + stages++; + } while (n > 1); + n = nbak; + /* Reverse the order to get the radix 4 at the end, so we can use the + fast degenerate case. It turns out that reversing the order also + improves the noise behaviour. 
*/ + for (i=0;i= memneeded) + st = (kiss_fft_state*)mem; + *lenmem = memneeded; + } + if (st) { + opus_int16 *bitrev; + kiss_twiddle_cpx *twiddles; + + st->nfft=nfft; +#ifdef FIXED_POINT + st->scale_shift = celt_ilog2(st->nfft); + if (st->nfft == 1<scale_shift) + st->scale = Q15ONE; + else + st->scale = (1073741824+st->nfft/2)/st->nfft>>(15-st->scale_shift); +#else + st->scale = 1.f/nfft; +#endif + if (base != NULL) + { + st->twiddles = base->twiddles; + st->shift = 0; + while (st->shift < 32 && nfft<shift != base->nfft) + st->shift++; + if (st->shift>=32) + goto fail; + } else { + st->twiddles = twiddles = (kiss_twiddle_cpx*)KISS_FFT_MALLOC(sizeof(kiss_twiddle_cpx)*nfft); + compute_twiddles(twiddles, nfft); + st->shift = -1; + } + if (!kf_factor(nfft,st->factors)) + { + goto fail; + } + + /* bitrev */ + st->bitrev = bitrev = (opus_int16*)KISS_FFT_MALLOC(sizeof(opus_int16)*nfft); + if (st->bitrev==NULL) + goto fail; + compute_bitrev_table(0, bitrev, 1,1, st->factors,st); + + /* Initialize architecture specific fft parameters */ + if (opus_fft_alloc_arch(st, arch)) + goto fail; + } + return st; +fail: + opus_fft_free(st, arch); + return NULL; +} + +kiss_fft_state *opus_fft_alloc(int nfft,void * mem,size_t * lenmem, int arch) +{ + return opus_fft_alloc_twiddles(nfft, mem, lenmem, NULL, arch); +} + +void opus_fft_free_arch_c(kiss_fft_state *st) { + (void)st; +} + +void opus_fft_free(const kiss_fft_state *cfg, int arch) +{ + if (cfg) + { + opus_fft_free_arch((kiss_fft_state *)cfg, arch); + opus_free((opus_int16*)cfg->bitrev); + if (cfg->shift < 0) + opus_free((kiss_twiddle_cpx*)cfg->twiddles); + opus_free((kiss_fft_state*)cfg); + } +} + +#endif /* CUSTOM_MODES */ + +void opus_fft_impl(const kiss_fft_state *st,kiss_fft_cpx *fout) +{ + int m2, m; + int p; + int L; + int fstride[MAXFACTORS]; + int i; + int shift; + + /* st->shift can be -1 */ + shift = st->shift>0 ? 
st->shift : 0; + + fstride[0] = 1; + L=0; + do { + p = st->factors[2*L]; + m = st->factors[2*L+1]; + fstride[L+1] = fstride[L]*p; + L++; + } while(m!=1); + m = st->factors[2*L-1]; + for (i=L-1;i>=0;i--) + { + if (i!=0) + m2 = st->factors[2*i-1]; + else + m2 = 1; + switch (st->factors[2*i]) + { + case 2: + kf_bfly2(fout, m, fstride[i]); + break; + case 4: + kf_bfly4(fout,fstride[i]<scale_shift-1; +#endif + scale = st->scale; + + celt_assert2 (fin != fout, "In-place FFT not supported"); + /* Bit-reverse the input */ + for (i=0;infft;i++) + { + kiss_fft_cpx x = fin[i]; + fout[st->bitrev[i]].r = SHR32(MULT16_32_Q16(scale, x.r), scale_shift); + fout[st->bitrev[i]].i = SHR32(MULT16_32_Q16(scale, x.i), scale_shift); + } + opus_fft_impl(st, fout); +} + + +void opus_ifft_c(const kiss_fft_state *st,const kiss_fft_cpx *fin,kiss_fft_cpx *fout) +{ + int i; + celt_assert2 (fin != fout, "In-place FFT not supported"); + /* Bit-reverse the input */ + for (i=0;infft;i++) + fout[st->bitrev[i]] = fin[i]; + for (i=0;infft;i++) + fout[i].i = -fout[i].i; + opus_fft_impl(st, fout); + for (i=0;infft;i++) + fout[i].i = -fout[i].i; +} diff --git a/TMessagesProj/jni/voip/rnnoise/src/kiss_fft.h b/TMessagesProj/jni/voip/rnnoise/src/kiss_fft.h new file mode 100644 index 000000000..b2fe9a477 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/kiss_fft.h @@ -0,0 +1,203 @@ +/*Copyright (c) 2003-2004, Mark Borgerding + Lots of modifications by Jean-Marc Valin + Copyright (c) 2005-2007, Xiph.Org Foundation + Copyright (c) 2008, Xiph.Org Foundation, CSIRO + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE.*/ + +#ifndef KISS_FFT_H +#define KISS_FFT_H + +#include +#include +#include "arch.h" + +#include +#define opus_alloc(x) malloc(x) +#define opus_free(x) free(x) + +#ifdef __cplusplus +extern "C" { +#endif + +#ifdef USE_SIMD +# include +# define kiss_fft_scalar __m128 +#define KISS_FFT_MALLOC(nbytes) memalign(16,nbytes) +#else +#define KISS_FFT_MALLOC opus_alloc +#endif + +#ifdef FIXED_POINT +#include "arch.h" + +# define kiss_fft_scalar opus_int32 +# define kiss_twiddle_scalar opus_int16 + + +#else +# ifndef kiss_fft_scalar +/* default is float */ +# define kiss_fft_scalar float +# define kiss_twiddle_scalar float +# define KF_SUFFIX _celt_single +# endif +#endif + +typedef struct { + kiss_fft_scalar r; + kiss_fft_scalar i; +}kiss_fft_cpx; + +typedef struct { + kiss_twiddle_scalar r; + kiss_twiddle_scalar i; +}kiss_twiddle_cpx; + +#define MAXFACTORS 8 +/* e.g. 
an fft of length 128 has 4 factors + as far as kissfft is concerned + 4*4*4*2 + */ + +typedef struct arch_fft_state{ + int is_supported; + void *priv; +} arch_fft_state; + +typedef struct kiss_fft_state{ + int nfft; + opus_val16 scale; +#ifdef FIXED_POINT + int scale_shift; +#endif + int shift; + opus_int16 factors[2*MAXFACTORS]; + const opus_int16 *bitrev; + const kiss_twiddle_cpx *twiddles; + arch_fft_state *arch_fft; +} kiss_fft_state; + +#if defined(HAVE_ARM_NE10) +#include "arm/fft_arm.h" +#endif + +/*typedef struct kiss_fft_state* kiss_fft_cfg;*/ + +/** + * opus_fft_alloc + * + * Initialize a FFT (or IFFT) algorithm's cfg/state buffer. + * + * typical usage: kiss_fft_cfg mycfg=opus_fft_alloc(1024,0,NULL,NULL); + * + * The return value from fft_alloc is a cfg buffer used internally + * by the fft routine or NULL. + * + * If lenmem is NULL, then opus_fft_alloc will allocate a cfg buffer using malloc. + * The returned value should be free()d when done to avoid memory leaks. + * + * The state can be placed in a user supplied buffer 'mem': + * If lenmem is not NULL and mem is not NULL and *lenmem is large enough, + * then the function places the cfg in mem and the size used in *lenmem + * and returns mem. + * + * If lenmem is not NULL and ( mem is NULL or *lenmem is not large enough), + * then the function returns NULL and places the minimum cfg + * buffer size in *lenmem. + * */ + +kiss_fft_state *opus_fft_alloc_twiddles(int nfft,void * mem,size_t * lenmem, const kiss_fft_state *base, int arch); + +kiss_fft_state *opus_fft_alloc(int nfft,void * mem,size_t * lenmem, int arch); + +/** + * opus_fft(cfg,in_out_buf) + * + * Perform an FFT on a complex input buffer. + * for a forward FFT, + * fin should be f[0] , f[1] , ... ,f[nfft-1] + * fout will be F[0] , F[1] , ... 
,F[nfft-1] + * Note that each element is complex and can be accessed like + f[k].r and f[k].i + * */ +void opus_fft_c(const kiss_fft_state *cfg,const kiss_fft_cpx *fin,kiss_fft_cpx *fout); +void opus_ifft_c(const kiss_fft_state *cfg,const kiss_fft_cpx *fin,kiss_fft_cpx *fout); + +void opus_fft_impl(const kiss_fft_state *st,kiss_fft_cpx *fout); +void opus_ifft_impl(const kiss_fft_state *st,kiss_fft_cpx *fout); + +void opus_fft_free(const kiss_fft_state *cfg, int arch); + + +void opus_fft_free_arch_c(kiss_fft_state *st); +int opus_fft_alloc_arch_c(kiss_fft_state *st); + +#if !defined(OVERRIDE_OPUS_FFT) +/* Is run-time CPU detection enabled on this platform? */ +#if defined(OPUS_HAVE_RTCD) && (defined(HAVE_ARM_NE10)) + +extern int (*const OPUS_FFT_ALLOC_ARCH_IMPL[OPUS_ARCHMASK+1])( + kiss_fft_state *st); + +#define opus_fft_alloc_arch(_st, arch) \ + ((*OPUS_FFT_ALLOC_ARCH_IMPL[(arch)&OPUS_ARCHMASK])(_st)) + +extern void (*const OPUS_FFT_FREE_ARCH_IMPL[OPUS_ARCHMASK+1])( + kiss_fft_state *st); +#define opus_fft_free_arch(_st, arch) \ + ((*OPUS_FFT_FREE_ARCH_IMPL[(arch)&OPUS_ARCHMASK])(_st)) + +extern void (*const OPUS_FFT[OPUS_ARCHMASK+1])(const kiss_fft_state *cfg, + const kiss_fft_cpx *fin, kiss_fft_cpx *fout); +#define opus_fft(_cfg, _fin, _fout, arch) \ + ((*OPUS_FFT[(arch)&OPUS_ARCHMASK])(_cfg, _fin, _fout)) + +extern void (*const OPUS_IFFT[OPUS_ARCHMASK+1])(const kiss_fft_state *cfg, + const kiss_fft_cpx *fin, kiss_fft_cpx *fout); +#define opus_ifft(_cfg, _fin, _fout, arch) \ + ((*OPUS_IFFT[(arch)&OPUS_ARCHMASK])(_cfg, _fin, _fout)) + +#else /* else for if defined(OPUS_HAVE_RTCD) && (defined(HAVE_ARM_NE10)) */ + +#define opus_fft_alloc_arch(_st, arch) \ + ((void)(arch), opus_fft_alloc_arch_c(_st)) + +#define opus_fft_free_arch(_st, arch) \ + ((void)(arch), opus_fft_free_arch_c(_st)) + +#define opus_fft(_cfg, _fin, _fout, arch) \ + ((void)(arch), opus_fft_c(_cfg, _fin, _fout)) + +#define opus_ifft(_cfg, _fin, _fout, arch) \ + ((void)(arch), opus_ifft_c(_cfg, _fin, 
_fout)) + +#endif /* end if defined(OPUS_HAVE_RTCD) && (defined(HAVE_ARM_NE10)) */ +#endif /* end if !defined(OVERRIDE_OPUS_FFT) */ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/TMessagesProj/jni/voip/rnnoise/src/opus_types.h b/TMessagesProj/jni/voip/rnnoise/src/opus_types.h new file mode 100644 index 000000000..718082666 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/opus_types.h @@ -0,0 +1,159 @@ +/* (C) COPYRIGHT 1994-2002 Xiph.Org Foundation */ +/* Modified by Jean-Marc Valin */ +/* + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER + OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*/ +/* opus_types.h based on ogg_types.h from libogg */ + +/** + @file opus_types.h + @brief Opus reference implementation types +*/ +#ifndef OPUS_TYPES_H +#define OPUS_TYPES_H + +/* Use the real stdint.h if it's there (taken from Paul Hsieh's pstdint.h) */ +#if (defined(__STDC__) && __STDC__ && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L) || (defined(__GNUC__) && (defined(_STDINT_H) || defined(_STDINT_H_)) || defined (HAVE_STDINT_H)) +#include + + typedef int16_t opus_int16; + typedef uint16_t opus_uint16; + typedef int32_t opus_int32; + typedef uint32_t opus_uint32; +#elif defined(_WIN32) + +# if defined(__CYGWIN__) +# include <_G_config.h> + typedef _G_int32_t opus_int32; + typedef _G_uint32_t opus_uint32; + typedef _G_int16 opus_int16; + typedef _G_uint16 opus_uint16; +# elif defined(__MINGW32__) + typedef short opus_int16; + typedef unsigned short opus_uint16; + typedef int opus_int32; + typedef unsigned int opus_uint32; +# elif defined(__MWERKS__) + typedef int opus_int32; + typedef unsigned int opus_uint32; + typedef short opus_int16; + typedef unsigned short opus_uint16; +# else + /* MSVC/Borland */ + typedef __int32 opus_int32; + typedef unsigned __int32 opus_uint32; + typedef __int16 opus_int16; + typedef unsigned __int16 opus_uint16; +# endif + +#elif defined(__MACOS__) + +# include + typedef SInt16 opus_int16; + typedef UInt16 opus_uint16; + typedef SInt32 opus_int32; + typedef UInt32 opus_uint32; + +#elif (defined(__APPLE__) && defined(__MACH__)) /* MacOS X Framework build */ + +# include + typedef int16_t opus_int16; + typedef u_int16_t opus_uint16; + typedef int32_t opus_int32; + typedef u_int32_t opus_uint32; + +#elif defined(__BEOS__) + + /* Be */ +# include + typedef int16 opus_int16; + typedef u_int16 opus_uint16; + typedef int32_t opus_int32; + typedef u_int32_t opus_uint32; + +#elif defined (__EMX__) + + /* OS/2 GCC */ + typedef short opus_int16; + typedef unsigned short opus_uint16; + typedef int opus_int32; + typedef unsigned 
int opus_uint32; + +#elif defined (DJGPP) + + /* DJGPP */ + typedef short opus_int16; + typedef unsigned short opus_uint16; + typedef int opus_int32; + typedef unsigned int opus_uint32; + +#elif defined(R5900) + + /* PS2 EE */ + typedef int opus_int32; + typedef unsigned opus_uint32; + typedef short opus_int16; + typedef unsigned short opus_uint16; + +#elif defined(__SYMBIAN32__) + + /* Symbian GCC */ + typedef signed short opus_int16; + typedef unsigned short opus_uint16; + typedef signed int opus_int32; + typedef unsigned int opus_uint32; + +#elif defined(CONFIG_TI_C54X) || defined (CONFIG_TI_C55X) + + typedef short opus_int16; + typedef unsigned short opus_uint16; + typedef long opus_int32; + typedef unsigned long opus_uint32; + +#elif defined(CONFIG_TI_C6X) + + typedef short opus_int16; + typedef unsigned short opus_uint16; + typedef int opus_int32; + typedef unsigned int opus_uint32; + +#else + + /* Give up, take a reasonable guess */ + typedef short opus_int16; + typedef unsigned short opus_uint16; + typedef int opus_int32; + typedef unsigned int opus_uint32; + +#endif + +#define opus_int int /* used for counters etc; at least 16 bits */ +#define opus_int64 long long +#define opus_int8 signed char + +#define opus_uint unsigned int /* used for counters etc; at least 16 bits */ +#define opus_uint64 unsigned long long +#define opus_uint8 unsigned char + +#endif /* OPUS_TYPES_H */ diff --git a/TMessagesProj/jni/voip/rnnoise/src/pitch.c b/TMessagesProj/jni/voip/rnnoise/src/pitch.c new file mode 100644 index 000000000..bd101a6cc --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/pitch.c @@ -0,0 +1,526 @@ +/* Copyright (c) 2007-2008 CSIRO + Copyright (c) 2007-2009 Xiph.Org Foundation + Written by Jean-Marc Valin */ +/** + @file pitch.c + @brief Pitch analysis + */ + +/* + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + - Redistributions of source code must 
retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER + OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*/ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "pitch.h" +#include "common.h" +//#include "modes.h" +//#include "stack_alloc.h" +//#include "mathops.h" +#include "celt_lpc.h" +#include "math.h" + +static void find_best_pitch(opus_val32 *xcorr, opus_val16 *y, int len, + int max_pitch, int *best_pitch +#ifdef FIXED_POINT + , int yshift, opus_val32 maxcorr +#endif + ) +{ + int i, j; + opus_val32 Syy=1; + opus_val16 best_num[2]; + opus_val32 best_den[2]; +#ifdef FIXED_POINT + int xshift; + + xshift = celt_ilog2(maxcorr)-14; +#endif + + best_num[0] = -1; + best_num[1] = -1; + best_den[0] = 0; + best_den[1] = 0; + best_pitch[0] = 0; + best_pitch[1] = 1; + for (j=0;j0) + { + opus_val16 num; + opus_val32 xcorr16; + xcorr16 = EXTRACT16(VSHR32(xcorr[i], xshift)); +#ifndef FIXED_POINT + /* Considering the range of xcorr16, this should avoid both underflows + and overflows (inf) when squaring xcorr16 */ + xcorr16 *= 1e-12f; +#endif + num = MULT16_16_Q15(xcorr16,xcorr16); + if (MULT16_32_Q15(num,best_den[1]) > MULT16_32_Q15(best_num[1],Syy)) + { + if (MULT16_32_Q15(num,best_den[0]) > MULT16_32_Q15(best_num[0],Syy)) + { + best_num[1] = best_num[0]; + best_den[1] = best_den[0]; + best_pitch[1] = best_pitch[0]; + best_num[0] = num; + best_den[0] = Syy; + best_pitch[0] = i; + } else { + best_num[1] = num; + best_den[1] = Syy; + best_pitch[1] = i; + } + } + } + Syy += SHR32(MULT16_16(y[i+len],y[i+len]),yshift) - SHR32(MULT16_16(y[i],y[i]),yshift); + Syy = MAX32(1, Syy); + } +} + +static void celt_fir5(const opus_val16 *x, + const opus_val16 *num, + opus_val16 *y, + int N, + opus_val16 *mem) +{ + int i; + opus_val16 num0, num1, num2, num3, num4; + opus_val32 mem0, mem1, mem2, mem3, mem4; + num0=num[0]; + num1=num[1]; + num2=num[2]; + num3=num[3]; + num4=num[4]; + mem0=mem[0]; + mem1=mem[1]; + mem2=mem[2]; + mem3=mem[3]; + mem4=mem[4]; + for (i=0;i>1;i++) + x_lp[i] = SHR32(HALF32(HALF32(x[0][(2*i-1)]+x[0][(2*i+1)])+x[0][2*i]), shift); + x_lp[0] = 
SHR32(HALF32(HALF32(x[0][1])+x[0][0]), shift); + if (C==2) + { + for (i=1;i>1;i++) + x_lp[i] += SHR32(HALF32(HALF32(x[1][(2*i-1)]+x[1][(2*i+1)])+x[1][2*i]), shift); + x_lp[0] += SHR32(HALF32(HALF32(x[1][1])+x[1][0]), shift); + } + + _celt_autocorr(x_lp, ac, NULL, 0, + 4, len>>1); + + /* Noise floor -40 dB */ +#ifdef FIXED_POINT + ac[0] += SHR32(ac[0],13); +#else + ac[0] *= 1.0001f; +#endif + /* Lag windowing */ + for (i=1;i<=4;i++) + { + /*ac[i] *= exp(-.5*(2*M_PI*.002*i)*(2*M_PI*.002*i));*/ +#ifdef FIXED_POINT + ac[i] -= MULT16_32_Q15(2*i*i, ac[i]); +#else + ac[i] -= ac[i]*(.008f*i)*(.008f*i); +#endif + } + + _celt_lpc(lpc, ac, 4); + for (i=0;i<4;i++) + { + tmp = MULT16_16_Q15(QCONST16(.9f,15), tmp); + lpc[i] = MULT16_16_Q15(lpc[i], tmp); + } + /* Add a zero */ + lpc2[0] = lpc[0] + QCONST16(.8f,SIG_SHIFT); + lpc2[1] = lpc[1] + MULT16_16_Q15(c1,lpc[0]); + lpc2[2] = lpc[2] + MULT16_16_Q15(c1,lpc[1]); + lpc2[3] = lpc[3] + MULT16_16_Q15(c1,lpc[2]); + lpc2[4] = MULT16_16_Q15(c1,lpc[3]); + celt_fir5(x_lp, lpc2, x_lp, len>>1, mem); +} + +void celt_pitch_xcorr(const opus_val16 *_x, const opus_val16 *_y, + opus_val32 *xcorr, int len, int max_pitch) +{ + +#if 0 /* This is a simple version of the pitch correlation that should work + well on DSPs like Blackfin and TI C5x/C6x */ + int i, j; +#ifdef FIXED_POINT + opus_val32 maxcorr=1; +#endif + for (i=0;i0); + celt_assert((((unsigned char *)_x-(unsigned char *)NULL)&3)==0); + for (i=0;i0); + celt_assert(max_pitch>0); + lag = len+max_pitch; + + opus_val16 x_lp4[len>>2]; + opus_val16 y_lp4[lag>>2]; + opus_val32 xcorr[max_pitch>>1]; + + /* Downsample by 2 again */ + for (j=0;j>2;j++) + x_lp4[j] = x_lp[2*j]; + for (j=0;j>2;j++) + y_lp4[j] = y[2*j]; + +#ifdef FIXED_POINT + xmax = celt_maxabs16(x_lp4, len>>2); + ymax = celt_maxabs16(y_lp4, lag>>2); + shift = celt_ilog2(MAX32(1, MAX32(xmax, ymax)))-11; + if (shift>0) + { + for (j=0;j>2;j++) + x_lp4[j] = SHR16(x_lp4[j], shift); + for (j=0;j>2;j++) + y_lp4[j] = SHR16(y_lp4[j], shift); + 
/* Use double the shift for a MAC */ + shift *= 2; + } else { + shift = 0; + } +#endif + + /* Coarse search with 4x decimation */ + +#ifdef FIXED_POINT + maxcorr = +#endif + celt_pitch_xcorr(x_lp4, y_lp4, xcorr, len>>2, max_pitch>>2); + + find_best_pitch(xcorr, y_lp4, len>>2, max_pitch>>2, best_pitch +#ifdef FIXED_POINT + , 0, maxcorr +#endif + ); + + /* Finer search with 2x decimation */ +#ifdef FIXED_POINT + maxcorr=1; +#endif + for (i=0;i>1;i++) + { + opus_val32 sum; + xcorr[i] = 0; + if (abs(i-2*best_pitch[0])>2 && abs(i-2*best_pitch[1])>2) + continue; +#ifdef FIXED_POINT + sum = 0; + for (j=0;j>1;j++) + sum += SHR32(MULT16_16(x_lp[j],y[i+j]), shift); +#else + sum = celt_inner_prod(x_lp, y+i, len>>1); +#endif + xcorr[i] = MAX32(-1, sum); +#ifdef FIXED_POINT + maxcorr = MAX32(maxcorr, sum); +#endif + } + find_best_pitch(xcorr, y, len>>1, max_pitch>>1, best_pitch +#ifdef FIXED_POINT + , shift+1, maxcorr +#endif + ); + + /* Refine by pseudo-interpolation */ + if (best_pitch[0]>0 && best_pitch[0]<(max_pitch>>1)-1) + { + opus_val32 a, b, c; + a = xcorr[best_pitch[0]-1]; + b = xcorr[best_pitch[0]]; + c = xcorr[best_pitch[0]+1]; + if ((c-a) > MULT16_32_Q15(QCONST16(.7f,15),b-a)) + offset = 1; + else if ((a-c) > MULT16_32_Q15(QCONST16(.7f,15),b-c)) + offset = -1; + else + offset = 0; + } else { + offset = 0; + } + *pitch = 2*best_pitch[0]-offset; +} + +#ifdef FIXED_POINT +static opus_val16 compute_pitch_gain(opus_val32 xy, opus_val32 xx, opus_val32 yy) +{ + opus_val32 x2y2; + int sx, sy, shift; + opus_val32 g; + opus_val16 den; + if (xy == 0 || xx == 0 || yy == 0) + return 0; + sx = celt_ilog2(xx)-14; + sy = celt_ilog2(yy)-14; + shift = sx + sy; + x2y2 = SHR32(MULT16_16(VSHR32(xx, sx), VSHR32(yy, sy)), 14); + if (shift & 1) { + if (x2y2 < 32768) + { + x2y2 <<= 1; + shift--; + } else { + x2y2 >>= 1; + shift++; + } + } + den = celt_rsqrt_norm(x2y2); + g = MULT16_32_Q15(den, xy); + g = VSHR32(g, (shift>>1)-1); + return EXTRACT16(MIN32(g, Q15ONE)); +} +#else +static 
opus_val16 compute_pitch_gain(opus_val32 xy, opus_val32 xx, opus_val32 yy) +{ + return xy/sqrt(1+xx*yy); +} +#endif + +static const int second_check[16] = {0, 0, 3, 2, 3, 2, 5, 2, 3, 2, 3, 2, 5, 2, 3, 2}; +opus_val16 remove_doubling(opus_val16 *x, int maxperiod, int minperiod, + int N, int *T0_, int prev_period, opus_val16 prev_gain) +{ + int k, i, T, T0; + opus_val16 g, g0; + opus_val16 pg; + opus_val32 xy,xx,yy,xy2; + opus_val32 xcorr[3]; + opus_val32 best_xy, best_yy; + int offset; + int minperiod0; + + minperiod0 = minperiod; + maxperiod /= 2; + minperiod /= 2; + *T0_ /= 2; + prev_period /= 2; + N /= 2; + x += maxperiod; + if (*T0_>=maxperiod) + *T0_=maxperiod-1; + + T = T0 = *T0_; + opus_val32 yy_lookup[maxperiod+1]; + dual_inner_prod(x, x, x-T0, N, &xx, &xy); + yy_lookup[0] = xx; + yy=xx; + for (i=1;i<=maxperiod;i++) + { + yy = yy+MULT16_16(x[-i],x[-i])-MULT16_16(x[N-i],x[N-i]); + yy_lookup[i] = MAX32(0, yy); + } + yy = yy_lookup[T0]; + best_xy = xy; + best_yy = yy; + g = g0 = compute_pitch_gain(xy, xx, yy); + /* Look for any pitch at T/k */ + for (k=2;k<=15;k++) + { + int T1, T1b; + opus_val16 g1; + opus_val16 cont=0; + opus_val16 thresh; + T1 = (2*T0+k)/(2*k); + if (T1 < minperiod) + break; + /* Look for another strong correlation at T1b */ + if (k==2) + { + if (T1+T0>maxperiod) + T1b = T0; + else + T1b = T0+T1; + } else + { + T1b = (2*second_check[k]*T0+k)/(2*k); + } + dual_inner_prod(x, &x[-T1], &x[-T1b], N, &xy, &xy2); + xy = HALF32(xy + xy2); + yy = HALF32(yy_lookup[T1] + yy_lookup[T1b]); + g1 = compute_pitch_gain(xy, xx, yy); + if (abs(T1-prev_period)<=1) + cont = prev_gain; + else if (abs(T1-prev_period)<=2 && 5*k*k < T0) + cont = HALF16(prev_gain); + else + cont = 0; + thresh = MAX16(QCONST16(.3f,15), MULT16_16_Q15(QCONST16(.7f,15),g0)-cont); + /* Bias against very high pitch (very short period) to avoid false-positives + due to short-term correlation */ + if (T1<3*minperiod) + thresh = MAX16(QCONST16(.4f,15), 
MULT16_16_Q15(QCONST16(.85f,15),g0)-cont); + else if (T1<2*minperiod) + thresh = MAX16(QCONST16(.5f,15), MULT16_16_Q15(QCONST16(.9f,15),g0)-cont); + if (g1 > thresh) + { + best_xy = xy; + best_yy = yy; + T = T1; + g = g1; + } + } + best_xy = MAX32(0, best_xy); + if (best_yy <= best_xy) + pg = Q15ONE; + else + pg = best_xy/(best_yy+1); + + for (k=0;k<3;k++) + xcorr[k] = celt_inner_prod(x, x-(T+k-1), N); + if ((xcorr[2]-xcorr[0]) > MULT16_32_Q15(QCONST16(.7f,15),xcorr[1]-xcorr[0])) + offset = 1; + else if ((xcorr[0]-xcorr[2]) > MULT16_32_Q15(QCONST16(.7f,15),xcorr[1]-xcorr[2])) + offset = -1; + else + offset = 0; + if (pg > g) + pg = g; + *T0_ = 2*T+offset; + + if (*T0_=3); + y_3=0; /* gcc doesn't realize that y_3 can't be used uninitialized */ + y_0=*y++; + y_1=*y++; + y_2=*y++; + for (j=0;j +#include "opus_types.h" +#include "common.h" +#include "arch.h" +#include "tansig_table.h" +#include "rnn.h" +#include "rnn_data.h" +#include + +static OPUS_INLINE float tansig_approx(float x) +{ + int i; + float y, dy; + float sign=1; + /* Tests are reversed to catch NaNs */ + if (!(x<8)) + return 1; + if (!(x>-8)) + return -1; +#ifndef FIXED_POINT + /* Another check in case of -ffast-math */ + if (celt_isnan(x)) + return 0; +#endif + if (x<0) + { + x=-x; + sign=-1; + } + i = (int)floor(.5f+25*x); + x -= .04f*i; + y = tansig_table[i]; + dy = 1-y*y; + y = y + x*dy*(1 - y*x); + return sign*y; +} + +static OPUS_INLINE float sigmoid_approx(float x) +{ + return .5 + .5*tansig_approx(.5*x); +} + +static OPUS_INLINE float relu(float x) +{ + return x < 0 ? 
0 : x; +} + +void compute_dense(const DenseLayer *layer, float *output, const float *input) +{ + int i, j; + int N, M; + int stride; + M = layer->nb_inputs; + N = layer->nb_neurons; + stride = N; + for (i=0;ibias[i]; + for (j=0;jinput_weights[j*stride + i]*input[j]; + output[i] = WEIGHTS_SCALE*sum; + } + if (layer->activation == ACTIVATION_SIGMOID) { + for (i=0;iactivation == ACTIVATION_TANH) { + for (i=0;iactivation == ACTIVATION_RELU) { + for (i=0;inb_inputs; + N = gru->nb_neurons; + stride = 3*N; + for (i=0;ibias[i]; + for (j=0;jinput_weights[j*stride + i]*input[j]; + for (j=0;jrecurrent_weights[j*stride + i]*state[j]; + z[i] = sigmoid_approx(WEIGHTS_SCALE*sum); + } + for (i=0;ibias[N + i]; + for (j=0;jinput_weights[N + j*stride + i]*input[j]; + for (j=0;jrecurrent_weights[N + j*stride + i]*state[j]; + r[i] = sigmoid_approx(WEIGHTS_SCALE*sum); + } + for (i=0;ibias[2*N + i]; + for (j=0;jinput_weights[2*N + j*stride + i]*input[j]; + for (j=0;jrecurrent_weights[2*N + j*stride + i]*state[j]*r[j]; + if (gru->activation == ACTIVATION_SIGMOID) sum = sigmoid_approx(WEIGHTS_SCALE*sum); + else if (gru->activation == ACTIVATION_TANH) sum = tansig_approx(WEIGHTS_SCALE*sum); + else if (gru->activation == ACTIVATION_RELU) sum = relu(WEIGHTS_SCALE*sum); + else *(int*)0=0; + h[i] = z[i]*state[i] + (1-z[i])*sum; + } + for (i=0;imodel->input_dense, dense_out, input); + compute_gru(rnn->model->vad_gru, rnn->vad_gru_state, dense_out); + compute_dense(rnn->model->vad_output, vad, rnn->vad_gru_state); + for (i=0;imodel->input_dense_size;i++) noise_input[i] = dense_out[i]; + for (i=0;imodel->vad_gru_size;i++) noise_input[i+rnn->model->input_dense_size] = rnn->vad_gru_state[i]; + for (i=0;imodel->input_dense_size+rnn->model->vad_gru_size] = input[i]; + compute_gru(rnn->model->noise_gru, rnn->noise_gru_state, noise_input); + + for (i=0;imodel->vad_gru_size;i++) denoise_input[i] = rnn->vad_gru_state[i]; + for (i=0;imodel->noise_gru_size;i++) denoise_input[i+rnn->model->vad_gru_size] = 
rnn->noise_gru_state[i]; + for (i=0;imodel->vad_gru_size+rnn->model->noise_gru_size] = input[i]; + compute_gru(rnn->model->denoise_gru, rnn->denoise_gru_state, denoise_input); + compute_dense(rnn->model->denoise_output, gains, rnn->denoise_gru_state); +} diff --git a/TMessagesProj/jni/voip/rnnoise/src/rnn.h b/TMessagesProj/jni/voip/rnnoise/src/rnn.h new file mode 100644 index 000000000..31b962fc6 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/rnn.h @@ -0,0 +1,69 @@ +/* Copyright (c) 2017 Jean-Marc Valin */ +/* + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*/ + +#ifndef RNN_H_ +#define RNN_H_ + +#include "rnnoise.h" + +#include "opus_types.h" + +#define WEIGHTS_SCALE (1.f/256) + +#define MAX_NEURONS 128 + +#define ACTIVATION_TANH 0 +#define ACTIVATION_SIGMOID 1 +#define ACTIVATION_RELU 2 + +typedef signed char rnn_weight; + +typedef struct { + const rnn_weight *bias; + const rnn_weight *input_weights; + int nb_inputs; + int nb_neurons; + int activation; +} DenseLayer; + +typedef struct { + const rnn_weight *bias; + const rnn_weight *input_weights; + const rnn_weight *recurrent_weights; + int nb_inputs; + int nb_neurons; + int activation; +} GRULayer; + +typedef struct RNNState RNNState; + +void compute_dense(const DenseLayer *layer, float *output, const float *input); + +void compute_gru(const GRULayer *gru, float *state, const float *input); + +void compute_rnn(RNNState *rnn, float *gains, float *vad, const float *input); + +#endif /* RNN_H_ */ diff --git a/TMessagesProj/jni/voip/rnnoise/src/rnn_data.c b/TMessagesProj/jni/voip/rnnoise/src/rnn_data.c new file mode 100644 index 000000000..22c53165e --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/rnn_data.c @@ -0,0 +1,11051 @@ +/*This file is automatically generated from a Keras model*/ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "rnn.h" +#include "rnn_data.h" + +static const rnn_weight input_dense_weights[1008] = { + -10, 0, -3, 1, -8, -6, 3, -13, + 1, 0, -3, -7, -5, -3, 6, -1, + -6, 0, -6, -4, -1, -2, 1, 1, + -7, 2, 21, 10, -5, -20, 24, 23, + 37, 8, -2, 33, -6, 22, 13, -2, + 50, 8, 13, 1, -15, 30, -10, 30, + 0, 3, 5, 27, 1, 4, -3, 41, + 56, 35, -2, 49, -13, 11, 13, -2, + -47, 5, -16, -60, -15, 77, -17, 26, + -3, 14, -21, 19, -5, -19, -13, 0, + 10, 14, 9, 31, -13, -41, -10, 4, + 22, 18, -48, -6, -10, 62, -3, -18, + -14, 12, 26, -28, 3, 14, 25, -13, + -19, 6, 5, 36, -3, -65, -12, 0, + 31, -7, -9, 101, -4, 26, 16, 17, + -12, -12, 14, -36, -3, 5, -15, 21, + 2, 30, -3, 38, -4, 1, -6, 7, + -7, 14, 38, -22, -30, -3, -7, 3, + -39, -70, -126, 25, 
34, 94, -67, -22, + -33, 83, -47, -118, 4, 70, 33, 25, + 62, -128, -76, -118, -113, 49, -12, -100, + -18, -114, -33, 43, 32, 61, 40, -9, + -106, 2, 36, -100, -40, -5, 20, -75, + 61, -51, -9, 126, -27, -52, 5, -24, + -21, -126, -114, -12, 15, 106, -2, 73, + -125, 50, 13, -120, 35, 35, 4, -61, + 29, -124, 6, -53, -69, -125, 64, -89, + 36, -107, -103, -7, 27, 121, 69, 77, + -35, 35, 95, -125, -49, 97, -45, -43, + -23, 23, -28, -65, -118, 2, 8, -126, + 27, -97, 92, 5, 55, 82, 17, -57, + -115, 37, 8, -106, -46, 41, -2, 21, + -44, 8, -73, -58, -39, 34, 89, -95, + 95, -117, 120, -58, 31, 123, 1, -32, + -109, -110, 60, -120, -43, -74, 5, 91, + 26, 21, 114, 82, -83, -126, 123, 22, + -16, -67, 25, -83, 46, 48, -34, -121, + -124, -63, -35, -9, 31, 82, 123, 6, + -3, 117, 93, -2, -13, -36, 124, -112, + -6, -102, -5, -33, -15, 44, -69, -127, + -23, -40, -34, -85, 68, 83, -1, 40, + 8, 84, 118, -58, -55, -102, 123, -55, + -14, -123, 44, -63, -14, 21, 35, 16, + 24, -126, -13, -114, 35, 20, -36, 61, + -9, 97, 34, 19, -32, -109, 76, -104, + 99, -119, 45, -125, -51, -28, -8, -69, + -8, 125, -45, -93, 113, 103, -41, -82, + 52, 7, 126, 0, -40, 104, 55, -58, + 17, -124, -93, -58, 8, -45, 1, 56, + -123, 108, -47, -23, 115, 127, 17, -68, + -13, 116, -82, -44, 45, 67, -120, -101, + -15, -125, 120, -113, 17, -48, -73, 126, + -64, -86, -118, -19, 112, -1, -66, -27, + -62, 121, -86, -58, 50, 89, -38, -75, + 95, -111, 12, -113, 2, -68, 2, -94, + -121, 91, -5, 0, 79, 43, -7, -18, + 79, 35, -38, 47, 1, -45, 83, -50, + 102, 32, 55, -96, 15, -122, -69, 45, + -27, 91, -62, -30, 46, -95, 22, -72, + -97, -1, 14, -122, 28, 127, 61, -126, + 121, 9, 68, -120, 49, -60, 90, 3, + 43, 68, 54, 34, -10, 28, 21, -24, + -54, 22, -113, -12, 82, -2, -17, -9, + 127, 8, 116, -92, 0, -70, -33, 123, + 66, 116, -74, -4, 74, -72, -22, -47, + 1, -83, -60, -124, 1, 122, -57, -43, + 49, 40, -126, -128, -8, -29, 28, -24, + -123, -121, -70, -93, -37, -126, 11, -125, + -37, 11, -31, -51, -124, 116, -128, 8, + -25, 109, 75, 
-12, 7, 8, 10, 117, + 124, -128, -128, 29, -26, 101, 21, -128, + 87, 8, -39, 23, -128, 127, -127, 74, + -55, 74, 112, 127, 4, 55, 44, -92, + 123, 34, -93, 47, -21, -92, 17, 49, + -121, 92, 7, -126, -125, 124, -74, 3, + -59, 18, -91, 3, -9, 9, 56, 116, + 7, -29, 33, 87, -21, -128, -13, 57, + 74, 9, -29, -61, -97, -21, -95, -12, + -114, 16, 82, 125, -7, 10, -24, 9, + 77, -128, -102, -25, 3, -126, 10, 13, + -18, 51, 26, 127, -79, 35, 51, 12, + -50, -24, 1, -7, 22, 81, 65, 120, + -30, -38, 85, 122, -4, -106, -11, 27, + 53, 41, 8, -104, -66, -38, -124, 10, + 12, 76, 117, -109, 9, 11, 2, -18, + 3, 113, -16, -79, -39, -123, -20, -128, + 2, 13, -33, -58, 10, 84, -104, 13, + 64, 109, 1, 54, -12, 28, 24, 63, + -126, 118, -82, 46, -12, -15, 14, -43, + 60, 22, -32, -19, -46, 91, -107, 24, + -94, 26, -47, 125, 6, 58, -15, -75, + -26, -38, -35, 103, -16, -17, -13, 63, + -2, 45, -45, -73, -23, 70, -87, 51, + -17, 53, 76, 14, -18, -31, -14, 103, + 8, 21, -28, -33, -20, -47, 6, 39, + 40, -30, 7, -76, 55, 31, -20, -21, + -59, 1, 25, -11, 17, 5, -13, -39, + 0, -76, 50, -33, -29, -50, -16, -11, + -12, -1, -46, 40, -10, 65, -19, 21, + -41, -32, -83, -19, -4, 49, -60, 118, + -24, -46, 9, 102, -20, 8, -19, 25, + 31, -3, -37, 0, 25, 7, 29, 2, + -39, 127, -64, -20, 64, 115, -30, 36, + 100, 35, 122, 127, 127, -127, 127, -127, + 19, 127, -89, -79, -32, 39, -127, 125, + -80, 126, -127, 26, 8, 98, -8, -57, + -90, -50, 126, 61, 127, -126, 40, -106, + -68, 104, -125, -119, 11, 10, -127, 66, + -56, -12, -126, -104, 27, 75, 38, -124, + -126, -125, 84, -123, -45, -114, -128, 127, + 103, -101, -124, 127, -11, -23, -123, 92, + -123, 24, 126, 41, -2, -39, -27, -94, + 40, -112, -48, 127, 58, 14, 38, -75, + -64, 73, 117, 100, -119, -11, 6, 32, + -126, -14, 35, 121, -10, 54, -60, 89, + -3, 69, -25, -20, 43, -86, -34, 24, + 27, 7, -81, -99, -23, -16, -26, 13, + 35, -97, 80, -29, -13, -121, -12, -65, + -94, 70, -89, -126, -95, 88, 33, 96, + 29, -90, 69, 114, -78, 65, 90, -47, + -47, 89, 1, -12, 3, 8, 30, 
5, + 2, -30, -1, 6, -7, 10, -4, 46, + -27, -40, 22, -6, -17, 45, 24, -9, + 23, -14, -63, -26, -12, -57, 27, 25, + 55, -76, -47, 21, 34, 33, 26, 17, + 14, 6, 9, 26, 25, -25, -25, -18 +}; + +static const rnn_weight input_dense_bias[24] = { + 38, -6, 127, 127, 127, -43, -127, 78, + 127, 5, 127, 123, 127, 127, -128, -76, + -126, 28, 127, 125, -30, 127, -89, -20 +}; + +static const DenseLayer input_dense = { + input_dense_bias, + input_dense_weights, + 42, 24, ACTIVATION_TANH +}; + +static const rnn_weight vad_gru_weights[1728] = { + -124, 23, -123, -33, -95, -4, 8, -84, + 4, 101, -119, 116, -4, 123, 103, -51, + 29, -124, -114, -49, 31, 9, 75, -128, + 0, -49, 37, -50, 46, -21, -63, -104, + 54, 82, 33, 21, 70, 127, -9, -79, + -39, -23, -127, 107, 122, -96, -46, -18, + -39, 13, -28, -48, 14, 56, -52, 49, + -1, -121, 25, -18, -36, -52, -57, -30, + 54, -124, -26, -47, 10, 39, 12, 2, + 9, -127, -128, 102, 21, 11, -64, -71, + 89, -113, -111, 54, 31, 94, 121, -40, + 30, 40, -109, 73, -9, 108, -92, 2, + -127, 116, 127, 127, -122, 95, 127, -37, + -127, 28, 89, 10, 24, -104, -62, -67, + -14, 38, 14, -71, 22, -41, 20, -50, + 39, 63, 86, 127, -18, 79, 4, -51, + 2, 33, 117, -113, -78, 56, -91, 37, + 34, -45, -44, -22, 21, -16, 56, 30, + -84, -79, 38, -74, 127, 9, -25, 2, + 82, 61, 25, -26, 26, 11, 117, -65, + 12, -58, 42, -62, -93, 11, 11, 124, + -123, 80, -125, 11, -90, 42, 94, 4, + -109, -1, 85, -52, 45, -26, -27, 77, + -5, 30, 90, 0, 95, -7, 53, 29, + -82, 22, -9, 74, 2, -12, -73, 114, + 97, -64, 122, -77, 43, 91, 86, 126, + 106, 72, 90, -43, 46, 96, -51, 21, + 22, 68, 22, 41, 79, 75, -46, -105, + 23, -116, 127, -123, 102, 57, 85, 10, + -29, 34, 125, 126, 124, 81, -15, 54, + 96, -128, 39, -124, 103, 74, 126, 127, + -50, -71, -122, -64, 93, -75, 71, 105, + 122, 123, 126, 122, -127, 33, -63, -74, + 124, -71, 33, 41, -56, 19, 6, 65, + 41, 90, -116, -3, -46, 75, -13, 98, + -74, -42, 74, -95, -96, 81, 24, 32, + -19, -123, 74, 55, 109, 115, 0, 32, + 33, 12, -20, 9, 127, 127, -61, 79, + 
-48, -54, -49, 101, -9, 27, -106, 74, + 119, 77, 87, -126, -24, 127, 124, 31, + 34, 127, 40, 3, -90, 127, 23, 57, + -53, 127, -69, -88, -33, 127, 19, -46, + -9, -125, 13, -126, -113, 127, -41, 46, + 106, -62, 3, -10, 111, 49, -34, -24, + -20, -112, 11, 101, -50, -34, 50, 65, + -64, -106, 70, -48, 60, 9, -122, -45, + 15, -112, -26, -4, 1, 39, 23, 58, + -45, -80, 127, 82, 58, 30, -94, -119, + 51, -89, 95, -107, 30, 127, 125, 58, + -52, -42, -38, -20, -122, 115, 39, -26, + 5, 73, 13, -39, 43, -23, -20, -125, + 23, 35, 53, -61, -66, 72, -20, 33, + 8, 35, 4, 7, 18, 19, 16, -45, + -50, -71, 31, -29, -41, -27, 10, 14, + 27, 9, -23, 98, 6, -94, 92, 127, + -114, 59, -26, -100, -62, -127, -17, -85, + -60, 126, -42, -6, 33, -120, -26, -126, + -127, -35, -114, -31, 25, -126, -100, -126, + -64, -46, -31, 30, 25, -74, -111, -97, + -81, -104, -114, -19, -9, -116, -69, 22, + 30, 59, 8, -51, 16, -97, 18, -4, + -89, 80, -50, 3, 36, -67, 56, 69, + -26, 107, -10, 58, -28, -4, -57, -72, + -111, 0, -75, -119, 14, -75, -49, -66, + -49, 8, -121, 22, -54, 121, 30, 54, + -26, -126, -123, 56, 5, 48, 21, -127, + -11, 23, 25, -82, 6, -25, 119, 78, + 4, -104, 27, 61, -48, 37, -13, -52, + 50, -50, 44, -1, -22, -43, -59, -78, + -67, -32, -26, 9, -3, 40, 16, 19, + 3, -9, 20, -6, -37, 28, 39, 17, + -19, -10, 1, 6, -59, 74, 47, 3, + -119, 0, -128, -107, -25, -22, -69, -23, + -111, -42, -93, -120, 90, -85, -54, -118, + 76, -79, 124, 101, -77, -75, -17, -71, + -114, 68, 55, 79, -1, -123, -20, 127, + -65, -123, -128, -87, 123, 9, -115, -14, + 7, -4, 127, -79, -115, 125, -28, 89, + -83, 49, 89, 119, -69, -5, 12, -49, + 60, 57, -24, -99, -110, 76, -83, 125, + 73, 81, 11, 8, -45, 1, 83, 13, + -70, -2, 97, 112, -97, 53, -9, -94, + 124, 44, -49, -24, 52, 76, -110, -70, + -114, -12, 72, -4, -114, 43, -43, 81, + 102, -84, -27, 62, -40, 52, 58, 124, + -35, -51, -123, -43, 56, -75, -34, -35, + -106, 93, -43, 14, -16, 46, 62, -97, + 21, 30, -53, 21, -11, -33, -20, -95, + 4, -126, 12, 45, 20, 108, 85, 11, + 20, 
-40, 99, 4, -25, -18, -23, -12, + -126, -55, -20, -44, -51, 91, -127, 127, + -44, 7, 127, 78, 38, 125, -6, -94, + -103, 73, 126, -126, 18, 59, -46, 106, + 76, 116, -31, 75, -4, 92, 102, 32, + -31, 73, 42, -21, -28, 57, 127, -8, + -107, 115, 124, -94, -4, -128, 29, -57, + 70, -82, 50, -13, -44, 38, 67, -93, + 6, -39, -46, 56, 68, 27, 61, 26, + 18, -72, 127, 22, 18, -31, 127, 61, + -65, -38, 1, -67, -1, 8, -73, 46, + -116, -94, 58, -49, 71, -40, -63, -82, + -20, -60, 93, 76, 69, -106, 34, -31, + 4, -25, 107, -18, 45, 4, -61, 126, + 54, -126, -125, 41, 19, 44, 32, -98, + 125, -24, 125, -96, -125, 15, 87, -4, + -90, 18, -40, 28, -69, 67, 22, 41, + 39, 7, -48, -44, 12, 69, -13, 2, + 44, -38, 111, -7, -126, -22, -9, 74, + -128, -36, -7, -123, -15, -79, -91, -37, + -127, -122, 104, 30, 7, 98, -37, 111, + -116, -47, 127, -45, 118, -111, -123, -120, + -77, -64, -125, 124, 77, 111, 77, 18, + -113, 117, -9, 67, -77, 126, 49, -20, + -124, 39, 41, -124, -34, 114, -87, -126, + 98, -20, 59, -17, -24, 125, 107, 54, + 35, 33, -44, 12, -29, 125, -71, -28, + -63, -114, 28, -17, 121, -36, 127, 89, + -122, -49, -18, -48, 17, 24, 19, -64, + -128, 13, 86, 45, 13, -49, 55, 84, + 48, 80, -39, 99, -127, 70, -33, 30, + 50, 126, -65, -117, -13, -20, -24, 127, + 115, -72, -104, 63, 126, -42, 57, 17, + 46, 21, 119, 110, -100, -60, -112, 62, + -33, 28, 26, -22, -60, -33, -54, 78, + 25, 32, -114, 86, 44, 26, 43, 76, + 121, 19, 97, -2, -3, -73, -68, 6, + -116, 6, -43, -97, 46, -128, -120, -31, + -119, -29, 16, 16, -126, -128, -126, -46, + -9, -3, 92, -31, -76, -126, -3, -107, + -12, -23, -69, 5, 51, 27, -42, 23, + -70, -128, -29, 22, 29, -126, -55, 50, + -71, -3, 127, 44, -27, -70, -63, -66, + -70, 104, 86, 115, 29, -92, 41, -90, + 44, -11, -28, 20, -11, -63, -16, 43, + 31, 17, -73, -31, -1, -17, -11, -39, + 56, 18, 124, 72, -14, 28, 69, -121, + -125, 34, 127, 63, 86, -80, -126, -125, + -124, -47, 124, 77, 124, -19, 23, -7, + -50, 96, -128, -93, 102, -53, -36, -87, + 119, -125, 92, -126, 118, 102, 
72, -2, + 125, 10, 97, 124, -125, 125, 71, -20, + -47, -116, -121, -4, -9, -32, 79, -124, + -36, 33, -128, -74, 125, 23, 127, -29, + -115, -32, 124, -89, 32, -107, 43, -17, + 24, 24, 18, 29, -13, -15, -36, 62, + -91, 4, -41, 95, 28, -23, 6, 46, + 84, 66, 77, 68, -70, -1, -23, -6, + 65, 70, -21, 9, 77, -12, 2, -118, + 4, 9, -108, 84, 52, 2, 52, 13, + -10, 58, -110, 18, 66, -95, -23, 70, + 31, -3, 56, 56, -3, -7, 1, -27, + -48, -61, 41, -4, 10, -62, 32, -7, + -24, 9, -48, -60, -4, 79, -20, -38, + -76, 68, -49, -97, 0, -15, 5, -100, + -49, -95, -99, -115, -9, -40, 10, 104, + 13, 56, 127, -27, -109, -94, -118, -102, + -44, -85, 52, 127, -4, 14, 62, 121, + -122, -26, -79, -42, -34, 1, 25, -38, + -79, -58, -31, -31, -90, -30, -123, 32, + -56, 125, 66, 124, -1, 3, 91, -103, + -7, 23, 78, -18, 9, 69, -69, 76, + -38, -33, -2, -98, 18, 106, 84, 55, + 87, -47, 35, -124, 64, 41, -14, 46, + 25, -2, 120, -21, 82, 19, -79, -37, + -3, -8, -16, 21, 19, -5, -28, -112, + 39, -6, -30, 53, -69, 53, 46, 127, + 123, 78, 20, 28, -7, 73, 72, 17, + -40, 41, 111, 57, 32, -95, 29, 28, + -39, -65, 54, -20, -63, 29, -67, 3, + 44, -57, -47, 11, 61, -22, -44, 61, + 48, -100, 20, 125, 96, -24, -16, 3, + -69, -126, 74, -125, 9, 45, -67, -123, + -59, -72, 118, 69, 45, 50, -57, 67, + 13, -66, -106, 47, 62, 22, -1, -22, + -25, -40, -125, 3, 125, 32, 102, -56, + -25, -75, -30, 122, 60, -13, 36, -73, + 7, -84, 124, 40, -118, 17, -87, -118, + -8, 3, -27, 111, -40, 40, -51, 127, + 125, -45, -30, -54, 46, 80, -1, -30, + 101, -17, 18, 26, 54, 7, -12, 1, + -127, 123, -122, -27, -75, 64, 10, 25, + -15, -44, 127, -127, 5, -84, -81, -7, + 19, -26, 126, 15, 116, -126, 14, -76, + 44, 62, -110, -124, 125, -29, -87, -3, + -69, 82, 90, 57, -123, 123, 100, -19, + -51, -32, 69, 37, -57, -128, -124, -72, + -13, 51, -7, -45, -73, 5, 99, -26, + -117, -96, -109, 4, -31, -12, 0, 31, + -42, -27, 12, -81, 118, 39, 83, 14, + 41, -126, 107, -82, 94, -116, -122, -47, + -109, -84, -128, -35, -56, 66, 8, -65, + 19, 42, -46, -72, 
-109, 41, 43, -127, + -113, 58, 127, 42, -75, -1, 65, 117, + -55, -113, -123, 124, 43, -96, -115, -19, + 68, 15, 94, 3, 75, 0, 34, 9, + 42, 110, -48, 92, -76, 99, -17, 27, + 32, 13, 125, 50, -17, 56, 4, 53, + 34, -8, 99, 80, -126, -21, -65, -11, + -46, 44, -81, -3, -121, 123, 66, -81, + -84, 119, 127, 84, 105, 45, -66, -42, + -23, 32, -25, 12, 111, 127, 88, 125, + 30, 24, -127, -9, -54, 127, -116, -119, + 88, 70, 94, -120, 35, -93, 15, 22, + -21, 25, -110, -123, -45, 8, -109, 125, + -122, -86, -126, 8, -14, -120, -45, -45, + 69, -125, -122, 6, 81, 86, 125, 95, + 54, 77, 54, -123, 126, -85, -117, 56, + 11, 0, -61, -91, -12, -2, -113, -3, + -15, -122, -63, -91, 10, 84, -111, 125, + 93, 21, 62, -78, -116, 13, -57, 28, + -124, 126, 110, 12, 15, 95, 15, -19, + -125, -97, 52, -7, 101, 9, 20, -125, + -26, -56, 72, 77, 12, -126, 22, -29, + 47, 62, 95, 112, 69, 32, 97, -83, + -8, -5, 67, -63, -123, 79, 59, 0, + -6, -17, 4, -111, -52, 27, 65, 0 +}; + +static const rnn_weight vad_gru_recurrent_weights[1728] = { + 65, 83, 35, 56, 24, -34, -28, -2, + 125, 19, 42, -9, 124, -53, 24, -87, + 11, 35, -81, -35, -125, -31, 123, -21, + 33, -91, 113, -93, 45, -6, 53, 38, + -92, 8, -27, 87, 4, 43, 43, 10, + -128, -128, -46, 127, -38, -45, 25, -87, + 19, 5, 52, -96, -23, -29, 121, -126, + -24, -20, -2, 69, -50, 6, 71, -81, + -125, 90, -94, 1, -38, 36, 89, 17, + -60, 71, -48, 18, -15, 44, -18, 59, + 11, 114, -51, 32, 110, 1, 4, 109, + -24, 127, 27, 60, 88, 24, 45, -59, + 75, -36, 8, 57, -32, -25, 13, 126, + -89, -61, -76, 127, 18, -62, -68, 23, + -113, 5, 126, 43, -88, 26, -78, 18, + 75, 21, 9, -74, 20, 41, 126, -118, + -15, 9, 116, 126, -127, 34, -6, 126, + -128, -53, -54, -55, -121, 70, 127, -12, + -68, 82, -25, 104, -126, 126, -21, -26, + 124, -75, -127, -120, 13, 61, -64, -108, + -63, -65, -44, -35, -61, -39, 109, -74, + 113, -3, 108, -30, 125, 120, 39, 125, + -128, -95, -99, 111, 9, 25, 114, -75, + -92, -54, -12, -32, -38, 10, 31, 10, + 63, 51, 40, -99, 74, 4, 50, -128, + -36, -35, 
-11, -28, -126, -7, 66, -58, + -126, -22, -83, -61, -127, 49, 126, -8, + 7, 62, 36, -11, -32, -44, 63, 116, + 41, 65, -127, 126, 63, -30, -96, 74, + -92, 127, 38, -18, -128, 68, -5, 101, + -4, 85, 58, 79, 0, -58, 8, 119, + -70, -1, -79, -68, 114, -28, -90, -6, + -112, 2, 127, -8, 10, 55, -59, -126, + 127, 125, 80, 72, 35, -54, 95, -124, + -124, 79, 23, -46, -61, -127, -100, 99, + -77, 8, -87, 5, -2, 49, 85, 7, + -71, 82, 53, -41, 22, -22, -93, -103, + 6, 52, -56, 14, -8, -111, 85, 16, + 54, 32, -118, -24, 61, -53, 96, -70, + -5, -17, -67, -84, -7, -82, -107, -96, + 21, -83, -58, 50, 12, -126, -1, -28, + 34, -126, 115, 17, 91, 1, -127, 72, + 11, 126, -81, 6, 96, -8, 77, 15, + -6, 63, -27, 20, -123, -109, 85, -79, + -17, 126, -92, 2, -61, 20, 14, 17, + 121, 123, 30, 57, 120, 127, 57, 42, + 117, 98, 67, 39, -20, -70, 100, 7, + 125, 122, 40, 16, -79, 125, 83, 41, + -106, -57, 24, 55, 27, -66, -111, -44, + -7, -43, -66, 121, 42, -128, -45, 35, + 15, -127, 34, -35, -34, -40, -18, -6, + 63, 111, 31, 116, 127, 19, 24, -71, + -39, 34, 11, 19, -40, 27, 12, 106, + -10, 56, -82, -106, -2, -50, -52, 114, + -126, -34, -43, -68, 10, 76, 57, -118, + -128, 37, -104, 76, 125, 3, -76, 127, + -29, 84, -94, -15, 55, 125, 79, 127, + -57, -125, 104, -68, 126, 126, -77, 51, + 45, 33, -109, 115, -11, 1, 95, -121, + -5, -9, -126, -114, 39, 68, -126, -107, + -51, -42, 24, -8, 51, -27, -43, 66, + -45, 62, -98, -109, 69, 67, 0, -125, + -128, 49, 31, 126, -122, 2, -55, -67, + -126, -70, -128, -125, -77, 25, 16, -8, + -102, 11, -75, 82, 38, -5, 5, 19, + 34, 47, -127, -93, 21, 24, -97, -18, + 31, 39, 34, -20, 22, 123, 7, -77, + -81, -46, -9, 1, 23, 39, -127, -43, + -8, -50, 10, -21, 59, -9, -4, -13, + -27, 44, 127, 52, -47, 70, -43, 52, + 101, -49, 27, 45, 49, 33, -125, 55, + 114, 20, -1, 76, -24, -96, 105, 24, + 126, 75, -21, -105, 13, -42, 40, 126, + -30, -39, -95, 125, -63, 11, 6, 125, + 125, -14, 5, 42, -61, -4, 49, 88, + 6, -107, -28, 19, -29, 47, 126, 6, + -46, -89, -18, 91, -20, -6, 118, 
-21, + -22, 39, 115, 11, -42, 54, 73, -55, + -77, 62, -27, -59, -99, -12, -127, -40, + 56, -3, -124, -91, 71, -111, 6, -19, + 82, -24, -35, 102, -42, 7, -126, -126, + -125, 18, 98, -52, 127, 105, -52, 40, + -83, 126, -122, 109, 5, 127, 48, 6, + 5, -125, 100, -16, 29, 85, -89, 8, + 4, 41, 62, -127, 62, 122, 85, 122, + -107, 8, -125, 93, -127, 127, 102, 19, + 19, -66, 41, -42, 114, 127, -48, -117, + -29, -6, -73, -102, -3, -19, 0, 88, + 42, 87, -117, -20, 2, 122, 28, 63, + 71, 66, 120, 93, 124, -43, 49, 103, + 31, 90, -91, -22, -126, 26, -24, -21, + 51, -126, 87, -103, -69, -10, -66, -23, + 20, 97, 36, 25, -127, 30, -20, -63, + 30, 51, -116, 23, 40, -39, 36, -83, + -77, -25, -50, 110, 14, 13, -109, 125, + -65, -55, -87, 124, -126, -32, -72, -108, + 127, 127, -125, -124, 61, 121, 102, -128, + -127, 16, 100, 127, -124, -68, 72, -93, + -128, 43, -93, -19, -125, -97, -113, -33, + 83, 127, -44, 127, -75, 127, 16, 44, + 50, -122, 23, 118, 46, 19, 26, -128, + 10, 4, 99, -14, -82, -13, 30, 125, + 57, 65, 60, -71, 35, 98, 28, 7, + 1, 43, 89, 70, 75, 121, -59, 82, + -126, -53, -16, -116, -65, 52, -52, 0, + 80, 35, 45, -61, 46, 8, 107, 27, + -26, -118, 90, 57, -10, 7, -15, 0, + -39, -4, 12, 29, -1, 116, 84, 79, + 119, 125, -59, 28, -6, -25, -43, 2, + 90, 79, 67, 103, -82, 2, -6, 125, + 19, 73, 0, -105, 112, -17, 104, 107, + 124, 106, 19, 56, -44, 55, -112, 6, + -39, -83, 126, -93, -98, 57, -120, -23, + -38, 2, -31, -48, 106, 127, 127, 69, + 16, 110, 71, 104, 62, -12, -22, 42, + -37, -94, 34, -1, -32, -12, -124, -47, + -13, 60, -75, -66, 58, -127, -2, 64, + 76, -106, 73, -49, -31, 127, 126, 31, + 16, 127, -110, 107, -16, -53, 20, 69, + -14, -125, 59, -44, 15, 120, 125, 125, + 43, 6, 19, -58, 127, 127, 43, 16, + 82, 97, -127, 127, -93, -41, 88, 0, + 77, -15, 116, 16, -124, -31, -3, 95, + -40, -126, -54, -126, -83, -8, -59, 6, + 67, -29, 4, 124, -10, 112, -28, -8, + 85, -21, 45, 84, 6, -8, 11, 72, + 32, 84, -62, 77, 2, -36, 75, 31, + -50, 116, 126, 119, -88, -55, -14, -37, + 126, 
40, -108, -6, -6, 57, 64, -28, + -76, 30, -117, -93, 31, -92, -44, -64, + 94, 58, 65, 114, 41, 47, 71, 42, + -26, 99, -126, 57, -5, 74, -19, -113, + -1, 67, -21, 126, 1, -3, 33, 60, + -82, 37, -48, 89, 114, -38, 127, -114, + 35, 58, -5, 21, -46, 121, -123, -43, + 127, 115, 123, 122, -101, 126, 127, 81, + 52, 89, -127, 102, 42, 117, -9, -2, + 125, 127, 110, 96, 120, 66, 70, 124, + 55, 84, -38, -58, 119, -127, -16, -79, + 123, 18, -127, -50, -38, 120, -85, 1, + 7, -56, 108, -77, -2, 21, 37, 1, + 13, -105, -69, 28, -87, 33, -104, -51, + 126, 41, 3, -121, 28, 71, 58, 86, + -8, 127, 94, -55, 125, 40, -19, 127, + -33, -87, -23, 7, -111, -68, 9, 84, + -119, 55, -82, 78, -37, -20, -9, -23, + 53, -13, 15, -46, 116, 126, -127, 56, + -126, 125, -7, -1, 45, 26, 125, 121, + 29, 47, -86, 30, 10, 76, -125, -7, + 23, 92, -12, -39, -18, 92, -97, -8, + -85, -41, 49, -50, 123, -37, -126, -30, + 14, 79, -49, -65, 9, -36, -38, -96, + 85, -24, -13, 37, -25, -5, -64, -127, + 55, -60, -18, -61, -63, 127, 56, 67, + 15, 124, 72, 120, 127, 40, -10, 114, + 24, -23, 46, 78, -53, 125, 86, 124, + 86, 0, 38, 93, 21, 127, 123, 75, + -72, 13, 48, 33, 83, -51, 15, -32, + -49, -33, 120, 64, 7, 9, 65, 60, + 21, -21, -61, -53, -113, 84, -97, 101, + 37, -114, -27, 41, 73, 126, -10, 59, + 61, -15, 70, -13, 82, -4, 69, 56, + 94, -91, -50, 92, -74, -48, 53, -7, + -107, 127, 28, 30, -26, -21, -61, 77, + 82, 64, -91, -125, 122, -104, 127, 123, + 122, 123, 76, -126, 127, -6, -80, 7, + 40, -66, -65, 54, -2, 23, 96, -64, + 74, 2, -53, -12, -123, 39, 60, -20, + 16, -17, -97, 23, -4, -53, -122, 32, + -16, -54, -95, 43, 71, -1, -67, -33, + 41, 18, 72, 28, -83, 31, -100, -91, + -27, 10, -128, -106, 2, 76, -13, 42, + 34, 112, -19, 44, 40, -9, -11, 65, + 92, -43, -125, 2, 47, -32, 25, 122, + -29, 12, 101, -8, -126, -23, 43, 7, + 125, -20, -124, 82, -2, 13, -73, -106, + 115, 31, 116, -23, -44, -71, 84, 3, + 47, 91, 127, 127, -15, 95, 7, 93, + 5, 113, -50, 54, 11, 13, -127, 17, + 72, 43, -23, 5, -70, 20, 15, -27, + 99, 
69, -109, -122, -94, 16, 127, 0, + 116, 104, 45, 108, -34, 87, 72, -14, + 118, 46, 42, 109, -26, 95, 93, 127, + 60, 127, -93, -54, -122, 34, -105, 56, + 55, 103, 125, -71, -50, 95, -72, 127, + 107, 21, 73, 126, 61, 127, 127, 24, + -62, 90, 73, 90, -46, -78, -124, 72, + 123, -42, 50, -107, 17, -32, -62, -89, + 124, 1, 80, -2, 117, 119, -65, -127, + -95, -121, -52, 103, 66, 75, -3, -62, + -127, 127, -74, 124, 79, 49, 40, 105, + -67, -71, -70, 43, 127, 119, -4, 66, + 43, 23, 91, -126, 15, 63, -119, 112, + 103, 15, -99, 31, -127, 69, 116, -46, + -67, 2, -126, -29, 30, 30, -69, -98, + -47, -87, -70, -127, 23, -73, 30, -7, + 94, -52, -65, 98, -45, 97, 53, 23, + -9, -22, -52, -47, 6, -1, -85, -15, + -61, -14, 68, 110, -10, -121, -25, -35, + -15, -94, -123, 27, 75, 48, -66, -56, + -44, 93, 109, 67, -36, 24, 70, -126, + 8, -127, 126, 52, 11, -32, 120, -13, + -26, -28, -125, 127, 106, -50, 124, 36, + -126, -12, 0, -23, 76, -71, -126, -12, + -17, -82, 12, 124, 57, 33, 4, 77, + -46, 71, -34, 72, 125, -128, 124, -24, + -128, 75, -120, 69, -45, 55, 33, 127, + -33, 4, -105, -41, -59, -91, 123, 44, + -127, 127, -67, 52, 25, -125, -65, 100, + -25, 123, 6, 11, -123, -92, -33, 126, + -17, -4, 29, 33, 127, 96, 3, 87, + -48, -18, -70, 123, 58, -127, -3, -52, + -1, -36, -41, 127, 51, -52, -27, 46, + -83, 57, 9, 126, 127, 94, 79, -37, + -127, -40, 67, 52, 82, -66, 122, -13, + -73, 127, -8, -80, 46, -48, 4, -54 +}; + +static const rnn_weight vad_gru_bias[72] = { + 124, 125, -57, -126, 53, 123, 127, -75, + 68, 102, -2, 116, 124, 127, 124, 125, + 126, 123, -16, 48, 125, 126, 78, 85, + 11, 126, -30, -30, -64, -3, -105, -29, + -17, 69, 63, 2, -32, -10, -62, 113, + -52, 112, -109, 112, 7, -40, 73, 53, + 62, 6, -2, 0, 0, 100, -16, 26, + -24, 56, 26, -10, -33, 41, 70, 109, + -29, 127, 34, -66, 49, 53, 27, 62 +}; + +static const GRULayer vad_gru = { + vad_gru_bias, + vad_gru_weights, + vad_gru_recurrent_weights, + 24, 24, ACTIVATION_RELU +}; + +static const rnn_weight noise_gru_weights[12960] = { 
+ -10, -8, 5, -72, 7, 55, -38, 3, + 10, 4, 21, 60, 73, 0, -3, 34, + 49, -36, 17, 8, 18, 41, -51, -42, + 34, -8, 126, 15, 112, 74, -60, -60, + 53, -17, 65, 6, 74, -1, 26, 80, + -46, -99, -47, 40, 29, -21, 85, -75, + 27, -117, 46, -22, -76, 56, 16, -67, + -49, -63, -35, -10, -20, 10, 68, 7, + -1, 37, 58, -53, 6, -79, -20, 12, + 6, 91, -63, 67, 58, -13, -23, -74, + -50, -77, -53, -22, -56, 123, -33, 28, + 74, -85, -9, -57, -32, 38, 21, 122, + 40, 23, -30, -81, -68, -29, -14, -98, + -1, -62, -32, 19, 102, 26, 28, -23, + 104, 28, -43, -20, -41, 28, 80, -22, + -88, 6, -26, 14, 34, -46, 57, -33, + -105, -16, 3, -17, -55, -1, -47, -122, + 11, 16, 62, 78, -1, -64, 71, 57, + -5, 45, 65, -93, 31, 30, -25, 21, + 23, 32, -1, -75, -10, 75, -90, 21, + 7, -110, -77, -39, 18, -39, 54, -97, + 12, 52, -97, 17, 73, 120, -81, -114, + 67, 82, 29, -71, -47, 69, -64, 17, + -105, -24, -70, -32, -2, -112, -31, -48, + 1, 22, 92, -21, -89, -65, 16, 49, + 3, 15, 80, -21, -1, 37, -4, -25, + 12, -80, -43, 56, -7, 36, -20, 18, + -12, 39, 66, 74, -100, 117, 76, -82, + -93, 63, 56, 36, 5, 41, -57, 31, + -47, -14, 52, -22, -56, 29, -123, -104, + 41, -113, 124, -106, -36, 41, -86, -40, + 44, 28, -6, 114, -32, -17, -26, -77, + -69, 42, -33, 61, 28, 82, -18, 71, + -53, -63, 122, -38, -49, -108, -32, 126, + 126, 45, 43, -56, 61, 9, -20, -53, + 122, 98, -3, 3, 24, -3, 80, -42, + -20, 57, -34, 108, -26, 48, 116, -14, + 53, 5, -45, 21, -55, 114, -49, -29, + -44, -5, -70, 98, 63, -115, -66, 53, + -55, -19, 83, -12, 7, 47, 42, 15, + -36, 44, 75, -85, 105, -84, -17, -127, + 15, -67, -105, 23, 36, -1, -16, -116, + 21, 58, 69, -57, -104, -81, -49, 91, + 26, -72, 33, -31, -37, -79, 5, 9, + 123, -61, 11, 67, -14, -29, 41, -53, + 37, -59, -20, -24, 95, 125, -66, -26, + 17, 28, -43, -8, -100, 80, -6, 0, + 37, -123, -54, -92, 28, 56, 127, 48, + 11, -58, 99, -90, 63, -4, 6, 10, + 37, -38, -15, 31, 5, -39, 25, -17, + 24, -23, -64, -68, 41, 65, 28, -113, + -115, -27, -51, 50, 9, -101, 73, -41, + -104, 88, -71, 3, -87, 
119, -73, 20, + -35, 0, -30, 34, -31, -9, -4, 23, + -19, 35, -16, 111, 66, -52, -107, 101, + 54, -2, -3, 109, -40, -2, 119, 74, + -26, -116, -105, -90, 25, -111, -43, -92, + -3, -104, 102, 11, 19, -83, 14, -62, + 38, 57, 50, -10, 36, -95, 124, 32, + -34, -123, -7, -109, 124, -119, -67, -116, + -31, 114, -65, -34, -126, 8, 8, 5, + 53, -28, 53, 84, -9, -14, 92, -70, + 74, 116, -4, 121, -49, 108, 0, 126, + 123, -1, -25, 24, 56, -121, 20, -47, + -41, -11, -22, -32, -40, -41, -66, 29, + -128, -30, -28, 31, -39, 30, 57, -96, + 63, -121, 71, 1, -29, -20, 72, 114, + 12, -43, 23, -75, 24, -4, -123, 17, + 18, -68, -23, 51, -30, 39, -125, -48, + 13, -119, -75, -74, 51, 125, -10, 29, + -103, 6, -28, 22, -45, 19, 17, 19, + 33, -3, -18, -30, -12, -25, -128, 61, + 94, 47, -56, 59, -62, 66, -28, 18, + -115, 12, -3, -80, 60, -62, 55, -16, + 68, 23, -6, 109, 11, 0, -7, -96, + -11, 21, 44, -75, -8, -10, -10, 69, + 14, 14, -41, 26, 67, 37, -30, 44, + 11, -16, 3, 66, 1, -18, 21, 96, + -29, -100, 27, -8, -98, 21, -2, 58, + -45, -15, 93, 37, -66, -48, -7, -5, + 39, -57, 17, -81, 42, 0, -40, 123, + 3, 118, -14, 56, -113, -68, -127, 74, + -78, 46, 97, -61, -42, 68, -32, 16, + -10, -82, -6, 1, 98, -48, 20, 32, + -102, -35, 45, -5, -91, 26, 37, 18, + 59, -88, -29, 17, 43, 33, 14, 6, + -37, -37, 5, -7, -37, -13, 72, -6, + -128, -43, 17, 32, 45, -26, 4, -85, + -59, 8, 5, -27, 51, 55, 42, -79, + -13, -51, 49, 70, -26, -21, 9, 27, + 21, -26, -76, 28, 1, 89, -76, 23, + -4, 10, 31, -13, -22, 3, 41, 24, + 18, 25, -55, 10, -23, 4, -72, -18, + -91, -50, 1, -55, 12, -26, -43, 11, + -14, 27, -82, -73, 36, 27, -20, 62, + 53, 100, 75, -12, -37, -77, -127, 32, + -21, -24, 34, -26, -39, -5, -66, 94, + -97, 19, 16, 61, 59, 65, 37, -64, + 26, -34, 63, 74, 7, 38, -2, -27, + 82, -73, -10, 37, -43, 1, 23, 24, + 25, -5, 13, 6, -76, 78, 46, 44, + -107, 14, 7, -22, 28, -125, 47, -48, + 28, -16, 15, 1, -16, 21, 15, 51, + 37, -17, 2, 39, -23, -28, 10, -51, + -48, -1, 6, 88, 38, 22, -40, 37, + -22, -23, 67, -4, -3, 
-6, 9, 108, + -32, 31, 77, 28, -101, -23, -10, -38, + -13, 12, -34, 55, 24, -4, 48, 29, + -72, -83, 41, -31, -49, -68, 5, -3, + 124, -19, 44, -94, -4, -8, -31, 9, + -21, 58, -60, 24, 13, -9, 97, 53, + 93, -51, 105, 55, 36, -32, 6, -51, + -99, 19, 39, -63, -64, 29, 22, 5, + -24, -74, 72, -6, 35, 37, -25, 65, + 74, 29, 30, 65, 91, 30, -42, 15, + 42, -64, -87, -68, 53, -78, -33, 21, + -60, 33, 7, 6, 10, 68, 55, -47, + 51, -56, 79, -29, -1, -66, -29, 50, + 66, -12, -67, 69, -53, -90, -31, -123, + 49, 7, 10, -6, 55, -61, -14, -6, + 59, -2, -41, 21, 10, -21, -24, -23, + -34, 30, -49, -41, -27, 36, -56, 46, + 7, 18, -23, 78, -49, 1, -37, 43, + 77, -21, -19, 18, 14, 35, 92, 39, + -39, 44, -58, -1, 4, -63, 27, 79, + -14, -7, -41, -34, -24, -25, 13, -14, + -30, 5, -62, 13, -52, 53, 40, -18, + -29, 52, -20, 11, 20, 23, -47, 51, + 30, -91, -46, 39, 4, 53, -18, 2, + -28, -12, 62, -29, -57, -13, -20, 60, + -15, 3, 49, -26, 0, -30, -18, 97, + 11, 52, 43, 87, 107, -94, -30, 63, + -4, -62, 48, 2, 22, 7, -11, -79, + -41, 18, -28, 9, 30, -58, 80, -64, + 45, 2, 28, -49, -25, -34, 25, 87, + 108, -8, -42, -34, 61, -14, -13, 62, + -98, -5, 23, 15, -2, -1, -6, -52, + 40, -33, 61, -38, 76, -115, -23, 22, + 17, 25, 63, -37, -32, 26, -19, -8, + 54, 6, -39, -28, 25, 40, -29, 33, + 10, -50, 20, 25, 6, -22, 69, -24, + -115, 2, -13, -28, 28, -8, 109, -18, + -64, 96, 6, 7, 31, -10, 7, -34, + 24, -10, 50, 23, -59, -55, 45, 37, + -98, 27, -17, -47, 63, 57, 13, 35, + 4, -85, -65, 52, -54, -19, -40, 4, + -68, -61, -85, 98, -81, 44, 25, -17, + 44, -33, -31, -44, 21, -6, -29, -32, + -2, 50, -31, -16, 46, 50, -54, -18, + 70, -88, -44, 26, -51, -34, 21, 48, + -16, -15, 5, -28, -37, 25, -52, 25, + 37, -60, 19, -18, -49, 72, -120, -1, + 65, -61, -28, 25, -114, 89, -61, 126, + -48, -64, 69, 37, 46, 9, 18, -117, + -35, 64, -75, 28, 127, 33, -63, 22, + -15, -28, -9, -41, 27, 68, -4, 54, + 4, -89, -8, -10, 83, 73, -11, -90, + -8, 14, -92, -38, 11, -22, -36, 33, + -37, -38, -126, -74, -5, -12, -8, -4, + -28, 
-47, -30, -30, -6, 43, -5, 56, + 3, -16, -83, -73, -51, 23, -99, -10, + -2, 57, -18, -17, -53, 3, -21, 35, + 25, -116, -20, -33, 89, -44, 49, 102, + -74, -57, -65, -127, -33, 59, 60, 20, + -60, -1, -18, 10, -30, 106, 3, -24, + -15, 93, 45, -22, 7, 55, 9, -27, + -82, 3, 19, 9, 4, -14, -43, -36, + -19, 97, 85, 31, 42, -35, -19, -12, + -1, 68, -53, 46, -127, -93, 16, -63, + -58, -126, 55, 6, -52, 97, -41, 59, + 49, -9, 10, 54, -42, 5, -11, -25, + -1, 35, 72, 52, 28, -6, -54, 30, + -28, 18, 38, -17, 57, -8, -44, -20, + 42, -20, 94, -46, -2, -81, 110, 27, + -66, 5, 63, 36, -51, -55, -27, 71, + 125, -5, -12, -57, 65, -98, 36, -12, + 17, -8, -13, -8, -17, -52, -109, 15, + -31, 31, 9, -23, -22, -11, 10, 55, + -11, -52, -69, 52, 10, -23, 47, -35, + -4, -65, 15, 33, 53, -14, -104, 26, + -26, -29, -8, 97, -2, 58, -127, -4, + -106, 35, 53, -2, -71, 2, 79, 54, + 39, -74, -121, 124, 41, 25, -33, 4, + 28, -18, -9, -43, 59, -11, 31, -19, + -122, 86, 25, 54, -40, -18, 49, -25, + -28, 118, 65, -102, 111, -39, -7, -89, + -38, -17, 79, 0, -50, 72, 51, 22, + 24, 36, 59, 1, 66, -119, -84, -8, + 102, 44, 15, 56, 26, -74, -29, 28, + 13, -75, 32, 78, -38, -45, -80, -90, + 13, 3, 34, -76, -122, 120, -82, -34, + 6, -32, -100, -89, 14, -14, 73, 24, + -41, 53, 30, -80, -63, 51, -17, 33, + 47, -17, 14, -17, 32, 74, -52, 2, + 14, -67, -16, -18, -57, 18, -14, 44, + -73, 45, 107, 38, 69, -24, -12, 114, + -15, 91, 10, -26, -51, 78, 63, -78, + -5, -120, 14, 32, -6, -25, -49, 67, + 20, -66, 7, 65, 46, -41, -32, 62, + 41, -50, -87, -34, 64, 70, 23, -36, + 44, -51, -127, -22, -102, 33, -58, -23, + 105, -29, -33, 47, 9, -44, 35, -36, + -21, 126, -90, -34, 105, -6, 18, -35, + 3, -14, 65, 114, -2, -25, -27, -72, + -63, 61, -109, -13, -113, 8, -45, 22, + 105, 6, 45, -47, 65, 16, 79, 28, + -21, 82, 37, -15, -64, -34, -114, 29, + 67, 43, 78, 52, 34, -84, -54, -48, + -65, 63, -8, 18, -16, 10, 3, 71, + -101, 119, -24, 88, -26, 33, -38, -80, + 14, -123, 24, -33, -20, 52, -1, -40, + 49, -13, 8, -39, 23, -5, -11, 
-23, + -10, -17, -25, 43, 29, -13, -34, -19, + -35, -18, -21, 51, -21, -3, -19, 12, + -2, 50, 48, 22, -56, 39, -5, -38, + -60, -11, 36, 33, 13, -53, -9, 94, + 8, -62, 55, -11, 101, 22, 2, -8, + -127, 98, -25, -37, -73, 71, -16, 45, + 67, 8, -17, -90, -91, 23, -120, -39, + -9, 28, -128, 8, -52, -107, -27, 68, + 33, -31, 29, 124, -26, 30, -10, -31, + 33, 47, 9, -65, -46, 13, -90, 126, + 99, -37, -81, 1, -61, 15, -4, 4, + -9, -34, -33, -33, -28, -49, 14, -93, + 87, -80, 59, -56, -50, -45, 45, -65, + -97, 6, -121, 6, -113, 19, 56, -21, + 4, 12, 87, 5, -112, 126, 69, 27, + -70, 82, 31, -27, -123, -16, 21, 32, + -5, 83, -95, -7, -1, 93, -9, 15, + 124, 21, 21, -7, -45, -16, -66, 5, + -34, -118, -16, -32, -34, -44, 2, 124, + -78, 8, 90, -27, 127, 44, -28, 114, + -30, 114, -8, 27, -56, 18, 59, -24, + -62, 16, -25, -31, 71, 17, 3, 12, + 92, -4, -78, 37, 127, 85, -3, -17, + 80, 32, -2, 84, -71, -31, 62, -26, + 47, -81, -51, -95, 66, -52, -57, -31, + -10, 54, 116, 88, -3, -122, -93, 7, + 37, -70, -28, -91, 39, 12, -94, 41, + 44, 70, -55, 69, 20, 56, -34, 1, + 9, -34, -37, -4, -2, 23, 68, -44, + 2, -46, -5, -72, -104, -94, -56, -30, + -59, 56, 14, 108, 36, 115, -96, 29, + -114, 105, -64, 5, 65, -82, 25, -10, + 117, 58, 20, -19, 122, 33, -37, 35, + -19, -120, 6, -10, 78, -34, 126, -116, + -37, 59, -30, 55, 47, 51, -42, 11, + -2, -26, 29, 25, 51, -5, -34, 89, + -29, 76, -51, -44, 9, -17, 46, -42, + 5, -52, -24, -14, 6, 127, 127, -9, + -5, -81, -2, 65, -67, 72, 99, 14, + -13, -6, -7, -36, -54, -6, -4, 30, + -29, -27, -28, -56, 83, -1, 29, -24, + -48, -23, -20, 11, -2, -4, -31, 39, + -45, 0, -18, -73, -29, 48, 51, -20, + 61, 24, -62, 75, -32, -18, -44, -38, + 44, 26, 38, -56, 14, -34, -48, -7, + 19, -55, 20, -95, 45, 16, 13, 93, + -13, 21, -72, 23, 124, -33, -52, 51, + 5, 8, -25, -10, -77, 102, -25, -1, + -14, 14, 4, 16, -28, 98, 18, -43, + -26, 12, -30, -86, -68, 81, 9, -50, + 80, -56, -11, 37, 24, -11, 28, 1, + 55, 36, 34, 23, -87, -58, 10, 31, + -11, 19, -48, 48, 95, -12, 33, 
-46, + 100, 52, 32, -49, -24, -27, 46, -6, + -31, 21, 39, 33, 63, -65, -35, 79, + 127, 11, 34, -13, -124, 10, -54, 24, + 3, 24, 11, 16, -19, -45, 36, 52, + -32, 90, -33, -68, -51, 33, -16, 34, + 65, 98, -8, 125, 60, -83, -21, 6, + 111, 87, -46, -59, 44, -7, 89, 124, + 28, 32, 30, 68, 106, -37, -1, -2, + -97, -57, -92, -37, 56, -75, 22, -31, + 100, -44, -10, 12, -2, 95, 36, 3, + 74, 35, 127, 51, -41, 72, 27, 59, + -105, 103, -2, -32, -116, 23, 43, 6, + 48, -20, 110, 66, -42, -28, -41, -10, + 33, 117, 14, 89, -18, -36, 54, 39, + 88, -72, -27, 109, 27, -13, -119, 54, + -18, -14, 85, -12, 64, 6, 44, -15, + -66, -46, -18, 90, 109, 98, 119, -28, + 11, 46, 29, 115, -20, -106, -27, 97, + -45, -82, 43, -103, 122, -14, -122, 24, + 10, -128, 14, 10, 72, 40, -71, -10, + -21, -99, -103, 2, -120, 50, 0, 35, + -100, 46, 77, 88, -28, -1, 26, -46, + -3, -22, -37, -11, -82, -82, -128, -21, + -16, -4, -9, -69, -5, 40, 0, -63, + 33, 19, -14, 83, 54, 24, 66, -8, + 24, -122, -44, -32, 86, 38, -3, 6, + 48, 32, 62, 34, 3, -42, 28, -11, + -23, -23, 21, 12, -2, 36, 4, -20, + -1, 64, -20, 11, 73, 23, -7, -50, + 42, 7, 99, 40, -19, 39, 26, 65, + 117, 7, -16, -6, 79, 70, -48, -12, + 47, 19, 7, -54, -7, -43, 39, 50, + 23, 53, -48, -97, 28, 6, 83, -25, + 42, 38, 19, 32, -59, 22, -60, -94, + -45, -45, 83, -3, -69, 75, 34, 61, + 66, 30, 19, -14, -32, -4, 13, -38, + 8, -36, 31, -48, -56, -49, -24, 72, + -73, 60, 17, -40, 6, 125, 27, -18, + 41, 28, 44, 29, -32, 45, -33, -6, + -41, 123, 5, -31, 89, 92, 20, -66, + 73, -39, -51, 0, -31, 21, 69, 99, + -50, -3, -13, -10, -5, 72, 14, -13, + -57, 20, -33, 107, -84, 5, -57, -37, + -10, -46, -80, -108, 3, 49, -36, -28, + -44, 34, -125, 41, 48, -3, -33, 2, + 12, 27, -56, -41, 18, -42, -25, 81, + -67, -86, -29, -7, 94, -89, 30, 84, + 73, -21, 40, 29, -27, -19, -35, 68, + 64, -4, -100, -102, -94, -19, -18, -30, + -36, 26, -2, 33, -93, 56, 67, 103, + -73, -101, -45, 18, 11, 18, -33, 43, + 34, 37, -71, 27, -38, -13, -26, -13, + -16, 113, 33, 84, -26, -55, -17, 
-13, + 15, 32, -8, -37, 32, -5, 113, -10, + 126, 53, 23, -24, -52, -11, -55, -9, + -37, -33, 40, 65, 3, -95, -65, 78, + -13, -75, -22, 9, 93, 68, 46, 127, + 16, 87, -47, 59, -36, -5, -3, 37, + 16, 66, 19, -69, 42, -15, -18, 76, + 96, 91, -7, 24, -29, 47, -20, 56, + 45, -54, 50, -70, -52, 54, 41, 20, + 63, 71, -63, 40, 1, 80, 20, -39, + 6, -35, 71, -40, 7, -28, 63, -7, + -49, -12, 1, -16, 73, 9, 50, -46, + -10, 73, -81, 94, -13, 6, -1, 31, + -19, 15, 41, 3, -17, 0, -85, -93, + -86, -10, -37, 47, -6, -62, 30, 35, + 20, 99, 37, 63, -17, -42, -28, -96, + 2, -22, 3, 15, 28, 11, -115, 48, + -34, 6, -30, -78, -85, 38, 25, -32, + -29, -97, 2, 14, 26, 47, 99, 119, + 71, 8, -60, 42, -55, 30, 53, 1, + 31, -103, -20, -11, 0, 87, 37, -5, + 89, 15, -32, -12, 55, 60, 3, -32, + -124, -2, -88, 53, -51, 55, -4, -53, + -46, 94, 18, 57, -72, 14, -41, 11, + 14, -29, -3, -4, -9, 34, 18, -10, + -72, -14, -82, -90, -31, 11, -120, -48, + 44, 3, -6, 79, -15, 8, -16, -89, + 20, -125, -72, 69, 19, 118, -54, -2, + -10, 50, -28, -3, 17, -22, 104, 17, + 101, -61, -9, -117, 5, -24, -105, -117, + -115, 28, -120, 36, -62, -77, 50, 67, + 79, -41, -9, 4, 2, 15, 114, -12, + -16, 15, -49, 50, -122, -46, 30, 39, + 56, 49, 14, -28, -71, -125, 36, 115, + -46, 47, -45, -16, 69, 113, -7, -119, + -43, -16, 17, -11, 102, 120, -34, 64, + -5, -53, 14, 0, -124, 120, 14, -26, + 42, 74, 55, -12, 103, -37, 27, -54, + 13, -54, -9, 39, 6, 6, -28, 43, + 54, 21, 46, -90, -58, 122, -21, -81, + -13, -39, 50, 106, -2, 49, 9, -16, + 24, 15, -73, 110, 1, 104, 52, -104, + 2, -35, -17, 8, -58, 60, 26, 68, + -123, 6, 44, 70, -40, -4, -95, -21, + -110, 51, 80, -19, -97, -5, -50, -100, + -23, -30, 46, -66, -18, -38, -48, 38, + -9, -26, -71, 21, 25, 14, 16, 53, + 14, -56, 20, 79, -87, 50, -7, -28, + 52, 4, 11, -17, -26, 39, 2, 25, + 6, 13, 11, 18, -56, -36, 46, -115, + 32, -80, -44, -7, -32, -13, 74, -61, + 9, -89, 14, 80, 20, -61, 109, -21, + -66, -34, -126, -6, 12, 22, -14, 55, + -28, -47, -59, -12, 2, -38, 73, -42, + 91, -87, 
37, -4, 29, 33, 122, 43, + 85, 41, -50, 11, 29, 60, -4, 31, + -18, 8, -27, -75, 76, -13, 35, 18, + -49, -34, -33, 6, 51, 51, -41, 53, + 47, 21, 62, -52, 30, 5, 16, 78, + -22, 28, -21, 31, -16, 21, -2, 62, + -94, -30, -83, -92, 122, -41, -113, -27, + -51, -123, 4, -116, 4, -68, -14, 3, + -21, -5, 29, -31, -15, -4, -27, -24, + 10, -121, -119, -30, -37, -74, -32, -63, + -46, -69, -72, -44, 90, 84, 21, -16, + 79, -16, -32, -111, 10, -25, 97, 57, + -59, -69, -83, -36, -24, -90, 14, 76, + -23, -16, 2, 26, 26, -50, 23, 120, + 44, 32, -12, -29, -11, -45, 8, 41, + -28, 107, -32, -40, -92, -8, -76, -52, + 76, 79, 93, 16, 86, 46, -14, 53, + -65, 53, 92, 63, 44, -30, 7, 5, + -4, 20, 22, 14, 8, 9, -58, -99, + -30, -119, 46, 2, -23, 34, 51, -63, + 45, -84, -8, 36, -59, -2, -98, -6, + 29, 121, -26, -1, -20, 39, 25, -66, + -56, 8, -40, -7, 25, -79, 90, 72, + -55, -12, -20, -123, -39, -25, -65, -12, + 47, 30, 33, 55, 18, 19, -22, 35, + 86, 65, 11, 119, -32, -47, -107, 80, + -50, -43, 44, -1, -14, 49, 17, 33, + 13, 84, 64, 125, 97, 17, 20, 20, + -62, -7, -13, -16, -8, 18, -36, -89, + -13, 98, 21, 108, -35, 51, 44, -127, + -31, 40, -83, 50, -122, 16, -82, -105, + -58, 65, 76, -31, 61, 40, 28, -92, + 43, -59, 63, -33, -33, 24, -37, -22, + 7, 51, 54, 29, 12, 40, 68, -44, + 79, 52, -3, 10, -62, 35, -26, 70, + 40, 61, 83, -73, 97, 16, 33, 49, + 0, -83, -15, -101, 67, -26, 108, 113, + 3, 93, -15, 83, 27, -67, 71, 119, + -48, -31, -28, 4, 4, -15, -46, 13, + -17, -70, 78, 49, 36, 21, -72, -45, + -1, -31, -52, 1, 61, -17, -18, -71, + 69, -65, -11, 104, -25, 52, 7, -70, + -14, -8, -16, -13, 72, 37, -91, -80, + 31, 7, -33, -59, -12, -20, 26, 48, + 69, -16, -87, -13, -11, -14, -14, 58, + -2, -3, -119, -17, 31, -17, -23, 75, + 62, 43, -97, -42, -23, -9, -5, -11, + -43, 21, 37, -37, 6, -3, 14, 8, + 18, -98, 37, -14, -50, -36, 31, 123, + 7, 19, 95, 17, 22, 15, -7, 59, + 62, 18, -93, 10, 23, 42, -26, -23, + -32, -28, 10, 42, 19, 38, 8, 31, + -109, -5, 81, -25, -40, 35, -96, -117, + -12, -4, -15, 
13, 84, -70, -4, -93, + 24, 28, -66, -45, -70, -118, -33, 116, + -6, 7, -54, 2, 11, 85, 34, -4, + 67, 67, -96, -13, 3, 11, 50, 62, + -61, -28, 7, -17, 11, 22, -61, 62, + 45, 42, 50, -26, -43, 114, 69, 121, + 53, 127, 15, -3, 50, 30, 70, 26, + 25, -15, 35, -72, -48, -11, 15, 29, + 42, -40, 12, -38, -3, 16, -81, 65, + 53, 84, -48, -66, 11, 23, -22, 77, + 21, 115, -87, -35, -50, -89, -121, 67, + 18, 8, 40, 66, -3, 11, -24, -100, + 70, 35, 16, 16, -31, -62, 71, 64, + 74, -124, -15, -26, -17, -26, -55, 71, + -22, 20, -35, 24, -48, 40, 56, 27, + -35, -14, -8, -34, 113, 41, 58, -8, + -2, -114, -38, -73, -28, -57, 70, 3, + -22, 64, 31, 29, -46, -43, 88, 11, + -67, -6, 71, -27, -24, -38, -24, -80, + -21, 36, -32, -84, 37, 55, -22, 24, + -54, 11, -94, -28, 8, -30, -46, 39, + 25, 0, 6, 93, 34, 8, 3, 26, + -76, -69, 4, -71, 57, -65, -90, -40, + -43, -56, -16, -53, -11, -11, 7, 45, + -16, 7, 11, 39, -38, -9, -81, -86, + -50, -16, -39, -18, -11, -10, -69, -44, + -58, -49, 58, -63, 2, 64, 5, -81, + -36, 42, 56, 24, 11, 2, 36, 92, + 78, 33, -2, -98, -55, 46, 14, 14, + 42, -14, -12, -6, -41, -69, 88, -122, + 36, 34, 12, -15, 18, -98, 58, -28, + 44, 4, -107, 85, 46, 27, 8, 58, + 66, -70, -8, 21, -110, -9, 89, -83, + 55, 59, -110, 51, 44, 11, 16, 108, + 43, -33, -18, -34, 2, -3, 28, -50, + 53, 14, 44, 6, -19, 23, 41, 75, + 72, -18, 12, -51, 34, -86, 28, 30, + -103, 74, 4, -43, 49, 10, -31, -10, + -17, -65, -82, -92, -17, 25, 1, -9, + 30, 81, 15, 9, 72, 52, 27, 19, + 61, 14, -64, 62, 5, -1, -16, -21, + -25, -59, 28, -7, 28, -35, -28, -17, + -16, -46, -25, -25, -79, -33, -112, 21, + 41, 13, -6, 53, 7, 17, -54, -39, + -91, -94, 70, -128, -66, 28, -7, -93, + -120, 54, -47, 35, -111, -58, 54, -5, + -48, 11, -18, -104, -70, -78, 54, -7, + 17, -8, -96, 72, -119, -125, 28, -107, + 14, 16, -38, -48, 63, -21, 74, -45, + -65, -94, 118, 39, -100, 39, -41, 13, + 19, -122, -55, 10, 23, 33, 20, -68, + 24, -41, -113, 12, 95, 26, 0, -17, + -42, -66, -11, -107, -86, 76, 29, 49, + -108, 112, -28, 124, 
-55, -96, -23, 34, + 91, -30, 61, -94, 102, -18, 19, -77, + -60, 13, -125, -28, 7, -34, -91, 22, + -12, -50, 17, -8, 2, 7, -18, -62, + 51, -37, -55, 19, 35, -30, 8, 46, + -42, -56, -128, 61, -35, -16, -81, -8, + -30, -59, -22, -111, 6, -45, -76, 29, + 16, -72, -34, -28, 22, -5, -116, 3, + 18, -9, -56, -48, 18, 56, -97, -21, + -121, -116, -6, -24, -62, -26, -21, -69, + -52, -48, -22, 23, 72, -35, 68, 39, + 47, 37, -18, 0, -76, 26, 114, -10, + 25, 5, -12, 70, 17, -105, -25, -112, + 5, 24, -8, 7, -38, -119, -21, 34, + 125, -125, -2, 5, -6, 81, 40, 60, + 54, -104, 22, -42, -2, 120, -33, 16, + 38, -30, -23, -83, -60, -1, 86, 92, + -59, -10, -6, -11, -68, -96, -53, -3, + -9, -17, -17, 109, 34, -15, -121, -40, + 5, 89, -38, -26, 38, -47, -80, -40, + -116, -34, -30, -76, -35, -39, -118, 27, + 0, -67, 76, 0, -5, -2, 72, -15, + -1, -94, -75, -62, 37, -6, -91, 59, + -15, -8, -124, -25, -46, 17, -22, 28, + 5, -50, 21, 63, -7, 12, 67, 33, + 16, -35, -73, -120, -30, -14, -113, -77, + 45, 84, -16, -50, -21, 44, -97, 6, + -61, -40, 29, -104, 28, 4, -17, 50, + 14, 44, 13, -61, -34, -28, -8, 105, + 67, 0, 31, -113, -121, -65, -21, 24, + 57, 12, -16, 9, -5, -1, 38, -61, + -30, 60, -7, -55, 7, 32, 39, -33, + 12, 30, -60, 13, -75, 3, 55, -40, + -16, 20, -86, 68, 24, -57, 72, 24, + -8, 62, -126, -42, 54, 122, 125, 64, + 25, -38, -45, -78, -33, -109, 57, 15, + -79, -1, 73, 7, -20, -42, -67, -13, + -24, -69, 38, -38, -22, -115, 70, 15, + -104, 67, -35, -114, 27, 31, -80, -5, + 27, -8, -11, -58, 39, -29, 1, -18, + -4, 23, -12, 46, 33, 32, 21, -14, + 8, -13, 43, -8, 25, -37, 55, -30, + -37, -39, -2, -117, -12, -14, -3, -10, + 30, -27, 9, 7, -32, -25, -101, -115, + -40, -8, -5, -38, 34, 44, 45, -62, + 45, -25, 100, -29, 52, 24, -32, 66, + 31, 112, 72, 12, 121, -57, 21, 125, + 55, 36, -33, 22, 2, 52, 40, 25, + -1, 26, 6, -23, -18, -16, 11, 25, + 17, -62, 6, -60, -25, 65, 50, 114, + 62, -44, -19, 43, 70, 76, 40, -8, + 47, -31, -64, 17, -34, 42, 8, 20, + -19, -7, -59, 54, 26, -31, 120, 18, + 
-55, -3, 4, -2, 58, -113, 10, -41, + -6, -13, 2, 9, 28, -20, -34, 74, + -44, 45, 49, 1, -9, 72, -13, -65, + -50, 17, 22, 32, -10, 87, -21, -43, + -21, -24, 5, -83, 20, 29, -54, 14, + -20, -48, 94, 125, -17, 16, -15, 24, + 15, 17, 26, -34, 52, -4, 18, -59, + -1, 4, 14, 62, 17, -54, -41, 29, + -77, -15, 31, 29, 13, 5, 27, -35, + 33, 8, 48, -21, 30, -5, -22, -67, + -118, 18, 0, 69, 26, 2, -120, 65, + 27, -28, 57, 41, 48, 7, -52, 14, + 6, -11, 54, -17, -40, -28, 82, -14, + -27, -12, 2, 16, 30, -113, 13, -48, + -37, 61, 72, 2, 8, -30, -30, 39, + -78, -96, 42, -80, -16, 45, -28, 57, + 24, -123, -47, -7, 32, -25, 6, -76, + 50, 97, -89, -40, -49, 89, 70, 114, + -29, -14, -43, -127, 83, -100, -79, -16, + -19, 78, -27, 46, -30, -65, 37, 46, + 34, -12, -41, -29, -17, -68, 53, 99, + 59, 51, 69, -11, 32, -5, -53, 33, + -14, 4, 55, -68, 23, 26, -63, -123, + -31, -39, -67, 58, -6, 23, -3, 25, + 41, 12, -31, -11, -55, -63, -90, 8, + -11, 27, -31, -127, 15, 29, 28, -74, + -46, 44, -1, -122, -46, -44, -113, 40, + 11, 23, -44, 4, 6, -26, -118, 30, + -70, 42, 19, -29, 45, -76, 34, 11, + -94, 1, 125, -26, 11, 35, 39, 48, + -20, -83, 48, -36, -23, -53, 11, -53, + -38, 57, -34, 33, -59, -2, 51, 121, + -3, -29, 30, -48, 51, 20, 36, 2, + -22, -12, 42, -7, -8, -52, 20, -66, + 61, -64, -53, -21, -83, 9, -20, -39, + 61, 6, -75, -13, -12, 42, 90, 48, + -17, 47, -3, -97, 4, -87, -7, -39, + -19, -14, -64, 70, 27, 86, 30, -23, + -23, 110, -21, -81, -38, 63, 20, 44, + 10, -1, -106, -26, -122, -45, -25, -61, + -7, -45, 3, -3, -8, 4, 1, -38, + -14, -41, -31, -10, 2, 0, -54, -37, + 96, 25, -52, 4, -2, 25, -2, 16, + 21, -15, 39, -29, 58, -77, 62, 39, + -53, -66, -14, -78, 31, 47, 5, -43, + 12, 38, 45, 33, -33, 53, 31, -14, + 18, -28, 40, 36, -32, 68, -77, 78, + -31, 10, 124, 23, 26, 61, -46, 80, + 17, -17, -11, -64, -27, 72, -54, 55, + -1, -8, -102, 33, 9, 38, 39, 122, + -36, -21, 51, -27, -16, -12, 35, -7, + -13, 0, -117, 49, 0, -53, 4, -91, + -61, 5, -30, -102, -43, 17, 13, -48, + 44, -40, 27, 84, 
-19, -13, 72, 101, + 10, 12, -16, 15, -37, 18, -37, 1, + 22, -79, -55, -42, 6, 123, -8, -31, + -19, 35, -31, -74, -35, 30, -21, 30, + -76, -8, -57, 11, -9, 29, -46, 29, + -30, -15, -1, -43, 13, -9, -3, -72, + -3, 36, -62, -91, -5, 32, 7, 10, + 0, -46, -44, -8, 23, 39, -3, 15, + 13, 19, -107, 7, -45, 11, 30, -72, + -23, -25, -93, -116, 19, -1, -36, -25, + -4, -59, 18, -22, -88, 0, -20, 12, + 82, 10, 44, 89, 64, 100, -39, 101, + 60, 70, 93, 108, 121, 120, -20, 31, + 20, 66, 123, -11, -118, 4, 82, 32, + -19, 24, 4, 6, 28, -9, 57, 76, + -15, 79, 86, 123, 79, -94, 23, 73, + 90, -54, -43, 4, 12, 12, 121, -49, + 3, -45, -6, 33, -10, 74, -30, -56, + 35, 54, -92, 29, 10, 82, 84, 95, + 112, 36, -54, -120, -83, 78, -1, 36, + 54, 57, 3, 26, -48, 1, -46, -3, + -22, 48, 45, -38, -51, -19, 33, -14, + -88, 61, -39, 17, -4, -56, -100, 19, + -40, -72, 7, -33, -6, 21, -64, -122, + -40, 14, -119, -102, -69, 14, -19, -8, + 60, 23, -128, -37, -28, -40, -2, 13, + -4, -22, -15, 15, -2, -35, 42, -118, + -4, -77, -1, -127, -35, -19, -68, -58, + -72, -4, 8, -1, -15, -4, -125, 6, + -108, -9, 56, -121, -6, 13, 0, 1, + -118, 119, -13, 42, -52, -72, -72, 52, + -61, -18, -37, 63, -112, 23, 31, -119, + 34, 61, 46, 127, -68, -120, 19, -21, + -12, 41, 25, -112, 21, 92, 83, 78, + -63, -20, -61, 8, -24, 27, -19, 76, + 31, -4, -22, 2, 8, 88, 122, -27, + -72, -30, -52, -42, 25, -44, -67, 33, + -65, 28, -64, -36, -127, -5, 119, 23, + -112, -8, 84, 51, 77, -32, 93, 21, + -3, 9, 10, -23, -109, 40, -99, -9, + -10, 32, -21, -1, 1, -31, -54, 47, + -49, -5, -83, -61, 4, 1, -2, 7, + 45, -85, -78, -9, 122, -24, 26, 57, + -10, 18, -14, -4, 3, -97, -7, -17, + -4, -24, 0, -69, 40, 67, -63, 20, + 51, 6, -36, 21, 53, -57, -41, -103, + -34, 29, -88, 2, 49, 56, 31, -37, + -26, -8, -22, 28, 18, -44, 0, -54, + 61, 52, -97, 56, 7, 90, -17, 97, + 1, -116, -86, -80, -64, -18, -26, -47, + 105, -111, -16, 49, 23, 116, 127, 1, + -11, 8, -2, -31, -51, 59, 21, 78, + 90, 61, -4, -8, -82, 117, -34, 102, + 8, -63, 96, -41, 25, 35, 
-15, -18, + -13, 79, -33, -34, -75, -103, -82, -41, + 37, -56, 13, 54, -84, -56, 88, 7, + -66, -74, -3, -23, -118, -19, -34, 7, + -44, -8, 26, -37, -9, 52, -58, 27, + 54, -128, -15, -5, -126, 27, 61, 50, + -15, 72, -37, -35, 17, -125, -16, 27, + -34, -41, 9, -77, -1, 23, -91, -66, + 38, -38, 41, -90, 67, -18, 16, 58, + 23, -22, -11, 25, -10, 13, -71, 90, + -13, 34, -41, 26, -124, 40, -42, -15, + -29, 33, -8, -41, -84, -17, 78, -73, + 120, -31, 69, 77, 54, 96, 7, -25, + 98, 48, 120, 78, 65, 59, 59, 124, + 69, 41, 33, -93, 32, 51, 44, -3, + -127, -90, -25, -26, 37, 27, -14, 119, + -46, 84, 4, -27, -3, -53, -12, 49, + 86, 44, -15, 69, 15, -95, -18, 99, + 27, -17, 1, -35, -11, 27, 15, -30, + -78, -3, 41, 7, 127, -1, 102, 24, + 45, 39, -37, -50, 11, 0, -16, 5, + 23, -18, 63, 89, 63, 34, 47, -126, + -8, 77, 21, -121, -51, -9, 29, 42, + 43, 60, -107, 24, -35, 40, -36, 42, + -35, -62, -23, -19, 43, 2, -52, 12, + -70, 17, -122, -23, -54, 45, 19, 31, + 40, -60, -9, 8, -38, 3, -62, -38, + 15, 29, -15, 45, 18, -42, 66, 17, + 48, -124, 39, -53, -52, 36, -16, -10, + 18, 90, -29, 2, 26, 15, -11, -22, + 65, 18, 53, 89, -88, 122, -86, 82, + -63, -16, 111, 40, 55, 61, 22, 126, + 17, -45, -58, 23, -30, 61, -98, 48, + -35, -72, -7, -52, 25, -89, 80, -98, + 15, -85, 78, 13, 6, -11, 52, -2, + 29, -3, -3, 7, -37, 88, 61, -98, + 8, -35, 10, -73, 11, 63, 27, -38, + 30, -46, 2, 45, 20, 7, 45, 74, + 67, 78, 27, -28, 33, 53, -119, -42, + 32, 56, 34, -67, 49, 3, -36, 11, + -62, 122, 6, -47, -3, -17, -40, 35, + -48, 98, -67, -31, -35, 11, -64, 42, + -18, -34, 33, -48, 26, -28, -6, -68, + 33, 2, -70, -78, -27, 45, -20, 6, + 13, -43, -35, -23, 4, 25, -49, 18, + 8, 1, -15, -26, -41, 13, -16, -28, + -8, -24, 23, -87, -22, 6, -26, 33, + -16, -35, 19, -5, -27, -7, -74, 5, + -81, 26, 15, 119, -15, 35, -111, -64, + -70, -53, 34, -9, -30, -14, 20, -51, + 57, 15, -13, 57, -74, -7, -39, -36, + -3, -30, 13, -32, 8, -20, 47, -61, + -63, -53, 33, 15, 32, 24, 81, -39, + -42, -43, 46, 29, 26, 6, -30, -6, + 42, 
11, 23, -31, -22, 18, 18, -53, + 28, 30, -18, -49, 53, -57, -13, 27, + 31, -1, -7, -21, -6, -100, 49, -69, + 120, -53, 10, 59, 14, -24, -27, 80, + -63, -28, -26, -9, -13, -65, 8, -28, + -13, 1, 66, -4, -20, -5, 25, -41, + -18, 37, -16, -17, 9, -52, -32, 92, + -26, -8, 9, -42, 26, 2, -15, 81, + -13, -20, -7, -14, 75, -36, 44, 10, + 6, 21, -38, -74, 15, 12, 58, -34, + -22, 69, -41, -19, -60, 42, -57, -12, + 9, 19, 10, -18, 3, -42, -23, 32, + 9, 83, 38, 76, -111, 76, -98, 88, + -114, -63, 0, -19, 41, 37, -33, 36, + -7, 8, 53, 74, 51, 12, -16, 6, + -15, -127, -5, 87, -51, 27, 72, 12, + -104, -7, 53, 124, -21, -103, 122, 14, + 56, -21, 52, -64, -23, 88, 122, -82, + 0, 13, -54, -52, 31, 93, 122, -49, + 15, -71, 84, 41, -53, -124, -43, 88, + 70, 42, -5, 120, 77, -23, -94, -57, + 51, -79, 58, 0, 8, -21, -61, -19, + -37, 126, -11, -53, 20, -10, -68, 79, + -124, -30, -59, -25, -8, -29, -125, 26, + -36, -28, -64, -8, -5, 100, 70, -20, + 7, -126, -54, 4, -1, 45, -18, 73, + -19, -123, -64, 11, 20, 22, 1, 1, + 3, -12, -41, 91, -69, -75, 16, 46, + -29, -66, -31, -79, -85, -10, 41, -10, + -83, -121, -10, -17, -90, 6, -128, -51, + 76, 40, -19, 81, -123, 104, -17, 88, + 19, 30, 92, 58, 29, 95, 14, 84, + 109, 12, 20, -96, -16, -20, -45, -120, + -13, -98, -126, 66, -104, 6, 106, 91, + -1, 59, -50, 5, -24, -66, 32, 69, + 68, 29, 23, -24, 63, -17, 55, -57, + 5, 20, 66, -9, -25, 74, -16, -115, + 71, -6, 4, 63, 34, -45, 30, 58, + 37, 26, -22, -26, -32, 6, 9, 83, + -30, -8, -2, 75, 43, 71, -62, -104, + 44, 35, -75, -85, 21, -51, -1, 74, + -97, 26, -67, 28, -34, -75, -62, -4, + -55, -34, -24, 12, 16, -20, -4, -54, + -62, -7, 20, -126, -1, 24, 4, 10, + 32, -44, -76, -13, -49, -28, 6, -6, + 34, 1, 17, 84, -22, -2, -40, 30, + -34, -35, -91, -57, 30, 20, -15, 14, + -27, 25, 24, 5, 13, 7, -112, -3, + -30, 38, -33, 3, 25, 111, -20, -48, + -23, 58, 5, -30, 29, 122, -72, 45, + 110, -16, 47, -21, -16, 0, 34, 13, + 40, -19, -14, 36, 9, 24, -6, -59, + 39, 121, 123, -29, 25, 38, 52, 53, + 31, -107, 
-89, 12, -29, -9, 59, -3, + -13, 41, 67, 0, 64, 46, -23, 17, + -52, 4, 34, -41, -47, 54, -19, 125, + -13, -17, -56, -6, -1, 4, -28, -59, + 72, 23, 39, 78, -114, 31, 5, -35, + -39, 80, -30, 19, -117, -45, -74, 53, + -26, 120, 22, -99, -24, 49, -60, -37, + -15, 24, -29, -1, -2, -19, 4, 34, + -8, -47, -15, 1, -78, 68, -33, 18, + -11, -14, -6, -29, 10, -57, 2, -22, + 37, 1, 52, -118, -22, -81, 25, 4, + 35, -25, 16, -22, -97, -12, -73, 26, + 13, 11, -36, -48, -63, -24, -16, -31, + 19, -67, -11, 127, -13, 9, -31, 110, + 83, -107, 25, 33, 63, 122, -30, 18, + -61, -128, -49, -92, 10, -103, -37, 1, + -21, -91, 80, 61, 41, -84, -24, 112, + -15, -38, 2, -3, 7, 22, 68, -67, + 44, -15, -75, -13, -71, 7, 52, -118, + -88, 27, -34, -69, 30, 4, 88, -91, + 4, -5, 13, -14, -32, 9, 47, 93, + -27, 98, -5, 40, -65, 38, -21, 35, + 62, -40, 10, 14, 4, 13, 17, -50, + -23, 12, -90, -13, 35, 63, 23, 35, + -128, 3, -103, 14, -53, -72, -31, 13, + -42, -63, 17, -58, 6, 25, -24, -116, + -48, -20, -41, -39, 80, -47, -54, -27, + 38, -50, -116, -38, -76, 18, -39, -38, + 12, 15, -75, 12, -62, 10, 33, -23, + -21, -38, -95, -118, -71, -11, -25, 4, + -52, -118, -2, -11, -117, -38, -119, 12, + -24, -53, 43, 8, 64, 21, -37, 53, + 27, -54, 40, -83, 55, 90, -16, 48, + 39, -35, 102, -15, -63, 94, -6, 45, + -23, -64, -123, 43, -29, 7, -23, 118, + -58, -46, 23, -73, 37, -53, -8, 7, + -9, -24, -33, -48, 31, 26, 28, 52, + -48, 43, 33, -22, 56, 77, -26, -85, + -66, 42, 0, -49, 12, -18, 26, 56, + -13, 13, -14, 7, -29, -4, -89, 40, + 25, 45, -15, 35, -7, 42, -7, -59, + -10, 30, -92, -29, 3, -60, 1, 12, + -6, 64, 0, 57, -99, 24, -46, 13, + 1, 56, -21, -11, 0, -41, -15, -28, + -36, 14, 17, -42, -57, 49, -9, -11, + -23, 16, -103, 18, -28, 1, 13, -86, + 4, -7, -22, -6, 5, -11, 41, -32, + 55, -45, 1, -125, -39, -20, -12, 0, + -20, 66, -17, -17, 3, 33, 24, 3, + 55, -100, -103, 49, -127, 59, 74, -10, + -93, -60, 45, -27, -23, 13, 107, 38, + -75, -31, 70, -10, 12, -104, -68, -6, + 31, 82, 17, 74, 56, 113, 72, 42, + -52, 
-4, 75, 40, -117, -16, 15, 42, + 19, 6, 33, -41, 92, 60, -13, 28, + -12, -17, -11, -90, -118, 35, 21, -63, + -32, -48, -50, 22, -25, -20, 41, 28, + 22, 24, 8, -7, 14, 30, -20, 5, + 59, -28, -21, 2, -41, 65, 56, -47, + -94, -5, 19, -82, -60, -16, -22, -73, + 16, 65, -35, 49, -34, 26, -20, 51, + -28, -2, -34, 81, 8, -53, 2, 50, + -43, 0, -48, -78, -5, -7, -37, 26, + 98, -22, 7, -10, 37, 0, -23, 118, + -14, -33, -11, 23, 3, -64, 3, 41, + 102, -56, -101, 34, 1, 82, -22, -66, + -1, 7, 58, 3, -27, -57, -7, -7, + -72, 0, 84, 17, 14, 126, -87, 35, + -3, 70, 126, -43, 50, 90, 52, 10, + 102, -35, 23, -40, -91, 15, 26, 6, + 102, -42, 16, -84, -9, -40, 63, 13, + 27, -14, 98, 120, 59, -123, 21, 48, + 121, -61, 32, 49, -23, 13, 45, 45, + 58, -76, -14, -35, -6, 65, 32, -57, + 99, 14, -81, 34, 34, 46, 70, 9, + 31, -23, -48, -14, 14, -9, -62, -48, + -18, 103, -45, 9, -8, 29, 46, -17, + 45, 41, -58, -53, 1, -35, 31, -80, + -26, 30, 88, -39, -76, 40, -9, 24, + 10, -26, 31, -48, -17, 16, -10, -126, + -57, -22, -127, -20, -50, 62, 12, 65, + 43, -23, -63, 6, -64, 23, -38, -11, + -91, -43, -6, 5, -21, 40, 49, -120, + -58, 7, 20, -13, 44, -27, 61, -7, + -21, -83, 44, -28, 9, -77, -45, -15, + -97, -80, -25, -29, 89, -43, -51, -7, + -3, -40, 89, -67, 118, 3, -38, 5, + 4, 12, 6, 79, -28, -6, 27, -17, + -23, 74, 33, 29, 22, -97, -64, -119, + 33, 10, -115, 124, 33, 59, -41, 49, + 34, -77, 3, -31, -15, 67, 31, 47, + 89, -66, 33, -40, 33, -5, -46, -44, + -25, 109, -93, 50, -119, 26, 122, 85, + 10, -1, -29, -124, 61, 21, -67, -41, + -47, -55, -27, 123, -30, 20, -88, 78, + -49, -4, -88, -17, -10, -7, 86, -48, + -30, 82, 46, 42, 45, -23, -112, -32, + 1, -35, 0, -70, 49, 5, -65, -128, + 36, 86, 14, 127, 9, -24, -16, 6, + -42, 36, -127, -91, 24, -112, -48, 32, + -48, 17, 50, -123, -4, 68, -35, 10, + 105, 5, -2, -126, -34, 57, -123, 14, + 25, -27, 1, -85, 3, -28, -123, -51, + 8, 15, -60, 9, 28, -71, -67, 88, + 24, 65, 123, -28, 20, 65, 79, -45, + 118, 63, -88, 83, -98, 91, 11, -31, + 118, -109, 36, 
53, -68, 11, 22, -76, + -38, -14, -85, 116, 109, -28, -34, 47, + 41, -9, -27, -27, 4, 17, 2, 73, + 86, -68, 56, 13, 40, -24, -23, -4, + -45, 80, -84, 28, 8, -32, 116, 87, + -19, -7, 10, 42, -43, 104, 34, 13, + 39, 37, 13, 80, -1, -20, 51, 27, + -30, 79, -45, -5, 10, 25, 91, 24, + -43, 22, 99, 100, 32, 8, 60, 100, + 48, -10, 5, 15, 15, 26, 6, -51, + 40, 19, 45, 127, -11, -46, 31, -26, + -50, 54, -9, 21, 4, -126, -37, -16, + -66, 23, 17, -28, -10, 55, -31, 23, + 37, 22, 13, -10, 86, -17, 6, 51, + 16, 44, -5, -31, 42, 4, -73, -44, + 14, -5, -2, 1, 14, 7, 37, 11, + 1, 13, 11, 5, 4, -37, 10, 19, + 5, 3, -15, 15, -26, 17, -5, -1, + 30, 32, 8, -7, -5, -12, -12, 11, + 6, -26, -19, -9, 8, 7, -10, 19, + 11, -3, 13, 14, 6, 7, -13, -5, + 39, -17, -6, 33, -8, -16, 31, -35, + 1, -32, -62, 16, 11, -6, 37, -5, + -16, -10, -47, -10, 12, -9, 26, 4, + 49, 1, -18, 26, 10, -5, -23, 17, + -21, -35, -70, 58, 22, 5, -62, 17, + -9, -15, 25, 7, 9, 14, 3, 17, + 0, -9, 9, 8, -5, 22, 4, -1, + 7, -7, 32, 24, 10, -39, 3, -27, + 33, 3, 4, -3, -1, -9, -3, -1, + -27, -2, 4, -3, 4, 0, 1, 5, + -11, 11, 15, 10, 6, 0, -8, -13, + 3, -1, 12, 9, 7, -6, 5, -15, + 44, 18, -14, -17, -16, -15, -26, 25, + -7, 3, 9, 15, 0, -11, 10, 7, + 2, -14, -43, 8, 45, 18, 1, -5, + 1, 18, -12, 1, -25, -18, 1, -63, + 4, 32, 8, -52, 15, -54, -27, 12, + -27, -35, -53, 18, -10, -14, 34, 4, + -23, -10, 51, 54, 5, 20, -19, 62, + -66, -38, 27, -18, 10, 43, 40, 67, + -9, -29, -34, 54, 1, 18, -17, 61, + 26, 5, 113, 22, 1, 6, 63, -29, + 47, 118, -41, -12, -4, 7, -41, -27, + -3, 14, 1, -20, 38, -15, 10, 12, + -35, -38, -33, -9, 10, -56, -38, -9, + -9, -55, 26, 26, -15, 5, 12, -43, + 30, 8, -68, -11, 14, -33, -2, 23, + 8, -8, 27, 3, 22, -15, -23, 14, + 22, -44, 12, -52, 36, 1, 3, -40, + -55, 16, -40, 7, 27, 27, -45, 22, + -17, 26, -23, -26, -2, 30, -15, 19, + 4, -1, 30, -24, -3, -36, -33, 13, + 13, -21, -1, 28, 21, 32, 19, -7, + -13, 16, -9, -23, -33, -66, -13, -44, + 15, -18, 14, 9, -23, -68, -7, 55, + -12, -98, -32, -4, -6, -13, 11, 
33, + -30, -5, -27, -40, 52, 106, -40, -13, + 19, -35, -17, -40, -24, 9, -90, 54, + -1, -58, 15, 101, 18, -15, -105, 90, + 39, 23, -7, 70, 8, -24, -89, 19, + -21, -8, 18, -74, 112, -86, -14, 47, + -81, 43, -50, 2, 13, -9, -38, 23, + -12, -64, -10, 13, 23, 29, 106, -25, + -22, -115, -43, -5, -2, 5, 7, 7, + 33, 8, -60, 10, -48, -47, 6, 50, + 43, -3, 15, -17, 36, 1, -17, -14, + 7, -22, -24, -110, 21, 3, 64, 99, + -31, 3, -10, -21, -7, 20, -61, -22, + -50, 31, 35, 0, 4, -8, -35, -26, + 13, 56, 32, -12, -10, -7, -45, -5, + 15, -17, 78, -11, 51, 15, -26, -16, + -40, -20, 31, 0, 12, -52, 25, -33, + 22, -26, -12, 81, -17, -19, -12, 3, + 60, -76, 11, -5, 24, 9, 34, 2, + 54, 12, -68, -18, 10, -33, 50, -2, + -52, -43, 10, -18, -6, -20, 69, -4, + 15, -59, -21, 5, -18, -45, 32, 21, + 87, 19, -24, -85, 26, -56, 64, 27, + -14, -35, 13, -18, 45, -33, -77, -36, + -123, 55, 13, 61, -15, 6, -41, -21, + 86, -19, 72, -10, -12, -27, -114, -25, + -8, -31, 8, -4, 17, -27, 31, -5, + -1, -10, 52, 55, -70, -55, 23, 52, + 45, 32, -57, -30, 6, -20, -1, 20, + -48, -10, 47, -21, 13, -27, 50, -25, + 46, -20, 7, 12, -3, -39, -36, 22, + -42, 24, 27, -12, -13, 3, -28, -52, + 14, 48, -13, 30, -6, -4, -37, 13, + -12, 28, -9, 48, -30, -25, 0, -6, + 49, -9, 39, 10, 66, -32, -43, 85, + 5, -4, -9, 76, 16, 46, -18, -122, + -59, 48, -35, -26, 20, 13, -23, -41, + -26, 51, 120, -54, -38, -53, -1, 126, + 3, 8, 84, -49, -40, 33, 88, 113, + 22, -54, 0, 44, 10, -76, 80, -75, + 66, -58, 10, 109, -24, -17, 49, 11, + -32, 1, 15, 55, 103, 56, 10, 48, + -28, 83, 6, 28, 19, 5, -46, 23, + 89, 74, 32, 4, -24, 23, 0, 26, + 11, 38, 42, -6, -2, -8, 5, -8, + -7, -9, -1, 40, 15, -59, 8, 43, + -36, 24, -75, 43, -37, 0, -43, -27, + -15, -78, -11, 17, 9, -12, -41, -36, + -87, 7, -36, 1, 15, -29, 27, -60, + 106, 77, 4, 39, 20, -5, 123, -45, + -2, -28, 8, 6, -32, 20, -8, 3, + -27, 13, 0, -75, 6, -52, 8, -2, + 105, 89, 6, 22, 29, -18, 20, -60, + 39, 61, 39, 14, 2, 26, 15, -21, + 14, 95, 57, -7, 29, -43, 10, -70, + -23, 112, -14, 
-24, -41, 2, -25, -121, + 30, 111, -19, 23, 24, 48, 21, 85, + -3, -31, 6, 16, -3, -59, -120, 44, + 33, -11, 7, -126, -128, 5, 10, 19, + 73, -36, 24, -16, -18, 57, -103, -126, + -16, 67, 39, -40, 120, 61, 2, -44, + -1, 45, -14, 56, 12, 30, 46, -6, + 13, -8, 30, 25, -54, -6, -8, -7, + -38, -23, -16, 16, 56, -19, -13, 36, + -70, 9, -22, -8, -67, -3, 16, 6, + -22, 87, -6, 23, 0, 3, -49, 71, + 21, 41, 55, 14, -25, 8, -8, -8, + 127, 62, -75, -25, -2, 4, -46, 49, + -123, -58, -92, -127, -23, 22, -125, -32, + 34, 125, 13, 93, 53, -47, 122, -80, + 50, 119, 1, 40, -127, -118, 33, 124, + 22, -93, 9, -38, 49, 58, -52, -117, + 41, 120, -120, 44, -74, -7, 9, -26, + -107, 7, 21, 13, 72, -7, 116, 45, + -82, 3, 60, 104, -127, -81, -122, -69, + -105, -28, -6, -16, -12, 97, -113, 119, + -48, 127, 124, 124, -126, -6, 78, -72, + 72, 38, 127, 34, 116, 33, 34, 127, + 45, -17, 28, 127, 65, -124, -29, 23, + -128, 50, -18, 110, 76, -70, -125, -20, + -65, -49, -44, 20, 20, -12, 93, -31, + -105, -120, 71, 113, 51, -125, -56, -46, + -70, 60, 68, -54, -126, 84, -121, -39, + -66, -125, 120, -13, 124, -24, 67, -8, + 120, -14, -93, -52, -78, 11, -27, -52, + 65, -128, 11, 17, 6, -5, 32, 120, + -121, 0, 45, -3, 93, 104, 108, -41, + -7, 46, 19, -10, 93, -27, -91, -112, + -128, -125, 45, 21, -125, 1, 64, 38, + -127, 97, -5, -37, 94, 52, 24, 122, + -125, 23, 98, 63, -37, 85, -40, -63, + -1, 93, 127, -8, 60, -56, 94, -16, + 127, -96, 17, 58, -6, 110, 71, 66, + -4, -95, 12, 81, 105, 19, 83, -84, + 125, -66, -46, -25, -58, -76, -125, 39, + 127, -57, -75, -56, -64, 56, -123, 45, + -53, -83, -7, -111, 44, -126, 34, 97, + -65, 104, -67, 114, 11, 127, -122, 120, + -64, -8, 6, -106, 24, -55, 104, -4, + -52, -38, 21, 126, -119, -95, -8, -127, + 122, -23, 126, -60, -54, 42, 36, 120, + -128, -46, -127, 55, -40, -38, 70, -8, + -125, -127, -124, -128, -14, -56, 115, 60, + 60, -123, -37, -128, 48, 104, 125, -88, + -67, 38, 23, 23, 106, -83, 121, -30, + -44, 126, 17, -42, -23, 127, -1, -73, + -76, -124, 104, -5, -58, 
86, -112, 23, + -57, -100, -2, 26, 21, 88, 75, -127, + 71, 124, -27, 94, 25, 120, -125, -120, + -113, 126, -127, 31, 127, 18, -124, 74, + -125, 15, 23, 43, 103, 123, 74, -47, + 96, -102, -14, 127, -20, 124, -82, -121, + 106, -112, 5, 44, 99, -17, 122, 69, + -46, 74, 29, -56, -127, -53, 51, 93, + 79, -85, 87, -126, -100, -29, 116, -121, + 127, 126, -21, 119, 3, -77, 17, 120, + 52, 127, 106, -4, -112, -30, 124, -34, + 2, -103, 125, 127, -68, -109, -36, 3, + 68, -119, 127, 118, 126, -99, -89, 38, + -53, -121, 67, 95, -44, -39, 82, 127, + 75, -127, 91, -12, -67, -91, 37, 4, + 40, -122, 65, -84, -8, 30, 46, -1, + 55, -55, 106, 12, -61, 47, -11, -102, + 54, 96, 18, -4, 75, -93, 76, 73, + 119, -24, 108, -9, 124, -127, 116, -43, + 110, 48, 15, -32, 33, -95, 23, -9, + 78, 113, 123, 127, -104, 40, -29, -57, + 74, -114, 121, -41, -113, 127, 6, 123, + 83, 81, -39, -27, -30, -120, 58, 14, + 16, 2, 6, 13, -41, 120, -112, 11, + 121, 124, 58, -126, 77, 32, -124, -32, + -13, 39, -92, 36, -33, -46, -13, 125, + 20, -127, 108, -109, -97, -57, 26, -108, + 82, 78, -127, -84, 32, 31, -96, -124, + 60, 96, -121, -107, -113, -14, 113, 15, + 69, -53, 11, 91, -44, -33, 56, -75, + -127, -122, 114, -27, 34, 36, 120, 122, + -70, -16, -35, -122, -119, 4, 127, -86, + -18, -113, -74, 87, -83, -123, 65, -1, + -14, -119, 32, -20, -122, 31, -18, 23, + 119, 120, 112, -107, 29, 24, -123, 22, + 67, 127, 106, -128, 79, 17, 73, -53, + -27, 58, -10, -7, 127, -51, -119, 15, + -74, -113, -45, -36, 44, -120, 105, -80, + 98, 103, -102, -22, -108, 7, -122, 70, + 89, -127, 58, -33, -8, 91, -1, -4, + -62, 127, 65, 74, -124, -111, -67, 89, + 12, -122, -25, 45, 117, -73, 23, 51, + 15, 88, 25, 116, 77, 95, 121, -5, + 103, 80, 108, 127, 100, 8, -113, -112, + 119, -106, 127, -114, -82, 125, 81, -127, + -121, 127, -124, -8, -126, 58, 127, -126, + 119, 25, 26, -68, -83, 77, 70, 51, + -64, -30, 127, 127, -126, -124, 127, -128, + -119, -1, 122, 112, 13, 94, 46, -22, + 80, -26, 102, -122, -35, 37, -11, -108, + -128, -95, 
-111, 125, -123, -79, 116, 8, + 90, 97, 26, -34, 109, 120, 61, 69, + -5, 4, 119, 11, -61, 91, -3, 38, + 19, 127, -87, 125, 125, 33, 127, 46, + 39, 4, 36, 121, 127, -124, -27, -25, + -79, 39, 105, -71, 30, -14, 78, -123, + 119, 80, 106, 125, -59, 125, -106, 24, + 30, -68, -116, -56, -78, -13, 103, 10, + 120, -29, 82, 119, -107, -32, 17, -41, + 123, -126, 54, 121, 54, 127, 10, 119, + 126, -120, 40, 115, 121, 85, -13, -11, + 103, 33, 60, 72, -21, 50, -66, 127, + -69, 33, -118, -128, 29, -12, 123, 125, + 45, 127, -121, -74, -72, 80, -82, -55, + 120, 0, -65, -39, -124, 63, 126, -20, + 124, -21, 109, 127, -119, -125, 98, -125, + -36, -94, 58, -34, -123, 108, -24, -61, + 42, -10, -63, -14, -76, -3, 22, -109, + -1, -126, 58, 119, 9, -83, 124, 75, + -17, -33, 58, 40, 114, -126, -31, 120, + 64, 20, -21, -37, -33, -19, -51, 42, + 1, 127, 39, 62, 125, -86, 62, 22, + -12, -22, 2, -48, -28, -117, -60, -3, + -60, 101, -29, 25, 83, 41, 75, -83, + 65, -88, 10, 104, 22, 93, -11, -86, + -67, 98, -18, 44, 126, -100, -123, 49, + 126, -80, -44, -39, -14, 108, -122, -126, + 84, 102, 77, 112, -61, 125, 121, 35, + 102, -124, -9, 119, -119, 45, 19, 114, + 127, 61, -7, -57, -103, -104, 88, -30, + -125, 123, 123, -121, 125, -5, 127, -49, + 28, 127, -69, 87, 5, 73, 127, 126, + -12, -9, 46, -4, -122, 42, -54, 18, + -17, 85, 39, 127, -117, -127, 0, -91, + 24, -6, -103, -39, -126, 127, -7, -19, + 95, 79, -36, 118, 95, -65, -89, 117, + 76, -47, -124, -53, 5, -53, -15, 71, + 124, -56, 35, 48, -54, 104, -76, 43, + 65, -121, -70, -48, 40, 18, -128, -8, + 42, 124, 33, -5, 102, -124, -128, 120, + -128, 51, -99, -88, -128, -121, -70, 92, + 78, 112, 110, 122, 117, -107, 97, 111, + 65, -52, -23, -116, -5, -113, 11, -38, + -37, 127, 119, 23, 124, -43, 79, 124, + 125, -122, 67, -104, -127, -53, -24, -112, + 120, 92, 69, -93, -6, -118, 110, 111, + -128, 15, -46, -118, -121, -35, 107, 115, + 116, 67, 117, -30, -96, 126, -20, 127, + -127, 108, 22, 123, -46, 63, 112, 121, + 8, 124, -125, -90, 14, 4, -5, 127, + 
-119, 99, -63, 1, -107, -103, -86, -28, + 42, 103, 67, 32, -48, -95, 78, -77, + -91, -46, -128, -84, 14, 125, 8, -73, + -124, 11, 66, 111, -125, -4, -37, -125, + -83, -29, -47, 71, 122, -42, 34, -31, + 103, 103, 21, 22, -103, -102, -83, -120, + 127, -15, -80, 125, -92, -34, -123, 21, + 80, -71, 111, -65, 119, -119, -109, 109, + -26, -100, 10, 16, -127, -79, -121, 4, + 70, 19, 19, 39, -35, -25, 33, -118, + 1, 113, 93, -63, 52, 30, -22, 39, + 117, -7, 48, 50, 93, 9, -8, 45, + 36, -125, -8, -121, 39, -76, -92, -19, + 95, 101, 19, 59, -72, -106, 82, 121, + -74, -123, -66, 121, -24, -69, 116, -28, + 6, -17, -69, -38, -90, 76, 33, -127, + -67, 127, 31, 122, -113, 106, 123, -120, + 83, 117, -41, 83, -48, 29, 123, 42, + -5, -84, -103, 106, -116, -9, -124, 117, + 17, 120, -121, -15, 73, -91, 120, -127, + -70, 126, -128, 99, 127, 117, 124, -13, + -118, -27, -82, -52, -31, 39, -104, -125, + 102, 14, -120, -94, 60, -39, -58, 9, + 51, 120, -111, -103, 126, 54, -112, -124, + 126, -10, 108, -39, 54, 51, -116, 61, + 126, -72, 69, -60, 125, -17, 4, 46, + 43, 127, 26, -51, 9, -104, 125, 37, + 95, 45, -67, -65, 62, 122, -66, -22, + -89, -28, -99, -117, -81, -31, -51, 127, + 88, -121, 8, 62, -4, -45, 16, -68, + 9, -3, -73, -71, 29, -42, 69, -46, + 115, 123, -14, -118, 51, -3, 79, -91, + -74, 91, -7, -119, 127, -95, -30, -49, + 62, -16, 62, 69, 58, 72, 5, 40, + -121, -66, -4, 117, -121, 101, 53, 97, + -118, -111, -127, 115, 122, 40, -3, -44, + -5, -13, 5, 121, 49, 40, -124, -59, + 126, 6, -30, -98, -116, -2, 65, 29, + -126, -121, -123, 15, 127, 119, -120, -51, + -71, 127, 7, 1, -112, 33, 106, -20, + -114, 59, -105, -81, -11, 28, 96, 4, + -92, 27, 32, -125, -60, -107, 69, -106, + 23, 100, -53, -105, 13, -68, -126, 109, + 22, -127, 27, -127, 26, -127, -122, 127, + -88, 66, 2, -105, -3, -113, -126, -40, + 74, 44, -9, 107, 41, -74, 77, -74, + 77, 127, 106, 53, 108, -71, -105, 122, + 99, -114, -123, 24, -127, -62, -20, 125, + -12, 38, -43, -108, -33, -125, 103, -9, + -67, 127, -3, -113, 
-128, 71, 120, 57, + -127, -117, -122, 127, 124, 15, -54, 122, + -24, -24, -58, -25, 50, 76, -56, -119, + 64, -88, -36, 125, 8, -117, 126, -60, + -116, -127, 105, 69, 10, -127, -112, -31, + -89, -58, 71, 31, -62, -2, -127, 8, + -74, 124, -84, -87, -119, -103, 41, -128, + -66, 127, -93, 55, 127, 19, 127, -97, + -22, -54, -124, -78, 32, 70, 26, -1, + 124, 101, -127, -125, -82, 80, 98, 1, + -40, 30, -66, 33, -15, 85, 125, 74, + 75, -73, 11, 126, 127, -34, -44, -47, + 117, -124, 118, -126, -68, -126, -111, 116, + 27, 92, 101, 45, 15, -113, 44, 47, + 122, -128, 45, -81, -90, 116, -117, 127, + 10, -7, -2, 2, 79, 46, -70, 72, + 35, -35, -125, -44, 34, -83, 9, -26, + 92, -121, -73, -105, -116, -6, 113, 43, + 47, -86, -1, -21, 78, -120, -31, 124, + 118, 127, -78, -113, 79, -47, 124, 120, + -79, -31, 66, -4, -117, 125, 65, 127, + 14, -69, 84, 126, 89, 82, 86, 114, + -65, 15, -68, 57, -10, 123, -110, -108, + -113, 93, 86, 83, -26, -118, -86, -87, + -41, -126, -14, -127, 127, 55, 126, -85, + -87, -62, -126, 59, 127, 89, -72, -123, + -44, -63, 5, -34, 107, 37, 127, -102, + -50, 39, -126, -85, 118, 122, 82, -125, + -124, 106, -128, 52, 26, -128, 40, 11, + 105, 91, 23, -4, 88, 14, -44, -5, + 111, -122, -27, -125, -81, -69, -25, 87, + 23, 125, -64, -31, 50, -120, -124, -123, + -109, 118, 32, 103, -6, -54, -76, 76, + -97, -3, 126, -47, 122, -22, -52, -125, + -6, -75, -79, -38, 79, -49, 77, -104, + -77, -35, -104, -128, -81, 36, 124, -59, + -36, 74, -6, 39, -60, -31, 81, 123, + -84, -123, -10, 124, -69, -53, -125, -126, + 5, -20, 28, -68, 33, 118, -62, -122, + 78, -63, -3, 126, 22, 28, 127, 100, + 111, 14, -28, -77, 89, 124, 27, 56, + -118, 126, 123, -35, -112, -7, 38, -122, + -61, -70, 124, -64, 127, -22, 84, -57, + -24, 17, -76, 24, 44, 115, 107, 118, + -39, 89, 35, 126, 121, 23, 50, -25, + 116, 63, -107, -122, 67, 114, 126, 108, + -27, 38, -60, -44, -127, 7, -88, -46, + 52, 119, -96, -84, 93, 43, -94, 39, + 44, -98, 84, -69, -33, -116, -122, 81, + -22, 63, 104, -62, 126, -127, 
78, -117, + 109, 111, 108, -40, -52, 45, -118, -22, + -50, -125, 7, -51, 125, -33, -106, 17, + 119, -122, -98, -18, 3, 16, 5, 101, + 8, 50, -1, 102, -108, 85, 65, 95, + 4, 116, 2, -67, 21, -47, 50, -67, + -31, 58, 28, -23, -32, -13, 59, -23, + -80, -21, -13, -45, -91, -2, -24, -123, + 30, -92, 51, 127, -73, -10, 118, 127, + -12, -123, -57, -116, 46, 113, -128, -118, + 33, -32, 87, -110, 121, -102, 125, -124, + -60, -50, 20, -110, -56, 0, 61, -107, + -75, -126, -46, 118, -14, 58, -125, 8, + 13, 31, 73, -115, -120, 47, 122, -85, + -17, 31, 6, -65, 70, 106, 123, -108, + 40, -71, -20, -89, -74, -88, -32, -12, + 24, -45, 73, 127, -39, -43, 110, 36, + 76, -1, 113, 83, 95, 15, 53, 8, + -9, -126, -33, -102, -51, 69, -128, -14, + -98, 15, 119, -79, -15, -57, 126, -55, + -82, -95, 37, 99, 73, 121, -1, -40, + 120, -127, -24, -22, -15, 22, -24, 28, + -117, -3, -26, 18, -54, 122, 73, 127, + 116, -114, -40, -123, -66, 9, 4, 35, + 78, 57, 52, 51, -2, 85, -10, -9, + -20, -22, -69, 106, -11, 36, 19, 48, + 127, 7, 70, -121, 16, -19, 114, -17, + -117, -126, -3, -60, 26, 72, -123, -126, + 33, 26, -128, -118, -84, -86, 121, -84, + 80, -87, -56, -106, -79, -127, -3, -44, + 7, 17, 36, 103, -1, -31, 127, -106, + -20, -78, -45, -91, 120, 77, 114, -122, + -48, 16, -102, 6, 56, 26, 127, 105, + -103, -117, 31, -124, 1, 19, 31, -13, + -82, -118, 123, 88, -24, 26, 107, -98, + -46, -13, 24, -2, -6, 81, -25, 55, + 10, -38, -75, -60, -73, -52, 87, -41, + -88, -127, -123, -113, -25, -120, -67, 40, + -58, 29, -87, 117, 88, 40, -50, -117, + 124, -95, -12, -25, 100, -28, 0, 11, + 13, 62, -30, 26, -74, 127, 78, 127, + -103, -128, 3, 31, 112, -35, -32, 121, + 126, 69, -2, 4, 86, 123, 100, -127, + -83, -11, -34, 75, -75, 52, 9, 118, + 127, 5, -12, -93, -108, 17, -9, -21, + -62, -101, -10, 122, -24, 39, -100, -121, + -118, -58, 0, -55, 97, 120, 120, 26, + -22, -75, -22, 57, -89, 107, 117, -32, + 75, -100, 49, 17, -20, 96, -64, 72, + 28, 125, 38, -127, -69, 108, 33, -100, + 36, 39, 3, 82, -50, -1, 127, 
-86, + 74, 44, -64, -124, -83, 13, 18, -39, + 40, -20, 92, -70, -16, -15, -17, -61, + 91, 51, 114, 1, -21, -29, 123, 20, + -20, 5, -19, 7, -1, 86, 121, 12, + 77, 23, 17, -83, 16, -34, 28, -2, + -76, -27, -52, -33, 64, -83, 122, -2, + 64, -128, -9, 122, 127, -42, -37, 97, + 2, 118, 38, -32, 49, 46, 60, 52, + -128, 0, 72, -2, 114, -109, -8, -37, + 48, 65, -21, 127, 66, -48, -128, 123, + -26, 39, 8, 63, 2, -122, 35, 99, + 56, -89, -1, -37, 3, 2, -126, 52, + 4, 17, 46, -104, -126, -105, -46, -125, + -2, 27, 35, 45, -119, 42, 115, -18, + 34, 56, 104, 2, 61, 52, 124, -34, + -73, 110, 19, 9, -15, -74, -6, -56, + 86, -96, 17, -127, 28, -126, 3, 29, + -127, 76, 37, 125, 7, -7, 127, 52, + 94, 23, 123, -115, 120, 41, 91, -88, + 87, -50, -49, 88, 51, 1, -7, -20, + 77, -39, -92, -47, -49, 33, -67, -33, + 25, 29, 44, 14, -37, 118, -126, -63, + 125, -6, 19, -124, -9, 104, -121, -94, + 47, -101, -5, 90, 86, 64, -126, 29, + 126, -128, 59, -105, 63, 1, 34, 62, + -43, 101, -109, -112, -22, 39, 86, 48, + -84, -126, -110, -43, -51, 68, 7, -99, + -81, 109, -43, -127, 2, 118, 123, -127, + -31, 21, 55, 36, 113, 19, -48, 123, + 60, -1, -123, -108, 20, 32, -128, -13, + -11, -123, 72, -115, 126, 47, 61, -126, + 89, 118, -20, 14, -127, -19, 102, 20, + 11, 98, -71, -87, 24, -3, 127, 13, + 92, -127, 83, -69, 9, -118, -38, -53, + 19, 14, 27, -125, -9, 110, 115, 7, + 126, 112, -124, 119, -43, -8, 127, 29, + -9, -29, -32, -110, 64, -108, 0, -92, + 127, -12, -19, 31, -18, 2, 79, -52, + 32, 79, -79, -83, 27, -35, -89, -124, + -125, 67, 23, 82, -37, -125, 127, -92, + -1, 5, 14, -117, 52, 42, 57, 94, + -87, -123, -60, -11, -7, -47, -122, -20, + -8, 123, -118, -13, -128, -95, -127, 74, + -104, -69, -125, 35, -30, 87, -64, -11, + 124, -69, -93, 125, -119, 123, 106, -121, + -22, -68, 81, 124, -22, -96, -126, 69, + 35, 29, -125, -85, 106, -59, -60, -61, + -121, 88, 87, -67, -112, -65, 127, 73, + -47, -122, -42, 127, 113, -121, 23, 26, + -12, 109, 76, 19, -21, -30, -11, -31, + 12, -66, -17, 118, -83, -109, 
-128, -53, + -49, -128, 96, -2, 76, 102, -6, -19, + -126, 30, 61, -96, 51, 54, -8, 75, + 117, 111, 87, -126, 104, -123, -42, -82, + -112, 58, 39, 127, -123, 68, -107, -128, + 13, -81, 76, -108, 28, -49, -52, -67, + 16, -19, -23, 31, -127, 84, -15, 21, + 3, -70, -119, 26, 91, -122, -122, 89, + -117, 24, -78, 41, -17, -65, -114, -49, + 7, -113, 78, 48, 117, 53, -110, -28, + -49, -60, -128, -44, -67, 125, 16, -26, + -119, 72, -41, -83, 120, 51, 114, -56, + 127, -20, -15, -45, 61, -62, 119, -43, + 118, -126, -32, -38, -112, -108, 121, 15, + -84, -90, 87, 83, 43, -41, 85, -128, + -3, 103, -122, 124, 18, -52, -58, -128, + 109, -122, -49, -21, -110, 105, 2, -19, + -4, 111, -16, 74, -73, -27, -126, -44, + 125, -126, -1, 49, -59, -66, -53, 116, + 123, 126, -105, 119, 127, 127, 40, 43, + -44, -63, 18, -111, 124, -91, 18, 10, + -83, 4, -17, 1, 78, 121, -79, 9, + -128, 5, 112, 3, 13, -104, 55, -54, + 3, 58, 27, 41, -45, -46, -26, -38, + -59, 122, -41, -34, 63, -40, 8, -4, + -12, 4, -75, 2, 83, -25, 25, 109, + 46, 30, -9, 75, -45, -57, -58, -118, + 74, 89, -124, -6, 25, -82, -107, -58, + -30, 117, 116, -19, 123, -30, -23, 51, + -11, -13, 8, 50, -121, -11, 46, -78, + 120, -127, -6, 33, 53, 52, 126, -72, + 109, -113, -118, -16, -82, -71, 2, -43, + -122, 55, -33, 49, -26, -118, 24, 113, + -24, 28, 77, -118, 12, 90, 95, 25, + 127, 29, 101, 116, -9, -87, -78, 120, + 120, 114, -122, 79, -94, -89, 61, -39, + 5, -127, -14, 29, -127, -74, 76, 9, + -94, 73, -125, -62, -28, 47, 86, -83, + -124, -68, -11, -68, -31, 98, -79, -123, + 122, 62, -103, -16, -48, 49, -79, -106, + 65, 118, 56, 73, 53, -11, -112, -97, + -73, -39, 5, -69, 111, -45, 13, -3, + 65, 4, -4, -88, 121, 11, 45, -19, + 87, 84, -67, -114, 73, -83, -19, 30, + 35, -79, 55, 77, -81, 63, -71, 2, + -83, 44, 127, 61, -75, 9, -125, 65, + -128, -23, -110, 43, -11, -19, -36, -102, + -77, 122, 63, 95, 105, -18, 11, 36, + 125, -71, -59, -8, 16, 95, -61, -102, + -125, -69, 29, -16, -52, -27, 55, 102, + -24, -105, 52, -10, -123, -39, 81, 
-128, + -11, 91, 40, 11, 123, 27, 26, -28, + -97, -122, -121, 27, 49, 24, 112, -34, + -71, 125, 115, 75, 73, -65, 68, 53, + -99, -37, -86, 59, 73, -115, -128, 37, + -66, -66, -76, 115, -128, 20, -56, 3, + 36, 91, -85, -121, 120, 24, -17, 108, + 38, -82, 87, -39, 92, -10, 58, -20, + -30, 46, 66, -10, -95, -16, -57, -108, + 68, -14, -57, -113, -55, -7, -91, 53, + 112, 22, 24, 26, 102, -29, 76, 41, + 13, 27, 17, 86, -33, 126, 103, 50, + -11, -49, -118, -103, 26, 116, 47, -56, + -88, -40, -24, -56, -2, 87, 105, -99, + 104, -54, 14, 25, 118, -125, 11, 28, + -76, -70, -32, -71, 97, 15, -22, -35, + -100, -21, 24, -25, 17, -31, 80, -65, + 50, 37, 115, 84, 53, 5, -33, -33, + -99, 25, -11, 20, 86, -96, 44, 86, + 126, -111, 66, 21, 0, 63, -123, 93, + -15, 36, 127, -76, -123, -72, -111, -127, + -62, 1, -85, 1, -32, 61, 50, 57, + 27, 87, -34, -30, -2, 28, 111, -59, + -18, 16, 3, 82, 4, -128, 126, -76, + -21, -122, -21, -93, -28, 42, -52, 1, + -16, 37, 67, 17, -91, 29, 121, 6, + 118, -43, -54, -125, 87, 4, -25, 50, + 124, 57, 43, 22, -51, 21, -49, 21, + 12, 71, 122, 45, -66, -58, -54, 44, + 41, 52, -6, 84, -56, 127, -122, 57, + 75, -42, 127, -11, -42, 31, -124, 54, + 21, 5, -23, 108, 123, -64, 23, 45, + 100, -28, -86, -50, -32, -27, 116, 99, + 58, 35, -64, 71, 32, -51, 63, 0, + 46, -89, -69, 40, -12, 27, 71, -24, + -76, 91, -40, -88, 119, 47, 126, -122, + -81, -81, 7, 10, 50, 113, 2, 58, + 26, -55, -98, -59, -50, -59, -110, -125, + 16, -59, -41, -16, 103, 48, 83, -26, + 54, -119, 11, -36, 126, -16, 35, 38, + -96, 118, 0, -43, -30, -26, 21, -28, + 32, -75, -17, 29, -7, -127, -16, -23, + 3, 18, 75, -42, 97, 117, 26, 126, + 118, 62, -126, 17, -112, -70, 119, -47, + 60, -42, -49, -33, 69, -43, 79, 7, + 80, 116, -45, 113, -45, -74, 72, 21, + 63, 19, -116, -15, -110, -67, -121, -2, + -42, 101, 79, -36, 15, -50, 81, 127, + -51, 120, -47, 38, 22, -75, 126, -39, + -38, -123, -13, 31, 16, 62, -119, -76, + 35, -7, 17, -11, -122, -6, 65, 93, + 36, 104, -100, 73, -9, 95, 92, -80, + 75, 34, 
-65, 18, -112, 20, 0, -27, + -2, -127, 54, 25, -45, -61, -126, -53, + -15, -29, -7, -1, -41, -126, 21, -124, + -75, 86, -43, 62, 75, 55, 122, 54, + -8, -124, -109, 108, 71, 20, -26, 28, + -29, 11, 125, -51, 90, -11, 29, 67, + -124, -80, 51, 8, 72, 46, -120, -75, + -44, -52, 34, 2, -19, 127, 113, -34, + -97, 85, -32, -58, -5, 35, 125, 80, + -17, 40, -34, -30, -50, 6, -63, -40, + 102, -12, -17, -2, -80, 120, 66, 121, + -40, 87, -13, -38, 36, -36, 1, -6, + 72, 61, -1, 82, -102, -37, -32, 4, + -6, 66, -73, 126, -6, -4, -42, -27, + 24, 118, 5, 10, -32, -20, -36, -23, + -74, 5, 85, 67, -45, -115, -101, 115, + 85, -75, -61, 29, -17, -89, 104, 91, + -46, 102, -68, 56, -112, -67, -75, -114, + 45, 111, -35, -51, -125, -87, -20, 59, + -120, -15, -22, 1, -96, -95, 15, 119, + -14, -111, 18, -6, -42, -88, 42, -2, + 22, 112, 89, 97, 40, 98, 117, -123, + -4, -27, 61, -96, -83, 10, -1, -1, + 31, 51, 1, 9, -93, 47, -38, 88, + -75, -79, -28, 13, -16, -31, -94, 36, + 63, 120, -72, -127, 4, 78, 21, 34, + -30, 43, 23, -123, -124, 17, -71, 38, + -103, 3, -15, 17, 124, 84, -88, -21, + 73, 37, 50, 44, 107, 34, 81, 33, + -55, 2, 8, 33, -15, 10, -119, 33, + 10, -10, 44, 74, -70, -67, 90, 7, + -67, 28, -123, 33, -27, 63, 2, 123, + 87, 5, -76, 53, -128, -55, 120, -115, + 111, -43, 16, 14, 98, -2, -24, 46, + -4, -123, 34, -50, 113, 27, 38, 117, + 106, 46, 10, 2, -2, 72, -123, 3, + 14, 28, 3, -124, -51, -40, 73, -117, + 32, 86, 22, -32, -76, -119, 83, -96, + -94, -51, -51, -87, 53, 94, -87, -2, + -22, 36, -127, -34, 77, 127, 121, 57, + 39, -71, 98, 13, 13, -43, 3, -34, + 71, 124, 117, 48, 74, 10, 20, -2, + -105, -71, 10, 62, -74, 4, 74, -17, + -40, -50, -116, 29, 27, 47, 68, 7, + -60, 67, -58, -119, 53, -25, -79, -102, + 70, 27, 5, -14, -74, 51, -12, -13, + 1, 17, -22, -87, -12, 0, -50, 19, + -114, -33, -56, -99, 45, -27, -6, 47, + 21, 21, -91, 35, 87, -32, 8, -126, + -2, -51, -122, 93, -63, -118, 4, -36, + 40, -46, 56, -79, 126, 30, -4, 81, + 11, 12, 106, -1, -89, -90, -45, 46, + -118, 12, -81, 
75, -99, 20, -120, 78, + 5, 93, 79, 9, 101, -47, -78, -104, + -62, 123, -13, 108, 101, -55, 27, -4, + -127, -35, 63, 79, -98, 36, 15, 62, + -9, -23, 45, -17, -37, -5, 49, -120, + -89, -10, 123, -38, 125, -103, 119, -104, + 11, 8, 72, 39, -39, 110, -20, 19, + -38, 91, -104, 103, -47, -24, 25, -14, + -59, -29, -23, 82, -8, 7, -106, -11, + 5, -15, -30, 87, 27, 102, -95, 52, + 66, -48, -124, -58, -126, -30, 127, 63, + -121, -16, -42, -125, 77, -4, -107, -19, + -29, 22, 5, -56, 29, -88, 10, 26, + -127, 36, -17, 67, -26, 93, -16, -31, + 7, -30, -17, -24, -56, 33, 10, -125, + 120, 119, -128, -77, -68, -9, -33, -60, + 7, 1, -88, 40, -6, -67, 43, 14, + -128, -26, 39, -26, -80, -120, 7, 125, + 69, -86, 47, 5, 119, 76, 38, 52, + 6, 62, -31, 15, 71, -25, -61, -75, + 88, 109, -4, -93, -126, 56, -99, 103, + 16, 93, 93, -11, -127, 106, 5, -126, + 19, 56, 68, 59, -41, 9, 17, 29, + 0, 112, -74, -51, -127, -119, -110, -122, + -9, -64, -86, -50, 61, 40, 33, -105, + 127, -6, -28, -9, 26, -20, -81, 50, + -111, -118, -25, -49, -109, -35, -96, 127, + -51, -120, 127, 22, -125, 43, 29, 127, + 1, 66, 19, 4, 15, 86, 42, -71, + -26, 125, 80, 48, -100, 125, -13, 126, + 33, -14, 33, -16, -68, -33, -4, 75, + -127, 20, -12, -10, -3, 25, -43, -30, + 14, 12, -33, 44, 51, 46, 63, -116, + 76, 93, -128, -35, 13, 9, -39, -97, + 33, 55, -39, 24, -33, 52, 84, -65, + 120, 11, 103, -32, 101, 127, 23, 3, + 20, 31, 53, 39, -38, 41, -19, 94, + -41, 67, -33, 15, 9, 24, -77, -26, + 93, -71, -8, 46, -26, 51, -21, -127, + 42, -12, 126, 48, -121, 48, 39, -123, + -5, -4, 45, 60, -122, -55, 13, -43, + -9, 100, -7, -122, 32, -63, -71, -85, + 58, -59, -60, -70, 93, 68, 85, -77, + 126, 43, -5, 63, -103, -69, -49, 59, + -51, -120, -53, -75, -98, -21, -20, 127, + -43, -35, 127, 27, 57, 83, -8, 127, + -50, 119, 126, -65, -14, -95, -22, 33, + -17, -39, -39, 12, 122, -118, 17, -26, + -94, 47, -30, 111, 26, 14, -79, 105, + 127, 31, -51, -96, -75, 21, 67, 25, + 22, 79, -98, 41, 110, 59, 127, 127, + -18, 3, 97, 124, 36, -8, 9, 
-99, + 23, 1, 66, 39, -44, 116, 123, -5, + 124, 84, -6, -55, -50, 121, 47, -117, + -80, 85, -113, 22, -126, 101, -81, 119, + 6, 39, 1, 81, -30, -50, -120, -9, + 42, -126, 1, 23, 111, -33, 14, -128, + -4, -57, 26, -45, 126, -115, 8, 14, + 84, -11, 19, -3, -123, -127, 80, -37, + -100, 13, -57, -121, 122, 45, -59, 36, + 50, 3, 39, -15, 7, -12, -15, 96, + -128, -7, 47, -47, -19, 71, 31, 26, + 63, -67, -112, -36, 45, 14, 65, -103, + 4, 59, -127, 37, 127, -47, -35, 44, + 9, 115, 120, -127, 96, -125, -33, 114, + 66, -120, -1, 15, -128, -126, 28, -125, + 16, 8, 56, 83, 109, 116, -59, 111, + 40, -4, -60, -15, -10, 101, 41, -100, + -3, -39, 91, 34, 26, -78, -4, 126, + -18, -11, 112, -108, 40, 11, 44, -50, + 62, -67, 37, 80, 22, 11, 101, 21, + -115, -69, -32, 59, -125, -126, 107, 120, + -24, 119, -127, -48, -3, 6, -70, 65, + -114, -3, -5, 120, -23, -80, 72, 68, + 63, 48, -7, -112, -109, 5, -34, 116, + -73, 126, -15, -87, 22, -7, -39, -12, + 74, 14, 4, -113, 44, 33, 82, 32, + -113, -107, -20, 102, -111, 1, -122, 63, + -115, -45, -61, -53, -85, -16, -9, -23, + -24, -5, -38, -76, 37, 59, 4, 79, + 23, 70, -113, 36, -42, -41, -68, -59, + 9, -100, -105, 117, 127, -127, 45, -128, + 9, 26, -115, -80, 99, 72, -74, 17, + 8, 8, 60, 31, -128, 126, -122, 107, + 7, -28, 28, -109, -32, -44, -81, 16, + -128, 13, -126, -39, 18, -69, -84, -22, + -113, -31, 48, 49, 0, -65, -109, 82, + 57, -95, 12, 13, -43, 19, 36, 95, + -113, 40, 95, 116, -11, 61, 55, -61, + 127, 67, 127, -46, 126, 31, 116, -30, + -67, 113, 90, -23, 54, -55, 13, -45, + 127, 63, -22, 120, -13, -22, 58, 91, + 52, 45, 13, 83, 40, 87, -25, 125, + 91, 74, 50, -124, 86, 66, 5, 127, + -19, 116, 6, -12, 4, 42, 92, -40, + -100, -21, 123, 27, -45, 4, -97, -22, + -113, -4, -27, 18, 49, 78, 107, 6, + 126, 127, 8, -47, -124, -110, -8, 17, + 67, 51, 5, 13, -11, -76, 15, -124, + 37, 58, -71, 25, -112, 45, 35, -128, + 44, 127, 34, 39, 15, -17, 49, -3, + -24, -27, -49, 12, 127, 111, -78, -63, + 30, 29, 29, -25, 73, -66, 94, 17, + 6, 75, -82, 
118, -96, 55, -47, -53, + 16, 21, 115, 15, -10, 64, 7, 42, + 73, -33, -38, 6, -88, 94, 31, 51, + -99, -27, 0, 123, -124, 81, 51, -54, + -112, 115, 42, -124, -60, 99, 23, 101, + 62, -52, 20, 6, -43, 4, 9, 0, + 44, 51, 8, -59, -11, 110, -6, 100, + 29, 127, 0, 92, 127, 13, -38, 65, + 23, 76, -1, -127, -69, -14, 59, -33, + -57, 11, 0, 83, -38, 52, 110, -36, + -35, -112, 99, -16, -113, 125, -50, -114, + 63, -62, -6, 39, -9, 110, 0, -100, + -29, 123, 121, 7, 82, -51, -32, 51, + -34, 84, -76, -82, 90, -105, 111, 24, + 17, -16, -41, -116, -123, 1, 48, -127, + 55, 66, -16, -84, 74, -37, -91, 27, + 83, 47, 49, 92, -80, 32, 72, 65, + 29, 16, -7, 8, -106, -3, 15, -69, + -48, -27, 45, 8, -51, 58, 35, -10, + -15, 0, 15, 4, 50, -64, 6, -16, + -35, 59, -2, 14, 1, 7, 4, -74, + 7, 124, 38, -46, 70, 0, -73, -35, + -5, -4, 0, 54, 7, -4, 10, 93, + -8, 42, -93, -38, -46, 1, 29, 50, + 78, 75, 63, -73, 14, -26, 6, 58, + -81, 7, -3, 86, 69, 5, -6, 14, + 48, -41, 124, 0, 43, 65, 24, 54, + -79, 32, 46, 17, -13, -57, -35, -10, + -19, 40, -78, -47, -4, 30, -86, -61, + 0, 15, -15, 13, 127, 5, -22, 61, + 68, -33, 8, -16, -34, 10, 33, 5, + -48, 14, 49, 126, -27, 20, 96, 26, + 8, 24, -4, 57, 54, 45, -11, -126, + 126, -13, -51, -32, 4, -65, 74, -77, + -35, -3, -7, -69, 10, 13, 0, -49, + -123, -56, -9, -61, 6, 18, -1, 12, + -67, 23, 5, -30, 7, 37, -34, 12, + -40, 3, -25, 32, -60, -66, 97, -27, + 32, 20, -95, -3, 8, -76, -15, 61, + 8, 19, 9, 18, -20, -13, 105, -13, + 47, -11, 19, 10, -47, 1, 125, 77, + 90, 34, -76, 49, 64, -28, -67, 2, + -32, -24, -1, -25, -120, 1, -68, 55, + 81, -85, -11, -12, -29, -110, 2, -44, + 41, -12, -16, 7, -1, -118, 76, -76, + -17, -43, -8, 101, 86, -123, 70, 45, + -13, -39, 114, -72, 101, -5, -8, -5, + -54, -49, -92, -26, -117, 19, 14, -116, + 37, 8, 15, -15, 30, -22, -16, 23, + -83, 5, 57, 14, -19, -9, 51, -1, + -20, -26, -3, -8, 3, -14, -51, 11, + -56, 30, -5, -108, -10, -115, -89, -38, + -70, -4, -15, -30, 35, 48, -119, -63 +}; + +static const rnn_weight 
noise_gru_recurrent_weights[6912] = { + 20, -47, 122, 31, 41, -36, 18, -22, + 0, 87, -28, -26, -68, 8, -54, 104, + 112, 19, 31, -118, -31, -31, 9, 17, + 11, 5, -53, -24, -17, 24, -35, 16, + 53, -26, 6, 24, 48, 11, 17, -34, + 17, -15, -51, -32, -44, -11, 10, 63, + 25, -92, -52, -30, 26, -57, 44, -18, + 20, 49, -40, 74, 96, -43, 99, 90, + -15, 40, 11, -119, -125, 42, 58, -127, + -27, 9, -64, -64, -87, -31, -53, -39, + -1, -11, 53, -6, -124, -6, -81, -67, + 46, 23, 11, -124, -48, 100, -62, -6, + -17, -124, 10, 81, 39, 35, -125, -52, + 4, 41, -43, 12, -48, 13, 27, 41, + 25, -20, 4, -71, -59, -30, 43, 46, + 22, 34, 50, -78, -117, -26, 67, 22, + -27, -86, 48, -51, -52, 32, 48, 1, + -11, 63, -38, 62, -13, 17, 32, -6, + 11, -15, -65, -45, 69, -9, -33, -13, + -114, -30, 55, -29, -33, 3, -59, 44, + 72, -52, 22, 65, 14, 5, -20, 19, + 12, 81, -2, 100, -6, 125, -41, 124, + -2, 24, 107, 21, -38, 66, 46, -20, + -11, 8, 51, -59, 8, 50, -78, -26, + 98, -21, 120, -30, 34, -49, 72, -22, + -74, 69, -86, -11, 23, -126, 10, 12, + -35, -36, 20, 21, -24, 39, 44, -110, + -54, 33, 1, 10, -83, 36, -37, 7, + -57, 0, -73, -23, 15, -10, -14, 12, + 52, -80, -21, -40, 0, -47, 24, 8, + 58, -41, -48, 14, 27, -83, -11, 62, + 5, 101, 35, -75, 89, -101, 44, 50, + -77, -115, 1, 50, 25, -113, -64, -6, + -14, 30, -80, -91, -48, -126, -51, -93, + -103, -1, -49, 73, 25, -39, 51, 70, + -39, -53, 41, -60, -70, -72, -6, -19, + 63, 75, 120, 79, 1, -57, 8, -3, + 18, 5, -56, 31, -13, 73, 83, 77, + 4, -122, 122, -127, -17, 63, -119, -30, + -74, -109, -106, 44, -12, 106, -15, -31, + -19, 17, 87, 15, -79, 81, 32, 35, + 60, 127, 0, -53, -60, 30, -93, -31, + -19, 36, -19, -21, -46, 44, 16, 88, + -98, 61, 65, 25, 119, -70, 127, 52, + 12, -1, -6, -72, 15, 99, -117, 22, + 3, 99, -63, -54, -56, -1, -32, -2, + -120, 53, 66, -8, 112, 43, 2, -57, + -27, -92, 39, 100, 5, -20, -66, -7, + 33, -81, -127, -80, 27, -112, -76, -53, + 122, 12, 33, -88, -25, 47, -112, 10, + -38, 117, 29, -125, -65, 30, 60, -124, + -61, 14, 108, 7, 
-30, -50, -38, 44, + 16, -35, 39, -2, 125, 83, -1, -24, + -27, 106, -76, 58, 122, 24, 15, -86, + -101, -82, 121, 6, -4, 33, 0, -40, + -80, 22, -24, -124, -39, 120, -53, -1, + 63, 12, 48, -54, 83, -109, 40, 7, + 26, -51, -74, 38, -12, 0, 33, -66, + 50, -118, -25, -109, 18, 59, 124, 50, + 10, -42, 12, 68, -85, -59, 46, 34, + 5, -1, -2, -41, 53, 3, -8, -62, + -8, 33, -95, -24, 52, 30, -16, 98, + -13, -54, -54, 39, -30, 9, -78, -13, + -21, -39, -20, -21, 36, 24, 1, 63, + 28, -72, 63, -15, -127, -19, 16, 56, + 4, 22, -71, 9, -13, 18, -42, 5, + -10, -14, -71, -37, -47, -48, -68, 64, + 56, 23, -90, 15, -67, -38, -1, 10, + -34, -119, 55, -61, -25, 24, 29, -47, + -53, -28, 32, 23, 51, 45, -17, 111, + 12, -16, -63, -25, -50, 31, -18, -45, + 2, -15, -79, -11, 78, -102, -46, 31, + -47, 69, 8, 63, -123, 107, -74, -78, + 45, 55, 20, -64, 30, 95, 44, 6, + 13, 28, -37, -70, -21, -84, 16, 6, + 5, -53, 8, -9, -11, 24, 41, -39, + -84, -40, 24, -126, -41, -3, 13, -84, + 59, -62, -43, 11, -15, 14, -46, -125, + -72, -38, -48, 22, -38, 84, 39, -29, + -97, 31, 8, -46, 22, -68, -60, 8, + -47, -123, -18, -102, -43, 34, -79, 30, + -74, -94, 64, -57, -1, 98, -9, 97, + -119, 45, 19, -24, -26, -99, -55, 24, + 46, 7, -6, -126, -80, 15, 1, -20, + -72, -68, -23, -79, -74, 65, 8, 26, + 118, 13, -82, -38, 20, -9, 104, 41, + 42, 126, -15, -54, -65, -97, 59, 50, + 12, 76, -47, 92, -126, -39, 2, 46, + 27, 101, -87, -7, -26, -25, -2, 22, + 12, 57, -99, -16, 54, 126, 80, -17, + 54, 21, -21, 83, -54, 62, -11, 35, + -37, 80, -2, 20, 6, 39, -20, -74, + 33, -53, 56, -21, -48, 37, 37, 0, + -40, 13, 44, 107, -47, -21, 19, -23, + -116, 104, 72, -43, 59, 37, 53, 70, + -59, 26, -14, 41, 13, 17, 32, 123, + -73, 62, 101, -47, -60, 15, -44, 7, + -114, -40, -39, -79, 84, 18, 114, 70, + 103, 55, -24, -8, -80, 71, -50, 55, + 54, 46, 25, -59, 15, 19, 122, -104, + 121, -5, 45, -22, 118, 88, -2, 123, + -13, 21, -33, -70, -54, -86, -29, -85, + -40, -38, -20, -71, 8, -97, -23, -48, + -4, -9, 25, -122, -52, -119, -5, 3, + 
39, -28, -113, -65, -44, 5, 13, -14, + 83, 9, 50, 27, -24, -13, -14, -37, + 106, -31, 58, -26, 37, 1, -78, -128, + -12, -39, -127, 20, -53, 54, -61, 71, + 16, 41, 54, -92, 57, 88, 48, -51, + -7, 18, 24, -12, 83, 29, 63, -20, + -88, 11, -17, -79, -84, -51, 53, -2, + -21, -64, -28, 98, -14, 7, 127, -122, + -22, -25, 6, 4, 121, 124, 111, 38, + -33, -17, -106, -2, -10, 12, -35, 67, + -2, 8, -15, -17, 48, 66, -5, -10, + -33, -2, -37, 73, -110, 124, -27, -14, + -7, 46, 6, -14, -37, -32, -32, -33, + 39, 33, -21, -111, 1, 21, -126, -99, + -81, 31, -32, 58, 32, -104, 23, 61, + -48, 20, -5, -51, 57, -126, -10, 110, + -6, -32, -35, 65, 42, -95, -74, -72, + 6, 45, -81, 34, -61, -128, 46, 22, + 5, -30, -59, 116, -95, -31, -14, 16, + 18, -39, 52, -36, -35, -1, -107, 59, + 34, -24, 35, 28, 52, -65, -49, -66, + -1, 59, -87, 45, 42, 124, -47, -59, + -68, -85, 52, -18, -119, 19, 25, -126, + -118, -26, -9, 82, -111, 20, -72, -86, + 6, -36, 18, -45, 100, 3, -86, -17, + 125, -121, -50, 25, 37, 123, 20, 52, + -118, 53, 19, -31, -86, 123, 63, -110, + -12, -28, 64, -39, 75, 96, -95, 58, + 45, 43, 97, -71, -25, -38, -8, -33, + -85, -56, -44, 41, 82, -77, 71, -35, + 29, -36, -16, 21, -84, -7, -37, -82, + 60, 21, 26, -39, -23, 87, -2, 46, + -27, 45, -38, -25, -67, 45, -48, -3, + 11, -82, -15, -34, -35, 87, -94, -17, + 47, -33, 41, -17, -36, 30, 41, -18, + 81, 33, 25, -80, 65, -6, 65, -35, + -18, -105, 33, 57, 5, -12, 25, 120, + 6, -69, 90, -41, 45, -59, -82, 80, + 41, -11, 6, 38, 105, 51, 58, -73, + 20, 19, -4, 40, -28, -124, 116, -33, + -15, 23, 124, 68, 28, 6, -25, -29, + 18, 80, -35, -4, -25, 10, -118, -20, + 9, 94, -111, -76, 43, -19, 15, 97, + -19, -21, 63, 92, 72, -67, -23, -66, + -49, -14, 6, -47, 17, -101, 21, -29, + 89, -72, -86, 112, -92, 38, -71, 114, + -53, 47, -102, 6, -59, 64, -16, -116, + -22, 6, -43, 19, 9, 9, -37, -68, + 86, -6, -84, -40, 10, -14, 80, -10, + -49, 76, 31, 47, 123, 90, -49, 35, + 9, 64, 71, -29, -40, -107, 60, -33, + -23, 25, 63, -116, -16, -118, 82, -125, + 
-7, 26, -70, 77, 37, 124, 29, 80, + 69, 88, -5, -56, -77, 68, -77, 61, + 20, -72, -27, -120, -58, -70, -10, -126, + -49, -66, -44, 127, -28, 83, 63, 65, + -90, -29, -121, -1, -30, 85, -41, -91, + 40, -43, 68, 85, 85, -75, -43, 122, + -24, -103, -82, 20, -113, -22, -36, -72, + 29, -39, 28, -24, -16, 11, 27, 28, + -1, -61, 15, -126, 1, -31, -43, -26, + 24, -27, -42, 28, -80, -34, 15, -44, + -42, 31, 23, -40, 30, -2, -54, -6, + -48, -114, 36, -71, 13, 57, -127, 94, + 1, -28, 116, -62, -3, -29, -23, -75, + -110, 49, 42, 127, 19, -121, -92, 56, + -58, -28, -41, -19, -5, 38, 106, 120, + 40, 2, 86, -58, -49, -20, 102, -56, + -86, -7, 1, -85, 10, 34, -11, -63, + -1, 85, -106, 31, -48, -64, 12, -14, + -111, -82, -28, -11, -26, -52, -43, -33, + -20, -57, -91, 27, -64, 54, -55, -32, + 27, 97, 11, 13, -8, -120, -35, 35, + 6, -117, 25, 104, 7, -1, -8, 0, + 31, -29, -10, -20, -33, -23, -1, -6, + -107, 60, -62, 70, -28, -96, 48, 17, + -103, -64, 30, 42, 18, 66, 0, -69, + -16, 77, -21, -4, 35, 45, -70, 70, + 36, 27, 97, -27, 57, 23, -23, 91, + 118, -12, 24, 21, -54, 1, 40, -17, + -109, 39, 33, -22, -44, 46, 20, -15, + -42, 29, -10, -64, -16, 35, -95, -83, + 40, -34, -63, 54, -12, 2, 104, -5, + -103, -79, -94, 126, -7, 18, -17, 116, + -15, 76, -48, 19, -5, 76, 58, 53, + 1, 20, 22, -13, -63, 11, -15, -117, + -39, 43, -21, -5, 18, 16, -7, -72, + 34, -103, 86, -76, -114, 26, 39, -4, + 3, 45, -37, -55, -34, 56, -113, 14, + -56, 21, 83, -115, 52, -41, 71, -51, + -125, -95, 11, 93, -48, 5, -41, -128, + 11, 22, -20, -103, 1, 17, -69, -28, + -32, 25, 119, 52, -92, -92, -19, 57, + 77, -51, 59, -57, -8, -90, -17, -44, + -94, -43, -90, 5, 18, -27, -72, 42, + -18, -29, 32, -30, 19, -7, -27, -102, + -82, -20, -7, -127, -26, 60, 1, -37, + -124, -92, -47, -8, 8, -55, -25, -88, + -18, 98, -5, -35, -44, 15, 121, -38, + -13, 9, -88, -126, 68, -126, 62, -49, + -44, -56, -71, 3, 65, -27, -46, -103, + -78, 29, -65, -23, 100, 75, 47, 8, + 57, -43, -47, 69, 112, -85, 51, -56, + 36, -101, 13, -36, 73, 26, 
31, -8, + 53, 52, -38, 29, 81, -26, 116, -18, + 35, -49, 2, -6, -4, 52, 94, 59, + -120, 68, -59, 6, -3, -54, -22, 18, + 1, -126, -127, -34, 19, 105, -26, -16, + 113, -43, -31, -7, -60, -4, 72, 39, + 17, 38, 118, 31, 5, 19, -70, -14, + -93, -8, -107, -56, -59, 56, -127, 41, + -63, -30, -71, -27, -4, 13, -15, -66, + -50, 33, 38, 34, -47, 2, -50, 71, + -49, 18, -39, -18, 46, -3, 50, -18, + -32, -67, -25, 58, -14, -7, 23, -3, + -24, 7, 8, 118, -20, -55, -24, -30, + -16, -9, 73, 63, -74, 42, 112, -35, + -27, 38, 59, 13, 72, 2, 109, -11, + -75, 30, 28, -20, -74, 16, 103, -83, + -6, -115, -44, -11, -33, -120, 66, -58, + 22, -12, 15, -32, -26, 1, 30, 35, + 76, 62, -121, 9, -32, 12, 75, -77, + 80, 25, -18, -78, -123, -61, 23, 36, + -127, -65, 15, 29, 100, -23, 2, 15, + 21, 52, -120, -14, 12, -38, -39, 100, + 13, -3, 22, -125, -115, -44, -65, 85, + -44, 35, -27, -44, 9, 77, 2, 4, + 5, 35, -31, -126, -112, -91, 32, -24, + 6, -73, -41, 41, -23, 42, -127, 41, + -51, -8, -55, -52, -32, -6, -52, 55, + -16, 26, 110, -63, 70, 127, 38, 80, + 17, 87, 44, -51, 34, 127, -16, 117, + 126, 22, -24, -123, -6, -76, 12, -4, + -3, 47, -31, 28, 30, 12, -24, 84, + 110, -105, 64, 44, 84, 27, -13, 54, + 104, -20, 60, -60, -10, 45, 120, -55, + -6, -91, 35, 10, 2, 38, -30, -125, + -100, -23, -126, 47, -16, -17, 20, -73, + 6, 31, -28, -121, -53, 79, -100, -4, + 69, 0, -20, 125, 35, 16, -44, -20, + -5, 44, -113, -11, -6, 57, 33, -3, + -21, 50, 101, -9, 42, 72, -10, 21, + 95, 39, 8, 72, 42, -43, -24, 30, + 33, -27, -17, 79, 52, -127, 3, 39, + 1, -66, -43, 85, 11, -56, -20, 14, + -7, -30, -8, -10, -34, 48, -109, -12, + -99, -58, 123, -82, 35, -102, 7, -83, + -104, 29, 99, -91, 13, 15, -64, -111, + -17, -24, -98, -97, 86, 0, 12, 123, + -113, 61, -13, 3, 31, 4, -92, -115, + 126, 7, 6, 74, -68, 46, 19, -85, + -94, -20, -20, -117, 59, -51, -59, 20, + 18, 3, 6, 21, 1, 94, -78, 112, + -12, -46, -30, 67, -16, -83, 6, 42, + -118, 49, -82, -21, -4, 15, -12, -67, + 20, -71, -69, 57, -10, -51, -46, 99, + -5, -9, 
-40, 59, -61, -11, -59, -69, + 2, -55, -32, -48, -4, -6, -33, -110, + 94, 43, -19, -18, 31, -126, -5, -19, + -80, -20, -20, -28, -56, -70, 45, -46, + 43, 77, 6, 77, -90, 26, -37, 35, + -39, -48, 28, 9, -56, -42, -31, -46, + -31, 13, -65, 77, 34, -41, 9, 17, + 20, 24, 51, -44, 11, -21, -44, -78, + -96, -23, -22, 6, 18, -126, -110, -33, + 8, -32, 17, -123, -43, -7, -53, 17, + 46, -53, 26, -15, 70, 19, -51, 67, + 25, 117, 56, 7, -6, 74, 57, 101, + 122, 41, -32, -44, -21, 54, 32, 22, + 119, 120, 72, 68, -24, 29, 48, -6, + 2, -14, 5, -35, -77, 4, -72, 53, + 46, -31, 57, 25, 9, 69, -21, -7, + 70, -41, 43, 63, 10, -53, -68, -31, + 20, -19, -90, 48, -22, -54, 33, 64, + 20, -79, -13, -17, 45, -41, 103, -16, + 25, 6, -35, -24, 28, 9, -29, 76, + 4, 40, 47, 23, 24, -51, 92, -17, + 49, -23, 107, -13, 65, 72, 34, -19, + -20, -60, -45, -55, -2, 19, -58, -49, + 7, 20, 78, 120, -69, 28, 45, -119, + -15, 13, 20, -45, 25, -108, -20, 12, + -54, -17, 32, -17, -13, -3, -3, 17, + -25, -48, 95, 8, 36, -66, -4, -26, + -17, 12, 39, 57, 53, -17, -33, -46, + -18, -61, 19, 64, -72, -12, -69, -31, + -31, 66, 65, -62, -10, 120, 4, 46, + 122, -36, 75, -47, -5, -82, 49, -3, + -123, -125, -37, 127, 33, 111, 58, -68, + -36, 87, 51, 22, -49, -1, 115, -10, + -40, 37, -56, -90, 11, 34, 77, -5, + -37, 61, 123, -11, 17, 46, 64, -86, + -96, -5, -18, -84, 116, -126, 127, 55, + 58, 27, -68, 42, 47, -63, -108, -103, + -33, -1, 77, 0, -9, 33, 126, -106, + -14, 127, 8, 2, -42, 37, 41, -23, + 17, 11, 4, -65, -42, 0, -47, -36, + -11, 34, -83, -25, 52, -125, -25, 0, + 7, 24, -83, -56, -21, 112, -60, 25, + -124, -114, -67, -14, -55, -28, -34, -9, + 119, 8, -47, 52, -6, 20, 23, 9, + -104, -27, -46, -42, -32, 55, 118, 49, + -35, -41, -19, -103, 1, 63, -47, 9, + 1, 0, 124, 72, 1, 8, 47, 118, + 29, -39, -20, 26, -38, -17, 13, 68, + -52, -37, -30, 105, 7, -5, 27, 24, + 71, 15, -48, 80, -9, -9, 66, 19, + 19, -31, -74, 115, -4, 21, 78, -19, + 2, 17, -51, 28, -109, 58, 78, -10, + -3, 119, -44, -12, -41, -57, 67, 38, + 49, 
-113, -43, -101, 59, 104, -26, 116, + -23, -47, -33, 57, 17, -99, -123, -3, + -55, 9, 94, -50, 30, -48, -128, -63, + 3, -61, 61, -31, -40, -31, 4, -54, + -123, 37, 70, 14, -13, -32, -115, 61, + 21, -93, -34, -7, -32, 26, -20, -110, + 73, -15, -123, -69, 41, 13, 106, 10, + -39, 69, -102, -80, 18, 51, -81, 86, + 30, 39, 48, -78, 106, 14, 30, -27, + 65, 62, -20, -113, -30, 47, 22, -101, + -24, 119, -15, -40, -95, 76, -112, -5, + 38, -123, 77, -22, -84, 63, 17, 3, + -113, 50, -6, -60, 5, 16, 29, 21, + -1, -25, -16, -2, 58, -50, -58, -43, + -28, -85, -71, -45, 66, 46, 127, 37, + -63, -9, -60, 56, 22, 48, -47, 23, + 28, 21, 27, -32, 7, -71, -82, -7, + -11, -27, -24, -2, -45, 67, 32, 21, + 86, 62, -51, 39, -41, 2, -41, -31, + -15, 50, -50, 54, -8, -21, 39, 51, + -32, -24, -22, -27, 5, -8, 16, -2, + -17, 17, -43, -35, -32, 29, -21, -58, + 27, 49, -2, -63, -109, 40, 26, -55, + 63, 87, -37, 76, -88, -23, 42, 59, + 24, -32, -47, 18, -16, -30, 71, 39, + -13, -10, -83, -2, 24, -12, -36, -44, + -79, 66, 17, -21, -27, 48, -4, -7, + 101, 16, -13, 17, 20, 29, 70, -46, + 19, -30, 25, 20, 8, 32, 86, -76, + -17, -59, 102, -29, 10, 38, -104, 6, + -86, 8, -39, 4, -2, -28, 8, -27, + 8, -1, 36, -82, 17, -53, 44, 72, + 43, -83, 16, 70, 42, -61, 12, -74, + -70, -10, -23, -43, 33, 29, 12, -3, + -18, -81, -26, -48, 10, 27, -37, 96, + -27, 24, 67, -36, -28, -6, -10, -26, + 15, -41, 7, 110, 25, -101, -75, 62, + -54, -64, -11, -38, -126, -24, -93, 101, + 0, 13, 20, 35, 41, -122, 91, 62, + -45, -12, 13, -36, 40, -43, -104, -29, + 61, -117, -70, -121, 17, -21, -3, -127, + 107, -22, 62, 47, -3, -36, -18, 69, + -67, -33, -19, -89, -13, -7, -4, -49, + -20, -112, -13, 35, -126, 38, -27, -114, + 12, -54, 19, -108, -61, -79, 5, -65, + 71, -121, -115, -64, -2, -52, 35, 22, + -46, -8, -82, 41, -113, -126, -6, -19, + 43, -51, 35, -11, 84, -44, -100, 23, + 67, -11, -20, 81, 52, 22, 21, 124, + 12, -23, 4, -27, -128, 111, 25, 35, + 48, 28, 9, 28, -24, 84, 56, 10, + 71, -22, -22, 78, 16, 92, 117, -22, + 33, 
29, -10, -98, -35, 45, 1, 5, + 24, -46, -69, 13, 21, 28, 4, -27, + -117, 44, -29, 116, 26, -21, -38, 111, + 43, 14, 6, -7, -79, 82, -57, 4, + -127, 0, 86, 113, 2, 7, 42, -72, + -31, 11, -6, -97, 22, 113, 10, -31, + -30, -57, -35, 21, -21, 47, -3, -42, + -36, 74, -102, -24, -59, -58, -50, -67, + -113, 25, -128, 4, -108, 16, 113, 82, + 32, -128, -17, 65, -121, -92, 81, 0, + 28, 27, -8, -93, -31, -41, -22, -20, + -78, -111, 44, -105, -67, -117, -123, -72, + -76, -90, 9, 29, -48, -66, 4, -82, + -51, 16, -28, 11, -54, 4, 22, 10, + -91, -64, 6, 119, -63, -6, 24, 124, + 75, -54, 70, 67, 38, -31, -48, 68, + 71, -38, -7, 7, 34, 91, 53, 53, + -33, 10, 90, 3, -37, 55, 58, 38, + -17, 54, 19, -45, 63, 69, -9, 82, + 9, -5, -33, -13, -35, -17, 4, -64, + -126, 102, -116, -3, 17, -42, 19, -50, + 62, 41, -21, -103, 38, 36, 14, -9, + -10, -46, -35, -2, 43, 56, -112, -109, + 70, -17, -35, -42, 34, -20, 94, -8, + -1, -29, -30, -47, 71, -29, 56, -12, + 6, -76, 53, 1, 8, 55, -24, 63, + 15, 61, -113, -27, -57, 71, -42, 47, + 74, -72, -12, 13, -39, -75, -112, -46, + 40, 24, 82, 32, 48, -20, 40, 14, + 61, -11, -117, -37, -7, -67, -123, -21, + 20, -36, -31, -7, 28, -95, 39, -5, + -98, -39, 10, 40, -67, -5, -47, 12, + -23, 31, -2, -4, -9, -70, 29, 45, + -37, 49, 15, -122, -47, 34, 9, 62, + -7, -42, -127, -72, -22, -55, -64, -24, + 2, -12, 86, -77, -36, -6, 56, 55, + 46, 117, 105, -72, 27, 1, 97, 81, + -24, -83, -94, 6, -16, 68, 9, -57, + 12, 108, 11, 13, -89, 25, 59, -38, + 49, -18, 16, -53, -2, -1, -5, 50, + 17, 89, -9, -50, 27, -34, 62, -7, + 4, -49, -6, -3, 12, -12, -118, -9, + 26, -32, 29, 76, 37, -112, -26, -76, + -38, -22, -34, 125, 59, 1, -53, 40, + 21, -4, -27, -83, -19, 47, -123, -123, + 30, 111, 3, 27, -1, 127, -47, -97, + 38, 87, -101, 1, 15, 79, -2, 44, + -18, -40, -17, 18, -49, -26, -125, -120, + 16, -55, -37, -25, -86, -87, -23, 8, + 29, 3, -14, 9, -9, 34, 10, 69, + 4, -32, -20, 68, 10, 21, 15, -58, + 16, -45, 81, 81, 104, -20, 45, 6, + 45, 41, 55, 12, 18, 1, 11, -84, + -48, 88, 
2, 50, 30, 11, 17, 5, + -23, 70, 121, 24, -31, 97, 36, 102, + -95, 88, 5, -43, 123, 92, -20, -43, + -28, 22, 127, -42, -47, -37, 86, 56, + -60, 94, -13, -108, 21, -82, 25, -76, + 36, 21, 54, 123, -70, 39, 3, 9, + 73, -9, 31, -35, -55, -128, 14, 36, + 15, 32, -63, -80, 64, -76, -72, -58, + -115, -124, 127, 33, 17, 23, -65, 111, + 18, 60, -34, -120, 80, 19, -125, 49, + -20, 53, -66, 31, 22, 35, -125, -86, + -42, -73, 37, -87, 22, 113, 70, -39, + 1, -57, -44, -85, -12, 103, -49, -72, + 79, -26, 89, -46, -67, 51, -11, 125, + -121, -87, 32, -90, 21, -42, -3, 77, + -6, 33, 38, -7, -27, 31, -28, 51, + 18, 49, 55, 6, 22, 6, -29, 71, + -123, -10, -46, 64, 30, -52, 124, 33, + -1, -48, 27, 105, -74, -104, 38, -45, + 102, -8, -28, 20, 19, 28, 8, -50, + -44, 43, 123, -23, 121, -28, 88, 3, + 57, -31, 13, 17, 52, 27, 8, 53, + -25, -28, -34, 18, -68, 63, -21, -32, + 11, 53, 66, -38, -79, 59, 5, -65, + 50, -23, 61, -52, 64, -120, -20, 12, + -77, -9, -67, -95, 16, -127, -111, -74, + 124, 126, 78, 45, -29, -3, -70, -68, + 43, -27, 32, -127, -69, -16, -33, -127, + -125, -43, 13, 81, 67, -10, -41, -7, + -38, -26, -1, 64, -66, 106, 127, -15, + -28, 14, 117, 28, 41, -81, 46, -58, + -121, 48, -86, -39, 8, -36, -128, -3, + -78, 35, 45, 24, -7, -25, -3, 115, + -21, 15, 14, -30, -40, -30, -55, 39, + 79, -29, 126, 69, -97, 18, -31, -11, + 58, -60, 14, 99, 11, 54, 43, 30, + -50, -53, 4, 125, 26, 108, 32, 36, + -17, 9, -68, -104, 27, 7, 84, -73, + 8, -58, 4, 35, 85, 76, 125, 31, + 32, 42, 15, -65, 63, 89, 31, 44, + 7, 24, -22, -117, -4, -84, -32, -2, + 78, -53, -74, 108, -96, -26, 26, 27, + 8, -62, 9, 61, -20, 32, -37, -39, + 20, -5, -88, 38, -22, -47, -79, 30, + -35, -12, 13, -35, -79, 34, 10, -117, + 34, -14, 106, 12, 11, -24, 59, 28, + 25, -21, 10, -73, 9, 9, -18, -31, + -52, -70, -45, -5, -20, -22, 89, -21, + 41, 39, 58, -16, -2, -128, 23, 9, + 61, 22, 25, -43, -126, -27, -20, 14, + -57, -32, 126, -5, 25, -44, 32, 51, + 108, 62, -3, 12, -16, -39, -35, 37, + 41, -64, 37, 58, -63, 100, -29, 
66, + 31, 14, -3, -12, 98, -58, 9, -5, + -22, 35, -1, 2, 78, 46, 45, -33, + 35, -62, 78, -54, -40, 17, 60, 99, + -43, 42, 47, 24, -42, -80, -72, -81, + 14, -48, -41, -40, 83, 36, 24, 55, + 48, 124, -39, -75, -57, 96, 28, -107, + -64, 88, 18, 4, -18, -47, 53, 35, + -61, 60, 21, -23, -15, -112, -55, -39, + -75, -17, -122, 78, 51, 15, 51, -126, + 79, -100, 94, 35, -33, 42, 45, -26, + 41, -20, -76, -2, 11, -82, -12, 60, + -44, -9, -40, 3, -93, -116, -125, 1, + -60, 34, -28, 43, -3, 18, 33, 29, + -117, 87, 19, -88, -92, -92, -64, 37, + -125, 68, -123, -113, 29, 39, 5, -8, + 6, 100, 127, -7, 8, 62, -9, -10, + -48, 96, 104, 121, 122, 21, 46, 116, + 124, -77, 0, -124, 120, -10, -72, 0, + -5, 100, -44, 124, 12, 52, 26, -46, + -18, 126, 8, -30, -124, 4, 94, 12, + 10, -39, 126, -73, 109, 22, -72, -82, + 32, -33, -56, 37, -58, -50, -24, -6, + 114, -62, 33, -124, 44, 27, 111, -2, + 50, -12, 35, 12, 126, 12, 21, 94, + 25, 22, -8, 51, 74, 46, -102, 24, + -57, 0, -54, -51, -27, 116, 113, -25, + 0, 9, -29, 16, 47, -69, 41, -63, + -66, -36, 21, -74, -40, 41, 65, 62, + 8, -86, 59, 35, 90, 24, -9, -31, + -23, 25, 14, 50, -113, -48, 59, -9, + -48, 7, -37, -124, -25, 15, 18, 77, + -66, 35, -1, -15, -84, -29, 6, -9, + 25, -26, -108, 53, 5, -13, 26, -124, + -47, -39, 21, -5, 21, -64, -43, 74, + 51, -34, 42, 60, -30, 23, -32, -6, + -53, 58, 41, -92, 30, -67, -22, -16, + -30, -10, -43, 96, 22, 64, 121, 18, + -105, -61, 8, 69, -90, 80, 65, -118, + 9, -16, 20, 54, 6, -68, 66, 31, + 29, -101, 81, -79, 37, 68, 5, -123, + 115, -9, -33, -94, 7, -42, 113, 39, + -3, 110, -74, -13, 37, -75, -62, -13, + 119, 2, 0, -120, -114, 62, -53, 28, + -99, -58, -7, -63, -120, 85, -79, 4, + -28, 61, -126, -65, 62, -94, -60, 20, + 125, -112, -124, -56, 31, 127, -59, -66, + -125, 62, -92, 25, 39, -128, -24, 24, + -34, 50, 2, -30, 127, -91, -43, 65, + 127, -8, -21, 12, 99, 7, 120, -126, + -35, -5, 109, -117, -32, 10, 114, -11, + -18, -83, 66, -76, 87, 35, -12, -90, + 17, -25, 123, 14, 20, -5, -62, 23, + -35, 125, 
36, -5, 28, 64, -113, 5, + -11, -27, -123, 14, -48, -13, 26, 19, + -20, -4, -19, 33, 0, -81, 41, -47, + -30, 16, -67, -52, 39, 68, -7, 85, + 6, -10, -6, 17, 59, 18, -48, 113, + 73, 85, 60, -3, 94, -43, 21, 9, + 15, -74, -40, -52, 45, -104, -34, 3, + 26, 20, -36, 17, -121, 21, 44, 66, + -67, 5, 4, 63, 8, -23, -45, 12, + 52, 4, 53, 8, -116, -64, -42, 68, + -124, 39, -25, -22, -2, -17, -31, -77, + 43, 32, -55, -127, -40, -6, 44, 66, + 27, -21, 60, -110, 16, -71, 65, 103, + -25, 50, 47, -49, 18, -35, 18, 21, + -29, -19, 38, 4, -47, -4, -66, -21, + -41, 43, -22, 50, -78, -62, 106, 71, + -66, 31, -38, -17, 45, -65, 92, 0, + -23, 13, 90, 48, -42, 103, -42, 18, + 11, 50, -20, 19, -76, 74, 70, 43, + 106, -119, 18, -40, 34, 24, 20, -17, + -83, -64, -10, 27, -6, 62, 127, -88, + -43, 49, 124, -23, -95, -17, 3, 75, + 65, -45, -14, 88, 48, 30, -53, -35, + 29, -87, 11, 66, -2, 109, -28, -29, + -71, 96, 28, 24, 38, 23, 63, -55, + 57, -42, -18, -55, -42, 23, -40, 16, + 18, 33, 10, 29, -24, 36, -79, -81, + -13, 38, 24, 10, 126, 46, -9, 7, + 34, 59, -1, -52, -8, -28, -23, -95, + 55, -128, -32, 17, 120, -89, 40, -62, + -63, 17, -97, -77, -46, 41, -121, -5, + 45, -70, -50, -92, 94, 71, -121, -15, + 127, -77, 6, 125, -34, -48, -21, -128, + 127, -39, -47, -109, 36, 127, 76, -55, + -83, -30, -124, -128, 52, 23, -123, 127, + -21, -91, -9, 28, 36, -67, -51, 27, + 33, -38, 4, -74, -123, 10, -120, -25, + -30, 54, -9, 123, -118, -14, -77, 30, + 70, 8, -18, 106, -7, -55, -39, -75, + -59, -47, 25, 48, -6, 7, 83, -47, + 25, -127, -33, -114, 33, 30, -3, 49, + 105, -41, 72, 75, -91, 0, -36, -64, + -18, 16, -36, -99, 116, -85, 61, -70, + -14, -126, -39, 122, -81, 91, -42, -108, + -116, -41, 46, 119, -1, 15, -19, -47, + -19, 17, -73, -20, 3, -66, 40, -122, + 81, -5, -61, 23, 45, -79, -25, -79, + 66, -2, 6, -36, -47, 95, -9, -34, + -24, 3, 94, -11, 127, 75, 38, -6, + -45, 12, 25, -28, -124, 111, 108, -78, + -7, -87, -45, -3, 26, -66, 91, -87, + 72, 64, -88, -78, -41, 29, -86, -59, + -1, -55, -35, 13, 
124, 15, 40, -43, + 29, 59, -112, -38, -10, -77, -9, 18, + 20, -47, 39, 43, -34, -47, -17, 34, + 1, 70, 90, 47, -63, -15, -4, 28, + 102, 48, -78, 24, 58, -12, -33, 8, + 16, 17, -52, 18, -4, 81, -28, -28, + 28, 56, -40, -43, 9, -52, 5, -54, + -26, 15, 40, 6, 18, 29, -25, -35, + -87, -21, 17, -25, -5, 6, 65, 34, + 59, 33, -38, -17, 6, -18, 42, -42, + -61, 45, -46, 63, -80, 16, -56, -118, + 27, 44, -8, -74, -24, 7, -49, -47, + -87, 16, -64, -22, -16, 127, 24, -39, + -44, 42, -125, -75, -16, 52, -40, -39, + 40, -48, -5, 16, 10, -59, 25, -13, + 19, -23, -96, 43, 71, 51, -21, -24, + -22, -10, 66, -91, 23, 74, -119, 93, + -42, 5, 33, -95, 53, -121, -68, 35, + -63, 33, -50, -48, -65, -8, 21, -3, + -1, -87, 88, 82, -51, 33, -32, -26, + -72, 15, 61, 2, 55, 122, -32, 48, + 15, -71, -3, -41, -11, -22, 19, 6, + -27, -85, -11, 1, 25, -52, 94, -104, + 52, 42, -49, 49, 118, 79, 78, -46, + 22, -7, 11, 63, 24, 5, 3, -2, + -37, 118, -4, 7, -50, 1, -27, 50, + 39, -33, -20, -116, -67, 4, -13, 99, + 58, 16, 38, 47, 65, 86, 68, -62, + 62, 8, 9, -81, 10, -26, -87, -18, + 89, 42, 51, -19, -71, 126, -87, -64, + -69, -80, -3, -13, -20, 33, -4, 89, + -6, -16, -45, 93, 29, -7, 58, 78, + 45, 23, 32, -67, 5, -25, 59, -47, + 33, 60, -23, 41, 12, -13, 26, -18, + 15, -64, -30, -122, -31, 66, 120, -86, + 98, -120, 66, -22, 6, -37, 39, 90, + -124, -75, -124, 16, -59, -42, 96, -4, + -51, -34, -10, 36, 8, 127, -60, -80, + 44, 46, -31, -3, -20, 116, 45, 50, + 5, -6, 78, -25, -27, 40, 49, -24, + 68, 1, 25, -42, 0, -93, 94, -37, + 8, 31, 27, -24, 22, 8, 15, -33, + 33, 18, 75, -113, 75, -14, -63, 28, + -4, -63, 3, 45, -5, -6, -38, -66, + -4, 58, -4, 125, 89, 56, -21, -119, + 54, -26, -35, 43, -51, -51, 74, -9, + -57, -30, -30, -126, -122, -42, 90, 5, + -74, 37, 49, -3, -69, 33, 20, -2, + 70, -84, 0, -51, -74, -26, 100, 10, + -89, 3, -92, -119, 31, 50, 20, 24, + 51, 23, 114, -120, -46, 70, -7, -11, + -11, -16, 36, 3, -122, 20, 46, -52, + -23, -35, 3, -53, -37, -35, -36, 28, + 38, -69, -90, -4, -43, -58, 30, 
13, + -70, 5, -44, -17, -59, -26, -48, -74, + 29, -127, -23, -34, -31, 29, 14, -50, + -93, -20, -53, -35, -59, -42, 1, -44, + 16, -126, 16, -125, 22, -22, -98, -8, + 71, -57, -76, 13, 61, 125, 48, 52, + -110, -90, 37, -75, -41, 0, -36, 32, + 33, -2, 28, -70, 59, 93, -116, 13, + 79, -18, 83, -43, -12, 0, 4, -74, + -121, 0, -63, 42, 52, -65, 96, 7, + 18, 11, -24, 100, 29, 23, 45, 24, + 6, -33, 17, -75, -66, 59, -8, 42, + -8, 8, -36, -35, 3, 22, -26, -19, + -23, 17, -62, 44, -32, 120, -5, 26, + -14, 11, -54, -40, 43, -47, -62, 8, + 11, 18, -5, -44, 9, 15, 41, -13, + -42, -58, 45, 32, 8, -28, 52, 29, + 18, -14, 31, -18, 63, -19, -103, 60, + 4, 75, -36, -86, -90, 68, 7, 17, + -24, -44, -16, 11, -62, -31, 71, -78, + 15, -51, 1, -46, 71, 124, -2, 9, + -20, -9, 39, 5, 71, -12, 0, 54, + 68, 41, 33, -19, 103, 1, -66, 20, + 41, -56, -80, 7, -35, -85, 23, -37, + 9, 62, 56, 63, -23, 25, 127, 26, + 1, -76, 124, -23, 44, -55, 87, -58, + -47, -18, -15, -8, -24, -50, 54, -68, + -63, -12, 25, -14, -25, -49, -16, 109, + -55, 9, -49, -2, -73, -33, -101, 28, + -53, -10, 21, -42, 11, -53, -31, -125, + 57, 24, 85, -11, -76, -39, -40, 34, + -30, -34, -127, -4, 55, 96, -3, -44, + -32, -24, 115, -9, -92, -63, 6, 71, + -23, -74, -16, -86, 84, -127, -116, -109, + 108, 18, 91, -96, -76, 65, -55, -5, + 29, -26, 23, 40, 9, -127, 40, -103, + 69, -127, -10, -16, 99, -34, 53, -5, + -108, -40, -81, -42, 63, 61, 23, 123, + -72, 7, 55, 114, 24, 73, -15, 17, + 37, -43, 48, -105, 77, -12, 33, 29, + -36, -1, 41, -35, -81, 81, -41, -80, + -29, -43, 115, 19, 50, 40, 32, 32, + -82, 87, 28, -5, -22, -77, 56, -12, + 28, 26, 5, 49, 37, 25, -29, -35, + -13, -82, -57, -74, 36, 104, 62, -78, + -82, 46, -3, -57, -18, -57, -9, 61, + 14, -12, -41, -84, 125, 124, 24, -80, + -44, -21, 21, 82, -48, 123, -72, -23, + -93, 38, 10, -103, 55, -126, 31, 29, + -48, 127, 26, -51, -74, -54, 4, 35, + -125, -40, -30, 37, 58, 31, 11, 10, + 2, -24, -75, -18, 29, 35, 10, 37, + -3, -106, -56, -70, -11, 14, -59, -125, + 27, -49, -18, 
12, -59, -63, -74, -1, + -66, -13, -121, -73, -4, 5, -121, -70, + 6, -128, -23, -20, -25, -65, 14, -23, + 112, 67, 109, 41, -15, 71, 3, -9, + -21, 121, -119, 63, 93, 34, 35, 65, + 126, -39, -46, -118, 105, -103, -25, -29, + 76, 51, -77, 122, 95, 65, 77, 67, + 11, -4, -119, -52, -39, 48, 6, -28, + 68, -42, 113, 9, 23, -18, -7, -14, + 23, -12, -9, 5, 6, -93, 43, -32, + 7, 20, -65, 55, 18, -128, 53, -7, + -5, 21, -2, 13, 43, -5, 13, -126, + 92, -60, 32, -84, -116, 62, 70, 115, + -21, -38, -116, 0, 39, -10, -10, -29, + -101, -40, -9, -29, 43, -16, -1, -4, + 29, -42, -52, 13, 44, -126, -91, 39, + 64, -52, -60, -52, 66, 15, -7, -35, + -105, -55, 63, -24, -76, 89, -18, 38, + -73, 57, 6, 67, -80, 64, -48, -42, + 19, -74, -128, 36, 4, 20, -125, 15, + 51, 13, -19, -29, -78, -60, 56, -9, + 63, -23, -37, -57, -12, 7, 33, -115, + -30, 62, -32, 93, -19, 73, 20, 15, + -2, -48, -19, -128, -54, 126, -103, -39, + -27, -65, -24, 63, 7, -40, 27, -26, + 71, -110, 116, 36, 20, 7, 2, 40, + 45, -41, -62, -78, 28, -54, -59, -1, + -19, -75, 50, 32, 107, 20, -5, -104, + 105, -39, -38, 72, 70, -121, -4, 99, + -23, 16, -47, 14, 89, 34, -65, -16, + -46, 18, 52, -65, -51, 11, -108, -37, + -57, -25, 27, 19, -36, -100, 97, -112, + -24, -118, -31, -39, 122, -61, -52, 95, + 42, -53, -27, -71, -79, 3, -62, -126, + -115, -30, 65, -48, -98, -110, 77, 13, + 55, 26, -4, 2, -73, -36, 50, 20, + -86, -70, 22, -35, 118, -119, 7, 62, + -124, -7, -107, -95, -119, -60, -47, -94, + 14, 105, -70, 104, 16, 75, 114, -128, + -95, -113, 17, 7, -127, 24, 28, -53, + -76, -57, -118, -19, -59, -14, -64, -22, + -27, 0, -10, -103, 13, -116, -59, -1, + 66, -123, -107, -5, -48, 4, -61, -86, + -50, 5, 51, -81, -108, -59, 19, 54, + -103, -17, -35, -127, -81, 59, -122, 0, + 36, -24, 117, -29, 31, -9, 13, 15, + 80, 81, 36, 123, 90, 77, 21, -49, + 8, -12, -34, 65, -71, 35, -3, 15, + -54, -58, -41, -45, 10, 54, 102, 77, + 23, 28, -39, -43, -47, -49, -87, -35, + 24, -2, 21, 33, -22, 26, 19, 80, + -26, -32, -101, 34, 31, -6, -57, 
20, + 73, 78, -107, 36, -49, 2, 23, -17, + 109, 5, 15, -34, 41, -23, 61, -5, + -56, -102, 88, -16, 49, -52, -57, 59, + 90, -8, -125, 30, 72, 75, -46, -75, + -126, 97, -28, 118, -62, -47, 113, -29, + -48, -29, 101, 23, -7, -42, 18, 36, + 0, 11, -29, 31, -53, -5, -18, -80, + 46, -18, 29, 5, 17, 0, -19, 65, + -6, -1, 21, 11, -35, -40, 26, -68, + -29, -43, -4, -24, 29, 85, -25, -77, + -10, -27, 3, -34, 56, 74, -118, 13, + 14, 101, -48, 22, -41, 59, 22, 21, + -20, -127, -73, -7, 70, -48, 13, -95, + 31, -14, -63, 22, -71, -93, -74, -38, + -120, -28, -33, -45, -14, -48, -18, 81, + 24, -60, -29, 0, -16, -7, 47, -84, + -72, 58, 14, -37, 71, 20, 41, -42, + -6, -105, -8, -2, -25, -16, 23, -50, + 16, 33, 52, 22, 42, -4, -5, 54, + 15, -22, -105, -18, -8, -25, -112, 26, + -32, -46, -70, 86, -41, 105, 46, -30, + -38, 1, -24, -20, -24, 3, -13, -44, + 19, -79, 23, -60, 21, -54, -120, -21, + -3, 60, 4, -29, -2, 17, -2, -10, + -20, 12, -71, -108, 47, 4, 26, 20, + -11, -28, 0, -27, 2, -50, 7, 9, + -42, -59, -52, 65, 9, 77, -67, -21, + 5, 44, -30, -33, -87, 8, 109, 120, + -80, -27, -74, 4, 40, 22, -17, 91, + 13, -31, 80, 0, 34, 24, -29, -74, + -120, 71, -96, 85, 125, 117, 59, -53, + -75, -42, -21, -19, 9, 29, 47, -124, + 97, -7, 54, 29, 9, 67, 56, 13, + -16, 33, -10, -11, 100, 26, -65, -10, + 2, 80, -43, -1, -98, -37, -1, 63, + 29, 41, -92, -5, 48, 36, -2, 48, + 2, -23, 7, -2, 19, -121, 95, -85, + -2, -126, 47, 44, 4, -49, 53, 27, + -80, 48, -51, 127, 13, 84, -39, 2, + 40, 29, 45, 53, 14, -21, 27, -38, + -79, -63, -28, -20, 7, -117, 62, 1, + 58, -68, -6, 22, -5, 9, -16, -23, + 55, -43, 25, -31, -85, 39, -5, 65, + 1, 16, 11, 2, -27, 31, 65, 88, + -15, 16, -127, 58, 45, -45, -20, 45, + -22, -61, 62, -62, -74, -107, -62, 96, + 37, 57, 23, -83, 3, 11, -69, 31, + -13, -67, 14, -93, 35, 61, -62, -31, + -61, 9, 25, -53, 127, 117, -4, 67, + 59, -65, -38, -11, 46, -75, -20, -48, + -9, 55, 18, -39, 30, -55, 114, -45, + 47, 15, -32, -27, -84, 12, -42, 85, + 0, 4, -56, -48, -43, -24, 24, 26, + 
-85, -10, -90, 28, -9, 77, -111, -51, + -79, -41, 26, 16, 46, -65, -69, -106, + 121, 68, 19, 16, -62, -25, -62, 115, + 116, 20, -94, 61, 45, 78, 53, 55, + 28, -42, 20, 7, -109, -34, 55, -124, + -39, 125, 40, 25, -28, 82, -60, -101, + -13, 25, -71, -34, 27, 71, 16, -7, + -33, -37, -1, 12, -1, 9, -52, -12, + 25, 25, -34, -41, -37, 32, -18, -31, + -124, -85, -101, 0, -37, -122, 49, -21, + 85, -68, 5, 36, -20, -13, -12, 43, + -26, -57, 30, -12, -46, -90, -83, -120, + -3, -26, -59, 16, 47, -83, 32, -44, + -14, 35, -94, 69, -61, 108, -20, -17, + 54, 36, -49, 53, -50, 48, -4, 9, + 38, -70, 56, -23, 80, -48, 56, 37, + -55, 20, 41, -9, -97, -55, 81, 18, + -42, -6, -110, 97, -86, -91, 21, 9, + -14, -10, -14, 92, -39, 45, -37, 62, + -43, 34, 114, -14, 24, -25, 126, -92, + -8, 6, 0, -59, 34, 26, 18, -3, + -53, 24, -36, -125, -75, -21, -36, -37, + 75, -124, -50, -48, -117, -29, 38, -10, + 117, -84, 82, -126, -4, -123, 9, 27, + -91, -81, -21, -126, 50, -29, 47, 51, + 44, 51, -17, 125, -39, -56, 49, 44, + 127, -14, 101, -46, -3, -21, -52, -2, + 4, -35, 65, 25, 33, -47, 89, -36, + 34, 72, 70, 43, -72, -38, 90, 54, + 121, 13, 63, -18, 45, -41, 95, 23, + -4, 82, 14, 37, -10, 86, 47, 46, + -13, 15, -35, -123, -58, -7, 102, -79, + 40, 13, -6, -42, 75, -127, -109, 20, + 61, 20, -74, -113, -28, 96, -97, -67, + -41, -28, 118, 15, -10, 41, 26, 17, + -84, 34, 125, 7, 61, -126, -104, -124, + 125, -109, -1, 50, -44, 40, 62, -118, + 86, -50, -45, -58, 9, -126, -94, 26, + -45, -28, -77, 11, -54, -38, 70, -74, + -49, 39, -127, -21, -98, 39, 113, -109, + -123, -69, 47, -73, -111, -59, 1, 3, + -43, 25, 115, 102, 5, -83, -72, 122, + 84, -126, -42, -51, -13, 17, 19, -106, + -56, 38, -40, -75, -20, 56, -7, 32, + 58, 57, 39, -118, -23, -123, -82, 66, + -40, -115, -84, 29, 120, 28, 6, 12, + -66, 15, 58, -77, -26, -71, 80, 120, + 38, -13, 92, -113, 23, 61, -122, -39, + -5, -114, -30, -11, 0, 43, 19, -122, + 111, 73, 0, -51, 49, -36, -25, 101, + -41, -45, -33, -91, -52, 52, 65, 37, + -78, 93, 78, -73, 
9, -9, 9, -79, + -35, 25, 16, -29, -36, 9, -16, -40, + 33, 118, 7, 36, -19, 43, -46, -123, + -49, 18, -95, 101, 29, -35, -17, -7, + 60, 20, -75, -25, -35, -17, -117, -106, + -30, -98, -40, 68, 35, -15, 14, -101, + -36, -83, 2, 20, 53, 126, 8, 74, + 63, -57, -20, -13, 33, -9, -90, -75, + -2, -105, 86, 37, -48, 14, -40, -20, + 5, -127, -12, -30, -53, -14, 39, 93, + -24, 0, -20, 66, 42, 7, -39, 68, + 80, 25, 81, 4, 28, -22, -41, 56, + 51, -39, 2, 28, 45, -3, 62, 92, + 19, 73, 43, 45, 61, -53, 102, -35, + -34, 47, -85, -29, -12, 46, -31, -89, + 47, -25, 120, -8, 30, 63, -27, 56, + -67, -10, 71, 30, -21, 88, 86, -72, + 23, 16, -46, 126, 116, 45, -53, 15, + 90, 20, 10, -47, -35, 61, 28, -68, + 121, 108, -24, -8, -97, -13, -124, -27, + -21, -104, -22, -61, -118, -124, -41, -3, + -86, -31, -15, 71, -1, -42, -2, 45, + 40, -33, -115, -38, -33, -36, -4, 34, + 74, 98, -8, 107, -49, 11, 121, -29, + -49, -15, -37, 43, -80, -31, 19, 15, + 18, -71, 78, -108, -87, 34, -4, -30, + -98, 124, -115, -18, -116, 35, 10, -125 +}; + +static const rnn_weight noise_gru_bias[144] = { + 51, 32, 88, 60, -64, 92, 5, -36, + -49, 95, 102, -20, -1, 14, 8, 21, + -36, -68, 62, 46, 10, -60, -103, -16, + -30, -42, -43, 35, -4, 23, 97, 46, + -29, -16, 71, 52, -20, -23, 91, 16, + 69, -13, -23, 73, -17, 13, 30, 23, + 1, -27, 53, -24, -71, 45, 42, -49, + 28, -16, -20, 61, 40, -104, 54, -5, + 31, 10, -51, -37, -6, -85, 9, 51, + 16, 2, -26, 56, -39, -5, -27, -13, + -49, 30, 4, -64, -41, 45, -23, 14, + -19, -10, -55, -61, -35, 46, -31, -12, + -93, -28, 11, -6, -46, -12, 1, 15, + -37, -107, -50, 3, 54, -26, -86, 14, + 66, -54, -38, -70, -1, 69, 46, -12, + -128, -55, 0, 17, 48, -64, -24, 9, + -67, -107, -101, -43, -4, 80, -52, -90, + -23, -64, 31, 86, -50, 2, -38, 7 +}; + +static const GRULayer noise_gru = { + noise_gru_bias, + noise_gru_weights, + noise_gru_recurrent_weights, + 90, 48, ACTIVATION_RELU +}; + +static const rnn_weight denoise_gru_weights[32832] = { + -53, 26, -20, 28, -57, -30, -79, 58, + -68, 
103, 70, 4, 92, 14, -71, -3, + 26, 54, -9, -86, -8, 49, -60, 121, + -98, -4, 103, -38, -89, 28, 85, 30, + 33, -45, 42, 53, -37, -116, 72, -44, + 24, 1, 17, -26, -7, 9, 5, -71, + -97, -86, -118, -59, -27, -51, 26, 14, + -89, -63, 76, -16, -5, 11, 86, 121, + 67, 5, -20, -44, -43, -7, 18, 25, + -41, 14, 11, 24, -5, -58, 15, 10, + 89, 41, -100, 42, 41, 89, -89, 30, + -16, -35, -29, -119, -45, -1, -104, 24, + 9, 46, 21, 122, -5, -29, -64, 19, + 72, -60, 79, -52, -15, -37, 15, 38, + -13, -9, 28, 56, -12, 121, 23, -104, + -81, -7, -94, 17, 124, 49, 108, -41, + 118, -33, -106, 42, -27, 50, 57, -24, + 26, -107, 15, -103, -23, -13, -7, 23, + -40, -113, 14, -36, -59, 25, 44, 54, + 37, 40, -10, 59, 29, -16, -49, 18, + 4, -27, 15, 15, 37, 29, 50, -61, + 28, -16, -96, 48, -102, -97, 12, -31, + 43, 54, 9, -58, -42, 18, 10, 115, + -16, -29, -14, -66, -38, 69, -65, 50, + 7, 6, 11, 8, -53, -1, 25, 32, + 77, -5, -42, 58, -43, -63, -86, -36, + 18, 21, -49, 38, 72, 22, -25, 22, + -16, -95, 77, 16, 55, -8, 31, -39, + -117, 3, -38, -24, 27, 24, 25, -29, + 21, -44, -13, -61, 6, 5, 19, -20, + -11, 89, 40, -113, -86, 65, -84, -21, + 20, 10, 15, 28, -56, -14, -17, 50, + 90, -15, 29, -27, -54, -40, -32, -58, + -59, 4, -17, 58, 3, -85, -9, 64, + -38, -5, 104, -21, -30, -4, 79, -44, + -55, 8, 44, 10, -58, 31, 7, 25, + -49, -2, -30, -29, -118, 49, 68, 17, + -34, 0, 46, -109, 28, -14, -47, 88, + 13, -55, 24, 62, 44, -16, -54, -67, + 40, 69, 3, -46, 1, -43, 127, -30, + -78, 25, 106, 16, -30, 104, -12, 20, + -58, 35, -71, -79, 18, 7, -12, -28, + -49, 17, 98, -64, 25, 84, 32, 33, + -36, 13, -31, 29, -9, -97, -78, 4, + 16, 26, 86, 28, -29, 5, 23, -5, + -112, 83, -27, 9, -47, 43, 12, -7, + -11, 27, 92, -9, -14, -9, 69, 68, + 32, -25, 20, -70, 40, -20, -32, 36, + -94, 19, 5, 101, 11, -47, 44, 65, + -40, -56, -21, -43, 72, 7, 36, 14, + 1, -27, -18, -10, 18, 14, 58, -4, + 12, 3, -60, -6, 68, -9, -42, 10, + 40, -17, -17, 13, -16, 86, -62, -20, + -22, 24, 75, 61, -5, -32, -75, 0, + -41, -65, -19, -37, 21, 
-51, -54, -79, + 3, 34, 10, 75, 91, -59, 25, 21, + -5, 26, -71, 9, 27, 28, 49, -17, + -12, -44, -28, -51, -8, -18, -4, -12, + -45, -50, 32, 26, 65, -54, 29, 16, + -80, 16, 38, -94, -27, 33, 3, 51, + 17, 26, -10, -35, 18, 57, -6, 7, + -9, -35, 22, 10, -9, 5, -18, 7, + -3, 15, -82, -12, -18, 51, 6, 21, + -29, 58, 14, -16, 6, 29, -35, 59, + 10, 32, 54, -11, 25, -27, -13, 33, + -19, 11, -83, 61, 37, 42, 20, 21, + 13, 57, 40, -1, 8, 6, -8, 22, + 57, -18, -1, -7, 29, -40, -17, 50, + 8, -4, -4, 32, 26, 35, -60, -35, + -36, -17, -66, -63, 2, -124, 8, 26, + 17, 31, -84, -12, 15, 49, 6, 39, + 33, 28, 20, -48, 42, 8, -5, -6, + 26, -33, 12, 42, 91, 25, 5, -91, + -27, 23, 6, -63, -119, -99, -38, -32, + -97, -31, 15, -28, -18, -59, -24, -7, + -2, -7, -119, -34, -121, -121, 26, 74, + -85, -7, -22, -26, -43, 2, -32, 34, + 51, -82, -92, 14, 80, 19, -2, -39, + -81, -22, 48, -39, -21, -46, -49, -19, + -68, -111, 80, -45, -49, -86, 23, -2, + -55, -9, 2, 1, -55, -32, -32, 18, + -104, 88, 1, 25, -48, -100, 9, -89, + 9, -8, 70, 43, -122, 90, 45, -60, + -48, -22, -47, -100, 48, 22, 6, 26, + -16, 18, -37, 5, -102, -6, 3, -55, + -87, -117, -33, -106, -63, -21, 10, 74, + 16, -19, -14, -60, 15, -38, 90, -58, + -14, -2, -58, -45, 50, 30, -72, -26, + 94, -108, -79, -4, 53, 63, -80, 45, + 20, 35, 73, -9, -5, -83, 46, -6, + -68, 0, 0, -127, -17, -69, 25, -91, + -75, 3, -62, -31, -47, 16, 35, -39, + 20, -22, -63, -17, -3, -72, 48, 15, + 74, -33, -86, -63, -67, -44, -15, -32, + -42, -34, 37, 9, 18, -41, 19, -8, + 27, 41, -81, -120, -113, 7, 83, -16, + 17, -17, 22, -28, -73, -54, 1, 11, + 3, 33, 33, 16, 6, -20, -64, 11, + 2, -14, -69, -36, -42, -13, -115, -16, + -27, 9, 26, 7, -58, 6, -55, 52, + 23, -20, 74, -61, -13, -31, -63, 68, + -7, -20, 64, 0, 17, -10, 24, 15, + 47, 16, -23, -1, 9, -4, -69, 14, + -6, -7, 3, 9, -20, -29, -41, 64, + 23, -21, -1, 41, 48, -46, 11, -22, + -2, -7, -54, -86, -44, -44, 8, 31, + -47, -25, 33, 95, -35, -125, -8, -3, + 52, 19, 9, -27, 62, 32, -11, -7, + -60, -13, 110, 
-28, 118, -11, 45, 5, + -2, 22, 22, -61, 6, -72, -42, -12, + 0, -17, 13, -91, -29, -50, -54, -126, + -60, 119, 25, 24, 51, -32, -15, 77, + 14, 34, -33, -53, -53, 56, -72, -27, + 57, -11, -61, 32, 3, -18, -54, 19, + 70, 34, 17, -68, -65, -2, 48, 18, + 10, -72, 88, -15, -63, -38, 2, -20, + -4, 42, -88, 96, 60, 79, 6, 77, + 127, 9, 22, 21, -26, -55, -33, -69, + 39, -62, 33, 5, -29, -40, 29, -51, + 7, 18, 37, -14, 88, -25, 52, -6, + 17, -104, -10, -68, -42, -116, 83, 81, + -74, -9, -127, -55, -79, 26, -59, 37, + -27, 20, 36, -46, -67, 51, 10, -19, + 101, -28, 53, -62, 10, -6, -15, -13, + 5, 9, -61, -123, -33, 30, -39, -48, + 11, -126, 59, 21, -3, -121, 27, 46, + 13, -59, -3, -122, 37, -120, 9, -43, + -33, 24, -33, -42, 9, -34, 68, 16, + 6, -1, -49, -105, -9, -13, 41, -46, + 78, 7, -55, -38, 82, -26, 9, 24, + 43, -18, -91, -56, -34, 30, 28, 16, + 4, -2, -120, -42, -125, -16, 45, -29, + 42, -25, -1, -43, -12, 4, 39, 16, + -17, -12, 10, -37, -5, 8, 16, -12, + -14, -18, 19, 0, -30, -24, -101, 2, + -30, -6, -6, 3, -40, 0, 13, 52, + -31, 27, -56, 63, 35, 32, -64, 29, + 34, -41, 47, -24, 13, 52, -26, 29, + -2, -30, 25, -44, 36, 93, -54, 3, + -3, -17, 15, 5, 72, -61, 48, -5, + 63, -34, -26, -48, 2, -28, 69, -10, + 42, 50, -89, -123, 12, -8, -10, 50, + -8, -30, -50, -40, 13, -26, -41, -8, + -23, -9, 61, -117, 29, 50, -20, 20, + 31, 45, -82, 59, 12, -96, 8, 8, + -49, 28, -33, 14, -31, 9, -44, -60, + 49, 98, -4, -19, 16, 3, 75, 31, + -55, -75, 114, -12, -113, 3, -116, 80, + -12, 19, 9, 74, -28, -109, 9, 21, + 27, -15, 12, -37, 23, 81, 103, -19, + -40, -113, -42, -121, -104, 36, -29, -70, + -38, -75, 73, 16, 0, 30, -10, -82, + -35, 82, 112, -19, 43, 41, -65, 29, + -65, 7, -82, -14, -36, -32, -19, 76, + 9, 71, -61, 79, 71, 8, -82, -31, + 35, -33, 41, -46, 17, 23, 28, 33, + 86, -21, 66, 65, 68, -55, 37, -50, + -92, -17, -8, -39, -90, -47, 81, -7, + 57, -71, -2, 0, -18, -17, -18, -117, + -47, 60, -3, 19, -115, -17, -37, -64, + -7, -37, -36, -100, 2, 122, -68, 7, + -29, -46, -4, -67, 
63, 44, -9, 40, + -94, -123, -6, -126, 2, -84, 31, -46, + -23, 51, -37, 31, -27, -64, -60, -16, + -125, -38, 29, -80, 5, 54, 10, 96, + 46, -1, -34, 6, -37, -69, 17, -32, + 31, 49, -31, -42, -120, -54, -6, -7, + 43, -50, -15, 66, -124, 4, 21, 10, + -8, 31, 11, -35, -8, 81, 26, 9, + -14, -44, -87, -66, 9, -82, -45, -39, + -3, -17, -21, 61, 17, -4, -4, 15, + 40, -23, 1, -44, -30, -24, -51, -56, + -21, 19, -45, -24, -22, 37, -50, -3, + -6, -17, -19, -54, -65, -51, -17, 15, + 40, 19, -26, -61, -26, 0, 8, -17, + -26, 8, -5, 3, 22, -16, 7, -54, + -25, -15, 18, 47, -31, 6, -34, -22, + -25, -16, 31, -8, 49, -38, -33, 30, + -39, 21, 4, -26, -36, -48, -28, 24, + 0, 32, 38, -21, 97, -10, 45, 10, + -31, -32, -39, -57, -52, 6, 11, 5, + 7, -13, 25, 0, 54, 14, -32, -4, + 59, 17, -30, -10, 23, 21, 29, 8, + -48, 39, 56, 18, -18, 10, 26, -16, + 90, 8, 19, -55, -36, 49, -28, 39, + -28, -127, 123, 11, 8, 83, 4, 26, + -67, 21, 31, -43, -33, 25, -32, 92, + 44, 9, 19, 108, 39, 53, -15, 94, + 13, 28, 36, 38, 28, -6, -13, 14, + -99, -17, -17, 2, -15, -3, 10, 19, + 56, -15, 124, 21, 39, -23, 3, 61, + 59, -29, 48, -2, -1, 51, -11, 41, + -42, 76, -108, -50, -15, -43, 37, -38, + 61, -47, -42, -62, 3, -6, -66, 18, + -48, -25, 19, 66, 1, 78, -26, -127, + 26, 0, 10, 21, -25, -12, 3, 11, + 126, -126, 1, 3, 17, 123, -9, -29, + 96, 125, -2, 32, 21, 24, 52, 8, + 42, -53, 42, -12, -18, 23, -11, -125, + 43, 47, 40, -68, 3, -5, -14, -43, + -49, 23, -34, 35, 35, -33, 58, -126, + 26, 42, 34, 8, -39, 20, 72, -2, + -61, -26, 32, 1, 31, 36, -107, 19, + -6, -128, -41, -65, 15, 21, 36, 6, + 74, 27, 29, 6, -71, 46, -30, 37, + 33, 43, 105, 31, -26, -59, -19, 20, + 37, 30, -71, -9, 92, 1, -21, -11, + 50, 81, 22, 62, 61, 23, -64, 77, + -22, -31, -2, 1, 17, 9, -7, 31, + 20, 17, -12, 36, 42, 0, 71, 7, + -52, -101, 1, 2, 7, 45, 38, -103, + -24, 32, 4, 61, 60, 36, -33, 40, + 60, 79, 15, -85, 74, -71, 26, -11, + 46, 8, 40, 53, 26, 62, 19, -5, + 39, 68, -57, -59, 67, 18, -20, -1, + -45, 9, 41, -33, -5, 46, 12, 14, + 3, 
-10, -12, 42, -18, 118, 65, -32, + -2, -12, 112, -36, 18, -43, 38, 6, + -41, -22, 20, -31, -5, -25, -31, -54, + -35, -10, -5, -8, -44, -124, -122, 37, + -113, -24, -29, -18, -7, -69, 63, 3, + 33, 50, -47, 114, 6, -15, 37, -38, + 21, 23, -37, -44, -24, 48, 104, 47, + -8, -80, 37, -28, 76, 4, 3, 36, + 19, 16, -45, -59, -17, 16, 5, -18, + -36, -26, -125, -24, 12, 29, -34, -6, + -76, 31, 50, -33, -38, 12, -14, -63, + 24, 29, 116, 5, 26, 23, 31, -8, + 91, 17, 0, 2, 125, -90, 33, 8, + -67, -99, -41, -64, -73, -15, -23, 7, + 7, 10, -97, -127, -14, -77, -127, -40, + -45, -23, -12, 14, -3, -7, 12, 0, + -2, -80, -38, 22, -100, -60, -121, -11, + -1, 119, -2, 40, -20, -125, 63, 10, + -32, -110, -15, -103, -7, -7, 87, -32, + -7, -35, -33, -33, -54, -1, -12, -43, + 15, 6, 12, 48, -1, -14, -100, -20, + 95, 31, 41, -50, 18, -36, -9, 16, + -10, -6, -28, -17, -55, -79, -9, -3, + 46, 29, -31, 44, -84, -23, -37, -40, + 18, -8, 21, -33, 30, -44, -14, -13, + -7, -4, -116, -19, 45, -60, 2, 26, + 13, -4, -4, -6, -15, 20, -122, 26, + -38, -46, 4, -101, -124, -12, 0, 46, + 5, -20, -10, 35, -49, -46, -42, 19, + 6, -21, -127, 108, -4, -7, -20, -25, + 15, 67, -38, 12, 27, -28, 21, -19, + -11, 18, 5, -3, -23, 17, 32, -10, + -25, 19, -90, 27, -6, 9, -24, 19, + -123, -17, 50, -85, -16, -98, -8, -9, + 27, 1, -24, -37, -27, -28, 5, -26, + 26, -12, 1, 14, -16, -2, -12, 31, + 0, -36, -9, 19, 13, -1, 9, 9, + -16, -51, -15, -34, -61, -3, -15, -30, + -17, 2, -8, 30, -39, -27, 23, 46, + 17, -9, -29, 56, -91, -91, -58, 23, + -15, 19, -6, -30, -10, -31, -34, -14, + -41, -30, -78, 12, 16, 51, -7, -38, + 0, -19, -16, -54, 4, 3, -1, -16, + 17, 3, 7, -20, -14, -38, 30, -23, + 27, 12, 84, 60, 19, -16, -55, -15, + 5, 32, 33, 1, -15, -3, -35, -121, + 52, 75, 31, 30, 0, -20, -26, 32, + 38, -14, 33, 81, -17, -4, -16, -84, + -37, -29, 7, -14, 5, -30, -52, 27, + 29, -119, -48, 0, -101, -28, -11, -69, + -56, -34, 48, 85, 22, 35, -18, -90, + 53, 10, 8, 13, -60, 52, -54, 10, + 99, -95, 32, -60, 0, 124, -19, -1, + 4, 65, 
-48, -2, -18, -28, 0, -58, + 43, -63, 40, -73, 0, 32, -26, -36, + -25, 16, -37, 7, -70, -50, 41, 0, + -18, 10, 21, -10, 24, 35, -35, -38, + 48, 16, 26, -33, 94, 3, 27, -22, + -17, 69, 19, 21, -57, 78, -5, 8, + 17, 5, 3, -39, -4, 19, -26, 14, + 7, -1, 2, -31, -22, -8, -2, -9, + -48, -51, 71, 10, 20, 21, -1, 11, + 26, 13, 1, -20, 60, 18, -38, -9, + -30, 30, -20, -30, 45, -39, 21, -6, + 18, -16, 5, 42, -41, 58, 41, 9, + 7, 38, 11, 41, 21, 35, -53, -49, + -43, -3, -30, 38, 1, -5, -34, -52, + -10, 22, -11, -20, 22, 13, -54, 20, + -19, 19, -24, -38, 24, -6, 67, 92, + -6, 46, 60, -25, 5, -68, 0, -36, + 6, 14, -33, -40, -33, 38, -26, 1, + 20, -23, -54, -13, 4, 25, -30, -7, + -51, 14, -37, -53, -11, -7, 5, 10, + 7, 0, -29, 8, -27, 1, -14, -37, + 60, -7, -41, -29, -31, -13, 22, -13, + 52, 8, 65, 48, -10, -74, 5, -4, + 31, 30, -25, -9, -13, 24, -12, -22, + 64, 13, 18, 28, -22, 46, -8, 39, + -48, -14, -17, -6, -40, 16, -5, -29, + -2, -46, -47, -25, -19, 52, -1, -22, + 21, -66, -5, 29, -3, -28, 17, 21, + 18, 11, 67, -26, 31, -8, -1, 36, + 46, -9, 14, 39, 18, 9, 23, 38, + 56, -25, 46, 70, -22, -11, -19, 7, + -11, 49, -4, -25, -55, -43, -28, 66, + 79, 16, 2, 17, -19, 20, -20, -2, + -26, 41, 27, 19, -11, 67, 33, -49, + 69, 16, 0, -5, 3, 47, -2, -48, + 56, -77, -22, -3, 21, 84, 67, -17, + 10, 21, 19, 23, -4, 7, -45, 40, + -21, -64, 19, -23, -15, 68, -9, 7, + -3, 38, 14, 12, -73, -6, -28, 36, + 15, 20, 15, 28, 40, 42, 100, 47, + 37, -43, -31, -80, 39, 12, 20, -14, + 19, 17, -117, 24, -36, 16, 34, 31, + -22, -48, 4, 4, 27, 33, 39, 38, + -12, 34, 13, 37, -9, 40, -23, -11, + 75, -6, -14, -30, -61, 20, 20, -38, + -16, -6, 34, -14, -2, 4, 11, 9, + -1, -17, -24, -112, 59, 20, 25, -17, + -10, 9, -18, 11, -7, 16, -3, 17, + 2, 66, 38, 12, 9, 3, -17, -10, + 35, 22, -38, -23, 40, 19, -51, -32, + 18, 17, 2, 21, 10, 16, -16, 0, + 13, 6, -8, 28, 47, -6, -25, 43, + 17, -4, -13, -94, -1, 18, 24, -1, + 14, 15, 3, 1, 41, 36, 1, -16, + -10, -50, 27, -50, 18, 16, -6, 6, + -14, 38, -49, 73, 9, 57, 31, 
-41, + 58, -45, -11, 20, 15, -61, 27, 4, + -3, 8, 56, -101, 29, 21, 27, 8, + 73, 31, -16, -87, -17, -87, 4, -22, + -45, 59, 3, 7, 12, 13, 0, 17, + 23, 94, -50, 3, -29, 55, -124, -10, + -32, 41, -38, 25, 2, 18, 34, 31, + 34, 15, -82, 61, -13, 32, -17, 20, + -5, -19, 24, 14, 53, 38, 72, 41, + -47, 24, 43, 7, 82, -7, -7, 38, + -1, -28, 13, -7, 8, -7, 16, -26, + 9, 12, 17, -48, -23, 33, 64, -8, + 0, 71, -37, 67, -85, 20, 19, 25, + 18, 15, -6, -34, 97, 17, 8, 30, + 11, -23, -12, 41, 6, 62, 21, 4, + 18, -31, 71, -70, 9, 2, -32, 21, + -11, 44, 47, -11, -34, -5, 3, -5, + 11, 33, 59, 6, 6, 12, 49, 6, + 43, 33, 3, -26, 34, 53, -11, -29, + -1, -10, 39, 82, 116, -25, -38, 28, + -18, 26, -59, -47, 21, 6, -31, 44, + -81, -72, 38, -104, 39, 98, 4, -68, + -4, -36, -49, -19, -18, -52, 37, 5, + -56, -44, 21, -5, 69, 42, 10, -26, + -45, 28, -6, -22, -37, 39, 44, 5, + -29, 54, -1, 90, -81, 118, -54, 20, + 28, 7, 77, 0, 34, -2, -25, 60, + -64, 8, -49, -31, -46, -26, -18, 12, + 4, 22, 5, 42, -6, 6, 59, 34, + 11, 10, -62, 14, 4, -11, -15, -47, + -1, -15, -34, 38, 26, -33, -18, -12, + 2, 6, 9, -15, 43, 13, 35, 25, + -53, 9, 10, 47, -27, 85, -25, 12, + -12, 5, 12, -125, 12, -89, -4, 55, + 20, 17, 48, 22, -45, -7, 5, 65, + 22, 13, 2, 22, 38, 12, 35, -17, + 32, -40, -49, -10, -100, -97, 13, 17, + -29, -24, 32, -77, -4, -81, -83, 49, + -99, 41, -34, -58, -10, -69, -2, 33, + -120, -81, -36, -31, -78, -27, 83, 15, + -49, -51, -19, 83, 29, -63, 25, -15, + -84, 21, 30, -88, 45, -11, -44, -73, + -86, 22, 11, -126, 66, -41, 16, -5, + -59, 37, 62, -29, -5, -128, 116, 102, + -44, -30, -43, -73, -121, 86, -39, -14, + -127, -7, 63, -24, 12, -47, 4, 24, + -58, 31, -33, -34, -98, 90, 119, -56, + 4, -16, -42, -35, 48, -24, -85, 12, + 73, -48, 52, -22, -109, -26, -29, 31, + 105, -5, -39, -60, -81, 24, -114, 46, + -51, 56, -5, -96, 58, 97, -26, 0, + 84, 51, -85, -39, -20, -10, -11, -6, + -55, -47, -65, -50, -63, 77, -54, 3, + 41, -127, 44, -63, 61, 17, -44, 29, + -22, 1, 2, -83, 36, -43, -7, -30, + -19, 
-37, 28, 7, -63, -73, 81, 51, + -37, -13, 18, -123, -7, 33, 75, 43, + 85, 40, -56, 10, 91, -45, 7, -60, + 14, -26, -13, -118, -33, 38, -27, 63, + 7, 36, -30, -52, -115, 113, -42, 39, + 3, 3, 26, 53, -109, -103, -23, 1, + 121, 36, 60, -62, -124, 39, -60, -16, + -22, -12, -86, -13, 27, -9, -3, -22, + 60, 9, -16, 100, 51, -33, -34, -57, + -9, -29, -1, -29, -42, 2, -19, -11, + -1, -8, 93, -46, -43, -42, 9, 9, + 15, -23, -11, 14, -20, -73, -59, 17, + 7, -26, -57, -6, 54, -43, -70, 82, + -12, -1, -99, 25, 4, -24, 5, 34, + 39, 17, -89, 10, 32, -93, 18, -9, + -20, 8, 41, -37, -25, -19, 8, 28, + -72, 60, 66, -80, -52, -4, -19, 14, + -25, -16, -49, 15, -91, 84, 34, 47, + -123, 44, 18, 20, -77, 0, 9, -66, + 20, -28, 13, -99, 24, 48, -1, 32, + -42, 33, 47, 20, 85, 71, 57, -27, + -53, -73, 60, 119, -81, -51, 4, 38, + 38, -1, 5, 25, 38, 36, 92, 125, + 122, 29, -89, -109, 65, 53, -17, 39, + 121, -19, 71, -3, -97, 29, 55, 102, + 8, -53, 101, 40, 78, -56, -77, 29, + 81, 60, 117, -4, -45, -3, 100, 87, + -122, -12, -80, 100, -116, -18, 33, 121, + 112, 21, 35, 5, 47, -51, 10, 46, + -95, -42, -28, 78, -39, -32, -114, -3, + -53, -104, -94, -1, -50, 27, -38, 20, + -30, -59, 111, -63, 21, -18, -31, 11, + 17, 46, 21, 30, -22, 59, 13, 62, + -17, 42, -75, -42, 23, 30, 33, -39, + -49, -110, -46, 56, 21, 23, 24, -80, + -16, -69, 11, 80, -42, -27, -44, -46, + 16, -24, -7, 35, 61, 43, 16, 24, + 9, 43, 28, -88, 53, 26, -80, -128, + 73, -34, -44, 10, 38, -114, 80, 2, + 72, -1, 56, -52, 79, 9, -40, -103, + -35, 2, -40, 0, 49, -54, 110, 8, + 3, -8, 17, -38, 38, -52, 45, -4, + -22, -82, 28, 79, 18, -56, -45, -13, + -57, -7, 14, -16, 8, -10, 44, -58, + 22, 120, -30, 10, 22, -14, -104, 58, + 7, 28, -30, 15, 7, -19, 39, 90, + 51, 39, -128, 46, -34, -55, -37, -65, + -2, 29, 79, 63, 39, -38, -63, -30, + 14, 4, 30, 74, 62, 17, -21, 40, + 107, -62, 18, -85, 60, 2, -12, -67, + 36, -65, -67, 23, 25, 14, -22, 12, + 68, 10, 59, 28, 53, -16, 34, -116, + -15, 12, 66, 76, 111, -33, -58, -7, + 21, -11, -50, 94, 
-55, -49, -102, 9, + -3, 17, -5, 83, 22, 29, 73, -17, + 22, 1, -37, -90, 17, 14, -22, -39, + 46, 13, 2, 77, 36, -25, 66, 17, + 17, 25, 11, 9, 25, 15, 51, -24, + 2, -53, -9, 24, 26, 14, -18, -2, + 15, -34, 35, -15, 59, -36, -10, -7, + -26, -123, -10, -10, 69, -33, 5, 22, + 3, 40, 0, 89, 29, 28, -37, -32, + 23, -14, -25, -11, -22, 18, 36, 62, + 21, -61, 26, 15, -68, 73, 59, -34, + 1, 21, 17, 124, -16, 40, 7, -31, + -43, 80, -5, -14, -3, 27, -42, -70, + -31, -54, -25, 46, 112, -35, 35, -32, + -73, -33, 69, -13, 40, -2, 7, -17, + 60, -3, -32, 41, 25, 108, 7, -41, + 55, -76, -27, -4, -7, -19, 33, -2, + 30, 17, -3, 43, 21, 9, 33, -36, + 15, -29, 46, -36, -15, 81, 27, -14, + 9, 10, 42, -6, -22, -49, 43, 5, + 12, 10, 115, -13, 63, 43, 37, 50, + 5, 7, -60, -7, 15, 0, 7, -48, + -8, 51, -61, -11, -12, 110, 83, -17, + 6, -5, 19, 35, 41, -16, -5, -74, + -8, -22, -30, 17, -28, -19, 39, -15, + 6, -32, -44, -26, -38, -54, -22, -7, + -55, 4, -33, -11, -3, 8, 14, -4, + -8, 21, 6, -77, 27, 13, -5, -5, + 43, 55, 19, 3, -18, 10, -17, -6, + 39, 20, -5, -28, 25, -33, 16, 7, + 10, -43, -3, -7, 38, -26, 14, 18, + 26, 27, -17, -28, -16, -24, -13, 6, + 4, 12, -4, 20, -13, -2, -23, 79, + -6, -3, -8, -14, -47, 41, -18, 27, + 27, -3, -49, -21, -1, -32, 25, 15, + -32, -25, -1, -121, -24, -11, 23, -7, + -12, 15, -48, 20, -14, 2, -27, -26, + 24, -16, 65, -17, 26, 72, -33, 9, + -63, -63, -8, -63, -68, -22, 40, 6, + 68, 24, 20, -44, 45, -15, -27, 4, + 8, -37, 2, -1, 16, 16, -17, -2, + 16, 93, -19, 89, -2, 65, 27, -6, + 21, 44, 122, 45, 3, -2, 24, 44, + 42, -30, 6, -18, 27, -21, -8, 24, + -14, 54, 6, 21, 56, 13, 55, 25, + 32, 41, 62, -100, -37, 14, 15, 50, + -23, -22, 119, 10, 18, 61, 39, 45, + 48, 110, 23, 73, -31, -19, -16, 5, + 72, 41, -23, -47, 38, -14, 8, 53, + -7, 8, 41, -5, 65, 39, -12, 31, + 34, -10, -15, 4, 21, -42, -39, -23, + 112, -35, 93, -28, -11, -37, 40, 49, + 54, -29, -39, 93, -11, 32, 48, -1, + -5, 93, 80, 0, 1, -37, 5, -17, + 20, 16, 11, 13, 19, -10, -41, -29, + -19, 10, 44, -48, 
62, 4, -37, -13, + -6, -28, 28, -42, 22, -14, -37, -18, + 2, 19, 3, -50, -12, -16, -6, -7, + 45, -43, -34, -88, -29, -95, 11, 33, + -31, -10, -53, -13, -7, -19, 30, -24, + 10, 17, -5, 6, -22, 36, 45, -1, + -46, 10, 19, 8, 5, -65, 4, 27, + 3, 11, -37, -45, 15, -3, -39, 24, + 7, 35, 14, -35, -8, -14, -18, 49, + 7, -37, -4, -14, 10, -2, -9, 4, + 35, -6, -19, 17, 12, 23, 39, 22, + 24, -63, -25, 1, 21, 38, 24, 7, + 13, 6, 10, -9, 29, -19, 8, 45, + 14, -21, 12, -22, -26, 2, 20, 60, + 29, 67, -115, 8, -2, -32, 8, 42, + -13, -10, 5, 19, 0, -34, 2, 6, + 42, 5, 33, -63, 13, 27, -19, 23, + 23, 16, -38, -11, 21, 116, 3, -53, + -96, -88, 119, 56, -82, 4, -14, 4, + 21, -3, -17, 103, 5, 38, 37, -33, + 73, -1, -12, 7, -53, -19, -12, -41, + 32, 18, -26, 65, -24, -113, 91, -35, + 23, 47, -41, 51, -40, -48, 93, 22, + -51, 13, 51, -49, -37, -14, 19, 37, + 22, -48, -55, 15, 72, -12, 30, -20, + 51, -15, 56, 69, 30, -39, -37, 79, + 101, 10, 3, 18, 40, 21, 3, -14, + -26, 4, 30, -18, -2, -20, -24, -19, + 53, 43, 34, -26, 44, 13, -24, -56, + 122, 13, -1, 96, -58, -40, -24, 10, + -85, -127, 26, -57, 35, -5, -4, 0, + -75, -33, 19, 66, 26, -38, -89, -40, + -7, 8, -25, 85, -48, 17, 44, -21, + 54, -59, 37, -39, 57, 101, 64, 12, + 39, 35, -7, -91, -22, -21, 125, -20, + 55, -105, -26, -22, -17, 48, 124, -38, + -17, 15, -14, 41, -42, -25, -35, -9, + -34, -50, -19, 62, 15, -8, -16, 58, + 58, 103, -38, 2, -51, -53, 45, -37, + 32, 30, -33, 51, 82, -51, 1, 4, + 65, -19, -12, 45, -71, -8, 40, -19, + -6, -14, 15, 29, -5, 25, 3, 3, + -39, -18, -42, 21, 32, 17, -10, 21, + 32, 32, -7, -27, 13, -35, 68, 18, + -3, -37, 36, -67, -13, 5, -18, -15, + 34, 6, -54, -25, 65, 55, -9, -1, + 8, 22, -69, 26, 16, 42, 9, -23, + -3, -102, -46, -33, -19, -13, 49, -29, + 23, -9, 12, 13, 15, 30, -12, 30, + -4, -5, -1, 12, 27, -40, -19, -42, + 29, 18, -2, 29, 29, -73, -6, -31, + -1, 23, -10, -31, 40, 14, -4, 0, + -80, 5, -66, -9, -7, -13, 81, -8, + 59, 48, -56, 7, 22, 71, -13, 24, + 27, -28, 31, -69, 40, 52, 37, 73, + 33, -38, 
-76, -31, 26, 23, 29, 13, + 91, -88, 95, -14, 3, 31, 85, -22, + 25, 59, -82, 31, -93, -2, 67, -9, + -19, -6, -119, 23, -15, 70, -7, 31, + -23, 98, 33, -6, -20, 33, -31, -27, + 47, -107, -76, -51, -8, -108, 4, 1, + -50, -35, 62, 9, -8, 122, 28, 12, + 45, 56, 49, -3, 14, -80, 34, 64, + -11, 40, 24, 49, -34, -32, 22, -17, + 20, -28, 7, 43, -24, 1, -71, -21, + 12, 45, -8, -34, 5, 6, -57, 93, + 21, 40, 42, 19, -6, 51, 23, 12, + -106, 13, 25, -9, 21, 26, 75, -58, + 95, 21, 15, -48, -11, 63, 61, 19, + 37, -80, -1, 82, 46, 78, 47, -65, + -7, 9, 34, 50, 2, 25, 25, 17, + 63, -58, -14, -8, 32, 17, -47, 19, + 0, 22, -50, 10, -55, 8, -19, 27, + -22, -9, -9, 3, -66, 47, 111, 91, + -54, -21, -41, 24, -9, -4, 87, 11, + -22, 75, -59, 8, -14, 39, 71, -31, + -9, -5, -113, -58, 36, -6, 15, 10, + -38, 2, -39, 23, 12, 58, 24, -15, + -50, 3, 9, 24, -3, 8, 22, 7, + -16, 22, 33, -7, -26, -29, 23, 44, + 3, 47, -60, -97, 33, -17, 36, 57, + 50, 21, -2, -15, 66, 82, 61, 22, + 16, 5, 110, 20, 35, 13, 2, -27, + 18, -82, -40, 46, 23, -6, 4, 40, + 45, 3, -34, 19, 46, -10, 47, 22, + -5, 3, 15, 29, 9, 30, -43, 9, + -73, 20, -16, -11, 7, 43, 44, -1, + 49, -12, -27, -16, 79, 24, 24, 37, + -10, 41, 31, -93, 46, 20, 18, 33, + 19, -103, 36, 41, 54, -10, 50, -5, + 74, -17, -43, -14, -48, -60, 54, 3, + -25, 28, 41, -53, -30, 19, 36, 15, + -1, 8, -36, -37, 28, 35, 13, -33, + 9, 26, 1, 4, -22, -35, -19, 48, + -8, 15, -23, -44, 35, -70, -34, -37, + -35, -15, -19, 15, 17, -31, -58, 69, + 11, -29, -55, 21, -39, 1, 17, 0, + 46, 69, 25, 60, 26, -46, 49, -99, + -71, 49, -2, -24, -25, -37, -1, -95, + 44, -15, -49, 49, -3, 31, 36, 4, + 67, -16, -17, -45, -21, 5, 7, -5, + -10, 37, -28, 71, 1, -53, -10, -1, + 20, -18, -39, 8, -29, 9, -31, -10, + -4, -15, 17, 19, 46, -83, 57, -71, + 97, 119, 1, -45, 6, 79, 40, 7, + 44, -126, -19, -40, 23, 74, 54, -3, + 14, 62, 74, -32, -19, 34, -5, -106, + 12, -13, 46, -32, 40, 96, -8, -40, + -35, 20, -7, 55, -18, -41, -28, 1, + -36, -42, -71, -20, 1, 19, -72, -52, + -18, 0, 3, -73, 8, 
18, 7, -18, + 54, 21, -12, 13, -14, -62, 4, -38, + 52, -30, -40, -6, 48, 29, 30, 25, + 44, -48, -6, -24, 23, -42, -3, -20, + -50, -5, -31, -13, -14, 32, -11, 16, + 46, 10, -81, -30, 3, -22, 24, 0, + 28, 54, 25, -78, 60, 22, -12, 4, + -4, -47, -1, 4, -19, 33, 60, 27, + 17, 16, -14, -88, 27, -87, 20, -31, + -3, -30, -12, 6, 6, 77, 13, -10, + 7, -7, -5, -62, 22, 6, 39, 26, + 25, 30, -3, -7, 11, 6, -65, -26, + -44, 30, 94, -68, 21, 48, 19, -14, + 32, 32, -6, 36, -7, 17, -42, -2, + -2, 13, 18, 71, 1, 8, -9, 23, + -20, 16, 54, -47, 73, 38, 17, -11, + -83, 91, -11, 14, 79, 93, -65, 114, + -10, 21, 13, 8, -20, -23, -14, 45, + 48, 23, 1, 2, -15, -18, 39, 56, + 4, 63, 13, 21, 86, -33, 30, 12, + -31, 92, -88, 50, -4, 14, 7, 48, + 23, 70, 123, -5, 42, 2, 16, -19, + 127, -16, 95, -26, -72, 53, 37, 5, + 46, -52, 42, 17, 30, -12, 65, 28, + 22, -13, 125, -9, -7, -26, 17, -9, + -64, -84, 16, 8, 105, -30, 9, -58, + 53, 126, 29, 39, 19, 47, 40, 14, + -22, -3, 19, -53, -105, -22, 11, -15, + 34, -64, -46, -18, -2, -26, -45, -22, + -43, 1, -8, 15, -28, 64, 48, 59, + 18, -4, -8, 31, -45, 53, -109, -10, + -3, -19, -3, 18, -20, -48, 34, 77, + -19, 117, -9, -43, 93, -30, -113, 11, + -15, -19, -6, 17, 24, -48, 74, -29, + 67, -15, 45, -38, -9, 30, -18, 22, + 60, 8, 72, 10, -1, -2, -32, -52, + 47, -17, -35, -3, -2, -18, 36, 50, + 18, -1, -19, -115, 8, 7, 28, 45, + -41, -25, -70, -6, -59, -68, -13, -76, + 31, 22, 31, 7, 31, 12, 47, -19, + -28, -58, 21, -37, -23, -35, 59, -35, + -32, -25, -12, 2, -12, 23, 9, 39, + -30, -30, -1, -37, 2, -19, 35, 18, + 18, 27, -50, 24, -38, -20, 3, -11, + 13, -3, -30, 67, 8, 63, 7, -10, + 2, 3, 75, -7, 31, -2, -10, -10, + 15, -11, 48, -37, -4, 13, 25, -12, + 72, -45, -37, -7, -49, 15, -36, 17, + -6, 1, -52, -3, -23, 29, 49, 18, + 62, 0, 25, 27, -13, -4, 9, 6, + 10, 40, 10, 46, 83, 68, 15, 9, + 67, 71, -122, -15, 73, 44, -125, -44, + 11, 12, -28, -56, 15, 3, 6, -7, + 50, 35, -2, 125, -60, 60, 76, 55, + 103, -42, 50, 19, -10, -104, -11, -1, + -1, -35, 3, -34, 27, 
23, 24, -45, + -2, 19, -17, 56, -53, 52, 18, -53, + -121, 0, -10, -65, -47, 22, 81, -58, + 33, 5, -84, -9, -10, -35, -40, 47, + -24, -73, 15, 49, -48, -127, -26, 11, + 118, -75, -16, -69, -32, -74, -46, 127, + 111, -6, 26, 25, 53, -10, -32, 33, + -3, 30, 54, 84, -14, -32, -73, -76, + 77, 88, -9, -15, -6, -102, -59, 39, + 13, 54, -64, 10, 67, 62, 7, -18, + -21, 73, 13, 53, -27, 34, 11, -81, + 122, -21, 65, -27, 1, 104, 22, -96, + 68, -111, 25, 4, -2, 66, 106, -21, + 8, 49, 19, 10, 10, 5, -6, 45, + 32, -116, -15, -68, 38, -1, -3, 13, + -22, 18, -20, -10, -119, -20, -4, 64, + -23, 56, 17, -28, -12, 16, 4, 45, + 67, -76, -6, -60, 8, -59, -10, -65, + -29, 47, -68, -38, -76, -59, -16, 90, + -10, 1, -2, 72, -21, -20, -36, 50, + -23, 4, -9, -14, -14, 30, 5, 6, + -99, -49, -19, 16, 21, -2, 65, -15, + -39, -5, 25, 4, 17, -28, -8, -4, + -46, 31, 42, -17, -8, -29, -14, 45, + 38, -32, -28, 24, 40, 28, -5, -22, + -32, 56, -6, 11, 5, 48, 5, -57, + -4, 11, -12, 32, 12, 23, 0, -10, + 2, 10, -1, -17, 56, -25, 34, 12, + -37, 12, -4, -38, 9, -93, 36, 28, + 32, 83, 48, -48, -21, 3, -18, -69, + 45, 0, 36, 9, -37, 29, -6, -16, + 36, -10, -29, 16, -39, -12, 23, -14, + 2, -27, 77, -39, -77, -57, 23, 40, + 32, 19, -13, 0, -18, 39, 33, 26, + -4, 6, 59, -121, -7, 24, 2, -53, + 61, 4, 6, -4, -6, -38, 5, -12, + 55, -9, -8, 2, -13, -14, -31, -18, + 25, -46, 81, 26, 21, -25, -47, 37, + 25, 8, -36, 19, -7, -42, 8, -19, + -13, 17, 45, -3, -12, -7, 5, 11, + -31, 49, 10, 34, 2, 64, -17, 75, + -61, 24, -1, 5, 22, 21, 24, -46, + -35, -15, 16, 14, 46, -12, -15, -60, + 6, -14, -15, 48, -7, 22, -13, 33, + 8, -25, -15, 0, 10, 33, 9, -36, + 71, -115, 3, -29, -35, -73, -46, -68, + -17, -38, -24, -11, 31, -12, -3, -52, + 52, -36, -6, -54, -18, -14, 13, -54, + 13, -6, -4, 25, -22, 49, 9, 1, + 18, 15, -4, -16, 10, -15, -32, 15, + -2, -67, 55, -63, -4, 48, -44, -14, + -43, -6, -4, -5, -32, 1, 11, 23, + -20, 36, 40, -28, 22, 22, -40, 2, + -4, 11, -14, -7, -25, -17, 9, 41, + -16, 10, 5, 10, -9, 9, 57, 25, + 0, 1, 
-20, 8, 13, 127, -24, 17, + 9, -24, -2, -6, -40, -2, -5, 4, + -49, -34, -25, -5, -2, 25, -99, 11, + -10, -13, -16, 8, -6, 0, -23, 21, + -42, 12, -22, -55, 18, -9, -13, -17, + -20, -10, 13, 23, 19, 10, 14, 5, + 3, 17, -7, -4, 9, 126, 6, -29, + 12, -19, -29, -38, 8, 4, 5, 22, + 19, -20, 0, 98, -1, 12, -17, 12, + 0, -15, 38, -25, -11, -17, -46, 48, + 3, -19, 22, -89, 13, -37, 20, -26, + -2, 8, 23, -13, -3, -7, 28, 21, + -16, -3, -10, 12, -6, -22, 27, -14, + 49, -16, 0, 54, 48, 67, -34, -37, + -56, 78, -24, 55, -23, -10, 30, -33, + 92, 16, -61, -1, -30, -24, 38, -66, + 79, 69, 16, 27, 19, 12, 48, 31, + 40, -47, -106, -118, -51, 96, 30, -12, + -48, -15, 74, 117, 106, 24, 18, -39, + 1, 38, 86, -28, 2, 17, 126, 22, + 17, -121, -65, -9, -17, 47, 61, -7, + 74, -107, 71, 68, 71, 95, 83, 127, + 122, 53, -8, -2, -88, -49, -95, -23, + -36, 92, 58, -33, 41, -26, -61, 43, + -104, 63, 41, 41, -36, -55, -40, 36, + -26, 35, -19, -27, 5, 64, 27, -40, + 46, 19, -57, 48, -19, 33, 127, -86, + 28, 4, 83, 82, -15, 111, -30, 47, + -49, 34, 121, 30, -82, 24, 111, 73, + 44, -13, 39, 79, 90, 77, 17, 13, + -92, 126, -32, 50, 123, 32, -66, 55, + 21, 13, 24, 106, 71, -79, -19, 83, + 25, 79, 27, 68, 33, 49, -42, -50, + 126, 20, 8, 15, -25, -72, 10, 126, + 40, -56, 23, -6, 121, 38, 33, 104, + -50, 38, -3, 123, 26, 31, 127, -67, + 30, -123, -62, 27, -7, 28, -20, -39, + -28, 36, -43, 44, 41, 61, 38, -17, + -17, 4, 73, 49, -19, 87, 109, 92, + 49, 60, 66, 38, -9, -32, 16, -13, + 57, 21, -6, -53, -39, -84, 126, 79, + 21, -46, -37, 56, 52, 102, 62, 36, + 26, 83, 74, 77, 7, -124, -57, -22, + -6, -115, 18, 46, 122, 14, 7, 0, + 45, 29, 1, 57, 8, -14, 58, -17, + 49, 24, -66, 28, -15, -8, 82, -71, + 13, 10, 91, -46, -24, -36, 18, -9, + 59, -19, 10, -60, -27, -24, 28, 3, + 127, 48, -2, -57, 77, -3, -7, -11, + -31, 31, -31, 78, 12, -127, -17, 15, + 22, 51, -61, -59, -38, 87, 10, -23, + 21, 10, 58, -58, -37, -14, -52, -4, + -48, -1, 10, 0, -44, -56, -46, -15, + -71, 16, 23, -2, 53, 11, -46, 2, + -52, 59, -104, 
24, -26, 112, 45, -55, + -103, 17, -46, 0, 43, -51, 18, 16, + -30, 14, 4, 50, 14, -23, 44, -40, + -19, 64, -34, -8, -12, -27, -29, 88, + -73, -34, 7, -32, -27, 68, -44, -2, + -13, -15, 53, -13, -120, 18, -1, 56, + 37, 96, 126, 17, 60, 15, -35, 74, + -13, -17, -32, 37, -28, -63, 68, 6, + 10, -29, 89, 57, 20, 14, -118, 67, + -10, 71, 14, 73, 7, -37, -75, -27, + -69, -50, 48, 5, 34, -51, -4, -66, + 23, -18, 7, -17, -19, 115, -82, -22, + 42, 92, -52, 17, 9, 104, 118, -16, + 58, 5, -24, 33, -41, -90, -97, 4, + 80, -16, -37, 14, -9, -13, 38, -1, + -23, -25, 5, 13, -12, 18, -4, 35, + -68, 0, 2, -30, -21, 40, 36, 28, + -4, -6, -87, 90, 44, -6, -122, -36, + 16, -104, 47, 60, -32, -61, 14, -107, + -4, 48, -3, -60, -11, 65, 37, 16, + -36, -5, 57, 1, -34, 1, -108, 14, + 17, 54, -4, 33, 24, -25, -47, 3, + 12, -46, 16, 5, 3, 27, 21, 54, + -35, 34, -1, -30, 4, -36, -13, -8, + 4, -14, -91, 88, 0, -28, 19, 28, + 7, 47, -45, 53, -2, -22, 15, 32, + 24, 2, 7, 3, -34, -42, 39, -7, + 1, 18, 5, 63, -34, -30, 2, 64, + -26, 16, -54, -96, -18, -91, -32, 20, + -58, 21, 49, 14, 96, -14, -8, 46, + 26, 20, -61, -13, -8, 23, -9, -16, + 44, 1, 23, -71, -3, 56, 43, -5, + -17, -29, 55, 74, -8, -82, 14, -1, + 96, 12, -3, 16, -34, -4, 34, 3, + 10, 79, 109, -32, -85, -120, -114, -69, + 30, -10, -60, -67, 4, -22, -81, 45, + 35, -54, 27, 29, 33, 127, -86, 42, + -7, -1, 29, -80, -38, 13, -3, -21, + 12, -8, 48, -11, 4, 56, 4, 22, + -26, 41, -122, 28, 59, -49, 44, 125, + 2, 8, -116, -3, -57, 14, 11, 42, + -45, -59, 1, -47, 16, -72, -101, -27, + 22, -24, -24, 11, -14, 8, -36, 22, + 45, 3, 33, 60, 66, -58, 5, 2, + 10, -63, -122, -67, 26, -10, -24, 9, + -7, 23, -74, 10, -25, 43, -111, 40, + -69, 18, 9, -27, 0, -86, -6, 37, + -48, -42, 19, 16, -42, 23, -96, 46, + 14, 115, 29, -11, -4, -119, 87, 25, + -13, 19, -1, 22, 22, -90, 19, 27, + 23, -52, -10, 26, 31, 11, 20, -51, + 68, -25, -15, 11, 3, -33, -65, -109, + 94, -18, -64, 17, 61, -41, -22, 26, + -18, 23, -66, 9, -125, 7, 29, 2, + -48, -58, -36, -72, -127, 
-82, 30, -40, + 28, 46, 14, 72, -9, -43, 9, 56, + 51, 38, -53, 11, 45, -53, -1, 22, + -6, -29, 17, -37, 39, 11, -93, -68, + -10, -54, 13, -36, -38, 3, -46, -5, + -57, -16, -12, 19, -19, -59, -76, 15, + 38, -81, -123, 13, 12, 89, 64, 41, + -12, -3, 27, -21, -11, 113, -27, -40, + 26, 4, -3, 13, -35, 14, -33, 8, + 29, -7, -17, 0, 38, 16, 69, -24, + 41, 30, 24, -63, 15, -84, 1, 23, + 52, 12, 37, -21, -35, 13, -31, -24, + 35, 38, 75, 1, -42, 13, 23, 1, + 10, -15, 66, -14, 47, -60, 11, 14, + 47, -11, 77, 68, -47, 123, 47, 26, + -12, 18, 35, -65, 10, 21, 24, 12, + 43, 2, -9, -42, 37, -55, -65, -11, + -23, 32, 2, 37, 26, -28, -22, -10, + 62, -46, 43, -18, 56, 18, 19, -6, + -33, 69, -29, -37, 64, 23, -71, -3, + -50, -18, 79, 10, 29, 7, 67, 38, + -2, 91, -98, -22, -23, 31, 126, -4, + -2, 70, 76, -14, 36, -23, 22, 6, + -51, -46, -12, -56, -22, 70, -13, -19, + 30, -36, 17, 13, -20, -17, -5, 11, + 72, 18, 18, 76, -14, -60, 3, -29, + -23, -6, -37, -48, -31, -48, -17, -5, + -67, -71, -2, 88, -42, 52, 43, -11, + 2, -60, 21, 42, -24, -24, 48, -20, + 31, 12, 27, 25, 30, -123, -90, -19, + 89, 112, -50, -51, 18, 64, 109, 8, + 124, -74, 41, 53, 77, -96, -102, -25, + 92, -70, 49, -48, 26, 16, 29, 27, + -35, -35, -8, 12, 37, -56, -38, -85, + 87, 74, -46, 8, -22, 108, 7, 70, + -38, -4, -3, 93, -27, -13, -68, -27, + 89, -48, 12, 38, -116, -30, 62, -48, + 46, 30, 15, -47, 50, -21, -3, 88, + 48, 81, 7, 35, 49, -90, -86, -33, + -17, -27, -4, -84, 53, 58, 51, -54, + 14, 15, 32, -4, 98, 45, -99, 27, + -15, 79, -32, -28, -6, -39, 1, 6, + 3, -34, -128, -66, -13, -16, 125, 41, + -50, -35, -31, -19, 12, 81, -52, -56, + 7, 28, 6, 79, -50, 48, -57, 16, + 65, -30, 27, -18, 56, -29, 27, 21, + -5, 46, -40, 43, 38, 95, -28, 0, + 8, 53, -23, 71, 96, 11, -33, 6, + -16, 35, -4, -45, -53, 11, -49, 7, + -44, -21, 97, -23, 103, 37, -2, -11, + -9, -11, -57, 36, -8, -44, 125, 3, + 78, 76, 42, 10, 79, 25, 75, 27, + 34, -87, 20, -43, 17, -20, -57, 80, + 40, 7, 111, -66, 4, 33, -19, 92, + 25, -64, 51, 64, 28, -13, 
66, 25, + 31, 21, 21, -35, 81, 51, 127, -6, + 3, 60, 52, -25, -17, 17, 42, -3, + -30, 55, -22, -41, 127, 28, -54, -119, + 92, 77, -9, -47, -7, 35, 0, -40, + 15, -25, 71, 104, 34, 36, 60, 38, + -16, -3, 6, 106, 42, 89, -16, 16, + -31, -8, 94, -3, 20, -26, 12, -68, + 42, 45, -58, -59, 30, -64, 45, 122, + 75, 22, 69, -42, -60, -114, -33, -99, + -99, 9, 14, 13, 99, -1, 65, 5, + -29, -121, 73, -3, 15, 40, 19, 37, + -31, 85, -43, -35, -34, 24, 80, -29, + -25, -23, 41, -24, 62, -6, -124, -24, + 12, 7, -9, 103, 88, 18, -126, 123, + -70, -28, 12, -114, 6, -71, -13, -80, + 31, -69, 56, 13, 109, -75, 3, 24, + -85, -58, 18, -91, -27, -48, 2, -67, + 39, 70, -19, 82, -30, 37, -34, -2, + 43, 77, 48, 22, 23, 39, -47, 65, + -36, -24, 4, -50, 9, 16, -89, 15, + 11, 11, -107, -71, -54, 10, -52, 17, + -29, -19, -50, 10, -27, 80, 35, 35, + -67, 5, -5, -35, 40, -48, -24, -7, + -44, 1, -16, -52, -118, -20, -1, 24, + 34, -105, -60, -50, -54, -17, 18, 79, + 26, -11, -23, -63, -28, 10, 69, -124, + -45, 8, 7, 13, -96, -24, 35, 37, + 24, 24, -73, -3, 44, 85, -3, 7, + -81, 12, -3, -30, -1, -18, -35, -36, + -67, 28, -39, -61, 1, -36, -27, 24, + 29, 28, 3, -32, 40, -32, -46, 45, + 58, -62, -104, 31, 45, -12, 87, 12, + 7, -63, -17, 29, 102, 8, 57, 22, + 9, 8, -11, 45, 125, 106, -65, -28, + 93, 0, 53, -31, 49, -20, 6, 61, + 39, -19, 43, -52, 6, 3, 91, 29, + -69, 11, -23, 32, -9, 31, 29, -47, + 26, -66, 8, -44, 59, -6, -18, 34, + 5, -33, -46, 59, -34, -23, -48, -19, + 105, -30, -14, 31, 121, 88, 1, -1, + 27, 70, 93, 31, -56, 20, 27, -75, + -35, -18, 18, 71, 24, 40, -3, -15, + -13, -10, -65, -13, -22, 41, 33, 3, + 74, 25, 32, 49, 110, -5, 90, -23, + 22, -64, -35, -15, 22, 77, 17, 1, + -51, 50, 107, 70, 26, 66, -78, -106, + 40, -110, -60, 26, 41, -32, 54, -10, + 32, -41, 30, -1, -25, 6, 69, 117, + 6, 1, -8, -16, 8, -12, 49, -72, + 22, -16, -34, 24, 0, -50, 57, 80, + -1, 55, 58, -122, 63, -63, 97, 60, + 26, -43, -22, -29, 99, -30, 30, 53, + 49, -49, 8, -21, -44, 127, -9, 38, + -58, 26, 38, 46, 48, 56, 
58, 56, + -36, -25, -12, -45, -82, 7, -13, -48, + -116, -52, 7, 11, 37, 40, 95, 1, + -7, 14, 44, -33, -46, -19, -8, 52, + 61, -37, -22, 9, 44, -11, -21, -35, + 20, -53, 106, -45, -8, 11, 70, 49, + 34, 91, -60, -25, -21, 14, 40, -65, + -12, -7, 6, 9, -9, -75, -40, 41, + 53, 9, -31, -31, 48, -18, 11, -3, + 30, 6, 40, -45, -35, 38, 72, 64, + -117, -32, 38, -53, 10, 1, -7, -10, + 29, -36, 22, 6, -8, 33, 20, 44, + -89, 74, -66, 5, 60, 21, 26, 29, + 31, -61, 11, 24, 29, -36, 33, -16, + -27, -40, 39, -75, 12, -25, 12, 46, + -2, 0, 3, 86, 14, 13, -26, 73, + 10, 5, -16, 31, 35, -9, 96, -56, + 22, 15, 51, 18, -60, -25, -34, -5, + 19, -13, 20, 74, 70, 16, -120, 10, + 103, -27, -92, -46, 25, 39, -49, 17, + 40, -56, -25, -68, -81, -40, 126, 51, + -9, 4, 30, 5, -68, -33, 53, 60, + 8, 34, -36, -14, 52, 30, 114, -25, + -65, 38, -53, 45, 38, 59, 12, 113, + 9, -29, -9, -10, -81, 13, -56, 75, + 19, 46, 21, -14, -49, 31, -100, -24, + -34, 38, 100, -60, -30, -9, -46, -104, + -21, -3, 12, 31, 65, 97, -14, 7, + 27, 30, -83, 40, -74, -9, -62, -7, + 112, -59, -49, -24, 23, 113, 96, 40, + -35, -42, 38, 69, 73, 42, 41, 18, + -3, 95, 96, 77, -32, -20, -90, 105, + 40, 82, -1, 108, 58, 73, 118, -15, + -4, 26, -4, -16, 68, -50, -59, 10, + 32, -58, 31, 44, 4, -50, 31, -19, + 91, 43, 11, 40, -32, 119, 75, 86, + 113, 0, -108, -14, 121, -104, 17, 126, + -38, 2, 52, 56, -20, 51, -16, -35, + -18, 26, -26, 13, -65, 25, -105, 67, + 50, 18, 20, 13, -22, 24, -9, 27, + -9, 8, 3, 81, 40, 73, -26, -33, + -7, 22, 6, -7, -11, 19, 14, -47, + 96, -16, 52, 60, 17, -71, 56, -50, + 12, 7, 6, 31, -59, 1, 23, 17, + -49, -17, 21, -9, 30, 31, 35, -1, + -18, 7, 29, 15, 65, -68, -9, 89, + -8, -30, 44, 74, -4, 46, 28, -69, + 62, 27, -22, -57, 28, -20, 11, -3, + 5, -65, 28, 7, -26, -13, 20, 20, + 32, 71, 115, -47, -11, 36, -65, 5, + -50, 52, 16, 22, 77, -9, -8, -16, + 56, 30, 17, 50, -35, -69, 19, 71, + 60, -55, 2, 55, -47, 3, -13, -109, + 28, -3, -5, -38, 0, 34, 110, 38, + 15, 46, 18, -1, 34, 32, 32, -1, + 53, -3, 75, 65, 
-14, 63, 61, 0, + 4, -78, 18, 21, 5, 47, 19, 26, + -65, -59, 84, 45, 2, -126, -38, 24, + 56, 15, 47, -13, 45, -69, 7, 122, + -42, 34, 84, -8, 6, 21, 6, -11, + 26, -47, 117, 3, 23, 10, 34, 29, + -48, 123, -11, 53, -47, 46, -8, 45, + 4, -16, 14, -28, -30, -16, -67, 59, + 10, 34, 15, 63, 1, -28, -11, -24, + 8, 27, 54, -4, -110, 80, 57, 31, + 7, 104, -35, 60, -10, -12, 41, 66, + -122, -18, 9, 52, 41, 42, -47, -28, + -32, 68, 8, -56, 13, 28, 115, 42, + 31, 0, -55, 24, 39, -47, 9, -41, + -14, 41, 34, 53, 71, 0, -5, 53, + -55, 14, 17, 23, -59, -22, 52, 96, + -40, 72, -7, -7, -24, 5, 48, 33, + 29, 18, -106, 36, 30, -53, -29, 88, + -21, 77, -5, -22, 29, 7, 21, 0, + 27, 19, 63, 8, 37, 32, -45, 16, + -25, 45, -16, -2, 61, -13, -31, -8, + -19, 65, -14, -69, 29, -25, -83, -53, + 35, -60, -23, 4, 52, -10, 29, 9, + -5, -80, -30, 3, -22, 8, -40, -28, + 15, 66, 23, 64, 7, 41, 11, -24, + -97, -21, -16, 79, 7, -4, -29, -20, + -42, -33, -35, -66, -18, -76, -18, 22, + -59, -20, -59, 0, -72, -77, 6, -60, + 28, 18, 19, -51, 96, -34, 8, 22, + 2, -98, 38, 29, 21, 3, -15, -26, + 31, 0, -7, -9, 26, -8, -128, 118, + -75, 33, -69, -8, 121, 26, 46, 37, + 101, -19, 7, -82, -39, 34, -65, -14, + 40, 20, 127, -126, 29, -44, -44, 29, + 23, 2, 80, -14, 126, -31, -22, -21, + -12, 15, -16, 80, -18, 49, 41, -8, + -43, -121, -67, 59, 123, -24, -27, -28, + 36, -103, 18, 36, 0, -64, 10, 69, + -11, -74, -99, 102, -64, 35, 6, 16, + 122, -19, -6, 3, -21, 73, 5, 10, + 121, 18, 87, -16, -28, 79, 4, 96, + 39, -1, -21, -40, 110, 24, -42, -56, + 109, -10, -57, -41, -76, -53, -126, 27, + 63, -47, 51, -40, 17, 83, 79, 3, + 31, 13, 13, 26, -86, 37, -96, 77, + 31, 47, 127, 22, -5, -18, -48, 30, + 19, 25, 106, 117, -66, -49, 124, -90, + -4, -90, 7, -1, -64, 30, -91, 32, + 1, 103, 40, 96, -26, -47, 21, 8, + -23, -12, -29, 25, 36, -28, -115, -27, + 80, 13, 65, 3, -2, 124, 86, 25, + 108, -77, -57, 16, 19, 5, 26, -39, + 126, 27, 14, 30, 5, -6, 47, 16, + 4, 1, 3, 24, 44, -58, -27, -128, + -13, -25, 0, -13, -9, 18, -14, -19, 
+ 22, 92, -31, 14, -14, -20, 37, -27, + -37, -11, 36, 3, -27, 11, 4, 16, + -50, -2, -74, -2, 61, -5, 75, -5, + 6, 31, -40, -6, 127, -63, 31, -7, + -4, -41, -50, -125, -33, -42, -3, -99, + 14, 0, -12, 22, -22, 15, -14, 29, + 9, 0, -4, 30, 31, -5, -17, -24, + 13, -56, -6, -36, 37, -6, 53, 29, + -2, -3, 5, -34, -121, -68, -38, -33, + 53, -23, 9, -12, 12, -10, 11, -69, + -4, -51, 16, 84, -71, 15, -38, 4, + -26, 31, -40, 11, -64, 13, -26, -7, + -14, -20, 27, 2, 31, 67, 30, 108, + 28, 66, -14, -5, -9, -11, 9, -38, + 35, 83, -49, -13, 81, 32, 40, -22, + -20, 91, 1, -24, -26, 53, 69, -66, + -48, 50, 12, -39, 11, 55, 13, -22, + -17, 67, -28, 21, 103, -45, 65, 11, + -18, -20, 28, 0, -5, -12, -15, 31, + -54, 14, 82, -99, -38, -46, -22, -81, + -20, -62, 65, -47, -21, -32, 14, -50, + 73, 11, 14, 5, 38, -14, 53, 27, + -14, 39, -18, 63, -47, 61, -12, 12, + 58, 0, 15, -47, -51, -50, 3, -10, + 52, 20, 28, 23, -19, 4, -58, -3, + 13, 97, -44, 10, -98, 31, -38, 4, + 50, 72, 21, 24, 127, 33, 48, 64, + 48, -100, 7, 46, 1, 105, 11, 59, + -16, -46, 86, -11, 23, 12, -42, -50, + -81, 21, 108, 91, -125, 19, 55, -21, + 117, -75, -24, 9, -58, 39, 10, -12, + 26, 126, -77, 31, -3, 127, -13, 76, + 64, 40, -10, 10, -29, -56, 40, 7, + 68, 4, 12, 119, 99, 36, 31, -20, + -117, 127, -122, -33, 97, 22, 43, 0, + -7, 4, 35, -16, -16, 16, -4, -15, + -50, -24, 85, -4, -59, 23, 11, -76, + 21, 57, -19, 6, -32, -7, -58, 39, + -46, 14, -27, 6, -76, -5, 65, 24, + -30, -57, -22, 45, 23, -3, -11, -122, + -29, -38, -16, -10, 23, 25, -45, 12, + 7, -69, 25, 19, 24, 2, 7, 9, + -40, 37, 26, 45, -92, 16, 22, 53, + 22, -24, 15, 53, 35, -107, 56, 19, + 20, 17, 1, -50, -31, 0, 18, 13, + -1, 11, 23, 71, -5, 21, 34, -63, + 50, 77, 24, 29, 9, 77, -20, 66, + -3, 15, 24, 81, 83, 25, -5, 42, + -121, 126, 34, -84, 44, 16, 112, 38, + 22, 24, -78, 102, 40, 14, -18, 95, + 70, -15, 32, 28, 31, 82, 58, -1, + 26, 30, 59, 16, 5, -41, -15, -45, + -29, -15, 71, -9, 17, -11, -6, 31, + 30, 89, 48, -34, 95, -5, -72, -14, + 108, -34, 41, 
-24, -45, -79, 117, 0, + 25, 14, 68, -24, 23, -15, -23, -61, + -3, -21, 24, 55, 7, 46, 19, 17, + 66, -75, 10, 95, 14, 18, 6, -54, + 23, 90, 113, 34, 44, 57, 12, 66, + -9, 116, 9, 32, -88, -105, -123, -31, + -17, -7, -50, 16, 83, -57, 70, -110, + -127, -38, 55, 5, -108, -35, 49, 52, + -4, -19, 20, -41, -101, 89, -127, -121, + -6, 14, 61, -91, -47, 27, 62, 39, + -71, -64, -112, -21, 27, 8, -70, 17, + 14, 5, -37, 10, -2, 61, -77, -23, + -40, -39, -67, 18, -124, -33, -40, 68, + 11, 4, 118, -14, 24, -56, 67, 33, + 52, -11, -13, -16, 122, -115, -34, 46, + -50, 43, 59, 78, 19, 120, 6, 41, + -50, 37, -28, 28, 103, -128, 24, -44, + -3, -14, 3, -10, 62, -51, 36, 2, + 73, -93, -59, -26, 30, 17, 90, -21, + -31, -17, 23, 22, 39, 20, 17, -9, + -96, 57, -6, -60, -50, -18, -56, 19, + 8, -76, 8, -21, -32, 14, -38, 1, + 20, -90, 55, -54, 7, 35, 19, -28, + -33, -86, 54, 72, 29, -19, -32, -33, + 97, 16, 11, -15, 23, 18, -56, 29, + -17, -9, 46, 30, 46, -122, 106, -21, + 48, -63, 32, 16, -61, -55, 22, -21, + 121, 29, -15, 30, -25, 39, -21, 62, + -25, -11, -34, 62, 42, 6, 26, -29, + 80, -57, 7, 29, 17, 71, 2, 37, + -45, -111, 27, 47, 112, 7, 77, -22, + -29, -73, 39, 106, 37, 45, 65, 43, + 26, -119, 33, -28, 16, 123, -52, -61, + 64, 2, 34, -39, -3, 6, 18, 35, + 4, 24, 2, -3, 53, -17, 19, 14, + -4, -30, -14, 72, -6, -50, -94, -23, + 30, -115, 77, 17, 69, -20, 5, 13, + -13, -83, 25, 3, -23, 15, -128, 46, + 112, 13, 41, -4, 125, 47, 23, -39, + -13, 123, 43, -18, 5, 6, 3, -32, + 114, -11, -69, -23, 43, 13, 5, 106, + -8, -8, 22, 8, -22, 61, -27, 51, + 28, -2, -75, 13, 61, -8, 71, 56, + -28, -15, -29, -53, 83, -33, -13, -26, + -76, 98, 75, 97, -11, 101, -126, 124, + 29, -105, -36, 83, 6, 57, 45, 98, + -55, 9, 14, -45, 124, 42, 88, 62, + 87, 43, 18, 8, -6, 28, 99, 26, + 127, 27, 2, -20, -47, -41, -39, -52, + 35, -18, -37, 41, -1, -4, 9, 29, + 48, 46, -14, 42, 34, -38, 1, 87, + 13, -22, 61, 123, 4, 26, -16, 32, + 125, 19, 43, 28, -32, 114, 4, -87, + -61, -99, 22, -10, -8, -27, -7, 5, + -24, 6, 4, 
23, 10, 23, -33, -31, + 82, -48, -25, -74, 68, -9, 35, -16, + 30, 10, 24, 71, 50, -127, -50, 21, + 114, -31, -56, 2, 48, 39, 39, -14, + 33, -9, 34, -13, -35, 49, 59, -27, + 15, 61, 28, 17, 15, -14, -5, 38, + 7, -8, 2, -62, -7, 1, -32, 22, + -30, 17, 6, -12, 42, 8, 24, 61, + -101, 1, 13, 48, -16, 27, -10, -9, + -5, -7, 75, 21, -39, -33, 25, -49, + -30, 17, 11, 75, 7, -11, 20, 3, + -49, 52, -25, 72, 52, -11, 57, -6, + 21, 99, 32, -81, 37, -59, -65, 18, + -24, 119, -37, -69, 57, -20, -1, 78, + -32, -15, 6, 33, 108, 81, -120, 38, + -47, -82, -4, 62, 29, -11, -53, 9, + -23, -10, -78, 63, -12, 38, -37, 21, + 64, 3, -17, -117, 24, 3, -24, -75, + -34, 34, -5, 26, 9, 15, 41, 51, + -8, 52, 25, -42, -101, 100, 57, 9, + -98, 127, -86, 36, -63, -92, 50, 43, + -7, -16, 30, 66, 56, 54, 33, 29, + -49, 8, -1, -8, -16, 73, -14, -72, + 26, -34, 44, 43, -117, 12, -43, -121, + -70, -28, -5, -23, -68, 23, 123, -11, + 70, -14, -64, -82, 99, 79, -113, -45, + 118, -12, -35, -79, 94, -43, -125, -45, + 121, -126, -45, 4, -18, -47, 11, 25, + -73, -51, 104, -19, 46, -116, 117, -32, + -11, 106, -14, -112, 52, -73, -127, 82, + 14, -22, -2, 35, -65, -108, -88, -4, + 8, -33, 28, -128, -23, 0, -20, 0, + 97, -45, -31, -27, 126, -65, -31, 23, + -25, -53, -92, 65, -123, 62, 127, 4, + 83, 40, 35, -14, -11, -32, -71, 6, + 6, 51, 22, 30, -23, -46, -38, -11, + -24, 41, -31, -22, -123, 36, -57, -46, + -22, -10, -4, -21, 30, -12, -124, -27, + 21, 38, -25, 8, -38, 73, 24, 27, + -2, 21, -7, -10, 19, 28, -78, 17, + -10, -8, 34, -60, 103, 27, -18, -36, + 48, 24, -51, -34, -52, -11, -61, 102, + 5, -1, -37, -70, -66, -19, 48, -90, + -18, -23, 70, 17, -100, 87, 55, -19, + -23, 73, -11, -65, -39, -83, -16, -10, + -68, 46, -92, -126, -66, -121, -94, -92, + 47, 2, -39, -95, -43, 42, 2, -50, + 8, 38, 81, -84, -1, 36, 0, 16, + -54, -61, 83, 51, -31, -50, -19, 0, + 1, -64, 22, -3, -12, 5, 10, 58, + 38, 71, 47, -23, -3, -4, -112, 35, + -44, -39, -44, -7, -121, 13, 33, -28, + 41, 26, 60, 86, -87, 37, 11, 25, + 42, -15, 
-25, -49, 27, 27, 58, -62, + 91, 48, 59, 15, -11, 118, -55, 22, + -5, -1, -1, 68, -16, 86, -95, -95, + 46, -9, -40, -18, 20, 5, 48, 44, + 17, 10, 13, 1, 19, -17, 12, -66, + -44, -48, 33, 23, -50, -16, 59, -11, + -48, 2, -6, -5, -43, 33, 44, -12, + 23, -63, 46, -23, -51, 3, 126, -54, + -50, 85, 15, -6, -3, 42, -58, 61, + -55, 75, 94, 26, -97, 71, -107, -80, + 13, 61, -37, 30, 5, -36, 63, 9, + -68, -18, -32, 39, -58, 21, -21, -30, + -46, -127, 42, 34, 6, -84, 31, 39, + -10, 36, -58, 38, 10, 10, -4, 29, + -128, -28, 65, 29, 10, -78, 116, 19, + 67, -122, 31, -43, -27, 68, 9, 35, + -90, -8, 7, -49, 56, -29, 25, -21, + 31, 0, 28, -44, -51, 61, 30, -5, + 57, 83, 2, 1, 54, 74, 27, -3, + 20, -35, 6, 17, -17, 27, 65, 35, + -7, 67, -46, 19, -34, 23, 26, 34, + -26, 18, -22, -4, -113, -48, -10, -12, + -32, 1, 0, 15, 33, -11, 24, 35, + -18, 35, -85, 41, 52, -18, 29, 26, + -37, -25, 4, 34, 5, 1, -16, 4, + 13, -12, -2, -66, 29, -29, 70, -15, + -48, 14, -6, -32, 36, -9, 25, -12, + 25, -127, 27, -26, -54, -55, 13, 78, + 26, -26, -15, -21, -4, -50, -17, 10, + -18, -11, -42, -19, -8, 12, 0, 24, + 27, 64, 42, 83, 34, -23, 16, 7, + -123, 21, -16, -53, -5, 14, -24, 50, + -22, -20, 36, -9, 35, 9, 64, -16, + -6, -41, 99, 67, 26, 21, -59, 4, + -13, 48, 67, 38, 67, 127, 38, 14, + -5, 43, 3, 1, -3, 13, 62, 24, + -5, -49, 39, 74, 61, -44, -39, 70, + 112, 6, 62, 52, 49, 17, 0, 80, + -21, 11, -19, -34, -2, 4, 38, 31, + -9, -3, 109, 51, -14, 121, 66, 15, + -12, 34, -33, -2, 0, 10, 27, 63, + -6, 37, -55, -36, 22, 15, 30, 68, + 37, -65, -17, 79, -8, 32, -50, 4, + 83, 36, -57, 18, -21, -48, 39, 98, + 35, 27, -10, 24, 36, -13, 41, 22, + -15, 44, -98, 62, 32, -1, 116, 62, + -35, -78, -13, 34, -37, 123, -31, -26, + -2, 10, 52, -77, -35, 78, -67, 63, + -5, -60, -13, 71, -10, 22, 118, 42, + 126, 59, -21, 30, 2, 9, 8, 14, + -31, 38, -70, 57, -36, 109, 9, -13, + -33, 88, -9, 70, 8, -44, -20, 73, + -2, -13, -19, 118, -27, 2, -16, 58, + -82, 39, -59, 36, -7, 4, 60, 23, + 38, -6, 37, -3, 29, -32, 1, -34, + 
44, 45, -10, -29, -10, 9, -39, -58, + -25, -26, 37, 44, 71, 60, -1, -18, + 45, -76, -12, -24, -5, 2, 15, 5, + 10, -72, -38, 31, -58, -27, 4, -10, + 6, -4, -110, 11, -34, 31, 36, 2, + 32, -14, 19, -4, 13, 17, -8, 5, + -1, -18, 15, 16, 47, 56, 48, -6, + -53, -8, 66, 27, -57, 45, 51, -24, + 35, -33, -29, 74, 16, 14, 22, 15, + -72, -26, 101, -27, -14, -22, 14, -79, + -9, 34, 81, -114, 33, 9, -24, -31, + 31, 13, -79, -61, -40, 82, -115, 45, + -107, 57, -37, -66, 12, -25, 4, 7, + -24, 4, 27, 6, -64, 6, 89, -84, + -121, 75, 40, -36, 13, -57, 48, -59, + -128, -62, 7, -15, 26, 75, 16, -59, + 27, -20, 16, 6, 13, 2, -84, 15, + -40, -70, -8, 56, 107, -1, 12, 15, + -1, 25, 58, -12, 53, -110, -67, -38, + -67, -46, -42, 16, -54, -103, -109, -5, + -70, -67, -56, -67, 80, -13, -53, 55, + 15, 83, -124, -44, 0, -17, -44, -60, + -32, 8, -23, -17, -54, 36, -49, -9, + -65, 16, -39, 88, 43, 50, -8, 3, + -25, -57, 30, 37, 34, -60, 74, 57, + -27, -30, -104, -67, 46, -46, -12, -120, + 52, 36, -117, -45, 35, 45, 62, -66, + 58, -52, 54, 14, 28, -24, -86, -35, + 49, 17, 91, 75, 39, 125, -24, -7, + -109, 70, -24, -88, 25, -59, 127, 55, + -81, -16, 17, 14, -41, 7, 69, -9, + 15, 25, 46, 87, 13, -42, 20, 12, + -38, -14, 109, 16, -66, 107, 91, 30, + 83, 4, 15, -41, 36, -38, -106, 75, + -120, 18, 92, 58, -123, 9, -42, 95, + 4, -2, 26, 24, 1, -4, 22, -14, + -11, 3, -122, -53, -76, -53, -8, 1, + -100, -6, 26, 3, 1, -35, 105, 25, + 11, -43, -105, -27, -35, -54, 56, -16, + 8, 8, 14, 33, -13, 33, 39, 24, + 8, 26, 27, 30, 19, -13, 13, 7, + 67, 67, -20, -40, -50, -17, 22, -18, + 18, -1, -2, 22, 35, -33, 5, 3, + -56, 9, -51, 6, 15, -74, 86, -11, + 5, -11, -3, 95, 8, -44, 29, 1, + -26, -54, 101, -84, 33, 45, -34, 78, + 61, 36, -20, 44, 10, 74, -84, 5, + 13, -26, 35, 80, -43, 126, -75, 4, + 23, -64, 54, -42, -20, 68, 87, 44, + -63, -39, 4, -99, -27, 14, 58, -17, + 91, -10, -39, 82, -13, -47, -44, -22, + 34, -17, 23, -128, -53, -2, 12, -8, + -5, -29, -19, 64, -42, 13, 28, -42, + 28, 67, -65, 63, -47, -9, 37, 
-66, + -2, 56, 51, -3, 40, -22, 47, -2, + -24, -12, 25, -68, 89, -27, 18, 121, + 59, -27, -46, -11, 6, -61, -15, -46, + 2, 15, 16, -34, -17, -18, -3, 27, + 55, 40, 31, 22, -19, 2, -72, -14, + 89, 94, -24, 24, -96, -119, -65, -13, + 40, -69, -62, 10, -49, -60, -52, 27, + -75, 8, 42, 57, 48, -15, -70, -117, + 82, 71, 83, 34, 87, 22, 2, 22, + 2, -114, 33, -46, -32, 99, 10, 4, + -44, 92, 67, 23, 42, -127, 33, -35, + -11, -25, 54, -15, -31, 26, -115, 18, + -6, -112, 3, 39, -63, -66, 0, -51, + -39, 26, 1, 37, 7, -25, 20, 5, + 38, -21, -2, -9, 108, -77, 16, 12, + 24, -5, 14, 88, 59, 1, 8, -5, + -20, 36, -124, -50, -23, 38, -67, 0, + -10, 4, 1, -42, 0, -64, 1, -10, + 7, -32, -80, -17, -13, -113, 107, -54, + 71, -24, -38, 11, 21, 24, -122, -6, + -31, -67, 56, -68, -12, -23, 0, 2, + 37, -114, -1, -19, -1, -72, -91, -16, + 32, -4, -22, 118, -17, 41, 45, 32, + -14, -27, -45, 6, 18, -62, 21, 33, + 30, 14, 5, 39, 17, -10, 25, 12, + -15, -2, -4, 17, -9, -2, 38, -66, + -50, -18, 18, 41, -21, -24, 7, -18, + -15, -20, 61, 18, -1, 10, -24, -67, + -16, -17, 2, -21, -25, 21, 7, 24, + -8, 25, 47, -46, 122, -11, -48, 20, + -39, -60, -30, 53, -64, 14, 59, -82, + -10, -10, 37, -11, -1, 26, 1, 2, + 41, -58, 87, -18, 20, 5, -36, 59, + -52, -18, 56, 80, 49, -115, -16, -2, + 26, 20, 22, 28, -6, 24, -69, -6, + 6, 8, 7, -8, 105, 49, -13, 46, + -86, 46, 31, 70, 10, -25, 51, 27, + -14, 12, -14, 5, 28, 8, 95, 56, + -4, 43, -18, 7, 3, 70, -5, -9, + -105, 21, -97, 2, -28, 67, 27, 18, + 109, -52, 88, 32, 49, -1, 18, 25, + -35, 61, -47, 13, -124, -52, 71, 8, + -31, -6, -31, -32, 60, 62, -38, -9, + -83, -107, 126, -22, -16, -47, -78, -55, + 30, 16, -24, -27, -45, 74, -58, 50, + 6, 15, -7, 91, -127, -2, -8, -55, + 81, 103, 14, 27, -16, 26, 125, -13, + 98, -80, 68, -4, 35, 126, -59, -103, + 58, -31, 92, -2, 16, 42, -5, -58, + 45, -48, 64, -106, -14, -62, -30, 36, + 10, 9, 50, -5, -6, 33, -39, -10, + -16, -71, 83, 38, -10, -70, -77, 42, + 35, -128, 64, 37, 49, 60, -9, 33, + -33, -37, -23, 7, 8, -30, 19, 
35, + -17, 16, 17, 6, -36, -22, -82, 54, + 3, 6, 35, 12, 57, 0, 12, -32, + 58, 108, 7, 49, -8, -38, -79, 30, + -1, 7, 15, 32, -20, 16, 68, 16, + 22, -41, 33, -25, 13, -42, 49, 14, + 2, 60, 41, 8, -25, -21, 46, -43, + -26, -9, -15, -10, 36, -19, 0, 36, + -12, 17, -7, 13, 16, -49, 79, 59, + -8, 10, 59, -12, 15, -125, -18, -8, + 7, 6, 10, 39, -37, -64, 29, 0, + -50, -6, -24, 15, -32, -28, 27, -12, + -44, 84, -30, -10, -51, 5, -59, 77, + -26, 3, 3, 9, -39, 39, 100, -33, + -80, -7, -36, -83, -30, 19, -21, 59, + 44, 86, -29, -4, -112, 28, 8, 97, + -24, -37, 13, 110, -2, 16, 72, -7, + 9, 22, 10, 37, 32, -20, 66, 7, + -72, -127, -5, -11, 50, 22, 1, 5, + 74, -74, -118, -36, 123, -8, 14, 29, + 21, -71, 54, -94, 11, -27, 45, 95, + -126, 70, -39, -60, 52, -36, 73, 23, + 48, 12, 36, 63, 33, 68, 88, -24, + -65, -4, -2, -20, -9, 48, -63, -121, + -66, 40, -128, -22, -13, 11, -105, 20, + -30, -127, 26, -21, -92, -108, 60, -14, + 56, -117, -113, -123, -46, 57, 111, 124, + 82, 58, 25, 5, -57, -42, 116, 9, + -24, 16, -17, 86, -116, 68, 10, -35, + 124, 66, 53, -51, -65, -10, -14, -12, + 26, 5, -51, -15, 12, 25, -3, -33, + -54, -20, -116, -10, -25, 117, -1, -24, + 80, 55, 4, 112, -8, -67, 0, 41, + 12, -31, 102, 36, -3, -69, 96, 18, + 126, 116, 27, 16, 114, -28, -82, 113, + -57, -119, 123, 63, 126, 33, -1, 29, + -17, -28, -13, -4, 85, 39, -5, -53, + -53, -63, 5, 29, -76, 27, -23, 3, + -70, -31, -46, -48, -15, -93, -13, 50, + -1, -6, 0, 26, -34, -22, 3, -112, + 39, 48, -12, 73, 18, 38, 17, -18, + -15, 35, -10, -72, -19, 13, -72, -21, + 19, -58, -17, 70, 4, -32, -4, 35, + 86, 11, -6, 14, 38, 4, 99, 29, + 117, -7, -57, 11, 44, 2, 12, -37, + 30, 117, 36, 5, 93, -56, 27, -4, + -104, -83, -76, -26, 70, -63, 108, 37, + 57, 62, -23, 26, -9, 11, -12, 6, + 22, -28, 24, 11, 63, 12, 28, 15, + 78, -3, 85, -2, -29, 37, -41, -30, + -37, 39, -19, -39, 50, -5, 15, 39, + 12, 3, -34, 17, 35, 5, -13, 10, + -45, 12, 109, 5, 71, -28, -11, 22, + 3, 63, 11, -11, 40, -13, 53, 32, + -7, 41, 1, -14, 27, -1, -25, 
54, + 16, -64, 14, 92, 67, 94, -10, 52, + 56, 84, 0, 28, 59, 39, -68, 6, + -31, 73, -87, -40, 1, 23, 31, -35, + -21, 34, -50, 82, -13, 62, 24, 84, + 44, -11, 29, 56, -11, -14, 10, 18, + 12, -85, -127, -3, -65, 4, 52, -27, + 11, -89, 1, -64, -78, -24, -44, -12, + -20, -51, 13, 97, -77, 50, 63, -74, + -47, 31, 1, -8, 4, 23, -74, -126, + 13, 62, 57, -25, -30, -125, -105, 13, + 127, 90, 24, 3, 24, 22, -3, 3, + 122, -59, -50, 18, -48, -20, -26, -37, + 122, 109, 124, -29, -23, -114, -4, -120, + -2, 7, -70, 55, -73, 12, -80, -124, + -11, 33, 83, -14, 31, 2, 126, 78, + -101, 48, -104, 18, 53, -1, -28, 54, + 87, -126, 40, 30, -85, -7, 86, -95, + 86, 23, -70, 17, 10, -49, 2, 94, + -20, 123, -35, -22, 28, -31, -35, -13, + 41, 38, -45, -58, 50, 8, 6, -96, + 49, 9, -2, -70, 55, 66, -55, 38, + -86, 81, -72, -17, -11, -72, 18, 27, + 17, 49, -84, -44, -7, 16, 82, 27, + -89, -18, 23, -28, 24, 36, -8, -92, + -24, 34, -20, 127, -23, 19, -6, 3, + 28, 111, 14, -19, 7, 35, 10, 27, + 27, 2, 95, 29, 66, -56, -55, 24, + -38, 127, -59, 4, 56, -20, -69, 14, + -12, 17, -37, -23, -43, 31, -35, -35, + -56, 29, -9, -11, 123, 24, 19, -23, + -52, -3, 7, -39, 25, 23, 34, 95, + -16, 35, -9, 39, -55, -25, -43, 47, + 108, -3, -3, 61, 56, -9, 54, 54, + -8, 51, -20, 62, -92, -39, 40, -8, + 34, 29, -41, 52, 18, 14, 21, -30, + 39, 64, 81, -32, 71, 41, 28, -8, + -42, -75, 20, -14, 12, -32, 66, 34, + -30, -68, 31, -104, -37, 31, 86, 42, + 14, 12, 3, -20, 15, -7, 29, -26, + -45, -32, -12, 9, 48, -25, 41, -70, + 21, -3, 26, 3, 28, 41, 31, -6, + 60, 76, 126, 5, -14, -32, -42, -36, + -3, -3, 4, 18, 3, -6, 51, 45, + -60, 104, -41, -8, 64, 7, 10, 125, + 61, 6, -3, 43, 5, 9, -126, 83, + 13, -126, 24, 51, 33, 118, -28, -25, + -127, -13, -20, 96, -27, 15, -124, 21, + -75, 86, -11, -9, -9, 40, -17, 111, + -52, -94, -126, 26, -3, 72, 7, 116, + 55, 30, -2, -119, 127, 126, 32, 9, + -77, -1, -50, 24, 21, -55, -125, 6, + 99, 13, -12, -3, -53, 26, 106, 113, + 125, 125, -51, -12, -3, 2, -46, 69, + -35, -1, 57, 46, 65, 19, 
43, -39, + -44, -24, 26, -4, 0, 26, 28, 64, + -22, 5, 32, 36, 1, 16, 3, 69, + 18, -52, -20, -36, -56, -22, -19, 21, + 75, -70, 56, 15, 23, -4, -50, 85, + 48, -46, 43, 64, 53, -4, -46, 19, + 35, -23, 18, 27, 13, -35, -4, 127, + -50, 7, 52, -41, 3, 31, 29, -9, + -19, -94, -22, -30, 58, 74, -53, -22, + -21, 0, -43, -6, -58, -3, 115, 10, + 93, -71, 46, 10, -38, -19, 49, -60, + 6, -30, 60, -22, 62, 24, 23, 63, + -22, 5, 57, -8, 64, 106, 28, -80, + 112, -50, 57, -122, -128, 47, -12, -51, + -10, 4, 1, -124, -11, -87, 30, -36, + -10, -36, -21, -19, -125, -76, 53, 104, + -125, 32, 33, -64, 64, 6, -14, 15, + 94, 12, -69, 2, 8, 0, -51, 28, + 34, -124, -21, -49, -70, -71, 54, -56, + -2, 45, 70, -3, 127, 87, 101, -10, + -28, 100, 116, 19, -14, 43, 19, 100, + -113, -44, 52, -8, -90, -7, 15, 36, + 39, 5, -79, -27, 8, -40, 24, 93, + -39, -3, 40, 56, 57, -33, 3, 48, + 81, -41, 7, 111, 44, -62, 51, -126, + -51, -111, 40, -35, -122, -49, 70, 75, + 84, 29, -72, -40, -119, -3, 14, 61, + 127, 14, -38, -23, -26, -127, -116, -122, + -121, -96, -5, 40, 40, -12, -112, -1, + 118, -102, 16, 26, -18, -34, -25, -95, + -40, 19, 47, -128, -18, -115, -23, -48, + -44, -99, -41, 30, -111, 4, -7, -16, + -52, -45, -16, -88, -115, 50, 59, -41, + -105, -121, 108, -42, 77, -109, -116, -51, + -60, -25, 2, 59, 55, 78, 104, -70, + 23, 85, -123, 52, -2, 2, 26, -75, + -14, 50, 14, 21, 3, 85, -9, -38, + 28, -72, 60, -116, 25, 37, -28, 41, + 69, 19, -80, -53, 12, 32, -87, -5, + -48, 9, 80, 81, -79, -74, 37, 116, + -45, 41, -28, 19, -40, -103, -117, 51, + -44, 41, -80, 79, -61, 69, 10, -41, + 15, 83, -35, -58, 65, -21, 32, 127, + 62, 1, -1, -56, -15, 13, 37, -50, + 11, 8, 25, 11, -90, 16, -12, -37, + -116, 77, -22, -62, 3, 45, -12, 34, + -43, 27, -50, 41, 31, -66, 36, -13, + -9, 3, 104, -11, 42, -25, -34, 17, + 37, 30, 22, 1, 19, 26, -9, 27, + -106, -41, 97, 37, -16, 53, -10, -48, + -10, -17, 52, 18, 62, 38, -24, 48, + -8, 34, 96, -57, 27, 23, -18, 56, + 105, -38, 63, -36, 51, 53, 82, 79, + -22, 64, -12, 28, 37, 
50, 91, 59, + 5, 12, 39, -19, 73, 38, 63, 26, + 57, 35, -33, 124, 63, 49, 68, -6, + 34, 63, 49, -43, 42, -39, 5, -34, + 68, 65, 61, 52, 24, 36, 46, 63, + 45, 92, 22, 22, -46, 24, 11, 73, + -30, 127, 15, 47, 38, 28, 43, -9, + -11, -42, 0, -9, -14, 56, -103, 9, + 27, 15, -22, 65, -28, -18, 6, -43, + 23, -25, 31, 54, -23, -41, -29, 27, + -9, -10, 29, 25, 116, 46, -84, 58, + 19, -37, -6, 29, -20, -30, -8, 10, + -59, -20, -26, -27, 97, 64, 117, -7, + -61, 15, -14, 47, -61, -37, 36, 82, + 18, 61, 17, 23, 30, -40, -65, -76, + 0, -72, 59, 43, 49, -7, 20, 22, + 101, 46, 31, 14, 72, -41, 24, -26, + 7, -71, 14, 66, -6, -35, -23, 15, + 32, 27, -34, 2, -12, -26, 44, -64, + 33, 47, 23, 41, -7, -8, 17, 7, + -59, 44, -16, -18, -37, 4, -91, -4, + -38, 9, -31, 22, 57, -17, 68, 1, + 24, -23, 43, -11, -18, 2, -72, 3, + 11, 29, 48, 39, -34, -22, -36, -4, + 13, -35, 43, 41, -57, -125, 97, -41, + -10, -57, -32, 10, 15, -10, -41, -62, + -17, 20, 33, -95, 17, 30, 6, -46, + -22, -24, 51, 89, -50, -78, -17, -24, + 34, -124, 51, -35, 58, -6, 14, -27, + -51, -11, 6, -9, -32, -22, -16, -69, + -41, 7, -50, -61, 45, -47, 39, 17, + -12, -7, -12, -25, -11, -7, -30, -9, + 57, -125, 20, 14, -66, 73, -17, 18, + -57, -20, 28, 58, 124, -31, -18, 14, + 11, 38, -104, 65, -95, 125, -81, 24, + 26, -31, 25, 40, 127, 17, -27, 62, + -13, -38, 120, 22, -29, 54, 56, -99, + 16, -33, 32, -39, -5, -1, 31, 17, + 10, -26, -52, -64, 9, 24, 38, -9, + 28, 99, 30, 56, -18, 39, -85, -70, + 55, 16, 118, 20, 4, -97, -18, -3, + 6, -34, 69, 14, 107, -4, -30, -38, + 24, 26, -114, 54, -36, -21, 33, -40, + -43, 29, -19, 20, -9, 114, 30, 6, + -65, -1, 75, 10, 12, -21, 41, -5, + 113, -39, -12, -105, -43, 78, 74, 18, + -53, -28, 9, -25, -2, 23, -9, 84, + -26, -5, -21, -44, -49, 46, 63, -1, + 2, 90, 72, -63, -44, 59, 18, 64, + 13, 49, -21, -21, 64, 106, 30, -26, + 73, -18, -83, -24, -77, -6, -47, 27, + -34, 8, 47, 4, -76, -117, -28, -22, + -32, 70, 94, -20, 35, -22, -12, 10, + -28, 72, -66, 103, 100, 46, 20, -42, + 50, 21, -16, -35, 
49, 104, 56, 111, + -48, -31, -1, -17, 20, -23, -12, 12, + -36, 32, -2, 9, 14, 77, 14, 17, + -11, -38, -88, -54, 30, -15, -37, -15, + 27, 21, 29, -68, 14, -91, -6, 10, + 13, -82, 11, 19, 124, 38, 80, -5, + 6, -15, 7, 22, -4, 32, 20, -55, + 24, 90, 5, -15, 18, -70, 25, -16, + 52, 2, -15, -39, -17, 14, 47, -4, + 41, 3, 45, 20, -26, -32, -66, -37, + -98, 65, -17, 24, 45, 22, -34, -36, + -68, -13, -24, 18, -29, -125, -1, 25, + 13, -68, -82, 20, -85, -101, 13, 19, + 4, 37, 28, -23, 47, 14, -50, 43, + -46, 52, -12, 82, 31, 77, 101, -28, + 42, 27, 81, -117, -19, 14, 2, -69, + 27, 23, -27, 127, 59, 101, 90, -4, + 40, 26, 3, 64, -23, -53, 31, 42, + 51, 8, -109, 35, -41, 25, 30, 42, + 43, -54, -22, 38, -72, 11, 3, 38, + 31, -69, -17, -1, -85, 15, -24, 17, + 29, -1, -13, 11, 44, 33, 115, -36, + -11, -39, 112, 27, 20, -11, 9, -5, + -40, -21, 14, -31, -9, 28, -24, 22, + 21, 19, -27, 19, 49, 9, 7, -79, + 79, -81, -6, 48, -75, 72, -72, -62, + 19, 4, 38, 18, -17, 54, 107, 60, + 18, 14, -37, -9, 28, 52, 12, -33, + 6, 50, 127, 42, -67, 46, 37, 43, + -18, 20, 74, 17, -10, 56, -30, 66, + 27, 36, 27, -24, -34, 55, 12, -32, + -26, -46, 63, 21, -66, -14, 98, 16, + -3, 12, 29, 28, 17, 58, -5, -2, + 23, 8, -10, 0, -23, 29, 28, -79, + -10, -21, 32, -15, -29, 47, 5, 31, + -49, -44, 21, 58, 81, -92, 48, -13, + 119, 52, -6, 19, 11, -26, 13, -57, + 23, -16, 30, 34, 35, -10, -28, 4, + -9, -6, -10, -47, -35, 7, -33, 28, + -28, -3, -3, -3, 19, 11, 30, -7, + -27, -14, 0, -90, 8, -62, -44, -13, + -5, -2, -27, 1, 34, 67, 13, -37, + -15, 32, 33, 15, 22, -113, 28, 18, + -6, -27, 31, 46, 10, -24, 60, -2, + -114, -7, 8, 8, 39, 14, 124, 55, + -6, -30, 10, -35, -5, -18, -1, -11, + -7, -11, -16, -51, 49, -30, 64, 3, + -10, -38, 2, -3, -2, 27, 22, 15, + 46, 41, 0, 6, 4, 54, 8, -5, + 2, 7, -10, 108, -43, 11, -30, 15, + -47, 51, 26, -21, 71, 22, -33, -54, + -29, 120, 11, 106, 1, 10, 55, 53, + 38, 11, 19, 42, 45, 5, 20, -39, + 44, 120, -68, 51, 127, 24, 66, -28, + 60, 8, 10, 32, 17, -8, 35, -6, + -3, 57, -32, 
-8, -51, 8, 24, -63, + 47, 28, 15, 18, -8, -70, 81, 10, + -125, 13, -86, 62, 37, -27, 28, 127, + -8, 18, 56, 6, -108, 22, -36, 10, + 14, -10, -45, 19, 72, -27, -53, -12, + 42, 6, 7, -2, 127, 1, -41, 13, + 3, -81, -25, 1, -11, 45, -107, 62, + 18, 54, 22, 4, 35, 34, -32, -120, + -106, 18, 38, 100, 11, 29, -56, 116, + -37, -23, 122, -1, -78, 31, 23, -4, + -2, 42, -28, -57, 97, -63, 12, 56, + -4, -127, -14, -45, 44, -65, 103, -52, + -67, 7, -11, 23, -38, -32, -3, -4, + -24, -38, 31, -20, 20, 42, -17, 79, + 34, 29, 60, -71, 71, 49, 101, 35, + 22, 19, -60, 40, 11, 25, -34, -102, + 8, 62, -3, 17, -9, -57, 47, -26, + -44, -29, -43, 89, 121, 19, 49, 71, + 6, -61, 15, 100, -124, -20, 0, 37, + -21, -56, 21, 59, 26, 15, -40, -34, + 53, 11, 60, 15, 63, -3, -13, 16, + -34, 57, -13, 61, -15, 38, -3, 33, + 7, 36, -1, 72, -59, 18, -90, -6, + 78, 67, 14, 20, 34, 37, -12, -47, + -34, -123, 19, 53, -61, -4, -38, -37, + 6, 4, 32, -46, -13, -18, 0, 27, + -73, 27, -11, -47, 63, 24, 66, -124, + -64, -5, 34, 6, 25, 44, 0, -42, + -26, 39, 95, -25, -45, 87, 10, -33, + -30, -18, -71, -3, 6, 31, -6, 10, + -21, -60, 39, 64, 24, -11, 102, -20, + 127, -70, -119, -69, 103, -17, 42, 6, + -81, 43, 26, -12, -33, -72, 51, 55, + 17, -57, -3, -12, -24, -22, -60, -18, + -57, -22, 22, 32, 101, 40, -47, 47, + -1, -38, -15, -56, 58, 57, -36, -7, + 51, -6, -12, -66, 33, 73, 19, 26, + -6, 77, 74, -46, -50, -16, 125, 47, + -30, 41, 115, -91, -11, -4, -47, 127, + -121, 112, -83, 25, 22, -32, 31, 6, + -78, -84, -44, -15, -25, -1, 3, -50, + -46, -5, -28, 64, -74, 94, 8, 16, + 28, 30, 41, -11, -17, -4, 57, -69, + -2, -32, -15, 12, 96, -96, -15, 61, + -38, -5, -20, -37, 61, -63, -50, -60, + 34, 44, 53, 37, 47, 18, 22, -24, + -19, -54, -8, -7, -60, 56, 2, -41, + -22, 70, 33, 83, -46, -71, 35, 124, + 22, -38, -43, -70, 7, -62, -22, -51, + 15, -46, -6, -25, -34, -54, 51, -22, + -29, -13, 117, -53, 20, -9, 23, 27, + -13, 127, 68, 11, 41, -31, 18, -50, + -53, 29, -109, -2, -18, 13, -43, -125, + 23, 15, -6, -25, -19, 
-95, 4, -106, + 25, -24, -14, 21, -20, 28, 2, 38, + -92, -86, 47, 22, 108, 29, 118, -17, + 22, 3, 61, 4, -3, 28, -125, 10, + 81, -6, -19, 22, 22, 75, 16, 20, + -26, -14, 46, 89, 99, 29, 43, 20, + 29, 87, -57, 108, 16, 39, 42, -39, + 16, 69, -65, -11, 6, 12, -19, -6, + -60, 11, -32, -21, -41, -23, -128, 50, + -11, 24, -44, 17, 7, -1, -20, -91, + 33, 47, 10, -6, 17, 57, 42, -30, + -41, 12, -50, -4, 22, -16, -31, -21, + 10, -69, 20, -29, -51, -32, -6, -15, + -50, -1, 13, 9, 38, 33, 68, -12, + 58, 115, 18, -68, 12, -127, -37, -8, + 39, -4, -70, 15, 31, -3, -17, 58, + -56, -19, 25, 3, -22, -92, 63, -4, + 122, -56, -9, 55, 39, -80, -90, 40, + 7, -6, 59, 26, 38, 0, -46, -60, + 43, 34, -127, -71, 32, -17, 29, -80, + -33, -11, -16, -7, 19, -5, 52, 25, + -16, 13, 108, -13, 53, -28, 11, 106, + 99, 35, -113, -103, 9, 4, -6, -67, + 37, -22, 1, -13, -9, -34, -9, -7, + -20, -12, 5, 84, 64, -35, -8, 48, + 31, -20, 22, 61, 95, -55, 17, 32, + -4, -57, -120, 5, 30, -31, 85, -27, + 22, -8, 101, -121, 32, 33, -117, -48, + -15, 18, 38, -13, -57, 30, 79, -127, + -50, -108, -56, -126, -121, 6, 36, -12, + -9, 2, 66, 85, 21, -48, -6, 14, + 44, 18, 97, -127, -20, -128, -85, 31, + 45, -54, 25, 22, -105, -127, 43, -10, + -37, -28, 32, -126, -13, -14, 73, 90, + 127, -28, -13, -73, 97, -49, -52, -17, + -44, -9, -9, 11, -124, 0, -59, -69, + -18, -27, 19, -25, -120, 56, -32, 23, + 18, 41, -6, -2, -57, -76, -9, -13, + 70, -86, -92, 6, 39, -109, 28, -14, + 70, 37, 37, 53, 42, 90, 24, -15, + -120, -14, -11, -10, 8, 22, -17, -61, + -9, -19, 96, -128, -68, 93, 77, 6, + 36, 30, -128, 39, 1, -6, 8, 0, + 4, -83, -124, -86, 6, -103, -58, 43, + 19, -2, -39, -109, -111, -41, -107, 37, + 1, 30, -46, -28, -23, 61, 49, -38, + 31, -35, -96, -126, -38, 54, -24, 28, + 19, 66, -126, -79, -119, 37, 7, -14, + 24, -27, -59, -113, 55, -56, -15, 24, + -7, 5, 14, -92, -52, -43, 91, -23, + 2, 93, -16, -93, -66, -34, -11, 46, + -48, 3, -75, 47, 8, -17, -40, -87, + 113, 121, -117, 19, 16, 74, 106, 29, + -8, -1, 54, 8, 24, 
33, 59, 64, + 38, 68, -89, 72, -26, 9, -14, 20, + -34, -85, 33, 33, 48, 17, 120, -11, + 16, 19, 80, 37, -18, 11, 8, 8, + 5, -41, -76, 7, 31, -42, -14, -6, + -50, 114, 34, 45, -48, 66, 47, -8, + 53, 53, -66, 15, -59, 2, 24, -49, + -17, 14, -22, -62, 73, 20, 5, -55, + 57, 83, 91, 59, -49, 22, 64, -44, + -4, -60, -114, -88, -62, -23, 34, -29, + 110, 123, 117, -49, 1, 124, -67, 123, + -52, -45, 0, -27, 13, -76, -44, 11, + -14, 16, -9, -92, 33, -72, -125, -1, + -41, -122, 13, -56, -29, -44, -121, 27, + -127, 9, 63, 47, -47, 23, -11, -2, + -119, -39, -32, -61, -36, -11, -32, -27, + 98, 61, -122, -84, -23, 106, 114, 42, + 11, -29, -9, -74, 124, 15, -11, 29, + 67, -9, 12, 31, 68, 23, 13, -60, + -46, 40, -37, -41, -113, 70, 37, 7, + 8, 92, 34, 46, -21, 0, 13, 63, + -50, -57, 8, -22, 13, 35, -52, -35, + -11, 24, -30, -47, -1, 56, -35, -11, + -123, -26, 93, 4, -28, -28, -56, 56, + -55, 26, 14, -41, -7, -14, -19, 35, + 16, 21, -5, -19, -10, -26, -70, -5, + 26, -5, 62, -15, -15, -97, 49, 56, + 63, -30, 2, -103, 15, 22, -6, -30, + 18, -55, 46, 16, -16, 41, 78, -18, + -53, 21, -11, -73, 27, -4, -19, 27, + -21, 37, 12, 50, 5, 0, 13, -26, + -60, -39, -61, -24, 75, 36, -26, -28, + 26, 18, 10, 25, -47, -4, 35, 7, + 21, -3, -51, 28, 16, -23, -44, 15, + -35, 89, -36, -81, 73, -7, 22, -34, + 6, 24, -17, 72, 90, 92, -35, 43, + -14, 20, -26, 104, 78, -22, 39, 29, + 38, -29, 15, -24, 55, -23, -18, 10, + 51, -12, 84, 22, -10, 10, 127, -27, + 127, 17, 6, -6, 54, 0, 117, 49, + 8, -13, 114, -15, -28, 4, 8, -28, + -60, 125, -46, -60, -15, 10, 43, -23, + -121, -27, 20, 50, 25, 72, 61, -98, + 35, -21, 39, -7, 81, 27, 10, -23, + 41, -123, 97, -76, 65, 10, 125, -120, + -86, -73, -4, 73, 109, -126, 50, 94, + -47, -74, -8, -83, -50, 1, 16, 46, + -34, -62, 24, -25, -13, -58, -117, -108, + -28, -8, 63, -10, 56, 25, -65, 41, + 53, -46, -47, -109, 98, 7, 72, -7, + -38, 47, -76, -126, 42, 124, -10, 65, + -52, -62, -25, -108, -101, 14, 3, 75, + -9, 4, 86, -124, -126, 72, 35, -2, + 21, 26, -66, 43, 30, 
-123, -28, 46, + 3, 29, -49, -96, -79, 40, -43, -98, + 60, 27, -125, 72, 1, 63, -49, -3, + 22, -49, 23, 27, -32, -20, -21, 52, + -20, -49, -11, 0, 24, -4, 20, -67, + 12, -23, 6, 17, 1, -10, -27, 5, + -15, 3, -35, -5, 12, -14, -33, -20, + 35, -68, -27, -66, -58, -14, 52, 40, + -5, 38, 96, 32, 11, -47, 27, 0, + 44, -30, -70, -23, -33, -46, -58, 66, + -19, -14, 1, -24, 25, 12, -99, 30, + 28, -14, 36, 19, 27, 10, 51, -47, + -118, 11, -44, 16, 30, 14, -16, -12, + -52, 1, 4, 4, 9, 27, -26, 47, + 15, 7, -26, 11, 37, -30, 7, -22, + -13, -87, -56, 118, -31, 12, -61, -65, + 22, 127, -69, -111, 115, -57, 86, 121, + 66, 108, 61, -64, 52, -25, 33, 45, + -101, 123, -19, 20, -5, -34, 61, 62, + -14, 63, 55, 18, -31, -89, -60, 68, + 63, -31, -46, 44, -44, 23, -125, -23, + -122, 103, 44, -102, -5, 124, -90, -71, + 12, -5, 77, 40, -47, 54, 52, 47, + 9, -53, -77, 33, -30, 23, 49, 39, + -126, -34, 2, -33, -123, 30, 48, -69, + 11, -87, -109, -60, 27, 30, -26, -26, + -8, 56, 8, 0, 25, -1, 106, 1, + -35, -98, 79, 20, -37, 0, -79, 64, + -2, -99, -123, -65, 28, -31, -63, 48, + 40, 68, 55, -32, -117, -2, -75, -67, + 3, 111, -36, 7, -53, -63, 125, -73, + 121, -74, -11, -63, -74, 54, -20, 21, + -91, 118, 30, 16, -33, -128, 69, -3, + -64, 19, -15, -100, -14, -127, -84, -86, + 26, -37, -14, 13, -20, -46, 31, 34, + 43, -4, 70, -128, -60, 19, -19, -75, + 48, -79, 84, -34, 127, 14, -119, 82, + 82, -63, 88, -28, -127, 59, -35, -127, + 48, 11, -116, -126, -3, -110, 24, -78, + -4, 46, -25, -76, -29, 53, 10, 47, + 16, 46, 5, 34, -18, -45, -30, -2, + 88, 68, -4, -40, -21, 24, -112, -115, + -13, 5, -47, 21, -32, 109, -120, -51, + -8, -47, 127, 66, 28, 4, -76, -22, + -10, -4, -44, 5, -77, -12, 12, 35, + 115, -92, -39, -123, 8, -106, 17, 124, + 21, 9, 57, 42, -11, 49, 36, -115, + 104, 18, -17, -20, -63, 47, 29, 19, + 22, 82, -73, -128, -66, -102, 18, 20, + -74, 58, -22, -115, -121, -121, -66, -74, + 4, -25, -34, 67, 42, -3, 0, -68, + 50, 16, 47, 74, 30, 125, -49, -14, + 105, -32, 97, -30, -67, -20, 10, 15, 
+ 4, 2, -18, -62, -43, 15, 8, -61, + -4, -33, 7, -46, 71, 46, -5, 66, + -24, -1, 86, -99, 27, 78, 41, 15, + 14, 5, 100, 58, -50, -32, -35, -32, + 89, 32, -93, -7, 69, 18, -43, 36, + 97, -29, 68, 41, 77, -25, 65, 67, + 89, 20, 25, 28, -35, -16, -10, 125, + 101, 126, -53, 39, -112, 120, -23, 8, + -10, 9, -38, 43, 4, 127, 43, 51, + 86, 5, -79, 38, -14, 36, 9, 29, + 46, -9, 7, 34, -31, -89, 28, 18, + 50, 84, -67, -12, -12, 23, -91, -95, + -28, -82, 44, -40, 10, 1, -10, 0, + 12, 46, -5, -40, -76, 11, -27, -42, + -22, -5, -71, -39, -89, 96, -56, -89, + 21, 73, 72, 1, 15, -122, 73, 13, + -31, -16, -57, -27, -10, -27, -51, -71, + -18, -48, 0, -5, -37, -6, 81, -7, + 19, -25, 13, -28, 24, -82, 90, -73, + -66, 38, -11, 17, 19, 65, -30, 80, + -23, -68, 65, 16, 18, 66, -17, -58, + -52, -71, 14, -1, -2, 39, -2, 55, + 10, 21, 10, 3, -110, -53, -10, 39, + 36, 84, -45, 41, 28, -10, 22, 35, + 8, 23, 32, -40, 37, 34, 39, 15, + 7, 22, 70, 35, 18, -40, 5, -8, + 33, 31, -36, 8, -6, -24, 25, -23, + 6, 12, -79, -16, -21, 126, 54, 7, + 4, 66, 18, -22, -4, 35, 40, -40, + 68, 41, 13, 43, -8, -7, -34, 39, + 21, 24, 6, 32, 16, 5, -38, -126, + -36, 26, 61, 61, -1, 78, -35, -19, + -44, 30, 12, -40, 12, -3, -13, -12, + 48, 32, 32, 42, -4, -18, -26, 22, + 11, -108, 88, 82, -39, 65, 39, 35, + -16, -95, 29, 63, 104, -17, -18, 12, + 33, -81, 1, 103, 16, -39, 31, 6, + 44, -102, 97, 10, 48, -38, 124, 34, + 10, 23, -66, -35, -102, 19, 81, 87, + 34, -27, 44, -14, -44, 1, -25, 27, + 125, 10, -110, -27, 5, 2, 10, -60, + -28, 15, -75, -13, 52, 29, -17, -47, + 10, -15, 98, 43, 1, -25, 37, -110, + 76, -20, 24, 20, 24, 45, -50, -35, + -10, 80, -17, -62, 47, 17, -31, 16, + 2, 77, 94, -108, -43, -12, 44, -124, + -31, -15, -78, -84, -114, 97, -47, 91, + 33, 23, -104, -24, 127, 28, 61, -109, + -53, 68, -26, -108, -12, -88, 75, -10, + 87, 99, 28, 32, -125, -48, 121, -70, + 56, -61, 37, -90, 123, 87, -43, 3, + -14, 96, -6, -4, 91, 35, 11, 66, + -38, -108, -21, -7, -5, 96, -34, -52, + 69, 28, 33, 82, 22, 36, 19, 6, + 
10, 47, 86, -122, -11, -13, -66, 41, + -24, 22, 89, 66, 24, -24, -11, -25, + -28, -67, 113, -110, -128, -109, -69, 102, + 114, -8, 19, -51, -12, 120, -78, 41, + 9, 125, -118, -11, -7, 60, 0, -68, + 28, 124, -14, 50, 24, -37, 56, -21, + 6, 26, 53, 0, 19, 19, -30, -60, + -53, 52, -26, -6, -19, -48, 37, 20, + -17, -9, -22, 69, 31, -27, -10, 7, + 25, -65, -52, 39, -40, -80, 29, -32, + -78, -4, 13, -26, 22, -45, 94, -24, + 1, -12, 17, 73, 116, -34, 69, -39, + -7, 7, -57, -45, 38, -70, 62, 7, + -12, 55, -35, 66, 48, -21, 48, -5, + 98, -114, 81, 14, -127, 25, -17, 78, + -53, 124, 12, 65, -23, 15, -42, 79, + 7, 0, 45, -20, 62, -20, 25, 35, + -34, -34, -104, 70, 74, 79, -26, -16, + -114, 31, 79, -23, -18, 15, 119, -110, + -54, -83, -7, -8, 57, -50, 3, 12, + 48, -6, 23, -111, 9, -54, 19, -4, + 14, 19, -26, 37, 84, 36, -12, -16, + 39, -95, -65, -20, -23, -15, -7, -24, + -3, 0, 75, -23, 13, 6, 119, 5, + 65, 9, -90, -112, -22, 30, 54, 26, + -58, 39, 116, -37, -22, -54, 44, -1, + 40, -31, 6, 41, -35, 48, 79, -86, + 26, -8, -12, -10, -4, 77, 59, 2, + 58, -84, 14, -85, -7, -11, -22, 5, + 3, -110, -53, -127, -23, -3, 93, 11, + 19, 19, 13, -126, 29, 14, -28, 42, + -33, -50, 35, 94, 3, -56, -88, -115, + -9, -127, -74, -83, 34, 26, -21, -6, + -5, -73, -10, -25, 68, -77, 35, -5, + -114, 58, -64, 21, 2, -7, -111, 26, + 1, -80, -35, -116, -124, -40, 87, 58, + -30, 18, 121, -125, -25, -20, 64, -3, + 3, -44, -24, 59, 125, 10, 29, 26, + 33, -7, -43, -42, 88, 44, 26, -84, + 1, 5, -65, -50, -31, 100, -27, 37, + 28, -14, 6, -61, -14, -106, -6, 16, + 86, -124, -9, -91, -33, -27, -122, -28, + -8, -32, -99, -75, -1, 22, -69, -33, + -1, -45, -24, 38, -44, 45, -51, -127, + -5, -4, 111, -119, 3, -106, -84, -1, + 30, -6, 16, -11, 18, -28, 18, -11, + -20, 48, -65, -29, -63, -104, 16, 60, + -18, 6, 6, -63, 6, 28, 16, -28, + 14, -5, -25, 2, 3, 68, 80, -18, + 101, -51, -126, 56, 61, -22, 37, 1, + 106, 4, -14, 18, -12, 13, 77, 13, + -47, 36, -31, 10, 12, -15, 5, -7, + -7, -17, 7, -46, 5, -21, 81, -19, + 
-31, -103, 63, -27, -32, 3, 120, 65, + -21, 35, -12, -34, -26, -1, -33, 26, + -5, 35, -41, -13, -27, -78, 19, -5, + 15, -43, 9, 22, -12, -64, -39, -16, + -49, 106, -13, 13, -6, -18, 0, -38, + 12, 5, -44, -26, 116, -63, 76, 41, + 19, -47, 1, -5, 39, 1, 110, -11, + 59, -31, -33, 13, -31, -31, -43, -51, + 48, -3, -76, -49, 10, 94, 11, -18, + -1, -31, -94, 14, -17, 35, 3, 55, + 8, 46, -23, 31, -4, 31, -15, -41, + -50, 19, 47, -35, 80, -89, 27, -36, + -12, -32, 49, -64, -68, 62, -75, 11, + 15, 9, -29, 3, 30, -36, 8, -57, + -40, 50, 19, 59, 39, -23, -60, 4, + 2, -4, 3, -28, -2, 125, 10, 5, + -21, -77, 42, 71, 20, -18, 17, -8, + -24, 20, -17, 73, -30, -34, -71, -68, + 40, 1, -1, 30, 47, -16, 84, -16, + 16, 99, -13, -1, 4, 127, -37, 15, + 15, 27, 35, 1, -23, 8, -10, -5, + 48, 22, 9, 120, 110, -56, 6, -52, + 67, 110, -32, 47, 21, -42, 39, -34, + -2, 0, 8, 32, -11, 1, -8, 29, + -34, 44, 50, 1, -8, -44, -32, 61, + 40, 24, -17, -4, -21, 10, 22, 28, + 51, 31, -34, -56, 22, 0, -9, -17, + -5, -48, 2, 34, -3, 37, -23, -8, + -6, 6, -105, -21, 14, 3, -53, 10, + -12, -47, 51, -15, 17, -45, 20, 25, + -19, 17, 10, 6, -59, 5, 35, 25, + 39, 10, 59, 63, 5, -22, 59, 96, + 22, 43, 33, -13, 10, -37, -20, 14, + -41, 9, 25, 40, 39, 32, -27, -45, + -43, 8, 28, -25, 19, 12, 22, 31, + -76, -46, -30, 32, -5, -103, -11, 0, + -19, 109, -115, -38, 31, -54, 76, -48, + 9, 81, 38, -80, 8, -56, -44, 117, + -123, -69, -85, 3, -47, -42, 87, 40, + 64, 124, -84, -128, -105, -41, -122, -38, + 18, -21, -90, -64, 19, -34, -78, -125, + -51, -20, -7, -51, -126, 9, 13, -29, + -114, 61, 126, 13, -54, 83, 21, 5, + 30, -27, -113, -83, -54, -65, -66, 17, + -15, -86, -127, -36, 20, 19, 17, -58, + 37, -13, -24, -22, -125, 81, 12, -11, + 84, -80, -127, -38, -73, 43, -37, -30, + 53, -114, 56, -2, -52, -40, 8, -12, + 17, -77, -89, 46, -13, 17, -95, -73, + -27, -32, 31, -115, -66, 24, 27, -3, + 0, 27, 71, -72, -36, 37, -10, -73, + 15, -35, 14, 7, 60, -17, -63, -11, + 0, -103, 23, -94, 82, 20, 71, 7, + -38, -51, 18, 5, 99, 
-47, -114, 7, + -38, -58, 18, 31, -33, 13, 8, 62, + -21, 109, 47, -126, -10, 71, -12, -7, + 11, -2, -5, -2, 121, 94, -53, -9, + -25, -20, 30, -7, -127, 67, -84, -118, + 22, 34, -32, -106, -90, -91, -29, -75, + -21, 85, 0, -61, 2, -58, 16, 14, + 68, 99, 5, 50, -12, -11, -5, -15, + -37, 10, 20, -29, 25, 20, 43, 24, + 93, -18, 5, -12, -20, -13, -21, -78, + -68, -5, 82, -43, 13, -49, -26, -11, + -12, -40, 40, -12, -53, -21, -23, 41, + 94, -41, -42, -96, -32, -62, 11, 116, + -9, 18, 24, -64, -97, 8, 71, -117, + 37, 40, 34, 5, -48, 49, -7, -59, + -16, 36, -58, 54, -38, -37, -68, -2, + -33, 23, 9, -53, -126, -91, -87, -49, + -126, 42, 29, 52, 69, 35, 54, -11, + 55, -5, 40, 6, 62, 2, 28, 20, + 28, 0, 16, -53, -48, -7, 57, 11, + -17, 2, -89, 1, 42, -77, 31, 10, + 47, 97, -52, -45, 37, 70, -37, -85, + -37, -11, 6, 68, 127, 2, -17, 28, + 6, 35, -9, 27, 34, -25, 17, -45, + 46, -32, 98, 55, -18, 25, -8, 1, + 7, -24, 2, 64, -6, -48, -49, 57, + -37, -64, -4, 1, -81, 58, -44, 44, + -40, -27, -52, 69, 8, -8, 7, 7, + -62, -25, -1, -69, 117, 16, 15, 39, + -22, -5, 12, 6, 126, -6, 0, 16, + 24, -60, -28, -3, -8, 66, 53, 15, + 72, 23, 71, -45, -105, 2, 26, -111, + -82, -69, -9, -83, -5, -22, -120, 118, + 94, -15, 125, -40, -81, -35, -6, 16, + -51, 119, 28, 40, -127, 18, 8, 42, + 22, -125, 36, -30, -58, -62, 48, -1, + -96, 57, 86, -28, 4, -26, 87, 14, + -41, -39, -45, 11, 41, 55, 86, 33, + 30, 74, -18, -24, 23, 5, 30, -25, + 38, 34, -60, -75, 3, 84, 18, -29, + -96, 9, -12, -6, 1, -21, -4, 62, + 33, 21, -18, 71, 65, 9, -46, -19, + 31, -24, 5, 57, 47, -1, 32, -27, + 22, 16, 51, 38, -54, -61, 26, -17, + -10, -1, 20, -44, -27, 15, 16, -22, + -36, 26, 18, 6, -3, -34, -57, 32, + -11, 40, -51, 11, 89, 51, -10, 25, + 18, -117, 61, 54, 35, -61, 21, -39, + -3, 23, 65, 55, -8, -21, 44, -61, + -41, 6, 3, -5, 4, -7, 0, 89, + 9, -10, -65, 64, -12, 23, -34, 11, + -29, -92, 7, -90, 50, -92, 53, -37, + -31, -6, 59, -15, 4, 18, -29, 70, + -55, 53, 13, -8, -121, -7, -33, 10, + 28, 8, -17, -26, 14, -23, 
-71, -7, + -51, 86, -4, -12, -21, -3, 33, 19, + 19, -39, 12, -69, 51, 24, -96, 8, + 107, -17, -41, 105, 11, 16, -17, -71, + 4, 65, 0, 29, 87, -24, 89, 90, + 73, -20, 18, -9, 48, -55, -22, -1, + -43, -26, 47, 1, 79, 13, 68, 18, + -11, 90, -3, -11, -19, 20, 107, -1, + -8, -45, 4, 31, -5, 9, 41, 9, + 71, 42, 79, -1, 6, -20, 13, 2, + 5, -8, -10, -11, -25, -35, -19, 15, + 26, -10, 47, -14, -37, 5, 10, -21, + -21, 98, -4, 62, -14, -30, -12, 25, + 35, 39, -79, 25, -61, -21, -4, -79, + -20, 36, -98, 82, 3, -2, 36, -2, + -23, -1, -39, -20, -84, 49, 104, 10, + -32, -35, -20, 19, 51, -16, 10, 24, + -87, 90, -25, 0, 9, 25, 32, 37, + 40, -51, -16, 24, -4, 4, -39, 49, + 54, 48, -3, 44, -38, 12, 98, 28, + -40, -79, 41, 41, 17, 123, 34, 25, + 27, -49, -48, -29, 51, 16, 19, -39, + 57, 87, -18, 30, -22, -98, -45, 56, + 18, 30, -6, -39, -64, 57, -109, 36, + 15, 37, 0, 42, -15, -75, -13, 5, + 42, -54, -18, -118, -28, -101, -5, -78, + -9, 43, 66, -24, 8, -22, -20, -47, + 12, 42, 30, 23, 6, -19, -9, 15, + 44, 58, 31, -44, -21, -25, -69, 26, + 3, 57, 0, -4, 12, 29, 39, 18, + 14, -2, -81, 8, -26, -66, -2, -3, + 14, 3, -19, -27, 65, 17, -29, -20, + -24, 40, 35, -89, -8, 24, 57, 37, + 31, -54, 16, -40, 13, 106, -18, -14, + -36, -18, 17, 88, 44, -7, -21, 18, + 28, 35, 16, -3, -18, -9, 16, -1, + 36, 16, -31, 37, 49, -48, -33, 32, + -12, 12, 25, -121, 92, 6, -1, 19, + 22, 18, -7, -15, 28, -7, -79, 69, + 35, -62, 113, -28, 52, -39, -24, 27, + -11, 14, 33, 21, 40, 20, -4, 4, + 4, -29, 74, 49, -5, 8, -3, -48, + 25, 23, -34, 64, 11, -15, -29, 38, + 12, 86, -27, -18, -36, -46, 15, 42, + 40, 12, -38, 51, -4, 30, -17, -37, + -63, -16, 35, 9, 26, 40, 60, -19, + 19, 6, -9, 31, -13, 9, 5, -22, + -32, 93, -54, -43, 65, 31, 37, -60, + -6, 10, 11, 127, 32, 42, -58, -88, + 32, -19, 109, 29, -27, -50, -51, 63, + 40, -52, 6, -20, -10, 23, -17, 78, + -3, -27, 59, 31, 26, 26, 21, 31, + -72, -38, 80, -29, 9, 51, -76, -114, + -38, -19, 59, -54, 27, 43, 0, -8, + -60, 11, -1, 19, 18, -70, -78, 117, + -47, -65, 15, 
56, 31, 4, 52, 38, + -14, -36, 67, 16, -20, 12, -88, 35, + 18, -127, 15, 44, 125, -24, 56, 19, + 20, -1, 47, -124, -1, -31, 83, 1, + -15, -125, 23, 70, 53, -61, 67, 36, + -16, 24, 31, 42, -25, 35, -25, -53, + 22, 49, -19, -12, -67, 23, -72, 20, + -24, -118, 1, -9, -16, 8, 68, 10, + -5, -10, 35, -48, -66, -11, 87, 55, + 3, -28, -53, -11, -3, -13, 3, 117, + 12, 15, -51, 5, -69, -48, 31, 10, + 14, 12, 1, 25, 113, -74, -28, -36, + 37, 14, 14, 2, 1, -15, -31, 19, + 21, 22, 20, -19, -10, 81, -44, 30, + 9, -122, 30, 24, -24, 28, 4, 31, + -4, -29, 87, -18, 25, -21, 3, -56, + 58, 39, 89, 34, 33, 10, -29, 29, + 123, -88, -6, 19, 10, -42, -25, 47, + -47, -120, 89, 17, 73, -33, 31, -118, + -12, -10, -47, 6, 74, 116, 23, 94, + 22, 69, 113, -4, 111, 68, 15, 56, + 65, 51, 98, 31, -14, 119, -3, 26, + 62, -9, -88, -21, 77, 62, 0, 86, + 92, -17, 101, -30, 38, 53, -72, -38, + 52, -42, -15, 35, -6, -7, 70, -25, + 67, -42, 52, -83, 44, 2, -56, 60, + 105, 116, -48, -29, 126, 76, -85, 42, + 122, 118, 67, -4, -5, 5, 5, -37, + -29, 20, -34, 31, 117, 27, 75, 88, + 38, -32, -80, 26, -29, 113, -60, 6, + 64, 45, -49, -24, 16, -18, 119, -102, + 5, -26, -25, 7, 26, 106, -75, 21, + -56, 74, 105, 89, 77, 109, -41, 114, + 62, -6, -2, 125, 10, 87, 46, 71, + -48, 22, -27, -15, 75, 127, -55, 36, + 25, 6, 67, -28, -9, 20, -4, 80, + 14, 79, -10, 37, 49, 11, -31, -22, + 54, -109, 8, 20, 36, -29, 15, 73, + 102, -56, 19, -10, 49, 33, 72, 15, + -12, -57, 25, 120, -54, -86, 36, 60, + 46, -71, 51, 15, -16, 14, -87, 10, + -41, -50, -19, 29, 2, 22, -8, 41, + -47, -5, -5, 30, -71, 51, 14, -39, + 48, -1, 69, 36, -45, -57, -20, 53, + 23, -15, 44, -19, -38, -75, 90, -2, + 49, -9, -22, -2, -8, 22, 37, -50, + 95, 57, 34, 77, -35, -81, 46, -34, + 25, -4, -12, 29, 71, -32, -66, 0, + 48, -39, 22, -30, 10, -53, -11, 16, + -32, 26, -13, -42, 18, -8, 76, -44, + -51, -66, 6, -30, 25, 0, -2, -77, + -26, -8, 42, 41, 28, -23, -46, -16, + -63, 37, -57, 7, 50, 7, 30, -22, + -34, 4, -35, 70, 19, -22, 43, 33, + 20, 58, 4, -62, -8, 
38, -25, 74, + -43, 10, -57, 121, 88, 6, -18, -16, + 13, 1, 49, 73, -34, -17, -52, 32, + -59, -1, 19, -14, -38, -72, -23, -39, + -25, -54, -97, -13, 64, -31, -33, -33, + -7, -58, -15, 24, 34, -32, -34, -35, + -50, -89, 10, 1, -12, -7, 120, 28, + -41, -8, 81, -35, 65, 4, 45, -10, + -56, -11, -26, 13, 23, 45, -63, -65, + 6, 40, -18, -10, 7, 2, -67, 5, + 31, -34, -32, -9, 55, -4, 28, 75, + 20, -56, -15, -21, -123, 88, -24, -16, + 6, -21, 7, 14, 15, -112, -38, 35, + -42, -33, 30, 122, -58, -7, -23, -10, + 10, 27, -32, 8, -112, 3, -67, 11, + 27, 21, 0, -41, -5, 51, -51, -2, + -29, -55, 23, 8, 87, 9, 71, 25, + -53, -65, 84, 19, 52, -21, 22, 124, + -49, -3, -117, -34, -82, 45, -2, -3, + -50, 51, -20, 11, -6, 88, -3, -16, + 25, -4, 14, -88, -83, -3, 43, 69, + 4, -43, 2, -73, -26, -74, -57, -33, + -104, 14, -25, -90, -126, 9, 97, -48, + 15, 29, 11, 18, 43, 2, 14, -39, + -75, -28, 12, 10, -9, -6, -43, -64, + -9, -26, 47, 36, 58, 10, 45, 23, + -21, -22, -30, 38, 5, -2, -56, 52, + 16, 19, 55, 33, 2, -26, 7, -46, + 24, 27, 46, 16, -2, -85, -61, -12, + 17, 33, -33, 26, 12, -44, 30, -38, + 20, -13, 13, 42, -61, -30, 25, 30, + 41, -16, 1, 12, 5, -7, 68, 22, + 1, -53, 40, -42, -16, 36, -63, 20, + 57, 4, 12, -49, -56, -17, -65, 58, + -56, 92, -28, 2, 30, 11, -53, 8, + 8, 28, -53, -60, 51, 62, -110, 32, + -85, -9, -92, -47, 73, 70, 47, -28, + 102, -31, 14, -14, -10, 57, 2, 26, + 109, -17, 46, 71, 13, -60, 12, -21, + 3, 13, 25, -24, 1, 19, 52, -27, + 2, 67, -56, 102, -5, 38, -23, 19, + -29, -114, -63, 27, 26, -14, 9, -14, + 15, 2, -18, 53, -8, 10, 66, 15, + -60, -118, -55, 28, -54, -95, -2, 39, + 88, -1, 57, 60, -11, 63, 25, -11, + 110, 32, 47, 3, -42, 15, 74, -36, + 0, 8, -49, 6, 71, 39, 13, -104, + 25, 110, -39, -66, 37, -30, -89, 51, + -86, -21, 67, 8, 2, 88, -18, -69, + -76, 10, 75, 32, -112, -61, 50, 49, + 19, 27, 119, 41, -81, 31, -124, 51, + 39, -49, 87, -10, 3, 31, 123, -47, + -41, -34, 18, 31, 33, -24, -62, -59, + -37, 107, -3, 37, 39, -32, -77, 4, + -47, 6, -55, -4, 45, 38, 
-50, 8, + -9, -23, 77, -59, -13, -7, 80, 10, + 127, -45, -121, -8, 88, 22, 12, 33, + 75, 17, -69, 13, -31, 37, 47, -2, + 27, -7, -39, 35, 22, -83, 0, 61, + 12, 9, 0, 28, 13, -3, 11, -13, + -16, 83, -11, -23, -80, -9, 59, -24, + -2, 32, 20, 37, -51, -12, -41, 30, + -57, 34, -102, -91, 118, 3, 29, 25, + 33, -34, -50, -1, -120, 10, 2, 19, + 12, -44, -23, -57, 27, -61, -11, 60, + -4, 116, -25, 39, -9, -29, -103, 33, + -23, 13, 10, -17, -6, 21, 52, -15, + -22, 19, -31, 48, 64, -69, 23, 2, + 19, 87, -43, -78, -84, -33, 19, 33, + 71, 28, 43, -37, -51, 70, 9, -4, + 10, 27, 56, -16, 28, 19, 10, 10, + 8, 46, 12, 81, -16, 120, 1, -12, + -15, -7, 34, -55, -57, 93, 101, 67, + -68, -3, 64, 54, 14, 29, -40, 15, + -35, 24, -83, -68, 119, -66, 50, -67, + 0, -48, 62, 15, 117, 26, 41, -12, + 29, 49, 65, -59, 44, -79, 63, -65, + 19, -78, 30, 36, 33, 63, 14, 127, + 5, 22, 13, -6, -45, -66, -2, 4, + -3, 2, -34, -41, 50, -31, -22, 50, + -45, 67, -19, -47, -14, 123, -12, 36, + 23, 11, -110, 46, 12, 92, -4, 86, + 88, -23, 51, 34, 21, 50, -87, 35, + 87, -3, 15, -37, 24, -23, -15, 1, + 109, -43, 70, -109, -43, -23, 110, -113, + -114, 29, -61, 68, 31, -16, -13, 10, + -59, 1, 77, -56, 13, -8, 50, 30, + 27, -58, -100, 6, -30, 1, 11, -7, + 78, 8, 37, 44, 21, -61, 59, -33, + -11, -75, -44, 17, 17, 13, -9, 4, + -31, 70, 71, -88, 57, 26, 40, -39, + 46, 41, -40, 45, 5, 11, -59, 31, + 27, 35, 53, 70, -31, 14, -30, 16, + 117, -24, 51, 16, 45, 127, -38, -48, + 9, -32, 51, -18, 17, -70, -23, 5, + -11, -65, -8, -22, 32, -67, 22, -3, + 0, 112, 23, 5, 61, 9, -10, 22, + 32, -10, 34, -27, 17, 0, 37, 37, + 110, 20, 41, 85, 4, 61, -9, 24, + -39, 46, 41, 35, -9, -59, 44, -2, + 9, 19, 82, -66, 23, 80, 6, 40, + 15, -31, 55, 59, -51, 107, 16, -24, + 2, 37, -4, 71, 38, 16, 53, 23, + 32, 8, -24, 20, -13, 40, -122, -26, + 2, -5, 50, 105, 48, 47, -29, 22, + 40, 38, 19, -33, 54, -65, 3, 16, + 31, 85, 63, -58, 35, 33, 11, -3, + -51, -50, 31, -4, 38, 55, 63, 15, + 38, -82, -4, -3, 53, -5, 98, 15, + -20, -61, -2, 69, 81, 
-30, 15, 1, + 17, -78, -88, 58, 65, -87, 44, -80, + 31, -10, -19, 5, -104, 27, 8, -71, + -7, -63, 38, 58, -29, 2, -25, 16, + -14, -5, 5, -26, -48, 7, 22, 15, + -50, -73, 24, -6, -88, -6, 53, -49, + 3, 0, -7, 18, 13, -1, -112, 15, + -85, 28, 31, 53, -20, -57, -10, -28, + 1, -12, -34, -14, 45, -13, -13, 74, + -36, 5, -73, 28, 10, -18, 2, 3, + -37, -57, 63, -75, -59, 33, -7, 53, + 48, -7, 114, -29, -38, -4, 12, -121, + -53, -36, 42, 38, -6, 105, 58, 57, + -32, -14, -50, 87, -84, 31, -56, -35, + 68, 21, 6, 79, 49, 2, -8, 9, + -42, -14, -125, 16, -45, 48, 125, 15, + -9, -19, 49, 64, -30, 11, -67, -38, + -125, -57, -19, 66, -8, 13, -52, -72, + 0, -4, -52, 109, 92, 15, -61, 51, + 36, -41, 26, 60, -59, 4, 17, 72, + -2, 48, 29, -11, -15, -7, -2, -23, + -9, 16, 2, 14, -18, -1, 92, -111, + -2, -9, 14, -17, -21, 23, 30, -4, + 51, 53, -38, -20, 3, 47, -76, -10, + 14, -105, -94, -8, -5, 16, -1, 41, + 24, 38, -10, 31, -109, -16, -4, 39, + -46, -33, -20, -14, -39, -49, -13, -7, + 0, 31, 19, 7, -9, -12, 17, 26, + 16, -120, 6, 25, 39, 45, 24, -29, + -34, -20, 16, -64, -14, 2, -48, 53, + 15, -17, 5, 30, 14, 127, 8, -49, + 62, -89, 42, 66, 16, -14, 0, 30, + -12, 10, -49, 36, 36, -47, -35, -8, + 39, 54, -24, 15, 57, -63, 12, -5, + -46, 1, 96, -119, 14, 4, -48, 17, + -123, 81, 1, 113, 89, 20, -39, -13, + 34, -39, 52, 11, 35, -23, 34, -25, + -4, -9, 10, 23, 30, 27, 20, -30, + -1, -29, 24, -27, -37, -10, -15, -57, + -31, 22, -32, 75, 45, -35, 42, 3, + -32, 76, 21, -38, 14, -52, -9, 5, + -62, 35, -23, -33, -64, 42, 57, 45, + 18, 10, 58, 50, -1, 11, -79, -14, + 59, -61, 14, 58, 26, -21, 62, -44, + 20, 15, 2, 77, 42, -14, 20, 108, + 27, 7, 3, 48, 62, 0, 15, -33, + -13, -49, 29, 66, -66, 56, -127, -1, + 10, -14, -64, 32, 1, 0, -34, 73, + 82, 67, 85, -93, 29, -26, 9, -74, + -50, 20, -29, 40, -24, 70, -105, 51, + -18, -23, -40, 2, 39, 105, -33, -82, + -18, 127, 72, 6, 42, 59, -80, 1, + -22, -4, 36, 52, 117, -96, 123, -14, + -84, 33, -58, -18, -24, 41, 17, -15, + -70, -30, 42, -5, 21, -69, -83, 
107, + 96, 122, 70, 60, -109, -68, 21, 51, + 90, 93, -34, -5, -13, -13, 77, 76, + 70, 85, -56, 50, 5, -124, 0, 13, + -40, 57, 7, -23, -21, 20, -25, -87, + 23, -53, -19, -41, -27, -3, 37, 65, + -33, -30, 42, -75, 15, 15, 76, 25, + 37, 28, -60, -54, -99, 35, -53, -19, + 4, -69, -46, 5, 25, -33, -31, 49, + 16, -58, -23, -6, 53, -14, 68, 47, + -35, -4, -13, 3, 81, -16, -64, -37, + 5, -4, 4, 33, -55, 29, 31, 44, + 5, 13, 77, -24, -31, -56, 15, 41, + 80, -35, -3, 24, -101, 30, 17, 63, + 64, 16, -17, -20, -6, 11, 4, 1, + -35, -79, -3, 42, 46, -26, -29, -11, + -85, -62, 58, 8, 6, -61, 6, -123, + 11, 0, -89, -121, -55, -123, -24, 5, + -81, 6, 123, 32, -62, -82, 2, -89, + -74, -48, -109, -6, 42, 20, -121, 34, + -117, -3, 42, -71, 82, 70, -88, 20, + -3, 9, -69, 11, 56, -23, -66, -49, + 9, -121, -127, -124, 2, 10, 17, -60, + 49, 30, 74, -40, 45, -87, 68, -7, + 74, -26, -25, 0, -119, 14, -5, -16, + -103, 77, -31, 16, 43, 5, 53, -58, + 35, 50, 72, -38, -121, 48, 70, -29, + 33, -30, 36, 36, -61, 28, -18, 37, + 104, -126, 44, -122, -120, -12, -13, -122, + -33, -86, -122, -86, -69, -30, -20, 70, + -60, 47, -52, -87, -51, 20, 125, -34, + 106, 28, -71, -34, 64, -66, -36, -97, + -81, -127, 34, -93, 42, 115, -23, 2, + 32, 66, 100, -32, 70, -44, 16, 52, + 4, 24, -60, -126, 9, -122, -126, -118, + -96, 2, -4, -50, -30, -117, -67, 34, + -57, -1, 12, -70, -82, 17, 53, -39, + 11, -88, -3, 35, 127, -77, -33, -21, + 75, -30, -47, 80, 35, -35, 95, -118, + 22, -12, 11, -7, -124, 127, -67, 64, + -37, -47, 14, -21, -20, -10, 11, 20, + 18, 11, 24, -62, 18, -56, -24, 14, + -9, 9, -126, -56, -39, -3, -47, -35, + 12, -23, -8, 10, 120, -21, 63, -36, + -44, 72, 69, -101, -2, -16, -102, 9, + 9, 5, -100, -51, 116, -90, 42, 17, + 43, 50, -64, -78, -82, -12, -107, 124, + 50, -24, -14, -47, 11, -57, 43, 125, + -49, 3, 38, -4, -46, 46, 32, 20, + -65, -27, -83, -6, -5, -36, -2, 2, + -16, 23, -31, 3, -73, 57, -75, -94, + -24, 5, -13, 15, 9, -12, -5, 8, + 5, 18, -37, 104, -50, 24, -81, 91, + 33, -76, 10, 29, 45, 
19, -25, -86, + -15, -81, -19, 62, 20, 41, 34, -11, + -37, 45, 88, -126, 77, 77, -63, -4, + -12, 28, 39, 63, 53, 12, 51, -51, + -13, -56, -117, 17, -27, 23, -66, 41, + -29, 77, 65, 62, 105, -51, -67, 24, + 92, -118, 60, -6, -14, -23, -57, -35, + -11, -45, 19, 120, -42, -85, -51, 20, + 120, 26, -24, -71, 92, -49, -11, 17, + 34, 24, 64, -23, 27, 29, -29, 6, + 39, -61, 7, 47, 9, -10, -10, -100, + 67, 70, -64, 99, 57, 67, 17, -20, + 58, 28, 15, -43, 57, 14, -97, 7, + 89, 18, 24, -99, 9, 10, -27, 46, + -48, -124, -81, -41, 19, -32, -75, 4, + 80, 58, 83, 59, 65, -26, -11, 42, + 17, -64, -69, 1, -106, 28, -38, 54, + 22, 7, -10, -10, -1, -46, 109, 27, + 13, 6, -43, 5, 10, 17, 7, 65, + 39, -80, 33, 93, -53, -107, -36, -3, + 0, -61, 87, 69, -121, -83, 26, 33, + -62, 73, -77, 26, 125, -29, 31, 108, + -120, 40, -22, 104, 10, 117, -88, -10, + -29, 12, -4, 20, -39, -81, 51, -68, + 41, 70, 9, 3, 63, 35, -14, 15, + -51, -44, 1, -41, 42, -18, 90, 0, + 6, 28, -41, 19, -36, -49, -34, 75, + 19, -8, 18, -2, -9, 46, 22, -22, + 47, -8, 35, 22, 0, -19, -6, 1, + 1, 39, 8, 64, -26, -42, 16, -15, + -16, -6, 8, 3, 39, -6, -1, 31, + -20, -51, 46, 32, 17, -50, -38, 64, + -18, -69, -3, 8, -24, -15, 30, 59, + 31, -12, 9, 27, 56, 7, -32, 10, + 7, -20, 11, -62, 13, -4, -37, 6, + -36, -63, 29, -10, 9, -4, -35, -16, + -36, 18, -18, -23, -57, -16, -38, 19, + 21, 72, -45, -73, 16, -19, -7, -17, + 70, -45, 13, 46, -29, -12, -3, -2, + -54, 122, -5, 91, -119, 4, -23, 39, + -1, -53, 19, -5, -76, 19, 22, -5, + -44, 32, 51, 45, -14, 30, -51, 18, + 106, -18, -33, 43, -15, 9, 38, -22, + 23, -104, 80, 19, 64, -11, 6, 10, + -70, 71, 97, 40, -49, 14, 31, 35, + -126, -5, -8, 15, 47, -20, -13, -4, + 21, 14, 46, 18, -77, 34, 17, 0, + 27, 32, -33, -5, -5, 4, 118, -15, + 9, -10, 60, -16, -15, -48, -6, -86, + 92, -25, -8, -24, 6, -4, -28, -17, + 40, 116, -42, 21, -26, -15, 22, -14, + 42, -12, 20, -37, 49, 53, -30, 11, + 76, 65, -104, 50, -45, -26, 10, -127, + -9, -40, 10, -14, -53, 8, 56, -48, + 26, 14, -7, 19, 55, 5, 
28, -30, + -3, -44, 30, 68, 43, -16, -14, -4, + 26, -74, 71, -35, 49, -56, -1, 7, + -30, 62, -95, 33, -9, -37, 74, 36, + 49, -18, -16, 10, 54, -11, -13, -45, + 2, 25, 8, -12, 20, 91, 8, 16, + 8, -39, 20, -3, -17, 43, -8, 19, + 3, -25, -3, 16, -1, 26, 39, -15, + 6, 25, -43, -17, -65, 46, -39, 61, + 21, -21, 29, 65, -56, 0, 93, 15, + 17, 1, -40, 43, 11, 36, 35, 31, + -20, -66, -28, -6, -6, 89, -35, -41, + 121, 17, -4, 71, 76, 19, -54, 18, + 21, 10, 6, -44, -34, -14, 25, 4, + 50, -92, -53, -61, -4, -38, -15, -5, + -37, -8, -28, -77, -11, -105, -64, -8, + 53, -44, 5, 8, 30, 25, 8, 30, + -26, 68, -62, 52, -7, 66, -101, -3, + -1, -24, -49, -56, -105, -121, 47, 31, + 58, 2, 19, 40, -14, -22, -1, -24, + -14, -51, 59, -19, -21, -47, -84, 15, + 9, -30, -84, -120, -114, -52, 120, -47, + 2, 81, 0, 19, -32, 16, -3, 4, + 25, 56, -127, 3, 49, 21, 9, -28, + -50, -32, -5, 18, 45, 26, -47, -21, + 12, -25, 21, -46, 6, -72, 6, -23, + -58, -5, -75, -52, -45, 19, 2, 52, + -7, -43, 26, 58, 24, 1, -34, -21, + -29, 89, 91, -5, -57, -38, 15, -21, + -41, -10, -16, 91, 52, 106, -58, 36, + -19, -20, 124, -37, 17, 5, 83, -102, + 18, 25, 66, -48, 28, 27, -77, -32, + 24, -31, 31, 0, 79, 25, -56, 28, + 51, -80, 29, -33, -92, 33, -3, -58, + -59, 26, -126, 93, -63, 118, -50, 10, + -28, 59, 21, 27, 3, -109, 53, 78, + 1, 46, 8, 19, -123, -56, 124, 39, + 15, 59, 57, -18, 0, -72, 73, -36, + -6, -24, 6, -1, -54, -26, 5, 35, + 72, -52, -31, -31, 22, -11, 30, 58, + -62, -62, 77, -49, 63, 10, 60, -59, + 8, -15, -21, -91, 2, 37, 24, -57, + 53, 36, -40, 2, -43, -127, -17, 23, + -29, 24, -7, -19, 24, 27, 3, -50, + -22, -13, -39, 76, -44, 100, -36, 6, + 1, 2, 30, -38, -73, 60, -76, -7, + -18, 32, -107, 87, -94, -45, -18, -2, + -8, -21, -41, 45, 104, -4, 52, -8, + 10, -18, -7, 9, -61, 10, -22, -102, + 25, -34, -7, 46, -40, -14, -18, 19, + -35, -15, -28, 76, 15, -109, 45, -3, + 35, 27, -27, -67, -40, -88, -123, 69, + -57, 2, -39, 26, -5, -47, 7, -23, + 13, -29, 24, 70, 37, 14, 26, -67, + 14, -6, 38, 39, 62, 13, 
37, -27, + -14, 63, 120, -34, 113, -3, -41, -54, + 41, -40, 17, -43, 85, -66, 30, -52, + 55, 12, -15, -10, 12, 73, -32, 64, + 80, -18, 18, 48, 26, 3, 88, 13, + 9, -64, 15, -19, 30, 52, 117, -1, + 50, 12, -7, 40, 23, 31, 110, -5, + -46, 51, 46, -116, 25, 88, 14, -24, + 21, 79, 18, -58, -78, -29, 11, -37, + -86, -31, -40, -28, -17, 40, 23, 64, + 17, 5, 15, 50, 26, -28, 13, 66, + 57, 11, 51, -81, -26, -72, 39, -16, + -120, -51, -100, -85, 24, -76, -14, 72, + -42, -30, -103, 1, -75, 49, -38, -82, + 32, 53, 113, -33, -40, -6, 24, 47, + 0, -51, 118, -4, 16, 77, -35, -36, + -6, 4, -16, 124, -91, -65, -31, 35, + -12, 0, -36, -96, 0, 17, -108, -9, + -47, 10, 8, 58, -2, 11, 7, -1, + -17, -8, 8, -77, -10, 16, 111, 13, + 10, -32, 47, -8, 62, 69, -35, -4, + 39, 34, -92, -8, 45, 37, -7, -75, + -1, 45, -38, 71, -90, 37, 31, -43, + 17, -33, 1, -17, -71, 35, 16, 86, + -113, -23, 0, -80, 5, -28, 14, 10, + 33, 47, -6, -36, 19, -16, -52, 33, + -35, -12, 25, -21, -7, 63, 2, 32, + -17, 13, -6, 12, 14, -49, -43, 66, + 5, 19, -103, 34, 27, -8, -20, 1, + 10, -47, 35, -26, -11, -18, -61, 21, + -25, 9, 11, -16, -20, 8, 13, -19, + -14, 27, 5, 15, -16, 9, 59, 0, + 52, 28, -66, -115, 68, 17, -17, 21, + -21, 11, -28, -20, 4, 26, 15, -9, + -25, -6, 17, 5, -19, -13, -11, -3, + 34, -1, 29, -74, 115, 33, 20, -12, + 49, 54, 127, -55, 9, -9, -87, -113, + 120, 8, -25, 19, 63, 21, -44, -31, + 17, 111, 93, 16, 10, -99, -125, 98, + -12, -58, -40, 14, -47, 92, -40, -6, + -38, -24, 30, 78, -22, 109, 76, -10, + 66, 29, -15, -33, -18, 3, 63, 11, + 47, 96, 49, 0, 74, 72, -43, 6, + 24, -26, 80, -56, 66, -23, -109, 55, + -27, 52, 27, -59, -99, 58, -17, -34, + -7, -35, -89, 66, 15, 18, 36, 32, + 0, -101, -40, 35, 121, 14, 37, 106, + -90, -62, -37, -73, -45, -18, -12, -48, + -21, -81, 113, -49, 33, -75, 74, -55, + -73, -85, -51, -116, 53, 16, -67, -5, + -49, 85, 23, 111, 19, 119, -107, -8, + 29, -2, 50, -117, 83, 34, -42, -38, + 7, -3, -46, 94, 95, 127, -45, 71, + 37, 65, -2, 4, -1, -34, 118, 35, + 0, 81, -56, 
-73, -6, 22, 89, -28, + 7, -94, 40, 73, 37, 60, 33, 125, + 57, 127, -37, 46, 67, 19, 89, 86, + -31, -11, -70, 126, 0, 2, 4, -45, + 35, 39, 46, 78, 46, 93, 85, 89, + 74, -72, -21, -11, -31, 93, 16, -38, + -2, -55, 49, -25, -21, -11, 21, 16, + -63, 19, 27, 25, 3, 70, -23, 52, + 18, 15, -101, -47, -66, 99, -126, 101, + -37, 0, 46, 74, -10, 33, 29, 6, + 60, 16, -41, 115, 17, -11, 14, 43, + -22, -11, 78, -69, 14, 69, 4, -98, + -73, 22, 5, 52, 17, -64, 65, 49, + -1, 21, -121, -39, 61, 20, 23, 36, + 108, 57, 19, -36, 62, 38, -29, -8, + 28, 87, -38, -86, -46, -86, -86, 9, + 4, 39, 13, -20, 40, -15, -43, 4, + 9, 24, -16, 4, 68, 41, -6, 74, + -78, -43, 27, -68, -7, -9, -54, -2, + -7, -49, -94, 100, 2, 0, 1, 15, + -53, 25, 117, 1, 125, -22, -2, -21, + -19, 73, 19, 75, -112, 30, 68, -13, + -55, -26, -12, 54, 26, 50, 67, 12, + 44, 123, 8, 42, -14, -22, -70, 2, + 58, -68, -24, -9, -20, 2, 30, -13, + -21, -21, 11, 98, 0, -63, 37, -22, + 29, -1, 23, 50, 28, 74, -15, 23, + -48, 18, -5, -28, 54, -14, -15, -3, + -23, 13, 10, 63, -3, 11, 32, -21, + 39, 80, -106, 41, 1, -55, -25, 53, + -17, -57, 54, -39, 21, -40, 8, 44, + 112, 9, 88, 52, -24, -2, 7, 3, + -11, -13, -32, -6, 75, -14, 75, 21, + -66, 16, 14, 18, 12, 34, 94, -45, + 30, 6, -36, 94, -125, -26, -23, -117, + -93, -3, 0, -1, -33, -33, 34, -91, + -76, 31, -16, 62, 47, -9, -27, 19, + 23, 41, 9, 66, -3, -118, 83, 20, + 118, 5, -22, 37, -91, 31, 3, 23, + 5, 15, 11, 120, 89, 84, 7, 75, + -123, 15, 7, 28, 17, -101, -23, -9, + -5, 15, -2, 72, 21, 10, 19, -29, + 60, 13, 20, 46, -27, 54, 7, 24, + -61, -25, 25, -17, -19, -14, -30, 21, + -37, 61, -63, 41, -48, -15, -20, 15, + 44, 28, -13, -60, -55, 15, 18, -2, + 57, 16, 102, -17, 1, 3, 23, 28, + -20, -104, 66, 99, -31, 44, -25, -59, + 54, 15, 4, 66, 44, 13, -88, -3, + -4, -18, 21, -43, 12, -123, -14, 37, + -28, -11, -9, 73, -5, -62, -12, 25, + 17, -25, 65, 22, -4, 2, -8, 9, + 9, -7, 41, -62, 10, 34, -4, -6, + 6, -6, 0, -10, 4, -9, -5, 9, + 8, 2, 16, 4, -9, -9, -2, -28, + -6, 2, 32, 1, 2, 
1, 10, -3, + -25, -4, 12, -18, 4, 5, -6, -4, + -10, 0, 20, -2, 18, -3, -16, 9, + 1, -4, 15, -7, 13, 5, 1, 6, + -10, -1, -8, -5, -10, -3, -12, 18, + -3, 2, 3, 12, 7, 16, -5, 11, + 7, 25, -7, 25, 14, 7, -14, 1, + -20, 11, -30, -18, 6, 4, 1, 2, + 0, 8, 0, 4, -16, 9, -2, 20, + 14, -7, -2, -8, 4, -3, -7, 5, + -12, -24, -41, -4, -4, 0, 22, -4, + 6, 34, 7, 44, -6, -23, -14, -9, + 8, -32, -47, 19, -26, 3, 20, -37, + -14, -16, -9, -46, -9, -6, 7, -27, + 13, -28, -10, -10, -71, 21, -41, -6, + 49, 41, -9, -16, 6, 12, 32, -1, + 36, -9, -14, -22, 4, -19, -4, -13, + 24, 24, 41, -8, -8, -19, -2, -38, + -6, -20, -12, 21, -21, -26, -90, -22, + 8, 18, 67, 45, -12, -47, 33, 14, + -31, 7, 14, -4, 11, -5, -25, 8, + 19, -49, -12, -19, -20, -1, 21, -26, + 19, 3, -13, -3, -12, -6, -7, 22, + 8, 17, -1, 0, 6, -3, -7, 1, + 10, 5, 48, -13, 17, 2, -5, -26, + 1, 11, 6, 11, 15, 11, -18, 6, + -21, 9, -13, -9, -2, -9, 9, 0, + -5, 23, 33, -14, -2, 1, 26, 7, + -17, -4, -8, -3, -2, 20, -5, -36, + 4, 7, -9, 29, 2, -1, -6, 6, + 13, 27, 6, -7, 1, -11, 20, 5, + 10, 2, 5, 10, 17, 10, -28, 3, + -14, 27, -7, -12, 12, -6, -22, 3, + -5, 11, -21, 1, -17, 1, -14, -9, + 3, -19, 18, -39, -16, -12, 29, 14, + 12, 18, 8, 40, 34, -15, -33, 19, + 3, -5, 9, -7, -26, -2, -38, 27, + -21, 28, -39, -40, -13, -53, -2, 5, + -42, 27, 10, -13, 11, 3, -2, -17, + -36, -5, -21, -13, -3, -6, -46, 25, + 13, 2, 7, -17, 66, -16, 18, 15, + 7, -15, 22, 16, -2, 45, 3, 0, + 32, -5, 22, 37, 8, -1, -39, 14, + -21, 25, -67, 16, 0, 9, 7, -3, + 4, -1, 9, 6, -29, -2, 14, -2, + 1, -9, -9, -28, 3, 7, 14, 16, + -25, -18, 32, 42, -39, -32, -42, -60, + -9, 20, 4, -20, -61, 4, -37, -5, + 17, -16, -25, 67, -35, 17, 33, 27, + 90, 9, 17, -55, 112, -7, -33, -4, + 17, 50, 13, -7, -81, -12, 9, 85, + 22, -20, -2, 32, 10, -9, -4, -6, + 3, -3, 10, -12, -24, -62, -10, 40, + 6, 1, 13, -7, 2, -110, 24, 31, + -15, 42, -54, 69, 45, 88, 13, -5, + 7, -25, 28, -49, -3, -59, 53, 34, + 59, 80, -23, 58, 75, -4, 6, -14, + -27, 91, -16, -22, -5, -29, 12, -50, + 26, 27, 
5, -8, -1, -4, -22, -30, + -39, 18, 32, -45, -18, -12, 45, -33, + -14, -38, 36, -14, -21, 18, -20, -11, + -44, 6, -29, -32, -2, -5, 0, 47, + -40, -18, -49, 5, -2, -19, 0, -2, + -17, 37, -16, -10, 39, 17, -34, 14, + -18, -16, 12, 12, 20, -1, -27, -27, + 36, 15, -6, 22, -28, 10, -17, 13, + 46, 13, 28, 6, 32, -62, 33, 23, + 16, 3, 16, -35, -5, -22, -13, 22, + 10, 43, -18, 2, 46, -14, -35, -39, + 31, -31, -19, 25, 15, 45, -23, 11, + 24, -8, 32, -6, 6, 4, -4, -5, + -27, 40, -29, 19, 36, 4, 91, -20, + 4, -19, 16, 47, -45, 20, -21, 2, + 46, 26, 8, -56, -28, -74, -10, 9, + -52, -6, -42, -17, 0, 21, 11, -26, + -25, -36, -3, -5, -25, -1, -52, 23, + 22, -29, -7, -3, -55, -31, 43, -13, + -13, -53, -23, 3, 28, 34, 14, 2, + 18, -31, -5, 41, -15, 34, -14, 30, + -62, 15, -45, -81, -22, 5, 10, 9, + -1, 13, 30, -12, -45, -1, 1, -39, + -1, 4, -9, -39, 2, 17, 28, -15, + -11, -54, 83, 4, -5, -23, -45, -54, + -1, -48, 30, -29, -2, 45, 48, 60, + 13, 10, -26, 5, -13, -8, 41, 23, + 66, 15, -43, -8, -103, -5, 61, 16, + 5, -31, 58, -38, -51, -10, 35, -112, + -25, 56, 28, 9, 8, 12, 1, -43, + 26, 1, 36, 12, 84, -88, -63, 100, + -42, -4, -109, 10, -49, -88, -11, 27, + -28, -77, -53, 51, 45, -106, 13, 23, + 59, -31, 79, 44, 12, -80, 6, 66, + -10, -35, 37, -8, 60, 39, -6, -45, + -12, -53, 59, -15, 27, -28, -1, -69, + 23, 29, 19, -53, 36, -37, 0, -28, + -32, -31, 43, -31, -33, -20, 0, 44, + -61, -77, 19, -37, -32, 10, 3, -14, + 49, -16, 10, -24, 11, -53, 2, 55, + -7, 26, -38, 18, -3, 2, 36, 61, + -18, 43, -28, -23, 66, 3, 11, 25, + 4, 6, -16, 11, 33, -44, -24, 4, + -17, 11, 1, -5, -18, 33, -17, 8, + 52, 5, 23, 19, 18, -19, -6, -2, + -21, 18, -5, 25, -3, -33, 45, 30, + -4, -17, 10, -1, 69, -77, -21, -60, + 64, -2, -44, 9, 17, -29, -14, -18, + 35, 7, -8, 42, -5, 23, -20, -12, + -19, -19, -20, 10, -22, 45, 33, -40, + -17, 25, -16, 44, 12, -4, 9, -75, + 8, -35, -66, -2, -36, -22, -49, -9, + 13, 16, -30, -19, -10, -8, 21, -12, + -24, -33, 47, 5, -14, 17, 25, 18, + -42, 6, -15, -3, 21, -21, 25, -25, + 
-22, 14, -5, 12, 46, 47, 13, 25, + 18, 7, -8, 12, -30, -24, 0, 60, + -81, -4, 30, 103, -63, -18, -11, 5, + 11, 7, 66, -13, -39, 8, -2, 12, + -2, -11, 1, 36, -37, -9, 18, -46, + 12, -22, 3, 14, -43, -74, 15, 51, + 52, -7, -46, -9, 10, -76, -40, 12, + -111, -20, 11, -85, -5, -69, 6, -102, + -15, -34, 50, 25, 121, -17, 118, -16, + -72, -33, -3, -10, 0, 14, -52, 35, + 10, 62, -60, -49, -29, 4, 1, -17, + 29, 3, 12, -48, -99, 7, 38, -36, + 0, -13, -51, -43, -77, 10, -7, -4, + -6, 103, -58, 72, -19, 58, -43, -30, + 44, 54, 42, 68, 3, -65, -64, 74, + -21, -72, -57, 67, 70, 63, -97, -24, + 64, -11, 32, -3, -14, 5, 2, -74, + 31, 3, 2, -36, -50, -19, 14, 94, + 28, -11, 5, 32, 12, 15, -17, 18, + -23, 62, -3, -41, 34, -29, -7, -54, + 26, -25, 33, 23, -9, 32, -8, 18, + 3, -12, 40, -31, 10, 17, -22, -86, + -18, 34, 65, 8, -11, 13, 0, 11, + 14, -22, 6, 17, -22, -14, 35, 3, + 29, 15, 16, -24, -22, 51, 7, -42, + 63, 11, 18, -7, 7, -38, -20, -30, + 13, 81, 64, -94, 30, 45, 95, 9, + -5, -35, 6, 56, 25, -93, 22, -28, + 8, 17, -48, -90, 12, -41, 5, -38, + -4, 13, -48, 11, -6, 9, -52, -10, + 59, -33, -2, -60, -8, -24, 33, 45, + -31, 4, 34, -9, -8, -36, -22, -37, + -62, -31, -13, -108, -16, 76, 17, -23, + 25, 25, -28, -3, -23, -15, 14, 20, + 16, -3, -35, 9, 29, 2, 7, 17, + -52, 12, -32, 8, 17, -10, -41, -28, + -28, 56, 18, -39, 12, 64, 11, 2, + 12, -22, 37, 5, 6, -10, 6, 42, + 6, 3, -40, -122, -19, -5, -12, -14, + 37, -9, 13, -16, -54, -12, -26, 5, + 31, 0, -2, 100, -15, -12, 21, -22, + -13, -3, -45, -118, 39, 14, -90, 73, + -52, 10, 6, -116, -6, -60, 32, -39, + -35, 42, -12, -76, -16, -105, -9, -54, + 71, 92, -121, -24, -123, 16, -26, -62, + -65, 0, 32, -24, 12, 24, -5, 59, + 15, 21, 4, 5, 3, -4, -5, 36, + 0, -20, 64, 3, 27, 16, 8, 14, + -2, 47, 33, -73, -24, 65, 46, 63, + 5, -46, 49, 14, 3, -45, 44, 39, + -50, -45, 11, 0, 31, -87, -52, 12, + 61, 92, -78, -41, -44, 42, -67, -9, + 36, 94, -5, 71, 23, -7, -11, -52, + -5, -45, -11, 28, 57, 17, -7, -36, + -52, 3, -27, -12, -6, -62, 29, -71, 
+ 8, 27, -31, 21, -38, -58, 3, -12, + -69, 8, -54, 91, 22, 90, -11, -126, + 23, -9, 67, -44, -3, -30, 22, 47, + 21, 1, 38, 21, -85, 1, 26, 2, + 46, 8, 3, 22, -1, -39, -28, 26, + -3, 16, 26, -35, -22, 34, -17, -26, + 88, 30, 3, -19, -8, 9, -48, 0, + 16, 4, -72, 67, -24, 57, 56, 23, + 31, -7, 28, -23, -40, -14, 32, 7, + -1, -40, -9, -60, -2, 62, -16, -14, + -36, 28, -52, 0, 10, -1, 11, -2, + -31, -89, -3, -55, 1, -29, -83, -16, + -5, -39, -15, -41, -48, -20, -35, 49, + -31, -10, -2, 9, 15, 21, 62, -19, + -1, -42, -21, 34, -31, -1, -3, -18, + 45, 17, -55, -10, -20, 6, -31, 21, + 6, -14, -20, 7, -101, -5, -45, -1, + 16, -21, -6, -12, 21, 73, -9, -17, + 4, -2, 15, 9, 23, 7, 10, 20, + 6, -11, 36, 30, 14, 22, 6, -5, + 15, -31, -55, 13, -15, 7, -14, 38, + 15, 22, -8, 36, 12, 6, -5, 56, + 13, -31, -24, -16, 46, 25, 30, 6, + 40, 0, -8, 100, 9, 43, 26, -86, + -47, 54, -119, -18, 36, 49, -38, 53, + 27, -6, 122, -25, 125, -18, -108, -111, + 91, -10, 87, 10, -120, -28, 21, -126, + -12, 42, -3, -76, -2, 58, -32, 75, + -13, -59, 22, -50, 89, -16, 23, 16, + 1, 7, 89, -10, -19, 12, 54, -25, + -6, 124, 71, -13, 10, -7, -49, 9, + -10, -69, 76, -118, 33, -44, 41, -3, + -6, -46, -59, -126, 8, -76, -79, -34, + 7, -93, -2, -25, -25, -50, 16, -66, + -41, -77, -14, 16, -83, 22, -12, -68, + 7, 44, -2, -45, 22, 20, 45, 15, + 21, -47, -71, 69, 25, -9, -5, 45, + 127, 14, 21, -32, -18, -30, -3, -75, + 22, -22, 19, -49, -3, -43, 4, 6, + 2, 17, 36, 23, -77, 14, -36, -25, + 34, -12, 37, 7, 83, -56, -14, 6, + -3, 2, 12, -40, -61, 48, 10, 96, + 95, -19, 0, -15, 17, -66, -30, -28, + 63, -81, 15, -126, -18, 36, -47, 7, + 45, 17, -43, 13, -36, 35, 22, -9, + -44, 77, 0, 22, 4, -2, -17, 25, + -45, 82, -101, -5, -40, -5, 15, 32, + -23, -45, -51, 1, -57, -11, 120, -7, + 91, 9, -24, -42, 37, 100, 80, 20, + -5, -125, 88, 11, -124, 42, -35, -127, + 120, -123, 41, 125, -124, 21, -76, 81, + 29, -11, 61, 15, -120, -115, -17, 79, + 111, 74, 20, 63, 127, -42, 28, -26, + -30, -116, -71, -73, 98, 110, -114, 3, + -48, 
2, 4, -64, -40, 36, -14, 18, + 61, -16, -36, 12, 27, -11, -32, 124, + -34, 65, -51, -43, 53, 45, -44, -6, + 13, 33, -25, -15, -119, -51, -26, 124, + 118, 16, 18, -121, -33, -58, -79, 26, + 18, -77, -12, -95, -79, -38, 23, 32, + 83, 125, 17, 77, -32, 127, 104, -56, + -18, -9, -80, 17, -125, -127, 123, -61, + 38, -1, -78, 22, 10, -21, 3, 126, + -125, 123, 49, -47, -126, 89, -113, 127, + 89, -29, 15, -62, -128, 44, -5, 120, + 123, 120, -15, 105, -126, 111, 25, -127, + -122, -127, 115, -65, 122, 24, -2, 38, + -88, 20, 20, 70, -7, -77, 50, 25, + 73, 117, 20, 114, 93, -52, -125, 118, + -43, 127, 5, -8, 36, 22, 122, 42, + -128, -120, -123, -52, 127, -124, -125, 96, + 1, -35, -76, 62, 60, 46, -102, -5, + 70, 66, 13, 127, 2, 126, 77, 88, + -63, -91, 103, -7, -110, -128, -45, -127, + 100, 109, 18, -51, 61, -18, 47, -127, + 57, 13, 46, 23, 127, 127, 127, -52, + -66, -24, -68, -1, -126, -15, 10, -2, + 33, -61, -127, 119, -75, 127, -34, 126, + 127, -61, -1, 60, -9, -42, 22, -123, + 19, -41, 50, 27, -46, -3, -75, -12, + -75, -55, -90, -99, 14, -2, 90, 47, + -35, -108, -78, 34, -27, -77, -21, 38, + 32, 34, -115, 84, -62, -14, 33, 18, + 30, -122, -3, -3, 10, 99, -7, 4, + 51, 92, 9, 69, 23, 125, -60, -19, + 25, -26, 3, -62, 92, 116, 20, -11, + 86, -34, 105, 25, -98, 17, 45, -54, + -70, 58, -114, 38, 5, -51, -36, 56, + 121, -55, 21, 68, -127, 76, -35, 28, + 28, 49, -113, -30, 112, 125, 82, -12, + -68, 55, 17, 64, 14, 53, -25, 124, + -44, 84, -70, -60, 80, 35, -66, 88, + -25, 77, 107, -63, 125, 48, -101, 127, + -7, -50, 13, -12, -124, -109, -53, -87, + 66, 81, -51, -79, 76, 8, -8, 106, + -26, -85, -113, 88, 99, -40, 84, 62, + 106, -125, -124, 3, 75, 127, -22, 2, + 76, -47, -128, 46, 127, -71, -40, -127, + -117, 9, -22, -92, -60, 72, 35, -128, + -74, 77, -117, -95, 123, 49, 127, -9, + 8, -125, -58, -67, 127, 126, -128, -20, + 126, 115, 122, 110, -127, 116, 0, -118, + -102, -126, -34, 56, -125, -87, 81, 19, + -95, 42, -4, -101, 80, -10, -62, -21, + -55, 127, 77, -128, -126, 127, -127, 
109, + -95, -128, -80, -126, 81, 70, 122, 69, + -70, 101, -24, -22, -4, -43, -126, 77, + -51, -54, -122, 123, 41, -5, -50, -63, + 4, -12, -103, -126, 115, 127, -88, -32, + -79, -125, -83, 95, 61, 56, 48, 127, + 121, 60, 75, -3, 35, 22, 64, 126, + 127, 83, 121, 46, -72, 117, -127, -83, + -128, 34, 31, 49, 121, 46, -53, -51, + 6, 27, -124, -4, -119, 72, -119, -126, + 57, -83, -61, 31, 83, -76, -119, -96, + -55, 101, -62, -128, 3, -36, -42, -61, + 57, -124, 59, 25, 2, -116, 127, 95, + 36, 127, -84, 80, -34, 35, 21, 49, + 9, -12, -17, -39, -91, -24, 30, 40, + -26, 79, 30, -57, 23, 78, 114, 55, + -66, -34, 42, 64, -40, 105, 36, 127, + -43, -122, 35, -60, -73, 83, -44, -15, + 102, -126, 35, 37, -122, -50, -44, -5, + -76, 10, 6, -89, 16, -70, 24, 79, + -41, -98, 100, 72, 108, 7, -71, 27, + 25, -12, -128, -9, 127, 103, 50, -60, + -82, -39, 77, -46, 32, -1, 85, 124, + -9, 2, 43, 28, 122, 0, 1, 59, + 13, 38, 120, 7, 113, 15, -10, 101, + 79, 88, 121, -126, -126, -76, -41, -122, + -13, 79, -126, 102, 73, 126, -56, 99, + -58, 32, 127, 17, 12, -99, -120, -127, + 101, 96, 79, -104, 126, -50, 115, 43, + -105, -50, 127, 71, -126, 73, 16, -29, + -94, 103, -126, -96, -113, -63, 87, 127, + -124, 94, -67, -123, 124, 19, 22, 76, + 19, -111, 15, 87, -128, 123, -19, 88, + 53, 120, 127, -116, -117, 108, 100, -99, + -127, 127, 119, 12, 103, 51, -121, 21, + -90, -126, 85, 98, 13, 27, -42, -102, + 123, 26, 127, -91, 118, 121, 126, 123, + -117, 121, -41, -124, 82, 127, 118, 126, + -124, 100, 64, -62, 127, 88, -22, -89, + 8, -63, -114, -125, 35, -42, 55, 68, + 19, 37, 23, -127, -123, -126, -6, 121, + 56, 73, -64, 121, 9, 88, -25, -115, + 125, -42, 126, -91, 84, 18, 115, -79, + 126, -17, -24, -83, -128, 42, 54, -75, + -104, 123, -108, 60, 50, 18, -121, 34, + -84, 97, -128, 27, -125, 70, -43, 20, + 73, 19, -80, 115, 79, 16, -87, -107, + -17, 2, 103, 71, -17, 126, -55, -108, + -33, -115, 121, -5, -33, -4, 30, 125, + 72, -64, -12, 124, -8, -38, 127, -63, + -48, -40, 101, -67, 123, -19, -55, 6, + 
-23, -29, 79, 62, -46, 69, -119, -44, + 13, -33, 114, -126, 3, -106, 103, 126, + -17, -6, 27, -46, 125, -29, -76, -6, + 54, -89, 10, 119, -125, 52, 123, -19, + -44, -9, 47, -35, -101, -59, 59, 81, + -64, -43, 22, 13, 38, 22, -42, 24, + 45, 52, -89, -117, -17, 87, -94, -37, + -119, 13, -30, -59, -127, 68, -2, 48, + -125, -27, 97, 17, 67, 63, -19, 32, + 10, -54, -39, -1, 32, -26, 27, 16, + 25, 84, -34, -37, -117, -64, -103, -77, + -126, 90, -102, 55, 84, 20, 91, -32, + 15, -7, 126, 41, 82, 40, 100, 30, + 125, -39, 13, -67, 57, -126, 121, 105, + 113, 45, -128, 31, 127, 124, -79, -49, + 39, 21, -123, 65, -29, 46, 53, -105, + -36, -16, -61, -124, -92, -114, 123, 7, + -32, -126, -28, -53, -102, 18, -124, -62, + -102, 13, 127, -128, 127, 80, 49, -16, + -105, -128, 49, -34, 74, -127, -53, -61, + -111, -96, 67, 25, 16, -22, -23, -123, + -125, -59, 53, 111, 104, -127, 123, 38, + 15, 68, -3, -125, -74, -119, 43, 124, + -128, 127, 89, 27, -20, 30, 127, -63, + 3, 17, -13, 125, -39, 106, 32, -32, + 75, -42, 17, 127, 102, -128, -70, 126, + 46, -10, 82, 99, -86, -44, 96, 51, + 81, 34, 88, -105, 58, -40, 46, 126, + -20, 78, 80, 16, -40, 116, -70, -69, + -8, -25, 45, 87, 3, 30, -40, -8, + 11, 127, -127, -13, 75, 125, 38, -110, + 63, 21, 15, -42, 55, -115, -72, -117, + 121, -120, 0, -119, -17, -125, 34, -128, + 35, -1, 31, -92, 69, 62, 16, 17, + -120, -37, -50, -17, -2, -1, 84, -33, + -58, -67, 125, 38, 127, 11, -128, -99, + 86, 40, -104, 34, -14, -39, -29, 39, + 126, 21, -85, -105, 122, -127, -72, 114, + -62, -25, -91, 122, 16, -80, -14, 20, + 127, -108, -22, -13, -125, -1, -80, -48, + 37, -16, 11, 114, 30, -9, -3, 12, + -70, 31, 31, 22, -94, 120, -57, 3, + -75, 120, -115, 33, -101, 67, -81, 67, + -114, -11, -116, -57, -81, -109, -109, -123, + -125, -27, 36, 1, -2, -127, -9, -14, + -37, -125, -104, 14, -49, 14, 49, 8, + 94, -43, -17, 95, -126, 2, -110, 113, + -127, 118, 30, 3, -13, 15, 13, 30, + -52, 105, 75, 31, 123, -91, 122, -7, + 116, -52, 70, 126, -127, -128, 78, 116, + -64, 
108, 82, -51, -127, 64, -120, 127, + -111, -64, 90, 112, -29, -76, 5, 126, + 3, -11, 64, 94, -117, -32, -4, 54, + -20, -121, -85, 115, 115, 126, 15, 80, + 44, -27, 8, 126, 125, 119, -92, 39, + -122, 111, -52, -20, -111, -64, 1, 90, + -63, 85, -71, 1, 123, 85, 57, -124, + -14, 112, -73, -104, -90, 127, 127, -76, + -55, 21, 85, 125, 103, 55, 120, 126, + -127, 109, 116, -125, 25, 53, 127, 96, + -47, 50, -102, 43, -3, 127, 126, -4, + 117, -4, 7, 124, -121, -48, 59, -6, + 125, -98, -83, -123, -113, 17, -123, 82, + 113, -122, 125, 60, 92, 14, 22, -127, + 33, 18, 1, -21, -82, 105, 38, -75, + 98, 107, -89, 2, 123, -83, -55, 91, + -64, 127, -69, 91, 126, 74, 33, -85, + 112, 5, -35, 29, -69, -36, 35, 126, + 34, 126, 121, 124, -61, 103, -123, -128, + 15, 78, -124, -56, 109, 52, 52, -56, + -83, -28, 107, -128, 23, -79, 126, 126, + -4, -46, 38, -115, -109, 119, 16, 87, + 11, -76, 54, 118, 13, 45, -55, 18, + 94, -69, 14, 127, -120, -126, -98, 87, + 100, -42, -44, 42, 18, 121, -128, -47, + -51, 3, -87, -28, -115, -46, 104, 89, + -88, 83, -112, 49, -17, -13, 43, 30, + 9, -76, -111, 55, -41, 21, -4, -53, + -119, -16, -128, -9, -10, 46, -7, -50, + -124, -100, -27, -44, -26, -56, 23, 7, + -128, -2, -65, 45, 80, -107, -102, -14, + -8, -57, -120, -19, -58, -15, -43, 50, + -32, -13, 8, 108, -127, -66, -54, 123, + -127, 127, -85, 110, -108, 118, 6, -42, + -116, 108, 95, -23, 94, -2, -45, -126, + -34, -124, 43, -124, -73, -122, 127, 90, + 64, 49, 126, 43, 127, -126, 122, 127, + 33, -67, -128, 100, -125, -9, 92, -128, + -123, -21, -65, 125, -93, -125, 124, -119, + 64, 6, -116, 4, -127, -86, 125, -128, + 80, -89, -127, 127, -29, 125, -13, -71, + -86, -128, -124, 126, -53, -37, -117, -4, + -47, 126, 21, -98, 125, -45, 52, -124, + 42, -22, -128, 120, 124, 126, 11, 127, + 56, 99, 31, 121, 127, -122, 125, 116, + -117, 43, -65, -27, 124, 57, 126, -96, + 3, -42, -67, 28, 30, -102, -45, -56, + 4, -42, 69, -127, 43, 61, -79, -127, + -6, -13, -96, -5, 49, 37, -57, -100, + 126, 36, 61, 22, 55, -114, 
-58, 116, + 91, 30, 78, -48, 103, 16, -42, -58, + 108, -17, -21, 126, -120, -127, 77, -19, + 71, 126, -10, 49, 36, -44, -23, -110, + -34, -112, -26, -118, 48, -65, 23, 125, + 24, 124, 84, -127, -69, 54, -124, -126, + 82, -2, -118, 14, -2, 31, 122, 33, + 124, -57, 44, -128, 22, -51, 111, 126, + 20, -121, -30, 108, 115, -58, -62, -106, + -116, 61, -25, 127, -120, -82, 29, -117, + -103, 90, -62, 125, 63, 52, -41, 12, + 75, 12, -100, 73, 41, 69, 4, 95, + -87, 74, -72, -65, -85, -36, -29, -60, + 24, 65, 123, -34, 31, 88, -53, 15, + 11, 34, 46, 4, -101, -97, -37, -85, + -14, 19, -16, -101, 76, 38, 8, -11, + -127, -103, -103, 118, 48, -27, -69, -78, + -77, -125, 29, 24, 10, 30, 32, -111, + -35, 23, -41, -96, 41, -22, 14, -44, + 123, 12, -116, 100, -121, -22, -116, -36, + -123, 118, 73, -45, -81, 37, 113, -28, + 77, 30, -21, 108, 124, 41, 57, 108, + 67, 31, 13, -125, -51, 75, 2, 97, + -9, 59, 58, 67, -128, 100, -124, 126, + 126, 56, -22, 53, -18, -73, -125, 123, + -124, -23, -1, 6, 68, -29, 117, -125, + 45, -120, -37, 117, 126, -6, 121, 121, + -24, -124, 6, 52, -121, 108, -76, 0, + -89, 124, -125, 107, -128, 92, -42, 127, + -127, -81, 64, -33, 127, 82, 91, -127, + -121, -43, -35, -127, -124, 127, 90, -49, + -122, -100, -18, 125, 41, 60, 118, 127, + 59, -77, -105, -119, 18, 102, -70, 71, + -65, -37, 54, -127, 39, 107, 124, 55, + 79, -30, 72, -128, -104, 110, -52, -127, + 64, 26, 64, 90, -124, -42, -123, -66, + -34, 19, -13, 126, 34, 55, 39, -100, + 62, -24, -69, 95, 42, -32, 37, -114, + 120, 117, 6, 126, 124, -16, 120, -34, + -122, 126, 49, 125, -14, -72, 116, -125, + 33, -128, -111, 122, -102, 121, 18, 123, + -83, 124, -78, 31, 95, 55, -96, -127, + -123, -3, -125, -1, -47, 121, 8, -47, + -127, -23, 124, -124, 39, -128, 127, 127, + 2, -69, 1, 24, -40, -45, 30, -51, + -66, -13, 8, 43, -2, -35, -85, 127, + -1, -41, 117, 125, 79, 91, -56, -106, + -66, -89, -49, -22, -28, 95, -128, 9, + -128, -106, 40, -43, -128, 77, 20, 85, + -38, 78, 31, 18, -94, 57, 8, 127, + 119, -33, -38, 
5, -18, -77, 115, -60, + -43, 46, 122, -43, -22, 19, 11, -34, + -127, -56, 27, 88, 110, 49, 71, 127, + 76, 57, 127, 40, -62, 22, 43, -123, + -109, 46, -105, -97, 124, -20, -8, -88, + -109, -47, 30, 55, -127, 47, -42, 11, + -43, 121, 21, 48, -126, -78, -29, 53, + 21, -18, -128, 39, 51, -76, 113, 20, + 127, -23, 40, 115, -27, 123, 94, -71, + -69, 39, -26, 8, 127, -42, 107, 126, + 11, 42, -87, -39, -77, 73, -35, -121, + -77, -64, -72, 121, -109, -47, -64, 58, + -87, 70, -36, 122, -51, -42, 48, -53, + -36, -121, -22, 124, 115, 47, -78, 74, + -108, -128, -90, 52, -70, -120, 29, -61, + 49, 112, -20, -51, 69, 27, 34, -127, + 125, -124, -81, 53, -127, 114, 127, 6, + -89, 124, -16, 34, 116, -58, 17, 124, + 80, -77, -128, 122, 76, 19, -127, -55, + 59, -2, 7, 109, 22, -8, 76, 26, + -51, -97, -77, -30, 36, 127, -123, -125, + 114, 39, -15, 127, -40, 127, 91, -88, + 3, 13, 109, -29, 123, 32, -123, 51, + 127, -120, 1, 57, 96, -128, 25, -107, + 76, 127, 114, 122, 3, -17, -2, 13, + -45, 126, 52, -37, 127, -37, -17, 7, + 33, -121, -120, -51, -51, -55, -122, 43, + -127, 87, -124, -124, 79, -121, 16, -125, + -75, 9, -106, -47, -127, 127, -107, -8, + 127, -66, 123, -122, -13, 76, 127, -94, + 116, -126, -23, 117, 49, -93, 14, -71, + 43, -121, 47, 94, -123, 127, 18, 63, + -94, 112, 122, 38, 34, 4, 119, -121, + 9, 4, -125, 92, -122, -100, 123, -6, + -65, 80, 77, -116, -124, 41, -119, 37, + 61, -15, 119, 116, -15, 46, -40, 103, + 47, 62, 122, -37, -17, -17, 43, -4, + -29, -30, -87, -9, 116, -60, 10, 25, + -125, 74, -124, 55, -8, -105, 37, -122, + 68, -128, -126, -21, -104, -117, 36, -120, + -21, -126, 9, -31, 9, -20, 52, -18, + 67, -69, -124, -31, -126, 121, -46, 18, + -86, 89, 87, -75, -72, -68, 127, 23, + -50, 70, 89, 76, 104, 46, 111, 19, + 47, -26, 28, 125, -66, -78, 48, 96, + -7, 64, 108, 65, -125, -9, -122, 126, + 118, -78, 69, 127, 22, -36, 93, 127, + 87, -26, -70, 48, 31, -42, 3, 115, + 87, -120, -43, 121, 53, 8, -127, 124, + -117, -125, 20, 78, 48, 127, -126, -73, + -80, 124, 97, 
127, -126, -29, 123, 123, + -61, -20, 86, 18, 120, 112, -3, -128, + 17, -125, 57, -128, -122, 79, -10, -125, + 121, -16, 50, -39, 125, 33, 17, 124, + 108, -121, -81, -49, -122, 105, -123, 71, + -72, 97, 87, -125, 127, -121, -15, -31, + 72, 75, 82, 126, -122, 108, -71, -60, + 87, 2, 127, 12, -92, 53, -127, 119, + -76, -12, -128, 0, 118, 3, 127, 56, + 93, -120, 14, 24, -1, -122, -5, -80, + 67, 126, -62, 27, 124, 123, 7, -34, + -79, 127, 102, 122, 113, -40, 67, -3, + 126, -109, -38, 87, -68, 1, 22, 124, + -126, 121, 125, -35, 31, 120, -74, -128, + -59, 79, 116, -54, -124, 127, -55, -120, + -123, 35, 116, 98, -35, -127, 105, -127, + 62, 73, -10, 53, -19, 116, -116, 83, + 10, -119, 123, 83, 127, 62, 118, -122, + -45, 40, 11, -31, -57, -52, -14, -119, + -29, 105, 62, -127, 120, -112, -29, -51, + -94, -33, 33, 86, -49, 80, -6, 72, + -110, 93, 2, -34, 107, 113, 127, 121, + 50, -70, -127, -64, 31, 13, 96, -31, + -25, 45, 126, -17, -9, 57, 80, -3, + -104, 35, 106, 33, -46, -124, 83, -89, + -76, -73, 58, -37, -46, 59, 79, -125, + -67, -36, -62, 50, -19, -95, -52, -112, + 56, 26, 110, -89, -92, -75, -100, 46, + 16, 31, 3, -127, -118, -47, -126, -8, + -28, 54, 125, 30, -126, 13, -33, 99, + -38, -119, -107, 9, 117, -127, 93, 61, + 65, -72, -118, 15, 119, 80, 66, 127, + -100, 126, 127, 28, 98, -77, -35, -19, + -54, 6, 35, 124, -127, 127, 56, -80, + -68, -49, -1, 126, 62, -50, 51, 53, + -22, -75, -72, -22, 70, -62, -67, 48, + 86, -125, 14, 126, 14, -76, -11, -76, + -97, 127, 44, -87, -120, -21, -63, -125, + -2, -106, 51, 39, 70, 94, 55, -124, + 27, 15, -123, -47, 88, -128, -113, 81, + 61, -127, -105, -124, 15, -84, -8, -90, + -48, -25, -8, 77, -62, -127, 86, 19, + 26, -118, -46, -26, 109, 10, -19, 127, + -68, 127, -88, 69, -121, 27, -102, -15, + 3, 26, 19, -55, -14, -5, 58, 11, + 15, -114, -121, 114, 126, -117, 56, -37, + 31, 123, 19, 127, 116, -65, -118, -122, + -24, 127, 26, -67, 127, -128, -24, 12, + 127, -33, 13, -51, -13, 127, -114, -34, + -126, 127, -87, -119, -123, -59, 11, 
-123, + -24, 49, 123, 122, -80, 127, -69, -44, + 127, -70, 51, 124, 117, -124, -48, -128, + 118, -60, -67, 124, 116, -31, 39, -94, + -21, -78, -18, 4, -128, 6, 77, 69, + 108, 126, 82, -127, -23, -118, 56, -125, + -127, 24, -121, 29, 53, -117, 123, 38, + -25, -12, 55, 62, -67, 87, 44, -51, + 70, 17, 109, 44, -69, 125, 104, 125, + 62, 59, 114, 25, -124, -30, 124, 32, + 41, 6, 126, -55, 1, -41, 58, 1, + -108, 26, -31, 32, 58, 85, 123, 41, + 68, -15, 85, -80, -127, -103, 44, -113, + 125, 5, 10, -36, 117, -77, 112, -111, + 78, 51, 74, -112, 47, 95, -113, 121, + 16, -48, 127, 17, 7, -125, 126, 25, + -113, 103, -126, 126, 119, 61, 31, 4, + -43, -119, 27, 125, 88, 48, -125, 84, + -57, 32, 21, 15, -127, -127, -126, 127, + 110, -28, 90, 88, -49, 71, -51, 120, + 82, -43, -75, 96, 125, 19, -4, 125, + -61, -33, -35, -85, 122, -24, -111, 124, + -110, -126, -44, 111, 59, 27, -126, -49, + 52, 102, -95, -120, -127, 121, 50, 50, + 93, -4, -20, -56, 127, -35, 28, -128, + 112, -59, 127, -127, -123, 104, 100, -128, + 111, -71, -6, -114, -71, 72, 120, 127, + 127, -117, -17, -19, -122, 39, -66, 124, + -99, -18, 33, -89, 124, -107, 29, -103, + 126, -7, 53, 124, -117, 122, 15, 124, + 125, 42, 125, -53, 64, 7, -33, 127, + -126, 106, 69, -18, 78, -48, 57, -124, + -86, -127, -116, -85, -74, -66, -119, -24, + 15, 22, 9, -82, -5, 52, -2, 8, + -105, 127, 21, -47, 53, -128, 107, -12, + 97, -30, -37, 121, -68, 127, 74, -2, + -58, 126, -124, 68, 105, -53, -126, -60, + -48, 51, 127, -116, -124, 127, 17, -124, + -123, 41, 127, 127, 3, -127, -101, -121, + 83, -111, 111, 125, -126, 62, -5, 49, + 94, -87, -51, 62, 117, 126, 124, -95, + 5, 93, 122, -124, -117, 74, -53, -118, + -27, -46, 127, 12, 112, -68, -93, -109, + 108, -119, 52, -7, -71, 97, -123, 121, + 38, 68, 85, 2, -87, 123, -124, 126, + 69, -26, -57, 14, 127, -46, 89, 25, + -115, -27, 56, 124, -34, -14, 2, 73, + -128, 4, 118, -94, 117, -77, 124, 76, + -34, 3, 16, -42, -77, 45, 11, -116, + -32, 101, 24, 127, 92, -92, 126, -98, + -124, 111, 36, 
-113, -127, 98, -10, -10, + 124, -93, -12, -116, 103, -126, -115, -47, + -126, 113, 34, 66, -55, -13, 95, 119, + -49, -43, 74, 127, 121, 124, -117, 25, + 75, 117, -123, -9, 123, -118, 127, 126, + -86, -46, 99, 102, -126, 54, 126, 125, + 91, 21, -86, 103, -127, -121, -35, 127, + -127, 65, -37, 123, -117, -56, 44, 58, + -123, 91, 13, 124, -8, -44, -123, -101, + 127, -125, -85, -1, -63, -9, -111, -100, + -19, 118, -20, -94, -127, 7, -123, -128, + -18, -124, 108, 125, 109, -49, -76, -77, + 11, -28, -126, -85, 126, 108, 116, 118, + 79, -72, -39, 23, -60, -87, -29, -102, + 79, 81, -14, 124, -64, -120, 109, 61, + -122, -77, 102, 127, -16, 119, -113, 103, + 119, 118, -72, -82, -31, 127, 121, -49, + -13, 123, 124, -42, 124, 2, -74, 94, + 12, -125, 44, -74, 17, -127, 103, -14, + 11, 58, 17, -13, 62, 86, -127, 79, + -122, 121, -23, -87, 91, -128, -123, 100, + 127, -85, -62, -127, -23, 121, -13, -69, + 55, 94, 123, -119, 116, 111, -17, -35, + -77, -5, 126, 28, -31, 126, -42, -54, + 127, 49, 86, 127, 60, 25, -128, -116, + 62, -110, 113, 118, -26, 0, 69, -75, + 116, -31, -115, -2, -112, -127, -61, -5, + -122, 44, -119, 117, 87, 26, -50, -115, + -122, 70, -110, -94, 65, -23, 125, 33, + -125, 103, 91, 123, 109, 123, -17, -13, + 126, 46, 17, -102, -33, 125, 117, 120, + 97, -82, 109, 14, 24, -24, 63, 16, + 62, -3, 61, 42, 71, -38, -73, 72, + -127, 70, -68, -67, -36, -7, 90, -125, + 124, -122, -117, -24, -78, 0, 113, -19, + -60, 52, 68, -7, -126, 10, 79, -22, + 57, 22, 100, 126, -15, 66, -113, -85, + 122, -128, -12, -58, 127, -125, 64, 105, + 45, 107, 117, 88, 29, 62, 122, 106, + -83, -121, 7, 44, -97, -125, -127, 72, + 2, 28, 118, 9, -124, 79, -57, 99, + 38, 127, 43, 90, 26, -34, -59, -125, + 118, 43, 24, 55, 127, 96, 49, 1, + -116, 106, -5, -90, 115, -48, -53, 127, + -39, 18, 125, -54, -124, 116, 18, -58, + -91, 68, 102, -8, -22, 110, -86, 88, + 90, 121, -46, 19, -45, 113, -34, -119, + -68, 5, 3, 18, 106, 20, -127, -119, + 127, -91, -45, 24, 119, -61, -69, 81, + 56, -77, 104, -58, 
-107, -100, -46, 15, + 45, 62, 108, 71, 6, 114, 11, 8, + -31, 9, 126, 127, 125, -86, -42, 15, + 125, 81, 127, 89, -90, -53, -112, 12, + -34, 81, 79, 0, 127, -1, -124, 16, + 1, -32, 127, -32, -92, -125, -77, 3, + 85, 110, -18, -15, 51, 54, -83, -19, + -93, 65, -22, -3, -98, -128, -29, -84, + 121, -125, -29, 27, 46, 119, 37, 125, + -96, 49, 119, 85, -40, 1, 92, -96, + 34, 18, 15, 44, 58, 127, -27, 20, + 10, 124, 44, -13, -108, 17, -128, 61, + 37, 11, 73, 61, -28, 27, -96, 67, + 29, 99, 53, -51, 125, 119, -59, -12, + -60, 30, 67, 121, -43, 64, 103, -11, + -20, 31, -81, 55, 66, -127, -39, -19, + -13, -122, 98, 69, 32, 46, -127, 111, + -36, 113, -72, 70, -65, 56, -125, 94, + 47, -82, -117, -111, -58, -128, 32, -10, + 51, 13, 99, 31, -27, -36, -52, 24, + 33, -97, -98, -31, -86, -125, 122, -52, + -86, -107, 50, 57, -82, -124, 16, -93, + -108, 84, -52, -10, -92, -85, 83, 3, + -110, 127, 116, 76, -43, -42, -103, -111, + 121, -120, 8, 37, 50, -122, -115, 34, + -22, -84, 127, 9, -19, 36, -126, -34, + -39, -118, -15, -112, -3, -80, -69, 77, + 23, -73, -112, 44, 100, 51, 99, 38, + -112, -5, -69, -56, -120, 123, 113, 72, + 84, -64, -51, -123, -90, -59, 3, -105, + -110, -3, 47, 117, -20, 123, 112, -7, + -120, -52, 85, 24, -36, -128, -51, -27, + -5, -90, -6, 103, -54, -89, -40, -116, + -42, 125, 111, -22, -126, 7, -93, -127, + -119, -67, -94, 127, 123, -118, -108, 27, + 15, 1, -17, 23, 127, 45, -125, 124, + 50, -68, 58, 47, 127, -125, 14, 56, + -51, -30, 36, 126, -113, 105, 119, -25, + -32, -43, 115, 44, -104, -57, -23, -33, + 19, 22, -20, 26, 22, -95, -29, 9, + -32, 7, 107, 35, -47, -29, 2, -21, + 55, -126, -90, -128, 3, -125, 127, -33, + 74, 54, 121, 60, -77, 126, 65, -21, + 18, 4, -70, -126, 48, -127, -20, -9, + 30, -125, -125, 88, 58, 121, -69, -63, + -31, 125, -14, -125, -46, 26, 28, 20, + -115, -72, -120, 27, 127, 125, -71, 25, + 123, -28, 122, -126, 61, -29, -124, 62, + 101, -93, 47, -2, -120, -59, -97, 46, + -69, 127, 6, 74, -125, 83, -87, -123, + -30, 20, -74, 55, 71, 
-83, -123, 125, + 36, 3, 4, 112, 64, -120, -18, 126, + 37, -91, 63, 52, 32, 81, -71, -76, + 50, -12, 17, 120, -105, 23, -106, 78, + -96, 60, 75, 57, -50, -11, -40, -65, + -111, 11, 25, 87, -97, -31, 35, -34, + -56, -8, 3, -122, -9, -39, 123, -127, + -127, -29, -124, 63, 84, 102, 44, 11, + 80, -56, 41, -102, -13, 125, -95, -55, + 50, 51, 5, 112, 100, -48, -17, -53, + 0, -112, 14, -30, 61, -7, 32, 38, + 89, -125, 122, 40, 35, 100, -1, -37, + -13, -73, -25, -37, -89, 126, -121, 18, + 79, 76, 115, 21, -7, -54, -50, 124, + 118, 100, -118, 92, -4, -117, 90, -107, + 104, 40, 81, 16, 119, 93, 61, 126, + -127, -40, 25, -36, 16, 66, -5, -27, + -101, 57, 95, 114, 79, 13, -41, 5, + -12, -38, 13, 71, -55, -57, -76, 127, + 16, 73, -31, 33, 75, -76, 25, -125, + 3, -5, -61, -35, 119, -22, -114, 50, + -50, -59, 87, 69, 118, 70, 8, 126, + 11, -30, 126, 18, -125, -39, 16, 37, + 60, -120, 119, -20, 7, 26, 99, 12, + -38, -1, 103, 119, -22, -6, 24, -110, + 111, 24, 110, -125, 24, -1, -85, -9, + 42, -51, 113, 123, 6, -111, -37, 19, + 1, -124, 127, -127, -32, -128, -104, 13, + 89, -4, 92, -92, -114, 0, 120, -62, + 2, -13, -36, -33, 127, -128, 51, -19, + 52, -126, -54, 119, 51, 77, 123, 127, + 40, 86, 77, 38, -15, -69, 28, 8, + 13, -2, -127, -96, 121, 29, -1, 55, + 67, 75, 127, -127, -41, -27, -108, 68, + 18, -11, 40, 8, 127, 25, -52, 2, + -110, 18, 115, -70, 8, 117, -87, 22, + 36, -16, -110, -14, 66, -54, -49, -57, + -7, -99, 42, -27, 28, 39, -64, 35, + 18, 12, 109, -111, 77, 22, -120, 56, + -4, 75, -39, -102, -47, 18, -115, 121, + -22, -81, 54, -81, 13, -11, -125, -28, + 112, 17, -74, 118, -5, -31, -6, 11, + -116, 28, -69, 10, 84, -121, -62, 46, + -127, 4, -33, -28, -9, 54, -25, 29, + -11, 105, -113, -124, 45, 17, -91, 74, + -107, -40, -89, 49, -43, -51, -70, -43, + 69, -31, -34, -49, 48, 47, -119, -48, + -125, -50, 121, -15, 10, 5, 127, 117, + -19, -128, 30, -103, 81, -22, -28, 61, + 4, -62, -11, 4, 24, -97, 47, 38, + -42, -59, -125, -49, 83, -15, -119, 102, + 29, 89, 53, 116, -109, 
-95, 73, 16, + -17, -54, 3, 127, 63, 10, -61, -111, + 104, -15, 41, 62, -43, -32, 74, 72, + -93, 125, 127, 122, 116, -115, -14, -91, + -63, 51, 14, 11, 121, 104, -33, -8, + -103, 31, -44, 81, -31, -79, -49, -29, + -39, -3, -87, 20, 37, -94, 26, 62, + 20, 16, 49, -6, -43, 103, 37, 7, + -17, -122, -15, 10, -111, 13, 99, 14, + -95, 8, 69, 69, -64, -25, -8, -96, + 113, 112, -15, -99, -127, -12, -31, -113, + 16, 111, -12, 24, 116, -6, 10, 96, + -34, -63, 49, -14, 2, -126, 9, -20, + -10, 108, 2, 123, 127, -69, 40, 90, + -54, 12, -50, 125, 87, -106, 31, -127, + -36, -88, -26, -116, 70, -125, 15, -72, + 104, 120, 74, -88, 105, 13, -20, -12, + -124, -40, -87, 6, 108, -125, -87, -1, + 23, -39, -76, -87, -60, 62, -108, 28, + -31, 31, 9, 24, 79, 15, 17, 78, + 4, -47, -12, -39, -14, 71, -17, 70, + 69, 58, 30, 42, 60, 32, -16, -26, + 71, 13, 101, 3, -99, 44, 98, 28, + -24, 14, -24, 125, -59, -29, 122, 16, + -35, 53, -36, -38, -4, 84, 2, 61, + -3, 115, 59, -85, 49, 73, 16, 85, + 97, -92, -1, 41, 18, 6, 30, 9, + 37, -26, 115, 9, 110, -22, -50, 45, + -127, -50, 4, -9, 54, 63, -55, -18, + -125, -12, 20, -4, -99, -8, -23, -42, + 19, -9, -42, 3, -61, -11, 26, -10, + 2, 43, -33, 44, 3, 105, 124, 26, + -21, 48, 21, 117, -63, -118, -126, -44, + -42, -122, -75, 127, -42, 25, 122, -75, + -122, 28, 18, -27, -127, 19, 15, -122, + 27, 80, -13, -18, 25, 89, -16, -52, + 127, 126, -89, 96, 45, 117, 82, 81, + 103, -33, 84, -124, 69, -51, -113, 5, + 80, 123, 125, 98, 61, -86, 59, -46, + -44, -29, -62, 124, -125, -118, -13, -10, + -13, 81, 103, 107, -24, -45, 82, 26, + -27, -34, -81, -41, -41, 10, -18, 40, + -128, -125, 44, 25, -80, -1, 3, -85, + 63, 37, -6, 94, 47, -80, -12, -3, + -21, -17, 125, -28, 48, 19, -35, 46, + -12, 37, 3, -10, -51, 28, -70, -62, + 22, 66, 34, 58, -70, 67, -106, 91, + -94, -12, 83, 111, 8, 114, -17, 8, + 9, -20, 1, 113, 99, 69, 19, 35, + -26, -128, -54, -10, -41, 96, 71, -93, + 12, 44, 3, 22, -21, -5, -18, 126, + 42, 48, 85, 12, -54, 2, 25, -52, + 93, -128, -26, 78, 123, 
-25, 27, 8, + -118, 50, -3, 75, 40, 35, -11, 107, + -13, -93, -13, -24, -24, 72, -85, 10, + -61, 51, 116, -100, 21, 67, 118, 36, + -71, 37, -20, 46, 16, 54, 126, -93, + -15, -5, 33, -114, -11, -5, -88, -11, + -24, 54, 82, -66, -44, -128, 82, 87, + -122, -51, -60, 123, -29, 33, -125, -13, + -59, -8, -22, -4, 37, 24, 28, 123, + 93, 86, -66, -12, 44, 124, -106, 51, + 104, 19, 49, 82, 77, 11, -109, 58, + -5, 19, -73, -56, 127, -74, -12, 120, + -118, 1, 121, 116, 20, 70, -15, 53, + 84, -47, -27, -18, -123, -17, -3, -23, + 5, 28, 68, 120, -65, -33, 111, 122, + -113, -45, -19, 18, -119, -90, -126, -33, + 45, 36, 18, -35, -121, -10, -102, 37, + 127, 112, -95, 44, -126, 59, 102, -81, + 114, 85, 3, -50, 81, -52, -54, -32, + -13, 39, 66, 41, -88, 120, -123, 125, + 87, 23, 67, -53, -43, -25, -125, 63, + 41, -105, 35, -39, -108, -123, -34, -41, + 10, 88, -7, -31, -127, -113, 31, -59, + -92, 62, -127, 96, 109, 127, -11, -27, + 115, -71, -40, 45, 123, -40, 38, 60, + 124, 17, -60, 95, 117, -23, -9, 64, + 100, 126, -40, -56, -7, 126, 24, -125, + 51, 37, 26, -115, 76, -51, -91, -29, + -75, -116, 72, -87, -71, 80, 63, -95, + -1, -24, -57, -32, 25, 105, 14, 20, + 60, 74, -128, 126, -29, 92, 43, 127, + -75, 126, 116, 127, -10, 116, -126, -25, + -37, 26, 58, -92, 27, 67, -43, 56, + -78, -32, 76, 71, 73, 48, 76, -92, + -126, 55, 24, 55, -104, -18, -58, 24, + 109, 30, -128, 115, 66, 7, 31, -85, + 116, 85, 106, -53, 39, -33, 61, 10, + 100, 79, -13, -5, 126, 25, -21, -98, + -74, -41, 7, 12, -8, -85, -121, 65, + -4, 11, -9, -89, -30, 5, -39, 49, + -44, 90, 100, -83, 42, 120, -24, 53, + 122, -114, 2, -83, 38, 17, -31, -43, + -38, -56, -67, 26, -49, -48, -121, -41, + -125, -1, -3, 59, -77, 9, 76, -55, + -53, -11, 1, -38, 51, -112, -76, 101, + -24, -9, 3, 24, -48, 67, 26, -52, + 47, 7, -24, 44, 82, 13, -47, 50, + 3, -97, 39, 56, -18, 49, -69, 126, + -14, -3, 82, 6, -75, -18, 4, 14, + 127, 57, 76, 23, -53, -7, 34, -45, + -100, -94, -97, 12, -32, 20, 51, -53, + -17, -77, 69, 103, -124, -112, -95, 
-122, + -18, -116, 35, -110, 79, 109, 38, 56, + -124, -13, -87, -86, -43, 54, 108, -108, + -6, -11, -68, -11, -124, 8, 122, -7, + 86, -111, 117, -127, 65, 107, 63, -126, + -127, 126, 30, 42, 6, 53, 123, 32, + 126, -71, 20, 32, 125, -125, 96, 126, + -106, -48, -125, 80, -127, -106, -105, 69, + -98, -126, -119, -20, 80, -128, -73, 55, + 119, -127, 92, -12, -77, -12, -38, 35, + -117, 44, 3, 37, 126, 119, 28, 61, + -72, -127, 24, -24, 120, 123, 3, -127, + 78, 61, 40, -128, -28, 71, 14, -47, + 81, -111, -57, -127, -40, -40, -121, 36, + -116, -127, 4, -79, -20, 81, 62, -107, + 24, 16, 105, -8, -38, -85, 127, 90, + 13, -106, -15, 74, -68, -56, 4, 47, + -89, 28, 89, -68, 9, 40, -25, 32, + -33, -119, -21, 25, -74, 68, 112, 19, + -77, 30, -5, 108, -118, 27, 120, -127, + 100, 0, 41, 67, 14, 43, 100, -107, + 77, 127, 70, 48, -106, 52, 17, 40, + 90, -11, -68, 34, 111, 77, -35, -56, + 43, -126, -49, -31, 6, 5, -117, -74, + 12, 49, -63, -68, -36, -97, -117, -123, + 85, 112, -38, 124, -126, -110, 104, -106, + 80, 125, 86, 60, 52, 32, -20, -15, + -102, -5, 23, -9, -31, -126, 121, 41, + -88, -97, 33, -20, -65, -10, 112, 45, + -85, 58, 114, -32, 25, -2, 35, 59, + -99, -22, -40, -71, 0, -58, 85, -58, + 18, -55, 60, 0, 88, 9, -14, -56, + 18, -34, -91, -18, -34, -14, 7, 122, + -121, -126, -35, 95, -40, 105, 6, -2, + -46, 115, -125, 52, -117, 17, 2, 9, + 25, 52, 18, -112, -84, -71, 62, -97, + 22, -80, 9, -22, -75, -104, 127, 76, + 6, -127, -120, -104, 17, -86, -54, 98, + -127, -111, 57, -126, 68, -122, 13, -98, + -49, -125, -1, -118, 121, -37, 35, -111, + -122, -113, -125, -51, -90, 78, -124, 127, + 1, 58, 17, -123, -51, -81, -122, 34, + -32, -66, 14, -113, -126, -124, 127, -126, + -15, 92, -95, -121, -100, 20, -3, 89, + -9, 127, -48, 15, 2, 126, -55, -10, + -20, 125, -109, -32, 68, -119, -15, 17, + -46, -10, -100, 13, 127, 120, 48, -121, + 126, 46, 71, -76, -75, -89, -34, -122, + 65, -21, 43, 11, 102, -128, 44, 127, + 126, -120, -19, 81, 127, -1, -125, 31, + -36, 127, 43, -60, 66, -99, 
-127, -50, + -56, -80, 119, -22, -2, -84, -125, -17, + -65, 126, -63, 105, 51, 86, 46, -123, + -13, 106, -54, 117, -9, 119, -13, 30, + 83, -121, 126, 17, 63, 7, -22, 60, + 7, 26, 18, -23, -39, 8, -63, 43, + 124, -39, 40, -27, 18, -42, -31, -90, + -51, -113, -51, -16, -62, -33, 96, 39, + -76, -78, 110, -49, 51, 11, -10, -72, + 31, -110, -73, 2, 24, 66, -125, -49, + -45, 39, -128, -12, -117, -28, -23, 75, + 1, -122, -13, -63, 104, -44, 14, 3, + -78, -25, -6, 104, -81, 123, -66, -96, + -47, 70, 127, -107, -124, -66, -102, -94, + 127, -121, 32, 89, 53, -21, -128, 8, + 24, 56, 36, -59, -70, -73, -43, 45, + -127, 73, -117, -35, -53, 34, -128, 2, + 83, 127, 1, -45, -14, 6, -44, 6, + -34, 117, 127, -34, 64, 6, -43, -61, + -80, 0, 15, -29, 11, 22, -23, 38, + -124, -30, 108, -78, 25, -54, -57, 48, + -40, -108, -97, -94, -117, -47, 18, -96, + 1, -120, -69, -84, 10, 126, -128, 112, + -14, -18, 14, -34, -119, 28, -105, -100, + 113, 49, 30, -47, -70, -123, -4, -125, + 15, -115, -128, -3, -110, 126, -42, -126, + -11, 21, -63, -123, 127, 121, 108, -80, + -45, 86, 60, 64, 115, 115, -91, 25, + 18, -121, -117, 62, 47, 9, -125, 67, + 4, 88, 106, -127, -125, 123, 69, 125, + -81, -126, 119, -88, 3, -128, 8, 66, + -42, -127, -17, -15, 4, 17, -97, 22, + 127, 98, -114, -19, -67, -17, -90, 78, + -35, 114, -21, 110, 119, 122, 64, -90, + -8, -70, -94, -59, -49, 127, 0, -54, + 55, -21, -113, -33, -127, 70, -100, -24, + 10, 41, -76, -64, -52, -126, -46, -20, + 19, 49, -126, -33, -127, 127, 125, -110, + -4, -56, -70, 45, 3, 18, -122, 91, + 11, 102, -39, -53, -126, 111, 71, 33, + 116, -105, -106, 12, -7, 30, 37, 47, + -92, 42, 9, -121, -45, 23, 87, -20, + 125, 117, -13, -126, 56, -16, 28, -90, + -35, 77, 58, -10, 63, -67, -24, 45, + -29, 8, -31, 119, 0, -13, 100, 122, + -33, -50, -34, -18, -19, -58, -124, -22, + 35, 29, -128, -89, 8, -86, -4, 0, + 44, -114, -47, -32, -62, -35, -123, 19, + -72, -84, -60, 7, 110, -19, 45, 70, + -9, 120, -125, 9, -74, -97, 71, 17, + 3, -40, 1, -103, -94, -33, 127, 
-31, + -12, 10, 0, -4, -20, -75, 28, 84, + -57, -15, 43, 17, 29, 40, 60, -9, + 118, 58, 27, -89, 42, 75, -106, 122, + 31, -114, -114, 43, -35, -90, 95, 116, + -106, 6, -22, 100, 37, -72, 11, -6, + 87, -88, -126, -119, -31, 44, 29, 59, + 50, -60, 32, 75, -115, -18, 28, 126, + -62, -126, 14, -21, -34, 126, 122, -74, + -36, -38, 106, -119, 116, 92, 58, -81, + -121, -78, -127, 6, 127, -70, -97, 112, + 63, -105, -11, -40, 127, -23, -127, -127, + 26, -34, 75, -124, -28, -115, 78, 28, + -30, -124, -41, 15, 120, 127, 81, 72, + -35, -114, -38, -93, 72, 71, 82, 57, + 12, -8, 73, -126, -127, -39, 101, -95, + -92, 126, 54, -30, 11, 26, 34, 74, + 39, -127, -22, -123, 113, 23, 23, -14, + -123, -73, -35, -128, 100, -122, -80, 32, + 23, -118, -40, -53, 48, 18, 21, -42, + -104, -22, -127, -19, 55, -57, 64, -89, + 24, 11, -1, -12, 99, 10, -52, 41, + 37, -123, 62, 123, -4, -57, -51, 127, + -45, 121, 124, -39, -113, 10, 7, -111, + 53, 55, 112, -124, 51, -85, 30, 7, + 6, -4, -50, 21, -124, 110, -126, -6, + 2, -71, 115, 13, 0, -64, -43, 8, + 8, 7, 4, -49, 82, 10, -2, 125, + 124, -58, 68, 29, 9, 37, 78, -86, + 60, -34, 85, -106, -52, 19, 4, 86, + 118, 46, 113, 20, -103, 113, -75, -57, + -54, -65, 86, -16, 12, 68, -90, 84, + -37, -31, 23, -37, 25, 26, -54, 32, + -27, -121, -26, -67, 124, -24, 20, 124, + -75, 74, -28, 14, -29, -19, 33, -36, + 87, -2, 5, 6, -121, -17, -1, 106, + -20, -19, -17, 97, 15, -25, -53, -9, + -60, -34, 61, 35, 11, 84, 11, 0, + 30, 127, 17, -43, 41, 93, -12, 75, + -71, -54, 47, 11, -47, -57, 40, 8, + -36, -36, 127, -120, 17, 57, -15, 38, + -101, 55, -72, -43, 32, -73, -12, -22, + -126, -20, -95, -17, -53, 23, 3, -53, + -9, 27, 122, -5, -25, -40, 18, -32, + -5, -20, 11, -64, 61, 68, 127, 20, + -41, 35, -22, 26, 127, -102, -55, -57, + 51, -21, -45, 34, 47, 0, 85, -3, + -10, 42, 43, 11, -34, -10, -4, -67, + -89, 92, 25, -62, 7, 20, -117, -41, + 103, 70, 34, 45, 38, 53, 26, 64, + 39, 51, 27, -70, 42, -36, -14, 35, + -6, 121, -25, -23, -105, -44, 127, -64, + 23, -10, -53, 
70, -63, -23, 5, -15, + 28, 85, -51, 101, -71, -41, 53, -52, + -26, 19, -46, -64, 25, -21, 16, -13, + -63, -68, 26, 52, -61, 116, 24, -88, + 15, -20, 7, -3, 1, 97, -16, -16, + -100, -103, 4, -8, 88, -89, 24, 51, + -12, -6, 30, -32, -69, 20, 82, 18, + -8, -41, -56, -67, -111, 92, -77, 70, + -30, 5, -3, 11, 26, 10, -115, -30, + -17, 26, -30, 41, 84, -27, -1, -11, + -8, -125, -21, 12, -35, 124, -13, -48, + -13, 16, 9, 4, -75, 24, 11, 37, + -42, -11, 26, 22, 29, 54, -9, -94, + 107, 112, 57, 18, 84, 97, 26, 0, + -121, 6, 28, -1, -55, 54, 9, 46, + 68, -21, 55, -1, 6, -47, -28, 9, + -24, -28, 61, -45, -102, 114, 43, 45, + -37, 11, 6, -15, -102, -70, 49, -125, + -8, -16, -53, -24, 68, -6, 21, -13, + 10, -28, 120, -54, 2, -77, 27, -62, + -112, -104, -14, -59, 45, 35, -117, 7, + 34, -28, -63, 26, 59, -14, -43, 22, + 36, -86, 18, -16, -12, -58, 15, 51, + 56, -73, 24, -13, 67, -15, 125, -13, + 12, 8, -56, 44, 62, -77, -10, 76, + -122, 87, -27, 21, 19, -39, 10, -2, + 44, 12, 113, 31, -11, 37, 16, -19, + 56, -53, 9, -43, -25, -13, 45, 34, + -45, -35, -10, 35, -118, -33, -126, 1, + 122, -37, -21, -15, -128, -125, -29, 24, + -123, 21, -47, -36, -66, 35, -20, -9, + 39, -50, -122, -2, -18, -12, -59, -24, + 14, -87, 109, -46, -67, 51, -125, 66, + 72, -13, 57, -112, -116, -42, -110, 68, + 2, 43, -76, 34, -61, -114, -60, -81, + -33, 1, 33, 37, -36, -10, 35, 24, + -19, -17, -40, -32, 52, -28, -48, -13, + -89, -91, -66, -35, 126, -27, 1, -50, + 74, -60, -17, 16, 75, 44, 28, 105, + 103, -4, -9, -43, -76, 6, -10, -124, + 39, -27, 19, -47, 25, 126, -44, -4, + -116, 22, -32, -49, -106, -95, 111, 15, + -10, 4, -121, -20, 84, 76, 70, 18, + -57, -23, -92, 69, 1, 70, 78, 13, + -21, 114, 58, -20, 23, 50, -28, -16, + -6, 118, -67, 127, -22, -79, -37, -31, + -40, 108, 42, 11, 79, -41, 59, -23, + -115, 11, 14, 38, 86, 3, -25, -3, + -102, 3, -113, -43, 38, -54, 30, -90, + 120, -72, 9, -87, 49, 90, 49, -20, + 14, 11, 15, 33, 115, 125, -12, -68, + -59, 21, 6, 27, 21, -127, -77, 20, + -45, -1, -46, -77, 
57, 105, -77, 2, + 29, 78, -35, -104, -75, 96, -50, 52, + 22, -21, 38, -73, -14, -3, -49, 23, + -65, -13, -112, 95, 125, -120, -123, 96, + -74, 108, -5, -20, 17, -19, 67, -17, + -54, -47, 81, -7, -32, -10, 25, 7, + -5, 111, -128, -36, 51, -55, -1, -18, + 3, -20, 20, -32, 31, -37, -24, -31, + -70, -95, -17, -1, 40, 73, 33, 10, + 88, 45, 71, -24, 27, -47, -13, -78, + 121, 63, -127, 26, -21, 104, -3, 31, + 4, -21, 26, -38, -37, 42, -10, -3, + 119, -15, 11, -119, -128, -128, 0, -11, + 54, -109, 37, -96, -107, 55, 37, -17, + -97, 11, -88, 70, -29, -31, 6, -52, + -73, 17, 120, 69, -17, 12, 107, -10, + -23, 16, 117, 122, -47, 4, 61, -86, + -47, 27, -100, 50, 114, -92, 95, 43, + 47, -31, -34, 107, 99, -127, 53, -11, + -26, 57, -16, 57, -97, 0, -113, 59, + 6, -10, -99, -2, 60, -63, 5, 8, + -43, -15, 9, 39, 126, 28, -4, -74, + 27, 34, 19, -71, 83, 78, 57, 37, + -98, -72, 33, -26, -103, 82, 29, -71, + 20, -4, 1, -49, -127, -85, 24, -37, + 28, 30, -39, -24, -15, 40, -6, -61, + -46, -39, -108, -26, -21, 17, 63, -61, + 77, -29, 41, -20, 52, -49, 117, 2, + -35, 9, 127, -66, 7, 22, -10, -42, + -69, 14, 59, -106, 12, -87, 6, 2, + -13, -123, -94, 32, -116, -76, 35, 17, + -76, 21, -7, 53, -18, -13, 13, -110, + -30, -21, 109, 73, -24, 75, 4, -34, + 46, 127, 78, 40, -84, -15, -66, -7, + 96, -14, -58, -47, -12, 71, -4, -6, + -87, 28, 76, 0, 7, 84, -58, -34, + -45, 30, -84, 9, -125, -43, -25, -34, + -40, 127, 80, 72, 114, 107, 16, 60, + 87, 12, 127, -29, -12, 84, 126, -86, + 92, 127, 91, 86, -123, -91, 126, -45, + 43, -103, -21, 60, 14, -15, 25, 50, + 14, 19, 92, 25, 5, 6, -26, 50, + -18, -37, 120, -81, 46, 3, 31, -6, + -66, -107, -13, 73, -31, -94, -54, 7, + -58, -20, -16, 77, -30, -40, 127, -61, + 69, -21, -66, -24, 48, -55, 9, -25, + -31, 44, 127, -17, -54, 71, -20, -28, + -42, 5, 6, 23, 33, 19, 4, -21, + 74, -106, 32, -98, -119, -89, 102, 46, + -29, -50, -95, -115, 127, -34, 32, 10, + 17, -23, -12, -69, 35, -7, -1, -76, + 64, 37, -114, 59, 83, 34, -22, -61, + 3, -31, -100, -120, -88, 
22, -128, 127, + -11, 10, -100, -107, -122, -6, -49, 8, + -38, -28, -45, -83, -31, -20, -39, -1, + -63, 124, -22, -31, -33, -6, 23, 5, + -3, 38, -123, 73, -23, 104, 25, -27, + 79, 115, -48, -44, -125, 25, -1, -67, + 20, 117, -35, 112, 86, 15, 23, -111, + 122, 21, 9, -7, -128, -2, 0, -125, + 31, 42, 11, 37, 122, -119, 29, 51, + -31, 51, 46, 30, 127, -110, 29, 49, + -3, 30, -75, -21, -42, -29, -127, -49, + -43, 123, 102, -16, -106, -28, -127, -83, + -74, 24, -9, 126, 3, 36, 107, 67, + 11, -18, 54, 77, 18, -28, -15, 15, + 45, 69, 80, -47, 112, 82, -104, 2, + -24, 20, -10, -79, -39, 1, -27, 30, + 13, -1, -77, -10, -3, -106, -6, 13, + -32, 123, -16, 27, 1, 90, -64, 12, + -96, -33, -61, 65, 53, 102, -41, -44, + 51, -47, 65, -23, -9, 61, -81, -9, + -21, 61, -58, -48, 122, 58, -54, 6, + 20, -112, -35, -104, 10, 122, 100, -69, + -51, 57, 52, -29, 90, 74, -115, 18, + 35, -127, 124, 62, -9, -5, -82, 13, + 8, -63, -42, 3, 15, -23, 23, 41, + -51, 87, 123, -36, 11, -34, -74, 38, + -5, 36, -52, -74, -4, -2, -122, 19, + -66, -11, 10, 50, 75, 4, -25, -1, + -12, 57, 108, 84, -38, 29, 123, 62, + -18, 39, -120, 14, 13, 78, -1, 2, + -82, 7, -93, -78, 5, 75, 48, 44, + -60, -33, -5, -48, -20, 39, 28, -86, + -25, -69, -28, -102, -103, 121, -120, 18, + 108, 65, -62, -40, -128, 0, -49, 31, + 37, 29, 88, 33, -96, -59, 23, -45, + 59, -105, 84, -63, 11, -41, -13, -27, + -54, 83, 64, 117, 14, 5, 76, -112, + 16, 94, -61, 28, 101, -56, -9, 2, + 89, 23, -78, 58, -49, -22, 60, 18, + 34, 19, 62, -93, -27, -47, 76, 4, + -33, -35, -12, -23, 87, -43, -2, 21, + 26, -49, -68, 22, 18, -60, -43, -31, + 32, -28, -93, -11, 11, 26, -45, 7, + -40, 6, -76, 86, -30, -79, -77, -34, + 1, 47, -34, -83, -124, 57, -35, -17, + 18, -1, -36, -18, -123, -119, 29, -2, + -4, 37, 34, -67, 84, -80, 49, 7, + 89, -123, -127, 43, -55, 46, 66, 100, + -6, -31, -122, 81, -34, -71, -58, -15, + -15, -24, -18, 80, -24, -68, 28, -12, + 71, -76, -32, 2, 12, -120, -24, -20, + -96, 15, 3, -88, 51, 0, 37, 37, + 106, 73, -17, -23, 97, 39, 
36, -1, + -88, -45, 87, -78, 75, -119, 4, 5, + -1, 11, 23, -18, -33, 114, -36, 90, + 27, 91, 10, -15, 16, -90, -52, 16, + 59, 41, -38, 19, -127, -91, -45, 26, + 55, -61, 37, 45, -19, -10, -65, 127, + 9, -44, -84, -79, -16, 69, 43, 1, + -63, 118, -123, -1, 121, 12, 53, -90, + 15, -42, -19, -33, -62, 32, -24, -57, + 36, 66, 77, 13, -37, 17, 97, 59, + -61, -126, -19, 35, 83, 91, 21, -18, + 39, 78, 58, 55, -29, -44, -38, 20, + 29, -26, -127, -84, 0, -7, 127, 70, + 22, 24, -56, 58, 36, 44, 73, -33, + 121, -85, 115, -106, -93, 82, 64, 26, + -16, -59, -24, 4, 24, 33, 36, 20, + -8, -45, -22, -75, 35, -15, 40, -21, + 56, 80, -77, 36, 108, 90, 84, 6, + 13, -67, -29, 56, 89, -24, 71, 17, + 64, 6, -75, -10, 72, 28, -54, -70, + 31, -39, -30, -110, 72, -21, 48, 86, + 15, -127, 26, 65, 8, 30, 15, -17, + -89, -85, 112, 20, 21, 8, 30, 74, + -68, 40, -30, -64, 22, -14, 5, -10, + 38, -1, 109, 73, -46, 107, -58, 5, + -67, -123, 0, 111, 124, 3, -68, 11, + -126, 50, 49, -53, 104, 20, 36, 116, + 0, 15, -16, 18, 84, 47, -72, -84, + -26, 1, -74, -20, 122, -40, 47, -29, + 96, 63, 20, -36, 101, 31, -46, 21, + 31, -50, 5, 84, -42, 11, -117, 37, + -63, 86, 91, 112, -10, -12, -16, -41, + 62, 16, -95, 25, -48, 30, 117, 0, + -1, -5, -57, 101, -6, -59, -52, -8, + -34, 83, -6, 51, 9, -95, 98, 18, + -74, 8, 1, 34, -13, 22, -58, -128, + 36, -40, 5, -26, -49, -89, -14, -40, + -93, -92, 82, 25, -26, -60, 68, 6, + 49, 18, 33, 18, -10, 15, 22, -7, + -63, -58, -52, 16, 35, 55, -32, 27, + 12, -24, -19, 74, -21, -34, -124, -19, + -3, 112, -41, -117, -94, -91, -46, -116, + -108, -42, 59, -46, -61, -109, -66, 37, + -67, -47, -91, -74, 3, -23, -4, 30, + 95, 75, -76, -122, -123, -9, -82, -68, + 32, -48, 124, -81, -125, -90, -99, -127, + -102, 69, -12, 85, -60, -43, -126, -79, + -112, 59, 24, 54, -126, 127, -82, -18, + 39, -126, 9, -53, -124, -19, 17, -49, + -86, 24, 1, 12, -80, 75, -35, 48, + 2, 11, -26, 68, -119, 50, -9, -46, + 17, 39, -95, -25, -60, -8, -60, 9, + 121, 125, 2, -83, -99, 50, -47, -38, + -123, 
-42, -124, 50, -125, -40, -14, 102, + 125, -88, -42, -127, -6, -27, -98, 58, + 41, -21, -9, -39, -31, -78, -66, 115, + 112, 8, -25, -105, 2, -84, 10, -13, + -67, 3, 105, 108, -28, 84, 37, -92, + -2, 116, 24, 68, -97, -127, 16, -125, + -73, 23, 105, 44, 127, -21, -67, -40, + -42, -88, 48, -32, -26, -105, 6, 36, + -56, 27, 18, -3, -97, 54, -30, -113, + 0, -127, -23, -88, -123, 19, 36, -56, + -61, 4, 41, -61, 57, -128, -38, -68, + -3, 125, -10, 11, 4, -7, -10, 85, + 38, 126, 5, 57, -124, -99, -94, -18, + -72, 79, -30, -97, -124, 123, -43, 46, + -33, -6, -88, 12, -6, 30, -76, -6, + 27, -26, 47, -67, 126, -58, -23, -16, + -47, 15, -71, -97, 2, -127, -62, -124, + 82, -60, 5, 13, -56, -50, -13, 75, + 43, 12, -1, -127, -4, 30, 67, -38, + -77, 75, 10, 32, -127, -59, -24, 33, + 4, 0, -108, 11, -89, 0, -86, 60, + 1, -18, -127, 0, -122, -54, -48, 20, + -90, 102, 121, -86, 127, -25, 79, -13, + 2, -54, -29, 66, 0, 15, -94, 24, + -33, 114, -12, 127, 66, -7, -11, 69, + -37, 38, 20, -58, 2, -71, -25, 69, + -119, -63, 60, -63, 41, -43, 24, -10, + 82, 5, -29, -26, -125, -52, -45, -21, + 54, 3, -4, 8, 39, -75, -83, -127, + -67, 16, -77, 72, -12, -10, 104, 14, + -127, 46, 22, 5, -4, 127, -34, 15, + 72, 126, -8, -83, -10, 6, -57, -44, + -109, 35, 11, 2, 35, 9, -24, -29, + -12, -7, -114, -40, -127, 120, -48, 10, + 48, -33, -39, -124, -119, 113, -63, 22, + 123, 126, 33, 61, -11, -13, 95, 6, + 87, -56, -122, 7, 127, -63, 37, -95, + 125, 68, -30, -92, 28, -13, -105, 91, + -20, 21, 87, -58, 17, -20, 34, 113, + 126, -26, -61, -127, 54, -11, -61, 11, + -29, -53, 71, 24, 60, 26, 76, 43, + 15, 117, 12, -57, 25, -126, 12, -47, + -55, -2, 19, -43, 105, 115, -14, 12, + -58, -56, 47, -16, 56, 68, -7, 6, + -75, 57, -51, -4, 17, 21, 56, -30, + -2, -120, 20, -39, -127, -1, -1, 53, + -50, 29, 50, -27, -69, -128, -78, 37, + -85, 125, 5, 11, 66, -36, 22, -1, + 124, 10, -50, -88, 116, -67, -3, -12, + 24, -20, -22, -59, 89, 112, -19, 24, + -31, -46, 125, -7, -126, -6, 23, 24, + 8, -26, 62, -88, 126, -77, 
-34, 7, + -39, 61, -4, -30, 3, -128, 12, -124, + -13, -83, -1, -24, 102, -6, -25, 91, + 125, -3, 16, -125, -69, 34, 42, -43, + -29, -32, 59, -34, 13, -4, 10, 92, + 74, -39, -79, -27, -7, 39, -51, 123, + -6, 20, -28, 75, -126, -46, -42, 30, + 0, -29, 10, -4, 112, -19, 46, -10, + -48, -19, -20, 43, 65, 13, 16, 16, + 84, 75, -39, 127, 125, 93, 105, 0, + 75, 124, -31, -114, -48, -13, -12, 98, + 126, -104, 66, 25, -9, -110, -95, -94, + -20, -118, 70, 121, -128, -118, 78, -29, + -34, 2, -9, 40, 107, 20, -81, -126, + -38, -42, -112, -55, 65, 123, 118, 56, + -46, 19, 17, 18, 127, 93, -47, 66, + 121, 114, 8, -128, 123, 92, -37, -45, + 25, -79, 19, 46, 32, -70, 11, -41, + 94, -35, -79, 12, -49, 63, 46, 115, + -44, 26, -7, -122, -61, 74, 105, -22, + 76, -70, 86, 120, 70, -84, -31, 53, + 83, 65, -115, -72, 125, -90, 108, -105, + -128, 101, 74, 122, -66, -108, -126, -17, + -38, 17, 4, -15, 121, 115, -38, -43, + -120, 13, -117, -100, 100, 113, -122, -12, + 45, -127, 60, -124, 68, -27, 48, 110, + 85, 15, 111, 10, 58, -82, 102, 59, + 42, 95, -111, -125, -126, 69, 120, 126, + -40, 50, -80, -103, 60, 112, 69, -19, + 26, 41, -24, -6, 58, -115, -35, 94, + 86, -62, 110, -22, -127, -14, -76, 121, + 0, 4, -16, -3, -87, 1, 65, -34, + -26, 124, 37, 59, 40, -106, -13, -16, + 104, -126, -5, -76, 126, -3, 77, 40, + 120, -28, -39, 116, 120, 48, 90, -22, + -127, 96, 5, -88, -128, -95, 20, -19, + -50, 29, 25, -60, 127, -3, -80, -76, + 6, -60, -43, -127, -59, 51, 9, -50, + -9, -110, -56, 61, 0, -17, 49, -34, + 50, 8, 24, -126, -125, 35, 56, 109, + 83, -18, 55, -122, 122, 119, 79, -115, + 116, -8, 65, 59, 51, 127, -27, 119, + 36, 27, 106, 122, -127, -21, 39, 96, + -88, 91, -31, 116, 56, -97, 27, 14, + -63, 99, 51, 117, -126, -98, -50, -16, + 120, 3, 45, -125, -98, -31, 65, -116, + -66, -76, -82, -54, -110, 5, -127, 23, + 16, -73, -127, -41, 22, 69, 39, 106, + 97, -35, 124, 52, -128, -31, 14, 37, + -12, -85, -2, -79, 16, -61, -127, 8, + 39, 57, 38, 111, -67, 77, -127, -21, + -46, -46, -102, -57, 
74, 87, 53, 26, + 48, -123, -71, -78, 31, 78, -82, 100, + 61, 38, 79, 94, -39, 127, 1, -20, + -33, -22, 59, 37, 73, 42, -53, -77, + 14, -23, -96, 53, -46, -61, -117, -22, + 68, -111, 17, -62, -8, 53, -34, -78, + -126, 49, -9, 69, -127, -32, 24, 111, + -125, -124, 19, 125, 34, -6, -122, -13, + 39, -5, -123, -21, -52, -8, -83, -15, + -126, 65, -41, -66, 114, -37, 85, 18, + -71, 69, 45, -19, 64, -2, 31, 125, + -45, 4, 114, 101, -82, 67, 19, -5, + 25, 122, -117, 67, -122, 29, -20, -13, + -124, 12, -87, -125, -16, -33, 32, 30, + -22, 0, 24, 34, -52, -122, -35, 127, + -92, -21, -95, 12, -85, 50, 29, -36, + 27, 19, -103, 117, 38, 64, 124, -63, + 7, 50, -1, 24, -79, -44, -40, 113, + -125, -107, -85, 124, -124, 121, -90, 14, + 22, 124, 71, 48, -121, -38, 22, -11, + -53, 4, -128, 52, 48, 18, -21, -123, + -123, -15, 32, 14, 127, -13, 33, 67, + -28, -32, -77, -127, 46, -14, -101, -63, + -47, 41, 3, -58, -118, -102, 62, 32, + -38, 4, 5, -25, -27, 58, -40, 60, + -44, 109, -120, -32, -56, 10, 105, -112, + -25, 43, -50, 9, -64, -52, -126, 73, + -37, -15, -71, -32, -85, 81, -48, 10, + 16, 45, 17, -74, 95, -48, 78, -27, + 67, 28, -4, -6, -13, -99, 8, 36, + 69, 13, -54, 60, 54, 78, -28, 127, + -18, 47, -10, -88, -4, 13, -26, -23, + -98, -124, 92, 29, 43, 127, -19, 78, + 9, 22, -21, 17, -126, -16, -30, 6, + 31, -128, 22, 100, -21, -46, -126, -98, + 92, -2, 38, -82, -51, -25, 92, 75, + -48, -51, -20, 88, -113, 80, -68, 45, + 68, 127, 32, -74, -89, 120, -93, 49, + 115, 51, 9, 8, 8, 19, 3, -34, + -73, -45, -72, -83, 51, -44, 62, -126, + -36, 2, -16, 82, -127, 118, -21, -19, + -49, 33, 8, 18, 33, -8, 39, -57, + -82, 14, 30, -39, 121, 5, 36, -23, + -103, -124, -124, -20, 38, 69, -104, 54, + 30, -34, 92, -23, 51, 27, 59, -54, + -16, 20, 67, -60, 85, -46, 8, 27, + 25, -62, -110, 31, -111, -49, -95, 116, + -40, -29, 25, -36, 56, -10, -12, 88, + -34, -43, -76, 3, 10, 90, -111, -103, + -31, -38, 19, -100, -115, 124, -75, 18, + -37, 44, -38, -21, -7, -13, 56, 75, + -120, 25, -128, -127, -38, 10, 
41, 26, + -25, -43, 103, 117, -50, -44, 118, 7, + 47, -69, -2, 46, -53, 122, 28, -92, + 23, -14, -19, -16, 26, 54, -102, -94, + -95, -83, -104, -31, 43, -127, -85, 18, + 72, 126, 121, -100, -118, 100, 7, 55, + 8, 28, -53, 22, 127, -104, 124, 99, + 11, -52, -32, -2, -57, 45, -80, -96, + -26, 32, -10, 72, 121, 127, -49, 40, + 12, 1, 9, -75, -55, -30, 126, -84, + 2, -70, 59, 93, -117, 64, 86, -33, + -8, -74, 64, -4, -49, 42, -59, 89, + 7, -48, -19, -15, 43, -66, -81, 3, + -35, -123, 45, -64, 84, 4, -23, 39, + -79, -1, 17, 58, 11, -4, -57, -50, + -5, -100, 76, 127, 100, -48, 43, -96, + 35, 12, -101, -96, -26, 7, -71, -72, + -7, -61, -105, 127, -4, -103, -15, 88, + 46, 27, -16, 41, -125, -75, -22, -58, + -27, 11, -58, -5, -74, -125, -42, -13, + -77, -20, -80, 3, 19, -65, 61, -89, + -47, -56, -35, -80, 124, 86, -5, -49, + 72, -117, -34, -54, 68, 67, -87, 54, + 9, 1, 9, -21, -30, 32, -38, -2, + 51, 21, -118, -25, 107, 30, -32, 60, + 5, -48, 127, 28, -113, -65, -23, -1, + -69, -75, -65, -25, 17, -115, -19, 40, + 49, 120, 42, 11, -48, 32, 66, -26, + 75, -50, -75, 13, -29, 67, -126, -27, + 25, 24, -126, -2, -59, -61, -89, -124, + 78, -26, 76, -126, 36, 28, -3, -16, + -29, -81, 13, -123, 67, 52, 23, 37, + -73, -96, 40, -95, -112, 63, -55, 117, + 63, 118, 34, -74, -119, -77, -23, -14, + -113, 111, 28, -80, 37, 103, 5, -25, + -79, 73, -76, -84, 45, 57, -67, 6, + 8, -51, -67, -127, -58, -127, -33, 0, + -105, -24, -77, 48, -29, 125, 118, 77, + -112, -128, 41, 68, 126, -30, -34, 13, + 115, 109, -9, -34, -44, 121, -11, 100, + -12, 43, -88, 51, -41, 37, -110, 43, + 127, 36, -42, -58, -108, -119, 36, -107, + 20, 23, 111, -4, 126, -9, 125, -69, + -42, 32, -33, 106, -7, 127, 6, -126, + 29, 57, -29, -100, -91, 126, -4, 74, + 33, -5, 5, 94, -20, 66, 34, 68, + 37, 39, 82, -59, 69, 124, 75, -123, + 86, -70, 91, -70, 17, 107, 93, 60, + -21, 51, 74, 21, -39, -90, -96, 13, + 17, 121, -111, -30, 72, 28, 73, -13, + 0, 12, -18, -7, 9, 32, 16, -21, + -87, 46, 37, 8, -7, 47, 8, 24, + 0, -3, 36, 
-35, -20, 12, 36, -4, + -61, -5, 6, -77, -52, -4, 74, -23, + -4, -18, -25, -6, 127, 39, 44, -28, + 13, 37, -22, 9, 24, -9, -36, 121, + 7, 15, -10, -3, 2, 20, 15, 32, + -4, -71, -12, 57, 107, 21, 11, -9, + 50, 2, 44, -6, 17, -5, -44, 13, + 81, -6, 37, 7, 3, 56, 10, 2, + -20, 44, -36, 7, -6, -31, 17, -5, + -11, 20, 2, 61, 115, 0, 11, 3, + -8, -23, 3, -70, 4, -65, 11, 9, + 52, -19, -51, -16, -3, -34, -32, -49, + -61, -12, 0, 49, 23, -52, 119, -12, + 32, 68, 8, -5, 12, 32, -14, -18, + 37, -11, 45, 6, -68, 9, 35, 15, + 10, 89, 114, 38, 53, 26, -1, -51, + -24, -85, 25, -50, -10, 20, -16, -22, + -6, 127, 21, -7, -127, 12, 9, -18, + -42, 6, -58, 27, 1, 24, 18, 16, + -14, -2, -52, 63, -95, 10, -13, 127, + -35, -22, 6, 112, 64, 10, -16, -20, + -39, 1, 0, -19, -101, 5, -1, 110, + 2, 44, 7, 4, -49, -17, 17, -8, + 37, -61, -7, -6, -16, -99, 73, 2, + -16, 22, -2, 18, 15, 4, -27, 24, + 11, -47, -3, 15, 127, 21, -7, 15, + 15, -20, 3, 26, 127, 38, -38, 0, + 6, 37, -10, 7, 23, 1, 36, -128, + -10, 13, 29, 16, 17, 39, -27, 8, + 98, 5, 14, -75, -126, 12, -10, 30, + -31, 19, -36, -55, 0, 28, 83, 86, + 11, 6, 15, 41, 13, 38, 9, 12, + 15, 14, -21, 13, 116, 14, 0, 9, + 10, 99, -53, -14, -128, 8, 19, -5, + 5, -30, 2, 60, 36, 75, -69, 6, + 53, 28, 44, 27, -33, -30, 36, 7, + -59, 11, 80, -71, 44, -39, -42, -25, + -38, 13, -123, -28, -24, 23, -58, 33, + 4, -29, 16, 67, 19, 126, -7, 9, + -32, -7, -34, 51, 5, -5, -16, -3, + -41, 5, 23, -7, 26, -11, 126, -12, + 37, 60, -2, 14, 11, 0, 42, 43, + 11, -51, -50, -28, -15, -3, 21, 16, + -119, 55, 21, 71, -9, -17, 13, -60, + 112, 36, 55, 2, 5, 0, -7, 1, + 47, 4, 18, 44, -36, 51, 0, -41, + -47, -66, -9, 39, -19, 54, 11, 4, + -71, -35, -33, -17, -40, -92, 125, 31, + -28, -7, 25, -19, -89, 14, -94, 61, + 124, -29, 8, -123, -8, -65, -30, 38, + 2, -23, 23, -80, 5, -43, 21, -19, + -13, -17, 20, -23, 24, -126, 26, 8, + -27, 12, -111, -6, 42, 20, -126, -33, + -22, -81, -1, 42, 8, -59, 42, 96, + -11, -10, -44, -41, 6, 39, -3, 6, + -43, -30, -38, -8, 121, -41, 
17, -13, + 101, 7, -98, 1, 26, 35, 27, 15, + -56, -10, 4, -23, 12, 80, -25, 8, + 8, -14, 1, 33, -17, -4, -18, -7, + -44, -80, -63, -50, 16, -125, 75, -17, + 8, 16, -53, 5, -30, -6, -83, -16, + -22, -78, 13, -7, -89, 5, -6, -10, + 11, 39, 8, -72, -28, -126, -32, -7, + -19, -1, -8, 38, 6, -34, 5, -1, + -12, 48, -71, -5, -11, -126, -33, -17, + -3, -17, 1, -83, 7, 8, 60, 27, + -1, 14, -69, -11, 27, -5, 73, 3, + -27, 64, -28, 2, 3, 22, 0, -12, + -41, 3, -25, 16, 9, 25, -27, 21, + 22, 11, 0, 1, -1, 16, 6, -9 +}; + +static const rnn_weight denoise_gru_recurrent_weights[27648] = { + -76, -99, -23, -18, 12, -119, -3, -53, + 6, 9, -8, -124, 91, -33, 50, 0, + -52, 95, 19, 54, 43, 29, -17, -122, + -83, -29, -107, -57, -8, -4, -27, 118, + -96, 4, 81, -5, 44, -90, -103, 39, + -29, -25, -56, -13, 71, -13, -103, -1, + -15, -5, -100, -89, -14, 4, -16, 36, + -10, -44, 59, -44, -103, -109, 50, 37, + 24, -48, -121, -9, -101, 30, 29, -5, + -69, 89, 56, -41, 39, -52, -4, -111, + -39, 16, -54, 31, -49, 34, -1, 12, + -20, -45, -121, -40, -28, -3, -13, -38, + 23, -83, -13, -6, 97, 44, 13, 30, + -38, 2, -25, -82, 46, 38, -25, 31, + -106, 9, 20, -113, 27, 17, 12, -91, + 34, -7, 10, -44, 3, -2, -66, -3, + -93, -70, -32, -58, -24, 20, -49, -31, + 6, 87, -30, -89, 10, 23, -60, -16, + -113, 22, -1, -4, 60, -45, -41, 1, + 17, -9, 39, -38, -36, -22, 47, 38, + -39, 10, -89, -27, 10, 45, 35, 2, + 62, 5, 42, 14, 28, -27, 13, 10, + -22, -23, -67, 41, -10, 12, -55, -57, + -76, -35, -108, 12, -26, 0, -42, 104, + -48, -64, 4, -3, 34, -126, -19, -11, + 36, 55, -54, -55, -123, -44, -45, 36, + -61, 18, 2, -127, 52, -30, -119, 33, + -8, -45, -26, -102, -91, 36, 49, 45, + 51, -4, 5, -105, 56, -128, -83, 11, + -16, 16, -126, -108, -71, 9, -9, 36, + -6, -63, -100, -56, -10, -51, -34, -13, + 15, -17, -28, 61, -65, -72, 32, -80, + 38, -13, 75, 56, -15, 81, 50, 21, + -127, 12, -86, 45, -41, 57, 23, 11, + -28, -71, -119, -22, -62, 79, -96, 32, + -61, 20, 9, -117, -52, 33, -82, 82, + -16, -100, -47, -128, 39, -105, 
-15, 10, + -5, 90, -124, 19, -28, -66, -76, -13, + -40, 31, -20, -5, -24, 41, 14, 121, + -56, 56, 43, -123, -44, -24, 28, 73, + -37, -17, -19, -65, 45, 52, 63, -1, + 30, 16, 51, -7, 45, -13, -70, 16, + -1, -4, -22, -116, -37, 37, -124, -10, + 28, 12, -109, 2, 16, -30, 13, 28, + -24, 32, -26, -53, 72, 10, 40, 42, + -73, 36, 25, -51, 19, 27, 34, -6, + 66, 36, -4, 38, -87, -33, 36, -41, + 15, 3, -32, -72, 73, 35, -32, -124, + 117, -36, 0, -22, -114, 76, 5, -125, + 7, 1, -50, -104, 1, -74, 11, 8, + -28, -8, 13, -115, -50, 3, 89, 75, + 22, 44, -99, -61, 97, 41, -123, -53, + 91, 85, 108, -12, 11, -23, 13, 0, + -12, 102, -18, -74, -44, 54, -17, 16, + 0, 53, 21, -19, 34, -11, 80, 25, + -58, 62, -55, 78, -29, 20, 35, 29, + -51, 42, 65, -86, -60, -25, 94, 14, + 52, -18, 58, -54, 84, -115, -17, 18, + -64, -9, 27, -94, -5, -3, -46, 5, + 11, 6, -125, -64, -54, 21, 59, -50, + 49, 38, 47, -39, 60, 3, -11, 16, + 71, -56, -7, 55, 51, -27, -51, -29, + 7, -6, -63, 89, -16, 36, -76, -35, + -102, -93, 11, -84, -5, -25, -59, -6, + -19, -8, -23, -121, -60, -126, -71, 8, + -17, -128, -95, 15, 13, 64, 37, 16, + 23, -78, 36, -111, -8, 69, -53, 62, + -37, 7, 9, -25, -63, -66, -25, -53, + -36, -7, -47, -40, -50, -42, -81, 127, + 70, -22, -107, -115, 11, 95, -54, 12, + 1, 15, 62, -14, -127, 47, 72, -43, + -2, -4, 127, 11, 22, 20, 11, -10, + 4, 26, 71, -3, 85, -42, -9, -10, + -35, 27, 34, 35, 35, 0, -23, 27, + 43, 0, 68, -13, 34, -21, 1, 70, + -34, 22, 16, 17, -11, -27, 0, -65, + 41, -40, -28, -27, 18, 3, 1, 37, + -36, 16, 44, -17, 61, -32, 50, 3, + 11, 1, -18, 24, -38, 19, 21, -8, + 3, 50, 28, -14, -55, 6, 0, -23, + 12, -17, 32, -18, 20, 22, -15, -2, + -17, -23, 48, -14, -9, -41, -4, -10, + 20, -11, 23, -10, 40, -3, -4, -38, + 4, -18, 8, 61, -7, 23, -31, -24, + -7, 45, -81, 12, -49, -19, -7, 38, + 52, -26, 25, -54, -5, 23, 17, -14, + -32, 46, 1, 18, -24, 2, -94, 21, + 34, -24, 36, -7, -11, 35, -62, 19, + 15, -47, 45, 5, 30, 26, -23, -38, + -48, 6, 77, 50, 9, 25, 11, -29, + -7, 13, 12, 14, 40, 
33, 21, 62, + -6, -27, 38, -45, -1, 87, 11, -27, + -20, 43, -68, -28, 27, 25, 3, -13, + 53, -38, 4, 44, -41, -72, -7, -39, + 41, -25, -35, 86, -59, 41, 127, -56, + 34, -38, -25, 22, 0, -22, -24, -1, + -65, 19, -23, 25, -20, 79, -68, -87, + -123, -123, -28, -76, -114, -10, 1, -95, + -126, -128, 10, -68, -103, -11, -122, 127, + -119, -43, -28, -55, 69, -76, 60, 106, + 122, -118, -40, -50, -11, -25, -30, -18, + 39, -22, -80, -77, 121, -23, -88, -23, + -20, 80, 35, 71, -75, -114, -128, 127, + 23, 11, 38, -52, -41, 7, -127, 29, + 37, -74, -59, -47, 126, -16, -92, 10, + -92, 4, -59, 2, -127, -87, 126, 24, + 122, -79, 97, 98, 1, 41, -61, -124, + 71, -25, 37, -48, 32, 85, 34, 56, + -21, -31, 0, 85, -11, -10, 41, 14, + -41, 7, -17, 97, -79, 84, -98, -34, + 17, -1, -60, 15, 39, -37, -35, 16, + 26, -15, -37, 49, -20, -25, -56, -13, + 5, 16, 6, 77, 67, 123, 96, 5, + 29, -17, 46, -10, -23, 57, -79, 29, + -28, -17, 52, 86, -30, 6, 14, 51, + 125, 17, -23, 90, 47, -8, -19, -42, + -19, 118, 27, 20, 38, 36, 12, 122, + 42, 4, -14, -5, 1, 36, -6, 29, + -53, 15, 8, 21, 32, 31, -35, -2, + -56, -18, 12, 74, 48, 5, -4, 33, + 60, -92, -21, 44, 92, -62, -2, 35, + 4, 48, 69, -45, 73, -35, 3, 28, + 81, -48, 18, -34, 29, 123, -32, 29, + 6, 9, -118, 4, 30, -1, -24, 65, + 3, 34, 40, 20, -13, -53, -50, 31, + 9, -86, -87, -36, -57, -27, -35, -30, + -123, 18, 8, -107, -6, 5, 48, 21, + -54, -1, 60, 61, 26, -1, 12, -11, + 41, -26, 110, 29, 27, 8, 48, 43, + -12, 123, -33, -8, -91, 122, 9, 120, + -18, 71, -23, 63, -34, -20, 30, 46, + -45, -24, 24, 57, -20, -15, -123, 3, + 5, 59, 13, -87, 45, -15, -28, 38, + -3, 29, -23, -6, 31, 69, 33, -4, + -64, -31, -48, -1, -4, 23, 53, -4, + 1, 96, 29, -44, -123, -19, 3, 32, + 80, -28, 6, -57, 2, -67, -18, -50, + 16, -125, -87, -25, -51, -127, -23, 5, + 0, 62, 90, 67, -41, -11, 41, -64, + -67, 15, -3, 78, -86, -11, 30, -10, + -57, 23, -41, 23, 4, -103, -57, -119, + -94, -94, 69, -19, 48, 23, -32, -20, + -58, -17, 60, 113, 69, 57, -27, 24, + -25, -2, 23, 3, 8, -37, 
-35, -9, + 5, -23, -38, -19, 26, 6, -49, 3, + 95, 45, 0, 15, -102, 3, -46, -1, + -19, -2, 2, -86, 14, -3, 45, -20, + 22, 23, -75, 93, 38, 54, -44, 107, + -87, 80, 19, 20, -54, 73, 4, 50, + 14, -5, 32, 7, 32, 7, -46, 10, + -21, 8, -40, -45, 55, 33, 17, 31, + 44, -10, 88, -11, -14, 29, 32, 54, + 116, 33, -53, 107, -88, -34, 47, 124, + 18, 14, -1, 5, 1, -39, 21, -15, + 58, -27, -70, 100, -37, -25, 4, -56, + 69, -22, 23, 3, -53, 20, -10, -48, + 22, -95, -14, 39, -43, -70, 35, -6, + 8, -95, -86, 9, 22, -59, 121, -127, + -1, 62, -18, 14, -13, 56, 76, -80, + -62, 10, 74, -56, 16, -11, 34, -52, + 50, -7, 40, -61, 38, -38, 31, -51, + 16, 23, -17, 25, 42, -51, 101, -19, + 8, -19, -4, -46, -115, 3, 126, 41, + 2, -1, -66, -26, -110, -49, -72, -39, + 20, 92, -23, 60, -34, 80, -23, 32, + -4, 56, 59, -82, 81, 127, 61, -17, + 44, -49, 33, -38, -119, 68, 74, -38, + 87, -15, -21, -21, -70, -7, -8, -46, + -7, -107, 42, 20, -108, -37, -5, 7, + -123, 30, -125, -17, -48, 79, 26, -103, + -42, -18, 4, 48, -21, 14, 34, 12, + 27, 37, 74, 57, -124, -5, -4, 5, + 55, -36, 44, 64, 3, -48, -37, 51, + -45, 95, -59, -32, 5, 28, -80, 24, + 18, -86, -6, -83, 5, -8, -16, 4, + -113, -23, 9, -24, -86, 8, -30, 16, + 72, 12, 41, -55, -18, 101, -5, -17, + -108, -15, 62, -36, 29, 5, -16, 64, + -8, -10, -42, 22, -128, -37, 43, 82, + -42, -93, -20, -29, 46, 29, -41, 4, + 18, -12, -51, 53, -43, 1, -61, 7, + 21, 7, -94, -11, 99, 89, 84, -14, + 14, -59, 56, 46, 22, -11, -35, 4, + 40, -68, -32, 100, -8, 48, 1, 17, + 1, 20, -4, 98, 3, 0, -21, 15, + 39, 27, 66, -8, -37, 3, -8, -17, + -39, -53, -10, 9, 28, 82, -10, 33, + -36, -14, -47, 25, -8, -24, 8, 14, + 10, -31, -44, -31, -53, 77, -18, -76, + 35, -3, 124, -4, 92, 92, 34, 90, + 125, 10, 63, 38, -16, 41, -4, 3, + 27, 93, 62, 23, -48, 6, -36, -42, + -3, -3, -50, 2, 64, -36, -79, 122, + 79, 22, 32, 19, 22, 34, -24, 5, + -11, 23, 38, -65, -15, -3, 61, -7, + -6, -41, -37, 15, -5, 1, 52, 11, + -42, -54, -7, 18, -34, 65, 113, -3, + -29, 61, 5, -36, -12, 106, 10, -95, + 
57, 124, 95, 32, -25, -26, 4, 13, + 43, -22, -38, -26, 17, -46, -4, -122, + -39, 40, 79, 45, -48, -35, -74, 60, + -34, 50, 69, -68, -10, 94, -67, -80, + 80, 55, 46, -61, 2, 14, 77, 8, + 21, 4, 68, 53, 108, -3, -30, -18, + -102, -127, -12, 8, 77, -86, 27, -127, + -26, 100, -8, -77, -128, -47, -51, 2, + -111, -11, 13, -23, 44, -123, -2, 28, + 21, -71, -124, 9, 28, -18, 66, 53, + 64, 83, -31, 28, -8, 36, -21, 9, + 8, -46, -23, -101, 12, -73, -49, -38, + 52, 106, -82, 57, 41, -17, 59, 20, + -74, -7, 10, 28, 3, -15, 0, -92, + -9, -29, -64, 106, 36, -106, 4, -46, + -114, -59, -104, -71, -128, -68, -41, -4, + 56, -115, 51, -34, 29, -21, -24, 14, + -59, -113, -57, 4, -6, 78, 24, -4, + 23, 23, -12, -4, -24, -17, -79, 23, + -16, 17, -31, 6, 29, 26, 14, -50, + 37, 27, -13, 22, 15, 1, 0, 72, + -62, 58, -6, -38, 18, 90, -2, 14, + 65, 41, 48, 59, 53, 12, 55, 9, + 14, 38, 34, -35, 19, 25, 55, 31, + -22, 22, 81, 48, 14, -15, -49, 19, + 67, -54, 20, 13, 8, 3, 7, 32, + 6, -6, -11, 19, 66, 40, 35, 19, + 12, 29, -45, -61, 54, 105, 56, -20, + 7, 46, 5, 4, 60, 10, 37, -19, + -37, 66, 44, 15, 19, 35, -21, 29, + 55, 16, 61, 85, -26, -3, -93, -30, + 9, -36, 2, -42, -67, -32, -23, -2, + 15, 79, 27, -17, -4, -126, -29, 18, + -3, -40, -118, -28, 10, 42, 64, -30, + -9, 101, 4, 6, -20, -53, -10, 18, + -14, -62, -29, 8, -38, -3, 9, -21, + -26, 63, 31, -5, 20, -28, 33, -25, + -46, 6, 65, -6, 94, -13, 9, -8, + 15, -21, 114, 12, -8, -42, -116, 5, + -22, -2, 1, -27, -18, 8, 4, -70, + 14, 65, -22, -9, -23, -1, 56, 15, + -55, 20, 20, 44, -14, -70, -27, 33, + 24, -12, 45, 78, 69, 50, -48, 91, + 100, -29, 43, 19, 126, 57, -46, 16, + 77, 70, -65, -18, 101, -27, -22, -53, + 73, 39, 126, 96, -125, -20, -124, 19, + 15, -99, 72, 36, -11, 108, 91, -123, + -6, -49, -68, 61, -54, 107, 9, -35, + 63, 126, 33, -4, 23, 61, 127, -10, + -126, -1, -20, 29, 43, 20, -68, 56, + -40, 43, -90, 72, -37, 38, -48, 57, + -58, 48, -8, -57, 76, 36, 28, -34, + -110, -15, -116, 103, -30, 29, 14, 126, + -121, 127, -23, 49, -33, 
-125, 30, -13, + 0, -3, -25, -93, 42, 36, -24, -19, + 87, 8, -101, 54, -30, 27, -8, 69, + 25, 33, 65, -17, 49, -1, 37, -92, + 9, 46, -15, -40, -4, 3, -52, -13, + -11, -19, 3, -42, 1, -31, 55, 45, + 8, 31, 16, -29, -15, -25, -7, 6, + 7, -62, -35, 103, 33, 13, 67, 26, + -6, -53, -18, -36, 3, 32, 112, 0, + 40, 12, -67, -27, -57, 37, 24, 8, + 88, 70, -17, 48, 82, 37, 1, -1, + 77, 5, -27, -17, 15, 8, 27, -85, + 33, 0, 43, 37, 3, 19, 3, -47, + 60, -55, 50, -23, 12, -12, 61, -86, + 79, -37, -69, 38, 19, 10, -53, -26, + -6, 103, -5, 18, -29, 74, 11, 97, + -48, -36, 46, -45, -72, -65, -122, -39, + -101, 85, 31, -4, 6, 25, -45, -4, + -3, -107, 7, -120, 71, 19, 64, 43, + -87, 30, -31, 14, 7, 21, 83, 74, + -7, 30, -94, 66, 63, -85, 92, 8, + 22, -24, -4, 31, -91, 39, 13, 37, + 65, 60, -62, 92, -44, -8, 74, -7, + -63, 54, -24, -90, -34, 17, 84, -41, + -97, 13, 28, 14, -33, 1, -26, -25, + -9, -48, -2, -54, -104, 4, -5, 38, + -120, -48, -35, 124, 38, -57, -63, -8, + 37, 91, -40, 7, 17, -58, 67, -56, + 5, -86, 22, -17, -53, 6, 114, 37, + -24, -96, -17, -69, -44, -58, -123, -18, + -47, -123, -108, 2, -89, -38, -15, -34, + 14, -31, -2, -99, 34, 28, 124, 92, + -16, -28, -22, -60, -22, -2, 126, -122, + 31, -20, 32, 42, -128, -41, 61, 30, + -23, 79, -84, -36, -26, 87, 15, -2, + -125, -24, -22, 39, -108, 16, -26, -31, + -62, -22, 83, -123, 69, -108, 0, -12, + -19, -6, -13, -70, -83, -30, 36, -32, + 3, 95, -111, -18, 6, -91, 34, 56, + -61, -92, -54, 23, -60, 17, -64, -54, + 4, 39, -123, 24, -53, -46, 14, 65, + -100, 0, -54, 21, -4, -119, 72, -35, + 95, -56, 14, 24, -68, -40, -49, 21, + -16, -91, -123, -75, -39, 25, 0, 38, + 80, -32, 29, 25, -99, 0, -10, -28, + 38, 34, 0, -3, -29, -74, -93, -1, + 0, -26, 33, -65, -56, 28, -76, -22, + -68, 1, -22, 71, -21, -12, -59, -11, + 66, 12, 0, -82, 17, 27, -66, -56, + -34, -11, -37, 75, 100, 78, 2, 12, + -29, 40, -92, -11, 52, -4, 5, 38, + 25, 44, -63, 40, -4, -19, -38, -38, + 44, 44, 11, -12, 84, 47, -127, -32, + 2, -57, -102, -23, -51, -12, 26, 
-59, + 17, -5, -97, 98, -117, -36, -65, -35, + -1, -1, 1, -93, -40, -39, 96, -120, + -2, -26, -25, 66, -33, -76, -67, -12, + -44, -8, 22, -58, -2, 4, -110, -21, + -6, 30, -30, -113, -57, 83, 38, 5, + -28, 14, -93, -35, -107, 6, 59, 17, + 107, 74, -20, -19, 22, -93, -43, 49, + -6, 17, 21, -108, 12, -39, 33, -127, + -43, 14, -22, -18, 15, -74, 41, -6, + -20, 35, 4, 99, 10, -16, -58, -14, + -50, -20, -13, -30, -26, -82, 60, -28, + 26, -110, -34, -51, 28, -128, -57, -20, + -47, 97, -54, 104, -127, 50, 63, 20, + -44, -126, -35, 13, -5, 5, 83, 34, + -45, -12, 27, 0, -33, -55, 29, -94, + 70, -44, -97, -9, 25, 55, 48, 57, + -4, -27, -84, -93, -44, -36, -9, 49, + 66, 39, 20, -11, -82, -66, 113, -18, + -15, -3, 98, -8, -26, 9, -83, -9, + 41, 61, 84, -22, 65, -57, 34, 7, + 61, -53, 97, 6, 20, 28, -21, -105, + -60, -63, 23, 41, -77, -54, -1, -18, + 3, 51, -78, -96, 53, 83, -21, -39, + 21, -104, -69, -12, -36, 76, 20, -77, + 59, -37, 40, -31, -107, 29, 3, 5, + -18, -37, -16, -41, 10, 56, 51, -40, + -73, 25, 27, -80, -12, 73, 6, -34, + 16, -64, -62, 28, 86, 39, 4, 78, + 21, 8, 28, 55, -124, -107, -105, -24, + 55, 6, 68, -23, -43, 60, -116, 1, + -37, -38, 88, 46, -45, -53, -44, 49, + -58, -40, -117, -63, 55, 62, 24, 53, + 9, -20, 127, 83, 62, -12, -2, 3, + 126, -26, -29, -110, 32, 33, 51, 43, + 35, -8, 90, -3, -37, -120, -11, 100, + 17, 0, 76, -21, 116, -24, 6, -116, + -87, 125, -3, -57, -13, 125, 13, -116, + 13, 43, -86, 32, -45, -14, 70, 41, + -121, -18, -7, -51, 9, 57, 0, -6, + -41, -4, -14, -40, -51, 124, 47, 13, + -126, 124, -41, 5, 34, 7, 2, -34, + -16, 9, -39, -107, -49, 41, -52, -23, + -38, 39, -9, 92, -115, 45, -128, 35, + -95, -24, -20, -40, 5, 0, 7, -107, + 37, 0, -63, 49, 38, 35, 40, -33, + -115, 55, -19, -77, -36, -88, 116, 74, + 10, 73, 38, 12, -76, -127, -48, 79, + 84, -57, 46, 78, 37, 102, 18, -95, + -61, 17, 26, -113, -12, 43, -1, 68, + -25, 67, -21, 69, -17, -14, -27, -21, + -52, -7, -70, 5, -29, -110, -105, 35, + -65, -25, -56, -1, 85, 18, -54, -81, + -91, -71, 
24, 25, -26, -68, -64, 9, + 91, -107, 26, 34, 17, 21, 116, 81, + 67, 25, -122, -43, 70, 46, -22, 101, + -12, -42, 13, 10, 13, -38, 3, -3, + 7, -81, -36, -23, 48, 76, 22, 22, + -123, 4, 31, 37, 2, -2, 25, 40, + 47, -41, -66, -23, -53, -33, -127, 20, + -4, 18, 57, 38, -33, -19, 42, 40, + 16, 94, 38, 0, 32, -36, -40, 29, + -5, -57, -7, 8, -1, 40, 40, 48, + 12, -26, 7, -24, 5, -61, -21, -50, + 46, 13, -63, -91, 1, 12, -76, -42, + -88, 26, 62, -35, -28, -51, -12, 8, + -9, -102, 38, 23, 11, 32, -9, -43, + -15, 30, 20, -9, 21, 63, -6, -61, + -81, -25, -63, 37, 85, -30, 36, -4, + -63, 64, -43, -15, -65, 75, 19, 51, + 36, 4, -50, -9, -31, 35, 35, 36, + 27, 27, 3, -41, -68, -118, 11, 88, + -38, -48, -49, 10, -42, 16, 63, 12, + -30, 3, -15, -6, -111, 15, 19, 13, + 70, 34, -9, 37, 14, -26, 91, 51, + 43, 5, 2, 5, 86, -31, -55, -86, + -51, 20, -1, 56, -34, -41, -7, -88, + -3, -24, 54, -27, 40, -40, -23, -72, + -51, -104, -27, 38, -29, -57, -2, 4, + -79, -1, 48, -36, 2, -62, 24, 20, + -34, -119, -29, 2, 64, 73, 22, -100, + -72, 7, 3, -3, -25, 68, -71, 26, + -18, 8, -46, -6, -2, -66, 50, 25, + 44, 29, 3, -2, 18, -67, 21, 12, + 15, -4, -65, 126, -34, 68, 66, 36, + 9, -32, -22, -5, 19, 27, 35, 47, + -2, -70, -50, 60, 2, 12, -26, -18, + -107, 9, -11, -16, 24, -9, -16, -49, + -43, 9, 23, 6, -4, -26, -114, -125, + -72, -41, 42, 64, 32, -1, -3, -73, + -88, -37, 0, 63, 80, -46, -9, 86, + -6, -86, -17, 20, -1, -17, 2, -8, + -75, 22, -47, 26, -50, 65, -41, -13, + 105, -75, 2, -47, 13, -2, 26, -10, + -11, -82, 50, -24, -7, 16, -66, -32, + 52, 25, 24, 59, -11, -12, -90, 67, + -4, 1, 3, 33, -22, 77, -12, -90, + 11, 28, 57, 17, 49, -31, -44, 68, + 4, -54, 5, -2, 66, 73, -96, -50, + -7, -7, -5, 99, 32, 58, 66, -60, + 0, -116, 53, 6, 116, -41, -62, -51, + -6, -24, -7, -113, -66, 21, 3, -39, + 10, -62, 74, -5, 57, -56, -9, -26, + 13, 114, 0, 80, 83, -29, -3, 80, + 35, 55, 41, 94, -72, 8, -44, 77, + 48, 21, -27, -11, 9, -5, -80, 32, + -20, 59, 26, -71, 90, 37, -51, 127, + -4, -51, -120, 38, -25, 
-43, -124, 46, + -75, 40, 9, -7, 36, -58, 22, 42, + -39, -123, -15, -20, -49, -76, -45, -127, + -82, 32, 56, 77, 41, -86, 108, -9, + 12, -75, 119, -57, -6, -77, -17, -63, + 11, -116, -20, 14, -55, -9, -8, 109, + -22, -30, 22, -71, -18, 19, 68, 119, + 36, 34, -6, 2, -6, 29, 122, 50, + -128, 87, 22, 68, -3, 75, 6, -53, + -39, -54, 24, -43, 0, -9, -26, -42, + -28, -73, 50, -70, 40, 93, -102, 65, + -27, 53, 45, -17, 44, -15, 60, 69, + 0, 38, -6, -49, 27, 25, -88, 15, + 22, 13, -40, 55, -19, 26, 48, -52, + 14, -6, -5, 14, -53, -61, 69, -44, + -71, 59, 66, 87, 117, -33, 15, 99, + -50, 7, -37, 98, 51, 62, 55, -39, + -36, -61, -103, 8, -52, 50, -27, -110, + 12, 15, 87, -44, -22, -23, -56, -4, + -17, 18, 58, -73, -128, 17, 39, 87, + 5, -69, -20, -47, 64, 13, -8, -27, + -26, -10, 2, 9, 64, 25, 26, -32, + -53, -22, -18, 35, -63, 7, 119, 43, + 28, 100, 2, -54, 110, 5, 4, -5, + 98, -32, 29, -17, 83, -95, -116, 73, + -47, -76, 13, -89, -12, -35, -62, 77, + 36, 52, 116, 117, 61, -19, -33, -7, + 31, -41, 16, -8, -51, 9, 13, 36, + 63, 43, -22, 19, -36, 6, -40, 29, + -4, 26, 42, -69, -66, 77, -73, 110, + -104, 46, 47, -40, -11, -41, 73, -39, + 59, -29, 18, -12, -6, 9, 47, 13, + 7, -24, -45, -109, 40, 26, 26, -2, + 42, -93, 46, 92, -1, -85, 69, -4, + 115, -72, -11, -47, 1, -25, 9, 7, + -49, 34, 24, 30, -48, 116, 3, -12, + -126, -21, 36, 17, -38, -13, -73, 2, + -84, -64, 127, -45, -50, -79, -81, -35, + 14, 49, 91, -122, -52, -73, -42, 17, + -34, -12, -102, -33, -28, 45, 27, 47, + -9, -28, -4, -53, -49, 15, -3, -68, + 9, 25, 60, -69, 0, 126, -55, -9, + -52, -94, -90, -13, -27, 109, 16, -6, + 20, -69, 46, 43, -119, 29, 78, -54, + 55, 67, -40, -49, -84, 25, 27, -16, + -84, -94, 15, 127, -125, 56, -24, 29, + -93, -33, -80, -82, 45, -5, -49, -29, + 46, 21, 18, 27, 52, 9, -111, 8, + -81, -125, 109, -9, -91, 72, -18, -9, + 3, -97, 12, -119, -99, -69, 57, -25, + -25, -106, -43, 34, -36, 114, -14, -23, + 2, 46, 69, -9, -103, 87, 127, 74, + 14, -5, 0, 14, -16, 73, 93, -30, + -2, 10, -123, 26, 
74, 116, 22, 51, + -94, 108, -38, -38, -81, -38, 1, -46, + -86, -12, 60, -53, 64, 1, 63, -125, + -125, -20, 106, 88, 20, -127, 60, -10, + 85, 19, -11, 45, 33, 2, 6, 66, + -76, 86, 47, 63, 91, 119, 56, 114, + 86, -6, -13, 21, -40, 20, 21, 8, + -27, 44, 59, 36, -13, 99, 118, -47, + 26, -94, 52, -76, -62, -76, 9, -25, + 94, -27, 123, 34, 119, -35, 8, -16, + 8, -44, -54, 24, -8, 31, 36, 31, + 101, -42, -4, 18, -18, -4, 31, 49, + -8, -17, -28, 10, -20, -104, -55, -39, + 75, -63, -10, -1, 33, 102, 69, 43, + 49, 53, -42, 42, 24, 75, 3, -55, + -24, -20, -65, 78, 63, -1, -33, -31, + -40, -36, -42, -49, -1, 48, -70, 63, + 51, -3, 70, 45, -33, 32, 62, -11, + -94, -94, -11, 33, -27, 101, -70, -4, + 32, -48, 15, 44, -33, 16, -3, 124, + 4, 64, 14, 73, -119, -72, 71, -39, + 15, -33, -32, 54, -53, -32, 9, 45, + -56, 111, 11, 42, 35, 75, 8, 34, + -29, 24, -12, 45, 59, 45, -52, 35, + 126, 90, 8, 9, 16, -31, 99, -72, + 80, -100, 2, 20, 21, -36, 42, -22, + -38, 44, -20, -13, -8, 19, 25, 31, + -22, -89, 105, -52, 64, -19, -2, -98, + 38, -23, -13, 22, -18, 4, 29, 24, + 34, 56, 66, -5, -82, -80, 5, 57, + 57, 0, -25, 44, -7, -73, -45, 33, + 93, 41, 36, -18, 3, -76, -41, -59, + -14, 65, 28, 9, -55, 22, 75, 27, + 56, 104, 16, 35, 39, -50, -13, -90, + -59, -35, 85, 42, 13, 46, 38, -35, + -84, -13, -30, 54, -16, -11, 12, 19, + -21, -11, -25, -17, 40, -115, 64, -120, + -68, 19, 79, 35, 2, 17, 4, -6, + 29, 26, -68, -65, 73, -48, -64, -64, + -58, 3, 1, -15, 4, -7, -11, 7, + -8, 11, -52, 4, 51, 28, 8, 18, + 8, 16, 6, 14, -60, 88, 65, 51, + 30, 6, -33, 47, -26, 9, 59, 7, + -6, 13, 91, 97, -104, -19, -105, -44, + 90, 21, -55, 45, 5, 40, 10, 73, + 9, 33, -109, 57, -14, -1, 51, -51, + 19, -4, 64, 19, -45, -80, -18, -77, + 38, -32, -66, -40, 18, -7, -7, -38, + -47, 11, -59, -31, -59, 6, 86, 5, + 5, -82, -30, 15, 30, 44, 35, 45, + 52, -21, 61, -75, 61, -58, -7, 92, + -57, -17, 88, 0, 20, -9, -49, 15, + 32, 57, 54, -116, -59, -57, -47, -6, + -33, -2, 71, -121, 113, 48, -15, 41, + -6, 67, -19, 37, 25, 
22, 28, -11, + 112, 20, -25, -19, 79, -45, -14, 48, + -51, -105, 123, 23, -39, 127, -2, 23, + -46, 68, 45, -46, -53, 54, 102, 28, + 63, 14, 40, -26, 25, 43, 33, -55, + 48, 5, -67, 55, -17, 60, -87, -4, + -92, 23, 122, -9, -58, -85, 49, -34, + 43, -37, -25, -14, -54, -86, 53, -68, + 5, 84, -19, -117, -84, 38, 70, 95, + -76, -86, 36, 54, 42, 50, 40, 83, + -9, 16, -25, -85, 48, 122, -30, 5, + 47, -124, 57, -63, 44, -28, -122, -113, + 27, -12, -119, 38, -69, 44, -39, -29, + -17, -126, 12, -4, -29, -23, 90, 0, + 41, -103, 21, -48, 8, 90, -15, -60, + -43, 90, -51, 63, -15, 70, -17, 0, + 4, 19, -81, -100, 42, -54, -18, 76, + -39, -43, -29, 29, 25, 9, -16, -34, + -23, 27, 51, -78, -91, 40, 91, 0, + 102, -36, -126, -52, 22, 73, 44, -41, + -35, -34, 110, 14, -55, -3, 29, 16, + -33, 106, -39, 39, -61, -42, -1, -49, + -7, -1, 28, 18, 7, -19, 40, -13, + 6, -52, 3, 2, 19, -19, 119, -100, + -51, 73, -50, -89, 8, -30, 6, 33, + 28, -24, -1, 47, 23, -35, 46, -4, + -6, -33, 78, 40, -28, -10, -1, 39, + 14, 25, 52, -1, 11, -39, 59, -22, + -2, 61, 24, 40, 111, 30, -4, 61, + -92, -95, 4, 66, 37, 19, 3, -3, + 107, 1, 5, -10, 11, 21, 12, 63, + -13, 71, -93, -41, -3, 82, 31, -1, + 2, -31, 1, 55, -36, 35, 17, 7, + 111, -8, -31, -2, -70, -15, 19, 41, + -58, 10, 6, 15, -55, -55, -58, 26, + 12, 54, 35, 3, 19, 35, 124, -54, + 19, -68, -114, -2, 27, 37, -37, 34, + -58, -3, -95, 51, -31, -1, -49, 26, + -62, -7, 12, 7, 101, 16, -17, 126, + -9, -27, -24, 95, -22, -40, 29, -15, + 24, 15, 25, 30, -43, -4, 25, 12, + 33, 29, 21, 6, -72, -9, 21, 65, + -54, 65, -48, -22, 45, -36, 6, 5, + 0, -20, 14, 25, -6, -6, 14, -31, + 21, -22, -7, 58, 5, 53, 50, -16, + 53, 73, 8, 22, -4, 15, 29, 67, + -119, -9, -28, -14, 74, 54, 58, -17, + 69, 29, 66, 96, -70, -108, -128, 16, + -35, 54, 2, -69, 94, -7, 19, 2, + -66, -3, -126, 44, 59, 43, -5, -34, + 16, -2, -13, -126, 14, 109, 37, 9, + 32, -26, -86, -39, -71, -34, 4, 9, + -4, -16, -31, 80, 106, 21, 23, 29, + -37, -37, 8, 75, 59, 10, -41, -32, + 30, -17, -44, -11, -28, 
34, -35, 24, + -18, -9, -111, -53, -78, -127, -19, 24, + -21, 16, -110, 32, -94, -4, -17, -46, + -1, -122, -53, -21, 11, -96, -3, -21, + 7, 2, -32, 46, 90, 35, -33, -19, + 74, 43, 34, 30, 37, 63, 28, 23, + -83, 8, 27, -18, 20, -13, -44, -8, + 21, 29, 47, 7, 24, -13, -119, 27, + -54, 9, 14, 26, 32, 102, 89, 25, + 20, -5, 2, 33, -12, 22, 46, -45, + 52, -25, 97, -65, 79, 79, 32, 18, + 28, -78, -6, 88, 32, -29, 81, 31, + -13, -38, 27, -44, -35, 19, 14, 30, + 51, -27, -11, -16, -12, 45, 51, -37, + -3, 11, 39, -3, -36, 23, 47, -9, + -21, 22, 84, 55, 56, 47, -12, -8, + -97, 83, 61, -119, 44, 27, 7, -43, + -21, 88, 7, -30, -52, -9, 125, 31, + -81, 56, -5, -71, -30, -52, -2, -24, + -7, 39, 52, 101, -124, -3, 20, -41, + -83, 65, 23, -42, 8, -38, -60, -14, + -21, 33, -30, -18, 123, -25, 68, -120, + -66, -3, -89, 3, -22, 22, -42, 75, + -34, -33, 78, -28, -40, -5, 24, 28, + -49, 29, 2, -59, 2, 8, 5, -71, + 33, -83, -110, 24, 99, 33, -70, -41, + 56, -7, -29, -46, -106, 20, -19, -42, + 24, 18, -35, 18, -67, 119, -15, -34, + 56, -40, -26, -50, 61, 98, 30, -53, + 44, 123, 55, 42, -99, -15, -21, 41, + -94, 25, 56, -125, 51, -126, -45, 27, + -83, 20, -19, 67, 10, -103, -34, 11, + 4, 42, -117, -15, -38, -59, 66, -54, + -63, -1, -49, 36, -31, -57, -28, 14, + 46, 69, -24, 38, -54, 79, -31, -32, + -73, 3, -19, 122, 61, -20, -65, 107, + -7, -57, -125, 65, 8, -55, -32, -10, + -30, -90, -104, -119, -12, 18, -34, 2, + -28, -99, 67, -117, -113, -9, 6, 58, + -62, 126, 35, -40, -16, 98, 21, -2, + -5, 14, 24, 43, 48, 44, -9, 47, + 58, 61, 17, 20, -30, 19, 103, -121, + 21, -15, 45, 73, 25, 25, 28, 35, + 30, 27, 2, 68, 24, 23, 114, 55, + -85, -10, -44, 38, 80, -27, -12, -24, + -28, -22, 88, -10, -35, 23, 18, 16, + 86, 14, 53, 33, -86, 0, 35, -21, + 22, 40, 33, 50, 13, 22, 15, -44, + 16, -5, 50, 50, 23, 57, 18, 9, + 29, -53, 64, -10, 7, -26, 13, 1, + -33, -29, -54, -32, 70, 40, -38, 27, + 25, -24, -7, 28, 9, 46, 30, 43, + 19, 26, 25, 37, -18, -1, -36, 45, + -50, -43, 35, 17, -9, -10, 4, -88, + -43, 
40, 39, 30, 31, 77, -8, 95, + -70, 40, 96, 45, -41, 16, -126, 15, + -29, -12, -19, -21, 17, 53, 22, -24, + -62, 6, 33, 17, -28, -16, 114, -51, + 9, -45, 37, 30, 111, -28, 10, 0, + -30, -27, -5, 84, -75, -47, -2, 78, + -58, 0, 33, -14, 91, -124, -1, 32, + 0, -61, 1, -56, -4, -15, 42, -38, + 12, -7, 10, -26, -6, 40, 3, -57, + -77, 38, 79, 47, -14, 48, 15, -10, + -23, 25, 23, -41, 29, -5, 10, -127, + -61, -58, 25, -51, -37, 29, -5, 36, + -82, -95, -40, -9, 35, 42, 65, 24, + -9, -46, 3, -58, -11, -70, -48, -26, + 28, -21, -53, 18, 10, -13, 75, 22, + 54, 41, -29, -55, 50, -5, 28, 15, + 15, -59, -22, -46, -35, -51, -33, -11, + -66, -23, 9, 2, 11, 20, 7, 55, + -52, -15, -28, -54, 5, -120, -63, -39, + 14, -49, -18, -61, -87, 19, 20, 36, + 113, -9, -28, -29, -64, 7, -13, -50, + -8, 18, 34, -29, -33, -25, -9, -2, + -39, 49, 14, -16, 37, 44, 67, -3, + -93, 57, -26, 14, 24, 77, 5, 38, + 10, -10, -8, 38, -21, 20, -9, 71, + 2, -3, -32, -34, 2, -125, -51, 3, + 17, 26, -104, -98, -51, 30, 17, -5, + -19, -21, -21, -41, -93, 8, -52, -25, + 54, -1, -12, 19, -2, 35, 56, -39, + 49, 56, -52, -14, -58, 37, 50, -66, + -27, 18, 35, -80, 22, 12, -6, -77, + 30, -22, -18, 37, -29, 19, 18, 15, + -34, -45, -1, -5, 58, 52, 9, -9, + -105, -115, 91, 45, -3, 20, 27, 2, + 54, 116, 109, 84, -52, 9, 33, 7, + 13, 13, -47, 14, 30, -3, 111, 11, + 51, 98, -44, -112, 93, -41, -45, 122, + 73, -9, -28, -48, -9, -61, -91, 18, + 67, 91, -54, -52, -86, -124, 65, -50, + -56, 20, -42, 4, -75, 56, 90, -81, + -104, 42, 16, 102, 35, 107, 73, 51, + 9, -50, -100, 24, 64, 38, -17, 120, + -16, 27, 114, -107, 85, 74, -50, -13, + -80, 3, -14, 6, 67, -23, -123, 49, + 49, 22, -85, -69, -110, 126, 58, -15, + 80, -16, 91, 51, 45, 55, -23, 96, + -90, -5, 20, -9, 69, 49, 6, -15, + 124, 36, 38, 36, 29, -91, 56, 51, + 26, 4, -120, 3, -42, -23, -12, 13, + 5, 5, -69, -33, 12, -81, -2, -4, + 59, 78, 100, -8, -109, 28, 5, -121, + -1, 35, 2, -13, -9, -67, -120, 6, + 29, -111, -58, 45, -33, -30, 12, 123, + 16, -43, 2, -24, 45, 19, -90, 
-12, + -36, -88, 0, 15, 18, -4, 35, 39, + -60, 34, -8, -35, 15, -120, 10, -22, + 81, -100, -21, -37, 0, -19, 34, -87, + 6, 54, -18, 41, 43, -4, 11, 25, + 49, -19, 24, -20, 31, -97, 4, 13, + 74, 90, -59, 27, -11, -19, -5, 53, + 43, 20, -5, -84, 34, 13, 34, -47, + -56, -30, -64, 32, 37, 52, 28, 60, + 19, 38, 72, -40, 16, 10, -36, -5, + -30, 50, 24, -13, 36, 25, 43, -39, + -23, -22, -24, 47, 95, 40, 40, 2, + 11, -66, -7, 48, -24, -26, 27, -7, + 25, -8, -31, 89, 25, -12, 40, -7, + 22, 45, 56, -21, 4, -16, 70, 48, + 13, 28, 3, 0, -27, -1, 30, -3, + 31, 8, -1, 18, 8, 50, 15, -10, + 76, 112, 27, -55, 41, 38, -23, 18, + -65, -74, -27, 9, 17, 21, 78, 60, + 17, 29, -63, -49, -53, -102, 8, 52, + 112, -7, -52, 37, 27, -39, -15, -5, + 5, -14, 33, 109, 65, 64, 68, -1, + -18, -79, 74, -10, 50, 37, 20, -113, + -40, 84, -50, 2, -24, 122, 42, 60, + -16, -41, 44, 54, 15, -52, 29, 17, + 4, -59, 85, -10, -11, 28, 29, 33, + -49, -72, -101, -45, 65, -21, -8, -25, + 3, -36, 24, -59, 12, -1, -52, -31, + -59, 33, -17, -30, 53, 113, -116, 70, + 20, 0, -20, 27, -119, -53, 19, -20, + -85, -92, -20, -32, 14, -120, 22, 22, + 126, -47, -18, 50, -38, 81, -126, -62, + -86, -105, 26, 29, -44, 125, -54, -127, + -120, -21, -126, -54, 16, -112, 36, -3, + 75, 123, 8, -54, 38, 59, -16, -23, + 11, -127, -26, -64, -121, 56, -94, -5, + 7, 13, -10, -18, 58, 25, -6, 29, + 21, 42, 70, -33, -35, 36, 43, 48, + 109, -9, -120, -123, -38, 36, -121, -34, + -112, 66, 16, -125, -78, 127, -128, -51, + -27, -102, -119, 118, -18, 127, -12, -24, + 7, -16, 10, -66, -36, 9, -36, 9, + 41, 67, 18, 82, 14, -15, -67, 67, + 2, 5, -84, -16, -37, -10, -51, -27, + 74, 41, -114, 0, 67, 0, 100, -22, + 20, 8, -42, -15, -36, 11, -12, -85, + 34, -24, 13, 18, -43, -18, 27, 38, + -28, 5, 49, -2, 36, -17, 101, -76, + 27, -20, 58, -25, -27, 0, -17, -18, + 23, -64, 24, 35, -21, -106, 24, 2, + -21, -63, -14, -19, -7, 19, -63, 10, + 29, -35, 6, 10, 13, -10, -15, 1, + -8, 14, 16, -7, -7, 19, 25, -14, + -56, -32, 53, -10, -48, -28, -127, 7, + 
-49, -75, 3, 25, -7, 7, -49, -114, + 2, 31, 19, 56, -56, 24, 103, 64, + 51, -77, -66, -20, -28, 82, -85, 11, + -54, 49, -8, -10, 23, -101, -4, 103, + -70, -40, -4, 91, -45, -42, -73, 1, + -58, 1, -10, 7, 9, -52, 17, -30, + 1, 6, -125, 12, -16, -70, 49, 126, + -8, -56, -26, 63, 127, -127, -70, 32, + -50, -88, -15, -19, 38, -32, -11, 25, + 91, 124, -55, 77, 52, -22, -21, -66, + -66, 125, -41, 2, 33, -79, -42, 6, + -47, 44, 18, 32, 30, 15, 26, -8, + -97, 8, 49, -29, 51, 95, 90, -69, + -54, 2, -70, 17, -66, -41, -16, 116, + -14, 74, 8, 4, 51, -28, -125, 17, + 74, -30, -44, 39, 30, -68, -6, 27, + 68, -1, -46, 44, 39, -102, -69, -79, + 81, -25, 62, 89, 5, 20, -43, -1, + 46, 11, 29, -10, -57, 13, -21, -12, + 37, -35, -3, 21, 78, -122, -90, -48, + -2, -16, 24, 45, -120, 4, 30, 15, + 115, -80, 38, 95, -60, 80, 61, -29, + 13, 58, 82, 29, 81, 3, 28, 31, + -14, -47, 40, -45, -6, -56, 17, 30, + -52, -21, -122, -102, 22, -17, -62, 12, + -7, -12, 94, -38, 11, -88, -44, -16, + 37, -43, 31, -36, 31, -10, -89, 126, + -123, -2, 19, -81, 23, -78, -13, 25, + -5, -34, 2, -30, 24, -5, -33, 6, + 44, 10, -9, 8, 6, 47, -8, 32, + 9, 8, -4, 24, -17, 53, 89, 17, + 40, -20, -57, -112, 31, 6, 55, -21, + -112, 111, -124, -26, -47, -16, 76, -8, + -21, -59, -38, -17, -10, 20, 73, -48, + 50, 2, -33, -79, -40, 82, -2, 3, + 101, -40, 14, 42, -94, 34, -55, -126, + -9, 12, 35, -112, 58, -30, -30, 4, + 55, 37, -36, -126, -14, -68, 77, -37, + 99, 2, -23, 11, -31, -120, 22, -101, + -51, 52, -17, -37, 1, -12, 15, 20, + 45, 58, -37, -86, -13, 42, 53, 76, + -48, -1, -45, -60, 17, 42, -81, 1, + 18, 48, -61, 127, 23, -23, -8, 27, + 41, 6, 36, -125, -61, -64, 33, 1, + 63, -44, 22, -63, 40, -118, -83, -3, + 31, -83, -94, -105, -43, 84, -6, -69, + 41, 25, -66, -13, 3, 91, 31, 72, + -25, 52, 73, -126, -8, -116, 72, 14, + -34, 12, -111, -26, 4, -102, 67, -28, + 47, 5, -52, 114, -11, 55, -120, -93, + -105, -46, -24, -47, -16, 0, 15, -13, + -101, 64, 127, -96, -67, -128, -125, -29, + -30, -17, -56, 3, -33, 14, 4, 22, 
+ 56, -46, -60, -121, -21, 35, -16, 115, + 6, 0, 11, -12, 19, -4, -4, -122, + 46, 33, 40, -40, 42, 67, -39, -32, + -51, 67, -127, 54, 13, 23, 58, -3, + -88, -2, 61, 72, -69, 7, 123, -52, + 10, 10, 17, 22, -17, 6, 21, -55, + -3, -23, -12, -47, 2, 66, 12, -7, + 25, 78, -55, -29, -23, -36, -12, -58, + -115, -13, 13, -52, -17, 5, 30, -20, + 69, 26, -45, -33, 19, 12, -16, 21, + -21, -37, 5, -18, -54, -43, -1, 49, + 1, 34, 23, -24, 57, -86, -9, 48, + 12, -95, -59, -47, -36, 6, 81, 28, + -2, 11, -4, -4, 27, 13, -21, 34, + 60, -31, 11, -25, -5, 18, -32, 125, + 51, 65, -68, 11, 22, 115, 56, -66, + 24, 41, -90, -17, 20, -35, 81, 7, + 58, 71, -24, 61, 17, 16, 22, -37, + 61, 124, 88, 18, 11, -64, -73, -22, + 58, -92, -96, 106, 18, -41, 88, 83, + 122, 105, -77, -21, -97, -128, 13, 41, + -11, -70, 55, -12, 5, 35, -41, -80, + -33, 52, 10, -75, 0, -41, -35, -51, + -51, 18, 31, 25, 92, -107, 35, 28, + 117, -2, 26, -127, -14, -48, -53, 9, + -123, -25, -62, 90, 14, -5, 43, 1, + 25, 64, 90, -23, -35, -75, 25, -42, + 18, -73, 2, 34, -65, 29, 28, 33, + -32, -37, 6, -7, -90, -83, 2, 29, + 15, -47, -51, 47, 21, -8, -9, -43, + 53, -23, 45, 19, 23, -57, -39, 14, + 56, -114, -64, -2, 13, -64, -34, 61, + -8, 24, 26, -1, 6, 14, -126, 0, + -6, 21, -43, -18, -29, 21, 46, -26, + -3, -25, -23, 18, 60, -48, 47, -11, + -69, 74, -34, -41, 4, 18, 16, 41, + 32, -128, -42, -59, 48, -17, -81, -40, + -56, 19, -3, -19, -53, -4, -36, 52, + 93, 40, 123, -25, 26, -93, -122, -127, + 38, 10, 37, 25, 18, -29, -24, 22, + 3, 41, -15, 90, -64, -53, -23, -64, + -77, 91, -54, 80, -21, -41, 17, 19, + 16, 7, -3, -19, -61, 31, -43, -57, + -29, -45, 44, 40, 30, 57, 43, -23, + -28, -53, 4, -38, -26, -87, -8, -11, + 28, 42, -54, 75, 1, -42, 58, 3, + 24, -26, -27, 37, 15, 96, 52, -20, + -5, 46, 52, -11, -30, -14, 22, 2, + -11, 3, 28, 38, 61, 23, 99, -27, + -20, -1, 21, 16, 31, 24, 16, -29, + -22, -21, -2, -76, 19, -81, -124, -47, + -4, 30, 18, -22, -74, 46, -43, 31, + 33, -18, -54, -8, 43, 41, -19, 36, + -22, 41, 72, 87, 
36, -24, -11, 2, + 8, 98, -30, -37, 47, -21, 12, -30, + -128, -71, 91, -72, -39, -15, 100, -20, + 98, 20, -29, -46, -36, -46, 40, -12, + -7, 45, -48, -10, -39, -8, -45, 9, + 57, -49, -10, -21, -21, -49, -17, -18, + -28, -31, 90, -10, -63, 17, -33, -40, + -71, 34, 22, -9, -31, 121, 4, -1, + -83, -18, 25, 5, 18, -19, 20, 49, + 9, 16, 27, 14, 23, 18, 53, 28, + -99, -42, 17, -43, -114, 3, -12, -13, + -10, -7, -37, -27, -7, 26, -14, 19, + -6, 25, 13, 17, 19, 63, -67, 47, + -39, -126, 12, 32, 17, 12, 9, -50, + -126, -90, -28, -70, 9, 102, -59, -19, + 20, -58, -126, -13, -76, 1, -4, 69, + -42, -49, -18, -124, -59, -68, 32, -31, + 18, 127, 24, 123, -18, 30, -25, -7, + 13, -127, -71, -19, -43, -110, -11, -111, + 53, 33, 77, 119, 74, -70, -127, 83, + 68, -15, -2, -29, -48, -10, -38, 16, + -5, -32, -98, 25, -64, -51, -124, -126, + -126, -54, 62, -116, -28, -76, 2, -123, + -27, -52, 127, 102, -76, 99, 70, -64, + -3, -114, 124, -56, -29, -26, 1, 11, + 55, 101, -38, 50, 111, 10, -43, 48, + -72, 20, -78, 23, 46, -25, -23, 44, + -30, -27, 31, -17, 55, 27, -75, 59, + 81, -38, 25, -44, 111, 57, 19, 12, + 84, 7, -15, -56, 42, 49, 15, 38, + -9, -56, 38, 92, 81, 17, 92, 32, + 25, -39, 8, 40, 2, 34, -41, 26, + 35, 64, 57, 47, -32, -120, 46, -5, + -51, -114, 82, 61, 55, 31, -59, 120, + 106, 115, -3, 6, 16, 54, -12, 43, + 12, 21, 3, -12, -35, 15, 41, 67, + 67, -15, 48, 65, 57, -23, 3, -37, + -46, -72, 18, -69, 97, 12, -91, 13, + 16, -61, 8, 12, -107, 26, 15, -61, + 3, 42, -51, 39, 40, 69, 29, 6, + -4, 42, 16, 16, 94, 40, -9, 72, + 40, -2, -2, 34, -18, 26, -20, 80, + 8, 120, 30, 65, 98, -42, 83, 23, + -47, -30, -14, -25, -18, 22, -118, 90, + 49, 2, 15, 41, 22, 11, -30, -27, + 58, 30, 50, -66, 13, 100, -57, 5, + 58, 26, -8, 94, 17, -61, 56, -72, + 28, -113, -52, -24, -18, 28, 55, 0, + 9, -109, -102, -1, 19, 67, -15, 61, + -20, -108, -1, -32, -89, -9, 35, -80, + -19, 66, 90, 76, -97, 34, -20, 51, + -69, 18, 42, -19, 76, -21, -1, 71, + -51, 72, -29, 40, -60, -8, 72, 16, + 21, 36, 22, 55, 31, 
-17, 75, 70, + 24, -10, 28, 1, -15, 22, 51, -17, + -31, 54, 75, 84, 51, 99, 55, 4, + 26, 5, 6, 51, -1, -36, 53, 27, + -72, -63, -38, 17, 102, 114, -35, -61, + 50, 66, 125, -14, -36, 25, -32, -17, + 17, -9, -70, -83, 41, -36, -40, 8, + 87, -60, 15, -75, -58, -54, -24, -4, + 4, -109, 36, 46, 73, -66, -6, -34, + 12, -30, 74, -55, -23, 70, 25, -1, + -52, 72, 35, 109, 27, -28, -37, 44, + 39, -66, -125, 18, -49, -9, -5, 53, + -15, 42, 49, 80, -90, -60, 108, -46, + -13, 16, 37, 4, 55, 7, -47, -28, + -79, 38, 25, 57, -20, -9, 53, -74, + 68, -22, 29, 20, 51, -114, -1, -23, + 88, 16, 71, 30, 54, 82, -12, -21, + -34, 16, 35, -25, 54, -94, 36, -87, + 0, -62, 29, -50, -22, -41, -108, -54, + 28, 24, 122, 6, 74, 91, -35, 18, + -35, -3, -38, -62, 32, 112, 50, -53, + -73, 25, 58, -27, -69, -45, 44, -3, + -50, 63, -19, 47, -50, 44, -11, 10, + 77, -1, -17, -24, -97, -98, 22, 33, + -34, 14, 10, -72, 123, -62, -47, 75, + -34, 9, -43, 61, -71, 42, -103, -38, + 47, -114, 0, 16, -52, -24, 62, 27, + -29, -63, -13, 51, 126, -31, 19, 8, + -67, 11, -111, 126, -17, -15, 51, 127, + -18, 8, 20, 5, -24, -38, 19, -39, + -5, 94, 15, -21, 4, -64, -58, 71, + -92, 30, 43, -23, 3, 17, 58, 27, + 38, -37, -22, -12, 40, 62, 33, 9, + 5, 33, -34, 15, -35, 36, 118, 10, + 17, 42, 45, 57, 19, 32, 55, 58, + 79, -126, 16, -99, -28, 10, -22, 16, + 45, 55, -1, 125, -38, -7, 40, 56, + 4, -76, 41, -50, -20, -40, 59, -45, + -24, -60, 62, 97, 47, 65, 2, -57, + -4, -41, -7, 67, -4, -16, 41, -54, + 15, -42, -4, 94, 20, -101, -63, -126, + 56, -36, -5, 47, -11, -33, -24, 35, + 64, 28, -24, 104, 44, 22, -19, 40, + -8, -49, -54, 80, 62, 43, -29, -8, + -7, -20, 3, 32, -59, 53, -4, 10, + -82, -122, -126, -98, -60, -33, 124, 126, + 12, 68, 10, 1, 127, -12, -12, -46, + 54, -127, -86, -41, 26, -35, -125, -14, + 3, 56, -50, -123, -14, 59, -47, 109, + -12, -18, 115, 55, 32, 22, 108, 22, + -47, 18, -128, -50, 94, -4, 2, -17, + -25, -49, 37, 11, -30, -10, 70, 29, + -8, -81, 16, 56, -63, 0, 45, 40, + -2, 124, 15, -5, 22, -89, -9, 39, 
+ -52, -1, -5, 70, -67, 15, 14, -27, + 26, -18, -34, -22, 11, 1, -1, 6, + -65, -104, 30, -32, -18, -125, 72, -63, + 70, 26, -82, -103, 7, -44, -26, 26, + 115, 11, -1, -22, 10, 75, 42, 7, + -35, -105, 52, -9, -41, -1, -125, 22, + 49, 8, 44, -128, -68, 47, -112, -121, + 27, 108, 64, -118, 4, -68, -7, 42, + 24, 64, -121, 47, -29, 120, -44, 124, + -29, 38, 21, -5, -7, -21, -8, 85, + 4, 38, 16, 27, -32, 100, -3, -11, + -25, -18, -111, 40, 32, 46, -4, 28, + -70, 102, -23, 50, 13, -25, 41, -29, + -22, 25, 12, -50, -108, 36, -38, -3, + -12, -35, 21, 108, -103, -127, -11, 0, + -125, -39, -126, -122, 22, -123, 55, -123, + 26, -89, 50, 14, -127, -21, 117, 26, + -69, -127, 95, 1, -81, -41, 51, -27, + 10, -59, -18, -128, -50, -91, -123, -69, + 21, -14, -63, -119, 75, 24, -73, 7, + 52, -39, -100, -33, -127, -25, 19, 23, + 56, 7, 2, -12, -26, -2, 27, -45, + 24, 13, -32, 64, 40, -34, 47, -35, + -89, 76, -71, 70, -34, 30, 22, 9, + -124, 13, -105, -33, -31, 42, 9, -68, + 110, -38, 54, -72, 21, -126, -27, 16, + 19, -22, 1, -31, -63, 28, 3, -58, + 37, -9, 10, -75, -6, 97, -54, 113, + -40, -37, 90, 63, 27, 52, -58, 61, + 23, 118, -3, 23, 35, -14, -66, -60, + 5, -38, 37, -94, -17, 47, 115, 36, + -87, -46, -29, 0, 4, 72, -9, -73, + -50, -10, 14, 31, -10, 20, -44, -89, + -12, 63, 63, 36, -50, -71, 59, 109, + -62, 48, 30, -19, -53, -95, -3, 60, + -48, 76, -12, 24, 98, -20, -99, 23, + 58, 7, -7, -7, -9, -50, -13, 18, + 80, -118, 22, -16, -48, -17, -36, 33, + -24, 47, 56, -114, -25, -120, -110, -73, + 91, -41, -16, -46, -21, -73, 14, 113, + -24, -40, 63, 33, 17, 120, -53, -22, + 127, 58, 25, 11, -16, -7, 17, 127, + -73, -95, 28, -18, 51, 121, 0, 42, + 6, 17, -16, -18, -18, 114, 27, 61, + -29, -51, -15, 1, -71, 37, 25, 99, + 85, -36, 8, 11, -88, -15, -114, 72, + 23, -59, 50, 85, -55, -32, -16, 70, + 84, -29, 50, 56, -5, -19, 62, 56, + 11, 51, 43, -31, 5, 92, 1, 9, + -76, 18, -89, 3, 6, 44, 45, 40, + -1, -33, 35, 21, -25, 35, -11, 50, + -50, -61, 28, -108, -6, -62, 111, -13, + 44, -66, -31, 
-9, -22, 18, -46, 8, + -14, -43, -5, -95, 77, 80, 31, 23, + 32, -48, -3, -29, -9, -41, -25, -80, + -34, 55, 1, -11, -10, 22, -69, 6, + -19, 83, -49, -28, 20, -4, 52, -8, + -31, -18, -6, 48, -22, 53, -16, -24, + -56, -59, -32, -34, -11, -50, 49, 31, + 55, -29, -53, 35, 6, -16, 1, -19, + -6, -12, -25, 21, 41, -1, -2, -38, + -43, -83, 18, -2, -18, -27, 54, 47, + 24, -33, -50, 4, -6, -3, -23, -16, + 34, 4, 34, 29, -72, 89, 58, 91, + 25, -5, 35, -98, 33, 0, -14, -52, + 96, -38, -81, -24, 36, 68, 0, -42, + 8, -20, -14, 10, 115, 17, 6, 37, + 0, -52, 64, -10, -28, 26, 89, 10, + 16, 68, -7, 12, 26, 21, 73, -51, + -53, -65, 1, -30, -6, -17, 3, 1, + 11, 7, 82, 39, -37, -59, 59, 3, + 111, 13, 14, -19, -68, 44, 11, 11, + 22, 120, 13, 18, -32, -17, 1, 18, + 51, 53, 31, -15, -49, -16, -74, -39, + -52, 55, -68, -28, 116, 12, 25, 37, + 60, 64, 54, 56, -60, 79, 51, -46, + 62, -17, 2, 38, -46, 99, 123, 28, + 114, 46, 87, 1, -61, 64, 67, 21, + 0, -38, -19, 29, -61, 1, 50, 16, + -8, 51, 48, 89, 49, 47, 96, -13, + -29, 73, 27, 126, -12, 0, -23, -22, + 18, -42, -39, -28, 23, 118, -26, -62, + 63, -44, -27, 92, 32, -16, 42, 90, + -22, -63, -41, 19, -46, 98, -61, -70, + 46, 0, -65, 37, -16, 54, 1, -3, + 72, -56, -108, 7, 27, -38, -125, -41, + 10, -6, -60, 127, -32, -71, 68, 10, + -35, 64, 44, 48, -21, -41, -37, 23, + -59, 40, 38, -22, 26, 18, -20, 73, + -114, -24, -53, -39, -27, -11, -98, -21, + 5, 71, -115, 11, -37, -107, -60, -5, + 72, -84, -62, -28, -5, -121, -86, -2, + -51, 53, 127, 124, -39, 44, -44, -125, + 79, 29, 42, -6, -39, -29, 19, -26, + 10, -72, 49, 13, 41, -51, -97, 0, + -93, 52, -84, -32, -66, 12, -35, 20, + 3, 22, 69, 20, 97, -43, 41, 8, + -36, -26, 118, 26, 10, 35, -52, -2, + 22, 8, -13, -43, 2, -44, -1, -18, + 5, 61, -118, -103, -55, 5, 13, 10, + 5, 4, 106, 2, -22, 30, -13, -117, + -121, -38, 29, 24, 14, -14, -62, 33, + -13, -18, -8, -69, -6, -113, -91, 1, + -36, -32, -17, -36, 40, -10, -34, 10, + -74, -27, -33, -33, 12, 18, 4, -1, + -1, -33, 34, 8, 26, -41, 120, 60, + 
-41, -5, 11, -41, 37, 10, -7, 31, + 31, 26, -55, 25, -1, -12, 53, 43, + 29, -8, -48, 29, -94, 39, 41, -45, + 91, 15, 3, 29, 12, 69, 13, -68, + 65, -12, 54, 55, -18, -11, -40, -18, + -36, 76, -11, -79, -11, -16, 38, -42, + 86, -14, 37, -6, -8, 57, 2, -28, + 11, -47, -89, -34, -47, -73, -121, -19, + -73, 69, -6, 35, -5, 114, 82, -39, + 14, 5, -67, 12, -21, -17, -96, 14, + -20, -12, 2, 81, 12, -1, -126, 65, + -16, -36, -2, 4, -35, 2, -6, 49, + 1, -42, 69, -11, -47, -25, 25, 9, + 103, -50, -54, -8, 6, -56, -8, 21, + 6, 7, -104, -18, -106, 51, 127, -85, + 50, -6, -27, 20, 91, 22, 4, 39, + 25, 43, -5, -20, -32, -95, -3, 40, + -104, -11, -55, -128, -46, -71, 37, -101, + 77, -6, -44, 49, 26, -63, -105, -54, + 60, -58, -13, -37, 39, 43, 72, 42, + 22, 65, -60, -75, -59, -116, -106, -3, + 25, 45, -109, -84, -123, -28, 35, -24, + 0, -114, -82, -109, -114, 3, 14, 117, + 55, -17, 19, 32, 68, 45, 8, -128, + 75, -46, 82, 80, -85, -15, 106, -44, + 92, -31, -106, 93, -33, 80, 37, 3, + -92, 15, -18, 40, -107, 28, -48, -87, + -127, 37, 47, -64, 14, 36, 20, 10, + 25, 66, 18, -69, 92, 59, 17, -128, + -48, -39, 120, -14, -63, -31, -48, 8, + -9, 60, -21, -19, -2, -2, 37, -84, + 98, 80, -120, 82, 21, -126, -14, 8, + -54, 3, -6, 52, 8, -16, 23, -3, + 9, 61, -124, -9, 31, 14, -31, -2, + -30, -3, 2, 2, 0, 111, -42, 5, + -21, -43, 52, 36, 17, 39, 79, -30, + 53, 15, 5, -97, -29, 10, -7, 1, + -91, 78, 35, 34, 14, 38, -9, -16, + -96, 15, -125, 7, -22, 32, 17, -101, + 16, 15, 23, -30, -62, -15, 6, 27, + 4, 55, 38, -37, 29, -70, 33, -101, + -59, -103, -50, -34, -50, -62, 53, -26, + 127, 1, -1, -10, -6, -13, 7, -13, + 95, -111, 55, 12, 6, -3, -108, -59, + 27, -64, 36, -122, 125, 27, -75, 12, + 1, -59, 84, -105, 109, -94, -76, 20, + 23, 11, -36, -36, 43, -43, -123, 15, + -100, 65, 20, 16, -42, 25, 69, 28, + 37, 22, 112, -78, 23, 17, -10, 34, + -63, -11, -127, -46, 39, 39, 12, -47, + -69, -67, -53, -60, 8, 15, 45, -83, + 6, -30, -114, 9, -78, -43, -47, 69, + 15, -72, 68, 44, -53, 20, -120, 58, + -4, 
42, -11, -16, -125, 9, -65, -9, + -4, -84, 105, -12, -92, -105, -21, -33, + -121, -68, -8, 1, -39, -119, 38, -1, + -2, -127, 3, -104, -43, 127, -4, -35, + -125, 117, 106, -67, 49, 69, 15, -102, + 80, 8, -84, -84, -96, -94, -27, -32, + 4, -3, -16, 98, -25, 17, -24, 51, + 11, 37, 23, -85, -61, -41, 62, 28, + -127, -124, -101, -27, 98, -71, 56, 1, + -92, 28, 7, 13, -11, 34, 23, -125, + 56, 111, -124, -44, -15, -47, 5, -7, + 2, 45, 1, 26, -54, 26, 11, 0, + 5, -11, 22, 35, -41, 5, 41, -78, + 24, 18, -5, -30, 49, 22, -1, -19, + 39, -26, 1, -16, 8, -8, 3, 17, + 75, -1, -50, 30, -49, 3, 24, 35, + 38, 1, 3, -9, 10, -45, 42, -6, + -26, -35, 71, 75, 0, 17, 5, 11, + 57, -90, -100, 0, 50, -1, 26, 32, + 11, -20, 121, -41, -9, 22, 5, -2, + -40, 22, 4, -71, -14, 3, 10, 6, + -5, 0, 29, -22, 86, -3, 4, 8, + 13, 8, -3, -58, -30, -19, -15, 18, + 6, 16, -19, -87, 26, -11, -46, 64, + 40, 6, 108, -16, 49, 67, -45, 15, + 51, -6, 75, 87, -39, -92, 27, -14, + 7, -32, -128, -15, 39, -4, 50, 0, + -26, 25, 62, -24, 67, -57, 27, 64, + -25, 29, -35, 69, 6, -56, -70, -1, + 7, -69, -32, 45, 52, 31, 20, 27, + -29, 26, -72, -95, 38, 125, -8, 67, + -33, -126, 7, -13, 86, -111, -9, 23, + -52, 8, 16, 59, -26, -9, -19, 40, + -10, 87, 21, 17, -60, 126, 120, -22, + 35, 22, -14, -24, -33, 2, -96, 97, + -126, -118, 17, 9, 41, 70, 14, -50, + 11, -20, -37, -7, 76, 16, -52, 21, + -34, 34, 13, -13, -81, -126, 82, 65, + -14, 22, -23, -82, 61, -124, 34, -78, + 76, 56, -128, 41, 36, -48, 45, -40, + 33, 1, -102, -45, 0, -126, -58, -62, + -2, 60, 6, 108, -17, -37, -76, 13, + 8, 24, -6, -9, 69, 62, 26, -11, + -9, 15, -30, -6, -48, -121, -98, -60, + -125, 26, 52, 18, -64, 108, -20, -126, + 58, -123, 48, 70, -117, 75, 46, -62, + 9, -34, -21, 39, -15, -91, 37, 1, + -32, 18, -10, 44, -11, 15, 39, -32, + -55, -55, 92, -52, 37, -6, -9, -45, + -89, -30, 14, -29, 11, -69, -35, -62, + 21, 55, -1, 85, 21, 50, -20, 5, + 9, -81, 22, 80, 33, 38, 35, 29, + -22, 39, 83, -54, -5, 5, 14, -25, + 3, -20, 29, 20, -14, 7, 20, 7, + 11, 
-32, -52, -15, -8, 8, 36, -8, + 21, -9, -34, -116, -14, 32, -23, 38, + 30, 5, 27, -5, -78, 67, -4, -53, + -75, 9, 53, -12, 8, -3, 15, -4, + -10, -4, 21, 52, -16, -13, -36, -20, + -67, 113, -57, -27, 46, -5, -9, -27, + -123, -71, 71, 38, -30, 113, 31, 24, + 58, 8, 43, -77, -13, -81, -40, 55, + 29, 9, 13, 30, 4, -29, 24, 10, + -124, -10, 1, -25, -77, -30, 25, -12, + -29, 5, 47, -36, -39, -41, -39, -19, + -44, 0, 72, 34, -10, 9, 58, 45, + 24, -45, -75, -17, -9, 74, -20, -35, + 28, -68, 5, -82, -22, 12, -63, 35, + -6, 23, 37, -9, -46, -19, -22, 32, + 13, 7, 1, 14, -29, 65, 35, 4, + 66, -23, 33, 42, -28, 111, -18, 19, + -26, -79, -3, -68, -89, 126, -56, 0, + -83, 19, 95, 24, 52, -66, 10, 98, + -18, 51, -31, -13, 8, -126, -5, -48, + 91, -37, 127, -11, 105, -119, 18, 72, + 60, -128, -117, -102, 36, 91, 17, 24, + 4, 116, 11, 103, -15, -43, 22, -12, + -82, 74, -128, -49, -18, 54, -86, -2, + 10, -2, 18, 50, -1, 66, 99, 118, + 19, 46, -126, -90, 38, -126, -93, -24, + 8, -2, 23, 2, -39, 5, 3, -13, + 16, -80, -127, -37, -87, 29, 12, -54, + 21, 0, 0, -122, -14, -38, 71, 45, + 21, 16, -24, -72, -35, 18, -37, -20, + 17, -30, -97, 50, 36, 127, -44, -19, + 4, -40, 2, -25, -3, 25, -27, 55, + 5, 28, -68, 39, 58, -61, -50, 22, + 56, -68, -45, 75, -1, -8, 62, -59, + 36, -5, -113, 71, -42, 29, 32, 4, + -73, -23, 55, 66, 37, -40, -25, -53, + 18, -7, -78, -70, -22, 39, 48, -43, + 77, -6, 35, 17, 23, -45, -22, 61, + 15, 42, 35, -42, 0, -125, -65, -20, + 31, 20, 74, -12, -37, -46, -33, 28, + 31, -64, -13, 25, 69, 15, -23, -55, + 93, 1, -20, 55, 81, 52, 18, 3, + -79, -22, 88, -29, 114, -6, -40, 12, + 34, 19, 104, 45, 4, -43, 22, -10, + 61, -82, 25, 35, 29, -30, 7, 107, + -72, 2, -52, 21, -28, -5, 127, 8, + -27, -96, -95, -14, -127, 12, 47, 32, + -116, 14, -48, 35, -34, 114, 100, -14, + -73, -36, 11, 50, -23, 11, -38, 12, + 20, -20, -125, 67, -45, -28, 4, 88, + -5, 58, 10, -52, 21, 17, 41, -37, + 66, -82, 21, 39, 0, -43, -9, -55, + 42, -16, -81, -31, 11, -20, -17, 119, + -29, -68, 16, -125, 
32, 107, -92, -126, + 25, -35, -26, -51, -53, -79, 58, -19, + -127, -36, 3, -20, -55, -74, 47, 36, + -103, -52, -110, 62, 12, -123, 78, -43, + -127, 30, 36, 34, -110, 34, -39, -89, + -126, -29, -29, -60, -34, 39, -19, 4, + 22, -70, -98, -52, -67, -77, 5, 33, + 9, -23, -14, 28, 25, 9, -80, -99, + 17, -26, -101, 47, -24, 24, -36, 53, + -126, -23, -84, -28, -105, 10, -60, -60, + -56, -87, -74, 26, -41, 60, -67, -10, + 96, -89, -89, 42, -29, -3, 47, 4, + 30, -4, -26, -70, -20, 70, -27, 44, + -74, -8, 54, 35, 115, 12, -21, -13, + 10, -41, -70, 39, 86, 54, -40, 22, + 42, 13, 6, 24, 13, -1, -58, -37, + -53, -3, -25, -23, 5, -30, 31, -28, + -103, -1, 13, -54, -70, 4, 2, 4, + 10, 20, 32, 2, -38, -84, -3, 37, + 23, 54, 5, 16, 12, -14, -10, -3, + -26, 2, -36, 83, 8, 33, -10, -5, + 48, -32, -32, -36, 16, 9, -42, 38, + 60, 36, -81, -52, 56, -88, -55, 21, + -26, 24, -26, 127, -59, -60, -12, 23, + -26, -2, -100, -58, -33, 17, -2, -71, + -40, -124, 102, -10, 72, -119, 121, 104, + -47, -35, -24, 113, -122, -23, 82, -101, + -60, 69, 27, -4, 110, 32, 16, -104, + 44, 10, -89, 25, 30, 44, -1, 78, + 82, -1, 29, 25, -58, 44, 70, 70, + 9, -31, -30, -8, -17, 60, -102, -35, + 4, 43, 85, 84, -6, -80, 5, -71, + -45, 22, -63, 52, -116, -37, 90, 93, + -58, 2, -2, 1, 20, -31, 35, -55, + 31, -20, -70, -26, 59, -32, -24, 76, + -108, 27, -19, -6, 30, 68, -1, 4, + 79, 45, -43, 45, -13, -47, -14, -66, + -41, 85, -39, -24, -17, -50, 22, 2, + 51, 27, -28, 62, 12, 50, -107, -62, + 2, -17, -9, -63, 34, -30, 35, -10, + -123, -24, -49, -12, 57, -45, -46, -63, + -29, 37, -103, 49, -80, -31, 31, 46, + -53, 6, 65, 10, -38, -44, -71, -4, + -23, 10, -61, -48, -46, -33, 17, -23, + 77, 39, 25, 0, 58, 50, -1, 25, + 44, 66, 42, -4, -6, -12, -36, 50, + 1, 113, -5, -23, -54, -4, -5, -10, + 3, 25, -4, -98, -37, 49, -33, -21, + -40, -18, -68, 93, -29, -3, -9, -1, + 48, 22, -65, -12, 40, 2, 29, 2, + 36, -119, -9, 4, -27, -40, -1, -17, + 15, -44, -7, 2, 59, -23, -33, 10, + 54, -17, -26, 111, -5, 46, 73, 50, + -5, -91, 
45, 27, -105, -13, 37, -55, + -68, 25, -23, -7, 5, 80, -28, 4, + 127, -59, -102, 18, -9, -29, 8, 32, + 56, -123, 5, 19, -98, 52, -126, 36, + 35, 26, -17, -78, -97, 71, 5, -38, + 92, -31, -42, 39, 9, 81, 4, 33, + 3, -49, -25, -1, 0, 106, -75, -85, + 14, -127, -67, -41, 46, 32, 10, -3, + -40, 86, 73, 73, 20, 99, 42, 41, + 30, 77, -1, -3, 57, 19, -13, -19, + 43, 81, 41, -13, -16, -95, 17, 35, + -4, -30, -98, -59, -2, 103, 74, -103, + 2, -13, -1, -2, 84, -3, 126, 15, + -103, 18, -30, -35, 20, -21, 72, -40, + 46, -15, -10, -6, 11, -19, 17, -25, + 110, 85, 48, -22, 2, -64, 42, -69, + 6, -10, -37, 12, -50, 5, 31, 1, + 61, 74, 60, -50, -29, 65, -67, -35, + -41, -128, 106, -4, 92, -21, -57, -44, + -127, -84, -57, 49, -32, -12, 56, -86, + 17, 50, 47, 57, -20, 42, 11, 32, + 26, 43, -12, 59, 27, -84, 62, 6, + 7, 16, -5, -44, 36, -75, 9, 25, + 17, 89, -7, -54, 1, -54, 12, 34, + -7, -4, -71, 18, 38, -26, -19, 13, + 3, -10, 56, 48, 9, 100, 47, 34, + 123, 98, 44, 84, -9, -16, -10, -17, + -20, 5, 50, 111, -28, -64, -20, -127, + -69, -3, 64, -27, -37, -79, -15, -84, + -81, -16, 55, -78, 98, -97, -20, 86, + -15, -79, 57, 24, 31, 29, 26, 33, + -21, -52, 31, -46, 73, -47, -17, -44, + 102, 84, -63, -59, -69, 123, 39, 80, + -83, -37, 37, -116, -37, -2, -54, -101, + 93, -21, 36, -19, -8, 93, -115, 69, + -18, -37, 13, 49, 99, 10, 2, -31, + -109, -127, 50, 49, -43, -20, -56, -43, + 18, -10, -127, -52, 45, 82, 3, 11, + -82, -22, -90, 70, -55, 99, 82, -41, + 39, 6, -104, -14, -104, -9, 34, 127, + -15, 7, 99, 0, -58, 1, -21, -9, + -17, -34, -28, -15, -41, -10, 16, 12, + 46, -13, 10, -11, -15, 45, -73, 16, + -37, 114, -50, -79, 110, 81, 14, -17, + -1, -128, -124, -35, 77, 72, 61, 124, + 41, 29, -40, 57, -3, -24, 115, -62, + -53, 35, -9, 15, 109, 71, 18, 22, + 39, -128, -49, -127, 19, 116, 74, -10, + -36, 123, 102, 19, 108, 23, 4, 95, + 126, 101, -16, -10, 13, -90, -95, -18, + 61, 0, 79, -115, 18, 57, -5, 114, + -123, 35, 124, -126, 46, -18, 21, 98, + -117, 3, -123, -56, 119, 45, -61, 31, + 34, 
-26, -51, 15, 34, -24, 61, -18, + -57, 3, -75, -116, 22, -117, -18, -3, + -22, -30, -33, 27, 33, 125, 24, -33, + 35, -77, -92, -33, 25, -127, 32, 36, + -69, 34, -6, 54, -16, 34, -13, 64, + -126, -128, -8, -86, -8, -63, 70, 22, + 11, -37, 37, -99, 40, 127, 68, -23, + -28, 126, -67, -43, 52, 39, 94, 13, + -62, 24, -110, -95, -11, -39, -67, 48, + -111, -12, 34, -66, 126, 35, -26, -59, + -127, 31, 73, 30, -128, 30, -126, -128, + -113, 28, -126, -127, -1, -2, -57, 23, + -53, -5, -56, -41, -76, 122, 8, 45, + 76, -120, -86, 37, -123, 20, 0, 35, + -77, 63, -102, -63, -17, -51, 7, -116, + -7, -10, -79, -119, -29, 23, -18, 70, + -89, -33, -40, 14, -16, -102, -17, -77, + 33, -11, 49, 58, -15, -61, 40, -49, + -36, -47, 18, -23, -81, 123, -15, -14, + -83, -32, 113, -1, 1, -85, -20, -85, + 4, -66, -118, -31, -90, 79, -74, 7, + -45, -45, -54, 113, 59, -25, -23, 55, + 23, -115, 18, -80, -111, -61, 15, -93, + -68, -59, 125, -50, -10, -40, 39, -113, + 21, -92, -126, 79, -23, 34, 37, -16, + -6, 28, 31, 12, -96, 9, 34, -45, + -32, 114, -36, -5, 118, -49, -22, 119, + -5, 51, -37, 3, 17, 14, -52, -92, + 41, 88, 2, -80, 98, -52, 46, 31, + 52, -6, -40, 33, -25, 55, -16, 50, + -119, -41, 77, -91, 43, -70, 26, -4, + -2, 35, -68, 57, 10, -97, -21, -54, + 18, 117, 121, 13, -40, 104, -10, 50, + 48, 13, -5, 37, -23, -31, 13, -37, + 11, 74, 43, -13, 35, -36, 23, 106, + -54, 40, -68, -58, -9, 52, 10, -58, + 27, 90, 7, 43, -58, -4, 93, 1, + -89, -64, -42, -17, 20, -48, -126, 122, + 71, -116, 8, 123, 52, 37, -17, -46, + 0, 57, -11, 113, -93, 50, -61, 41, + -1, 33, 22, 73, 35, -43, 48, 98, + -55, 33, -10, -43, 37, -47, -3, 14, + -32, -126, -23, 25, 0, -41, 30, 40, + 81, 102, 84, 15, 43, 28, -6, -68, + -10, 62, 83, 0, 13, -25, 35, 30, + 39, 19, -8, 29, -12, -126, -21, 64, + 3, 107, 29, -124, 19, 6, -114, -126, + -42, 23, -90, -67, 65, -127, 27, -70, + 42, 5, 125, -3, 28, -72, 27, 14, + -72, -41, -80, 42, 0, -11, 28, -3, + -37, 39, 43, -47, 57, 41, -54, 23, + 53, 100, 9, -31, -17, 42, 126, 32, + 109, 
-3, 16, -57, -13, 36, -58, 63, + -30, 37, 38, 20, 55, 6, 14, -16, + 24, 38, 23, -8, -13, 73, 127, -3, + 21, 93, 36, 33, 12, 44, 36, -33, + -8, 29, -51, -26, -24, -7, 44, 66, + 20, -9, 86, 33, 20, -56, 2, 48, + 12, -83, 23, 72, 14, 39, 57, -78, + 28, -23, -10, -8, 20, 31, 8, -70, + 47, -16, -64, 1, 4, 42, -13, -1, + -45, 81, 64, 58, 48, -12, 45, -20, + -7, 32, -3, -75, 102, -18, -32, 66, + -16, -48, 21, 10, 52, 11, -70, 31, + -88, -46, -45, -37, 55, 111, 23, 44, + 6, 40, 36, 40, -10, 127, -20, 6, + -42, -45, 6, 68, -4, 40, 37, 19, + -36, -61, 30, 26, 18, -57, -7, 5, + -121, 20, -15, -90, 23, 30, 11, -1, + 26, -23, -8, 24, 15, -80, -96, 15, + 29, 90, -51, 4, -42, 33, 13, 25, + 48, -33, 96, -19, -45, 25, 17, -68, + 16, -80, -1, -6, 30, -11, -23, 8, + -110, -39, 22, -32, 87, 76, -34, 62, + -62, -82, -20, 127, -28, -49, 54, -2, + -96, 30, 13, 53, -71, 5, -45, 52, + -6, 5, -19, -17, 12, 0, 74, 51, + 71, 83, -11, 11, 88, -35, 10, -7, + 8, 42, 13, -49, -29, 0, -75, 8, + 10, 88, -12, 21, 23, -82, -17, 19, + -5, -62, 127, 93, 19, 26, 89, -15, + 18, 12, 41, 24, 12, -22, 92, -21, + -7, -58, -9, -45, 38, -72, -15, -125, + -96, 9, 67, -7, -15, -17, -28, 30, + 6, -7, 99, -47, 29, -31, 24, -14, + -107, 15, 12, 57, 26, 3, -57, 19, + 1, 3, -16, 27, 12, 5, 52, 31, + 16, -124, -2, 24, -26, -18, -76, -115, + 3, 97, 30, -20, 127, 32, -6, 29, + -37, -59, -40, -20, 29, 49, 11, 20, + 37, 33, 29, 7, 11, -12, -16, -84, + -49, 4, 38, -5, -9, -9, 50, 6, + 2, -25, 103, 21, 73, 55, 15, -13, + 58, -9, -4, 50, 7, 1, -8, -6, + 19, 18, 12, 10, 10, -26, -3, -5, + 62, 56, 21, -7, 41, -32, 0, 11, + -32, -14, 48, -1, 26, -19, 24, 46, + -8, 0, 26, 58, -5, -26, -27, 4, + 36, 59, -25, 3, 4, -28, 26, 63, + 51, 48, -61, -121, -19, -45, 49, 1, + -1, 58, 5, 38, 47, 28, 14, -2, + 20, -11, -33, -13, 42, -29, -53, 38, + -44, 107, -9, 60, -10, -78, -15, 78, + 21, -71, -36, 58, -27, 15, -4, -41, + -50, -85, -61, -62, -105, 59, 39, 29, + -27, -16, 68, -26, 26, -4, -48, 58, + -15, -13, -2, 48, -33, -47, 34, 1, + 4, 
-36, -13, 32, -3, 63, 14, 15, + 107, -31, -37, -25, 49, 112, -29, 22, + 20, 89, -12, 5, -1, 91, -54, 44, + 32, -42, -14, 43, 113, -61, -46, 126, + -99, 63, -16, -75, 119, 57, 42, -124, + 99, 91, 57, 31, -81, 16, -57, -32, + -79, -6, 123, -36, -47, 109, -125, 1, + -72, 21, -50, 82, -4, -2, -36, -127, + -28, 8, -105, 51, 67, 34, 50, 9, + -25, -125, 54, 12, 28, 104, -15, -22, + 29, 3, -14, 35, 9, 26, -25, -72, + -26, 98, 16, 23, 16, -8, 20, -112, + 74, 98, -18, -56, 85, 127, -51, 1, + -84, 38, -31, -86, -45, 46, 5, -54, + -54, -114, 47, -43, 20, 121, 21, -18, + -11, 95, 23, -27, -25, 39, 8, -10, + 17, 28, 61, 6, -56, 58, 4, -37, + 35, 23, 51, -19, 5, 19, -1, -1, + 14, 33, 15, 37, -11, 19, 35, 45, + 13, -14, 46, 19, -42, 55, 28, 51, + 30, 54, 120, 17, 66, 42, 66, -12, + -9, -13, 44, 38, 5, -7, 27, 75, + 78, 35, 31, 78, 49, -4, -14, 40, + 33, 41, 1, -68, 23, 53, -12, 11, + 13, 28, 29, -43, -41, 42, -25, -12, + 38, 18, -4, 7, 38, 29, 44, 31, + -26, 61, 37, 49, -19, -6, -17, -12, + 49, 56, 33, 80, 43, 42, 29, 62, + 4, -53, 34, -57, 30, 13, -55, -28, + 19, -6, 43, -43, 19, 48, -16, 29, + -25, 64, 30, 51, -62, -18, -16, 15, + -30, 8, -14, 60, 20, -12, 49, -32, + -28, -42, 3, 17, -55, 11, 26, -51, + -25, -38, 47, 48, -29, 33, 9, -20, + -6, -12, -54, 111, 4, 31, 77, -5, + -2, -50, 46, -5, -18, -30, 28, 33, + 11, 48, 22, -3, 3, -34, -41, -4, + -27, -16, -41, -10, -21, 34, 9, 46, + 23, 46, 25, -28, 28, 32, -16, 47, + 127, 36, -8, -21, 57, 20, 35, 80, + 7, 18, 83, 16, 59, -56, 3, -23, + 111, 40, -24, 23, 62, 101, 29, -122, + 26, 74, 36, -27, 8, 79, -94, 47, + -121, 12, -63, -4, -2, -76, 8, -18, + -45, -3, -112, 66, -71, -7, 19, 21, + -108, -120, -10, -27, -43, 76, 67, -127, + -35, 12, -2, 61, 103, -34, -3, -6, + 24, 80, 44, 5, -13, -52, -4, -14, + 70, 38, -46, 65, 79, -62, -93, -5, + -101, 95, -20, -52, 55, 11, -126, 11, + -38, 23, 16, -47, 7, 66, -65, 2, + -16, -26, 21, 43, 27, 42, -41, -50, + 9, -126, -21, -67, 34, 51, 47, 77, + -3, 0, 30, 19, 17, 3, 50, 1, + 120, 20, -12, 90, 
56, 1, -21, 24, + -15, 97, -2, -19, -35, -1, 45, 12, + -41, 34, 8, 12, -23, 43, 121, -33, + 62, 39, -119, -26, 36, 44, 14, 26, + 5, 8, -64, 16, -10, -43, -45, -22, + 5, -16, -21, 40, 13, -16, 31, 109, + -31, -38, 121, 4, -59, 85, 14, -57, + 20, 3, -51, 45, -40, 33, 13, -11, + -9, 27, 55, 43, 10, 6, 14, -37, + -31, 127, 22, -7, 34, 11, -102, -60, + 29, -58, 21, 14, 99, -16, -101, 17, + -64, 36, -6, -62, 22, -37, -49, 30, + 74, 88, 95, -33, 45, 20, 52, 55, + -98, -49, 19, 56, 9, 124, 93, 0, + -14, -66, 34, 20, -63, -40, -12, 59, + -7, -51, -9, 68, 4, 124, -37, -78, + 32, -9, -4, 69, 21, -19, -27, -33, + -58, 60, -39, -32, -112, -5, -35, 124, + -18, 47, -99, -57, 15, -85, 49, -98, + 104, -86, -104, -57, -17, 9, 21, -9, + -85, 65, -127, 75, 113, 71, -5, 127, + -53, -60, 14, -120, 35, -2, 20, -38, + -82, -50, -67, -8, -54, -52, -7, -8, + -10, 14, 11, 112, -5, -40, 37, 21, + -90, -39, -67, -127, -25, -27, 80, -14, + -39, 16, 61, -18, 38, 41, 93, -4, + -5, -80, -35, -47, -6, -12, 23, -37, + 108, -8, -126, -125, 12, -54, -26, 40, + -52, -72, -26, 47, -35, -98, -15, -68, + -51, -21, 21, 40, -89, 121, -116, -5, + -23, -1, -123, -125, 21, -34, 2, -47, + -18, -49, -123, -19, -94, -21, 37, 13, + 43, -79, 81, -2, -21, -11, 104, -16, + -37, -29, -46, -3, 64, -38, -120, -10, + 25, 15, -17, 1, 110, -5, -24, -42, + -23, 43, -17, -93, -122, -3, -35, 0, + 11, -84, 24, -49, 78, 0, -22, -49, + -54, 83, -8, 8, -13, 64, 46, 27, + -99, 89, -27, 69, 1, -53, -83, 5, + 19, -91, 42, -97, 92, 62, 51, -79, + 7, 13, 34, -37, 57, 16, -53, -55, + 4, -26, -11, -29, -39, -60, 69, -25, + -8, 19, 4, -6, -123, 73, 26, -61, + 21, -90, 8, 7, -123, -20, 12, 29, + 53, -121, 83, -39, -24, -46, 15, 24, + 31, 2, -89, -64, -3, 85, -8, 127, + 7, 14, -25, -3, -64, 104, 84, 74, + -38, 39, 25, 119, 67, 15, -71, 28, + 113, 65, -27, -25, -19, -11, 19, 76, + -3, -88, -75, -34, -110, -64, 19, 69, + -76, 102, -8, -68, -12, -43, -31, -9, + 120, -36, 46, 96, -73, 89, 83, 98, + 50, -81, 68, 34, 39, 68, -17, -19, + 47, 111, 
15, -58, 96, 1, 18, 2, + -73, 82, -59, 5, 47, -112, 9, -43, + 4, 18, -47, 82, -93, 88, -48, 58, + -62, -57, 67, -41, 87, -29, 65, 69, + 63, 54, 5, -75, -4, 38, 57, 24, + -48, -8, 13, -18, 38, 126, -128, -66, + 43, -4, -103, -21, 47, 59, 19, 70, + 26, 1, -9, 10, -60, 18, -120, 21, + -8, -36, 15, 120, -111, -5, 58, 2, + -33, -119, -77, -105, -7, 118, -116, -3, + 18, 73, -35, 2, 17, 1, 56, 16, + -26, -26, 17, 27, 52, 0, -12, -5, + 41, -19, 36, 68, -7, -7, 70, -127, + -49, -19, 44, -88, 22, -118, -52, 48, + 63, 66, -97, 54, 46, 43, -47, 59, + -62, -91, -58, -39, 95, 35, 19, -15, + 42, -31, -41, 31, -17, 36, -114, 34, + -105, 71, -4, -40, -40, 11, 47, 5, + -9, 101, 80, 36, -9, 8, 47, 23, + -19, 31, 22, 69, 39, -14, 124, -37, + 23, 60, 69, 11, -3, 57, 28, 59, + 1, -3, 125, 17, 65, 23, -24, 0, + 33, 64, 34, 30, 33, 42, 29, 31, + 71, 64, -16, 58, 40, -61, -46, 29, + 2, -38, 11, 14, 68, -2, 21, -26, + 25, 5, -19, -22, 92, 38, 51, 22, + 127, -32, 99, 77, 42, 34, 75, -58, + 46, -3, 2, 13, 66, 27, -22, 37, + 13, 20, -29, 47, -56, 68, -78, 14, + 43, -36, -9, 44, 39, -20, 18, -53, + 18, -11, 9, 16, 33, 19, -23, -9, + 40, 55, 52, 23, 21, -4, -31, -18, + 22, 4, -12, 95, 2, -10, 63, -17, + -12, -15, 82, -23, 1, 16, -22, 88, + -34, -17, 4, 49, 9, 41, 62, -9, + -6, 32, -64, 35, -14, -11, -31, 19, + 50, 41, 91, -2, -62, -16, -5, -23, + -10, 11, 66, -56, -2, -59, -1, 74, + 11, 110, -15, -25, 90, 69, -18, 68, + -38, -7, 10, -14, 114, 1, -15, -57, + 52, 118, 45, -61, -65, 116, -100, 11, + -73, -116, 81, 11, 24, -33, 1, -39, + -88, -48, 11, 70, 7, 59, -50, 13, + 12, 30, -96, -65, 44, 60, 12, 23, + -50, 118, -28, 41, 59, -10, 47, -5, + -54, 97, -27, -102, 33, 31, -56, -4, + -8, -83, 45, -29, -19, 122, -15, -39, + 89, 11, 3, -67, -21, -18, 71, -2, + -49, 49, 97, -69, -68, -8, -79, 63, + -27, 115, 1, -116, -40, -119, 19, 37, + -30, 86, -17, -85, 53, -40, -23, -78, + -58, -88, 99, 119, 47, 22, -30, 42, + 25, 40, -62, 11, 34, -29, -32, -27, + 26, 16, -15, -72, -5, 1, -81, 20, + -17, 13, 120, 
-30, 124, -29, 41, -72, + 19, 34, -92, 63, 44, 76, 12, -33, + 50, -2, 77, -1, 41, -1, 17, 41, + 73, 40, -8, -22, 95, 46, 71, -1, + -80, 41, -44, 10, 4, 37, -43, 70, + 43, -123, -36, 42, 39, -8, 13, -17, + 3, 76, 29, 22, 9, 29, 28, 22, + -44, 14, -44, 4, 26, 55, 38, 23, + -38, -44, -8, 59, 14, 37, 43, 48, + 49, -20, -3, 38, 12, 46, 5, -12, + -25, -21, -25, -71, 64, 55, 124, 18, + 57, 28, 48, -118, -15, 41, 43, 45, + 18, -76, -24, 11, -8, -72, -20, 41, + -45, 97, -35, -68, -1, 104, -95, -39, + 9, -77, -23, -8, -9, 24, -13, -22, + 86, -3, 55, -125, -10, -65, 44, 49, + 49, 19, -122, -6, 41, 54, 80, -16, + -76, 48, 111, -125, -21, 98, -6, -21, + 31, -16, 0, -33, -119, 42, 64, 12, + 6, 56, -8, 55, 44, 66, 27, -20, + -73, -37, 31, -16, -24, 11, 126, 24, + 26, -61, 13, -24, 69, -4, -33, 0, + 14, 11, 60, -15, -125, -68, -56, 60, + -10, -126, -26, -124, 73, -54, 40, -70, + 17, 57, -28, -37, -28, 20, -72, 27, + -13, -119, 6, -123, 23, 106, -18, 65, + 28, 61, -126, -59, -16, -57, -35, -125, + 3, 3, -122, -127, -36, 51, -3, -15, + 42, -87, -71, -117, -117, 99, -120, -11, + -6, 0, -12, 48, -58, 15, -128, -59, + -19, 43, 2, 49, 49, -18, -75, 22, + 105, 19, -73, 117, -20, 10, -4, 13, + -99, 25, -125, -79, -89, 51, -27, -28, + 0, 40, 3, -55, 6, 31, -27, 39, + -3, -12, -14, -11, 28, -55, -57, 33, + 54, -126, -74, 27, -8, 96, 17, -70, + -21, -35, -44, -7, -14, 19, 57, 33, + 9, -46, 16, 18, -14, 43, -43, 0, + 11, 16, -3, 0, -28, 80, 54, -3, + -12, 12, -13, -15, -26, 102, 4, -2, + 16, -21, 31, 16, 3, 4, -4, -5, + 37, 37, -61, 41, 27, -4, 1, -30, + 13, -41, -14, 16, -2, -7, 31, 127, + -10, 31, 105, 19, 35, 100, -14, -30, + -3, 27, 82, 89, -71, 61, 9, -18, + 26, 49, 78, 33, -22, 1, -47, -24, + -117, -54, -55, 6, -59, 116, -48, 41, + 38, 3, 114, -65, 0, 21, 16, -16, + -13, -33, 7, 15, 12, 44, 69, 35, + 11, -32, -59, -87, 71, 59, -20, 26, + -35, -3, 1, -25, 56, 56, 127, -15, + -15, 96, -4, 66, -47, -76, 57, -48, + 84, 8, 42, 78, -10, 126, -40, -34, + -14, 33, 22, -23, 48, 8, -10, 3, + -32, 
13, 61, 21, -2, 0, -62, -26, + -96, 61, -59, -93, -72, 119, 34, -58, + -18, 35, -18, -68, -13, -38, -49, -28, + -79, -30, 33, 78, 80, 69, 46, 50, + -8, 21, -69, -77, -41, 105, 7, -122, + 42, 24, 69, 39, -69, 118, -125, 25, + -124, 12, -18, -59, -7, -31, 62, 8, + 0, -2, -15, 0, -46, -99, 9, 86, + 9, 33, 35, -42, 3, 121, 125, -50, + -76, 113, -85, 21, 8, -68, 73, -18, + -17, -30, -128, 8, -119, -33, 31, 28, + -11, -5, -9, 9, -31, -24, 42, -79, + 25, 5, 30, 121, -1, -22, -95, 88, + -89, -29, -100, -104, -100, 94, -75, 31, + 75, -65, -125, -91, 48, -125, -27, 20, + 105, -126, -95, -49, -29, 14, -6, -35, + -36, -22, -25, 16, 53, 5, 52, 32, + -24, 80, 16, 83, 31, 42, -120, 17, + 34, -56, -2, 32, -99, -40, -17, -27, + 122, 87, -70, 38, -28, -116, 0, 17, + -41, 40, -82, 31, -27, -2, 39, -68, + 17, 24, 81, 40, -47, -9, -22, 8, + 30, 37, 30, -26, -20, 17, 41, -27, + 64, -20, 31, 27, -38, 37, -13, -36, + -57, -32, -32, -31, -31, -63, -12, -23, + -26, -19, 13, -55, -23, 79, -17, 5, + 30, -5, 22, -27, 34, 14, 4, -14, + -109, -1, -10, 21, -8, 19, 33, 44, + 1, -2, 39, 16, 77, -22, 0, -8, + 37, -30, -28, -125, 0, -48, -14, -117, + -71, -60, -55, 40, 27, 14, 98, 47, + 120, -75, 93, 7, 5, -2, -18, 60, + -52, 66, -73, 11, 8, -75, -26, 38, + -15, -106, 91, 76, -94, -39, -87, -15, + -38, -13, 55, -26, -123, -68, 39, -60, + 8, -18, -5, -7, -5, -37, 65, 55, + 47, 115, -20, 48, 40, -34, 27, -23, + -65, 4, -79, -126, 36, 93, 43, 5, + 84, 14, -49, 52, 65, -80, -50, -29, + -31, 56, -37, -56, -66, -51, 30, -43, + -24, 41, 3, 110, 56, 29, 40, -9, + 23, 40, 6, -16, 5, -77, 73, 2, + -30, 6, 0, -42, 32, -1, -3, 82, + -106, 6, -47, -41, -58, -38, 85, 3, + 12, -79, -18, -1, 48, -19, 9, 88, + 4, 35, -66, 61, 72, -7, -42, -30, + -29, 21, 113, 28, 93, -4, 71, -116, + 6, 4, 23, -51, 26, -3, 3, 26, + 16, -28, 27, 1, 69, -50, -35, -19, + -47, -41, 12, -99, -72, 78, 19, 20, + 110, 46, -65, -36, -9, -10, -18, 39, + -27, 43, 15, 15, 97, 57, -23, 11, + 38, -24, -88, 18, -41, 71, 42, -34, + 21, 94, 53, -19, 
-25, -23, -82, 61, + -61, -17, -1, -127, -19, -2, 53, -75, + 44, -118, -45, -12, 80, -120, 36, -95, + -55, 46, -16, -29, -32, 46, -98, 30, + -52, 10, -44, 38, -29, -14, -63, -7, + 21, 39, -66, 72, -17, -39, -17, 9, + -23, -40, 36, -51, 42, -87, 29, -42, + -122, -9, 44, -47, -39, -34, -76, 58, + -99, -7, 8, -18, -8, -44, -42, -11, + -71, -29, 59, 4, -82, -13, 25, 52, + 31, 5, -22, -3, 67, -43, -68, -44, + 29, -20, -63, 61, -55, 98, -31, 42, + -102, -8, 30, -88, -5, -24, -127, -110, + -38, -29, 81, -71, 33, 9, -27, -20, + 67, -108, 55, -27, -94, 11, -60, -16, + 33, -6, 24, 127, -94, -86, -85, 16, + 4, -40, -70, 20, -43, -20, 48, -25, + 49, -126, 78, -62, -22, 87, 36, 31, + 33, -125, 39, 81, -117, -60, 2, -19, + -48, 18, 24, -27, 12, -22, -14, -14, + -70, -2, -42, -109, -1, 56, 9, -110, + 103, 35, -17, -31, -60, 72, -49, 45, + 70, 43, 37, 107, 124, 70, -13, 78, + -48, -47, -38, 82, 23, 127, -29, -22, + 9, 30, -112, -9, 86, -69, 52, -77, + 36, 67, 3, 113, -32, -53, -23, 18, + -122, -53, 7, 27, 22, 1, -71, -51, + 87, 62, -56, 43, -116, -126, -27, 16, + 53, 46, -62, -13, 36, 94, -27, 29, + 6, 58, -14, 103, -21, -24, -48, -54, + 2, 94, -3, 57, -124, -104, -9, 106, + 38, -69, -13, -43, 35, -65, -83, -89, + -126, 35, -4, -45, -28, -47, -65, -82, + 75, -36, 61, -81, 85, -112, 119, 59, + -7, -11, -63, 50, 23, -1, 59, 0, + 4, -56, -68, -40, 8, 15, -20, 21, + 58, 3, -85, -64, -2, -26, 28, 32, + -53, -27, 70, 9, 87, -1, -15, -121, + -34, -69, 10, 56, -10, 61, -58, 105, + 25, -59, 28, -50, -32, 33, -14, 42, + -24, 9, -14, 10, -3, -28, 13, 56, + -59, 10, -68, 14, 37, 57, -20, -9, + -17, -80, 24, 20, -18, -1, 122, -8, + 5, -72, -57, -118, 2, 14, 30, -27, + -127, 38, 39, -79, 18, -37, 93, 46, + 27, 22, -9, 1, -9, 33, 45, -46, + 43, -45, -43, 15, -7, 21, 2, -59, + 34, 9, 41, -42, -28, 24, -12, -12, + 5, -16, 6, -58, -51, 37, 49, 25, + -51, -5, 31, -124, -32, -39, -48, 6, + 34, -86, -63, -9, 34, -4, -32, -27, + 22, 6, -3, 94, -44, 1, 42, 3, + -34, -39, 127, -32, 53, 72, 20, -42, + 
-75, 27, -21, -9, -25, 17, -61, 62, + -77, 2, -8, -94, -26, -42, 40, -69, + 0, 17, 56, -115, 1, -6, 12, 14, + 23, 10, 12, -56, 107, -29, -28, -33, + -45, -64, -105, -22, -19, 59, -10, -69, + 45, 4, -58, 32, -12, 22, -59, 35, + -22, 70, 19, 49, -17, -78, 4, 21, + 50, -49, 28, 18, 32, 72, 60, 5, + 47, 46, 30, 11, 73, 1, -57, -6, + 87, -97, 32, -23, 55, 26, -35, 9, + 37, 105, 10, 18, -118, 99, 125, 54, + 45, 109, -26, 26, -39, -56, 7, -19, + -38, 35, -128, 47, 17, 0, 97, -69, + -63, 27, 23, -19, -79, 73, -46, 3, + -22, 19, -41, 123, -44, 109, 33, 34, + -2, 45, -126, -43, -14, -13, -5, 40, + 26, -4, -127, -124, -19, -82, -111, 109, + 76, -43, -30, -94, 77, -82, 0, -1, + -7, 9, -36, -60, -19, 42, 28, -10, + -90, 20, 83, -4, -60, -120, -8, -37, + -18, -37, -84, -85, -46, -37, 22, -36, + 15, 71, -23, 14, -125, -2, -41, -9, + -40, 39, -43, -126, -6, -10, -94, -2, + 10, -113, 15, -79, -7, -51, 25, -30, + -40, -28, -121, -25, -45, -70, -11, -51, + -128, 46, 81, -92, -89, -9, -54, -18, + -16, -96, -84, 37, -77, -66, 21, -38, + -54, -3, -33, -33, -111, -100, -11, 3, + -6, 20, -128, -3, -54, -41, -21, 54, + -53, -128, -127, 28, -55, -107, -10, 41, + 112, 35, -25, 11, -43, 93, -61, 38, + -54, -52, -104, -1, 5, -60, -122, -101, + -22, -19, 104, -57, 7, -54, -84, -33, + 78, 34, -2, 15, -122, -14, -60, 114, + -75, -31, -42, -86, -67, -66, 38, -26, + -5, -124, -15, -3, -52, -127, -13, -50, + -108, 64, 37, -19, -51, -67, -16, 32, + -128, 31, -46, -48, -66, 7, -31, -61, + -5, -53, -113, 47, -45, 0, 102, -18, + -46, -101, 16, -37, -11, 29, 14, 15, + -20, 23, -24, -30, 43, 3, -28, 16, + -21, 18, -42, 2, -16, -69, -75, -75, + 27, 19, -20, 52, 37, -121, -22, 17, + 12, 45, -84, -50, 12, -126, -15, 19, + -24, -60, -23, -61, -67, 6, 122, -116, + 82, -35, 24, -30, -87, 26, -26, -17, + -42, 5, -25, -118, -17, -67, -60, 39, + 15, -100, 30, -121, -39, -67, -13, 33, + 20, -36, -10, -9, -94, -107, -22, 21, + -126, -34, -11, -83, -82, -46, -119, -35, + -86, 4, -123, 44, 23, -42, -84, -56, + 4, -53, 
-6, 39, -12, -50, -18, 37, + 42, 45, -123, -32, -6, -33, -39, -4, + -97, -68, 9, 74, -53, -125, -83, -2, + -22, 57, -21, -90, -69, -9, -76, 43, + -15, -1, -18, -20, 2, 18, 5, -93, + -31, 76, 5, -1, 5, 44, 14, -5, + 6, -48, 48, -2, 38, -59, -36, 77, + -50, -32, -11, 46, 33, 69, 62, -63, + 20, 110, 15, -42, 10, -4, 78, 8, + -24, -59, 33, 65, 34, 48, -22, 26, + 122, 51, -7, 10, -58, -2, 18, -27, + 7, -126, -84, -4, -40, 48, -124, -127, + -6, 46, 55, -13, -83, 84, 31, 2, + 34, -1, -19, -65, 38, 41, 29, 11, + -121, -55, 74, 20, 19, -2, 14, 35, + -15, -80, -33, -46, 36, 13, -49, 40, + -71, -41, 65, -107, 32, 17, 3, 8, + 45, -50, 24, 4, 2, 55, -43, 13, + -12, -20, 15, 9, 16, -26, -25, 126, + -9, 126, 31, 27, -20, -125, -41, -46, + -7, -39, -60, -31, -108, 8, -47, -12, + -89, 67, -8, 57, 7, -10, 95, -7, + -109, 29, 47, 93, 79, 44, 15, -45, + 50, -41, -13, 7, -56, 43, 48, -54, + 60, -10, -29, -47, 0, 82, 22, -84, + 2, -8, -39, 31, 4, -27, 12, 24, + -91, -8, 39, 3, -23, 5, -122, -74, + 64, -9, -42, -53, 27, -94, 18, 18, + -3, 73, 68, 18, 6, 98, -18, -2, + -78, 8, -45, 14, 54, 15, 58, 40, + 33, -31, 6, -10, 7, -6, 0, 41, + 37, 121, -29, 36, 14, -56, 63, 9, + 15, -122, -7, -67, -4, -115, 0, -41, + 19, 37, -13, 17, 11, -13, 10, -29, + -77, 18, -9, -10, -79, -3, -8, -40, + -25, -22, -9, 0, 27, -105, -23, -65, + 7, -35, 29, -13, -29, -34, -71, 55, + 37, -3, 24, 69, 7, 32, -24, 18, + -60, -20, 39, 101, 9, 28, -78, -68, + -88, 22, -19, 28, -87, -6, -33, 44, + 1, 37, -69, 8, 18, 48, -15, -41, + -110, 23, 43, -32, 5, -75, 10, -16, + 28, -31, -17, 68, 17, -38, 4, 47, + 1, -4, 33, 35, 83, 50, 38, 11, + 40, 51, 1, -20, -9, -35, 76, 34, + -4, -24, -27, -10, 6, 60, -44, 14, + 104, 43, 19, -23, -16, -127, 21, 13, + -11, -27, 30, -42, -6, 70, 27, -52, + 41, -13, 52, -22, 30, 29, -28, -85, + -55, -72, 125, -49, 6, -12, 23, -4, + -44, 32, 49, 68, 4, 13, -35, -61, + -6, 51, -124, 27, 82, 31, 28, 121, + -120, 26, -33, -42, -44, 107, 88, -16, + -98, 30, 25, -19, 6, -42, -77, -8, + 35, -13, 35, 
-23, -70, -71, -35, -45, + -54, 127, -103, -46, -74, -110, 32, 33, + 10, -38, -13, -29, -23, 56, 18, 59, + -110, 18, 11, -7, 41, 2, 126, 10, + 6, -20, -7, -46, -55, -4, 22, -6, + 35, -6, -79, -72, -11, 4, -8, 126, + -3, -18, 71, 21, -7, 2, -107, -123, + -22, -45, -68, 37, -45, 87, 48, -32, + -12, -54, 82, 76, -44, 26, -36, 48, + -127, -126, 65, -10, -28, -18, -49, -127, + -57, -75, -82, -57, -91, -17, 12, 18, + 6, -1, 0, 38, 25, -121, 77, 38, + -109, 24, -119, -17, -22, 10, -10, -1, + -72, -44, -26, 49, -58, 48, 14, 9, + -30, -44, -26, -48, -30, -36, 84, -87, + 54, 11, -42, -120, 56, -102, 17, 33, + 65, -1, 27, -41, 11, 2, 30, -44, + 12, -2, 86, -39, -59, -115, -46, -60, + -9, 75, -10, 4, -117, 62, -34, -126, + -33, -47, 54, 15, -127, 45, 3, -17, + -21, -55, -51, -104, -41, -20, 59, 21, + 11, 25, 38, 74, 40, -23, 8, 23, + 31, -75, -3, 56, 26, -38, 104, -11, + -8, 100, -5, -35, 39, -1, 67, -62, + 47, 48, -8, 101, 39, 45, -20, -1, + -22, 39, -3, -17, -7, -17, 19, 10, + 26, 4, -8, -5, 69, 4, 2, -3, + 13, 45, 19, -8, 90, 12, 39, 10, + 18, 1, -30, 15, 53, 19, -40, -20, + -4, 29, -7, 9, 14, 5, 16, 49, + -61, -31, -64, -36, -8, -8, 25, 21, + -26, 7, 0, 4, 2, 21, 2, -60, + 42, 5, -48, 79, 8, 29, 34, -26, + -79, 22, -43, 69, 78, -72, 30, -41, + -103, -15, -35, -52, 8, 29, 110, -51, + -32, 1, 40, 33, -46, 22, 48, 24, + 24, 58, -107, 6, -72, 39, 35, 8, + -28, 67, 28, 79, 118, 3, -45, 121, + 0, 18, -40, 93, -61, -28, -33, 43, + 19, 83, 45, -9, -123, 82, 39, -3, + -39, 16, -17, 25, -27, 25, -49, 71, + 45, -59, 8, -44, 12, -84, 4, -3, + 56, -97, -17, -84, 88, 61, 1, -22, + 116, 35, -31, -13, 26, 120, 10, 51, + -17, 38, 48, -24, -21, 124, -8, 43, + -33, 29, -34, 32, 27, 31, 64, -127, + 64, 4, 12, -50, -128, -33, -26, 45, + -124, -127, 3, 0, 4, -17, 0, 56, + -13, 86, -128, -46, -126, 52, 55, 54, + 13, -10, -27, -52, 31, 6, 51, 121, + 50, 9, -102, -32, -87, 8, 61, -4, + -19, 2, 24, 11, 74, -27, -114, -3, + 58, -1, 40, -112, 70, -24, -71, 29, + -24, -75, 55, 1, 64, -1, -18, 39, + 7, 
-78, -22, -111, -122, 108, 22, 37, + -13, 13, 17, -17, -113, 32, -9, 23, + 71, -93, 11, 110, 74, 72, -26, -6, + 50, 48, -51, -73, 120, 30, -8, 28, + 41, 82, 34, 63, 9, 51, 123, 57, + 16, -105, 29, 38, 18, -39, 47, 31, + 0, -56, 39, -36, -24, 62, 87, 39, + -25, -38, -67, 84, 6, 48, 107, 26, + -49, -94, 52, -16, -17, -4, 1, 68, + -19, 90, -7, 53, -19, 10, -16, -38, + -9, -55, 12, -31, 44, 19, 70, 28, + 28, -32, -11, 31, -3, -58, -3, 50, + 46, 28, 37, 6, 31, 5, -27, 54, + 26, 33, -43, 46, 126, 44, 80, 12, + 8, 73, 20, -62, 86, 42, -10, -15, + 25, -83, -25, -119, -54, 16, -35, -21, + -40, 35, 37, 5, 7, -8, -36, -20, + 43, 38, 61, -124, 65, 33, -30, 107, + 58, -26, -41, -14, -43, 29, 116, -40, + -55, 76, -83, 59, 36, -116, -1, -106, + -57, 121, 26, -67, -32, 77, -9, 35, + -111, 47, 36, -41, 80, -64, 11, 121, + -80, -4, -116, -71, -47, 56, -15, 27, + 24, -74, 11, 86, -20, 66, -93, 18, + -35, 21, -14, -4, -37, 8, 19, 64, + -81, -57, -33, 126, 32, -32, -12, -22, + 63, 43, -24, -36, -49, 29, -105, -115, + -73, -124, -43, 77, -99, 120, 4, 42, + -77, 46, 55, 61, 45, 108, -120, 34, + -49, -18, -61, -46, 8, -121, -79, 18, + 91, 45, 29, -61, -20, -118, 41, -20, + 105, -5, 46, -12, -4, 14, 8, 43, + 1, -38, 56, 88, -22, -94, 109, -71, + 13, -27, 16, 70, 74, -123, 60, 44, + -25, 18, -24, -85, 39, 72, 21, 22, + -35, 4, -18, 28, -1, -42, -53, 18, + 32, -5, 45, -21, 35, 123, 63, 7, + 45, -128, 1, -16, -86, 26, 25, -14, + 20, 33, 112, 38, -3, -49, 67, -1, + -20, -74, 6, -46, -23, -23, -114, 31, + -77, -15, -24, -67, 74, 42, -61, 22, + 25, -99, 50, 71, 6, -42, 58, 54, + -19, 17, -3, 60, 9, 56, 116, 32, + 82, -28, -10, 15, -43, 25, 52, -105, + 57, 4, -41, 2, -84, 43, 36, 15, + 2, -60, -48, -34, -49, 2, -71, 66, + 54, 33, 3, 60, -22, 18, 32, 30, + -8, 52, -64, -21, -16, -23, -13, 17, + -51, 0, 5, -58, 23, 5, -56, -5, + 72, -27, 83, 18, 42, -24, 20, 10, + -30, -17, 49, -23, 51, -54, -7, 11, + 20, 89, 21, 9, -83, -22, -121, -36, + 95, 29, 42, -71, -38, 6, 60, -79, + -119, 35, 8, 35, 14, 
110, 68, 35, + -62, -26, 14, 52, -51, -41, -3, -35, + 35, -34, 64, -81, 74, 10, -7, -45, + 33, 19, -55, -26, -71, -8, 19, -67, + -4, 49, -36, 25, -33, -40, -39, -10, + -25, 4, -30, 100, 8, 33, 8, 9, + 22, 5, -39, 61, 10, 79, 89, 42, + -27, -9, -15, -64, 24, 80, 26, -9, + 19, -11, -68, 43, -51, -14, 39, -16, + 54, -11, -121, 48, 64, -95, -31, -46, + 7, -43, -19, -61, -111, 79, -34, -126, + 31, 5, 60, 51, -108, 42, -44, -43, + -31, -81, 29, 24, -55, 11, -21, 0, + 50, 1, -66, -31, -40, 6, -35, -31, + -13, 41, -7, 74, -45, -123, 44, -25, + -78, -24, 40, 49, -3, -38, -25, 3, + -36, -93, 50, 53, 58, 81, -21, -24, + -47, 7, -15, -47, -13, 29, 124, -10, + 13, -41, -82, 8, -51, -20, 13, -48, + 57, 38, 26, 15, -78, -23, -46, -3, + -18, -16, -53, 6, 26, -43, 20, 12, + 6, -1, -56, 40, 81, -62, 37, -30, + 29, -10, 28, -44, -37, 11, -48, -7, + -29, -90, 23, -25, -35, 6, 49, -10, + -2, -30, 6, -79, -124, -13, 56, 44, + -8, -80, 66, -28, -56, -11, -41, -74, + 14, -4, -33, 43, 0, -30, -40, -37, + 10, 7, 30, -24, -13, 22, -33, 36, + -18, 64, -9, -25, -59, -72, -15, -31, + 21, 56, -6, -102, 0, 46, -35, -3, + -23, 39, -15, 117, 54, -10, 48, 19, + -42, -31, -26, 11, -2, -35, -94, -30, + 41, 26, 41, -23, -62, -47, -88, 51, + -16, 13, 0, 56, -21, -57, -26, -17, + 64, -5, -128, -24, -51, 53, 32, -76, + 39, -81, 87, -116, -32, -24, 63, 76, + -44, -128, 46, 69, -128, 12, 24, -128, + 5, 88, -16, 76, 117, -6, 5, 26, + -27, -36, -40, -73, 2, 4, -69, 39, + 127, 53, 93, -124, 16, 46, 19, 27, + 82, 102, -58, -96, -1, 123, 64, 13, + 121, 116, 127, -77, 6, -70, 19, -127, + -101, -23, -125, 126, -126, -17, 65, -65, + 13, 124, 64, 35, -95, 0, 127, 42, + -18, -37, -64, -47, 85, -126, -122, 40, + 7, -67, -12, 6, 1, 105, 86, 20, + 22, -16, -36, -22, -13, 1, -14, 39, + 70, 16, -40, -29, 16, 42, 8, -47, + 11, -50, 1, 1, -33, 5, -31, -91, + -19, 21, -42, -65, 0, -10, 56, 12, + -48, -19, -4, -12, 3, -9, 16, -15, + -64, 5, -73, 9, 9, -4, 23, 14, + -46, 21, -8, -43, -32, 43, -6, -2, + 15, 4, -19, 26, 11, -15, 
28, -13, + 18, 56, 34, 42, -18, 25, -32, -11, + 45, -37, 1, 43, 38, 4, -42, -15, + -40, 8, -19, 19, -42, -31, -20, 34, + -16, -50, 3, 18, -18, 17, -6, -15, + -21, 5, 4, 91, 80, 8, -6, -33, + -13, -82, -1, 42, 22, 6, 25, -16, + 7, 16, 22, 14, 12, 18, 57, -6, + 92, 13, 50, -54, 18, 58, 8, -18, + 14, 62, 16, 126, 16, 18, 57, 7, + 39, 104, -84, -4, -8, 29, -35, 13, + 24, 13, 75, 16, -32, 23, 5, 21, + -41, -94, -68, 31, 32, 0, -37, -5, + -12, 6, -44, -20, 27, 19, 48, -4, + -37, 23, 127, 24, 39, 97, 12, -1, + 6, -13, 31, 7, 27, 8, 15, 44, + 17, 97, 106, -68, -59, 20, -17, -13, + 10, 9, 8, 89, 29, 47, 47, 39, + 79, -18, 55, -5, 101, -2, 9, 105, + 21, 87, -55, 26, 35, 49, 123, -13, + 87, 10, -79, -40, -70, -50, 34, 6, + 29, -17, 71, -18, 9, 111, 2, -21, + -4, 127, -24, -52, -2, 104, -15, -27, + 126, -78, 41, 59, 52, 72, -23, -12, + -7, 52, -11, 17, -19, 79, -37, -23, + -8, -23, -13, -75, -40, -57, -125, 60, + -65, 24, -66, -72, -34, 79, 7, -29, + -24, -100, -62, -62, -54, 40, -55, 42, + 81, -51, 55, 21, 33, 93, 9, 85, + -24, 37, 20, 21, -5, 72, 9, 60, + -22, 15, 9, 80, 68, 10, -61, 57, + 67, 127, -11, 22, 108, 103, -42, 43, + -19, 97, 11, 33, 124, -61, 24, 23, + -35, 122, -72, -45, -90, -48, 15, -48, + -20, 48, 12, -4, 90, -3, 0, 13, + -33, 64, 86, 19, 74, 119, 120, -128, + -126, 37, 28, -7, 15, 72, -26, 62, + 35, 8, -79, 4, -42, 9, -5, -37, + -39, 72, -99, 10, 25, -1, -88, 66, + 39, 28, -73, -123, 37, -12, 29, 44, + 7, 75, -11, -98, 27, 61, -11, -10, + -52, 74, 18, -62, -14, 66, -6, -2, + -53, 13, 48, 13, 4, 4, -9, 16, + -47, -16, 32, -49, 6, -51, -15, 55, + 48, 64, 32, 41, -12, -72, 69, 7, + 17, 26, 45, 53, -110, 16, 11, 20, + -75, 48, 77, 1, -31, 1, 74, -66, + -8, 73, 44, 19, -40, 41, 27, 62, + 60, 93, 20, 12, 0, -39, -27, -1, + 7, -13, -1, -15, 84, -47, 21, 62, + -18, 25, 19, 6, 64, 97, -3, 9, + 12, 101, -14, 46, 30, 22, -9, 22, + 13, -6, -17, -26, -63, 21, 105, 49, + -25, -26, -43, -22, 26, 17, 54, 101, + -25, 44, -4, 40, -5, 50, 37, -21, + 62, 53, 19, 38, -60, -84, 
13, 40, + 15, 59, 105, 33, -14, 19, -17, 69, + -7, 13, -45, -20, -16, -16, 68, -17, + 32, 12, 2, 40, -35, -23, 29, 9, + -25, -38, 51, -62, 13, -84, -3, -48, + -76, -76, -65, -24, 16, -21, 14, 2, + 56, 18, -47, 49, -17, 31, 46, 18, + 78, 44, 55, -72, 1, 34, -15, -46, + -8, 6, -39, -24, 3, 121, 1, -11, + -41, -61, 57, -33, 4, 3, -43, -47, + -1, 97, -14, -14, -5, -13, -40, -15, + 31, 1, 22, -7, -83, -19, 15, -35, + -13, -2, 60, -31, 32, -20, -69, -50, + 27, 69, -64, -48, 51, 40, -96, -63, + -88, -24, 3, 12, 127, -111, 75, 54, + -44, 14, -19, -59, 8, -16, 19, -128, + -27, -76, -25, -2, -73, 125, 27, -127, + 12, -15, -55, 40, -126, -1, -81, 50, + 126, 100, 29, -6, 43, 32, 19, -2, + 52, -27, 28, -108, 53, -24, 0, 42, + -15, 20, -53, -51, 27, -108, -88, 49, + -6, 34, 0, -54, -48, 49, -53, 13, + 57, -64, 8, -19, 48, 8, -2, 13, + 44, 13, -8, 26, 62, -23, 35, -6, + -11, -16, 29, 94, 95, 22, 25, -42, + 57, 41, 24, 43, 87, 49, 19, -18, + 113, -11, 52, 102, 119, 31, 100, 47, + 6, 16, 36, -11, 21, -23, 59, 8, + 59, -14, 38, 82, 12, 58, -31, 39, + 86, -52, -12, 51, -7, -65, 29, -44, + -39, 12, 39, -49, 13, -41, 23, -9, + 24, 13, 43, -48, 37, 75, 9, 88, + -22, 22, -99, 24, 42, -20, 8, -31, + -17, 8, 48, -29, 64, -9, -32, -2, + -30, 11, 26, 124, -64, 25, 7, -5, + -11, 58, 73, -93, 64, 24, -43, -60, + -13, 8, 0, -16, -5, -83, -103, 108, + 27, -20, -20, 32, -49, -2, -25, 126, + -18, 19, 126, 3, 25, -27, 90, -24, + -38, -38, 48, 55, 24, -28, 0, -22, + -31, -8, 98, 36, -12, -7, 7, 16, + -16, -88, -10, 102, 79, 117, -25, -40, + -11, 26, -8, 19, 25, 35, 73, 47, + 33, 14, 82, -20, -42, 22, -74, -25, + 90, 16, -45, -66, 81, -65, -66, 34, + -17, 22, -13, 102, 39, -3, -51, 32, + -35, 4, -9, 53, -17, -92, 35, 41, + 66, 125, 35, -68, 13, -34, -22, 22, + 2, 13, -59, 26, 41, 12, -76, -78, + 4, 78, -6, -30, 1, 46, -22, 29, + -53, -44, -128, -83, 7, -128, -87, 45, + 66, 21, -74, -13, -125, 2, -9, 0, + -63, -67, 37, 32, -28, 1, 74, -109, + 9, 25, -51, 31, 98, -32, 10, -22, + -82, 30, 102, 41, -64, 
-43, 85, 40, + -13, -125, -98, -5, 4, -66, -69, 6, + -86, 125, 45, -81, -57, 76, -127, 91, + -57, 2, -22, -65, -36, 127, -54, -12, + -11, 6, -29, 20, 37, -13, -58, 32, + -55, -5, -67, -51, 1, 0, -77, -43, + 31, -43, 12, -61, 24, 53, 27, 6, + 19, 25, -29, -4, -11, -97, -64, -43, + 17, 19, -96, 13, 35, -48, -43, -34, + 75, 6, -30, -6, -29, 26, -18, -47, + -1, 42, -2, -29, -6, 40, -2, -95, + -22, -95, 7, 67, 12, -35, 25, -15, + -21, -72, -44, -89, -25, -97, 107, 4, + 52, 11, 121, 0, 75, 9, -18, 23, + -27, -50, -15, 5, 45, -31, -18, -55, + -30, 40, 27, 2, 94, -14, -24, 26, + 29, 86, 127, 16, -64, -38, 16, 110, + 7, 22, -47, 64, 87, -11, 65, 92, + -43, 89, 15, -68, 113, 68, -79, 80, + 55, -57, 48, 12, -61, 2, -71, 22, + 28, -25, 35, 8, 124, -66, 30, -112, + -127, 74, -7, 52, -88, -15, -4, -45, + -40, 124, 13, 61, 60, 29, -124, -73, + -99, -85, -125, 79, 21, 124, -35, 123, + 11, 28, 73, -62, 62, 44, 76, -1, + -58, -25, -58, -40, 99, -57, -126, 6, + -63, -124, -90, -105, -55, 84, -49, -17, + 36, 66, 39, 19, 83, -34, -107, 27, + -17, 20, 90, 11, 17, 50, 7, -47, + 35, -17, 24, 57, 29, -27, -77, 11, + -74, -49, -9, -39, 4, -25, 32, 117, + -8, 37, 52, -104, -13, -87, 0, -32, + 126, -10, 59, 15, -44, 26, 46, 8, + -4, 25, -85, 24, 15, 20, -27, -6, + 51, -8, 12, 50, -6, -23, 52, 109, + -47, 13, 69, -112, -54, 36, -15, 32, + -23, -106, -40, 19, 40, -49, -81, 23, + -28, -3, -61, -47, -62, -52, 90, -25, + 69, -122, 5, 119, -13, 9, 38, 17, + -5, 34, -18, -34, 23, -27, 65, 59, + -3, 16, 9, -16, 1, 72, -9, 36, + -43, -8, 36, 27, 20, 35, 81, 4, + 57, -25, 43, 0, 14, 31, -25, -32, + -9, 35, 55, 18, -78, 25, 80, 31, + 11, 15, 37, 0, -74, -10, 49, 44, + 1, 8, 120, -14, -2, 29, 122, 33, + 96, 23, -95, 39, 67, 8, -18, 73, + 9, -50, -105, 80, -23, 88, -23, 6, + 108, -15, -8, 36, 12, 27, 34, -22, + 46, 28, -10, 61, -60, 0, 20, -31, + -8, 32, -33, 87, 121, 35, 17, 45, + -19, 21, 61, 90, -33, 57, 38, 36, + -32, 119, -30, 25, 66, 14, -13, 28, + 15, -34, 57, -56, -16, 27, 89, -63, + 0, 3, -23, 
-37, -11, 43, 5, -65, + -15, 58, -15, 25, 1, -70, 21, -43, + -25, -52, -28, -52, 17, 2, 30, 5, + -36, 76, 63, 37, 79, 88, -4, -25, + 79, -77, 16, -20, -6, 48, 52, -47, + 66, 68, 4, -11, 10, -54, -60, -68, + -99, -16, 56, -67, 26, -70, 42, -13, + 32, 57, 86, 40, 44, -8, -28, 30, + 73, -29, -36, -22, 2, 4, 16, -52, + 87, -29, 44, 26, 25, -21, 19, -64, + -127, -11, 12, 47, 86, 97, -6, -34, + 2, 69, 0, 8, 29, -7, -117, 17, + -73, -19, 33, -86, 24, -124, 123, 94, + 46, -103, -59, -5, 40, -114, 3, 68, + 109, -2, -95, -33, -10, 8, 43, -10, + -19, 78, -66, 10, 58, -114, -54, 65, + 28, 67, -10, 63, 40, 18, 7, -26, + 32, -8, 3, 3, -35, 43, 15, -78, + -127, -2, -118, 47, 62, -124, -16, -57, + -111, -123, 3, -45, -108, -122, 83, -103, + 114, -124, -35, 35, -62, 55, -9, 87, + 127, -74, 56, -28, -41, 40, 6, 5, + -80, -6, 4, -4, -5, 57, -36, 17, + 22, 47, -15, 22, 28, 37, 115, 36, + 65, 21, -10, 0, 20, 18, 77, -51, + -30, 7, 52, 13, -61, 13, -30, 15, + 28, 8, -63, 15, -2, 13, -11, -2, + -27, -68, 110, -43, -7, 81, 83, 24, + 30, 33, -31, -28, -39, 15, 39, -46, + -2, 13, -21, 6, -17, -51, -22, -33, + -51, 54, 9, -54, 16, 35, 39, 44, + -84, 12, 45, -21, -37, -46, -18, -53, + 8, 21, 1, 11, 14, -71, -20, -5, + -19, -9, -23, -16, 7, -3, -8, -16, + -30, 43, 20, 43, 19, 17, -25, 11, + -18, -24, 3, -31, -31, 82, -92, -69, + 51, 3, -57, -82, 12, -86, -19, -10, + 27, 23, -11, 3, -6, -34, -33, -29, + -56, 13, 52, 36, -62, -34, -4, 17, + -45, 2, 71, -48, -26, 17, -45, 25, + -43, -18, 38, -23, 33, -11, 38, 28, + 15, -83, -7, -11, -46, -53, 46, -13, + 31, -36, -79, 57, -54, -70, -11, -42, + -30, -35, 35, -11, -12, 18, -16, 111, + -23, 22, 64, 28, 48, -31, 48, -18, + -7, 11, -34, 1, -46, 31, -6, 71, + -108, -121, -31, -57, -28, 61, 25, 32, + -32, -126, 5, -13, 52, 117, -56, 17, + 53, 9, -125, 48, 21, -42, -34, -16, + 70, -48, 5, -2, -15, 78, -29, -121, + 57, -28, 47, 19, -4, 45, 95, 13, + 30, 31, -26, 54, -49, -65, 45, -97, + 65, 98, -38, -30, -49, -36, 15, 34, + -94, -6, -57, -105, 73, 5, -49, 
1, + -71, 22, -86, -124, -23, -53, -48, 4, + 30, -1, 124, 29, 30, 68, 20, -128, + 26, 12, -95, -8, -128, 75, 60, 6, + -23, 5, 17, -12, -45, -72, 7, -18, + -19, 48, -21, -46, 79, 31, -127, 36, + -69, -75, -5, 102, 40, -29, 1, -21, + 18, 1, 60, -99, -102, 21, 24, -42, + 43, -1, 46, 16, 84, -53, -45, 33, + -12, -3, 30, -1, -26, 0, 21, -48, + -4, -6, 16, -41, 22, 9, -25, 6, + -8, 15, 8, 6, 49, -18, 20, 10, + 33, 62, 32, 15, 37, -16, 44, 5, + 6, -47, 12, -16, 2, 50, 72, 94, + -4, 38, -103, -17, 73, 50, 25, 7, + -1, -15, -39, 0, 19, 24, 19, -49, + 44, 3, 5, 85, -22, 33, 2, -16, + 18, 53, -61, -2, 4, 6, -22, 66, + -39, 5, -30, -73, 45, -62, 121, -60, + 113, -65, 26, 34, -44, 57, -23, 42, + -39, 31, 30, -79, 5, 31, 20, 22, + -24, 42, -59, 120, 108, -28, -8, -11, + 5, -3, 63, 64, -91, -2, -71, -21, + 10, -70, 58, -43, -16, -12, 73, 11, + -50, 15, -19, 41, -12, -9, -123, 35, + 19, -19, -40, 34, -19, -43, -109, 24, + 25, 1, 127, -7, -17, -91, 13, 3, + 125, 2, -105, -53, 11, -28, 15, 66, + 20, -10, 42, -45, 6, 20, -7, 38, + -8, 43, -49, -46, 15, 60, 9, -124, + 75, -31, -14, -70, -119, 38, 9, -44, + -126, -92, -96, -17, -128, 1, -110, 40, + 104, -23, -126, -61, 3, -78, 1, 75, + 14, 41, -14, -43, 41, -80, -34, 52, + -128, -13, -106, 53, 77, 82, -61, 34, + 32, -117, -29, -20, -44, 55, -78, 56, + 3, -16, -9, 119, 41, 27, -71, 0, + 7, -26, -41, -117, 73, -84, -122, 34, + 51, 89, 11, -48, -74, -14, 21, 22, + -90, -96, -4, -86, 124, 19, 14, 101, + 80, 10, -31, -32, 13, -32, 37, -21, + 36, -52, 27, 15, 38, 16, 32, -33, + -55, -28, -1, 123, -10, -54, 27, 46, + -7, 127, 39, -1, 40, 3, 40, -12, + -22, -30, -5, -30, -62, -64, -34, 123, + -61, 39, 42, -31, 24, 49, -21, 77, + -20, 45, 49, 85, 127, 79, -3, -23, + -28, 77, 55, -31, 70, 4, 16, 59, + 49, -36, 127, 85, -13, -27, -67, 67, + 25, 8, 32, -3, -25, 104, 36, 44, + 38, 48, -8, 22, -65, 71, 49, -10, + 3, 6, 77, 49, 21, 6, 103, -17, + -11, 40, -25, -2, 89, 54, 37, -68, + -53, 68, -7, 58, -120, 56, 0, 66, + -49, -70, -53, -59, -9, 14, 
-101, -38, + 46, -103, 1, -12, 30, -57, -17, -7, + 10, -39, -50, 0, -65, 68, -109, -82, + -38, -36, 47, 99, -36, 2, 30, 34, + 68, -39, 88, 6, 15, -20, 29, -40, + -11, 2, 62, -50, -34, -36, 70, 127, + -53, 55, 96, 38, 40, -46, -18, 65, + 73, 56, 32, -48, -79, 17, -23, 105, + -82, -84, 71, 31, 15, -61, 40, 65, + 8, 106, -47, 25, 5, 13, 12, 80, + 102, 40, -45, -37, -89, -49, -14, -67, + 89, 103, 66, -56, -26, 92, 33, 58, + -52, -78, 30, -47, 21, -116, -57, 17, + -44, -32, -104, -57, -46, 4, -127, 37, + -34, -42, -102, -72, 6, 126, -21, -94, + 42, -18, -62, -77, -21, -14, -79, -34, + 10, 2, -3, 27, 22, 9, -38, 5, + 68, -73, 86, 20, 44, -34, -26, 88, + 10, -12, 63, 32, -89, 39, -7, -56, + -6, -7, 6, -81, 43, -127, 37, 65, + -89, 22, -74, -57, -3, -13, 86, -4, + 83, -38, -85, 18, 26, 12, 4, -128, + 43, -1, 25, -41, 26, 75, 53, 18, + 0, 13, -8, 35, 43, 0, -17, 3, + -77, 75, 47, 119, -31, 9, 34, 26, + 39, 64, 65, 47, 60, 4, 65, 14, + -101, 10, -25, 102, 116, 76, 104, 33, + 98, 28, 16, 23, 21, 60, -15, 53, + 112, -3, -42, -19, 27, 12, 106, 16, + -33, -4, 101, 48, -9, 42, -19, 22, + 48, 48, 65, 43, -42, -127, -79, 28, + -30, -9, -28, -26, -34, 54, 5, -47, + 24, -41, 105, -27, 8, 85, 10, 52, + -9, 22, 18, 25, 99, 59, 22, -86, + -53, 20, 24, -77, 63, -6, 6, 41, + 122, 60, -37, -2, 17, -21, 107, 15, + -62, 7, 87, -12, -1, -68, 65, -71, + -47, -100, 33, 120, 17, 6, -25, 116, + 0, 39, 27, 19, 0, 47, 83, 37, + -12, 2, 83, 89, -70, -81, -21, 28, + 16, 25, -36, 48, -21, -15, -6, -84, + -22, -44, 53, 44, 58, -27, 34, 118, + -3, -106, 85, 71, -1, 111, -44, -29, + 127, 9, 81, -103, 11, 31, -125, -10, + 53, 19, 111, 30, -34, -84, -43, 2, + 36, -2, 122, 61, -96, 126, -34, 125, + 18, -41, -35, 6, 51, 38, -16, 39, + 33, 49, 42, 38, 11, 30, 46, 9, + -54, -17, 30, 8, 4, 16, 95, 23, + 28, 100, -29, -22, -29, 41, -2, 93, + 55, -58, 40, 6, -84, 1, 20, -46, + -3, -26, -33, 51, -101, 51, -15, -20, + 54, -78, -90, 31, 47, -89, -49, 1, + -29, 1, 24, -11, -30, 16, -6, -15, + 17, -57, 43, -33, 80, 
-93, -3, 31, + -124, 21, -66, -94, -6, 104, 72, 27, + -40, -85, -99, -57, 9, 7, -31, 1, + 17, 24, 13, 29, -76, 88, 27, 50, + 12, 56, 75, 102, -55, 20, 77, 25, + -7, -3, -21, 84, 1, -102, 56, -5, + 14, 24, 29, -82, -17, -80, -44, 80, + -19, 53, 23, 23, 72, -9, -14, 35, + -72, -10, -74, -27, -13, -53, -51, 37, + -34, 62, 63, -42, 18, -95, -118, 25, + -23, -14, -33, -22, 63, 20, -22, 15, + 18, -33, -124, -110, -36, 7, -20, 115, + -23, -8, -33, 53, -2, 49, -15, 18, + 95, 12, -18, 103, 42, 7, 15, 17, + -1, 26, -34, -6, -13, 26, 27, 19, + -67, -9, 122, 32, -34, 18, 8, 37, + 98, -7, -20, -54, -33, 59, -27, -44, + 25, -15, 6, -108, 2, -9, 46, -20, + 47, -17, -76, -10, -88, 11, -21, 40, + 26, 13, 57, 72, -56, -31, 121, -127, + -24, 60, 24, -23, -13, 61, -43, -25, + 14, -65, -36, -13, -2, 120, -12, -46, + 88, 127, 20, -21, 3, -24, 43, 38, + 102, -28, 101, -116, 16, -20, -22, -66, + 19, 89, 93, 123, 23, -111, -6, -45, + -69, -46, -42, 21, 54, -73, 73, -13, + -21, 88, 51, -19, -18, 78, -12, 8, + -4, 50, -82, 1, -35, -86, -31, -25, + -55, -3, -63, -17, 11, 32, -69, -29, + 22, 6, 37, -46, -6, -62, -50, 74, + -87, 43, -41, -84, 55, -43, 7, 12, + 52, 29, 25, 58, -19, -1, 34, 5, + -106, -62, 50, -77, 28, -11, 2, -16, + -52, 3, -50, -2, -20, -90, 13, -92, + 36, 11, -21, -78, -19, 24, -20, -12, + 28, 9, -19, -6, -123, 35, 61, -47, + -34, -3, 12, 31, 17, 17, 101, -35, + 47, 16, 45, 48, 0, 15, 16, -11, + 2, 40, -34, -92, 42, -78, -25, 0, + -42, 35, -126, -126, -6, -83, -128, -48, + -105, -29, -10, -74, 11, 27, 26, 42, + 1, 12, -24, -14, 0, 110, 29, -16, + -29, 59, -92, -35, -91, -26, 0, 11, + -1, 42, 54, 2, 15, 23, 42, -23, + -54, -29, 1, -120, 31, -16, -51, 71, + -32, -72, 51, 31, 40, -6, -13, 14, + 37, -8, -71, -20, 76, -17, 41, 28, + -23, -119, 1, 107, 5, 43, -13, 117, + -41, 8, -26, -14, 33, -18, 57, 60, + 7, 5, -11, -39, -32, -2, 15, 41, + 98, 111, -44, 46, 44, 20, 88, 18, + 62, 17, 107, 16, 5, -4, 40, 15, + -19, 9, 58, -77, -40, 119, -54, -35, + 45, 75, 12, -45, -32, -3, 72, -41, 
+ -21, 6, -5, 2, 106, -40, 41, 62, + 57, 8, 24, -60, -44, -61, 104, -48, + -34, -58, 62, -20, 6, -60, 40, -29, + 117, 76, -56, 22, 15, -1, 112, -1, + 23, 42, 57, -53, 75, 26, -11, 35, + 78, 85, 0, 19, 17, 3, -29, 20, + 28, -54, 6, 5, -11, 45, 34, 37, + -13, 2, 15, 13, 9, -28, 32, 8, + 27, -1, -63, 42, -33, -2, -19, 4, + -127, -15, -52, 38, -5, 35, -26, 44, + 13, 1, -39, 73, 17, 24, 33, 103, + -56, -41, -1, -28, -40, -81, 21, 10, + -2, -79, -88, -42, -3, -44, -63, 81, + 16, -42, -71, 37, -14, -27, -127, -6, + -108, -94, 19, -71, -80, -12, 65, -9, + -7, 92, 8, -125, -36, 2, 47, 0, + 19, 8, -65, -58, -60, -1, 17, 33, + 46, 78, -103, -127, 63, -3, -65, 16, + 13, -10, 16, 0, 27, -8, -104, 2, + -17, -44, -24, 41, 37, -4, -124, -4, + 18, -109, -125, 41, -7, 11, 29, 17, + -42, 24, -120, -70, -123, -26, -25, 14, + 66, -12, -38, -106, -29, 45, 5, -6, + -11, 31, 6, 13, -85, 44, -15, -2, + -3, -39, -4, 4, 8, 78, 51, -47, + 19, 3, -63, 22, -34, 28, -13, 22, + 15, 23, 5, 1, -87, -122, -66, -14, + -10, -1, 19, 40, -40, 2, 24, 13, + -23, -27, -125, -3, -12, 9, 18, -4, + -12, 8, -8, 66, -2, -13, -27, 20, + -38, 33, -24, -20, -11, -6, 11, -9, + -3, -30, -1, -16, 23, -60, -19, 16, + -33, 21, -34, 9, -2, -33, -43, 21, + -39, 1, 54, -24, -24, -22, -30, -1, + -9, -27, -8, 14, 40, -5, 10, 4, + -30, -6, -21, 34, -34, 52, -12, -6, + -47, -95, -34, 43, 6, 31, 32, -38, + -23, -27, 42, -22, -118, 19, 34, 23, + 5, -50, 22, 25, -19, -40, -17, -2, + 26, -15, -69, -126, 10, 33, 23, -7, + 13, -9, 26, -42, -62, -74, -9, 8, + -47, 38, -82, 38, -4, 11, 57, -10, + -12, -66, -25, -10, 24, 4, 16, 1, + 41, -18, -96, 18, -11, -13, 103, 11, + 17, -3, -76, 19, -81, 110, 18, -46, + -51, -53, 19, 3, -36, -31, -13, -32, + -44, -37, -70, 43, -128, -11, 35, 2, + -58, -33, 20, -62, -105, 11, -70, -77, + 44, 125, -15, -25, 18, -60, 69, -96, + -125, -4, 95, -69, 13, -30, -43, -53, + -61, -6, -8, 60, 5, -75, -125, 5, + -12, 45, -101, 111, 11, -98, -17, 39, + -85, -7, -46, -56, -37, -97, -45, -37, + -16, 80, 64, 110, 
-74, -30, -34, -24, + -95, 2, 4, 56, -71, 39, -39, -14, + -46, -22, -124, -115, 8, 19, 69, -11, + -70, -68, 120, 95, -100, 112, -103, -70, + -35, -128, -97, -71, 29, -72, -123, -48, + 10, -36, -51, -8, -8, -26, -15, -26, + -5, 34, 6, 38, 13, 67, -14, -35, + 28, 21, 127, 6, -28, 41, 20, 8, + -48, -33, 1, 28, 9, 15, 48, -48, + 88, 91, -19, 54, -22, -80, 35, -37, + -2, 7, -44, 18, -67, 36, 50, 21, + -60, 26, 26, 34, -29, 8, 28, 2, + -10, 19, 26, 43, 36, 0, 116, -24, + 12, 13, 1, -33, -7, 0, -4, -13, + 3, 8, 4, 32, 23, -72, -70, 42, + 54, -16, -21, -48, -38, 22, -46, 22, + -40, -1, -64, 0, 40, -27, -26, 9, + 12, 35, 59, -48, 46, -18, -25, 13, + -66, -14, -55, 57, 24, -67, -15, -15, + -38, -84, -11, 1, -45, 19, -60, -49, + 24, 38, 22, 53, -52, 20, -1, 53, + 82, 19, 43, 13, -32, -56, -51, 56, + 16, -36, -33, -53, 7, -22, -20, -14, + -34, 11, 24, 22, -21, -127, -51, 15, + 47, -20, -49, 22, -56, 40, -44, -20, + -2, -65, 9, -2, -27, 22, -1, -12, + -9, -17, -55, 89, -76, -16, -10, 37, + -35, 0, 4, 16, 52, -10, 23, 39, + -25, 9, 51, 79, -6, 21, -18, 10, + 64, -37, -33, -48, 24, 78, 15, 17, + -34, 81, -35, -82, 32, 110, 59, -126, + -25, -124, 11, -55, 20, 63, 100, -10, + -123, -69, -6, 5, -74, 26, -14, 16, + -12, -13, -115, -76, -70, -124, -19, -81, + 40, -96, -66, 62, -32, -79, 28, 59, + 12, 31, -53, 119, 74, 26, -17, 44, + 81, 100, -19, -25, 41, -11, -103, 48, + 41, -8, -26, 4, -112, 53, 31, 28, + 50, -73, -9, -127, 1, -84, -109, -35, + -80, 9, -38, 42, -105, 89, 32, 94, + 68, -73, -8, 53, -32, -2, -8, -127, + 101, -2, 43, 110, -13, 43, 27, -4, + -55, -12, 81, 52, 114, 30, 66, -67, + 107, 88, 44, -63, -14, -92, -27, -58, + -16, 82, 98, -26, -9, 0, -108, -82, + -11, 23, 26, -22, -2, -82, 93, 89, + -67, -31, 5, 34, 36, 32, -49, -53, + -81, 86, 43, 34, 11, 59, -46, 4, + -60, 14, 12, 30, 34, 14, 29, 10, + -34, -105, 32, 122, 7, 63, 24, 43, + 55, 34, 61, -26, 70, 9, 8, -3, + -40, 28, -42, -35, -3, 53, 35, -9, + -49, -26, 39, 61, -48, 64, 81, 18, + 78, 27, -3, 0, -66, 108, 40, 28, 
+ -45, 57, -17, 44, -56, -12, 18, -93, + -43, -7, 32, -38, 55, -17, 121, 71, + -27, 39, 36, -13, 26, -27, -13, 52, + 30, 3, -26, 11, 21, 52, -7, 127, + -5, -84, 62, 47, -93, 89, -90, -8, + -18, 40, 21, -86, -10, -47, 28, -48, + 69, -76, -66, 55, 28, -87, -11, -53, + 38, -29, 13, 103, -79, -126, -39, 82, + -87, -6, 18, -54, 65, -24, 61, 0, + -60, 34, 43, -5, -15, 36, 25, 26, + 60, 18, -28, -16, -9, 60, 36, 13, + 34, 57, -43, 7, 82, 53, -7, -24, + 33, 63, 49, -79, 110, 34, -11, 94, + -80, -43, 102, 22, 3, 30, 78, -60, + 127, -126, -48, -15, -20, 63, -98, 27, + -60, 5, -7, 9, -39, 55, 20, 43, + 32, -57, 34, 41, 20, -2, 53, -6, + 2, 26, -105, -41, 21, -14, -84, -9, + 57, 14, -63, 36, -62, 8, -45, 92, + -56, -1, 23, -17, -45, 42, -30, -14, + 60, 17, -5, 87, -42, -18, 73, 5, + 12, 6, 11, -2, -7, 13, -19, 40, + 16, 87, 12, -76, 48, -18, -37, -105, + 86, 4, -43, 20, 35, -53, -25, 49, + -31, -5, 71, 57, -127, 51, -49, -10, + -30, -41, -9, 15, 16, 40, 17, 13, + 0, -3, 1, 43, -76, 5, 98, 20, + -4, 26, 53, 19, -65, 40, 47, -58, + 79, 15, -19, 80, -28, 117, 50, -28, + 11, -32, 28, -27, -72, -13, 9, 22, + -53, 13, 8, -29, -37, -34, 46, -68, + -18, -126, -89, -87, -36, -19, -2, -45, + 16, -3, 1, 17, 4, 3, -72, -62, + 51, -48, 3, -87, -23, 13, -117, 17, + 24, -76, -42, -18, -128, 25, -95, -45, + -17, -48, -112, 106, 12, -97, -23, -18, + 45, -42, 25, 65, -67, 84, -67, -110, + -7, 64, -8, -75, 83, 10, 49, 121, + -82, 125, 52, 82, -49, 20, 79, -3, + 86, 60, 14, 35, 33, -46, -39, 85, + 53, -17, 4, 59, 12, 111, -14, -29, + -23, 57, -24, -37, -125, 86, -58, 82, + 77, 30, 67, 1, 113, 77, 49, 72, + -35, -90, 3, 106, 5, -85, -21, -87, + -76, -40, 32, 4, -76, 1, 13, -70, + -23, 4, -29, -12, -28, -18, -75, 22, + -100, -77, 17, 87, -56, -25, -13, 13, + -20, -37, -39, -75, -56, -1, 44, -25, + 24, -12, -27, -35, -59, -122, -15, -42, + 53, 0, -38, -28, -8, 75, 13, -78, + 36, -110, 30, 19, -50, -14, -127, -22, + -69, 34, -25, -12, -6, -7, 49, -15, + -36, -19, 54, -55, -22, -117, -103, -36, + -26, 
37, -41, -112, -17, -28, 15, -11, + 6, -58, 3, -39, -74, -58, -60, -22, + 25, 0, -24, -99, -22, 28, -58, -71, + 35, 29, -9, 16, -20, -34, -45, 31, + 77, -94, -70, 19, -11, 112, -26, -11, + -39, -19, -58, 57, -41, 41, -93, -35, + 39, 22, -38, 80, -33, -91, 13, -7, + 5, 11, 23, 0, -43, 127, 27, -38, + 18, 12, 127, 11, 13, -88, 18, 47, + -19, -17, 15, -125, -34, -14, 32, 15, + 59, 126, -36, 39, 16, -124, -85, 14, + 5, -5, 32, 50, 114, 54, -82, -26, + -30, 31, 32, 54, 3, 4, -1, 24, + 27, 0, 100, 89, -4, -16, 18, 0, + 22, -35, 35, 10, -6, -79, 6, 17, + 26, 0, 13, 42, 1, -21, 49, -67, + -110, -7, -25, -8, -30, 57, 17, 5, + 50, -7, 62, 14, 49, 9, 3, -1, + 8, 41, -11, 16, -23, -23, 31, -12, + 44, -127, 27, -35, 6, -126, -55, 0, + -4, -14, -126, -123, 26, -86, 11, 40, + -4, 82, -105, -44, 23, 38, -119, 23, + 8, -95, -48, 6, 81, 14, 113, -32, + -5, -48, 105, -59, -30, 55, -126, -24, + -30, 11, 0, -24, 50, -87, 14, -21, + -55, -90, -27, -126, 10, 2, -7, -10, + -24, 14, -19, 10, -14, 12, 3, 14, + 20, 29, -50, -10, -28, 6, -67, -107, + 35, 63, 84, 56, 37, -109, 36, -2, + 77, -34, -48, 87, 7, 21, 66, -18, + 112, 49, -59, -5, -99, 27, 33, -16, + 18, -35, 36, -39, -124, -36, -56, -64, + -50, 8, 88, -102, 38, 57, -68, 35, + 127, -45, -6, -36, -17, -2, -111, -22, + -124, -38, 44, -28, -75, -43, 58, -21, + -29, -110, 46, -107, -83, 121, 46, -14, + -50, -83, -73, 120, 7, -41, 0, 13, + 31, 2, 43, -63, 39, -68, -110, 41, + -88, 10, -17, -23, 57, 79, -43, -15, + 45, 33, -73, -56, -63, -32, 83, -32, + -16, -45, 55, 59, 19, 80, 3, -11, + -110, -29, 115, 85, -29, 125, 3, -25, + -9, -68, 126, -43, -19, 16, -28, 60, + 18, -3, 28, -21, 77, 8, 45, 72, + -29, 32, -1, -27, 112, 4, 69, 30, + -26, 55, 44, 13, 35, 40, -41, 5, + -74, 38, -71, 30, 74, 11, 54, -25, + -34, 54, 87, 34, 8, 20, -2, 13, + 2, 10, 46, 40, 95, 33, 2, 6, + -55, -36, 9, 49, 40, -52, -21, 108, + 10, 32, 14, -13, 40, 6, 114, 63, + 16, 62, -31, 57, -18, 6, 6, 72, + 9, 4, 38, 25, 49, -21, 66, -7, + 11, -12, -23, 81, -48, 51, 125, 
-14, + 80, 9, -19, 39, 49, 92, 9, -13, + 64, 43, 56, 98, 0, 7, -67, 15, + 118, 63, -48, -46, -73, -19, -4, 88, + -7, 3, 1, -8, 82, 29, 35, -31, + 30, 30, -23, -9, 57, -51, 71, 33, + -37, 76, 11, -69, 42, 117, -27, -34, + -15, -13, -56, -90, -50, 82, 104, 98, + -33, 10, 37, -69, 2, 7, 23, 10, + 99, -66, -12, -54, -54, 8, -36, 4, + -33, -30, 9, -72, 26, -53, 35, -42, + 27, 18, -36, 41, -35, -22, -14, -6, + 22, -69, -111, -10, -12, 55, 47, -54, + -72, -21, 28, 13, 29, -43, 33, -62, + -6, -20, 29, -17, 31, -92, -9, 36, + -28, -49, -25, -121, -91, 27, -31, -17, + 45, -80, -37, -16, -34, 17, 28, -48, + 68, -51, 30, -23, 8, 17, -16, 23, + -25, -9, 114, -21, -18, -122, -81, -17, + -28, -42, -86, -107, -79, -60, 7, -35, + 15, 44, 4, 38, -124, -37, -84, 31, + -8, -23, 10, -76, 8, 44, -123, -50, + 45, -47, 59, 34, -1, -9, 24, -65, + 41, -73, 9, 94, 5, 53, 20, -9, + 28, 7, -16, 68, -24, 6, -5, -35, + -38, 43, -100, 29, 13, 35, 41, -59, + -7, 28, -24, 38, 94, -85, -85, 97, + -125, 78, 46, 90, 70, -77, 47, -12, + 2, -97, 24, 88, -18, 14, 32, 97, + 65, -22, 27, 63, 25, 11, -13, 52, + 43, 65, 32, -9, 63, -13, -93, -61, + -33, 30, 66, -5, 100, 12, 21, -7, + -62, -41, 8, -26, 31, 36, 54, 6, + 52, 106, -82, 38, 27, 26, 26, 26, + 54, 19, 43, 6, 29, 125, 93, -49, + -6, 127, 33, -10, 26, 90, 75, 0, + 106, -40, 3, 43, 52, 52, 48, -58, + 34, 0, 58, 38, 44, 57, 31, 91, + -1, 80, -72, -45, -39, 25, -75, -52, + 12, 30, 47, 2, -15, -36, 67, 89, + 36, 21, -22, 25, 18, 16, -25, 7, + 90, 10, 17, 18, -60, -9, 37, 84, + 59, 2, 16, -27, -77, -1, 43, -31, + 23, -46, -34, 127, 117, 127, 82, 27, + 18, -7, 46, -71, 87, 35, 46, -85, + -60, -2, 11, -72, 32, -56, -105, -77, + 51, 53, -46, 21, -42, 17, -12, -69, + 19, 7, 58, -43, -104, -68, 5, -3, + -29, 27, -126, 88, 0, 127, -118, 52, + 57, -10, -33, 41, 12, -28, 5, -5, + -57, 23, 21, -18, 34, -48, 28, -125, + 120, -59, 25, 108, 71, -33, 80, 38, + -46, -12, 46, 6, -68, -44, 116, 106, + 20, 93, -127, -56, 124, -105, -96, 122, + 34, -57, -27, -67, 29, -124, 
-90, 120, + -88, 29, -118, 8, 33, -90, -64, 6, + 43, -45, 14, 127, -47, 40, -26, -79, + -81, -3, 2, -51, -4, 12, 23, 66, + 37, -118, -7, 7, 100, 13, 8, 85, + -85, -92, -123, -111, -124, -2, 15, 45, + -57, -58, 10, 82, -24, 46, 30, 127, + 80, 41, -9, -64, 33, -7, -4, 10, + -21, 44, 15, -22, 26, 59, 31, 28, + -20, 18, 81, -50, 0, -69, 43, -18, + -23, -85, -28, 86, -62, 35, 20, 106, + -10, -74, 97, -17, -68, -40, -24, -78, + 4, -116, -56, -86, 78, 34, -94, 49, + 5, -97, -94, 62, 23, -13, -83, 53, + 10, -12, 54, -27, -8, -16, 49, -38, + 20, -43, 107, 9, -64, 22, 18, -53, + 10, -7, -57, 33, 3, -127, 59, 7, + 7, -57, 49, 14, 7, 29, -22, -82, + 86, -63, -1, -10, 0, 29, -25, -72, + -69, -51, -124, 20, 49, -32, 35, 11, + 4, 29, -109, -44, -67, 51, -45, 120, + 7, -51, 96, 11, -1, 12, 45, -40, + -50, -46, 8, 49, -9, 10, -37, 69, + 50, -45, 22, -12, -38, -19, 38, -13, + 7, 82, -112, -16, -31, -69, 72, 0, + -58, 31, -8, 110, 20, -118, 77, -27, + -61, 40, -18, -32, -55, 33, -39, 25, + -49, 16, 0, -9, 9, 7, 48, 63, + 99, 41, -2, -30, -32, 35, -34, 5, + -71, -45, -50, 10, -13, -58, -24, -31, + 48, 13, 17, 44, 3, -7, 44, 63, + 120, -55, 99, 2, 68, 9, -5, -122, + 61, 29, 27, 3, 37, -56, -3, -43, + -11, 53, 10, 54, -66, 54, 34, 39, + 2, 29, -80, 55, 13, 29, -55, -6, + 124, 2, -39, 29, -126, -44, 25, -46, + 39, 88, -55, 126, 10, 97, 83, 48, + 25, -111, -41, 70, 4, -88, 33, -34, + -29, 28, 103, 101, -50, 13, 79, 13, + -64, -10, 6, -37, 4, 37, -11, -10, + -6, -11, -32, 56, 45, -18, -10, -87, + -100, -22, -38, -79, 23, -118, 14, 27, + -22, 14, 32, -34, 16, -78, -66, 4, + 16, -62, -20, -18, -23, 57, -4, 10, + 62, -39, 37, 71, -1, -77, 4, -128, + -69, -70, -117, 104, 52, 21, -21, -16, + 6, 17, 5, 2, 64, 72, 16, -92, + -73, -109, 23, 2, -38, 9, -111, -10, + 4, -82, 18, 100, 29, 47, -19, 100, + -94, 21, 106, 68, -3, -112, 58, 13, + 47, 55, -119, 7, -11, 102, -61, -30, + 85, -34, -73, 48, -116, -31, 49, 17, + -1, -37, -30, 68, -120, -66, -96, 126, + 65, 69, 104, -19, -88, -73, 30, 89, + -75, 
0, 29, 103, -81, -56, -68, -58, + 26, 8, 8, 40, -8, -3, -33, 38, + -15, -13, 125, 13, 24, -25, -17, 25, + -6, -43, 6, -14, -47, -73, -67, -83, + 25, 19, 32, -97, -15, 15, -4, 126, + -46, -6, 42, 32, 48, 13, -55, -6, + -33, 12, -81, -56, 85, 100, -48, 39, + -25, -98, -2, 47, 108, 87, 121, 32, + -114, -34, -13, 12, 97, -47, 1, -10, + 13, -115, -23, -30, -15, -85, 15, 23, + -15, 82, -16, -8, 4, -31, 9, 102, + 105, 2, -33, -10, -63, -89, 51, 75, + -12, 0, 28, 100, -78, 95, 47, -90, + 74, 113, -19, 54, -58, -21, 20, 116, + 29, -17, 55, -62, 46, 27, 13, 37, + -4, -123, -118, -68, 33, -52, -1, -108, + 62, -46, 0, 21, 13, -35, 77, 23, + -12, -51, 50, -33, 113, -65, -104, 18, + -26, -49, -58, -3, -29, -19, -59, -45, + -45, -3, -61, -12, 120, -39, 54, -10, + 74, -61, -57, -13, -43, 18, -6, 6, + -34, -18, 28, -54, 16, 36, 65, -57, + -3, -22, 71, -40, -13, -110, -9, -52, + 35, -85, -19, 10, -5, 1, 3, -34, + -122, -36, -28, 26, 2, 45, 36, -76, + -105, 123, -77, 3, 26, 83, -16, 17, + 7, 16, 37, -46, -67, -40, 33, -52, + -14, -11, -11, -53, -63, 28, -6, 35, + 53, 62, 71, 47, 70, 19, 4, 23, + -70, -65, -33, -37, -19, -38, -125, 108, + 35, -36, 8, -28, 17, 4, -73, 19, + 4, 5, -52, 5, 44, -25, 14, 13, + 27, -127, -69, -94, -62, -22, -26, 86, + -37, -25, 51, 29, 7, 15, 47, 105, + 49, -12, -44, 3, -108, 23, 60, -11, + -95, 20, 37, -15, -68, -44, 127, 17, + -19, -55, 44, 26, -14, 22, 36, -84, + 23, 10, -46, 68, -9, 121, -7, -12, + 29, -11, 61, 2, -14, 28, -30, -102, + 23, -105, 0, -35, 18, 8, 15, -4, + 11, 33, -26, -89, 11, -14, -5, -112, + -8, -28, -43, 34, -10, -35, -30, -37, + -57, 14, 25, 45, -77, 28, 4, -50, + -4, 87, -24, -97, 14, -36, 53, 31, + 18, -37, 16, -51, 11, 41, -87, -53, + -98, 1, 37, -30, -53, -29, 33, -4, + -40, -67, -13, -52, -56, 8, 60, 35, + -25, 44, 2, -6, -1, -31, -27, -41, + -26, 8, -43, -2, -3, 1, 24, -31, + -6, 9, -126, 2, -48, -33, 48, 6, + 3, 32, 29, -50, 28, -31, -17, 27, + 41, -122, -90, -41, 9, 86, -22, -89, + 20, -61, -33, -21, 62, 41, -86, 3, + -42, 14, 
25, 48, -125, 9, 25, -2, + 9, -66, -11, -98, 2, -65, -4, -14, + 6, -2, -66, -83, 38, -22, -13, -21, + -8, 41, -125, -24, -43, -100, 72, -52, + -43, -4, -1, -54, -59, 11, 96, -20, + 48, 20, 60, 13, -15, 57, -30, 11, + -50, 121, -116, -31, 19, -32, -4, 9, + -111, 12, -84, -106, -54, -59, -67, -24, + 10, 22, -25, -12, 16, -123, 15, 7, + -8, 14, -21, -45, 63, 47, 3, -14, + 6, 100, 86, 40, 39, -23, -9, -127, + 24, 20, 15, 41, 69, 13, 2, -36, + 25, -18, 11, 23, 27, 51, -74, 8, + -69, 12, 61, 74, -14, 34, -7, 14, + 1, -5, 2, -28, 46, 63, 56, 6, + -42, 34, 81, 18, -70, 6, 20, -26, + 108, 11, -31, -1, -46, -29, -38, 22, + -9, -33, 43, -67, 71, -20, 30, -114, + 62, -1, 97, -40, 23, -99, 5, 11, + -38, -55, 70, 9, -48, 9, 54, 23, + -24, -48, -120, 67, 27, -49, -55, -11, + 88, -36, -4, -10, 80, 14, -65, -2, + -72, -46, -91, 99, -23, -78, -39, -84, + -11, -104, 27, -73, -12, 26, -1, -103, + -10, 2, 69, 43, 105, 19, -82, -45, + 33, -26, 41, -38, 119, 16, -62, -34, + 10, -10, 20, 38, 87, -67, -26, 91, + 30, 113, -11, -28, -48, -93, -120, 43, + -88, 71, -80, 7, -21, -1, 17, 50, + -26, 55, 125, -35, -40, 0, -44, -15, + 33, 5, -83, 7, 64, -49, 19, 56, + 32, -25, -27, -15, -5, -16, -19, 66, + 9, 114, -126, 12, -47, 15, 43, 18, + 28, 52, 34, -50, 11, -11, -29, -43, + -14, -84, -39, -25, -75, -101, 111, 21, + 33, -44, 90, 14, -36, 31, 29, -31, + -17, 22, 17, 48, -93, 30, 42, 30, + -86, -79, -12, -98, 49, 68, -53, -69, + 53, 124, -29, 65, -89, -35, -1, 37, + -5, -111, -38, -76, -28, 6, 24, 47, + -61, 19, 12, 23, -49, -18, -77, 18, + 33, 11, -17, -35, -16, 78, -32, 48, + 62, -69, -78, -18, 72, -122, -29, 3, + 11, 6, -17, 3, -74, 51, -10, 29, + 21, 46, -31, 49, -1, 22, -69, 8, + -2, -74, -29, 29, 25, -29, 13, 20, + 37, 9, 1, 16, 10, -20, -13, -13, + -24, 37, -11, 42, 35, 36, -12, 24, + -33, 43, 65, 64, 25, 23, 9, 27, + -124, 1, 92, 16, -4, 23, -5, 17, + 5, 42, 32, 12, 53, -3, 41, 1, + 37, 32, -11, 23, 37, 80, 21, 127, + 29, -12, -102, 127, -21, 9, -9, 33, + 41, 16, -4, 33, 57, 29, -55, 
-60, + 11, -33, -25, -26, 35, -2, 116, -6, + -52, -31, -3, 14, -63, 13, 73, 76, + 25, 5, 29, 8, 17, 50, -27, 88, + -96, -68, -67, 5, -56, 28, 25, -88, + -11, -19, 35, -78, 27, 11, -37, -21, + 2, -11, -43, 78, -93, 34, 27, 5, + -21, 34, -48, -11, 86, 4, 20, -18, + -6, 39, 15, 14, 40, 36, -40, 46, + 127, 12, 3, 1, -14, 33, 40, -3, + 16, -58, 71, -38, -1, -103, -22, -16, + 35, 10, 127, 59, -38, -67, -29, -126, + 58, -11, -4, 14, -85, 22, 75, 10, + 4, 19, 78, 32, -31, -33, 127, 7, + 4, 48, -20, 13, 94, 45, -10, -9, + 7, -55, -6, -43, 23, -58, 76, -1, + 4, 21, 7, 65, -20, 70, -7, 4, + 38, 104, 22, -44, 7, -8, 12, 9, + 65, 31, -6, -1, 29, 36, -126, -45, + -25, 32, 45, 3, 7, -28, 4, 29, + -80, -34, -64, -3, 0, -63, -65, -11, + -6, 38, -119, 53, 17, -24, 19, 4, + -53, 85, 27, 26, 45, -51, 5, -126, + 17, 32, 22, -52, 2, 7, 22, 60, + 74, 74, 38, 53, 13, 4, 44, 32, + 49, -46, 95, 70, 68, -50, -128, -10, + -79, 125, 26, 37, 24, -51, -12, 52, + 27, 13, -16, -4, -2, 16, 2, 33, + 10, -12, -5, 25, -40, 5, -31, 31, + 2, 60, -93, -23, -118, 34, 3, 18, + 12, 11, -38, -7, -35, 6, 18, -37, + -99, -68, 5, -103, -117, -108, -114, 43, + -115, -72, 46, 2, 50, 4, 37, 8, + -51, 10, -68, -29, 18, -63, -80, 34, + 36, -49, -110, -28, 3, -40, -125, -9, + 74, 10, 20, -25, 37, 74, -29, -25, + 66, -34, -48, -27, -35, 0, 30, -26, + -121, 6, -123, 14, 11, -49, -13, -102, + 18, -23, -98, -1, -8, 67, 52, 8, + -27, 33, -57, 7, -126, -13, -47, 12, + 22, -109, -68, -34, -94, -122, 20, -63, + -18, 8, -124, -61, -32, -40, 117, -12, + -7, -36, 16, 89, -24, 71, -46, -122, + 12, -101, -38, -70, -57, -39, -116, -60, + -125, -35, 37, -114, -14, -127, -77, 2, + -50, 115, 9, -79, -22, -74, -13, 103, + -79, 18, 11, -123, -26, -4, -1, 8, + -63, 124, 36, -32, 41, -99, -17, -18, + -124, 7, 122, -127, -23, 20, 41, -19, + 106, -41, 7, -125, -21, -126, -24, -26, + -8, 26, -126, 125, -34, 33, 111, 46, + 50, -20, -79, 6, -127, 97, -13, 52, + -32, -43, 2, 18, 17, -126, 35, 8, + 67, -123, -127, -103, 3, -71, -32, 29, + 6, 
22, -110, -68, -6, -9, -44, 58, + -121, -127, -13, -50, -54, 22, -25, -78, + -7, 39, 104, -108, -94, -127, -36, 7, + 5, 3, -124, -17, 18, 42, 1, -22, + 19, 55, -99, -125, 38, -19, -38, 17, + 5, -23, 7, 87, -125, -127, 7, 7, + -121, 16, -60, -9, 7, 33, 18, 30, + -62, 19, -125, -112, -2, -14, -25, 60, + 1, -30, -124, -22, -65, -125, 64, 79, + -19, 9, -26, -127, -56, -84, 122, 63, + -2, -16, 26, 6, 35, -33, -21, -84, + -44, 72, -55, 52, -34, 66, 32, 17, + -37, -7, -41, 113, 1, -20, -58, -59, + 68, 29, -125, -56, 42, -92, 3, -6, + 19, -24, -11, 61, 39, 48, 30, -3, + -51, 15, 2, 11, -17, 3, 52, -7, + 51, -11, 62, 30, 21, 71, 68, -27, + 19, -32, -11, 40, -1, 11, 13, -10, + 65, -39, 119, -114, 43, 19, -74, 62, + -45, -54, 37, -28, -120, 26, 63, 26, + 58, 9, 20, -5, -12, 14, -5, 126, + 16, 72, 40, -44, 52, -24, -7, 46, + -114, 101, 0, 15, 37, -112, 37, -46, + -76, 6, 70, -25, -24, 75, -43, -29, + 76, -63, 93, 33, -24, 2, 2, 72, + -5, 57, 73, -31, -36, -103, 34, 10, + -25, 36, 20, -10, -36, 6, -83, -3, + 13, -18, -73, 66, 46, -31, -51, -41, + -32, -17, 45, 41, -14, 30, -121, 10, + -39, -66, 15, 10, -77, -2, 43, 11, + 28, 3, -77, 52, -26, 47, -75, 73, + -4, -110, -111, -20, -66, 27, -8, 17, + -3, 66, 22, 54, 21, 0, 68, -126, + 85, -27, 82, 11, -25, 2, -109, -60, + -11, 35, 12, 3, 77, -43, -3, -68, + -30, -53, 21, -21, -75, -8, -54, -11, + -53, -16, -88, -7, -19, 14, 52, 12, + 27, 18, -103, -128, 7, 1, -33, -7, + 15, 32, -1, 90, 8, 41, 106, 18, + 2, -11, -41, -93, 10, -18, -48, -12, + 54, 10, -4, 21, 72, -18, -125, 10, + 54, 0, -5, -66, -17, 57, -126, -4, + 29, 19, -5, 80, -34, 13, -125, 78, + -81, -125, 76, 26, 3, 25, 22, 3, + -5, -18, 46, 39, 84, -18, 38, -110, + 58, -128, 72, 39, 12, 5, 16, -47, + 23, -58, -40, 13, 59, -32, 24, 3, + 7, 83, -9, -67, -1, -2, 9, 16, + -55, 69, 50, 26, 20, -47, 3, 83, + -77, -21, -17, -7, 30, -27, 14, 71, + -8, 47, 99, -66, -12, -93, -59, 20, + -43, -24, -93, -19, 64, 4, -58, 5, + -42, 21, -122, -41, 19, 2, -33, 126, + -15, 3, 88, 3, 77, 48, 
61, -8, + 78, 7, -47, 37, -63, 57, 36, -31, + -9, 26, -46, 17, 55, 10, 47, -46, + 6, -106, -123, 5, -97, 63, 40, 36, + 23, 2, -15, 17, -65, 51, 12, -18, + 29, -22, -6, 46, 10, -50, 29, -67, + 58, 82, -20, -95, 15, -41, 57, 29, + 0, 7, 29, 27, -90, -49, 49, 29, + 1, 38, -13, 3, -32, -32, -46, 32, + -38, 9, 41, -7, -55, 87, 93, -18, + 55, 11, -21, -104, -5, 45, 7, 62, + 7, 35, 11, -30, -33, -10, -30, -13, + 14, -9, 72, 127, -37, -16, 32, -57, + -37, -12, 5, 58, -66, 0, 59, 18, + 116, 31, -53, -28, -8, -5, -74, 67, + 39, -62, -77, 48, 25, -12, -5, -8, + -6, -36, 8, -53, -63, -22, -18, -5, + -36, 25, 26, -89, -106, -84, -40, -45, + -77, 47, -97, -64, -30, -51, -23, -75, + 9, -31, 6, 33, -32, 31, -126, 13, + 25, 4, -102, 40, 45, 47, 24, 71, + -55, 126, -125, -27, 9, -127, -93, 29, + -23, 20, 24, -123, -58, -20, -10, 5, + 2, -67, -14, -23, -88, 113, 16, -81, + 20, -14, 38, 124, -11, -9, 127, -127, + 32, -36, 38, 93, 9, 14, 16, 28, + 28, 97, -28, -58, -14, 88, -128, 0, + -121, 23, -2, -125, 0, 28, -127, -42, + -87, -12, -33, -21, -30, 125, 7, -30, + -53, 46, -56, -121, 34, -27, -64, -73, + 5, 70, 8, -18, 7, -17, -89, -47, + -75, -30, -9, 7, -10, -55, 50, -63, + 78, -71, 45, -1, 15, -11, -35, -10, + -55, -35, -41, 28, -75, 127, 119, -9, + -84, -74, 47, -23, -10, -40, 29, -59, + -20, 124, -124, -118, 9, 1, -22, -21, + 127, -35, -55, 110, -113, 79, -4, 41, + 4, 25, 22, 56, -69, 14, -31, -108, + -21, 5, 14, -82, 6, -1, 21, -92, + -52, -48, 19, 37, -75, -3, 63, 55, + -28, -51, 8, -54, 55, 15, 46, 50, + 35, -3, -9, -27, 2, -1, 66, 56, + 18, -16, 12, -19, 61, -5, -26, -37, + 63, 58, 59, -62, -11, -127, 6, 7, + 40, 114, 18, 7, 1, -123, 17, -20, + -76, 113, 122, 6, -56, -88, 34, 5, + -21, -101, -71, 77, -41, -106, 67, 31, + -128, 7, 11, -126, 31, -58, 69, 16, + -123, 44, -17, 61, 0, -8, -11, 28, + 28, -37, -94, 28, -10, 48, -34, -93, + -11, 14, 7, -92, -78, -45, 22, 67, + 14, 113, 5, 23, 40, 27, 31, 57, + -82, -1, -44, -29, 24, -31, -70, -87, + -100, 79, -122, -73, -46, 37, -68, 
-128, + 65, -128, -126, 14, 85, -46, -61, 20, + -16, 48, 35, 30, 52, -15, -12, -83, + -89, -97, -60, -8, -36, -65, -56, -112, + -128, 113, -56, 22, -57, -84, 14, -5, + -26, 6, 2, 47, -1, 72, 19, 47, + 31, 3, 0, -116, 24, 20, 64, 46, + 28, 33, 126, -58, -9, -16, 61, -127, + 17, -125, -80, -34, 57, -74, 21, 73, + -37, -2, -32, 13, -34, 31, -28, -121, + 25, -84, 48, -120, -107, 39, 87, 32, + -104, -105, -119, 81, 23, -112, -90, 102, + -11, -50, -15, 14, 55, 20, -31, -16, + 6, -11, 39, -8, -4, -29, -31, 56, + 47, 26, -7, 7, -25, -4, 13, -43, + -42, -27, -13, -26, 47, -11, -18, 3, + -2, 10, 10, -39, 12, 70, 108, 88, + -10, -45, -69, 23, 79, -30, 12, 24, + 29, 80, -32, -85, -21, 10, -20, 39, + 74, -39, -54, -43, -18, -37, 14, 59, + 55, 44, 44, 30, 30, 86, -96, 33, + -7, 2, -73, 35, -12, -36, 13, -29, + 28, 31, 19, 75, 42, 79, -12, -30, + 124, 11, -41, 31, 0, 9, -47, 74, + 25, 16, 19, -16, 63, 21, -77, -25, + -43, 60, 36, 13, 38, 53, 22, -45, + 53, 39, 15, 9, -43, -88, 42, -67, + -2, 34, -62, 16, 26, -4, 9, -57, + 54, 17, 10, 3, 60, 23, 15, 72, + 37, 0, 2, 12, 74, -91, -40, 8, + 5, 69, -37, 0, 18, 21, 33, 33, + 77, 20, 31, 52, -9, -33, -13, -46, + 17, -26, 33, 22, 10, -49, -27, 8, + 19, 17, -54, 27, 7, -29, 5, 25, + -19, 98, 11, 37, -112, 125, 63, 17, + -37, -10, -75, 31, 6, -24, -22, 2, + 49, 11, 39, -87, 53, -65, 0, -25, + -93, -29, -32, -33, 2, 32, -34, 13, + 51, -9, -46, 27, -53, 7, 16, -40, + 8, -121, -58, -58, -12, 91, 8, -15, + -18, 51, 4, 23, 36, -125, -24, -10, + -55, 13, -36, -18, 40, 56, 26, 24, + -8, -64, -106, -73, 20, 23, 17, -23, + 110, -12, -30, -62, -1, 21, 34, -62, + 5, 66, -26, -78, -7, -3, -7, -39, + 6, -43, 59, 68, 60, -10, -78, -53, + -57, 60, -83, -112, 25, 16, 12, -6, + 7, 13, -29, 107, -19, 44, -5, 46, + -124, 67, -80, -30, 49, -1, 5, 20, + 19, 34, -3, 2, -18, -19, 123, 14, + -67, -19, 44, -10, 24, -106, 2, 26, + 6, 67, -16, -56, -89, -128, 96, 14, + 38, -19, -81, 52, 32, 0, 60, -12, + 60, -5, 89, -23, -30, 2, -39, -7, + -63, 73, 17, -40, 63, 22, 
-24, -18, + 37, 8, 33, 7, -30, -123, -19, -1, + -2, 38, 55, -86, -44, -7, 28, -19, + 1, -19, 36, 6, 48, -11, 4, 81, + -36, 80, -37, -12, -79, -19, 15, 8, + -8, -14, 59, 30, -42, 5, 14, -92, + -118, -25, -43, 20, 9, 34, -21, 93, + -32, -45, 58, -121, -49, 126, -1, 52, + -53, 15, -19, 27, -68, -23, 45, -9, + 39, 57, 5, -117, -34, 10, 7, -61, + -18, 10, -44, 21, -77, -76, 103, -25, + -43, -36, 86, 44, 47, 27, 23, 0, + -77, -46, -12, 63, -111, -17, -16, 34, + -46, -17, 14, 98, 15, 126, -95, -45, + 0, -67, -78, -5, -24, -9, 2, -11, + -6, -96, 73, -55, -19, -32, -7, -69, + 0, 23, -24, 58, -19, -6, 101, -22, + 24, -21, 57, -51, 100, -7, -4, -11, + -128, -13, 28, 28, 18, -56, 36, -7, + -89, -63, -8, 34, 52, -14, 23, 5, + -85, -10, 47, 50, 8, -126, 78, 43, + -9, 36, 1, 27, 8, -114, -3, -72, + -31, 34, 12, 54, -30, 20, 101, 1, + 78, -121, 34, -64, 14, -106, -42, -33, + 36, -5, 15, 40, -25, 56, -21, -42, + -53, 29, 25, -51, -108, 33, -8, -41, + 51, -60, -60, 8, -45, -92, 0, -127, + -39, -53, -7, 17, -90, -15, -34, -126, + 81, -127, 49, 1, 96, 8, 59, 7, + -58, -125, 121, 63, 49, 7, 42, 9, + 23, -11, -45, -125, 17, -32, 1, -22, + -5, 78, 18, -40, -45, 19, -3, 85, + 62, 24, -39, 4, 24, 22, -7, 2, + 117, -81, -15, -31, -38, 50, 5, -41, + 40, 21, 29, -56, -62, 8, 6, 32, + -6, -19, 71, -10, -12, 28, 25, 10, + -92, 23, -8, -76, 26, 33, 86, -23, + -22, -56, -108, 15, 68, 123, -12, 59, + 124, 46, 40, 124, 24, 35, 17, -29, + 29, 77, 2, 35, -48, -23, 3, -43, + 102, 69, 47, 0, -93, 44, 13, -15, + -8, 10, -29, 28, -60, -12, 48, -26, + 47, 22, 55, -80, -64, -29, -11, -25, + -41, 68, -52, -10, -42, 46, -53, 46, + 27, -38, 90, -100, -34, -80, -30, -124, + 8, -8, 2, -6, 41, -64, 19, -37, + 5, -52, -55, -18, -5, -39, -96, -29, + 5, 125, 2, 28, 16, 117, -14, 80, + -69, -61, -55, -117, 99, -32, -39, 41, + 35, 9, -83, 87, -86, -125, -36, 28, + -126, 8, -77, 127, 12, -21, 65, -78, + -11, 24, -45, -24, -94, 71, -50, 33, + -25, -3, -40, -39, 12, -92, -122, 37, + 120, -6, 52, -48, 12, -12, 65, -127, 
+ 78, -14, 15, -88, 38, -15, -34, 8, + 31, -125, -18, -93, 30, -43, 3, 78, + 18, 25, -29, -47, 25, 39, 7, -121, + -13, -118, 24, -59, -17, 3, -6, 36, + -37, -7, 20, -51, 54, -74, -19, -6, + -51, 17, -125, 30, 15, -4, 88, 69, + 5, 15, -58, -114, 54, 30, -73, 8, + 9, -32, -44, 14, -3, -27, -20, 11, + 10, 122, 85, 32, -48, 12, 40, 34, + 13, -21, 99, 11, 70, -77, 5, 82, + -37, 54, -89, -45, -126, -61, 12, -63, + 8, 35, -7, 10, -51, 24, -25, -12, + 10, -7, -35, 44, 84, -47, -80, 14, + 93, 6, 11, 51, 60, 48, 30, 7, + -71, 6, 10, 0, -10, -5, -52, -3, + -48, 30, 30, 18, 70, -17, 43, 44, + -126, 33, 120, 53, 47, -18, 22, 52, + -35, 93, 47, -27, 85, 0, -17, 0, + -13, 53, -2, 28, 65, 17, 23, 121, + -5, 45, 50, 44, -4, 34, 7, 32, + 76, 12, 35, 46, 30, 50, -35, 46, + -42, 28, 8, -36, -32, 2, 41, -40, + 1, 0, -4, 14, -41, 74, 88, 33, + 17, 71, 40, -20, -17, 32, -27, -41, + -48, 6, -56, 80, 42, -31, 33, 44, + -1, -98, -23, -61, -15, -4, 67, -36, + 67, -37, -66, 50, -126, -18, 24, -128, + -55, -30, -12, -10, -9, -48, 45, -118, + -11, -29, -19, 57, -9, 32, -23, 18, + 126, 65, -11, 4, 2, 17, 14, 18, + 22, 23, 31, 9, -15, -74, 34, -27, + -20, 44, 125, -28, -19, -107, 11, -88, + -14, -39, -8, 11, -74, -37, 22, 15, + -50, 34, 14, 12, -24, -3, 107, 43, + -8, -17, -42, 69, 31, 31, -30, 14, + 17, -65, 100, -34, -4, -28, 28, -69, + 90, 14, -18, -23, 27, -4, -27, 26, + -76, -64, 9, -78, -36, -4, -101, -3, + 47, 13, -122, 32, 7, -64, 30, -69, + -50, -98, -12, -59, -26, 14, 26, -40, + -126, 42, -51, 33, -26, -46, -32, -34, + 17, -12, -89, -9, 32, -25, 15, 24, + -3, -4, -18, -39, -24, 16, -43, -41, + -52, 40, 2, -40, -14, -64, -3, 0, + 18, 125, 42, 14, -35, 19, -101, -24, + 15, 12, -12, 1, 7, -41, 9, 26, + -14, 118, 19, -36, 32, -18, -43, -11, + 22, 11, 26, -1, 2, -12, -39, -54, + -5, 15, 32, -29, 55, -22, -78, -27, + -44, -120, 1, -5, -67, 12, -13, -46, + -81, -14, 64, -16, -70, -24, -23, 55, + -3, 31, -31, 12, 26, 33, 12, 16, + 46, 71, -27, 64, 6, 24, 45, -69, + -33, 27, 41, 16, -34, 23, 7, 
-11, + 1, -12, 126, 51, -18, 15, -75, 10, + -66, 1, -7, -13, -42, -25, -37, -3, + 16, -22, 63, 54, 37, -67, -42, 37, + -81, -37, -9, -73, -39, -127, -15, 27, + 127, -37, 40, -12, 2, -10, 5, -69, + -16, 38, 24, 97, 116, -33, -17, 10, + -58, 4, -67, 90, -45, 13, -126, 43, + 22, -22, 49, -65, -46, -54, 2, -39, + -62, -32, -15, -42, -105, -28, 68, 43, + 9, -23, -26, -113, -104, -46, -67, -18, + 3, 51, 8, -50, -2, -33, -15, -22, + 54, 7, -63, -19, -128, 47, -93, 13, + -13, 36, -98, 49, 20, -13, -9, 11, + -44, -35, -106, -37, -38, -32, -47, 42, + 24, -45, -115, 79, 25, -5, 16, 36, + -87, -45, 73, -4, -53, -66, -11, 15, + 0, 52, -55, -44, 37, -3, 23, -35, + 13, -50, 22, 9, -5, -75, -46, 23, + -20, 34, 17, 35, 30, -5, 9, -106, + 2, -39, 46, 1, 0, 30, -51, -16, + -125, -83, -4, -3, -127, -10, 90, 46, + -19, 33, -48, -14, 62, -54, -104, 0, + 7, -13, -5, 7, 14, -70, 27, 27, + -4, 1, -84, -45, 30, -6, -71, 14, + 31, -110, 44, 37, 35, 31, -56, -122, + -60, 6, -1, -28, -89, 24, 30, 13, + -2, -28, -108, -40, -12, -126, -126, 24, + -94, 49, 46, -20, -89, -123, 62, 23, + -125, -67, -20, 19, 38, 37, 54, 59, + 92, -127, 107, -62, 90, -9, 24, 8, + -8, -40, -32, -33, -23, -78, 0, -76, + 28, -27, 12, 24, -110, -34, 105, 38, + 3, 36, -51, -28, -8, 38, 15, -3, + 117, 45, -44, 45, 104, 94, 11, -53, + 64, 41, 7, -7, -8, -39, -44, 89, + 69, 29, -54, -55, -16, 15, -54, -5, + -14, 12, -67, 12, 20, -47, -12, 42, + -35, 38, 25, 11, 21, -11, -21, -20, + -1, 6, -51, 9, -48, -53, 40, -41, + -113, -42, 57, 22, 39, 61, -43, -2, + 47, -29, -101, -58, 16, 21, 1, -29, + -8, 21, -24, 4, -20, 23, -30, 0, + 18, 125, 20, 63, 25, 21, 127, -21, + -20, -55, -14, -98, 81, 3, 37, 58, + -33, -36, -5, -13, -124, -43, 127, -11, + -15, -48, 122, -111, -11, -15, 28, -42, + 33, -127, 100, -72, -13, 127, -54, -3, + 79, 2, -42, 38, 16, 8, -62, -19, + 36, 19, 39, 78, -7, 121, -66, 11, + -36, 47, 83, -69, -23, 34, -79, -62, + 39, -56, 73, -40, -87, 89, -96, 68, + 101, -65, 25, -32, 26, -8, -25, -61, + -82, -9, -89, -28, 
-66, 126, 73, -15, + -31, 36, 40, -38, -9, 9, 35, 16, + 16, -44, 38, -73, -14, -5, -26, 92, + -114, 63, 32, 22, 79, -30, -11, 77, + 109, 66, 95, 27, -22, 14, -97, -22, + -51, 15, 41, -81, -10, 0, -44, 12, + 58, -44, 23, -32, 5, 106, 10, -3, + -43, -13, -118, 57, -51, -88, 21, -12, + 17, -81, 4, -20, 31, 18, -24, -2, + -121, 20, -5, 66, 20, 62, 4, -45, + 14, 61, -36, -27, -26, -14, 20, 123, + 35, -9, -31, -96, 11, -55, -60, -52, + -117, 32, -127, -45, -36, -10, 12, 87, + -101, -38, -30, -106, -50, -33, -5, 1, + -9, -7, 0, -32, 15, -12, -10, -25, + 110, 5, -19, 1, -57, 83, 93, 8, + 19, -3, 29, 5, 49, 26, 7, -7, + 40, -12, -24, 63, 30, 11, -49, 2, + 24, -7, 21, -34, 31, 12, 25, 39, + 9, 46, 39, 21, 11, -28, 20, -5, + 8, 10, 6, -28, 56, 8, -2, -7, + -3, 11, 3, -5, -5, -9, -41, 15, + 18, -24, 9, 10, 0, -16, 30, -32, + 23, 17, 117, 43, -52, 1, 12, 11, + 9, -10, 0, 11, -5, -20, 3, -33, + 0, 19, 101, 69, 34, 0, 29, 11, + -4, 126, 31, -95, -12, -27, -80, -38, + -85, -96, -7, 68, -126, 16, 84, -8, + -47, 33, -35, 54, -23, -12, 57, -40, + -23, 43, 71, 40, 49, -40, 37, 15, + -6, -46, -60, 25, -34, 49, -3, 29, + 10, 38, -7, 21, 94, -6, 11, -16, + 71, 3, -13, 32, -6, 93, -103, 56, + 49, -7, -2, -22, 18, -36, 8, 49, + 0, 21, -48, -3, 55, 94, -85, -24, + -26, -22, -81, -44, 51, 6, 3, -24, + 86, 83, -14, 68, -40, -19, 41, 2, + 51, 37, -100, 59, 98, 61, 6, 37, + -59, -13, 10, 126, 49, 29, -18, -123, + -128, -46, 46, -125, -126, -1, 88, -101, + -50, 70, 11, 41, -127, -16, -79, 81, + -38, 5, -125, -8, 47, 51, 66, -82, + 47, 91, 28, 57, -15, 86, -94, -43, + -47, 32, -8, -63, 82, 19, -73, -25, + 47, 55, 46, 45, -93, -82, -123, 36, + -12, 16, 14, -106, 25, 26, -36, -57, + -15, -10, -67, 123, 93, -33, 58, -5, + -80, 53, -94, -53, -22, 58, -11, -1, + 67, -36, 125, 40, 11, 62, 79, -123, + 33, -121, 2, -114, 6, 50, -17, 26, + -35, 19, -6, -85, 28, -1, -21, 17, + 45, 125, 71, -29, -59, 16, 41, -63, + 13, 7, 65, -36, -52, -51, 68, -44, + 30, 24, -3, -4, -46, 63, 19, 58, + -78, 34, 8, 94, 69, 
-29, -51, -1, + 28, -99, 39, 19, -14, -4, -92, 38, + -5, -21, 104, 13, 13, -81, -29, 29, + -54, -83, 91, -16, -2, 15, 0, -5, + 33, 9, 35, 4, 13, 21, -113, 0, + -21, 25, -26, -89, 96, 34, -7, 27, + -12, -27, 16, -33, 7, -6, -1, 24, + 43, 33, 9, 45, 87, 25, 54, 40, + -53, -122, -42, 55, 98, 23, 83, -5, + 5, 73, -65, -110, 29, 12, -127, -3, + 63, -64, 0, 14, 16, 36, 38, -44, + 37, -84, -74, -7, -5, -41, -95, -119, + 14, 66, -18, 120, -17, -40, -58, 2, + 44, -26, 86, -11, 40, 59, 52, -20, + 24, 11, -18, -68, -9, -126, -7, 62, + 120, 79, 31, 69, -20, -53, 42, 86, + 32, -52, 12, 32, 64, 74, -39, -66, + 67, -4, 35, -51, -50, 50, 8, 69, + -112, 104, 51, 53, 15, 1, 55, 88, + 34, -100, -21, -25, -117, -91, -4, -125, + -1, 1, 48, -29, -13, -20, 7, 29, + -61, -24, 26, 9, 73, -23, -128, -61, + -79, -17, -21, 17, -7, -23, -70, -18, + 97, -30, 15, -28, 56, -41, -126, 21, + 27, 122, -13, 41, -86, -109, 12, 35, + 46, -57, 2, -47, 1, 7, 63, -53, + 98, 51, -50, -7, -10, -37, 8, 6, + -110, 15, 42, 24, -34, 117, -4, 50, + 14, 17, -37, 58, -46, 6, 115, -31, + -113, -40, 104, -47, -5, 118, 19, 30, + -78, -22, -126, 27, -3, 54, -63, -65, + 21, -7, -45, -39, -10, -35, 2, -3, + 53, 5, -119, -26, -66, -6, -46, 43, + -66, -2, -20, -27, -10, 27, -34, 29, + -21, -7, -103, 4, -26, -5, 17, 52, + -128, -28, -13, -1, 52, -38, 67, -24, + -41, -121, 65, 32, 84, -24, -112, 13, + 106, -105, -9, -66, -18, -28, -52, 78, + -58, -33, 2, -18, 17, -33, -21, -30, + 93, -28, -116, 80, -84, -51, -21, -1, + -15, -4, 24, -34, 50, -44, -54, 61, + -79, -41, 15, -48, -125, -29, -84, 93, + 17, 1, -123, 106, -123, -17, 84, 0, + 126, -34, -30, -81, 61, -96, 13, 71, + -40, -122, -19, 22, -89, -25, -119, 57, + -5, -98, 112, -71, 95, -94, -59, 74, + 103, -12, 39, -61, -8, -125, 15, 7, + -69, -17, -119, -11, 69, -81, -17, 124, + 43, 33, -127, -103, -88, 81, 56, -10, + 1, -98, -97, -73, -71, -42, 17, 73, + -33, -123, -46, -80, 9, 91, 126, -127, + -21, -115, -110, 93, -126, -24, -53, -119, + -103, -117, -9, -28, 39, -11, 
-59, -103, + 0, 53, 1, 59, 27, -86, -88, 95, + 24, 51, 114, -6, 81, 107, 70, -80, + -95, -34, -24, 20, 18, 7, 31, -121, + 72, -64, 101, 5, -28, -94, -121, 14, + 19, 45, 23, -86, -34, -19, 5, -61, + 73, -82, -11, 28, 10, 23, -46, 26, + 108, 16, 5, -16, 32, -32, -81, 76, + -18, -19, -44, 13, -53, -97, 37, 36, + 21, -62, 9, 9, 82, -65, -92, -61, + -128, 73, 33, -12, -126, -41, -3, 6, + 63, -97, -127, -91, 31, -18, -3, 91, + -65, 27, 21, 57, -43, -40, 99, -62, + 22, -6, -51, 127, 82, -14, -114, -31, + 39, 39, -88, -17, -24, -10, -22, -113, + -36, 3, -67, -76, 47, 50, 85, -22, + 22, 10, 60, -2, 34, 0, -25, 22, + -52, 48, 79, 48, -3, 17, -27, -6, + 6, -17, 125, -12, 65, 1, -12, 35, + 30, 91, 14, 88, -13, 26, 36, -17, + 20, 9, -23, 37, 112, 25, 31, 56, + -7, 118, -78, 11, -13, 84, 41, 1, + 46, 114, 123, 35, 40, 39, 18, 12, + 116, -13, -13, -7, 64, -128, -22, 15, + -27, 11, 53, -13, -31, 13, 51, 74, + -21, -26, 81, -56, -4, 5, 24, -41, + 20, 22, 18, -8, 70, 4, -14, -2, + 17, 17, 71, -30, -128, 20, 20, 3, + 40, -5, -35, 123, -23, 28, 21, -6, + -34, -5, 41, -33, -18, 3, -5, -44, + -46, 37, 37, 0, 14, 39, -69, 50, + 18, -31, 39, 43, -51, 5, 6, 19, + -21, 40, -9, -5, 41, 3, 76, 4, + 10, -41, -49, -43, -47, -66, -74, 53, + -77, 109, 51, 62, -19, 1, 33, 31, + -18, -107, -39, -72, 20, 21, -32, -35, + 42, -8, -2, 10, -2, 1, 31, 9, + 69, 24, 55, -60, 11, -14, -11, -69, + -63, 0, 107, 10, -61, 77, 57, 34, + 38, 21, -55, -29, -126, 18, -54, -94, + -61, -41, 0, 20, 45, -21, -2, -52, + -47, 71, 12, 11, -11, 33, 117, 14, + -64, -4, -112, -21, 27, -46, 16, 25, + 41, 36, -35, 45, -2, -24, -35, 10, + -4, 24, 18, 57, -126, 17, 21, -13, + 31, -125, -1, -3, -8, -70, 22, -124, + 1, -50, 75, 87, 60, 83, 27, -42, + 21, 9, -3, -45, -40, -24, 8, -6, + -11, -3, -126, -15, -8, -75, -80, -124, + -70, 40, 48, 19, 29, -2, 25, -121, + -23, 4, -12, 90, 49, -40, 20, -13, + 3, -123, 10, -59, -21, -25, -60, -10, + 56, -5, 8, 24, 18, 4, 15, 33, + 77, 4, 1, 8, -126, 23, 27, 34, + 3, 51, 66, -14, 50, 51, 108, 
-40, + -2, -39, 10, 46, 60, 28, 5, 72, + 11, -64, -82, 31, -52, -3, -29, 32, + 63, -24, 44, 35, 8, -8, 25, -25, + 6, 58, 41, 55, -24, -33, -7, -8, + -16, -36, -59, 6, 56, -68, 32, 16, + -14, 26, -19, 26, -41, -77, 41, 0, + -20, -21, 15, -49, -27, 7, 34, 41, + 17, 14, 20, 62, 95, 40, -4, 65, + 28, -11, -28, 27, -43, 85, -12, 2, + -11, -13, -5, 38, 4, -87, 126, 44, + -12, 3, 30, -89, 124, 24, -16, 35, + -30, -39, 0, -116, 122, 33, -8, -31, + 80, -29, -12, -55, -17, 18, 58, 15, + 34, -123, 19, 4, -51, 48, 7, -127, + -89, 127, 39, -86, -19, 46, 127, -52, + 68, 3, -7, 9, -126, -13, -49, -45, + 8, -20, -126, 42, -72, 127, -15, -2, + 42, -33, 53, -34, 10, -128, 13, -78, + -60, -65, 52, -35, 36, 39, -30, 55, + -64, -81, 32, 41, 38, -17, -53, -65, + 95, -115, 22, -52, 10, 64, -60, -9, + -45, -21, 24, -22, -25, -31, 64, 127, + -47, -33, 10, 98, 62, 30, 17, 19, + -46, 36, -67, -74, 75, -19, 56, 2, + 11, 28, 127, -57, -52, -110, -44, -20, + 106, -30, 49, 52, -1, 43, 38, -32, + 41, -13, -2, -38, -12, -3, -17, -36, + 42, -6, -22, -17, -74, -30, 95, 18, + 23, 14, 17, -21, 34, 72, 24, 2, + -37, -49, -36, -34, 3, -126, -65, -37, + -24, -56, 14, 75, 28, 0, 41, -80, + -13, -90, -33, 39, 31, -14, -1, 15, + 19, 63, -9, 45, 3, -122, 48, -35, + -38, 25, 22, 18, -27, 4, 1, -11, + -43, -101, -24, -2, 13, 64, 42, -45, + -20, -1, -6, 29, 0, 10, -5, 14, + 98, -1, -18, 9, 44, -19, -24, -36, + -12, -42, -31, -28, 125, -21, -17, -12, + -12, 25, -21, 8, -6, 30, 39, -13, + -24, 25, 36, 48, -12, 31, 53, -23, + -57, -27, -53, -5, -18, 12, 1, -13, + -28, 8, -48, -64, 25, -12, -27, 66, + -28, 2, 12, -12, -8, -88, -16, -48, + -61, 12, 77, 41, -50, 7, -18, 12, + 1, 2, 25, 80, 6, -63, -50, 29, + 127, 37, -30, -102, 35, 39, -7, -26, + -27, -81, -4, -1, -7, -48, -28, 29, + 37, 19, 49, -23, 19, -2, 15, -20, + -7, -1, 45, 33, -54, -2, 26, 23, + 26, -101, -45, -32, 39, -24, -18, -9, + -7, 34, 75, 5, 15, 39, 19, 7, + 34, 53, -13, -10, 11, 38, -29, -27, + -1, -104, -55, -10, 27, -9, -2, 6, + -14, 13, 29, -28, 
-21, 13, -1, 38, + -25, 32, -31, -50, 52, -34, -19, -39, + 12, -29, -110, 10, -74, 47, 60, -6, + -2, 22, 4, -42, -55, 41, -66, 88, + -15, 123, 26, -7, 20, 20, 28, 35, + 69, 5, -64, -21, 36, 26, 78, 122, + -6, 54, -44, 51, -7, 118, 60, 109, + 12, -29, 36, 27, -50, 50, -37, 10, + 56, -126, 107, 56, -8, 81, -101, 29, + 21, -25, 15, 68, 81, -98, -28, -46, + -61, 12, 87, 96, 2, 4, 57, -123, + 25, 12, 41, -5, -32, 78, 123, 28, + 59, 58, -85, 16, -13, 60, -115, -23, + 50, 127, -83, 39, 94, -50, -96, -97, + -17, 91, -28, 11, -127, -63, -25, -92, + -106, 89, 78, -11, -27, -22, 8, 13, + 26, 53, 25, 19, 93, 49, -21, -10, + 0, 37, 59, -6, -34, -22, 13, 80, + 35, 0, 30, 89, 64, -17, -25, 16, + 73, -32, -47, 92, 31, -32, 82, 3, + 44, -22, 3, 44, 1, 1, 10, 22, + 13, 9, -18, 20, -2, -15, 41, -1, + 33, 18, 22, 51, 6, -16, -41, 17, + -5, -43, 78, 25, 45, 66, -15, 0, + 14, 3, 51, -36, 24, -13, -7, -15, + -4, -11, 28, 29, 11, -20, -3, 54, + 14, -9, -127, 8, 12, 61, 9, 28, + 55, 0, 9, 13, -61, 28, 40, 39, + -49, -32, -20, 9, -16, 59, -15, -21, + 5, -39, 28, -33, -26, 51, 4, -10, + -19, 27, -31, -4, -34, 74, 3, 16, + 104, 2, 121, 23, -27, -1, 6, -32, + -15, -28, -32, -14, 65, 100, -2, 5, + -4, -8, 55, 67, 10, 6, 10, -9, + -39, -78, 42, 35, 9, 24, -24, 50, + -7, 44, -37, 4, 0, 14, 45, -29, + 33, 21, 54, -3, 20, 46, 20, -34, + 33, -71, -32, 8, -24, 8, -29, -19, + -7, 116, -27, 52, -9, 26, 121, 54, + -86, -5, -66, 44, -40, 56, -21, -38, + -24, -42, 2, -20, -11, 23, -8, -7, + -101, -44, 15, 38, 60, -15, -3, -4, + 50, -12, -91, 10, -49, 7, 31, 30, + -61, -55, 20, -100, 17, 13, -57, 32, + 88, -62, 56, -23, -9, -8, 3, 26, + 4, 2, 9, 70, 11, 36, -30, -126, + -55, 71, 28, 47, 94, 44, -8, 20, + -3, 6, 18, 56, 124, 78, 1, 19, + -57, -19, -121, 42, 54, -51, -12, -39, + -44, -16, -80, -77, 31, 16, -13, -76, + 29, -2, 1, -47, -4, 59, 79, -10, + 22, -35, 69, -50, 7, -17, 64, -58 +}; + +static const rnn_weight denoise_gru_bias[288] = { + -41, -76, -24, -96, 25, 117, -55, 54, + -73, -28, 53, -79, 20, 
-8, -87, 28, + 44, 38, -66, -19, -45, 25, 119, 78, + 54, -92, 31, 13, -3, -13, -28, -67, + 3, 31, 54, -48, -16, -97, -12, 2, + -117, -48, -24, 56, 18, 115, -59, 126, + -30, 6, 16, -126, -11, -6, 15, -67, + 33, -113, 59, -12, 126, -3, 61, 58, + -71, -4, 42, 41, -48, 11, -33, 50, + 43, 4, 0, 15, -46, -16, 23, -18, + 8, -30, 13, 66, 77, -6, 34, 103, + 40, 50, 39, 72, -10, 22, -16, 24, + 1, 127, -9, -48, -55, -27, 36, -16, + 90, 4, 12, -17, 59, 23, -34, 20, + -84, 19, 41, 121, 116, 111, -10, -127, + 41, 44, 4, 34, -1, 20, -11, 2, + 127, -127, 44, 16, 21, 126, 66, 125, + 126, 78, 25, 45, 72, 3, 123, 40, + 105, -62, 25, -105, 44, 33, 13, -51, + 119, 126, 126, 53, 0, -88, -32, -27, + -33, -18, 11, 1, 27, -62, -6, -57, + 71, 46, 21, -7, -6, -55, 127, 30, + -41, -6, -21, -21, -38, 87, -16, 34, + 44, -126, -112, -30, 61, -17, 115, 1, + -39, 19, -43, 76, -64, 48, -13, 11, + 73, 71, 93, 104, 23, 10, 63, 34, + -7, 126, 57, 3, 127, 15, -71, -126, + -25, 125, 7, 7, 39, -18, -27, 126, + 95, -127, -95, 36, -4, 125, 37, 72, + 127, -29, 69, 84, 99, 39, 127, 40, + -127, -92, 0, 127, -14, 70, 39, -98, + 25, 127, -54, 48, 47, 19, -21, 93, + 61, 127, 3, -62, 127, -75, 24, -3, + -18, 102, 40, -6, -14, -36, -41, 46, + 89, -17, 29, -55, 7, -10, -59, 22, + -21, 25, 18, -58, 25, 126, -84, 127 +}; + +static const GRULayer denoise_gru = { + denoise_gru_bias, + denoise_gru_weights, + denoise_gru_recurrent_weights, + 114, 96, ACTIVATION_RELU +}; + +static const rnn_weight denoise_output_weights[2112] = { + 24, 90, 127, 108, 73, 38, 24, 13, + 4, 16, 41, 51, 46, 35, 24, 14, + 19, 23, 27, 23, 11, 10, 14, -1, + 20, 67, 122, 95, 44, 11, 4, 5, + 8, 15, 19, 17, 11, 5, 1, -2, + -1, 5, 5, 1, 28, 2, -25, -16, + -83, -45, 4, 36, 29, 24, 20, 12, + -1, -2, 4, 13, 17, 18, 21, 22, + 20, 11, -11, -26, -15, -21, -18, -15, + -22, -22, -18, -22, -25, -20, -24, -25, + -19, -21, -22, -16, -18, -24, -22, -7, + 6, 26, 25, 7, 12, 17, 11, 4, + 11, 11, 1, 1, 6, 14, 19, 11, + 13, 29, 37, 14, -16, -45, -3, -8, + -17, -22, 
-19, -11, -19, -21, -22, -24, + -25, -20, -19, -14, -7, -5, -3, -1, + 0, 1, 14, 13, 10, -12, 27, 5, + -33, 11, 51, 26, -23, -22, -11, -8, + -4, -8, -21, -32, -41, -45, -43, -39, + -34, -16, -3, 2, -7, -10, -16, -12, + -12, -8, -4, 1, 24, 53, 81, 98, + 99, 80, 60, 54, 49, 38, 16, 5, + 0, 15, 5, 14, 11, 12, 3, 7, + 16, 19, 17, 10, 6, 1, -11, -11, + -18, -26, -31, -26, -17, -14, -2, -15, + -23, -27, -28, -31, -25, -29, -36, -37, + -26, -22, -17, -15, -18, -17, -14, -14, + -9, -9, 1, 4, 1, -6, -29, -23, + -21, -14, -24, -24, -20, -21, -16, -13, + -14, -15, -12, -16, -13, -10, -10, -7, + 4, 18, -8, -11, -24, -12, -6, -10, + -14, -21, -21, -20, -15, -22, -53, -65, + -58, -43, -27, -31, -38, -45, -48, 0, + -5, -8, -4, 2, 10, 12, 12, 6, + 12, 21, 25, 22, 23, 23, 26, 38, + 44, 41, 38, 36, 24, 18, -21, -31, + -28, -37, -20, -5, -4, -15, -14, -16, + -21, -21, -9, -10, -8, 0, -5, 4, + 20, 14, 15, 47, 26, 26, 25, 5, + 3, 8, 17, 17, 10, 14, 14, 4, + 0, 5, 6, 12, 12, 11, 14, 28, + 47, 62, 8, 8, -7, 2, 2, 2, + -5, -4, 2, 2, -4, -5, -11, -22, + -34, -46, -53, -55, -45, -39, -35, -43, + -15, -7, 24, 40, 50, 55, 62, 63, + 60, 58, 50, 48, 46, 47, 45, 40, + 30, 20, 12, 7, 3, 4, -9, 2, + -13, -9, -13, -15, -20, -17, -19, -32, + -45, -54, -65, -67, -63, -62, -48, -24, + -11, 5, 11, -38, -5, -7, -8, -12, + -17, -15, -11, -7, -8, -12, -10, -11, + -14, -13, -14, -17, -7, 12, 24, 15, + -11, -79, -1, 14, -8, 26, 14, 13, + 13, 6, -16, -16, 10, 28, 16, 5, + -1, -1, 5, 9, 7, 6, 8, 14, + -10, -7, -2, -1, -9, -18, -20, -18, + -13, -11, -14, -20, -29, -37, -46, -46, + -48, -54, -56, -72, -83, 11, 0, 11, + 37, 45, 52, 80, 118, 96, 33, -10, + -13, -10, 4, 7, -6, -3, 5, 3, + -2, -9, -9, -7, -4, -3, 12, 13, + 17, 24, 20, 16, 10, 11, 15, 17, + 11, 3, 1, -1, -6, -9, -5, -14, + -21, -16, 8, 1, -10, 5, -3, -51, + -14, -10, -28, -27, -21, -14, -4, -3, + -6, 1, 5, 4, 5, 13, 11, -1, + -3, 5, 11, 30, 37, 34, 31, 27, + 24, 24, 29, 34, 28, 25, 29, 27, + 29, 30, 36, 38, 38, 8, -17, -18, + -41, -58, -45, -39, -51, 
-52, -40, -47, + -60, -52, -38, -25, -27, -36, -43, -33, + -19, -3, 1, -3, 3, 1, 6, 14, + 12, 9, 6, 3, 8, 13, 10, 8, + 15, 31, 46, 69, 82, 81, 76, 67, + 42, 15, 14, 28, 21, 36, 42, 27, + 28, 26, 20, 11, 7, 4, 7, 6, + 7, 20, 26, 17, 12, 1, -13, -22, + -16, -23, -16, -9, -11, -10, -12, -10, + -5, -6, -8, -6, -3, -2, 9, 10, + 7, 7, 9, 22, 34, 25, -5, -8, + -3, 2, -14, -7, -2, 1, 7, 33, + 56, 59, 58, 42, 2, -11, -10, -11, + -10, -12, -10, -4, 9, 14, 21, 11, + 9, 6, 7, 8, 13, 16, 18, 22, + 26, 30, 30, 30, 26, 18, 9, 5, + 2, 3, -18, -29, -57, -45, -39, -45, + -35, -19, -7, -6, -2, -4, -11, -13, + -1, 0, -6, -22, -47, -50, -18, 12, + -13, -19, -45, -41, -43, -38, -34, -31, + -31, -21, -13, -10, -13, -13, -9, -8, + -9, -9, -6, -4, -4, 2, -9, -18, + -6, 2, 0, 5, 8, 5, 5, 7, + 8, 7, 6, 2, -5, -6, -2, 3, + 3, -8, -14, -1, 45, 35, 41, 21, + 32, 31, 20, 17, 22, 20, 19, 16, + 10, 4, 11, 15, 6, 4, 13, 4, + -7, -18, 7, -26, -11, 9, -12, -28, + -34, -16, -2, -8, -20, -20, -27, -10, + 2, 4, 9, 1, -4, 4, 9, -1, + 84, 95, 23, 17, 19, 21, 17, 25, + 32, 23, 8, 2, 5, 9, 13, 17, + 16, 16, 16, 18, 23, 24, -27, -32, + -10, -21, -8, -1, -10, -10, -5, -1, + 2, 1, 3, 0, -9, -11, -10, 0, + 10, 9, 18, 28, 12, 18, 15, 11, + 2, 1, -4, 4, 8, 7, 8, 7, + 3, 4, 3, 11, 14, 4, -5, -2, + 3, -29, -4, -2, -7, 0, -4, -7, + -16, -7, 5, 2, 0, 0, -3, 2, + 5, 1, -3, -18, -63, -113, -128, -117, + -23, -13, -2, -21, -21, -29, -54, -37, + -2, -2, -17, -29, -35, -29, -19, -16, + -14, -16, -19, -16, -15, -21, 28, 19, + 29, 20, 30, 19, 13, 12, 11, 14, + 17, 20, 28, 29, 25, 24, 24, 32, + 31, 34, 60, 97, -37, -41, -57, -61, + -54, -50, -54, -57, -49, -49, -47, -45, + -45, -50, -56, -59, -54, -49, -52, -60, + -51, -40, -16, -16, 0, -9, -16, -11, + -5, -6, -10, -17, -27, -35, -29, -31, + -40, -42, -44, -38, -31, -25, -23, -6, + -5, -2, -17, -38, -24, -16, -19, -12, + 12, 38, 47, 37, 24, 6, -15, -9, + 13, 37, 61, 56, 11, -7, 27, 18, + -10, -14, -14, -14, -19, -18, -13, -12, + -12, -13, -15, -11, -5, -6, -8, -8, + -7, -4, 
-1, -8, -4, 0, -9, 2, + 2, 6, -4, -7, -4, -6, -7, -12, + -15, -17, -10, -8, -15, -21, -25, -31, + -72, -127, -128, 20, 42, 17, 8, -4, + 1, 6, 3, -18, -28, -2, 1, 5, + 28, 8, -3, 8, 16, 24, 15, 0, + -9, 19, 12, 16, 43, 69, 55, 41, + 35, 22, 14, 8, 3, 8, 16, 20, + 26, 33, 34, 30, 19, 15, -10, -12, + -10, -10, -7, -11, -3, -4, -8, -7, + -2, 1, 0, -3, -7, -8, -8, -6, + -1, -2, -4, -4, -76, -91, -66, -74, + -76, -41, -48, -47, -44, -41, -39, -36, + -42, -48, -37, -37, -39, -41, -47, -58, + -68, -70, -78, -79, -69, -70, -70, -67, + -72, -72, -72, -68, -63, -60, -58, -58, + -55, -53, -49, -43, -43, -37, -34, -16, + -6, -3, -1, -11, -7, -4, -3, -3, + -2, 2, 1, -6, -7, -11, -11, -3, + -17, -41, -58, -44, -36, -43, 6, -15, + -19, -19, -21, -17, 0, 16, 16, 13, + 12, -5, 0, -4, -27, -27, -12, -3, + 0, -2, -7, -15, 7, 0, -8, -6, + -2, 8, 4, 0, -5, -7, -11, -13, + -13, -11, -7, -9, -9, -6, -10, -5, + 6, 0, 37, 34, 32, 43, 40, 35, + 34, 36, 40, 38, 35, 38, 37, 40, + 44, 37, 31, 30, 25, 38, 44, 23, + 18, 10, 8, 8, 3, 5, 4, 2, + 3, 6, 6, 5, 7, 6, -3, -8, + -8, -13, -14, -24, -17, 9, -110, 22, + -17, -65, -43, -35, 5, 5, -19, -29, + -27, -18, -11, -6, -4, -6, -10, -9, + -4, 1, 3, 2, 36, -40, -99, -102, + 20, 27, 7, -16, -27, -30, -23, -15, + -6, 1, 3, -2, -8, -10, -9, 4, + 8, -10, -44, -30, -9, -2, -5, -15, + -9, -3, 1, -2, -18, -17, -21, -26, + -25, -34, -38, -44, -46, -46, -38, -87, + 43, 32, 14, 5, 8, 12, 10, 4, + -12, -14, -4, 2, -1, -7, -6, 2, + 3, -3, -2, -1, 4, 14, -89, -105, + -6, -8, 0, 1, -3, -7, -8, -4, + 8, 15, 9, 3, -1, -5, -6, -4, + -2, 1, 5, 9, -4, -6, -1, -1, + -6, -1, -5, -3, 0, -3, -4, -2, + -4, -5, -5, -10, -9, -5, -8, -14, + -20, -44, -20, -5, 1, 20, 22, -17, + -35, -38, -33, -23, -2, 8, 10, 7, + -2, -40, -59, -50, -30, -8, -17, -32, + -125, -75, -53, -13, -1, -9, -12, -12, + -9, -7, -3, -1, -4, -9, -11, -13, + -8, -10, -14, -6, -6, -24, 46, -1, + 8, -6, 4, 0, 34, 19, -20, -27, + -23, -19, -6, 3, 15, 33, 37, 29, + 22, 11, 3, -6, 1, 0, 12, 7, + 3, -4, 3, 3, 7, 12, 
6, -1, + -2, -5, -4, -6, -7, -4, -3, -1, + 3, 8, -7, 1, 9, 5, -1, -3, + -7, -3, 3, 6, 10, 7, 1, 0, + 2, 0, 1, -3, -5, 2, 2, -9, + 5, -2, -25, -17, -17, -14, -14, -13, + -24, -38, -48, -48, -43, -31, -24, -17, + -13, -12, -12, -12, -5, 7, 21, 21, + -5, 4, 10, -3, 2, 7, 8, 8, + 6, 6, 3, -3, -11, -15, -13, -12, + -20, -26, -28, -41, 30, 17, 18, 26, + 24, 22, 22, 32, 40, 39, 34, 32, + 27, 34, 36, 26, 20, 18, 23, 35, + 41, 27, -10, -7, 1, 7, 8, 0, + 6, 14, 15, 11, 6, 7, 7, 6, + 10, 11, 11, 14, 14, 17, 33, 71, + 17, 10, -6, -1, 12, 14, 7, 7, + 18, 15, 2, -7, -6, 0, 5, 7, + 2, -3, -5, -6, -1, 4, -11, 6, + 5, 3, 10, 13, 9, 5, 14, 19, + 9, -4, -17, -18, -9, 1, 3, 5, + 17, 38, 53, 31, -34, -26, -28, -22, + -17, -24, -23, -21, -26, -21, -21, -25, + -20, -17, -13, -19, -16, -23, -31, -23, + -9, -20, 21, 26, 16, 13, 20, 11, + 15, 19, 19, 18, 24, 26, 29, 27, + 29, 25, 25, 15, 28, 38, 44, 59, + 14, 18, 31, 37, 42, 40, 43, 45, + 49, 54, 54, 56, 53, 50, 43, 37, + 30, 26, 23, 20, 14, 10, 9, 11, + 16, 5, 15, 12, 14, 15, 13, 12, + 14, 17, 17, 16, 16, 16, 15, 6, + 4, 2, -14, 2, 0, -2, -7, 18, + 26, 20, 22, 29, 29, 14, 9, 17, + 21, 12, 2, 5, 9, 14, 10, 5, + 2, 5, 86, 127, 127, 68, 29, -3, + -3, 15, 23, 28, 31, 21, 5, -3, + -7, -5, -2, -1, -6, -17, -18, -14, + 3, -2, -24, -29, -32, -33, -40, -46, + -45, -45, -45, -44, -42, -39, -35, -29, + -28, -29, -28, -23, -7, 9, 24, 37, + 62, 62, 67, 64, 59, 76, 105, 95, + 61, 41, 28, 23, 23, 18, 14, 19, + 22, 22, 14, 11, -51, -39, -20, -13, + -13, -6, -5, -7, -9, -14, -15, -18, + -25, -26, -30, -35, -35, -44, -44, -51, + -71, -105, -16, -4, -18, 2, 28, 7, + -3, 7, 13, 3, -3, 2, 9, 14, + 14, 5, -8, -14, -4, 17, 47, 98, + -23, -41, -55, -45, -41, -38, -34, -32, + -31, -29, -25, -22, -19, -17, -14, -13, + -13, -13, -14, -13, -4, 0, 14, 26, + 11, 2, 4, -7, -8, -15, -22, -28, + -26, -9, 6, 14, 11, -3, -4, -1, + 3, 31, 65, 45, 4, -1, 37, 31, + 2, -7, -21, -29, -20, 5, 10, -10, + -13, -13, -8, 1, 4, -4, -12, -31, + -15, 39, -3, -6, -12, -16, -24, -27, + 
-31, -28, -30, -38, -45, -45, -48, -50, + -50, -46, -43, -41, -38, -39, -34, -34, + 15, 16, 3, 2, 10, 12, -4, -2, + 15, 26, 25, 21, 27, 35, 44, 43, + 28, 21, 43, 42, 17, 40, 9, 7, + 10, 12, 10, 5, 1, 5, 11, 11, + 9, 10, 16, 19, 22, 34, 44, 51, + 48, 48, 48, 60, -43, -29, -12, -2, + -9, -24, -19, -13, -12, -14, -15, -13, + -17, -18, -19, -19, -17, -20, -23, -34, + -39, -44, -6, -31, -32, -25, 52, 106, + 110, 19, -4, -14, -13, -5, -2, -8, + 2, 6, 5, 11, 15, 20, 38, 61, + -40, -19, -2, -5, -2, -4, -1, -6, + -8, -6, -8, -4, -5, -4, 3, 0, + -2, -4, -5, -3, -14, -24, -16, -24, + -22, 2, -2, -29, -33, -27, -23, -25, + -24, -16, -18, -17, 5, 38, 47, 41, + 37, 27, 13, 0, 67, 78, 39, -26, + -7, -10, -15, -6, 3, -3, -5, -3, + -2, 0, 3, 1, -3, -4, -3, -1, + 1, 0, -2, -15, 24, -12, -48, 24, + -26, -82, -69, -40, -15, -16, -9, -1, + 7, 12, 18, 20, 26, 33, 27, 30 +}; + +static const rnn_weight denoise_output_bias[22] = { + -82, -66, -125, -95, -127, -127, -127, -127, + -127, -94, -113, -127, -80, -65, -109, -127, + -126, -105, -53, -49, -18, -9 +}; + +static const DenseLayer denoise_output = { + denoise_output_bias, + denoise_output_weights, + 96, 22, ACTIVATION_SIGMOID +}; + +static const rnn_weight vad_output_weights[24] = { + 127, 127, 127, 127, 127, 20, 127, -126, + -126, -54, 14, 125, -126, -126, 127, -125, + -126, 127, -127, -127, -57, -30, 127, 80 +}; + +static const rnn_weight vad_output_bias[1] = { + -50 +}; + +static const DenseLayer vad_output = { + vad_output_bias, + vad_output_weights, + 24, 1, ACTIVATION_SIGMOID +}; + +const struct RNNModel rnnoise_model_orig = { + 24, + &input_dense, + + 24, + &vad_gru, + + 48, + &noise_gru, + + 96, + &denoise_gru, + + 22, + &denoise_output, + + 1, + &vad_output +}; diff --git a/TMessagesProj/jni/voip/rnnoise/src/rnn_data.h b/TMessagesProj/jni/voip/rnnoise/src/rnn_data.h new file mode 100644 index 000000000..f2186fe07 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/rnn_data.h @@ -0,0 +1,34 @@ +#ifndef RNN_DATA_H +#define 
RNN_DATA_H + +#include "rnn.h" + +struct RNNModel { + int input_dense_size; + const DenseLayer *input_dense; + + int vad_gru_size; + const GRULayer *vad_gru; + + int noise_gru_size; + const GRULayer *noise_gru; + + int denoise_gru_size; + const GRULayer *denoise_gru; + + int denoise_output_size; + const DenseLayer *denoise_output; + + int vad_output_size; + const DenseLayer *vad_output; +}; + +struct RNNState { + const RNNModel *model; + float *vad_gru_state; + float *noise_gru_state; + float *denoise_gru_state; +}; + + +#endif diff --git a/TMessagesProj/jni/voip/rnnoise/src/rnn_reader.c b/TMessagesProj/jni/voip/rnnoise/src/rnn_reader.c new file mode 100644 index 000000000..2a031db11 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/rnn_reader.c @@ -0,0 +1,168 @@ +/* Copyright (c) 2018 Gregor Richards */ +/* + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + - Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE FOUNDATION OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include + +#include "rnn.h" +#include "rnn_data.h" +#include "rnnoise.h" + +/* Although these values are the same as in rnn.h, we make them separate to + * avoid accidentally burning internal values into a file format */ +#define F_ACTIVATION_TANH 0 +#define F_ACTIVATION_SIGMOID 1 +#define F_ACTIVATION_RELU 2 + +RNNModel *rnnoise_model_from_file(FILE *f) +{ + int i, in; + + if (fscanf(f, "rnnoise-nu model file version %d\n", &in) != 1 || in != 1) + return NULL; + + RNNModel *ret = calloc(1, sizeof(RNNModel)); + if (!ret) + return NULL; + +#define ALLOC_LAYER(type, name) \ + type *name; \ + name = calloc(1, sizeof(type)); \ + if (!name) { \ + rnnoise_model_free(ret); \ + return NULL; \ + } \ + ret->name = name + + ALLOC_LAYER(DenseLayer, input_dense); + ALLOC_LAYER(GRULayer, vad_gru); + ALLOC_LAYER(GRULayer, noise_gru); + ALLOC_LAYER(GRULayer, denoise_gru); + ALLOC_LAYER(DenseLayer, denoise_output); + ALLOC_LAYER(DenseLayer, vad_output); + +#define INPUT_VAL(name) do { \ + if (fscanf(f, "%d", &in) != 1 || in < 0 || in > 128) { \ + rnnoise_model_free(ret); \ + return NULL; \ + } \ + name = in; \ + } while (0) + +#define INPUT_ACTIVATION(name) do { \ + int activation; \ + INPUT_VAL(activation); \ + switch (activation) { \ + case F_ACTIVATION_SIGMOID: \ + name = ACTIVATION_SIGMOID; \ + break; \ + case F_ACTIVATION_RELU: \ + name = ACTIVATION_RELU; \ + break; \ + default: \ + 
name = ACTIVATION_TANH; \ + } \ + } while (0) + +#define INPUT_ARRAY(name, len) do { \ + rnn_weight *values = malloc((len) * sizeof(rnn_weight)); \ + if (!values) { \ + rnnoise_model_free(ret); \ + return NULL; \ + } \ + name = values; \ + for (i = 0; i < (len); i++) { \ + if (fscanf(f, "%d", &in) != 1) { \ + rnnoise_model_free(ret); \ + return NULL; \ + } \ + values[i] = in; \ + } \ + } while (0) + +#define INPUT_DENSE(name) do { \ + INPUT_VAL(name->nb_inputs); \ + INPUT_VAL(name->nb_neurons); \ + ret->name ## _size = name->nb_neurons; \ + INPUT_ACTIVATION(name->activation); \ + INPUT_ARRAY(name->input_weights, name->nb_inputs * name->nb_neurons); \ + INPUT_ARRAY(name->bias, name->nb_neurons); \ + } while (0) + +#define INPUT_GRU(name) do { \ + INPUT_VAL(name->nb_inputs); \ + INPUT_VAL(name->nb_neurons); \ + ret->name ## _size = name->nb_neurons; \ + INPUT_ACTIVATION(name->activation); \ + INPUT_ARRAY(name->input_weights, name->nb_inputs * name->nb_neurons * 3); \ + INPUT_ARRAY(name->recurrent_weights, name->nb_neurons * name->nb_neurons * 3); \ + INPUT_ARRAY(name->bias, name->nb_neurons * 3); \ + } while (0) + + INPUT_DENSE(input_dense); + INPUT_GRU(vad_gru); + INPUT_GRU(noise_gru); + INPUT_GRU(denoise_gru); + INPUT_DENSE(denoise_output); + INPUT_DENSE(vad_output); + + return ret; +} + +void rnnoise_model_free(RNNModel *model) +{ +#define FREE_MAYBE(ptr) do { if (ptr) free(ptr); } while (0) +#define FREE_DENSE(name) do { \ + if (model->name) { \ + free((void *) model->name->input_weights); \ + free((void *) model->name->bias); \ + free((void *) model->name); \ + } \ + } while (0) +#define FREE_GRU(name) do { \ + if (model->name) { \ + free((void *) model->name->input_weights); \ + free((void *) model->name->recurrent_weights); \ + free((void *) model->name->bias); \ + free((void *) model->name); \ + } \ + } while (0) + + if (!model) + return; + FREE_DENSE(input_dense); + FREE_GRU(vad_gru); + FREE_GRU(noise_gru); + FREE_GRU(denoise_gru); + 
FREE_DENSE(denoise_output); + FREE_DENSE(vad_output); + free(model); +} diff --git a/TMessagesProj/jni/voip/rnnoise/src/tansig_table.h b/TMessagesProj/jni/voip/rnnoise/src/tansig_table.h new file mode 100644 index 000000000..c76f844a7 --- /dev/null +++ b/TMessagesProj/jni/voip/rnnoise/src/tansig_table.h @@ -0,0 +1,45 @@ +/* This file is auto-generated by gen_tables */ + +static const float tansig_table[201] = { +0.000000f, 0.039979f, 0.079830f, 0.119427f, 0.158649f, +0.197375f, 0.235496f, 0.272905f, 0.309507f, 0.345214f, +0.379949f, 0.413644f, 0.446244f, 0.477700f, 0.507977f, +0.537050f, 0.564900f, 0.591519f, 0.616909f, 0.641077f, +0.664037f, 0.685809f, 0.706419f, 0.725897f, 0.744277f, +0.761594f, 0.777888f, 0.793199f, 0.807569f, 0.821040f, +0.833655f, 0.845456f, 0.856485f, 0.866784f, 0.876393f, +0.885352f, 0.893698f, 0.901468f, 0.908698f, 0.915420f, +0.921669f, 0.927473f, 0.932862f, 0.937863f, 0.942503f, +0.946806f, 0.950795f, 0.954492f, 0.957917f, 0.961090f, +0.964028f, 0.966747f, 0.969265f, 0.971594f, 0.973749f, +0.975743f, 0.977587f, 0.979293f, 0.980869f, 0.982327f, +0.983675f, 0.984921f, 0.986072f, 0.987136f, 0.988119f, +0.989027f, 0.989867f, 0.990642f, 0.991359f, 0.992020f, +0.992631f, 0.993196f, 0.993718f, 0.994199f, 0.994644f, +0.995055f, 0.995434f, 0.995784f, 0.996108f, 0.996407f, +0.996682f, 0.996937f, 0.997172f, 0.997389f, 0.997590f, +0.997775f, 0.997946f, 0.998104f, 0.998249f, 0.998384f, +0.998508f, 0.998623f, 0.998728f, 0.998826f, 0.998916f, +0.999000f, 0.999076f, 0.999147f, 0.999213f, 0.999273f, +0.999329f, 0.999381f, 0.999428f, 0.999472f, 0.999513f, +0.999550f, 0.999585f, 0.999617f, 0.999646f, 0.999673f, +0.999699f, 0.999722f, 0.999743f, 0.999763f, 0.999781f, +0.999798f, 0.999813f, 0.999828f, 0.999841f, 0.999853f, +0.999865f, 0.999875f, 0.999885f, 0.999893f, 0.999902f, +0.999909f, 0.999916f, 0.999923f, 0.999929f, 0.999934f, +0.999939f, 0.999944f, 0.999948f, 0.999952f, 0.999956f, +0.999959f, 0.999962f, 0.999965f, 0.999968f, 0.999970f, +0.999973f, 
0.999975f, 0.999977f, 0.999978f, 0.999980f, +0.999982f, 0.999983f, 0.999984f, 0.999986f, 0.999987f, +0.999988f, 0.999989f, 0.999990f, 0.999990f, 0.999991f, +0.999992f, 0.999992f, 0.999993f, 0.999994f, 0.999994f, +0.999994f, 0.999995f, 0.999995f, 0.999996f, 0.999996f, +0.999996f, 0.999997f, 0.999997f, 0.999997f, 0.999997f, +0.999997f, 0.999998f, 0.999998f, 0.999998f, 0.999998f, +0.999998f, 0.999998f, 0.999999f, 0.999999f, 0.999999f, +0.999999f, 0.999999f, 0.999999f, 0.999999f, 0.999999f, +0.999999f, 0.999999f, 0.999999f, 0.999999f, 0.999999f, +1.000000f, 1.000000f, 1.000000f, 1.000000f, 1.000000f, +1.000000f, 1.000000f, 1.000000f, 1.000000f, 1.000000f, +1.000000f, +}; diff --git a/TMessagesProj/jni/voip/tgcalls/CodecSelectHelper.cpp b/TMessagesProj/jni/voip/tgcalls/CodecSelectHelper.cpp index 83f3c5104..7fe0f50d4 100644 --- a/TMessagesProj/jni/voip/tgcalls/CodecSelectHelper.cpp +++ b/TMessagesProj/jni/voip/tgcalls/CodecSelectHelper.cpp @@ -27,7 +27,9 @@ int FormatPriority(const VideoFormat &format, const std::vector &pr static const auto kCodecs = { std::string(cricket::kAv1CodecName), std::string(cricket::kVp9CodecName), +#ifndef WEBRTC_DISABLE_H265 std::string(cricket::kH265CodecName), +#endif std::string(cricket::kH264CodecName), std::string(cricket::kVp8CodecName), }; diff --git a/TMessagesProj/jni/voip/tgcalls/EncryptedConnection.cpp b/TMessagesProj/jni/voip/tgcalls/EncryptedConnection.cpp index 9b801caa1..d350299e4 100644 --- a/TMessagesProj/jni/voip/tgcalls/EncryptedConnection.cpp +++ b/TMessagesProj/jni/voip/tgcalls/EncryptedConnection.cpp @@ -11,8 +11,8 @@ namespace { constexpr auto kSingleMessagePacketSeqBit = (uint32_t(1) << 31); constexpr auto kMessageRequiresAckSeqBit = (uint32_t(1) << 30); constexpr auto kMaxAllowedCounter = std::numeric_limits::max() - & ~kSingleMessagePacketSeqBit - & ~kMessageRequiresAckSeqBit; + & ~kSingleMessagePacketSeqBit + & ~kMessageRequiresAckSeqBit; static_assert(kMaxAllowedCounter < kSingleMessagePacketSeqBit, "bad"); 
static_assert(kMaxAllowedCounter < kMessageRequiresAckSeqBit, "bad"); @@ -35,143 +35,199 @@ static constexpr uint8_t kAckId = uint8_t(-1); static constexpr uint8_t kEmptyId = uint8_t(-2); void AppendSeq(rtc::CopyOnWriteBuffer &buffer, uint32_t seq) { - const auto bytes = rtc::HostToNetwork32(seq); - buffer.AppendData(reinterpret_cast(&bytes), sizeof(bytes)); + const auto bytes = rtc::HostToNetwork32(seq); + buffer.AppendData(reinterpret_cast(&bytes), sizeof(bytes)); } void WriteSeq(void *bytes, uint32_t seq) { - *reinterpret_cast(bytes) = rtc::HostToNetwork32(seq); + *reinterpret_cast(bytes) = rtc::HostToNetwork32(seq); } uint32_t ReadSeq(const void *bytes) { - return rtc::NetworkToHost32(*reinterpret_cast(bytes)); + return rtc::NetworkToHost32(*reinterpret_cast(bytes)); } uint32_t CounterFromSeq(uint32_t seq) { - return seq & ~kSingleMessagePacketSeqBit & ~kMessageRequiresAckSeqBit; + return seq & ~kSingleMessagePacketSeqBit & ~kMessageRequiresAckSeqBit; } absl::nullopt_t LogError( - const char *message, - const std::string &additional = std::string()) { - RTC_LOG(LS_ERROR) << "ERROR! " << message << additional; - return absl::nullopt; + const char *message, + const std::string &additional = std::string()) { + RTC_LOG(LS_ERROR) << "ERROR! 
" << message << additional; + return absl::nullopt; } bool ConstTimeIsDifferent(const void *a, const void *b, size_t size) { - auto ca = reinterpret_cast(a); - auto cb = reinterpret_cast(b); - volatile auto different = false; - for (const auto ce = ca + size; ca != ce; ++ca, ++cb) { - different |= (*ca != *cb); - } - return different; + auto ca = reinterpret_cast(a); + auto cb = reinterpret_cast(b); + volatile auto different = false; + for (const auto ce = ca + size; ca != ce; ++ca, ++cb) { + different |= (*ca != *cb); + } + return different; } } // namespace EncryptedConnection::EncryptedConnection( - Type type, - const EncryptionKey &key, - std::function requestSendService) : + Type type, + const EncryptionKey &key, + std::function requestSendService) : _type(type), _key(key), _delayIntervals(DelayIntervalsByType(type)), _requestSendService(std::move(requestSendService)) { - assert(_key.value != nullptr); + assert(_key.value != nullptr); +} + +absl::optional EncryptedConnection::encryptRawPacket(rtc::CopyOnWriteBuffer const &buffer) { + auto seq = ++_counter; + + rtc::ByteBufferWriter writer; + writer.WriteUInt32(seq); + + auto result = rtc::CopyOnWriteBuffer(); + result.AppendData(writer.Data(), writer.Length()); + + result.AppendData(buffer); + + auto encryptedPacket = encryptPrepared(result); + + rtc::CopyOnWriteBuffer encryptedBuffer; + encryptedBuffer.AppendData(encryptedPacket.bytes.data(), encryptedPacket.bytes.size()); + return encryptedBuffer; +} + +absl::optional EncryptedConnection::decryptRawPacket(rtc::CopyOnWriteBuffer const &buffer) { + if (buffer.size() < 21 || buffer.size() > kMaxIncomingPacketSize) { + return absl::nullopt; + } + + const auto x = (_key.isOutgoing ? 8 : 0) + (_type == Type::Signaling ? 
128 : 0); + const auto key = _key.value->data(); + const auto msgKey = reinterpret_cast(buffer.data()); + const auto encryptedData = msgKey + 16; + const auto dataSize = buffer.size() - 16; + + auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x); + + auto decryptionBuffer = rtc::Buffer(dataSize); + AesProcessCtr( + MemorySpan{ encryptedData, dataSize }, + decryptionBuffer.data(), + std::move(aesKeyIv)); + + const auto msgKeyLarge = ConcatSHA256( + MemorySpan{ key + 88 + x, 32 }, + MemorySpan{ decryptionBuffer.data(), decryptionBuffer.size() }); + if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) { + return absl::nullopt; + } + + const auto incomingSeq = ReadSeq(decryptionBuffer.data()); + const auto incomingCounter = CounterFromSeq(incomingSeq); + if (!registerIncomingCounter(incomingCounter)) { + // We've received that packet already. + return absl::nullopt; + } + + rtc::CopyOnWriteBuffer resultBuffer; + resultBuffer.AppendData(decryptionBuffer.data() + 4, decryptionBuffer.size() - 4); + return resultBuffer; } auto EncryptedConnection::prepareForSending(const Message &message) -> absl::optional { - const auto messageRequiresAck = absl::visit([](const auto &data) { - return std::decay_t::kRequiresAck; - }, message.data); + const auto messageRequiresAck = absl::visit([](const auto &data) { + return std::decay_t::kRequiresAck; + }, message.data); - // If message requires ack, then we can't serialize it as a single - // message packet, because later it may be sent as a part of big packet. 
- const auto singleMessagePacket = !haveAdditionalMessages() && !messageRequiresAck; - const auto maybeSeq = computeNextSeq(messageRequiresAck, singleMessagePacket); - if (!maybeSeq) { - return absl::nullopt; - } - const auto seq = *maybeSeq; - auto serialized = SerializeMessageWithSeq(message, seq, singleMessagePacket); - if (!enoughSpaceInPacket(serialized, 0)) { - return LogError("Too large packet: ", std::to_string(serialized.size())); - } - const auto notYetAckedCopy = messageRequiresAck - ? serialized - : rtc::CopyOnWriteBuffer(); - if (!messageRequiresAck) { - appendAdditionalMessages(serialized); - return encryptPrepared(serialized); - } - const auto type = uint8_t(serialized.cdata()[4]); - const auto sendEnqueued = !_myNotYetAckedMessages.empty(); - if (sendEnqueued) { - // All requiring ack messages should always be sent in order within - // one packet, starting with the least not-yet-acked one. - // So if we still have those, we send an empty message with all - // requiring ack messages that will fit in correct order. - RTC_LOG(LS_INFO) << logHeader() - << "Enqueue SEND:type" << type << "#" << CounterFromSeq(seq); - } else { - RTC_LOG(LS_INFO) << logHeader() - << "Add SEND:type" << type << "#" << CounterFromSeq(seq); - appendAdditionalMessages(serialized); - } - _myNotYetAckedMessages.push_back({ notYetAckedCopy, rtc::TimeMillis() }); - if (!sendEnqueued) { - return encryptPrepared(serialized); - } - for (auto &queued : _myNotYetAckedMessages) { - queued.lastSent = 0; - } - return prepareForSendingService(0); + // If message requires ack, then we can't serialize it as a single + // message packet, because later it may be sent as a part of big packet. 
+ const auto singleMessagePacket = !haveAdditionalMessages() && !messageRequiresAck; + const auto maybeSeq = computeNextSeq(messageRequiresAck, singleMessagePacket); + if (!maybeSeq) { + return absl::nullopt; + } + const auto seq = *maybeSeq; + auto serialized = SerializeMessageWithSeq(message, seq, singleMessagePacket); + if (!enoughSpaceInPacket(serialized, 0)) { + return LogError("Too large packet: ", std::to_string(serialized.size())); + } + const auto notYetAckedCopy = messageRequiresAck + ? serialized + : rtc::CopyOnWriteBuffer(); + if (!messageRequiresAck) { + appendAdditionalMessages(serialized); + return encryptPrepared(serialized); + } + const auto type = uint8_t(serialized.cdata()[4]); + const auto sendEnqueued = !_myNotYetAckedMessages.empty(); + if (sendEnqueued) { + // All requiring ack messages should always be sent in order within + // one packet, starting with the least not-yet-acked one. + // So if we still have those, we send an empty message with all + // requiring ack messages that will fit in correct order. 
+ RTC_LOG(LS_INFO) << logHeader() + << "Enqueue SEND:type" << type << "#" << CounterFromSeq(seq); + } else { + RTC_LOG(LS_INFO) << logHeader() + << "Add SEND:type" << type << "#" << CounterFromSeq(seq); + appendAdditionalMessages(serialized); + } + _myNotYetAckedMessages.push_back({ notYetAckedCopy, rtc::TimeMillis() }); + if (!sendEnqueued) { + return encryptPrepared(serialized); + } + for (auto &queued : _myNotYetAckedMessages) { + queued.lastSent = 0; + } + return prepareForSendingService(0); } auto EncryptedConnection::prepareForSendingService(int cause) -> absl::optional { - if (cause == kServiceCauseAcks) { - _sendAcksTimerActive = false; - } else if (cause == kServiceCauseResend) { - _resendTimerActive = false; - } - if (!haveAdditionalMessages()) { - return absl::nullopt; - } - const auto messageRequiresAck = false; - const auto singleMessagePacket = false; - const auto seq = computeNextSeq(messageRequiresAck, singleMessagePacket); - if (!seq) { - return absl::nullopt; - } - auto serialized = SerializeEmptyMessageWithSeq(*seq); - assert(enoughSpaceInPacket(serialized, 0)); + if (cause == kServiceCauseAcks) { + _sendAcksTimerActive = false; + } else if (cause == kServiceCauseResend) { + _resendTimerActive = false; + } + if (!haveAdditionalMessages()) { + return absl::nullopt; + } + const auto messageRequiresAck = false; + const auto singleMessagePacket = false; + const auto seq = computeNextSeq(messageRequiresAck, singleMessagePacket); + if (!seq) { + return absl::nullopt; + } + auto serialized = SerializeEmptyMessageWithSeq(*seq); + assert(enoughSpaceInPacket(serialized, 0)); - RTC_LOG(LS_INFO) << logHeader() - << "SEND:empty#" << CounterFromSeq(*seq); + RTC_LOG(LS_INFO) << logHeader() + << "SEND:empty#" << CounterFromSeq(*seq); - appendAdditionalMessages(serialized); - return encryptPrepared(serialized); + appendAdditionalMessages(serialized); + return encryptPrepared(serialized); } bool EncryptedConnection::haveAdditionalMessages() const { - return 
!_myNotYetAckedMessages.empty() || !_acksToSendSeqs.empty(); + return !_myNotYetAckedMessages.empty() || !_acksToSendSeqs.empty(); } absl::optional EncryptedConnection::computeNextSeq( - bool messageRequiresAck, - bool singleMessagePacket) { - if (messageRequiresAck && _myNotYetAckedMessages.size() >= kNotAckedMessagesLimit) { - return LogError("Too many not ACKed messages."); - } else if (_counter == kMaxAllowedCounter) { - return LogError("Outgoing packet limit reached."); - } + bool messageRequiresAck, + bool singleMessagePacket) { + if (messageRequiresAck && _myNotYetAckedMessages.size() >= kNotAckedMessagesLimit) { + return LogError("Too many not ACKed messages."); + } else if (_counter == kMaxAllowedCounter) { + return LogError("Outgoing packet limit reached."); + } - return (++_counter) - | (singleMessagePacket ? kSingleMessagePacketSeqBit : 0) - | (messageRequiresAck ? kMessageRequiresAckSeqBit : 0); + return (++_counter) + | (singleMessagePacket ? kSingleMessagePacketSeqBit : 0) + | (messageRequiresAck ? 
kMessageRequiresAckSeqBit : 0); } size_t EncryptedConnection::packetLimit() const { @@ -184,337 +240,337 @@ size_t EncryptedConnection::packetLimit() const { } bool EncryptedConnection::enoughSpaceInPacket(const rtc::CopyOnWriteBuffer &buffer, size_t amount) const { - const auto limit = packetLimit(); - return (amount < limit) - && (16 + buffer.size() + amount <= limit); + const auto limit = packetLimit(); + return (amount < limit) + && (16 + buffer.size() + amount <= limit); } void EncryptedConnection::appendAcksToSend(rtc::CopyOnWriteBuffer &buffer) { - auto i = _acksToSendSeqs.begin(); - while ((i != _acksToSendSeqs.end()) - && enoughSpaceInPacket( - buffer, - kAckSerializedSize)) { + auto i = _acksToSendSeqs.begin(); + while ((i != _acksToSendSeqs.end()) + && enoughSpaceInPacket( + buffer, + kAckSerializedSize)) { - RTC_LOG(LS_INFO) << logHeader() - << "Add ACK#" << CounterFromSeq(*i); + RTC_LOG(LS_INFO) << logHeader() + << "Add ACK#" << CounterFromSeq(*i); - AppendSeq(buffer, *i); - buffer.AppendData(&kAckId, 1); - ++i; - } - _acksToSendSeqs.erase(_acksToSendSeqs.begin(), i); - for (const auto seq : _acksToSendSeqs) { - RTC_LOG(LS_INFO) << logHeader() - << "Skip ACK#" << CounterFromSeq(seq) - << " (no space, length: " << kAckSerializedSize << ", already: " << buffer.size() << ")"; - } + AppendSeq(buffer, *i); + buffer.AppendData(&kAckId, 1); + ++i; + } + _acksToSendSeqs.erase(_acksToSendSeqs.begin(), i); + for (const auto seq : _acksToSendSeqs) { + RTC_LOG(LS_INFO) << logHeader() + << "Skip ACK#" << CounterFromSeq(seq) + << " (no space, length: " << kAckSerializedSize << ", already: " << buffer.size() << ")"; + } } size_t EncryptedConnection::fullNotAckedLength() const { - assert(_myNotYetAckedMessages.size() < kNotAckedMessagesLimit); + assert(_myNotYetAckedMessages.size() < kNotAckedMessagesLimit); - auto result = size_t(); - for (const auto &message : _myNotYetAckedMessages) { - result += message.data.size(); - } - return result; + auto result = size_t(); + 
for (const auto &message : _myNotYetAckedMessages) { + result += message.data.size(); + } + return result; } void EncryptedConnection::appendAdditionalMessages(rtc::CopyOnWriteBuffer &buffer) { - appendAcksToSend(buffer); + appendAcksToSend(buffer); - if (_myNotYetAckedMessages.empty()) { - return; - } + if (_myNotYetAckedMessages.empty()) { + return; + } - const auto now = rtc::TimeMillis(); - auto someWereNotAdded = false; - for (auto &resending : _myNotYetAckedMessages) { - const auto sent = resending.lastSent; - const auto when = sent - ? (sent + _delayIntervals.minDelayBeforeMessageResend) - : 0; + const auto now = rtc::TimeMillis(); + auto someWereNotAdded = false; + for (auto &resending : _myNotYetAckedMessages) { + const auto sent = resending.lastSent; + const auto when = sent + ? (sent + _delayIntervals.minDelayBeforeMessageResend) + : 0; - assert(resending.data.size() >= 5); - const auto counter = CounterFromSeq(ReadSeq(resending.data.data())); - const auto type = uint8_t(resending.data.data()[4]); - if (when > now) { - RTC_LOG(LS_INFO) << logHeader() - << "Skip RESEND:type" << type << "#" << counter - << " (wait " << (when - now) << "ms)."; - break; - } else if (enoughSpaceInPacket(buffer, resending.data.size())) { - RTC_LOG(LS_INFO) << logHeader() - << "Add RESEND:type" << type << "#" << counter; - buffer.AppendData(resending.data); - resending.lastSent = now; - } else { - RTC_LOG(LS_INFO) << logHeader() - << "Skip RESEND:type" << type << "#" << counter - << " (no space, length: " << resending.data.size() << ", already: " << buffer.size() << ")"; - break; - } - } - if (!_resendTimerActive) { - _resendTimerActive = true; - _requestSendService( - _delayIntervals.maxDelayBeforeMessageResend, - kServiceCauseResend); - } + assert(resending.data.size() >= 5); + const auto counter = CounterFromSeq(ReadSeq(resending.data.data())); + const auto type = uint8_t(resending.data.data()[4]); + if (when > now) { + RTC_LOG(LS_INFO) << logHeader() + << "Skip RESEND:type" 
<< type << "#" << counter + << " (wait " << (when - now) << "ms)."; + break; + } else if (enoughSpaceInPacket(buffer, resending.data.size())) { + RTC_LOG(LS_INFO) << logHeader() + << "Add RESEND:type" << type << "#" << counter; + buffer.AppendData(resending.data); + resending.lastSent = now; + } else { + RTC_LOG(LS_INFO) << logHeader() + << "Skip RESEND:type" << type << "#" << counter + << " (no space, length: " << resending.data.size() << ", already: " << buffer.size() << ")"; + break; + } + } + if (!_resendTimerActive) { + _resendTimerActive = true; + _requestSendService( + _delayIntervals.maxDelayBeforeMessageResend, + kServiceCauseResend); + } } auto EncryptedConnection::encryptPrepared(const rtc::CopyOnWriteBuffer &buffer) -> EncryptedPacket { - auto result = EncryptedPacket(); - result.counter = CounterFromSeq(ReadSeq(buffer.data())); - result.bytes.resize(16 + buffer.size()); + auto result = EncryptedPacket(); + result.counter = CounterFromSeq(ReadSeq(buffer.data())); + result.bytes.resize(16 + buffer.size()); - const auto x = (_key.isOutgoing ? 0 : 8) + (_type == Type::Signaling ? 128 : 0); - const auto key = _key.value->data(); + const auto x = (_key.isOutgoing ? 0 : 8) + (_type == Type::Signaling ? 
128 : 0); + const auto key = _key.value->data(); - const auto msgKeyLarge = ConcatSHA256( - MemorySpan{ key + 88 + x, 32 }, - MemorySpan{ buffer.data(), buffer.size() }); - const auto msgKey = result.bytes.data(); - memcpy(msgKey, msgKeyLarge.data() + 8, 16); + const auto msgKeyLarge = ConcatSHA256( + MemorySpan{ key + 88 + x, 32 }, + MemorySpan{ buffer.data(), buffer.size() }); + const auto msgKey = result.bytes.data(); + memcpy(msgKey, msgKeyLarge.data() + 8, 16); - auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x); + auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x); - AesProcessCtr( - MemorySpan{ buffer.data(), buffer.size() }, - result.bytes.data() + 16, - std::move(aesKeyIv)); + AesProcessCtr( + MemorySpan{ buffer.data(), buffer.size() }, + result.bytes.data() + 16, + std::move(aesKeyIv)); - return result; + return result; } bool EncryptedConnection::registerIncomingCounter(uint32_t incomingCounter) { - auto &list = _largestIncomingCounters; + auto &list = _largestIncomingCounters; - const auto position = std::lower_bound(list.begin(), list.end(), incomingCounter); - const auto largest = list.empty() ? 0 : list.back(); - if (position != list.end() && *position == incomingCounter) { - // The packet is in the list already. - return false; - } else if (incomingCounter + kKeepIncomingCountersCount <= largest) { - // The packet is too old. - return false; - } - const auto eraseTill = std::find_if(list.begin(), list.end(), [&](uint32_t counter) { - return (counter + kKeepIncomingCountersCount > incomingCounter); - }); - const auto eraseCount = eraseTill - list.begin(); - const auto positionIndex = (position - list.begin()) - eraseCount; - list.erase(list.begin(), eraseTill); + const auto position = std::lower_bound(list.begin(), list.end(), incomingCounter); + const auto largest = list.empty() ? 0 : list.back(); + if (position != list.end() && *position == incomingCounter) { + // The packet is in the list already. 
+ return false; + } else if (incomingCounter + kKeepIncomingCountersCount <= largest) { + // The packet is too old. + return false; + } + const auto eraseTill = std::find_if(list.begin(), list.end(), [&](uint32_t counter) { + return (counter + kKeepIncomingCountersCount > incomingCounter); + }); + const auto eraseCount = eraseTill - list.begin(); + const auto positionIndex = (position - list.begin()) - eraseCount; + list.erase(list.begin(), eraseTill); - assert(positionIndex >= 0 && positionIndex <= list.size()); - list.insert(list.begin() + positionIndex, incomingCounter); - return true; + assert(positionIndex >= 0 && positionIndex <= list.size()); + list.insert(list.begin() + positionIndex, incomingCounter); + return true; } auto EncryptedConnection::handleIncomingPacket(const char *bytes, size_t size) -> absl::optional { - if (size < 21 || size > kMaxIncomingPacketSize) { - return LogError("Bad incoming packet size: ", std::to_string(size)); - } + if (size < 21 || size > kMaxIncomingPacketSize) { + return LogError("Bad incoming packet size: ", std::to_string(size)); + } - const auto x = (_key.isOutgoing ? 8 : 0) + (_type == Type::Signaling ? 128 : 0); - const auto key = _key.value->data(); - const auto msgKey = reinterpret_cast(bytes); - const auto encryptedData = msgKey + 16; - const auto dataSize = size - 16; + const auto x = (_key.isOutgoing ? 8 : 0) + (_type == Type::Signaling ? 
128 : 0); + const auto key = _key.value->data(); + const auto msgKey = reinterpret_cast(bytes); + const auto encryptedData = msgKey + 16; + const auto dataSize = size - 16; - auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x); + auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x); - auto decryptionBuffer = rtc::Buffer(dataSize); - AesProcessCtr( - MemorySpan{ encryptedData, dataSize }, - decryptionBuffer.data(), - std::move(aesKeyIv)); + auto decryptionBuffer = rtc::Buffer(dataSize); + AesProcessCtr( + MemorySpan{ encryptedData, dataSize }, + decryptionBuffer.data(), + std::move(aesKeyIv)); - const auto msgKeyLarge = ConcatSHA256( - MemorySpan{ key + 88 + x, 32 }, - MemorySpan{ decryptionBuffer.data(), decryptionBuffer.size() }); - if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) { - return LogError("Bad incoming data hash."); - } + const auto msgKeyLarge = ConcatSHA256( + MemorySpan{ key + 88 + x, 32 }, + MemorySpan{ decryptionBuffer.data(), decryptionBuffer.size() }); + if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) { + return LogError("Bad incoming data hash."); + } - const auto incomingSeq = ReadSeq(decryptionBuffer.data()); - const auto incomingCounter = CounterFromSeq(incomingSeq); - if (!registerIncomingCounter(incomingCounter)) { - // We've received that packet already. - return LogError("Already handled packet received.", std::to_string(incomingCounter)); - } - return processPacket(decryptionBuffer, incomingSeq); + const auto incomingSeq = ReadSeq(decryptionBuffer.data()); + const auto incomingCounter = CounterFromSeq(incomingSeq); + if (!registerIncomingCounter(incomingCounter)) { + // We've received that packet already. 
+ return LogError("Already handled packet received.", std::to_string(incomingCounter)); + } + return processPacket(decryptionBuffer, incomingSeq); } auto EncryptedConnection::processPacket( - const rtc::Buffer &fullBuffer, - uint32_t packetSeq) + const rtc::Buffer &fullBuffer, + uint32_t packetSeq) -> absl::optional { - assert(fullBuffer.size() >= 5); + assert(fullBuffer.size() >= 5); - auto additionalMessage = false; - auto firstMessageRequiringAck = true; - auto newRequiringAckReceived = false; + auto additionalMessage = false; + auto firstMessageRequiringAck = true; + auto newRequiringAckReceived = false; - auto currentSeq = packetSeq; - auto currentCounter = CounterFromSeq(currentSeq); - rtc::ByteBufferReader reader( - reinterpret_cast(fullBuffer.data() + 4), // Skip seq. - fullBuffer.size() - 4); + auto currentSeq = packetSeq; + auto currentCounter = CounterFromSeq(currentSeq); + rtc::ByteBufferReader reader( + reinterpret_cast(fullBuffer.data() + 4), // Skip seq. + fullBuffer.size() - 4); - auto result = absl::optional(); - while (true) { - const auto type = uint8_t(*reader.Data()); - const auto singleMessagePacket = ((currentSeq & kSingleMessagePacketSeqBit) != 0); - if (singleMessagePacket && additionalMessage) { - return LogError("Single message packet bit in not first message."); - } + auto result = absl::optional(); + while (true) { + const auto type = uint8_t(*reader.Data()); + const auto singleMessagePacket = ((currentSeq & kSingleMessagePacketSeqBit) != 0); + if (singleMessagePacket && additionalMessage) { + return LogError("Single message packet bit in not first message."); + } - if (type == kEmptyId) { - if (additionalMessage) { - return LogError("Empty message should be only the first one in the packet."); - } - RTC_LOG(LS_INFO) << logHeader() - << "Got RECV:empty" << "#" << currentCounter; - reader.Consume(1); - } else if (type == kAckId) { - if (!additionalMessage) { - return LogError("Ack message must not be the first one in the packet."); - } - 
ackMyMessage(currentSeq); - reader.Consume(1); - } else if (auto message = DeserializeMessage(reader, singleMessagePacket)) { - const auto messageRequiresAck = ((currentSeq & kMessageRequiresAckSeqBit) != 0); - const auto skipMessage = messageRequiresAck - ? !registerSentAck(currentCounter, firstMessageRequiringAck) - : (additionalMessage && !registerIncomingCounter(currentCounter)); - if (messageRequiresAck) { - firstMessageRequiringAck = false; - if (!skipMessage) { - newRequiringAckReceived = true; - } - sendAckPostponed(currentSeq); - RTC_LOG(LS_INFO) << logHeader() - << (skipMessage ? "Repeated RECV:type" : "Got RECV:type") << type << "#" << currentCounter; - } - if (!skipMessage) { - appendReceivedMessage(result, std::move(*message), currentSeq); - } - } else { - return LogError("Could not parse message from packet, type: ", std::to_string(type)); - } - if (!reader.Length()) { - break; - } else if (singleMessagePacket) { - return LogError("Single message didn't fill the entire packet."); - } else if (reader.Length() < 5) { - return LogError("Bad remaining data size: ", std::to_string(reader.Length())); - } - const auto success = reader.ReadUInt32(¤tSeq); - assert(success); - currentCounter = CounterFromSeq(currentSeq); + if (type == kEmptyId) { + if (additionalMessage) { + return LogError("Empty message should be only the first one in the packet."); + } + RTC_LOG(LS_INFO) << logHeader() + << "Got RECV:empty" << "#" << currentCounter; + reader.Consume(1); + } else if (type == kAckId) { + if (!additionalMessage) { + return LogError("Ack message must not be the first one in the packet."); + } + ackMyMessage(currentSeq); + reader.Consume(1); + } else if (auto message = DeserializeMessage(reader, singleMessagePacket)) { + const auto messageRequiresAck = ((currentSeq & kMessageRequiresAckSeqBit) != 0); + const auto skipMessage = messageRequiresAck + ? 
!registerSentAck(currentCounter, firstMessageRequiringAck) + : (additionalMessage && !registerIncomingCounter(currentCounter)); + if (messageRequiresAck) { + firstMessageRequiringAck = false; + if (!skipMessage) { + newRequiringAckReceived = true; + } + sendAckPostponed(currentSeq); + RTC_LOG(LS_INFO) << logHeader() + << (skipMessage ? "Repeated RECV:type" : "Got RECV:type") << type << "#" << currentCounter; + } + if (!skipMessage) { + appendReceivedMessage(result, std::move(*message), currentSeq); + } + } else { + return LogError("Could not parse message from packet, type: ", std::to_string(type)); + } + if (!reader.Length()) { + break; + } else if (singleMessagePacket) { + return LogError("Single message didn't fill the entire packet."); + } else if (reader.Length() < 5) { + return LogError("Bad remaining data size: ", std::to_string(reader.Length())); + } + const auto success = reader.ReadUInt32(¤tSeq); + assert(success); + currentCounter = CounterFromSeq(currentSeq); - additionalMessage = true; - } + additionalMessage = true; + } - if (!_acksToSendSeqs.empty()) { - if (newRequiringAckReceived) { - _requestSendService(0, 0); - } else if (!_sendAcksTimerActive) { - _sendAcksTimerActive = true; - _requestSendService( - _delayIntervals.maxDelayBeforeAckResend, - kServiceCauseAcks); - } - } + if (!_acksToSendSeqs.empty()) { + if (newRequiringAckReceived) { + _requestSendService(0, 0); + } else if (!_sendAcksTimerActive) { + _sendAcksTimerActive = true; + _requestSendService( + _delayIntervals.maxDelayBeforeAckResend, + kServiceCauseAcks); + } + } - return result; + return result; } void EncryptedConnection::appendReceivedMessage( - absl::optional &to, - Message &&message, - uint32_t incomingSeq) { - auto decrypted = DecryptedMessage{ - std::move(message), - CounterFromSeq(incomingSeq) - }; - if (to) { - to->additional.push_back(std::move(decrypted)); - } else { - to = DecryptedPacket{ std::move(decrypted) }; - } + absl::optional &to, + Message &&message, + uint32_t 
incomingSeq) { + auto decrypted = DecryptedMessage{ + std::move(message), + CounterFromSeq(incomingSeq) + }; + if (to) { + to->additional.push_back(std::move(decrypted)); + } else { + to = DecryptedPacket{ std::move(decrypted) }; + } } const char *EncryptedConnection::logHeader() const { - return (_type == Type::Signaling) ? "(signaling) " : "(transport) "; + return (_type == Type::Signaling) ? "(signaling) " : "(transport) "; } bool EncryptedConnection::registerSentAck(uint32_t counter, bool firstInPacket) { - auto &list = _acksSentCounters; + auto &list = _acksSentCounters; - const auto position = std::lower_bound(list.begin(), list.end(), counter); - const auto already = (position != list.end()) && (*position == counter); + const auto position = std::lower_bound(list.begin(), list.end(), counter); + const auto already = (position != list.end()) && (*position == counter); - const auto was = list; - if (firstInPacket) { - list.erase(list.begin(), position); - if (!already) { - list.insert(list.begin(), counter); - } - } else if (!already) { - list.insert(position, counter); - } - return !already; + const auto was = list; + if (firstInPacket) { + list.erase(list.begin(), position); + if (!already) { + list.insert(list.begin(), counter); + } + } else if (!already) { + list.insert(position, counter); + } + return !already; } void EncryptedConnection::sendAckPostponed(uint32_t incomingSeq) { - auto &list = _acksToSendSeqs; - const auto already = std::find(list.begin(), list.end(), incomingSeq); - if (already == list.end()) { - list.push_back(incomingSeq); - } + auto &list = _acksToSendSeqs; + const auto already = std::find(list.begin(), list.end(), incomingSeq); + if (already == list.end()) { + list.push_back(incomingSeq); + } } void EncryptedConnection::ackMyMessage(uint32_t seq) { - auto type = uint8_t(0); - auto &list = _myNotYetAckedMessages; - for (auto i = list.begin(), e = list.end(); i != e; ++i) { - assert(i->data.size() >= 5); - if (ReadSeq(i->data.cdata()) 
== seq) { - type = uint8_t(i->data.cdata()[4]); - list.erase(i); - break; - } - } - RTC_LOG(LS_INFO) << logHeader() - << (type ? "Got ACK:type" + std::to_string(type) + "#" : "Repeated ACK#") - << CounterFromSeq(seq); + auto type = uint8_t(0); + auto &list = _myNotYetAckedMessages; + for (auto i = list.begin(), e = list.end(); i != e; ++i) { + assert(i->data.size() >= 5); + if (ReadSeq(i->data.cdata()) == seq) { + type = uint8_t(i->data.cdata()[4]); + list.erase(i); + break; + } + } + RTC_LOG(LS_INFO) << logHeader() + << (type ? "Got ACK:type" + std::to_string(type) + "#" : "Repeated ACK#") + << CounterFromSeq(seq); } auto EncryptedConnection::DelayIntervalsByType(Type type) -> DelayIntervals { - auto result = DelayIntervals(); - const auto signaling = (type == Type::Signaling); + auto result = DelayIntervals(); + const auto signaling = (type == Type::Signaling); - // Don't resend faster than min delay even if we have a packet we can attach to. - result.minDelayBeforeMessageResend = signaling ? 3000 : 300; + // Don't resend faster than min delay even if we have a packet we can attach to. + result.minDelayBeforeMessageResend = signaling ? 3000 : 300; - // When max delay elapsed we resend anyway, in a dedicated packet. - result.maxDelayBeforeMessageResend = signaling ? 5000 : 1000; - result.maxDelayBeforeAckResend = signaling ? 5000 : 1000; + // When max delay elapsed we resend anyway, in a dedicated packet. + result.maxDelayBeforeMessageResend = signaling ? 5000 : 1000; + result.maxDelayBeforeAckResend = signaling ? 
5000 : 1000; - return result; + return result; } rtc::CopyOnWriteBuffer EncryptedConnection::SerializeEmptyMessageWithSeq(uint32_t seq) { - auto result = rtc::CopyOnWriteBuffer(5); - const auto bytes = result.data(); - WriteSeq(bytes, seq); - bytes[4] = kEmptyId; - return result; + auto result = rtc::CopyOnWriteBuffer(5); + auto bytes = result.MutableData(); + WriteSeq(bytes, seq); + bytes[4] = kEmptyId; + return result; } } // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/EncryptedConnection.h b/TMessagesProj/jni/voip/tgcalls/EncryptedConnection.h index bc3bf3184..5c3e653c0 100644 --- a/TMessagesProj/jni/voip/tgcalls/EncryptedConnection.h +++ b/TMessagesProj/jni/voip/tgcalls/EncryptedConnection.h @@ -12,75 +12,78 @@ namespace tgcalls { class EncryptedConnection final { public: - enum class Type : uint8_t { - Signaling, - Transport, - }; - EncryptedConnection( - Type type, - const EncryptionKey &key, - std::function requestSendService); + enum class Type : uint8_t { + Signaling, + Transport, + }; + EncryptedConnection( + Type type, + const EncryptionKey &key, + std::function requestSendService); - struct EncryptedPacket { - std::vector bytes; - uint32_t counter = 0; - }; - absl::optional prepareForSending(const Message &message); - absl::optional prepareForSendingService(int cause); + struct EncryptedPacket { + std::vector bytes; + uint32_t counter = 0; + }; + absl::optional prepareForSending(const Message &message); + absl::optional prepareForSendingService(int cause); - struct DecryptedPacket { - DecryptedMessage main; - std::vector additional; - }; - absl::optional handleIncomingPacket(const char *bytes, size_t size); + struct DecryptedPacket { + DecryptedMessage main; + std::vector additional; + }; + absl::optional handleIncomingPacket(const char *bytes, size_t size); + + absl::optional encryptRawPacket(rtc::CopyOnWriteBuffer const &buffer); + absl::optional decryptRawPacket(rtc::CopyOnWriteBuffer const &buffer); private: - struct DelayIntervals 
{ - // In milliseconds. - int minDelayBeforeMessageResend = 0; - int maxDelayBeforeMessageResend = 0; - int maxDelayBeforeAckResend = 0; - }; - struct MessageForResend { - rtc::CopyOnWriteBuffer data; - int64_t lastSent = 0; - }; + struct DelayIntervals { + // In milliseconds. + int minDelayBeforeMessageResend = 0; + int maxDelayBeforeMessageResend = 0; + int maxDelayBeforeAckResend = 0; + }; + struct MessageForResend { + rtc::CopyOnWriteBuffer data; + int64_t lastSent = 0; + }; - bool enoughSpaceInPacket(const rtc::CopyOnWriteBuffer &buffer, size_t amount) const; - size_t packetLimit() const; - size_t fullNotAckedLength() const; - void appendAcksToSend(rtc::CopyOnWriteBuffer &buffer); - void appendAdditionalMessages(rtc::CopyOnWriteBuffer &buffer); - EncryptedPacket encryptPrepared(const rtc::CopyOnWriteBuffer &buffer); - bool registerIncomingCounter(uint32_t incomingCounter); - absl::optional processPacket(const rtc::Buffer &fullBuffer, uint32_t packetSeq); - bool registerSentAck(uint32_t counter, bool firstInPacket); - void ackMyMessage(uint32_t counter); - void sendAckPostponed(uint32_t incomingSeq); - bool haveAdditionalMessages() const; - absl::optional computeNextSeq(bool messageRequiresAck, bool singleMessagePacket); - void appendReceivedMessage( - absl::optional &to, - Message &&message, - uint32_t incomingSeq); + bool enoughSpaceInPacket(const rtc::CopyOnWriteBuffer &buffer, size_t amount) const; + size_t packetLimit() const; + size_t fullNotAckedLength() const; + void appendAcksToSend(rtc::CopyOnWriteBuffer &buffer); + void appendAdditionalMessages(rtc::CopyOnWriteBuffer &buffer); + EncryptedPacket encryptPrepared(const rtc::CopyOnWriteBuffer &buffer); + bool registerIncomingCounter(uint32_t incomingCounter); + absl::optional processPacket(const rtc::Buffer &fullBuffer, uint32_t packetSeq); + bool registerSentAck(uint32_t counter, bool firstInPacket); + void ackMyMessage(uint32_t counter); + void sendAckPostponed(uint32_t incomingSeq); + bool 
haveAdditionalMessages() const; + absl::optional computeNextSeq(bool messageRequiresAck, bool singleMessagePacket); + void appendReceivedMessage( + absl::optional &to, + Message &&message, + uint32_t incomingSeq); - const char *logHeader() const; + const char *logHeader() const; - static DelayIntervals DelayIntervalsByType(Type type); - static rtc::CopyOnWriteBuffer SerializeEmptyMessageWithSeq(uint32_t seq); + static DelayIntervals DelayIntervalsByType(Type type); + static rtc::CopyOnWriteBuffer SerializeEmptyMessageWithSeq(uint32_t seq); - Type _type = Type(); - EncryptionKey _key; - uint32_t _counter = 0; - DelayIntervals _delayIntervals; - std::vector _largestIncomingCounters; - std::vector _ackedIncomingCounters; - std::vector _acksToSendSeqs; - std::vector _acksSentCounters; - std::vector _myNotYetAckedMessages; - std::function _requestSendService; - bool _resendTimerActive = false; - bool _sendAcksTimerActive = false; + Type _type = Type(); + EncryptionKey _key; + uint32_t _counter = 0; + DelayIntervals _delayIntervals; + std::vector _largestIncomingCounters; + std::vector _ackedIncomingCounters; + std::vector _acksToSendSeqs; + std::vector _acksSentCounters; + std::vector _myNotYetAckedMessages; + std::function _requestSendService; + bool _resendTimerActive = false; + bool _sendAcksTimerActive = false; }; diff --git a/TMessagesProj/jni/voip/tgcalls/FakeAudioDeviceModule.cpp b/TMessagesProj/jni/voip/tgcalls/FakeAudioDeviceModule.cpp index 787cd6a4c..8b24984ba 100644 --- a/TMessagesProj/jni/voip/tgcalls/FakeAudioDeviceModule.cpp +++ b/TMessagesProj/jni/voip/tgcalls/FakeAudioDeviceModule.cpp @@ -14,14 +14,17 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD public: static rtc::scoped_refptr Create(webrtc::TaskQueueFactory* taskQueueFactory, std::shared_ptr renderer, + std::shared_ptr recorder, FakeAudioDeviceModule::Options options) { return rtc::scoped_refptr( - new rtc::RefCountedObject(taskQueueFactory, options, 
std::move(renderer))); + new rtc::RefCountedObject(taskQueueFactory, options, std::move(renderer), std::move(recorder))); } FakeAudioDeviceModuleImpl(webrtc::TaskQueueFactory*, FakeAudioDeviceModule::Options options, - std::shared_ptr renderer) - : num_channels_{options.num_channels}, samples_per_sec_{options.samples_per_sec}, scheduler_(options.scheduler_), renderer_(std::move(renderer)) { + std::shared_ptr renderer, + std::shared_ptr recorder) + : num_channels_{options.num_channels}, samples_per_sec_{options.samples_per_sec}, scheduler_(options.scheduler_), + renderer_(std::move(renderer)), recorder_(std::move(recorder)) { if (!scheduler_) { scheduler_ = [](auto f) { std::thread([f = std::move(f)]() { @@ -80,14 +83,16 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD } int32_t RegisterAudioCallback(webrtc::AudioTransport* callback) override { - std::unique_lock lock(mutex_); + std::unique_lock lock(render_mutex_); audio_callback_ = callback; return 0; } int32_t StartPlayout() override { - std::unique_lock lock(mutex_); - RTC_CHECK(renderer_); + std::unique_lock lock(render_mutex_); + if (!renderer_) { + return 0; + } if (rendering_) { return 0; } @@ -105,8 +110,8 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD } need_rendering_ = false; - std::unique_lock lock(mutex_); - cond_.wait(lock, [this]{ return !rendering_; }); + std::unique_lock lock(render_mutex_); + render_cond_.wait(lock, [this]{ return !rendering_; }); return 0; } @@ -115,20 +120,50 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD return rendering_; } - private: + int32_t StartRecording() override { + std::unique_lock lock(record_mutex_); + if (!recorder_) { + return 0; + } + if (recording_) { + return 0; + } + need_recording_ = true; + recording_ = true; + scheduler_([this]{ + return Record() / 1000000.0; + }); + return 0; + } + int32_t StopRecording() override { + if (!recording_) { + return 0; 
+ } + + need_recording_ = false; + std::unique_lock lock(record_mutex_); + record_cond_.wait(lock, [this]{ return !recording_; }); + + return 0; + } + bool Recording() const override { + return recording_; + } + +private: int32_t Render() { - std::unique_lock lock(mutex_); + std::unique_lock lock(render_mutex_); if (!need_rendering_) { rendering_ = false; - cond_.notify_all(); + render_cond_.notify_all(); return -1; } size_t samples_out = 0; int64_t elapsed_time_ms = -1; int64_t ntp_time_ms = -1; - size_t bytes_per_sample = 2; + size_t bytes_per_sample = 2 * num_channels_; RTC_CHECK(audio_callback_); if (renderer_) { @@ -157,31 +192,66 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD return wait_for_us; } + int32_t Record() { + std::unique_lock lock(record_mutex_); + if (!need_recording_) { + recording_ = false; + record_cond_.notify_all(); + return -1; + } + + auto frame = recorder_->Record(); + if (frame.num_samples != 0) { + uint32_t new_mic_level; + audio_callback_->RecordedDataIsAvailable(frame.audio_samples, + frame.num_samples, frame.bytes_per_sample, frame.num_channels, + frame.samples_per_sec, 0, 0, 0, false, new_mic_level); + } + + int32_t wait_for_us = -1; + if (recorder_) { + wait_for_us = recorder_->WaitForUs(); + } + return wait_for_us; + } + size_t num_channels_; const uint32_t samples_per_sec_; size_t samples_per_frame_{0}; std::function scheduler_; - mutable std::mutex mutex_; + mutable std::mutex render_mutex_; std::atomic need_rendering_{false}; std::atomic rendering_{false}; - std::condition_variable cond_; + std::condition_variable render_cond_; std::unique_ptr renderThread_; + mutable std::mutex record_mutex_; + std::atomic need_recording_{false}; + std::atomic recording_{false}; + std::condition_variable record_cond_; + std::unique_ptr recordThread_; + + webrtc::AudioTransport* audio_callback_{nullptr}; const std::shared_ptr renderer_; + const std::shared_ptr recorder_; std::vector playout_buffer_; }; 
std::function(webrtc::TaskQueueFactory*)> FakeAudioDeviceModule::Creator( - std::shared_ptr renderer, Options options) { + std::shared_ptr renderer, std::shared_ptr recorder, Options options) { bool is_renderer_empty = bool(renderer); auto boxed_renderer = std::make_shared>(std::move(renderer)); + bool is_recorder_empty = bool(recorder); + auto boxed_recorder = std::make_shared>(std::move(recorder)); return - [boxed_renderer = std::move(boxed_renderer), is_renderer_empty, options](webrtc::TaskQueueFactory* task_factory) { + [boxed_renderer = std::move(boxed_renderer), is_renderer_empty, + boxed_recorder = std::move(boxed_recorder), is_recorder_empty, options](webrtc::TaskQueueFactory* task_factory) { RTC_CHECK(is_renderer_empty == bool(*boxed_renderer)); // call only once if renderer exists - return FakeAudioDeviceModuleImpl::Create(task_factory, std::move(*boxed_renderer), options); + RTC_CHECK(is_recorder_empty == bool(*boxed_recorder)); // call only once if recorder exists + return FakeAudioDeviceModuleImpl::Create(task_factory, std::move(*boxed_renderer), std::move(*boxed_recorder), options); }; } } // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/FakeAudioDeviceModule.h b/TMessagesProj/jni/voip/tgcalls/FakeAudioDeviceModule.h index b0d5ffe8e..0cd9c7a6d 100644 --- a/TMessagesProj/jni/voip/tgcalls/FakeAudioDeviceModule.h +++ b/TMessagesProj/jni/voip/tgcalls/FakeAudioDeviceModule.h @@ -32,6 +32,14 @@ class FakeAudioDeviceModule { return 10000; } }; + class Recorder { + public: + virtual ~Recorder() = default; + virtual AudioFrame Record() = 0; + virtual int32_t WaitForUs() { + return 10000; + } + }; using Task = std::function; struct Options { uint32_t samples_per_sec{48000}; @@ -39,6 +47,8 @@ class FakeAudioDeviceModule { std::function scheduler_; }; static std::function(webrtc::TaskQueueFactory *)> Creator( - std::shared_ptr renderer, Options options); + std::shared_ptr renderer, + std::shared_ptr recorder, + Options options); }; } // namespace 
tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.cpp b/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.cpp new file mode 100644 index 000000000..67c827f19 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.cpp @@ -0,0 +1,173 @@ +#include "FakeVideoTrackSource.h" + +#include "api/video/i420_buffer.h" +#include "media/base/video_broadcaster.h" +#include "pc/video_track_source.h" + +#include "libyuv.h" + +#include + +namespace tgcalls { + +int WIDTH = 1280; +int HEIGHT = 720; + +class ChessFrameSource : public FrameSource { +public: + ChessFrameSource() { + int N = 100; + frames_.reserve(N); + for (int i = 0; i < N; i++) { + frames_.push_back(genFrame(i, N)); + } + } + Info info() const override{ + return Info{WIDTH, HEIGHT}; + } +// webrtc::VideoFrame next_frame() override { +// i = (i + 1) % frames_.size(); +// return frames_[i].frame; +// } + void next_frame_rgb0(char *buf, double *pts) override { + *pts = 0; + i = (i + 1) % frames_.size(); + size_t size = WIDTH * HEIGHT * 4; + memcpy(buf, frames_[i].rbga.get(), size); + } + +private: + struct Frame { + webrtc::VideoFrame frame; + std::unique_ptr rbga; + }; + std::vector frames_; + size_t i = 0; + Frame genFrame(int i, int n) { + int width = WIDTH; + int height = HEIGHT; + auto bytes_ptr = std::make_unique(width * height * 4); + auto bytes = bytes_ptr.get(); + auto set_rgb = [&](int x, int y, std::uint8_t r, std::uint8_t g, std::uint8_t b) { + auto dest = bytes + (x * width + y) * 4; + dest[0] = r; + dest[1] = g; + dest[2] = b; + dest[3] = 0; + }; + auto angle = (double)i / n * M_PI; + auto co = cos(angle); + auto si = sin(angle); + + for (int i = 0; i < height; i++) { + for (int j = 0; j < width; j++) { + double sx = (i - height / 2) * 20.0 / HEIGHT; + double sy = (j - width / 2) * 20.0 / HEIGHT; + + int x, y; + if (sx * sx + sy * sy < 10) { + x = int(floor(sx * co - sy * si)); + y = int(floor(sx * si + sy * co)); + } else { + x = int(floor(sx)); + y = 
int(floor(sy)); + } + std::uint8_t color = ((y & 1) ^ (x & 1)) * 255; + set_rgb(i, j, color, color, color); + } + } + + rtc::scoped_refptr buffer = webrtc::I420Buffer::Create(width, height); + + libyuv::RGBAToI420(bytes, width * 4, buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), + buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(), width, height); + + return Frame{webrtc::VideoFrame::Builder().set_video_frame_buffer(buffer).build(), std::move(bytes_ptr)}; + } + +}; + +webrtc::VideoFrame FrameSource::next_frame() { + auto info = this->info(); + auto height = info.height; + auto width = info.width; + auto bytes_ptr = std::make_unique(width * height * 4); + double pts; + next_frame_rgb0(reinterpret_cast(bytes_ptr.get()), &pts); + rtc::scoped_refptr buffer = webrtc::I420Buffer::Create(width, height); + libyuv::ABGRToI420(bytes_ptr.get(), width * 4, buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), + buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(), width, height); + return webrtc::VideoFrame::Builder().set_timestamp_us(static_cast(pts * 1000000)).set_video_frame_buffer(buffer).build(); +} + +class FakeVideoSource : public rtc::VideoSourceInterface { + public: + FakeVideoSource(std::unique_ptr source) { + data_ = std::make_shared(); + std::thread([data = data_, source = std::move(source)] { + std::uint32_t step = 0; + while (!data->flag_) { + step++; + std::this_thread::sleep_for(std::chrono::milliseconds(1000 / 30)); + auto frame = source->next_frame(); + frame.set_id(static_cast(step)); + frame.set_timestamp_us(rtc::TimeMicros()); + data->broadcaster_.OnFrame(frame); + } + }).detach(); + } + ~FakeVideoSource() { + data_->flag_ = true; + } + using VideoFrameT = webrtc::VideoFrame; + void AddOrUpdateSink(rtc::VideoSinkInterface *sink, const rtc::VideoSinkWants &wants) override { + RTC_LOG(WARNING) << "ADD"; + data_->broadcaster_.AddOrUpdateSink(sink, wants); + } + // RemoveSink must guarantee that at the time the 
method returns, + // there is no current and no future calls to VideoSinkInterface::OnFrame. + void RemoveSink(rtc::VideoSinkInterface *sink) { + RTC_LOG(WARNING) << "REMOVE"; + data_->broadcaster_.RemoveSink(sink); + } + + private: + struct Data { + std::atomic flag_; + rtc::VideoBroadcaster broadcaster_; + }; + std::shared_ptr data_; +}; + +class FakeVideoTrackSourceImpl : public webrtc::VideoTrackSource { + public: + static rtc::scoped_refptr Create(std::unique_ptr source) { + return rtc::scoped_refptr(new rtc::RefCountedObject(std::move(source))); + } + + explicit FakeVideoTrackSourceImpl(std::unique_ptr source) : VideoTrackSource(false), source_(std::move(source)) { + } + + protected: + FakeVideoSource source_; + rtc::VideoSourceInterface *source() override { + return &source_; + } +}; + +std::function FakeVideoTrackSource::create(std::unique_ptr frame_source) { + auto source = FakeVideoTrackSourceImpl::Create(std::move(frame_source)); + return [source] { + return source.get(); + }; +} +std::unique_ptr FrameSource::chess(){ + return std::make_unique(); +} + +void FrameSource::video_frame_to_rgb0(const webrtc::VideoFrame & src, char *dest){ + auto buffer = src.video_frame_buffer()->GetI420(); + libyuv::I420ToABGR(buffer->DataY(), buffer->StrideY(), buffer->DataU(), + buffer->StrideU(), buffer->DataV(), buffer->StrideV( ), reinterpret_cast(dest), src.width() * 4, src.width(), src.height()); +} +} diff --git a/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.h b/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.h new file mode 100644 index 000000000..e0e50f312 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/FakeVideoTrackSource.h @@ -0,0 +1,33 @@ +#pragma once + +#include + +namespace webrtc { +class VideoTrackSourceInterface; +class VideoFrame; +} + +namespace tgcalls { +class FrameSource { +public: + struct Info { + int32_t width; + int32_t height; + }; + + virtual ~FrameSource() = default; + + virtual Info info() const = 0; + virtual webrtc::VideoFrame 
next_frame(); + static void video_frame_to_rgb0(const webrtc::VideoFrame &src, char *dest); + virtual void next_frame_rgb0(char *buf, double *pt_in_seconds) = 0; + + static std::unique_ptr chess(); + static std::unique_ptr from_file(std::string path); +}; + +class FakeVideoTrackSource { + public: + static std::function create(std::unique_ptr source); +}; +} \ No newline at end of file diff --git a/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp index 508954928..ec83897c8 100644 --- a/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/InstanceImpl.cpp @@ -169,7 +169,7 @@ void InstanceImpl::stop(std::function completion) { } int InstanceImpl::GetConnectionMaxLayer() { - return 92; // TODO: retrieve from LayerBase + return 92; } std::vector InstanceImpl::GetVersions() { diff --git a/TMessagesProj/jni/voip/tgcalls/JsonConfig.cpp b/TMessagesProj/jni/voip/tgcalls/JsonConfig.cpp deleted file mode 100644 index 80737e9ca..000000000 --- a/TMessagesProj/jni/voip/tgcalls/JsonConfig.cpp +++ /dev/null @@ -1,13 +0,0 @@ -#include "JsonConfig.h" - -namespace tgcalls { - -JsonConfig::JsonConfig(Values values) : _values(values) { - -} - -Value JsonConfig::getValue(std::string key) { - return _values[key]; -} - -} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/JsonConfig.h b/TMessagesProj/jni/voip/tgcalls/JsonConfig.h deleted file mode 100644 index c9bd0f853..000000000 --- a/TMessagesProj/jni/voip/tgcalls/JsonConfig.h +++ /dev/null @@ -1,25 +0,0 @@ -#ifndef TGCALLS_JSON_CONFIG_H -#define TGCALLS_JSON_CONFIG_H - -#include -#include -#include "absl/types/variant.h" - -namespace tgcalls { - -typedef absl::variant Value; -typedef std::map Values; - -class JsonConfig { - -public: - JsonConfig(Values values); - Value getValue(std::string key); - -private: - Values _values; -}; - -} // namespace tgcalls - -#endif diff --git a/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.cpp 
b/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.cpp index b7e53e07f..98ea05428 100644 --- a/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/LogSinkImpl.cpp @@ -29,7 +29,6 @@ void LogSinkImpl::OnLogMessage(const std::string &message) { time_t rawTime; time(&rawTime); struct tm timeinfo; - timeval curTime = { 0 }; #ifdef WEBRTC_WIN localtime_s(&timeinfo, &rawTime); @@ -45,14 +44,13 @@ void LogSinkImpl::OnLogMessage(const std::string &message) { const auto deltaEpochInMicrosecs = 11644473600000000Ui64; full -= deltaEpochInMicrosecs; full /= 10; - curTime.tv_sec = (long)(full / 1000000UL); - curTime.tv_usec = (long)(full % 1000000UL); + int32_t milliseconds = (long)(full % 1000000UL) / 1000; #else + timeval curTime = { 0 }; localtime_r(&rawTime, &timeinfo); gettimeofday(&curTime, nullptr); -#endif - int32_t milliseconds = curTime.tv_usec / 1000; +#endif auto &stream = _file.is_open() ? (std::ostream&)_file : _data; stream diff --git a/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp b/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp index 14d043635..273c33210 100644 --- a/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp @@ -205,6 +205,7 @@ _platformContext(platformContext) { "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/" "WebRTC-FlexFEC-03/Enabled/" "WebRTC-FlexFEC-03-Advertised/Enabled/" + "WebRTC-Turn-AllowSystemPorts/Enabled/" ); PlatformInterface::SharedInstance()->configurePlatformAudio(); @@ -391,7 +392,7 @@ void MediaManager::start() { beginStatsTimer(3000); if (_audioLevelUpdated != nullptr) { - beginLevelsTimer(50); + beginLevelsTimer(100); } } @@ -504,7 +505,7 @@ void MediaManager::beginLevelsTimer(int timeoutMs) { float effectiveLevel = fmaxf(strong->_currentAudioLevel, strong->_currentMyAudioLevel); strong->_audioLevelUpdated(effectiveLevel); - strong->beginLevelsTimer(50); + strong->beginLevelsTimer(100); }, timeoutMs); } @@ -746,7 +747,9 @@ void 
MediaManager::checkIsReceivingVideoChanged(bool wasReceiving) { const auto codecs = { cricket::kFlexfecCodecName, cricket::kH264CodecName, +#ifndef WEBRTC_DISABLE_H265 cricket::kH265CodecName, +#endif cricket::kVp8CodecName, cricket::kVp9CodecName, cricket::kAv1CodecName, diff --git a/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp b/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp index 460c254e0..72f815664 100644 --- a/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp @@ -10,6 +10,9 @@ #include "rtc_base/task_utils/to_queued_task.h" #include "p2p/base/ice_credentials_iterator.h" #include "api/jsep_ice_candidate.h" +#include "rtc_base/network_monitor_factory.h" + +#include "platform/PlatformInterface.h" extern "C" { #include @@ -104,6 +107,8 @@ _transportMessageReceived(std::move(transportMessageReceived)), _sendSignalingMessage(std::move(sendSignalingMessage)), _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH)) { assert(_thread->IsCurrent()); + + _networkMonitorFactory = PlatformInterface::SharedInstance()->createNetworkMonitorFactory(); } NetworkManager::~NetworkManager() { @@ -121,7 +126,7 @@ NetworkManager::~NetworkManager() { void NetworkManager::start() { _socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread)); - _networkManager = std::make_unique(); + _networkManager = std::make_unique(_networkMonitorFactory.get()); if (_enableStunMarking) { _turnCustomizer.reset(new TurnCustomizerImpl()); diff --git a/TMessagesProj/jni/voip/tgcalls/NetworkManager.h b/TMessagesProj/jni/voip/tgcalls/NetworkManager.h index 53c53d28a..b50850046 100644 --- a/TMessagesProj/jni/voip/tgcalls/NetworkManager.h +++ b/TMessagesProj/jni/voip/tgcalls/NetworkManager.h @@ -10,6 +10,7 @@ #include "rtc_base/copy_on_write_buffer.h" #include "api/candidate.h" +#include "rtc_base/network_monitor_factory.h" #include #include @@ -93,6 +94,7 @@ private: 
std::function _transportMessageReceived; std::function _sendSignalingMessage; + std::unique_ptr _networkMonitorFactory; std::unique_ptr _socketFactory; std::unique_ptr _networkManager; std::unique_ptr _turnCustomizer; diff --git a/TMessagesProj/jni/voip/tgcalls/StaticThreads.cpp b/TMessagesProj/jni/voip/tgcalls/StaticThreads.cpp index 5db98dc2f..4a88625d7 100644 --- a/TMessagesProj/jni/voip/tgcalls/StaticThreads.cpp +++ b/TMessagesProj/jni/voip/tgcalls/StaticThreads.cpp @@ -61,10 +61,12 @@ public: explicit ThreadsImpl(size_t i) { auto suffix = i == 0 ? "" : "#" + std::to_string(i); network_ = create_network("tgc-net" + suffix); + network_->DisallowAllInvokes(); media_ = create("tgc-media" + suffix); worker_ = create("tgc-work" + suffix); process_ = create("tgc-process" + suffix); - shared_module_thread_ = webrtc::SharedModuleThread::Create(webrtc::ProcessThread::Create("tgc-module"), nullptr); + worker_->DisallowAllInvokes(); + worker_->AllowInvokesToThread(network_.get()); } rtc::Thread *getNetworkThread() override { @@ -80,6 +82,13 @@ public: return process_.get(); } rtc::scoped_refptr getSharedModuleThread() override { + // This function must be called from a single thread because of SharedModuleThread implementation + // So we don't care about making it thread safe + if (!shared_module_thread_) { + shared_module_thread_ = webrtc::SharedModuleThread::Create( + webrtc::ProcessThread::Create("tgc-module"), + [=] { shared_module_thread_ = nullptr; }); + } return shared_module_thread_; } diff --git a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterface.h b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterface.h index 7fe8f0da9..3ab2a00e0 100644 --- a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterface.h +++ b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterface.h @@ -3,6 +3,7 @@ #include #include +#include namespace rtc { template @@ -24,6 +25,7 @@ enum class VideoState { Active, }; + class VideoCaptureInterface { protected: VideoCaptureInterface() = default; @@ 
-40,10 +42,22 @@ public: virtual void setState(VideoState state) = 0; virtual void setPreferredAspectRatio(float aspectRatio) = 0; virtual void setOutput(std::shared_ptr> sink) = 0; + virtual void setOnFatalError(std::function error) { + // TODO: make this function pure virtual when everybody implements it. + } + virtual void setOnPause(std::function pause) { + // TODO: make this function pure virtual when everybody implements it. + } + virtual void setOnIsActiveUpdated(std::function onIsActiveUpdated) { + // TODO: make this function pure virtual when everybody implements it. + } + virtual void withNativeImplementation(std::function completion) { + completion(nullptr); + } + virtual std::shared_ptr getPlatformContext() { return nullptr; } - }; } // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.cpp index eef616051..8022658b6 100644 --- a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.cpp @@ -10,7 +10,7 @@ namespace tgcalls { VideoCaptureInterfaceObject::VideoCaptureInterfaceObject(std::string deviceId, std::shared_ptr platformContext, Threads &threads) -: _videoSource(PlatformInterface::SharedInstance()->makeVideoSource(threads.getMediaThread(), threads.getWorkerThread())) { +: _videoSource(PlatformInterface::SharedInstance()->makeVideoSource(threads.getMediaThread(), threads.getWorkerThread(), deviceId == "screen")) { _platformContext = platformContext; switchToDevice(deviceId); @@ -26,8 +26,16 @@ webrtc::VideoTrackSourceInterface *VideoCaptureInterfaceObject::source() { return _videoSource; } +int VideoCaptureInterfaceObject::getRotation() { + if (_videoCapturer) { + return _videoCapturer->getRotation(); + } else { + return 0; + } +} + void VideoCaptureInterfaceObject::switchToDevice(std::string deviceId) { - if (_videoCapturer && _currentUncroppedSink) { + if (_videoCapturer && 
_currentUncroppedSink != nullptr) { _videoCapturer->setUncroppedOutput(nullptr); } if (_videoSource) { @@ -37,24 +45,53 @@ void VideoCaptureInterfaceObject::switchToDevice(std::string deviceId) { if (this->_stateUpdated) { this->_stateUpdated(state); } + if (this->_onIsActiveUpdated) { + switch (state) { + case VideoState::Active: { + this->_onIsActiveUpdated(true); + break; + } + default: { + this->_onIsActiveUpdated(false); + break; + } + } + } }, [this](PlatformCaptureInfo info) { if (this->_shouldBeAdaptedToReceiverAspectRate != info.shouldBeAdaptedToReceiverAspectRate) { this->_shouldBeAdaptedToReceiverAspectRate = info.shouldBeAdaptedToReceiverAspectRate; - this->updateAspectRateAdaptation(); } + if (this->_rotationUpdated) { + this->_rotationUpdated(info.rotation); + } + this->updateAspectRateAdaptation(); }, _platformContext, _videoCapturerResolution); } if (_videoCapturer) { -// if (_preferredAspectRatio > 0) { -// _videoCapturer->setPreferredCaptureAspectRatio(_preferredAspectRatio); -// } +// if (_preferredAspectRatio > 0) { +// _videoCapturer->setPreferredCaptureAspectRatio(_preferredAspectRatio); +// } if (_currentUncroppedSink) { _videoCapturer->setUncroppedOutput(_currentUncroppedSink); } + if (_onFatalError) { + _videoCapturer->setOnFatalError(_onFatalError); + } + if (_onPause) { + _videoCapturer->setOnPause(_onPause); + } _videoCapturer->setState(_state); } } +void VideoCaptureInterfaceObject::withNativeImplementation(std::function completion) { + if (_videoCapturer) { + _videoCapturer->withNativeImplementation(completion); + } else { + completion(nullptr); + } +} + void VideoCaptureInterfaceObject::setState(VideoState state) { if (_state != state) { _state = state; @@ -84,10 +121,10 @@ void VideoCaptureInterfaceObject::updateAspectRateAdaptation() { float height = (originalWidth > aspectRatio * originalHeight) ? 
originalHeight : int(std::round(originalHeight / aspectRatio)); - - PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 30); + + PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 25); } else { - PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, _videoCapturerResolution.first, _videoCapturerResolution.second, 30); + PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, _videoCapturerResolution.first, _videoCapturerResolution.second, 25); } } } @@ -100,10 +137,31 @@ void VideoCaptureInterfaceObject::setOutput(std::shared_ptr error) { + if (_videoCapturer) { + _videoCapturer->setOnFatalError(error); + } + _onFatalError = error; +} +void VideoCaptureInterfaceObject::setOnPause(std::function pause) { + if (_videoCapturer) { + _videoCapturer->setOnPause(pause); + } + _onPause = pause; +} + +void VideoCaptureInterfaceObject::setOnIsActiveUpdated(std::function onIsActiveUpdated) { + _onIsActiveUpdated = onIsActiveUpdated; +} + void VideoCaptureInterfaceObject::setStateUpdated(std::function stateUpdated) { _stateUpdated = stateUpdated; } +void VideoCaptureInterfaceObject::setRotationUpdated(std::function rotationUpdated) { + _rotationUpdated = rotationUpdated; +} + VideoCaptureInterfaceImpl::VideoCaptureInterfaceImpl(std::string deviceId, std::shared_ptr platformContext, std::shared_ptr threads) : _platformContext(platformContext), @@ -120,6 +178,12 @@ void VideoCaptureInterfaceImpl::switchToDevice(std::string deviceId) { }); } +void VideoCaptureInterfaceImpl::withNativeImplementation(std::function completion) { + _impl.perform(RTC_FROM_HERE, [completion](VideoCaptureInterfaceObject *impl) { + impl->withNativeImplementation(completion); + }); +} + void VideoCaptureInterfaceImpl::setState(VideoState state) { _impl.perform(RTC_FROM_HERE, [state](VideoCaptureInterfaceObject *impl) { impl->setState(state); @@ -131,6 +195,22 @@ void 
VideoCaptureInterfaceImpl::setPreferredAspectRatio(float aspectRatio) { impl->setPreferredAspectRatio(aspectRatio); }); } +void VideoCaptureInterfaceImpl::setOnFatalError(std::function error) { + _impl.perform(RTC_FROM_HERE, [error](VideoCaptureInterfaceObject *impl) { + impl->setOnFatalError(error); + }); +} +void VideoCaptureInterfaceImpl::setOnPause(std::function pause) { + _impl.perform(RTC_FROM_HERE, [pause](VideoCaptureInterfaceObject *impl) { + impl->setOnPause(pause); + }); +} + +void VideoCaptureInterfaceImpl::setOnIsActiveUpdated(std::function onIsActiveUpdated) { + _impl.perform(RTC_FROM_HERE, [onIsActiveUpdated](VideoCaptureInterfaceObject *impl) { + impl->setOnIsActiveUpdated(onIsActiveUpdated); + }); +} void VideoCaptureInterfaceImpl::setOutput(std::shared_ptr> sink) { _impl.perform(RTC_FROM_HERE, [sink](VideoCaptureInterfaceObject *impl) { diff --git a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.h b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.h index e3bb2a27b..43b7a5520 100644 --- a/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/VideoCaptureInterfaceImpl.h @@ -18,11 +18,17 @@ public: ~VideoCaptureInterfaceObject(); void switchToDevice(std::string deviceId); + void withNativeImplementation(std::function completion); void setState(VideoState state); void setPreferredAspectRatio(float aspectRatio); void setOutput(std::shared_ptr> sink); void setStateUpdated(std::function stateUpdated); + void setRotationUpdated(std::function rotationUpdated); + void setOnFatalError(std::function error); + void setOnPause(std::function pause); + void setOnIsActiveUpdated(std::function onIsActiveUpdated); webrtc::VideoTrackSourceInterface *source(); + int getRotation(); private: void updateAspectRateAdaptation(); @@ -33,6 +39,10 @@ private: std::pair _videoCapturerResolution; std::unique_ptr _videoCapturer; std::function _stateUpdated; + std::function _onFatalError; + std::function _onPause; + 
std::function _onIsActiveUpdated; + std::function _rotationUpdated; VideoState _state = VideoState::Active; float _preferredAspectRatio = 0.0f; bool _shouldBeAdaptedToReceiverAspectRate = true; @@ -44,10 +54,14 @@ public: virtual ~VideoCaptureInterfaceImpl(); void switchToDevice(std::string deviceId) override; + void withNativeImplementation(std::function completion) override; void setState(VideoState state) override; void setPreferredAspectRatio(float aspectRatio) override; void setOutput(std::shared_ptr> sink) override; - std::shared_ptr getPlatformContext() override; + void setOnFatalError(std::function error) override; + void setOnPause(std::function pause) override; + void setOnIsActiveUpdated(std::function onIsActiveUpdated) override; + std::shared_ptr getPlatformContext() override; ThreadLocalObject *object(); diff --git a/TMessagesProj/jni/voip/tgcalls/VideoCapturerInterface.h b/TMessagesProj/jni/voip/tgcalls/VideoCapturerInterface.h index 8449a6330..f2cadbde8 100644 --- a/TMessagesProj/jni/voip/tgcalls/VideoCapturerInterface.h +++ b/TMessagesProj/jni/voip/tgcalls/VideoCapturerInterface.h @@ -4,6 +4,7 @@ #include "Instance.h" #include +#include namespace rtc { template @@ -23,6 +24,16 @@ public: virtual void setState(VideoState state) = 0; virtual void setPreferredCaptureAspectRatio(float aspectRatio) = 0; virtual void setUncroppedOutput(std::shared_ptr> sink) = 0; + virtual int getRotation() = 0; + virtual void setOnFatalError(std::function error) { + // TODO: make this function pure virtual when everybody implements it. + } + virtual void setOnPause(std::function pause) { + // TODO: make this function pure virtual when everybody implements it. 
+ } + virtual void withNativeImplementation(std::function completion) { + completion(nullptr); + } }; diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp index 9b2e078b8..3d06cfdd3 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp @@ -29,8 +29,8 @@ #include "modules/audio_processing/audio_buffer.h" #include "absl/strings/match.h" #include "modules/audio_processing/agc2/vad_with_level.h" +#include "modules/audio_processing/agc2/cpu_features.h" #include "pc/channel_manager.h" -#include "media/base/rtp_data_engine.h" #include "audio/audio_state.h" #include "modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ -46,10 +46,24 @@ #include "StreamingPart.h" #include "AudioDeviceHelper.h" +#include #include #include #include + +#ifndef USE_RNNOISE +#define USE_RNNOISE 1 +#endif + +#if USE_RNNOISE +#include "rnnoise.h" +#endif + +#include "GroupJoinPayloadInternal.h" + +#include "third-party/json11.hpp" + namespace tgcalls { namespace { @@ -120,9 +134,9 @@ static void addDefaultFeedbackParams(cricket::VideoCodec *codec) { codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli)); } -static absl::optional assignPayloadTypes(std::vector const &formats) { +static std::vector assignPayloadTypes(std::vector const &formats) { if (formats.empty()) { - return absl::nullopt; + return {}; } constexpr int kFirstDynamicPayloadType = 100; @@ -130,37 +144,26 @@ static absl::optional assignPayloadTypes(std::vector result; - bool codecSelected = false; + std::vector filterCodecNames = { + cricket::kVp8CodecName, + cricket::kVp9CodecName + }; - for (const auto &format : formats) { - if (codecSelected) { - break; - } + for (const auto &codecName : filterCodecNames) { + for (const auto &format : 
formats) { + if (format.name != codecName) { + continue; + } - cricket::VideoCodec codec(format); - codec.id = payload_type; - addDefaultFeedbackParams(&codec); + cricket::VideoCodec codec(format); + codec.id = payload_type; + addDefaultFeedbackParams(&codec); - if (!absl::EqualsIgnoreCase(codec.name, cricket::kVp8CodecName)) { - continue; - } + OutgoingVideoFormat resultFormat; - result.videoCodec = codec; - codecSelected = true; - - // Increment payload type. - ++payload_type; - if (payload_type > kLastDynamicPayloadType) { - RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest."; - break; - } - - // Add associated RTX codec for non-FEC codecs. - if (!absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName) && - !absl::EqualsIgnoreCase(codec.name, cricket::kFlexfecCodecName)) { - result.rtxCodec = cricket::VideoCodec::CreateRtxCodec(payload_type, codec.id); + resultFormat.videoCodec = codec; // Increment payload type. ++payload_type; @@ -168,8 +171,24 @@ static absl::optional assignPayloadTypes(std::vector kLastDynamicPayloadType) { + RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest."; + break; + } + } + + result.push_back(std::move(resultFormat)); } } + return result; } @@ -227,48 +246,81 @@ struct ChannelId { } }; +struct VideoChannelId { + std::string endpointId; -class NetworkInterfaceImpl : public cricket::MediaChannel::NetworkInterface { -public: - NetworkInterfaceImpl(std::function sendPacket) : - _sendPacket(sendPacket) { - + explicit VideoChannelId(std::string const &endpointId_) : + endpointId(endpointId_) { } - bool SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) { - rtc::SentPacket sentPacket(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent); - _sendPacket(packet, sentPacket); - return true; + bool operator <(const VideoChannelId& rhs) const { + return endpointId < rhs.endpointId; } +}; - bool SendRtcp(rtc::CopyOnWriteBuffer *packet, const 
rtc::PacketOptions& options) { - rtc::SentPacket sentPacket(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent); - _sendPacket(packet, sentPacket); - return true; +struct ChannelSsrcInfo { + enum class Type { + Audio, + Video + }; + + Type type = Type::Audio; + std::vector allSsrcs; + std::string videoEndpointId; +}; + +struct RequestedMediaChannelDescriptions { + std::shared_ptr task; + std::vector ssrcs; + + RequestedMediaChannelDescriptions(std::shared_ptr task_, std::vector ssrcs_) : + task(task_), ssrcs(std::move(ssrcs_)) { } - - int SetOption(cricket::MediaChannel::NetworkInterface::SocketType, rtc::Socket::Option, int) { - return -1; - } - -private: - std::function _sendPacket; }; static const int kVadResultHistoryLength = 6; -class CombinedVad { +class VadHistory { private: - webrtc::VadLevelAnalyzer _vadWithLevel; float _vadResultHistory[kVadResultHistoryLength]; - std::atomic _waitingFramesToProcess{0}; - bool _countFrames; public: - CombinedVad(bool count = false) { + VadHistory() { for (float & i : _vadResultHistory) { i = 0.0f; } + } + + ~VadHistory() = default; + + bool update(float vadProbability) { + if (vadProbability >= 0.0f) { + for (int i = 1; i < kVadResultHistoryLength; i++) { + _vadResultHistory[i - 1] = _vadResultHistory[i]; + } + _vadResultHistory[kVadResultHistoryLength - 1] = vadProbability; + } + + float movingAverage = 0.0f; + for (float i : _vadResultHistory) { + movingAverage += i; + } + movingAverage /= (float)kVadResultHistoryLength; + + return movingAverage > 0.6f; + } +}; + +class CombinedVad { +private: + std::unique_ptr _vadWithLevel; + VadHistory _history; + bool _countFrames; + std::atomic _waitingFramesToProcess{0}; + +public: + CombinedVad(bool count = false){ + _vadWithLevel = std::make_unique(500, webrtc::GetAvailableCpuFeatures()); _countFrames = count; } @@ -284,28 +336,50 @@ public: bool update(webrtc::AudioBuffer *buffer) { if (buffer) { - webrtc::AudioFrameView frameView(buffer->channels(), 
buffer->num_channels(), buffer->num_frames()); - auto result = _vadWithLevel.AnalyzeFrame(frameView); - float speech_probability = result.speech_probability; - for (int i = 1; i < kVadResultHistoryLength; i++) { - _vadResultHistory[i - 1] = _vadResultHistory[i]; - } - _vadResultHistory[kVadResultHistoryLength - 1] = speech_probability; if (_countFrames) { _waitingFramesToProcess--; } + if (buffer->num_channels() <= 0) { + return _history.update(0.0f); + } + webrtc::AudioFrameView frameView(buffer->channels(), buffer->num_channels(), buffer->num_frames()); + float peak = 0.0f; + for (const auto &x : frameView.channel(0)) { + peak = std::max(std::fabs(x), peak); + } + if (peak <= 0.01f) { + return _history.update(false); + } + auto result = _vadWithLevel->AnalyzeFrame(frameView); + return _history.update(result.speech_probability); } - - float movingAverage = 0.0f; - for (float i : _vadResultHistory) { - movingAverage += i; - } - movingAverage /= (float)kVadResultHistoryLength; - - return movingAverage > 0.6f; + return _history.update(-1); } }; +class SparseVad { +public: + SparseVad() { + } + + bool update(webrtc::AudioBuffer *buffer) { + _sampleCount += buffer->num_frames(); + if (_sampleCount < 400) { + return _currentValue; + } + _sampleCount = 0; + + _currentValue = _vad.update(buffer); + + return _currentValue; + } + +private: + CombinedVad _vad; + bool _currentValue = false; + size_t _sampleCount = 0; +}; + class AudioSinkImpl: public webrtc::AudioSinkInterface { public: struct Update { @@ -344,6 +418,7 @@ public: const int16_t *samples = (const int16_t *)audio.data; int numberOfSamplesInFrame = (int)audio.samples_per_channel; + int16_t currentPeak = 0; for (int i = 0; i < numberOfSamplesInFrame; i++) { int16_t sample = samples[i]; if (sample < 0) { @@ -352,11 +427,14 @@ public: if (_peak < sample) { _peak = sample; } + if (currentPeak < sample) { + currentPeak = sample; + } _peakCount += 1; } - if (_peakCount >= 1200) { - float level = ((float)(_peak)) / 
4000.0f; + if (_peakCount >= 4400) { + float level = ((float)(_peak)) / 8000.0f; _peak = 0; _peakCount = 0; @@ -393,7 +471,8 @@ public: } virtual void OnFrame(const webrtc::VideoFrame& frame) override { - _lastFrame = frame; + std::unique_lock lock{ _mutex }; + //_lastFrame = frame; for (int i = (int)(_sinks.size()) - 1; i >= 0; i--) { auto strong = _sinks[i].lock(); if (!strong) { @@ -405,6 +484,7 @@ public: } virtual void OnDiscardedFrame() override { + std::unique_lock lock{ _mutex }; for (int i = (int)(_sinks.size()) - 1; i >= 0; i--) { auto strong = _sinks[i].lock(); if (!strong) { @@ -416,6 +496,7 @@ public: } void addSink(std::weak_ptr> impl) { + std::unique_lock lock{ _mutex }; _sinks.push_back(impl); if (_lastFrame) { auto strong = impl.lock(); @@ -428,72 +509,152 @@ public: private: std::vector>> _sinks; absl::optional _lastFrame; + std::mutex _mutex; + }; -class AudioCaptureAnalyzer : public webrtc::CustomAudioAnalyzer { -private: - void Initialize(int sample_rate_hz, int num_channels) override { +struct NoiseSuppressionConfiguration { + NoiseSuppressionConfiguration(bool isEnabled_) : + isEnabled(isEnabled_) { } - void Analyze(const webrtc::AudioBuffer* buffer) override { + bool isEnabled = false; +}; + +#if USE_RNNOISE +class AudioCapturePostProcessor : public webrtc::CustomProcessing { +public: + AudioCapturePostProcessor(std::function updated, std::shared_ptr noiseSuppressionConfiguration) : + _updated(updated), + _noiseSuppressionConfiguration(noiseSuppressionConfiguration) { + int frameSize = rnnoise_get_frame_size(); + _frameSamples.resize(frameSize); + + _denoiseState = rnnoise_create(nullptr); + } + + virtual ~AudioCapturePostProcessor() { + if (_denoiseState) { + rnnoise_destroy(_denoiseState); + } + } + +private: + virtual void Initialize(int sample_rate_hz, int num_channels) override { + } + + virtual void Process(webrtc::AudioBuffer *buffer) override { if (!buffer) { return; } if (buffer->num_channels() != 1) { return; } - - float peak = 0; 
- int peakCount = 0; - const float *samples = buffer->channels_const()[0]; - for (int i = 0; i < buffer->num_frames(); i++) { - float sample = samples[i]; - if (sample < 0) { - sample = -sample; - } - if (peak < sample) { - peak = sample; - } - peakCount += 1; + if (!_denoiseState) { + return; + } + if (buffer->num_frames() != _frameSamples.size()) { + return; } - bool vadStatus = _vad.update((webrtc::AudioBuffer *)buffer); - - _peakCount += peakCount; - if (_peak < peak) { - _peak = peak; + float sourcePeak = 0.0f; + float *sourceSamples = buffer->channels()[0]; + for (int i = 0; i < _frameSamples.size(); i++) { + sourcePeak = std::max(std::fabs(sourceSamples[i]), sourcePeak); } - if (_peakCount >= 1200) { - float level = _peak / 4000.0f; - _peak = 0; - _peakCount = 0; - _updated(GroupLevelValue{ - level, - vadStatus, - }); + if (_noiseSuppressionConfiguration->isEnabled) { + float vadProbability = 0.0f; + if (sourcePeak >= 0.01f) { + vadProbability = rnnoise_process_frame(_denoiseState, _frameSamples.data(), buffer->channels()[0]); + if (_noiseSuppressionConfiguration->isEnabled) { + memcpy(buffer->channels()[0], _frameSamples.data(), _frameSamples.size() * sizeof(float)); + } + } + + float peak = 0; + int peakCount = 0; + const float *samples = buffer->channels_const()[0]; + for (int i = 0; i < buffer->num_frames(); i++) { + float sample = samples[i]; + if (sample < 0) { + sample = -sample; + } + if (peak < sample) { + peak = sample; + } + peakCount += 1; + } + + bool vadStatus = _history.update(vadProbability); + + _peakCount += peakCount; + if (_peak < peak) { + _peak = peak; + } + if (_peakCount >= 4400) { + float level = _peak / 4000.0f; + _peak = 0; + _peakCount = 0; + + _updated(GroupLevelValue{ + level, + vadStatus, + }); + } + } else { + float peak = 0; + int peakCount = 0; + const float *samples = buffer->channels_const()[0]; + for (int i = 0; i < buffer->num_frames(); i++) { + float sample = samples[i]; + if (sample < 0) { + sample = -sample; + } + if 
(peak < sample) { + peak = sample; + } + peakCount += 1; + } + + _peakCount += peakCount; + if (_peak < peak) { + _peak = peak; + } + if (_peakCount >= 1200) { + float level = _peak / 8000.0f; + _peak = 0; + _peakCount = 0; + + _updated(GroupLevelValue{ + level, + level >= 1.0f, + }); + } } } - std::string ToString() const override { - return "analyzing"; + virtual std::string ToString() const override { + return "CustomPostProcessing"; + } + + virtual void SetRuntimeSetting(webrtc::AudioProcessing::RuntimeSetting setting) override { } private: std::function _updated; + std::shared_ptr _noiseSuppressionConfiguration; - CombinedVad _vad; + DenoiseState *_denoiseState = nullptr; + std::vector _frameSamples; int32_t _peakCount = 0; float _peak = 0; - -public: - AudioCaptureAnalyzer(std::function updated) : - _updated(updated) { - } - - virtual ~AudioCaptureAnalyzer() = default; + VadHistory _history; + SparseVad _vad; }; +#endif + class IncomingAudioChannel : public sigslot::has_slots<> { public: IncomingAudioChannel( @@ -505,86 +666,83 @@ public: ChannelId ssrc, std::function &&onAudioLevelUpdated, std::function onAudioFrame, - Threads &threads) : + std::shared_ptr threads) : + _threads(threads), _ssrc(ssrc), _channelManager(channelManager), _call(call) { _creationTimestamp = rtc::TimeMillis(); - cricket::AudioOptions audioOptions; - audioOptions.echo_cancellation = true; - audioOptions.noise_suppression = true; - audioOptions.audio_jitter_buffer_fast_accelerate = true; - audioOptions.audio_jitter_buffer_min_delay_ms = 50; + threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, rtpTransport, ssrc, onAudioFrame = std::move(onAudioFrame), onAudioLevelUpdated = std::move(onAudioLevelUpdated), randomIdGenerator, isRawPcm]() mutable { + cricket::AudioOptions audioOptions; + audioOptions.audio_jitter_buffer_fast_accelerate = true; + audioOptions.audio_jitter_buffer_min_delay_ms = 50; - std::string streamId = std::string("stream") + ssrc.name(); + std::string streamId = 
std::string("stream") + ssrc.name(); - _audioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, threads.getMediaThread(), std::string("audio") + uint32ToString(ssrc.networkSsrc), false, GroupNetworkManager::getDefaulCryptoOptions(), randomIdGenerator, audioOptions); + _audioChannel = _channelManager->CreateVoiceChannel(_call, cricket::MediaConfig(), rtpTransport, _threads->getWorkerThread(), std::string("audio") + uint32ToString(ssrc.networkSsrc), false, GroupNetworkManager::getDefaulCryptoOptions(), randomIdGenerator, audioOptions); - const uint8_t opusMinBitrateKbps = 32; - const uint8_t opusMaxBitrateKbps = 32; - const uint8_t opusStartBitrateKbps = 32; - const uint8_t opusPTimeMs = 120; + const uint8_t opusPTimeMs = 120; - cricket::AudioCodec opusCodec(111, "opus", 48000, 0, 2); - opusCodec.SetParam(cricket::kCodecParamMinBitrate, opusMinBitrateKbps); - opusCodec.SetParam(cricket::kCodecParamStartBitrate, opusStartBitrateKbps); - opusCodec.SetParam(cricket::kCodecParamMaxBitrate, opusMaxBitrateKbps); - opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1); - opusCodec.SetParam(cricket::kCodecParamPTime, opusPTimeMs); + cricket::AudioCodec opusCodec(111, "opus", 48000, 0, 2); + opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1); + opusCodec.SetParam(cricket::kCodecParamPTime, opusPTimeMs); - cricket::AudioCodec pcmCodec(112, "l16", 48000, 0, 1); + cricket::AudioCodec pcmCodec(112, "l16", 48000, 0, 1); - auto outgoingAudioDescription = std::make_unique(); - if (!isRawPcm) { - outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 1)); - outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); - outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); - } - outgoingAudioDescription->set_rtcp_mux(true); - 
outgoingAudioDescription->set_rtcp_reduced_size(true); - outgoingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); - outgoingAudioDescription->set_codecs({ opusCodec, pcmCodec }); + auto outgoingAudioDescription = std::make_unique(); + if (!isRawPcm) { + outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 1)); + outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); + outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); + } + outgoingAudioDescription->set_rtcp_mux(true); + outgoingAudioDescription->set_rtcp_reduced_size(true); + outgoingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); + outgoingAudioDescription->set_codecs({ opusCodec, pcmCodec }); + outgoingAudioDescription->set_bandwidth(1300000); - auto incomingAudioDescription = std::make_unique(); - if (!isRawPcm) { - incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 1)); - incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); - incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); - } - incomingAudioDescription->set_rtcp_mux(true); - incomingAudioDescription->set_rtcp_reduced_size(true); - incomingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); - incomingAudioDescription->set_codecs({ opusCodec, pcmCodec }); - cricket::StreamParams streamParams = cricket::StreamParams::CreateLegacy(ssrc.networkSsrc); - streamParams.set_stream_ids({ streamId }); - incomingAudioDescription->AddStream(streamParams); + auto incomingAudioDescription = std::make_unique(); + if (!isRawPcm) { + 
incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 1)); + incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); + incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); + } + incomingAudioDescription->set_rtcp_mux(true); + incomingAudioDescription->set_rtcp_reduced_size(true); + incomingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); + incomingAudioDescription->set_codecs({ opusCodec, pcmCodec }); + incomingAudioDescription->set_bandwidth(1300000); + cricket::StreamParams streamParams = cricket::StreamParams::CreateLegacy(ssrc.networkSsrc); + streamParams.set_stream_ids({ streamId }); + incomingAudioDescription->AddStream(streamParams); - _audioChannel->SetPayloadTypeDemuxingEnabled(false); - _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); - _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); + _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + _audioChannel->SetPayloadTypeDemuxingEnabled(false); - outgoingAudioDescription.reset(); - incomingAudioDescription.reset(); + outgoingAudioDescription.reset(); + incomingAudioDescription.reset(); - std::unique_ptr audioLevelSink(new AudioSinkImpl(onAudioLevelUpdated, _ssrc, std::move(onAudioFrame))); - _audioChannel->media_channel()->SetRawAudioSink(ssrc.networkSsrc, std::move(audioLevelSink)); + std::unique_ptr audioLevelSink(new AudioSinkImpl(std::move(onAudioLevelUpdated), _ssrc, std::move(onAudioFrame))); - _audioChannel->SignalSentPacket().connect(this, &IncomingAudioChannel::OnSentPacket_w); - //_audioChannel->UpdateRtpTransport(nullptr); + 
_audioChannel->media_channel()->SetRawAudioSink(ssrc.networkSsrc, std::move(audioLevelSink)); - _audioChannel->Enable(true); + _audioChannel->Enable(true); + }); } ~IncomingAudioChannel() { - _audioChannel->SignalSentPacket().disconnect(this); - _audioChannel->Enable(false); - _channelManager->DestroyVoiceChannel(_audioChannel); - _audioChannel = nullptr; + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _channelManager->DestroyVoiceChannel(_audioChannel); + _audioChannel = nullptr; + }); } void setVolume(double value) { - _audioChannel->media_channel()->SetOutputVolume(_ssrc.networkSsrc, value); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, value]() { + _audioChannel->media_channel()->SetOutputVolume(_ssrc.networkSsrc, value); + }); } void updateActivity() { @@ -596,11 +754,7 @@ public: } private: - void OnSentPacket_w(const rtc::SentPacket& sent_packet) { - _call->OnSentPacket(sent_packet); - } - -private: + std::shared_ptr _threads; ChannelId _ssrc; // Memory is managed by _channelManager cricket::VoiceChannel *_audioChannel = nullptr; @@ -619,102 +773,133 @@ public: webrtc::RtpTransport *rtpTransport, rtc::UniqueRandomIdGenerator *randomIdGenerator, std::vector const &availableVideoFormats, - GroupParticipantDescription const &description, - Threads &threads) : + GroupJoinVideoInformation sharedVideoInformation, + uint32_t audioSsrc, + VideoChannelDescription::Quality minQuality, + VideoChannelDescription::Quality maxQuality, + GroupParticipantVideoInformation const &description, + std::shared_ptr threads) : + _threads(threads), + _endpointId(description.endpointId), _channelManager(channelManager), - _call(call) { + _call(call), + _requestedMinQuality(minQuality), + _requestedMaxQuality(maxQuality) { _videoSink.reset(new VideoSinkImpl()); - std::string streamId = std::string("stream") + uint32ToString(description.audioSsrc); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, rtpTransport, &availableVideoFormats, 
&description, randomIdGenerator]() mutable { + uint32_t mid = randomIdGenerator->GenerateId(); + std::string streamId = std::string("video") + uint32ToString(mid); - _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); + _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); - _videoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads.getMediaThread(), std::string("video") + uint32ToString(description.audioSsrc), false, GroupNetworkManager::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); + auto payloadTypes = assignPayloadTypes(availableVideoFormats); + std::vector codecs; + for (const auto &payloadType : payloadTypes) { + codecs.push_back(payloadType.videoCodec); + codecs.push_back(payloadType.rtxCodec); + } - auto payloadTypes = assignPayloadTypes(availableVideoFormats); - if (!payloadTypes.has_value()) { - return; - } + auto outgoingVideoDescription = std::make_unique(); + outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); + outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); + outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kVideoRotationUri, 13)); + outgoingVideoDescription->set_rtcp_mux(true); + outgoingVideoDescription->set_rtcp_reduced_size(true); + outgoingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); + outgoingVideoDescription->set_codecs(codecs); + outgoingVideoDescription->set_bandwidth(1300000); - auto outgoingVideoDescription = std::make_unique(); - outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); - outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 
3)); - outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kVideoRotationUri, 13)); - outgoingVideoDescription->set_rtcp_mux(true); - outgoingVideoDescription->set_rtcp_reduced_size(true); - outgoingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); - outgoingVideoDescription->set_codecs({ payloadTypes->videoCodec, payloadTypes->rtxCodec }); + cricket::StreamParams videoRecvStreamParams; - cricket::StreamParams videoRecvStreamParams; + std::vector allSsrcs; + for (const auto &group : description.ssrcGroups) { + for (auto ssrc : group.ssrcs) { + if (std::find(allSsrcs.begin(), allSsrcs.end(), ssrc) == allSsrcs.end()) { + allSsrcs.push_back(ssrc); + } + } - std::vector allSsrcs; - for (const auto &group : description.videoSourceGroups) { - for (auto ssrc : group.ssrcs) { - if (std::find(allSsrcs.begin(), allSsrcs.end(), ssrc) == allSsrcs.end()) { - allSsrcs.push_back(ssrc); + if (group.semantics == "SIM") { + if (_mainVideoSsrc == 0) { + _mainVideoSsrc = group.ssrcs[0]; + } + } + + cricket::SsrcGroup parsedGroup(group.semantics, group.ssrcs); + videoRecvStreamParams.ssrc_groups.push_back(parsedGroup); + } + videoRecvStreamParams.ssrcs = allSsrcs; + + if (_mainVideoSsrc == 0) { + if (description.ssrcGroups.size() == 1) { + _mainVideoSsrc = description.ssrcGroups[0].ssrcs[0]; } } - if (group.semantics == "SIM") { - if (_mainVideoSsrc == 0) { - _mainVideoSsrc = group.ssrcs[0]; - } - } + videoRecvStreamParams.cname = "cname"; + videoRecvStreamParams.set_stream_ids({ streamId }); - cricket::SsrcGroup parsedGroup(group.semantics, group.ssrcs); - videoRecvStreamParams.ssrc_groups.push_back(parsedGroup); - } - videoRecvStreamParams.ssrcs = allSsrcs; + auto incomingVideoDescription = std::make_unique(); + incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); + 
incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); + incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kVideoRotationUri, 13)); + incomingVideoDescription->set_rtcp_mux(true); + incomingVideoDescription->set_rtcp_reduced_size(true); + incomingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); + incomingVideoDescription->set_codecs(codecs); + incomingVideoDescription->set_bandwidth(1300000); - if (_mainVideoSsrc == 0) { - if (description.videoSourceGroups.size() == 1) { - _mainVideoSsrc = description.videoSourceGroups[0].ssrcs[0]; - } - } + incomingVideoDescription->AddStream(videoRecvStreamParams); - videoRecvStreamParams.cname = "cname"; - videoRecvStreamParams.set_stream_ids({ streamId }); + _videoChannel = _channelManager->CreateVideoChannel(_call, cricket::MediaConfig(), rtpTransport, _threads->getWorkerThread(), std::string("video") + uint32ToString(mid), false, GroupNetworkManager::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); - auto incomingVideoDescription = std::make_unique(); - incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); - incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); - incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kVideoRotationUri, 13)); - incomingVideoDescription->set_rtcp_mux(true); - incomingVideoDescription->set_rtcp_reduced_size(true); - incomingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); - incomingVideoDescription->set_codecs({ payloadTypes->videoCodec, payloadTypes->rtxCodec }); + _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); + 
_videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + _videoChannel->SetPayloadTypeDemuxingEnabled(false); + _videoChannel->media_channel()->SetSink(_mainVideoSsrc, _videoSink.get()); - incomingVideoDescription->AddStream(videoRecvStreamParams); - - _videoChannel->SetPayloadTypeDemuxingEnabled(false); - _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); - _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); - - _videoChannel->media_channel()->SetSink(_mainVideoSsrc, _videoSink.get()); - - _videoChannel->SignalSentPacket().connect(this, &IncomingVideoChannel::OnSentPacket_w); - //_videoChannel->UpdateRtpTransport(nullptr); - - _videoChannel->Enable(true); + _videoChannel->Enable(true); + }); } ~IncomingVideoChannel() { - _videoChannel->Enable(false); - _channelManager->DestroyVideoChannel(_videoChannel); - _videoChannel = nullptr; + //_videoChannel->SignalSentPacket().disconnect(this); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _videoChannel->Enable(false); + _channelManager->DestroyVideoChannel(_videoChannel); + _videoChannel = nullptr; + }); } void addSink(std::weak_ptr> impl) { _videoSink->addSink(impl); } -private: - void OnSentPacket_w(const rtc::SentPacket& sent_packet) { - _call->OnSentPacket(sent_packet); + std::string const &endpointId() { + return _endpointId; + } + + VideoChannelDescription::Quality requestedMinQuality() { + return _requestedMinQuality; + } + + VideoChannelDescription::Quality requestedMaxQuality() { + return _requestedMaxQuality; + } + + void setRequstedMinQuality(VideoChannelDescription::Quality quality) { + _requestedMinQuality = quality; + } + + void setRequstedMaxQuality(VideoChannelDescription::Quality quality) { + _requestedMaxQuality = quality; } private: + std::shared_ptr _threads; uint32_t _mainVideoSsrc = 0; + std::string _endpointId; std::unique_ptr _videoSink; 
std::vector _ssrcGroups; std::unique_ptr _videoBitrateAllocatorFactory; @@ -723,12 +908,9 @@ private: // Memory is managed externally cricket::ChannelManager *_channelManager = nullptr; webrtc::Call *_call = nullptr; -}; -struct SsrcMappingInfo { - uint32_t ssrc = 0; - bool isVideo = false; - std::string endpointId; + VideoChannelDescription::Quality _requestedMinQuality = VideoChannelDescription::Quality::Thumbnail; + VideoChannelDescription::Quality _requestedMaxQuality = VideoChannelDescription::Quality::Thumbnail; }; class MissingSsrcPacketBuffer { @@ -790,6 +972,13 @@ struct DecodedBroadcastPart { std::vector channels; }; +std::function videoCaptureToGetVideoSource(std::shared_ptr videoCapture) { + return [videoCapture]() { + VideoCaptureInterfaceObject *videoCaptureImpl = GetVideoCaptureAssumingSameThread(videoCapture.get()); + return videoCaptureImpl ? videoCaptureImpl->source() : nullptr; + }; +} + } // namespace class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::enable_shared_from_this { @@ -799,53 +988,61 @@ public: _networkStateUpdated(descriptor.networkStateUpdated), _audioLevelsUpdated(descriptor.audioLevelsUpdated), _onAudioFrame(descriptor.onAudioFrame), - _incomingVideoSourcesUpdated(descriptor.incomingVideoSourcesUpdated), - _participantDescriptionsRequired(descriptor.participantDescriptionsRequired), + _requestMediaChannelDescriptions(descriptor.requestMediaChannelDescriptions), _requestBroadcastPart(descriptor.requestBroadcastPart), _videoCapture(descriptor.videoCapture), + _videoCaptureSink(new VideoSinkImpl()), + _getVideoSource(descriptor.getVideoSource), _disableIncomingChannels(descriptor.disableIncomingChannels), _useDummyChannel(descriptor.useDummyChannel), + _outgoingAudioBitrateKbit(descriptor.outgoingAudioBitrateKbit), + _disableOutgoingAudioProcessing(descriptor.disableOutgoingAudioProcessing), + _minOutgoingVideoBitrateKbit(descriptor.minOutgoingVideoBitrateKbit), + 
_videoContentType(descriptor.videoContentType), + _videoCodecPreferences(std::move(descriptor.videoCodecPreferences)), _eventLog(std::make_unique()), _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), - _createAudioDeviceModule(descriptor.createAudioDeviceModule), + _createAudioDeviceModule(descriptor.createAudioDeviceModule), _initialInputDeviceId(std::move(descriptor.initialInputDeviceId)), _initialOutputDeviceId(std::move(descriptor.initialOutputDeviceId)), - _missingPacketBuffer(100), + _missingPacketBuffer(50), _platformContext(descriptor.platformContext) { assert(_threads->getMediaThread()->IsCurrent()); - auto generator = std::mt19937(std::random_device()()); - auto distribution = std::uniform_int_distribution(); - do { - _outgoingAudioSsrc = distribution(generator) & 0x7fffffffU; - } while (!_outgoingAudioSsrc); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this] { + _workerThreadSafery = webrtc::PendingTaskSafetyFlag::Create(); + }); - uint32_t outgoingVideoSsrcBase = _outgoingAudioSsrc + 1; - int numVideoSimulcastLayers = 2; - for (int layerIndex = 0; layerIndex < numVideoSimulcastLayers; layerIndex++) { - _outgoingVideoSsrcs.simulcastLayers.push_back(VideoSsrcs::SimulcastLayer(outgoingVideoSsrcBase + layerIndex * 2 + 0, outgoingVideoSsrcBase + layerIndex * 2 + 1)); + if (_videoCapture) { + assert(!_getVideoSource); + _getVideoSource = videoCaptureToGetVideoSource(std::move(descriptor.videoCapture)); } + generateSsrcs(); + + _noiseSuppressionConfiguration = std::make_shared(descriptor.initialEnableNoiseSuppression); } ~GroupInstanceCustomInternal() { - _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown); - _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown); - _incomingAudioChannels.clear(); _incomingVideoChannels.clear(); + _serverBandwidthProbingVideoSsrc.reset(); destroyOutgoingAudioChannel(); + destroyOutgoingVideoChannel(); - if (_outgoingVideoChannel) { - 
_outgoingVideoChannel->SignalSentPacket().disconnect(this); - _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, nullptr, nullptr); - _outgoingVideoChannel->Enable(false); - _channelManager->DestroyVideoChannel(_outgoingVideoChannel); - _outgoingVideoChannel = nullptr; - } + _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this]() { + _rtpTransport->SignalSentPacket.disconnect(this); + _rtpTransport->SignalRtcpPacketReceived.disconnect(this); + }); - _channelManager = nullptr; - _audioDeviceModule = nullptr; + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _channelManager = nullptr; + if (_audioDeviceModule) { + _audioDeviceModule->Stop(); + _audioDeviceModule = nullptr; + } + _call.reset(); + }); } void start() { @@ -854,7 +1051,11 @@ public: webrtc::field_trial::InitFieldTrialsFromString( "WebRTC-Audio-Allocation/min:32kbps,max:32kbps/" "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/" -// "WebRTC-TaskQueuePacer/Enabled/" + "WebRTC-TaskQueuePacer/Enabled/" + "WebRTC-VP8ConferenceTemporalLayers/1/" + "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/" + //"WebRTC-MutedStateKillSwitch/Enabled/" + //"WebRTC-VP8IosMaxNumberOfThread/max_thread:1/" ); _networkManager.reset(new ThreadLocalObject(_threads->getNetworkThread(), [weak, threads = _threads] () mutable { @@ -869,19 +1070,15 @@ public: }); }, [=](rtc::CopyOnWriteBuffer const &message, bool isUnresolved) { + if (!isUnresolved) { + return; + } threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, message, isUnresolved]() mutable { if (const auto strong = weak.lock()) { strong->receivePacket(message, isUnresolved); } }); }, - [=](rtc::CopyOnWriteBuffer const &message, int64_t timestamp) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, message, timestamp]() mutable { - if (const auto strong = weak.lock()) { - strong->receiveRtcpPacket(message, timestamp); - } - }); - }, [=](bool isDataChannelOpen) { 
threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, isDataChannelOpen]() mutable { if (const auto strong = weak.lock()) { @@ -890,96 +1087,280 @@ public: }); }, [=](std::string const &message) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, message]() mutable { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, message]() { if (const auto strong = weak.lock()) { + strong->receiveDataChannelMessage(message); } }); }, threads); })); - PlatformInterface::SharedInstance()->configurePlatformAudio(); + if (_videoContentType != VideoContentType::Screencast) { + PlatformInterface::SharedInstance()->configurePlatformAudio(); + } - cricket::MediaEngineDependencies mediaDeps; - mediaDeps.task_queue_factory = _taskQueueFactory.get(); - mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); - mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); - - mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext); - mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); - - if (_audioLevelsUpdated) { - auto analyzer = new AudioCaptureAnalyzer([weak, threads = _threads](GroupLevelValue const &level) { - threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, level, threads]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - strong->_myAudioLevel = level; - threads->getMediaThread()->Invoke(RTC_FROM_HERE, [strong] {}); - }); +#if USE_RNNOISE + auto processor = std::make_unique([weak, threads = _threads](GroupLevelValue const &level) { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, level](){ + auto strong = weak.lock(); + if (!strong) { + return; + } + strong->_myAudioLevel = level; }); + }, _noiseSuppressionConfiguration); +#endif - webrtc::AudioProcessingBuilder builder; - builder.SetCaptureAnalyzer(std::unique_ptr(analyzer)); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, 
[this +#if USE_RNNOISE + , processor = std::move(processor) +#endif + ]() mutable { + cricket::MediaEngineDependencies mediaDeps; + mediaDeps.task_queue_factory = _taskQueueFactory.get(); + mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); + mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); - mediaDeps.audio_processing = builder.Create(); - } + mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext); + mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); - _audioDeviceModule = createAudioDeviceModule(); - if (!_audioDeviceModule) { - return; - } - mediaDeps.adm = _audioDeviceModule; + #if USE_RNNOISE + if (_audioLevelsUpdated) { + webrtc::AudioProcessingBuilder builder; + builder.SetCapturePostProcessing(std::move(processor)); - _availableVideoFormats = mediaDeps.video_encoder_factory->GetSupportedFormats(); + mediaDeps.audio_processing = builder.Create(); + } + #endif - std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); + _audioDeviceModule = createAudioDeviceModule(); + if (!_audioDeviceModule) { + return; + } + mediaDeps.adm = _audioDeviceModule; - _channelManager.reset(new cricket::ChannelManager(std::move(mediaEngine), std::make_unique(), _threads->getMediaThread(), _threads->getNetworkThread())); - _channelManager->Init(); + _availableVideoFormats = mediaDeps.video_encoder_factory->GetSupportedFormats(); + + std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); + + _channelManager = cricket::ChannelManager::Create( + std::move(mediaEngine), + true, + _threads->getWorkerThread(), + _threads->getNetworkThread() + ); + }); setAudioInputDevice(_initialInputDeviceId); setAudioOutputDevice(_initialOutputDeviceId); - webrtc::Call::Config callConfig(_eventLog.get()); - callConfig.task_queue_factory = _taskQueueFactory.get(); - callConfig.trials = 
&_fieldTrials; - callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState(); - //_call.reset(webrtc::Call::Create(callConfig, _threads->getSharedModuleThread())); - _call.reset(webrtc::Call::Create(callConfig)); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + webrtc::Call::Config callConfig(_eventLog.get(), _threads->getNetworkThread()); + callConfig.task_queue_factory = _taskQueueFactory.get(); + callConfig.trials = &_fieldTrials; + callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState(); + _call.reset(webrtc::Call::Create(callConfig, _threads->getSharedModuleThread())); + }); _uniqueRandomIdGenerator.reset(new rtc::UniqueRandomIdGenerator()); _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this]() { _rtpTransport = _networkManager->getSyncAssumingSameThread()->getRtpTransport(); + _rtpTransport->SignalSentPacket.connect(this, &GroupInstanceCustomInternal::OnSentPacket_w); + _rtpTransport->SignalRtcpPacketReceived.connect(this, &GroupInstanceCustomInternal::OnRtcpPacketReceived_n); }); _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); - //_outgoingVideoChannel = _channelManager->CreateVideoChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getMediaThread(), "1", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); - - configureSendVideo(); - - if (_outgoingVideoChannel) { - _outgoingVideoChannel->SignalSentPacket().connect(this, &GroupInstanceCustomInternal::OnSentPacket_w); - //_outgoingVideoChannel->UpdateRtpTransport(nullptr); - } + configureVideoParams(); + createOutgoingVideoChannel(); if (_audioLevelsUpdated) { - beginLevelsTimer(50); + beginLevelsTimer(100); } - if (_videoCapture) { - setVideoCapture(_videoCapture, [](GroupJoinPayload) {}, true); + if (_getVideoSource) { + setVideoSource(_getVideoSource, true); } + if (_useDummyChannel && 
_videoContentType != VideoContentType::Screencast) { + addIncomingAudioChannel(ChannelId(1), true); + } + + /*if (_videoContentType != VideoContentType::Screencast) { + createOutgoingAudioChannel(); + }*/ + + beginNetworkStatusTimer(0); + //beginAudioChannelCleanupTimer(0); + adjustBitratePreferences(true); - if (_useDummyChannel) { - addIncomingAudioChannel("_dummy", ChannelId(1), true); + beginRemoteConstraintsUpdateTimer(5000); + } + + void destroyOutgoingVideoChannel() { + if (!_outgoingVideoChannel) { + return; + } + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _outgoingVideoChannel->Enable(false); + _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, nullptr, nullptr); + _channelManager->DestroyVideoChannel(_outgoingVideoChannel); + }); + _outgoingVideoChannel = nullptr; + } + + void createOutgoingVideoChannel() { + if (_outgoingVideoChannel + || _videoContentType == VideoContentType::None) { + return; + } + configureVideoParams(); + + if (!_selectedPayloadType) { + RTC_LOG(LS_ERROR) << "Could not select payload type."; + return; } - beginNetworkStatusTimer(0); + cricket::VideoOptions videoOptions; + if (_videoContentType == VideoContentType::Screencast) { + videoOptions.is_screencast = true; + } + _outgoingVideoChannel = _channelManager->CreateVideoChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getWorkerThread(), "1", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), videoOptions, _videoBitrateAllocatorFactory.get()); + + if (!_outgoingVideoChannel) { + RTC_LOG(LS_ERROR) << "Could not create outgoing video channel."; + return; + } + + _videoSourceGroups.clear(); + cricket::StreamParams videoSendStreamParams; + + std::vector simulcastGroupSsrcs; + std::vector fidGroups; + for (const auto &layer : _outgoingVideoSsrcs.simulcastLayers) { + simulcastGroupSsrcs.push_back(layer.ssrc); + + videoSendStreamParams.ssrcs.push_back(layer.ssrc); + 
videoSendStreamParams.ssrcs.push_back(layer.fidSsrc); + + cricket::SsrcGroup fidGroup(cricket::kFidSsrcGroupSemantics, { layer.ssrc, layer.fidSsrc }); + fidGroups.push_back(fidGroup); + } + if (simulcastGroupSsrcs.size() > 1) { + cricket::SsrcGroup simulcastGroup(cricket::kSimSsrcGroupSemantics, simulcastGroupSsrcs); + videoSendStreamParams.ssrc_groups.push_back(simulcastGroup); + + GroupJoinPayloadVideoSourceGroup payloadSimulcastGroup; + payloadSimulcastGroup.semantics = "SIM"; + payloadSimulcastGroup.ssrcs = simulcastGroupSsrcs; + _videoSourceGroups.push_back(payloadSimulcastGroup); + } + + for (auto fidGroup : fidGroups) { + videoSendStreamParams.ssrc_groups.push_back(fidGroup); + + GroupJoinPayloadVideoSourceGroup payloadFidGroup; + payloadFidGroup.semantics = "FID"; + payloadFidGroup.ssrcs = fidGroup.ssrcs; + _videoSourceGroups.push_back(payloadFidGroup); + } + + videoSendStreamParams.cname = "cname"; + + auto outgoingVideoDescription = std::make_shared(); + for (const auto &extension : _videoExtensionMap) { + outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(extension.second, extension.first)); + } + outgoingVideoDescription->set_rtcp_mux(true); + outgoingVideoDescription->set_rtcp_reduced_size(true); + outgoingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); + outgoingVideoDescription->set_codecs({ _selectedPayloadType->videoCodec, _selectedPayloadType->rtxCodec }); + outgoingVideoDescription->set_bandwidth(1300000); + outgoingVideoDescription->AddStream(videoSendStreamParams); + + auto incomingVideoDescription = std::make_shared(); + for (const auto &extension : _videoExtensionMap) { + incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(extension.second, extension.first)); + } + incomingVideoDescription->set_rtcp_mux(true); + incomingVideoDescription->set_rtcp_reduced_size(true); + incomingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); + 
incomingVideoDescription->set_codecs({ _selectedPayloadType->videoCodec, _selectedPayloadType->rtxCodec }); + incomingVideoDescription->set_bandwidth(1300000); + + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, incomingVideoDescription, outgoingVideoDescription]() { + _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); + _outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false); + }); + + adjustVideoSendParams(); + updateVideoSend(); + } + + void adjustVideoSendParams() { + if (!_outgoingVideoChannel) { + return; + } + + if (_videoContentType == VideoContentType::Screencast) { + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + webrtc::RtpParameters rtpParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(_outgoingVideoSsrcs.simulcastLayers[0].ssrc); + if (rtpParameters.encodings.size() == 3) { + for (int i = 0; i < (int)rtpParameters.encodings.size(); i++) { + if (i == 0) { + rtpParameters.encodings[i].min_bitrate_bps = 50000; + rtpParameters.encodings[i].max_bitrate_bps = 100000; + rtpParameters.encodings[i].scale_resolution_down_by = 4.0; + rtpParameters.encodings[i].active = _outgoingVideoConstraint >= 180; + } else if (i == 1) { + rtpParameters.encodings[i].max_bitrate_bps = 150000; + rtpParameters.encodings[i].max_bitrate_bps = 200000; + rtpParameters.encodings[i].scale_resolution_down_by = 2.0; + rtpParameters.encodings[i].active = _outgoingVideoConstraint >= 360; + } else if (i == 2) { + rtpParameters.encodings[i].min_bitrate_bps = 300000; + rtpParameters.encodings[i].max_bitrate_bps = 800000 + 100000; + rtpParameters.encodings[i].active = _outgoingVideoConstraint >= 720; + } + } + } else if (rtpParameters.encodings.size() == 2) { + for (int i = 0; i < (int)rtpParameters.encodings.size(); i++) { + if (i == 0) { + 
rtpParameters.encodings[i].min_bitrate_bps = 50000; + rtpParameters.encodings[i].max_bitrate_bps = 100000; + rtpParameters.encodings[i].scale_resolution_down_by = 4.0; + } else if (i == 1) { + rtpParameters.encodings[i].min_bitrate_bps = 200000; + rtpParameters.encodings[i].max_bitrate_bps = 900000 + 100000; + } + } + } else { + rtpParameters.encodings[0].max_bitrate_bps = (800000 + 100000) * 2; + } + + _outgoingVideoChannel->media_channel()->SetRtpSendParameters(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, rtpParameters); + }); + } + } + + void updateVideoSend() { + if (!_outgoingVideoChannel) { + return; + } + + webrtc::VideoTrackSourceInterface *videoSource = _getVideoSource ? _getVideoSource() : nullptr; + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, videoSource]() { + if (_getVideoSource) { + _outgoingVideoChannel->Enable(true); + _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, nullptr, videoSource); + } else { + _outgoingVideoChannel->Enable(false); + _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, nullptr, nullptr); + } + }); } void destroyOutgoingAudioChannel() { @@ -987,31 +1368,45 @@ public: return; } - _outgoingAudioChannel->SignalSentPacket().disconnect(this); - _outgoingAudioChannel->media_channel()->SetAudioSend(_outgoingAudioSsrc, false, nullptr, &_audioSource); - _outgoingAudioChannel->Enable(false); - _channelManager->DestroyVoiceChannel(_outgoingAudioChannel); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _outgoingAudioChannel->media_channel()->SetAudioSend(_outgoingAudioSsrc, false, nullptr, &_audioSource); + _outgoingAudioChannel->Enable(false); + _channelManager->DestroyVoiceChannel(_outgoingAudioChannel); + }); _outgoingAudioChannel = nullptr; } void createOutgoingAudioChannel() { - if (_outgoingAudioChannel) { + if (_outgoingAudioChannel + || _videoContentType == VideoContentType::Screencast) { return; } 
cricket::AudioOptions audioOptions; - audioOptions.echo_cancellation = true; - audioOptions.noise_suppression = true; - audioOptions.audio_jitter_buffer_fast_accelerate = true; + if (_disableOutgoingAudioProcessing) { + audioOptions.echo_cancellation = false; + audioOptions.noise_suppression = false; + audioOptions.auto_gain_control = false; + audioOptions.highpass_filter = false; + audioOptions.typing_detection = false; + audioOptions.experimental_agc = false; + audioOptions.experimental_ns = false; + audioOptions.residual_echo_detector = false; + } else { + audioOptions.echo_cancellation = true; + audioOptions.noise_suppression = true; + audioOptions.experimental_ns = true; + audioOptions.residual_echo_detector = true; + } std::vector streamIds; streamIds.push_back("1"); - _outgoingAudioChannel = _channelManager->CreateVoiceChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getMediaThread(), "0", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), audioOptions); + _outgoingAudioChannel = _channelManager->CreateVoiceChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getWorkerThread(), "0", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), audioOptions); - const uint8_t opusMinBitrateKbps = 32; - const uint8_t opusMaxBitrateKbps = 32; - const uint8_t opusStartBitrateKbps = 32; + const uint8_t opusMinBitrateKbps = _outgoingAudioBitrateKbit; + const uint8_t opusMaxBitrateKbps = _outgoingAudioBitrateKbit; + const uint8_t opusStartBitrateKbps = _outgoingAudioBitrateKbit; const uint8_t opusPTimeMs = 120; cricket::AudioCodec opusCodec(111, "opus", 48000, 0, 2); @@ -1022,7 +1417,7 @@ public: opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1); opusCodec.SetParam(cricket::kCodecParamPTime, opusPTimeMs); - auto outgoingAudioDescription = std::make_unique(); + auto outgoingAudioDescription = std::make_shared(); 
outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 1)); outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); @@ -1030,9 +1425,10 @@ public: outgoingAudioDescription->set_rtcp_reduced_size(true); outgoingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); outgoingAudioDescription->set_codecs({ opusCodec }); + outgoingAudioDescription->set_bandwidth(1300000); outgoingAudioDescription->AddStream(cricket::StreamParams::CreateLegacy(_outgoingAudioSsrc)); - auto incomingAudioDescription = std::make_unique(); + auto incomingAudioDescription = std::make_shared(); incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 1)); incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); @@ -1040,15 +1436,18 @@ public: incomingAudioDescription->set_rtcp_reduced_size(true); incomingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); incomingAudioDescription->set_codecs({ opusCodec }); + incomingAudioDescription->set_bandwidth(1300000); - _outgoingAudioChannel->SetPayloadTypeDemuxingEnabled(false); - _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); - _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); - - _outgoingAudioChannel->SignalSentPacket().connect(this, &GroupInstanceCustomInternal::OnSentPacket_w); - //_outgoingAudioChannel->UpdateRtpTransport(nullptr); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, outgoingAudioDescription, 
incomingAudioDescription]() mutable { + _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); + _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + _outgoingAudioChannel->SetPayloadTypeDemuxingEnabled(false); + _outgoingAudioChannel->Enable(true); + }); onUpdatedIsMuted(); + + adjustBitratePreferences(false); } void stop() { @@ -1076,13 +1475,16 @@ public: effectiveSsrc, it.second, }); + if (it.second.level > 0.001f) { + auto audioChannel = strong->_incomingAudioChannels.find(it.first); + if (audioChannel != strong->_incomingAudioChannels.end()) { + audioChannel->second->updateActivity(); + } + } } } auto myAudioLevel = strong->_myAudioLevel; - if (strong->_isMuted) { - myAudioLevel.level = 0.0f; - myAudioLevel.voice = false; - } + myAudioLevel.isMuted = strong->_isMuted; levelsUpdate.updates.push_back(GroupLevelUpdate{ 0, myAudioLevel }); strong->_audioLevels.clear(); @@ -1090,10 +1492,53 @@ public: strong->_audioLevelsUpdated(levelsUpdate); } - strong->beginLevelsTimer(50); + strong->beginLevelsTimer(100); }, timeoutMs); } + void beginAudioChannelCleanupTimer(int delayMs) { + const auto weak = std::weak_ptr(shared_from_this()); + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + + auto timestamp = rtc::TimeMillis(); + + std::vector removeChannels; + for (const auto &it : strong->_incomingAudioChannels) { + if (it.first.networkSsrc == 1) { + continue; + } + auto activity = it.second->getActivity(); + if (activity < timestamp - 1000) { + removeChannels.push_back(it.first); + } + } + + for (const auto &channelId : removeChannels) { + strong->removeIncomingAudioChannel(channelId); + } + + strong->beginAudioChannelCleanupTimer(500); + }, delayMs); + } + + void beginRemoteConstraintsUpdateTimer(int delayMs) { + const auto weak = std::weak_ptr(shared_from_this()); + 
_threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + + strong->maybeUpdateRemoteVideoConstraints(); + + strong->beginRemoteConstraintsUpdateTimer(5000); + }, delayMs); + } + void beginNetworkStatusTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { @@ -1195,9 +1640,7 @@ public: ChannelId channelSsrc = ChannelId(decodedChannel.ssrc + 1000, decodedChannel.ssrc); if (_incomingAudioChannels.find(channelSsrc) == _incomingAudioChannels.end()) { - std::ostringstream os; - os << "broadcast" << channelSsrc.name(); - addIncomingAudioChannel(os.str(), channelSsrc, true); + addIncomingAudioChannel(channelSsrc, true); } webrtc::RtpPacket packet(nullptr, 12 + decodedChannel.pcmData.size() * 2); @@ -1232,12 +1675,14 @@ public: } auto buffer = packet.Buffer(); - _call->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, buffer, -1); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, buffer]() { + _call->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, buffer, -1); + }); channelsWithActivity.insert(ChannelId(channelSsrc)); } - for (const auto channelId : channelsWithActivity) { + for (auto channelId : channelsWithActivity) { const auto it = _incomingAudioChannels.find(channelId); if (it != _incomingAudioChannels.end()) { it->second->updateActivity(); @@ -1347,165 +1792,137 @@ public: }, timeoutMs); } - void configureSendVideo() { - if (!_outgoingVideoChannel) { + void configureVideoParams() { + if (_selectedPayloadType) { + // Already configured. 
return; } - auto payloadTypes = assignPayloadTypes(_availableVideoFormats); - if (!payloadTypes.has_value()) { + _availablePayloadTypes = assignPayloadTypes(_availableVideoFormats); + if (_availablePayloadTypes.empty()) { return; } - GroupJoinPayloadVideoPayloadType vp8Payload; - vp8Payload.id = payloadTypes.value().videoCodec.id; - vp8Payload.name = payloadTypes.value().videoCodec.name; - vp8Payload.clockrate = payloadTypes.value().videoCodec.clockrate; - vp8Payload.channels = 0; + for (const auto &payloadType : _availablePayloadTypes) { + GroupJoinPayloadVideoPayloadType payload; + payload.id = payloadType.videoCodec.id; + payload.name = payloadType.videoCodec.name; + payload.clockrate = payloadType.videoCodec.clockrate; + payload.channels = 0; - std::vector vp8FeedbackTypes; + std::vector feedbackTypes; - GroupJoinPayloadVideoPayloadFeedbackType fbGoogRemb; - fbGoogRemb.type = "goog-remb"; - vp8FeedbackTypes.push_back(fbGoogRemb); + GroupJoinPayloadVideoPayloadType::FeedbackType fbGoogRemb; + fbGoogRemb.type = "goog-remb"; + feedbackTypes.push_back(fbGoogRemb); - GroupJoinPayloadVideoPayloadFeedbackType fbTransportCc; - fbTransportCc.type = "transport-cc"; - vp8FeedbackTypes.push_back(fbTransportCc); + GroupJoinPayloadVideoPayloadType::FeedbackType fbTransportCc; + fbTransportCc.type = "transport-cc"; + feedbackTypes.push_back(fbTransportCc); - GroupJoinPayloadVideoPayloadFeedbackType fbCcmFir; - fbCcmFir.type = "ccm"; - fbCcmFir.subtype = "fir"; - vp8FeedbackTypes.push_back(fbCcmFir); + GroupJoinPayloadVideoPayloadType::FeedbackType fbCcmFir; + fbCcmFir.type = "ccm"; + fbCcmFir.subtype = "fir"; + feedbackTypes.push_back(fbCcmFir); - GroupJoinPayloadVideoPayloadFeedbackType fbNack; - fbNack.type = "nack"; - vp8FeedbackTypes.push_back(fbNack); + GroupJoinPayloadVideoPayloadType::FeedbackType fbNack; + fbNack.type = "nack"; + feedbackTypes.push_back(fbNack); - GroupJoinPayloadVideoPayloadFeedbackType fbNackPli; - fbNackPli.type = "nack"; - fbNackPli.subtype = 
"pli"; - vp8FeedbackTypes.push_back(fbNackPli); + GroupJoinPayloadVideoPayloadType::FeedbackType fbNackPli; + fbNackPli.type = "nack"; + fbNackPli.subtype = "pli"; + feedbackTypes.push_back(fbNackPli); - vp8Payload.feedbackTypes = vp8FeedbackTypes; - vp8Payload.parameters = {}; + payload.feedbackTypes = feedbackTypes; + payload.parameters = {}; - _videoPayloadTypes.push_back(std::move(vp8Payload)); + _videoPayloadTypes.push_back(std::move(payload)); - GroupJoinPayloadVideoPayloadType rtxPayload; - rtxPayload.id = payloadTypes.value().rtxCodec.id; - rtxPayload.name = payloadTypes.value().rtxCodec.name; - rtxPayload.clockrate = payloadTypes.value().rtxCodec.clockrate; - rtxPayload.parameters.push_back(std::make_pair("apt", intToString(payloadTypes.value().videoCodec.id))); - _videoPayloadTypes.push_back(std::move(rtxPayload)); - - auto outgoingVideoDescription = std::make_unique(); - outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); - outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); - outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kVideoRotationUri, 13)); - - for (const auto &extension : outgoingVideoDescription->rtp_header_extensions()) { - _videoExtensionMap.push_back(std::make_pair(extension.id, extension.uri)); + GroupJoinPayloadVideoPayloadType rtxPayload; + rtxPayload.id = payloadType.rtxCodec.id; + rtxPayload.name = payloadType.rtxCodec.name; + rtxPayload.clockrate = payloadType.rtxCodec.clockrate; + rtxPayload.parameters.push_back(std::make_pair("apt", intToString(payloadType.videoCodec.id))); + _videoPayloadTypes.push_back(std::move(rtxPayload)); } - outgoingVideoDescription->set_rtcp_mux(true); - outgoingVideoDescription->set_rtcp_reduced_size(true); - outgoingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); - outgoingVideoDescription->set_codecs({ 
payloadTypes->videoCodec, payloadTypes->rtxCodec }); - - cricket::StreamParams videoSendStreamParams; - - std::vector simulcastGroupSsrcs; - std::vector fidGroups; - for (const auto &layer : _outgoingVideoSsrcs.simulcastLayers) { - simulcastGroupSsrcs.push_back(layer.ssrc); - - videoSendStreamParams.ssrcs.push_back(layer.ssrc); - videoSendStreamParams.ssrcs.push_back(layer.fidSsrc); - - cricket::SsrcGroup fidGroup(cricket::kFidSsrcGroupSemantics, { layer.ssrc, layer.fidSsrc }); - fidGroups.push_back(fidGroup); - } - if (simulcastGroupSsrcs.size() > 1) { - cricket::SsrcGroup simulcastGroup(cricket::kSimSsrcGroupSemantics, simulcastGroupSsrcs); - videoSendStreamParams.ssrc_groups.push_back(simulcastGroup); - - GroupJoinPayloadVideoSourceGroup payloadSimulcastGroup; - payloadSimulcastGroup.semantics = "SIM"; - payloadSimulcastGroup.ssrcs = simulcastGroupSsrcs; - _videoSourceGroups.push_back(payloadSimulcastGroup); - } - - for (auto fidGroup : fidGroups) { - videoSendStreamParams.ssrc_groups.push_back(fidGroup); - - GroupJoinPayloadVideoSourceGroup payloadFidGroup; - payloadFidGroup.semantics = "FID"; - payloadFidGroup.ssrcs = fidGroup.ssrcs; - _videoSourceGroups.push_back(payloadFidGroup); - } - - videoSendStreamParams.cname = "cname"; - - outgoingVideoDescription->AddStream(videoSendStreamParams); - - auto incomingVideoDescription = std::make_unique(); - incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 2)); - incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, 3)); - incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(webrtc::RtpExtension::kVideoRotationUri, 13)); - incomingVideoDescription->set_rtcp_mux(true); - incomingVideoDescription->set_rtcp_reduced_size(true); - incomingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); - incomingVideoDescription->set_codecs({ payloadTypes->videoCodec, 
payloadTypes->rtxCodec }); - - _outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false); - _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); - _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); - - webrtc::RtpParameters rtpParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(_outgoingVideoSsrcs.simulcastLayers[0].ssrc); - if (rtpParameters.encodings.size() == 3) { - for (int i = 0; i < (int)rtpParameters.encodings.size(); i++) { - if (i == 0) { - rtpParameters.encodings[i].min_bitrate_bps = 50000; - rtpParameters.encodings[i].max_bitrate_bps = 100000; - rtpParameters.encodings[i].scale_resolution_down_by = 4.0; - } else if (i == 1) { - rtpParameters.encodings[i].max_bitrate_bps = 150000; - rtpParameters.encodings[i].max_bitrate_bps = 200000; - rtpParameters.encodings[i].scale_resolution_down_by = 2.0; - } else if (i == 2) { - rtpParameters.encodings[i].min_bitrate_bps = 300000; - rtpParameters.encodings[i].max_bitrate_bps = 800000; - } + std::vector codecPriorities; + for (const auto name : _videoCodecPreferences) { + std::string codecName; + switch (name) { + case VideoCodecName::VP8: { + codecName = cricket::kVp8CodecName; + break; } - } else if (rtpParameters.encodings.size() == 2) { - for (int i = 0; i < (int)rtpParameters.encodings.size(); i++) { - if (i == 0) { - rtpParameters.encodings[i].min_bitrate_bps = 50000; - rtpParameters.encodings[i].max_bitrate_bps = 100000; - rtpParameters.encodings[i].scale_resolution_down_by = 4.0; - } else if (i == 1) { - rtpParameters.encodings[i].min_bitrate_bps = 200000; - rtpParameters.encodings[i].max_bitrate_bps = 800000; - } + case VideoCodecName::VP9: { + codecName = cricket::kVp9CodecName; + break; + } + default: { + break; + } + } + if (codecName.size() != 0) { + codecPriorities.push_back(std::move(codecName)); + } + } + std::vector defaultCodecPriorities = { + cricket::kVp8CodecName, 
+ cricket::kVp9CodecName + }; + for (const auto &name : defaultCodecPriorities) { + if (std::find(codecPriorities.begin(), codecPriorities.end(), name) == codecPriorities.end()) { + codecPriorities.push_back(name); } } - _outgoingVideoChannel->media_channel()->SetRtpSendParameters(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, rtpParameters); + for (const auto &codecName : codecPriorities) { + if (_selectedPayloadType) { + break; + } + for (const auto &payloadType : _availablePayloadTypes) { + if (payloadType.videoCodec.name == codecName) { + _selectedPayloadType = payloadType; + break; + } + } + } + if (!_selectedPayloadType) { + return; + } + + _videoExtensionMap.emplace_back(2, webrtc::RtpExtension::kAbsSendTimeUri); + _videoExtensionMap.emplace_back(3, webrtc::RtpExtension::kTransportSequenceNumberUri); + _videoExtensionMap.emplace_back(13, webrtc::RtpExtension::kVideoRotationUri); } void OnSentPacket_w(const rtc::SentPacket& sent_packet) { _call->OnSentPacket(sent_packet); } + void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer *buffer, int64_t packet_time_us) { + rtc::CopyOnWriteBuffer packet = *buffer; + _threads->getWorkerThread()->PostTask(ToQueuedTask(_workerThreadSafery, [this, packet, packet_time_us] { + if (_call) { + _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, packet_time_us); + } + })); + } + void adjustBitratePreferences(bool resetStartBitrate) { webrtc::BitrateConstraints preferences; - if (_videoCapture) { - preferences.min_bitrate_bps = 64000; + webrtc::BitrateSettings settings; + if (_getVideoSource) { + settings.min_bitrate_bps = _minOutgoingVideoBitrateKbit * 1024; if (resetStartBitrate) { - preferences.start_bitrate_bps = (100 + 800 + 32 + 100) * 1000; + preferences.start_bitrate_bps = std::max(preferences.min_bitrate_bps, 400 * 1000); + } + if (_videoContentType == VideoContentType::Screencast) { + preferences.max_bitrate_bps = std::max(preferences.min_bitrate_bps, (1020 + 32) * 1000); + } else { + 
preferences.max_bitrate_bps = std::max(preferences.min_bitrate_bps, (700 + 32) * 1000); } - preferences.max_bitrate_bps = (100 + 200 + 800 + 32 + 100) * 1000; } else { preferences.min_bitrate_bps = 32000; if (resetStartBitrate) { @@ -1514,7 +1931,14 @@ public: preferences.max_bitrate_bps = 32000; } + settings.min_bitrate_bps = preferences.min_bitrate_bps; + settings.start_bitrate_bps = preferences.start_bitrate_bps; + settings.max_bitrate_bps = preferences.max_bitrate_bps; + _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { + _call->SetClientBitratePreferences(settings); + }); } void setIsRtcConnected(bool isConnected) { @@ -1571,14 +1995,6 @@ public: if (_effectiveNetworkState.isConnected != effectiveNetworkState.isConnected || _effectiveNetworkState.isTransitioningFromBroadcastToRtc != effectiveNetworkState.isTransitioningFromBroadcastToRtc) { _effectiveNetworkState = effectiveNetworkState; - if (_effectiveNetworkState.isConnected) { - _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp); - _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp); - } else { - _call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown); - _call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown); - } - if (_networkStateUpdated) { _networkStateUpdated(_effectiveNetworkState); } @@ -1592,12 +2008,12 @@ public: _isDataChannelOpen = isDataChannelOpen; if (_isDataChannelOpen) { - maybeUpdateRemoteVideoConstaints(); + maybeUpdateRemoteVideoConstraints(); } } void receivePacket(rtc::CopyOnWriteBuffer const &packet, bool isUnresolved) { - if (packet.size() >= 4) { + if (packet.size() >= 4) { if (packet.data()[0] == 0x13 && packet.data()[1] == 0x88 && packet.data()[2] == 0x13 && packet.data()[3] == 0x88) { // SCTP packet header (source port 5000, destination port 5000) return; @@ -1613,7 +2029,9 @@ public: return; 
} - _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, -1); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this, packet]() { + _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, -1); + }); } else { if (!rtpParser.Parse(&header)) { // Probably a data channel message @@ -1624,74 +2042,149 @@ public: return; } - auto it = _ssrcMapping.find(header.ssrc); - if (it == _ssrcMapping.end()) { - if (isUnresolved) { - maybeReportUnknownSsrc(header.ssrc); + auto ssrcInfo = _channelBySsrc.find(header.ssrc); + if (ssrcInfo == _channelBySsrc.end()) { + // opus + if (header.payloadType == 111) { + maybeRequestUnknownSsrc(header.ssrc); _missingPacketBuffer.add(header.ssrc, packet); } } else { - const auto it = _incomingAudioChannels.find(ChannelId(header.ssrc)); - if (it != _incomingAudioChannels.end()) { - it->second->updateActivity(); + switch (ssrcInfo->second.type) { + case ChannelSsrcInfo::Type::Audio: { + const auto it = _incomingAudioChannels.find(ChannelId(header.ssrc)); + if (it != _incomingAudioChannels.end()) { + it->second->updateActivity(); + } + + break; + } + case ChannelSsrcInfo::Type::Video: { + break; + } + default: { + break; + } } } } } void receiveRtcpPacket(rtc::CopyOnWriteBuffer const &packet, int64_t timestamp) { - _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, timestamp); + _threads->getWorkerThread()->PostTask(RTC_FROM_HERE, [this, packet, timestamp]() { + _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, timestamp); + }); } - void maybeReportUnknownSsrc(uint32_t ssrc) { - if (_reportedUnknownSsrcs.find(ssrc) == _reportedUnknownSsrcs.end()) { - _reportedUnknownSsrcs.insert(ssrc); + void receiveDataChannelMessage(std::string const &message) { + std::string parsingError; + auto json = json11::Json::parse(message, parsingError); + if (json.type() != json11::Json::OBJECT) { + RTC_LOG(LS_WARNING) << "receiveDataChannelMessage: error parsing message: " << parsingError; + return; + } - 
_pendingUnknownSsrcs.insert(ssrc); + if (json.is_object()) { + const auto colibriClass = json.object_items().find("colibriClass"); + if (colibriClass != json.object_items().end() && colibriClass->second.is_string()) { + const auto messageType = colibriClass->second.string_value(); + if (messageType == "SenderVideoConstraints") { + const auto videoConstraints = json.object_items().find("videoConstraints"); + if (videoConstraints != json.object_items().end() && videoConstraints->second.is_object()) { + const auto idealHeight = videoConstraints->second.object_items().find("idealHeight"); + if (idealHeight != videoConstraints->second.object_items().end() && idealHeight->second.is_number()) { + int outgoingVideoConstraint = idealHeight->second.int_value(); + if (_outgoingVideoConstraint != outgoingVideoConstraint) { + if (_outgoingVideoConstraint > outgoingVideoConstraint) { + _pendingOutgoingVideoConstraint = outgoingVideoConstraint; - if (!_isUnknownSsrcsScheduled) { - auto timestamp = rtc::TimeMillis(); - if (_lastUnknownSsrcsReport < timestamp - 100) { - doReportPendingUnknownSsrcs(); - } else { - _isUnknownSsrcsScheduled = true; + int requestId = _pendingOutgoingVideoConstraintRequestId; + _pendingOutgoingVideoConstraintRequestId += 1; - const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { - auto strong = weak.lock(); - if (!strong) { - return; + const auto weak = std::weak_ptr(shared_from_this()); + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, requestId]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + if (strong->_pendingOutgoingVideoConstraint != -1 && strong->_pendingOutgoingVideoConstraintRequestId == requestId) { + if (strong->_outgoingVideoConstraint != strong->_pendingOutgoingVideoConstraint) { + strong->_outgoingVideoConstraint = strong->_pendingOutgoingVideoConstraint; + strong->adjustVideoSendParams(); + } + 
strong->_pendingOutgoingVideoConstraint = -1; + } + }, 2000); + } else { + _pendingOutgoingVideoConstraint = -1; + _pendingOutgoingVideoConstraintRequestId += 1; + _outgoingVideoConstraint = outgoingVideoConstraint; + adjustVideoSendParams(); + } + } } - - strong->_isUnknownSsrcsScheduled = false; - strong->doReportPendingUnknownSsrcs(); - }, 100); + } } } } } - void doReportPendingUnknownSsrcs() { - std::vector ssrcs; - for (auto ssrc : _pendingUnknownSsrcs) { - ssrcs.push_back(ssrc); - } - _pendingUnknownSsrcs.clear(); - - if (ssrcs.size() != 0) { - _lastUnknownSsrcsReport = rtc::TimeMillis(); - if (_participantDescriptionsRequired) { - _participantDescriptionsRequired(ssrcs); - } else { - std::vector participants; - for (auto ssrc : ssrcs) { - GroupParticipantDescription description; - description.audioSsrc = ssrc; - participants.push_back(std::move(description)); - } - addParticipants(std::move(participants)); + void maybeRequestUnknownSsrc(uint32_t ssrc) { + if (!_requestMediaChannelDescriptions) { + MediaChannelDescription description; + description.audioSsrc = ssrc; + processMediaChannelDescriptionsResponse(-1, {description}); + return; + } + + for (const auto &it : _requestedMediaChannelDescriptions) { + if (std::find(it.second.ssrcs.begin(), it.second.ssrcs.end(), ssrc) != it.second.ssrcs.end()) { + return; + } + } + + int requestId = _nextMediaChannelDescriptionsRequestId; + _nextMediaChannelDescriptionsRequestId += 1; + + std::vector requestSsrcs = { ssrc }; + + const auto weak = std::weak_ptr(shared_from_this()); + auto task = _requestMediaChannelDescriptions(requestSsrcs, [weak, threads = _threads, requestId](std::vector &&descriptions) { + threads->getWorkerThread()->PostTask(RTC_FROM_HERE, [weak, requestId, descriptions = std::move(descriptions)]() mutable { + auto strong = weak.lock(); + if (!strong) { + return; + } + + strong->processMediaChannelDescriptionsResponse(requestId, descriptions); + }); + }); + 
_requestedMediaChannelDescriptions.insert(std::make_pair(requestId, RequestedMediaChannelDescriptions(task, std::move(requestSsrcs)))); + } + + void processMediaChannelDescriptionsResponse(int requestId, std::vector const &descriptions) { + _requestedMediaChannelDescriptions.erase(requestId); + + if (_disableIncomingChannels) { + return; + } + + for (const auto &description : descriptions) { + switch (description.type) { + case MediaChannelDescription::Type::Audio: { + if (description.audioSsrc != 0) { + addIncomingAudioChannel(ChannelId(description.audioSsrc)); + } + break; + } + case MediaChannelDescription::Type::Video: { + break; + } + default: { + break; + } + } } - } } void maybeDeliverBufferedPackets(uint32_t ssrc) { @@ -1709,59 +2202,76 @@ public: }*/ } - void maybeUpdateRemoteVideoConstaints() { + void maybeUpdateRemoteVideoConstraints() { if (!_isDataChannelOpen) { return; } - std::vector endpointIds; - for (const auto &incomingVideoChannel : _incomingVideoChannels) { - auto ssrcMapping = _ssrcMapping.find(incomingVideoChannel.first); - if (ssrcMapping != _ssrcMapping.end()) { - if (std::find(endpointIds.begin(), endpointIds.end(), ssrcMapping->second.endpointId) == endpointIds.end()) { - endpointIds.push_back(ssrcMapping->second.endpointId); - } - } - } - std::sort(endpointIds.begin(), endpointIds.end()); - std::string pinnedEndpoint; - std::ostringstream string; - string << "{" << "\n"; - string << " \"colibriClass\": \"ReceiverVideoConstraintsChangedEvent\"," << "\n"; - string << " \"videoConstraints\": [" << "\n"; - bool isFirst = true; - for (size_t i = 0; i < endpointIds.size(); i++) { - int idealHeight = 180; - if (_currentHighQualityVideoEndpointId == endpointIds[i]) { - idealHeight = 720; - } + json11::Json::object json; + json.insert(std::make_pair("colibriClass", json11::Json("ReceiverVideoConstraints"))); - if (isFirst) { - isFirst = false; - } else { - if (i != 0) { - string << ","; + json11::Json::object defaultConstraints; + 
defaultConstraints.insert(std::make_pair("maxHeight", json11::Json(0))); + json.insert(std::make_pair("defaultConstraints", json11::Json(std::move(defaultConstraints)))); + + json11::Json::array onStageEndpoints; + json11::Json::object constraints; + + for (const auto &incomingVideoChannel : _incomingVideoChannels) { + json11::Json::object selectedConstraint; + + switch (incomingVideoChannel.second->requestedMinQuality()) { + case VideoChannelDescription::Quality::Full: { + selectedConstraint.insert(std::make_pair("minHeight", json11::Json(720))); + break; + } + case VideoChannelDescription::Quality::Medium: { + selectedConstraint.insert(std::make_pair("minHeight", json11::Json(360))); + break; + } + case VideoChannelDescription::Quality::Thumbnail: { + selectedConstraint.insert(std::make_pair("minHeight", json11::Json(180))); + break; + } + default: { + break; + } + } + switch (incomingVideoChannel.second->requestedMaxQuality()) { + case VideoChannelDescription::Quality::Full: { + onStageEndpoints.push_back(json11::Json(incomingVideoChannel.first.endpointId)); + selectedConstraint.insert(std::make_pair("maxHeight", json11::Json(720))); + break; + } + case VideoChannelDescription::Quality::Medium: { + selectedConstraint.insert(std::make_pair("maxHeight", json11::Json(360))); + break; + } + case VideoChannelDescription::Quality::Thumbnail: { + selectedConstraint.insert(std::make_pair("maxHeight", json11::Json(180))); + break; + } + default: { + break; } } - string << " {\n"; - string << " \"id\": \"" << endpointIds[i] << "\",\n"; - string << " \"idealHeight\": " << idealHeight << "\n"; - string << " }"; - string << "\n"; - } - string << " ]" << "\n"; - string << "}"; - std::string result = string.str(); + constraints.insert(std::make_pair(incomingVideoChannel.first.endpointId, json11::Json(std::move(selectedConstraint)))); + } + + json.insert(std::make_pair("onStageEndpoints", json11::Json(std::move(onStageEndpoints)))); + json.insert(std::make_pair("constraints", 
json11::Json(std::move(constraints)))); + + std::string result = json11::Json(std::move(json)).dump(); _networkManager->perform(RTC_FROM_HERE, [result = std::move(result)](GroupNetworkManager *networkManager) { networkManager->sendDataChannelMessage(result); }); } void setConnectionMode(GroupConnectionMode connectionMode, bool keepBroadcastIfWasEnabled) { - if (_connectionMode != connectionMode) { + if (_connectionMode != connectionMode || connectionMode == GroupConnectionMode::GroupConnectionModeNone) { GroupConnectionMode previousMode = _connectionMode; _connectionMode = connectionMode; onConnectionModeUpdated(previousMode, keepBroadcastIfWasEnabled); @@ -1769,7 +2279,7 @@ public: } void onConnectionModeUpdated(GroupConnectionMode previousMode, bool keepBroadcastIfWasEnabled) { - RTC_CHECK(_connectionMode != previousMode); + RTC_CHECK(_connectionMode != previousMode || _connectionMode == GroupConnectionMode::GroupConnectionModeNone); if (previousMode == GroupConnectionMode::GroupConnectionModeRtc) { _networkManager->perform(RTC_FROM_HERE, [](GroupNetworkManager *networkManager) { @@ -1790,16 +2300,15 @@ public: if (_connectionMode == GroupConnectionMode::GroupConnectionModeNone) { destroyOutgoingAudioChannel(); + destroyOutgoingVideoChannel(); - auto generator = std::mt19937(std::random_device()()); - auto distribution = std::uniform_int_distribution(); - do { - _outgoingAudioSsrc = distribution(generator) & 0x7fffffffU; - } while (!_outgoingAudioSsrc); + // Regenerate and reconfigure. 
+ generateSsrcs(); if (!_isMuted) { createOutgoingAudioChannel(); } + createOutgoingVideoChannel(); } switch (_connectionMode) { @@ -1831,78 +2340,118 @@ public: updateIsConnected(); } - void emitJoinPayload(std::function completion) { - _networkManager->perform(RTC_FROM_HERE, [outgoingAudioSsrc = _outgoingAudioSsrc, videoPayloadTypes = _videoPayloadTypes, videoExtensionMap = _videoExtensionMap, videoSourceGroups = _videoSourceGroups, completion](GroupNetworkManager *networkManager) { - GroupJoinPayload payload; + void generateSsrcs() { + auto generator = std::mt19937(std::random_device()()); + auto distribution = std::uniform_int_distribution(); + do { + _outgoingAudioSsrc = distribution(generator) & 0x7fffffffU; + } while (!_outgoingAudioSsrc); - payload.ssrc = outgoingAudioSsrc; + uint32_t outgoingVideoSsrcBase = _outgoingAudioSsrc + 1; + int numVideoSimulcastLayers = 3; + if (_videoContentType == VideoContentType::Screencast) { + numVideoSimulcastLayers = 2; + } + _outgoingVideoSsrcs.simulcastLayers.clear(); + for (int layerIndex = 0; layerIndex < numVideoSimulcastLayers; layerIndex++) { + _outgoingVideoSsrcs.simulcastLayers.push_back(VideoSsrcs::SimulcastLayer(outgoingVideoSsrcBase + layerIndex * 2 + 0, outgoingVideoSsrcBase + layerIndex * 2 + 1)); + } + } - /*payload.videoPayloadTypes = videoPayloadTypes; - payload.videoExtensionMap = videoExtensionMap; - payload.videoSourceGroups = videoSourceGroups;*/ + void emitJoinPayload(std::function completion) { + _networkManager->perform(RTC_FROM_HERE, [outgoingAudioSsrc = _outgoingAudioSsrc, /*videoPayloadTypes = _videoPayloadTypes, videoExtensionMap = _videoExtensionMap, */videoSourceGroups = _videoSourceGroups, videoContentType = _videoContentType, completion](GroupNetworkManager *networkManager) { + GroupJoinInternalPayload payload; + + payload.audioSsrc = outgoingAudioSsrc; + + if (videoContentType != VideoContentType::None) { + GroupParticipantVideoInformation videoInformation; + videoInformation.ssrcGroups = 
videoSourceGroups; + payload.videoInformation = std::move(videoInformation); + } + + GroupJoinTransportDescription transportDescription; auto localIceParameters = networkManager->getLocalIceParameters(); - payload.ufrag = localIceParameters.ufrag; - payload.pwd = localIceParameters.pwd; + transportDescription.ufrag = localIceParameters.ufrag; + transportDescription.pwd = localIceParameters.pwd; auto localFingerprint = networkManager->getLocalFingerprint(); if (localFingerprint) { - GroupJoinPayloadFingerprint serializedFingerprint; + GroupJoinTransportDescription::Fingerprint serializedFingerprint; serializedFingerprint.hash = localFingerprint->algorithm; serializedFingerprint.fingerprint = localFingerprint->GetRfc4572Fingerprint(); serializedFingerprint.setup = "passive"; - payload.fingerprints.push_back(std::move(serializedFingerprint)); + transportDescription.fingerprints.push_back(std::move(serializedFingerprint)); } - completion(payload); + payload.transport = std::move(transportDescription); + + GroupJoinPayload result; + result.audioSsrc = payload.audioSsrc; + result.json = payload.serialize(); + completion(result); }); } - void setVideoCapture(std::shared_ptr videoCapture, std::function completion, bool isInitializing) { - bool resetBitrate = (_videoCapture == nullptr) != (videoCapture == nullptr) && !isInitializing; - if (!isInitializing && _videoCapture == videoCapture) { + void setVideoSource(std::function getVideoSource, bool isInitializing) { + bool resetBitrate = (!_getVideoSource) != (!getVideoSource) && !isInitializing; + if (!isInitializing && _getVideoSource && getVideoSource && getVideoSource() == _getVideoSource()) { return; } - _videoCapture = videoCapture; - - if (_outgoingVideoChannel) { - if (_videoCapture) { - _outgoingVideoChannel->Enable(true); - _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, NULL, GetVideoCaptureAssumingSameThread(_videoCapture.get())->source()); - } else { - 
_outgoingVideoChannel->Enable(false); - _outgoingVideoChannel->media_channel()->SetVideoSend(_outgoingVideoSsrcs.simulcastLayers[0].ssrc, NULL, nullptr); - } - } - + _getVideoSource = std::move(getVideoSource); + updateVideoSend(); if (resetBitrate) { adjustBitratePreferences(true); } } - void setAudioOutputDevice(const std::string &id) { + void setVideoCapture(std::shared_ptr videoCapture, bool isInitializing) { + _videoCapture = videoCapture; + setVideoSource(videoCaptureToGetVideoSource(std::move(videoCapture)), isInitializing); + } + + void setAudioOutputDevice(const std::string &id) { #if not defined(WEBRTC_IOS) && not defined(WEBRTC_ANDROID) - SetAudioOutputDeviceById(_audioDeviceModule.get(), id); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&] { + SetAudioOutputDeviceById(_audioDeviceModule.get(), id); + }); #endif // WEBRTC_IOS } void setAudioInputDevice(const std::string &id) { #if not defined(WEBRTC_IOS) && not defined(WEBRTC_ANDROID) - SetAudioInputDeviceById(_audioDeviceModule.get(), id); + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&] { + SetAudioInputDeviceById(_audioDeviceModule.get(), id); + }); #endif // WEBRTC_IOS } - void setJoinResponsePayload(GroupJoinResponsePayload payload, std::vector &&participants) { + void setJoinResponsePayload(std::string const &payload) { RTC_LOG(LS_INFO) << formatTimestampMillis(rtc::TimeMillis()) << ": " << "setJoinResponsePayload"; - _networkManager->perform(RTC_FROM_HERE, [payload](GroupNetworkManager *networkManager) { + auto parsedPayload = GroupJoinResponsePayload::parse(payload); + if (!parsedPayload) { + RTC_LOG(LS_ERROR) << "Could not parse json response payload"; + return; + } + + _sharedVideoInformation = parsedPayload->videoInformation; + + _serverBandwidthProbingVideoSsrc.reset(); + + if (parsedPayload->videoInformation && parsedPayload->videoInformation->serverVideoBandwidthProbingSsrc) { + 
setServerBandwidthProbingChannelSsrc(parsedPayload->videoInformation->serverVideoBandwidthProbingSsrc); + } + + _networkManager->perform(RTC_FROM_HERE, [parsedTransport = parsedPayload->transport](GroupNetworkManager *networkManager) { PeerIceParameters remoteIceParameters; - remoteIceParameters.ufrag = payload.ufrag; - remoteIceParameters.pwd = payload.pwd; + remoteIceParameters.ufrag = parsedTransport.ufrag; + remoteIceParameters.pwd = parsedTransport.pwd; std::vector iceCandidates; - for (auto const &candidate : payload.candidates) { + for (auto const &candidate : parsedTransport.candidates) { rtc::SocketAddress address(candidate.ip, stringToInt(candidate.port)); cricket::Candidate parsedCandidate( @@ -1910,8 +2459,8 @@ public: /*protocol=*/candidate.protocol, /*address=*/address, /*priority=*/stringToUInt32(candidate.priority), - /*username=*/payload.ufrag, - /*password=*/payload.pwd, + /*username=*/parsedTransport.ufrag, + /*password=*/parsedTransport.pwd, /*type=*/candidate.type, /*generation=*/stringToUInt32(candidate.generation), /*foundation=*/candidate.foundation, @@ -1922,40 +2471,58 @@ public: } std::unique_ptr fingerprint; - if (payload.fingerprints.size() != 0) { - fingerprint = rtc::SSLFingerprint::CreateUniqueFromRfc4572(payload.fingerprints[0].hash, payload.fingerprints[0].fingerprint); + if (parsedTransport.fingerprints.size() != 0) { + fingerprint = rtc::SSLFingerprint::CreateUniqueFromRfc4572(parsedTransport.fingerprints[0].hash, parsedTransport.fingerprints[0].fingerprint); } networkManager->setRemoteParams(remoteIceParameters, iceCandidates, fingerprint.get()); }); - addParticipants(std::move(participants)); + adjustBitratePreferences(true); + + if (!_pendingRequestedVideo.empty()) { + setRequestedVideoChannels(std::move(_pendingRequestedVideo)); + _pendingRequestedVideo.clear(); + } } - void addParticipants(std::vector &&participants) { - if (_disableIncomingChannels) { + void setServerBandwidthProbingChannelSsrc(uint32_t probingSsrc) { + 
RTC_CHECK(probingSsrc); + + if (!_sharedVideoInformation || _availablePayloadTypes.empty()) { return; } - for (const auto &participant : participants) { - if (participant.audioSsrc == _outgoingAudioSsrc) { - continue; - } - _reportedUnknownSsrcs.erase(participant.audioSsrc); + GroupParticipantVideoInformation videoInformation; - if (_incomingAudioChannels.find(ChannelId(participant.audioSsrc)) == _incomingAudioChannels.end()) { - addIncomingAudioChannel(participant.endpointId, ChannelId(participant.audioSsrc)); - } - if (participant.videoPayloadTypes.size() != 0 && participant.videoSourceGroups.size() != 0) { - if (_incomingVideoChannels.find(participant.audioSsrc) == _incomingVideoChannels.end()) { - addIncomingVideoChannel(participant); - } - } - } + GroupJoinPayloadVideoSourceGroup sourceGroup; + sourceGroup.ssrcs.push_back(probingSsrc); + sourceGroup.semantics = "SIM"; + + videoInformation.ssrcGroups.push_back(std::move(sourceGroup)); + + _serverBandwidthProbingVideoSsrc.reset(new IncomingVideoChannel( + _channelManager.get(), + _call.get(), + _rtpTransport, + _uniqueRandomIdGenerator.get(), + _availableVideoFormats, + _sharedVideoInformation.value(), + 123456, + VideoChannelDescription::Quality::Thumbnail, + VideoChannelDescription::Quality::Thumbnail, + videoInformation, + _threads + )); + + ChannelSsrcInfo mapping; + mapping.type = ChannelSsrcInfo::Type::Video; + mapping.allSsrcs.push_back(probingSsrc); + _channelBySsrc.insert(std::make_pair(probingSsrc, std::move(mapping))); } void removeSsrcs(std::vector ssrcs) { - bool updatedIncomingVideoChannels = false; + /*bool updatedIncomingVideoChannels = false; for (auto ssrc : ssrcs) { auto it = _ssrcMapping.find(ssrc); @@ -1975,7 +2542,15 @@ public: if (updatedIncomingVideoChannels) { updateIncomingVideoSources(); - } + }*/ + } + + void removeIncomingVideoSource(uint32_t ssrc) { + /*auto videoChannel = _incomingVideoChannels.find(ssrc); + if (videoChannel != _incomingVideoChannels.end()) { + 
_incomingVideoChannels.erase(videoChannel); + updateIncomingVideoSources(); + }*/ } void setIsMuted(bool isMuted) { @@ -1984,59 +2559,71 @@ public: } _isMuted = isMuted; + if (!_isMuted && !_outgoingAudioChannel) { + createOutgoingAudioChannel(); + } + onUpdatedIsMuted(); } void onUpdatedIsMuted() { - if (!_isMuted) { - if (!_outgoingAudioChannel) { - createOutgoingAudioChannel(); + if (_outgoingAudioChannel) { + _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [this]() { + _outgoingAudioChannel->media_channel()->SetAudioSend(_outgoingAudioSsrc, !_isMuted, nullptr, &_audioSource); + _outgoingAudioChannel->Enable(!_isMuted); + }); + } + } + + void setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled) { + _noiseSuppressionConfiguration->isEnabled = isNoiseSuppressionEnabled; + } + + void addIncomingVideoOutput(std::string const &endpointId, std::weak_ptr> sink) { + if (_sharedVideoInformation && endpointId == _sharedVideoInformation->endpointId) { + if (_videoCapture) { + _videoCaptureSink->addSink(sink); + _videoCapture->setOutput(_videoCaptureSink); + } + } else { + auto it = _incomingVideoChannels.find(VideoChannelId(endpointId)); + if (it != _incomingVideoChannels.end()) { + it->second->addSink(sink); + } else { + _pendingVideoSinks[VideoChannelId(endpointId)].push_back(sink); } } - - if (_outgoingAudioChannel) { - _outgoingAudioChannel->Enable(!_isMuted); - _outgoingAudioChannel->media_channel()->SetAudioSend(_outgoingAudioSsrc, _isRtcConnected && !_isMuted, nullptr, &_audioSource); - } } - void addIncomingVideoOutput(uint32_t ssrc, std::weak_ptr> sink) { - auto it = _incomingVideoChannels.find(ssrc); - if (it != _incomingVideoChannels.end()) { - it->second->addSink(sink); - } - } - - void addIncomingAudioChannel(std::string const &endpointId, ChannelId ssrc, bool isRawPcm = false) { + void addIncomingAudioChannel(ChannelId ssrc, bool isRawPcm = false) { if (_incomingAudioChannels.find(ssrc) != _incomingAudioChannels.end()) { return; } if 
(_incomingAudioChannels.size() > 5) { + auto timestamp = rtc::TimeMillis(); + int64_t minActivity = INT64_MAX; ChannelId minActivityChannelId(0, 0); for (const auto &it : _incomingAudioChannels) { + if (it.first.networkSsrc == 1) { + continue; + } auto activity = it.second->getActivity(); - if (activity < minActivity) { + if (activity < minActivity && activity < timestamp - 1000) { minActivity = activity; minActivityChannelId = it.first; } } if (minActivityChannelId.networkSsrc != 0) { - const auto it = _incomingAudioChannels.find(minActivityChannelId); - if (it != _incomingAudioChannels.end()) { - _incomingAudioChannels.erase(it); - } - auto reportedIt = _reportedUnknownSsrcs.find(minActivityChannelId.actualSsrc); - if (reportedIt != _reportedUnknownSsrcs.end()) { - _reportedUnknownSsrcs.erase(reportedIt); - } - auto mappingIt = _ssrcMapping.find(minActivityChannelId.actualSsrc); - if (mappingIt != _ssrcMapping.end()) { - _ssrcMapping.erase(mappingIt); - } + removeIncomingAudioChannel(minActivityChannelId); + } + + if (_incomingAudioChannels.size() > 5) { + // Wait until there is a channel that hasn't been active in 1 second + return; } } @@ -2070,7 +2657,7 @@ public: ssrc, std::move(onAudioSinkUpdate), _onAudioFrame, - *_threads + _threads )); auto volume = _volumeBySsrc.find(ssrc.actualSsrc); @@ -2080,17 +2667,50 @@ public: _incomingAudioChannels.insert(std::make_pair(ssrc, std::move(channel))); - SsrcMappingInfo mapping; - mapping.ssrc = ssrc.networkSsrc; - mapping.isVideo = false; - mapping.endpointId = endpointId; - _ssrcMapping.insert(std::make_pair(ssrc.networkSsrc, mapping)); + auto currentMapping = _channelBySsrc.find(ssrc.networkSsrc); + if (currentMapping != _channelBySsrc.end()) { + if (currentMapping->second.type == ChannelSsrcInfo::Type::Audio) { + if (std::find(currentMapping->second.allSsrcs.begin(), currentMapping->second.allSsrcs.end(), ssrc.networkSsrc) == currentMapping->second.allSsrcs.end()) { + 
currentMapping->second.allSsrcs.push_back(ssrc.networkSsrc); + } + } + } else { + ChannelSsrcInfo mapping; + mapping.type = ChannelSsrcInfo::Type::Audio; + mapping.allSsrcs.push_back(ssrc.networkSsrc); + _channelBySsrc.insert(std::make_pair(ssrc.networkSsrc, std::move(mapping))); + } maybeDeliverBufferedPackets(ssrc.networkSsrc); + + adjustBitratePreferences(false); } - void addIncomingVideoChannel(GroupParticipantDescription const &participant) { - if (_incomingVideoChannels.find(participant.audioSsrc) != _incomingVideoChannels.end()) { + void removeIncomingAudioChannel(ChannelId const &channelId) { + const auto it = _incomingAudioChannels.find(channelId); + if (it != _incomingAudioChannels.end()) { + _incomingAudioChannels.erase(it); + } + + auto currentMapping = _channelBySsrc.find(channelId.networkSsrc); + if (currentMapping != _channelBySsrc.end()) { + if (currentMapping->second.type == ChannelSsrcInfo::Type::Audio) { + auto ssrcs = currentMapping->second.allSsrcs; + for (auto ssrc : ssrcs) { + auto it = _channelBySsrc.find(ssrc); + if (it != _channelBySsrc.end()) { + _channelBySsrc.erase(it); + } + } + } + } + } + + void addIncomingVideoChannel(uint32_t audioSsrc, GroupParticipantVideoInformation const &videoInformation, VideoChannelDescription::Quality minQuality, VideoChannelDescription::Quality maxQuality) { + if (!_sharedVideoInformation) { + return; + } + if (_incomingVideoChannels.find(VideoChannelId(videoInformation.endpointId)) != _incomingVideoChannels.end()) { return; } @@ -2102,41 +2722,47 @@ public: _rtpTransport, _uniqueRandomIdGenerator.get(), _availableVideoFormats, - participant, - *_threads + _sharedVideoInformation.value(), + audioSsrc, + minQuality, + maxQuality, + videoInformation, + _threads )); - _incomingVideoChannels.insert(std::make_pair(participant.audioSsrc, std::move(channel))); + + const auto pendingSinks = _pendingVideoSinks.find(VideoChannelId(videoInformation.endpointId)); + if (pendingSinks != _pendingVideoSinks.end()) { + for 
(const auto &sink : pendingSinks->second) { + channel->addSink(sink); + } + + _pendingVideoSinks.erase(pendingSinks); + } + + _incomingVideoChannels.insert(std::make_pair(VideoChannelId(videoInformation.endpointId), std::move(channel))); std::vector allSsrcs; - for (const auto &group : participant.videoSourceGroups) { + for (const auto &group : videoInformation.ssrcGroups) { for (auto ssrc : group.ssrcs) { - if (_ssrcMapping.find(ssrc) == _ssrcMapping.end()) { + if (std::find(allSsrcs.begin(), allSsrcs.end(), ssrc) == allSsrcs.end()) { allSsrcs.push_back(ssrc); - - SsrcMappingInfo mapping; - mapping.ssrc = participant.audioSsrc; - mapping.isVideo = true; - mapping.endpointId = participant.endpointId; - _ssrcMapping.insert(std::make_pair(ssrc, mapping)); } } } - updateIncomingVideoSources(); + for (auto ssrc : allSsrcs) { + ChannelSsrcInfo mapping; + mapping.type = ChannelSsrcInfo::Type::Video; + mapping.allSsrcs = allSsrcs; + mapping.videoEndpointId = videoInformation.endpointId; + _channelBySsrc.insert(std::make_pair(ssrc, std::move(mapping))); + } for (auto ssrc : allSsrcs) { maybeDeliverBufferedPackets(ssrc); } - } - void updateIncomingVideoSources() { - if (_incomingVideoSourcesUpdated) { - std::vector videoChannelSsrcs; - for (const auto &it : _incomingVideoChannels) { - videoChannelSsrcs.push_back(it.first); - } - _incomingVideoSourcesUpdated(videoChannelSsrcs); - } + adjustBitratePreferences(false); } void setVolume(uint32_t ssrc, double volume) { @@ -2158,34 +2784,81 @@ public: } } - void setFullSizeVideoSsrc(uint32_t ssrc) { - auto ssrcMapping = _ssrcMapping.find(ssrc); - std::string currentHighQualityVideoEndpointId; - if (ssrcMapping != _ssrcMapping.end()) { - currentHighQualityVideoEndpointId = ssrcMapping->second.endpointId; + void setRequestedVideoChannels(std::vector &&requestedVideoChannels) { + if (!_sharedVideoInformation) { + _pendingRequestedVideo = std::move(requestedVideoChannels); + return; } - if (_currentHighQualityVideoEndpointId != 
currentHighQualityVideoEndpointId) { - _currentHighQualityVideoEndpointId = currentHighQualityVideoEndpointId; - maybeUpdateRemoteVideoConstaints(); + bool updated = false; + std::vector allEndpointIds; + + for (const auto &description : requestedVideoChannels) { + if (_sharedVideoInformation && _sharedVideoInformation->endpointId == description.endpointId) { + continue; + } + + GroupParticipantVideoInformation videoInformation; + videoInformation.endpointId = description.endpointId; + for (const auto &group : description.ssrcGroups) { + GroupJoinPayloadVideoSourceGroup parsedGroup; + parsedGroup.semantics = group.semantics; + parsedGroup.ssrcs = group.ssrcs; + videoInformation.ssrcGroups.push_back(std::move(parsedGroup)); + } + + allEndpointIds.push_back(videoInformation.endpointId); + + auto current = _incomingVideoChannels.find(VideoChannelId(videoInformation.endpointId)); + if (current != _incomingVideoChannels.end()) { + if (current->second->requestedMinQuality() != description.minQuality || current->second->requestedMaxQuality() != description.maxQuality) { + current->second->setRequstedMinQuality(description.minQuality); + current->second->setRequstedMaxQuality(description.maxQuality); + updated = true; + } + continue; + } + + addIncomingVideoChannel(description.audioSsrc, videoInformation, description.minQuality, description.maxQuality); + updated = true; + } + + std::vector removeEndpointIds; + for (const auto &it : _incomingVideoChannels) { + if (std::find(allEndpointIds.begin(), allEndpointIds.end(), it.first.endpointId) == allEndpointIds.end()) { + removeEndpointIds.push_back(it.first.endpointId); + updated = true; + } + } + + for (const auto &endpointId : removeEndpointIds) { + _incomingVideoChannels.erase(VideoChannelId(endpointId)); + } + + if (updated) { + maybeUpdateRemoteVideoConstraints(); } } private: - rtc::scoped_refptr createAudioDeviceModule() { - const auto create = [&](webrtc::AudioDeviceModule::AudioLayer layer) { - return 
webrtc::AudioDeviceModule::Create( - layer, - _taskQueueFactory.get()); - }; - const auto check = [&](const rtc::scoped_refptr &result) { - return (result && result->Init() == 0) ? result : nullptr; - }; - if (_createAudioDeviceModule) { - if (const auto result = check(_createAudioDeviceModule(_taskQueueFactory.get()))) { - return result; - } - } - return check(create(webrtc::AudioDeviceModule::kPlatformDefaultAudio)); + rtc::scoped_refptr createAudioDeviceModule() { + const auto create = [&](webrtc::AudioDeviceModule::AudioLayer layer) { + return webrtc::AudioDeviceModule::Create( + layer, + _taskQueueFactory.get()); + }; + const auto check = [&](const rtc::scoped_refptr &result) -> rtc::scoped_refptr { + if (result && result->Init() == 0) { + return PlatformInterface::SharedInstance()->wrapAudioDeviceModule(result); + } else { + return nullptr; + } + }; + if (_createAudioDeviceModule) { + if (const auto result = check(_createAudioDeviceModule(_taskQueueFactory.get()))) { + return result; + } + } + return check(create(webrtc::AudioDeviceModule::kPlatformDefaultAudio)); } private: @@ -2195,17 +2868,21 @@ private: std::function _networkStateUpdated; std::function _audioLevelsUpdated; std::function _onAudioFrame; - std::function const &)> _incomingVideoSourcesUpdated; - std::function const &)> _participantDescriptionsRequired; + std::function(std::vector const &, std::function &&)>)> _requestMediaChannelDescriptions; std::function(std::shared_ptr, int64_t, int64_t, std::function)> _requestBroadcastPart; std::shared_ptr _videoCapture; + std::shared_ptr _videoCaptureSink; + std::function _getVideoSource; bool _disableIncomingChannels = false; bool _useDummyChannel{true}; + int _outgoingAudioBitrateKbit{32}; + bool _disableOutgoingAudioProcessing{false}; + int _minOutgoingVideoBitrateKbit{100}; + VideoContentType _videoContentType{VideoContentType::None}; + std::vector _videoCodecPreferences; - int64_t _lastUnknownSsrcsReport = 0; - std::set _pendingUnknownSsrcs; - bool 
_isUnknownSsrcsScheduled = false; - std::set _reportedUnknownSsrcs; + int _nextMediaChannelDescriptionsRequestId = 0; + std::map _requestedMediaChannelDescriptions; std::unique_ptr> _networkManager; @@ -2215,8 +2892,8 @@ private: std::unique_ptr _call; webrtc::FieldTrialBasedConfig _fieldTrials; webrtc::LocalAudioSinkAdapter _audioSource; - rtc::scoped_refptr _audioDeviceModule; - std::function(webrtc::TaskQueueFactory*)> _createAudioDeviceModule; + rtc::scoped_refptr _audioDeviceModule; + std::function(webrtc::TaskQueueFactory*)> _createAudioDeviceModule; std::string _initialInputDeviceId; std::string _initialOutputDeviceId; @@ -2225,6 +2902,8 @@ private: uint32_t _outgoingAudioSsrc = 0; std::vector _availableVideoFormats; + std::vector _availablePayloadTypes; + absl::optional _selectedPayloadType; std::vector _videoPayloadTypes; std::vector> _videoExtensionMap; @@ -2238,19 +2917,28 @@ private: // _outgoingVideoChannel memory is managed by _channelManager cricket::VideoChannel *_outgoingVideoChannel = nullptr; VideoSsrcs _outgoingVideoSsrcs; + int _outgoingVideoConstraint = 720; + int _pendingOutgoingVideoConstraint = -1; + int _pendingOutgoingVideoConstraintRequestId = 0; std::map _audioLevels; GroupLevelValue _myAudioLevel; bool _isMuted = true; + std::shared_ptr _noiseSuppressionConfiguration; MissingSsrcPacketBuffer _missingPacketBuffer; - std::map _ssrcMapping; + std::map _channelBySsrc; std::map _volumeBySsrc; std::map> _incomingAudioChannels; - std::map> _incomingVideoChannels; + std::map> _incomingVideoChannels; - std::string _currentHighQualityVideoEndpointId; + std::map>>> _pendingVideoSinks; + std::vector _pendingRequestedVideo; + + std::unique_ptr _serverBandwidthProbingVideoSsrc; + + absl::optional _sharedVideoInformation; int64_t _broadcastPartDurationMilliseconds = 500; std::vector> _sourceBroadcastParts; @@ -2266,15 +2954,19 @@ private: bool _isDataChannelOpen = false; GroupNetworkState _effectiveNetworkState; + rtc::scoped_refptr 
_workerThreadSafery; + std::shared_ptr _platformContext; }; GroupInstanceCustomImpl::GroupInstanceCustomImpl(GroupInstanceDescriptor &&descriptor) { if (descriptor.config.need_log) { _logSink = std::make_unique(descriptor.config.logPath); + rtc::LogMessage::SetLogToStderr(true); + } else { + rtc::LogMessage::SetLogToStderr(false); } rtc::LogMessage::LogToDebug(rtc::LS_INFO); - rtc::LogMessage::SetLogToStderr(false); if (_logSink) { rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO); } @@ -2310,21 +3002,15 @@ void GroupInstanceCustomImpl::setConnectionMode(GroupConnectionMode connectionMo }); } -void GroupInstanceCustomImpl::emitJoinPayload(std::function completion) { +void GroupInstanceCustomImpl::emitJoinPayload(std::function completion) { _internal->perform(RTC_FROM_HERE, [completion](GroupInstanceCustomInternal *internal) { internal->emitJoinPayload(completion); }); } -void GroupInstanceCustomImpl::setJoinResponsePayload(GroupJoinResponsePayload payload, std::vector &&participants) { - _internal->perform(RTC_FROM_HERE, [payload, participants = std::move(participants)](GroupInstanceCustomInternal *internal) mutable { - internal->setJoinResponsePayload(payload, std::move(participants)); - }); -} - -void GroupInstanceCustomImpl::addParticipants(std::vector &&participants) { - _internal->perform(RTC_FROM_HERE, [participants = std::move(participants)](GroupInstanceCustomInternal *internal) mutable { - internal->addParticipants(std::move(participants)); +void GroupInstanceCustomImpl::setJoinResponsePayload(std::string const &payload) { + _internal->perform(RTC_FROM_HERE, [payload](GroupInstanceCustomInternal *internal) { + internal->setJoinResponsePayload(payload); }); } @@ -2334,33 +3020,51 @@ void GroupInstanceCustomImpl::removeSsrcs(std::vector ssrcs) { }); } +void GroupInstanceCustomImpl::removeIncomingVideoSource(uint32_t ssrc) { + _internal->perform(RTC_FROM_HERE, [ssrc](GroupInstanceCustomInternal *internal) mutable { + 
internal->removeIncomingVideoSource(ssrc); + }); +} + void GroupInstanceCustomImpl::setIsMuted(bool isMuted) { _internal->perform(RTC_FROM_HERE, [isMuted](GroupInstanceCustomInternal *internal) { internal->setIsMuted(isMuted); }); } -void GroupInstanceCustomImpl::setVideoCapture(std::shared_ptr videoCapture, std::function completion) { - _internal->perform(RTC_FROM_HERE, [videoCapture, completion](GroupInstanceCustomInternal *internal) { - internal->setVideoCapture(videoCapture, completion, false); +void GroupInstanceCustomImpl::setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled) { + _internal->perform(RTC_FROM_HERE, [isNoiseSuppressionEnabled](GroupInstanceCustomInternal *internal) { + internal->setIsNoiseSuppressionEnabled(isNoiseSuppressionEnabled); }); } +void GroupInstanceCustomImpl::setVideoCapture(std::shared_ptr videoCapture) { + _internal->perform(RTC_FROM_HERE, [videoCapture](GroupInstanceCustomInternal *internal) { + internal->setVideoCapture(videoCapture, false); + }); +} + +void GroupInstanceCustomImpl::setVideoSource(std::function getVideoSource) { + _internal->perform(RTC_FROM_HERE, [getVideoSource](GroupInstanceCustomInternal *internal) { + internal->setVideoSource(getVideoSource, false); + }); +} + void GroupInstanceCustomImpl::setAudioOutputDevice(std::string id) { _internal->perform(RTC_FROM_HERE, [id](GroupInstanceCustomInternal *internal) { internal->setAudioOutputDevice(id); - }); + }); } void GroupInstanceCustomImpl::setAudioInputDevice(std::string id) { - _internal->perform(RTC_FROM_HERE, [id](GroupInstanceCustomInternal *internal) { - internal->setAudioInputDevice(id); - }); + _internal->perform(RTC_FROM_HERE, [id](GroupInstanceCustomInternal *internal) { + internal->setAudioInputDevice(id); + }); } -void GroupInstanceCustomImpl::addIncomingVideoOutput(uint32_t ssrc, std::weak_ptr> sink) { - _internal->perform(RTC_FROM_HERE, [ssrc, sink](GroupInstanceCustomInternal *internal) mutable { - internal->addIncomingVideoOutput(ssrc, 
sink); +void GroupInstanceCustomImpl::addIncomingVideoOutput(std::string const &endpointId, std::weak_ptr> sink) { + _internal->perform(RTC_FROM_HERE, [endpointId, sink](GroupInstanceCustomInternal *internal) mutable { + internal->addIncomingVideoOutput(endpointId, sink); }); } @@ -2370,11 +3074,12 @@ void GroupInstanceCustomImpl::setVolume(uint32_t ssrc, double volume) { }); } -void GroupInstanceCustomImpl::setFullSizeVideoSsrc(uint32_t ssrc) { - _internal->perform(RTC_FROM_HERE, [ssrc](GroupInstanceCustomInternal *internal) { - internal->setFullSizeVideoSsrc(ssrc); +void GroupInstanceCustomImpl::setRequestedVideoChannels(std::vector &&requestedVideoChannels) { + _internal->perform(RTC_FROM_HERE, [requestedVideoChannels = std::move(requestedVideoChannels)](GroupInstanceCustomInternal *internal) mutable { + internal->setRequestedVideoChannels(std::move(requestedVideoChannels)); }); } + std::vector GroupInstanceInterface::getAudioDevices(AudioDevice::Type type) { auto result = std::vector(); #ifdef WEBRTC_LINUX //Not needed for ios, and some crl::sync stuff is needed for windows diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.h b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.h index 9cca3af4f..e11c00762 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.h @@ -25,20 +25,22 @@ public: void setConnectionMode(GroupConnectionMode connectionMode, bool keepBroadcastIfWasEnabled); - void emitJoinPayload(std::function completion); - void setJoinResponsePayload(GroupJoinResponsePayload payload, std::vector &&participants); - void addParticipants(std::vector &&participants); + void emitJoinPayload(std::function completion); + void setJoinResponsePayload(std::string const &payload); void removeSsrcs(std::vector ssrcs); + void removeIncomingVideoSource(uint32_t ssrc); void setIsMuted(bool isMuted); - void setVideoCapture(std::shared_ptr 
videoCapture, std::function completion); + void setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled); + void setVideoCapture(std::shared_ptr videoCapture); + void setVideoSource(std::function getVideoSource); void setAudioOutputDevice(std::string id); void setAudioInputDevice(std::string id); - void addIncomingVideoOutput(uint32_t ssrc, std::weak_ptr> sink); + void addIncomingVideoOutput(std::string const &endpointId, std::weak_ptr> sink); void setVolume(uint32_t ssrc, double volume); - void setFullSizeVideoSsrc(uint32_t ssrc); + void setRequestedVideoChannels(std::vector &&requestedVideoChannels); private: std::shared_ptr _threads; diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.cpp deleted file mode 100644 index 6ec3d8edc..000000000 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.cpp +++ /dev/null @@ -1,3273 +0,0 @@ -#include "GroupInstanceImpl.h" - -#include -#include "api/scoped_refptr.h" -#include "rtc_base/thread.h" -#include "rtc_base/logging.h" -#include "api/peer_connection_interface.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "media/engine/webrtc_media_engine.h" -#include "api/audio_codecs/audio_decoder_factory_template.h" -#include "api/audio_codecs/audio_encoder_factory_template.h" -#include "api/audio_codecs/opus/audio_decoder_opus.h" -#include "api/audio_codecs/opus/audio_encoder_opus.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/rtc_event_log/rtc_event_log_factory.h" -#include "api/peer_connection_interface.h" -#include "api/video_track_source_proxy.h" -#include "system_wrappers/include/field_trial.h" -#include "api/stats/rtcstats_objects.h" -#include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_device/include/audio_device_factory.h" -#include "common_audio/include/audio_util.h" -#include 
"common_audio/vad/include/webrtc_vad.h" -#include "modules/audio_processing/agc2/vad_with_level.h" - -#include "ThreadLocalObject.h" -#include "Manager.h" -#include "NetworkManager.h" -#include "VideoCaptureInterfaceImpl.h" -#include "platform/PlatformInterface.h" -#include "LogSinkImpl.h" -#include "StaticThreads.h" - -#include -#include -#include - -namespace tgcalls { - -namespace { - -static std::vector splitSdpLines(std::string const &sdp) { - std::vector result; - - std::istringstream sdpStream(sdp); - - std::string s; - while (std::getline(sdpStream, s, '\n')) { - if (s.size() == 0) { - continue; - } - if (s[s.size() - 1] == '\r') { - s.resize(s.size() - 1); - } - result.push_back(s); - } - - return result; -} - -static std::vector splitFingerprintLines(std::string const &line) { - std::vector result; - - std::istringstream sdpStream(line); - - std::string s; - while (std::getline(sdpStream, s, ' ')) { - if (s.size() == 0) { - continue; - } - result.push_back(s); - } - - return result; -} - -static std::vector splitSsrcList(std::string const &line) { - std::vector result; - - std::istringstream sdpStream(line); - - std::string s; - while (std::getline(sdpStream, s, ' ')) { - if (s.size() == 0) { - continue; - } - - std::istringstream iss(s); - uint32_t ssrc = 0; - iss >> ssrc; - - result.push_back(ssrc); - } - - return result; -} - -static std::vector splitBundleMLines(std::string const &line) { - std::vector result; - - std::istringstream sdpStream(line); - - std::string s; - while (std::getline(sdpStream, s, ' ')) { - if (s.size() == 0) { - continue; - } - - result.push_back(s); - } - - return result; -} - -static std::vector getLines(std::vector const &lines, std::string prefix) { - std::vector result; - - for (auto &line : lines) { - if (line.find(prefix) == 0) { - auto cleanLine = line; - cleanLine.replace(0, prefix.size(), ""); - result.push_back(cleanLine); - } - } - - return result; -} - -static absl::optional parsePayloadType(uint32_t id, 
std::string const &line) { - std::string s; - std::istringstream lineStream(line); - std::string codec; - uint32_t clockrate = 0; - uint32_t channels = 0; - for (int i = 0; std::getline(lineStream, s, '/'); i++) { - if (s.size() == 0) { - continue; - } - - if (i == 0) { - codec = s; - } else if (i == 1) { - std::istringstream iss(s); - iss >> clockrate; - } else if (i == 2) { - std::istringstream iss(s); - iss >> channels; - } - } - if (codec.size() != 0) { - GroupJoinPayloadVideoPayloadType payloadType; - payloadType.id = id; - payloadType.name = codec; - payloadType.clockrate = clockrate; - payloadType.channels = channels; - return payloadType; - } else { - return absl::nullopt; - } -} - -static absl::optional parseFeedbackType(std::string const &line) { - std::istringstream lineStream(line); - std::string s; - - std::string type; - std::string subtype; - for (int i = 0; std::getline(lineStream, s, ' '); i++) { - if (s.size() == 0) { - continue; - } - - if (i == 0) { - type = s; - } else if (i == 1) { - subtype = s; - } - } - - if (type.size() != 0) { - GroupJoinPayloadVideoPayloadFeedbackType parsedType; - parsedType.type = type; - parsedType.subtype = subtype; - return parsedType; - } else { - return absl::nullopt; - } -} - -static void parsePayloadParameter(std::string const &line, std::vector> &result) { - std::istringstream lineStream(line); - std::string s; - - std::string key; - std::string value; - for (int i = 0; std::getline(lineStream, s, '='); i++) { - if (s.size() == 0) { - continue; - } - - if (i == 0) { - key = s; - } else if (i == 1) { - value = s; - } - } - if (key.size() != 0 && value.size() != 0) { - result.push_back(std::make_pair(key, value)); - } -} - -static std::vector> parsePayloadParameters(std::string const &line) { - std::vector> result; - - std::istringstream lineStream(line); - std::string s; - - while (std::getline(lineStream, s, ';')) { - if (s.size() == 0) { - continue; - } - - parsePayloadParameter(s, result); - } - - return 
result; -} - -static absl::optional parseSdpIntoJoinPayload(std::string const &sdp) { - GroupJoinPayload result; - - auto lines = splitSdpLines(sdp); - - std::vector audioLines; - std::vector videoLines; - bool isAudioLine = false; - bool isVideoLine = false; - for (auto &line : lines) { - if (line.find("m=audio") == 0) { - isAudioLine = true; - isVideoLine = false; - } else if (line.find("m=video") == 0) { - isAudioLine = false; - isVideoLine = true; - } else if (line.find("m=application") == 0) { - isAudioLine = false; - isVideoLine = true; - } - if (isAudioLine) { - audioLines.push_back(line); - } else if (isVideoLine) { - videoLines.push_back(line); - } - } - - result.ssrc = 0; - - auto ufragLines = getLines(audioLines, "a=ice-ufrag:"); - if (ufragLines.size() != 1) { - return absl::nullopt; - } - result.ufrag = ufragLines[0]; - - auto pwdLines = getLines(audioLines, "a=ice-pwd:"); - if (pwdLines.size() != 1) { - return absl::nullopt; - } - result.pwd = pwdLines[0]; - - for (auto &line : getLines(audioLines, "a=fingerprint:")) { - auto fingerprintComponents = splitFingerprintLines(line); - if (fingerprintComponents.size() != 2) { - continue; - } - - GroupJoinPayloadFingerprint fingerprint; - fingerprint.hash = fingerprintComponents[0]; - fingerprint.fingerprint = fingerprintComponents[1]; - fingerprint.setup = "active"; - result.fingerprints.push_back(fingerprint); - } - - for (auto &line : getLines(videoLines, "a=rtpmap:")) { - std::string s; - std::istringstream lineStream(line); - uint32_t id = 0; - for (int i = 0; std::getline(lineStream, s, ' '); i++) { - if (s.size() == 0) { - continue; - } - - if (i == 0) { - std::istringstream iss(s); - iss >> id; - } else if (i == 1) { - if (id != 0) { - auto payloadType = parsePayloadType(id, s); - if (payloadType.has_value()) { - std::ostringstream fbPrefixStream; - fbPrefixStream << "a=rtcp-fb:"; - fbPrefixStream << id; - fbPrefixStream << " "; - for (auto &feedbackLine : getLines(videoLines, fbPrefixStream.str())) 
{ - auto feedbackType = parseFeedbackType(feedbackLine); - if (feedbackType.has_value()) { - payloadType->feedbackTypes.push_back(feedbackType.value()); - } - } - - std::ostringstream parametersPrefixStream; - parametersPrefixStream << "a=fmtp:"; - parametersPrefixStream << id; - parametersPrefixStream << " "; - for (auto ¶metersLine : getLines(videoLines, parametersPrefixStream.str())) { - payloadType->parameters = parsePayloadParameters(parametersLine); - } - - result.videoPayloadTypes.push_back(payloadType.value()); - } - } - } - } - } - - for (auto &line : getLines(videoLines, "a=extmap:")) { - std::string s; - std::istringstream lineStream(line); - uint32_t id = 0; - for (int i = 0; std::getline(lineStream, s, ' '); i++) { - if (s.size() == 0) { - continue; - } - - if (i == 0) { - std::istringstream iss(s); - iss >> id; - } else if (i == 1) { - if (id != 0) { - result.videoExtensionMap.push_back(std::make_pair(id, s)); - } - } - } - } - - for (auto &line : getLines(videoLines, "a=ssrc-group:FID ")) { - auto ssrcs = splitSsrcList(line); - GroupJoinPayloadVideoSourceGroup group; - group.semantics = "FID"; - group.ssrcs = ssrcs; - result.videoSourceGroups.push_back(std::move(group)); - } - for (auto &line : getLines(videoLines, "a=ssrc-group:SIM ")) { - auto ssrcs = splitSsrcList(line); - GroupJoinPayloadVideoSourceGroup group; - group.semantics = "SIM"; - group.ssrcs = ssrcs; - result.videoSourceGroups.push_back(std::move(group)); - } - - return result; -} - -struct StreamSpec { - bool isMain = false; - bool isOutgoing = false; - std::string mLine; - uint32_t streamId = 0; - uint32_t ssrc = 0; - std::vector videoSourceGroups; - std::vector videoPayloadTypes; - std::vector> videoExtensionMap; - bool isRemoved = false; - bool isData = false; - bool isVideo = false; -}; - -static void appendSdp(std::vector &lines, std::string const &line, int index = -1) { - if (index >= 0) { - lines.insert(lines.begin() + index, line); - } else { - lines.push_back(line); - } -} - 
-enum class SdpType { - kSdpTypeJoinAnswer, - kSdpTypeRemoteOffer, - kSdpTypeLocalAnswer -}; - -static std::string createSdp(uint32_t sessionId, GroupJoinResponsePayload const &payload, SdpType type, std::vector const &bundleStreams) { - std::vector sdp; - - appendSdp(sdp, "v=0"); - - std::ostringstream sessionIdString; - sessionIdString << "o=- "; - sessionIdString << sessionId; - sessionIdString << " 2 IN IP4 0.0.0.0"; - appendSdp(sdp, sessionIdString.str()); - - appendSdp(sdp, "s=-"); - appendSdp(sdp, "t=0 0"); - - std::ostringstream bundleString; - bundleString << "a=group:BUNDLE"; - for (auto &stream : bundleStreams) { - bundleString << " "; - bundleString << stream.mLine; - } - appendSdp(sdp, bundleString.str()); - - appendSdp(sdp, "a=ice-lite"); - - for (auto &stream : bundleStreams) { - std::ostringstream streamMidString; - streamMidString << "a=mid:" << stream.mLine; - - if (stream.isData) { - appendSdp(sdp, "m=application 9 UDP/DTLS/SCTP webrtc-datachannel"); - appendSdp(sdp, "c=IN IP4 0.0.0.0"); - - std::ostringstream ufragString; - ufragString << "a=ice-ufrag:"; - ufragString << payload.ufrag; - appendSdp(sdp, ufragString.str()); - - std::ostringstream pwdString; - pwdString << "a=ice-pwd:"; - pwdString << payload.pwd; - appendSdp(sdp, pwdString.str()); - - for (auto &fingerprint : payload.fingerprints) { - std::ostringstream fingerprintString; - fingerprintString << "a=fingerprint:"; - fingerprintString << fingerprint.hash; - fingerprintString << " "; - fingerprintString << fingerprint.fingerprint; - appendSdp(sdp, fingerprintString.str()); - appendSdp(sdp, "a=setup:passive"); - } - - appendSdp(sdp, streamMidString.str()); - appendSdp(sdp, "a=sctp-port:5000"); - appendSdp(sdp, "a=max-message-size:262144"); - } else { - std::ostringstream mLineString; - if (stream.isVideo) { - mLineString << "m=video "; - } else { - mLineString << "m=audio "; - } - if (stream.isMain) { - mLineString << "1"; - } else { - mLineString << "0"; - } - if 
(stream.videoPayloadTypes.size() == 0) { - mLineString << " RTP/AVPF 111 126"; - } else { - mLineString << " RTP/AVPF"; - for (auto &it : stream.videoPayloadTypes) { - mLineString << " " << it.id; - } - } - - appendSdp(sdp, mLineString.str()); - - if (stream.isMain) { - appendSdp(sdp, "c=IN IP4 0.0.0.0"); - } - - appendSdp(sdp, streamMidString.str()); - - std::ostringstream ufragString; - ufragString << "a=ice-ufrag:"; - ufragString << payload.ufrag; - appendSdp(sdp, ufragString.str()); - - std::ostringstream pwdString; - pwdString << "a=ice-pwd:"; - pwdString << payload.pwd; - appendSdp(sdp, pwdString.str()); - - for (auto &fingerprint : payload.fingerprints) { - std::ostringstream fingerprintString; - fingerprintString << "a=fingerprint:"; - fingerprintString << fingerprint.hash; - fingerprintString << " "; - fingerprintString << fingerprint.fingerprint; - appendSdp(sdp, fingerprintString.str()); - appendSdp(sdp, "a=setup:passive"); - } - - if (stream.isMain) { - for (auto &candidate : payload.candidates) { - std::ostringstream candidateString; - candidateString << "a=candidate:"; - candidateString << candidate.foundation; - candidateString << " "; - candidateString << candidate.component; - candidateString << " "; - candidateString << candidate.protocol; - candidateString << " "; - candidateString << candidate.priority; - candidateString << " "; - candidateString << candidate.ip; - candidateString << " "; - candidateString << candidate.port; - candidateString << " "; - candidateString << "typ "; - candidateString << candidate.type; - candidateString << " "; - - if (candidate.type == "srflx" || candidate.type == "prflx" || candidate.type == "relay") { - if (candidate.relAddr.size() != 0 && candidate.relPort.size() != 0) { - candidateString << "raddr "; - candidateString << candidate.relAddr; - candidateString << " "; - candidateString << "rport "; - candidateString << candidate.relPort; - candidateString << " "; - } - } - - if (candidate.protocol == "tcp") { - if 
(candidate.tcpType.size() != 0) { - candidateString << "tcptype "; - candidateString << candidate.tcpType; - candidateString << " "; - } - } - - candidateString << "generation "; - candidateString << candidate.generation; - - appendSdp(sdp, candidateString.str()); - } - } - - if (!stream.isVideo) { - appendSdp(sdp, "a=rtpmap:111 opus/48000/2"); - appendSdp(sdp, "a=rtpmap:126 telephone-event/8000"); - appendSdp(sdp, "a=fmtp:111 minptime=10; useinbandfec=1"); - appendSdp(sdp, "a=rtcp:1 IN IP4 0.0.0.0"); - appendSdp(sdp, "a=rtcp-mux"); - appendSdp(sdp, "a=rtcp-rsize"); - appendSdp(sdp, "a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level"); - appendSdp(sdp, "a=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"); - appendSdp(sdp, "a=extmap:3 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"); - - bool addSsrcs = false; - if (stream.isRemoved) { - appendSdp(sdp, "a=inactive"); - } else if (type == SdpType::kSdpTypeJoinAnswer) { - if (stream.isOutgoing) { - appendSdp(sdp, "a=recvonly"); - } else { - appendSdp(sdp, "a=sendonly"); - appendSdp(sdp, "a=bundle-only"); - addSsrcs = true; - } - } else if (type == SdpType::kSdpTypeRemoteOffer) { - if (stream.isOutgoing) { - appendSdp(sdp, "a=recvonly"); - } else { - appendSdp(sdp, "a=sendonly"); - appendSdp(sdp, "a=bundle-only"); - addSsrcs = true; - } - } else if (type == SdpType::kSdpTypeLocalAnswer) { - if (stream.isOutgoing) { - appendSdp(sdp, "a=sendonly"); - addSsrcs = true; - } else { - appendSdp(sdp, "a=recvonly"); - appendSdp(sdp, "a=bundle-only"); - } - } - - if (addSsrcs) { - std::ostringstream cnameString; - cnameString << "a=ssrc:"; - cnameString << stream.ssrc; - cnameString << " cname:stream"; - cnameString << stream.streamId; - appendSdp(sdp, cnameString.str()); - - std::ostringstream msidString; - msidString << "a=ssrc:"; - msidString << stream.ssrc; - msidString << " msid:stream"; - msidString << stream.streamId; - msidString << " audio" << stream.streamId; - 
appendSdp(sdp, msidString.str()); - - std::ostringstream mslabelString; - mslabelString << "a=ssrc:"; - mslabelString << stream.ssrc; - mslabelString << " mslabel:audio"; - mslabelString << stream.streamId; - appendSdp(sdp, mslabelString.str()); - - std::ostringstream labelString; - labelString << "a=ssrc:"; - labelString << stream.ssrc; - labelString << " label:audio"; - labelString << stream.streamId; - appendSdp(sdp, labelString.str()); - } - } else { - appendSdp(sdp, "a=rtcp:1 IN IP4 0.0.0.0"); - appendSdp(sdp, "a=rtcp-mux"); - appendSdp(sdp, "a=rtcp-rsize"); - - for (auto &it : stream.videoPayloadTypes) { - std::ostringstream rtpmapString; - rtpmapString << "a=rtpmap:"; - rtpmapString << it.id; - rtpmapString << " "; - rtpmapString << it.name; - rtpmapString << "/"; - rtpmapString << it.clockrate; - if (it.channels != 0) { - rtpmapString << "/"; - rtpmapString << it.channels; - } - appendSdp(sdp, rtpmapString.str()); - - for (auto &feedbackType : it.feedbackTypes) { - std::ostringstream feedbackString; - feedbackString << "a=rtcp-fb:"; - feedbackString << it.id; - feedbackString << " "; - feedbackString << feedbackType.type; - if (feedbackType.subtype.size() != 0) { - feedbackString << " "; - feedbackString << feedbackType.subtype; - } - appendSdp(sdp, feedbackString.str()); - } - - auto parameters = it.parameters; - - if (it.name == "VP8") { - bool hasBitrate = false; - for (auto ¶m : parameters) { - if (param.first == "x-google-max-bitrate") { - hasBitrate = true; - } - } - - if (!hasBitrate) { - parameters.push_back(std::make_pair("x-google-max-bitrate", "1200")); - //parameters.push_back(std::make_pair("x-google-start-bitrate", "300")); - } - } - - if (parameters.size() != 0) { - std::ostringstream fmtpString; - fmtpString << "a=fmtp:"; - fmtpString << it.id; - fmtpString << " "; - - for (int i = 0; i < parameters.size(); i++) { - if (i != 0) { - fmtpString << ";"; - } - fmtpString << parameters[i].first; - fmtpString << "="; - fmtpString << 
parameters[i].second; - } - - appendSdp(sdp, fmtpString.str()); - } - } - - for (auto &it : stream.videoExtensionMap) { - std::ostringstream extString; - extString << "a=extmap:"; - extString << it.first; - extString << " "; - extString << it.second; - appendSdp(sdp, extString.str()); - } - - bool addSsrcs = false; - if (stream.isRemoved) { - appendSdp(sdp, "a=inactive"); - } else if (type == SdpType::kSdpTypeJoinAnswer) { - if (stream.isOutgoing) { - appendSdp(sdp, "a=recvonly"); - appendSdp(sdp, "a=bundle-only"); - } else { - appendSdp(sdp, "a=sendonly"); - appendSdp(sdp, "a=bundle-only"); - addSsrcs = true; - } - } else if (type == SdpType::kSdpTypeRemoteOffer) { - if (stream.isOutgoing) { - appendSdp(sdp, "a=recvonly"); - appendSdp(sdp, "a=bundle-only"); - } else { - appendSdp(sdp, "a=sendonly"); - appendSdp(sdp, "a=bundle-only"); - addSsrcs = true; - } - } else if (type == SdpType::kSdpTypeLocalAnswer) { - if (stream.isOutgoing) { - appendSdp(sdp, "a=sendonly"); - appendSdp(sdp, "a=bundle-only"); - addSsrcs = true; - } else { - appendSdp(sdp, "a=recvonly"); - appendSdp(sdp, "a=bundle-only"); - } - } - - if (addSsrcs) { - std::vector ssrcs; - for (auto &group : stream.videoSourceGroups) { - std::ostringstream groupString; - groupString << "a=ssrc-group:"; - groupString << group.semantics; - - for (auto ssrc : group.ssrcs) { - groupString << " " << ssrc; - - if (std::find(ssrcs.begin(), ssrcs.end(), ssrc) == ssrcs.end()) { - ssrcs.push_back(ssrc); - } - } - - appendSdp(sdp, groupString.str()); - } - - for (auto ssrc : ssrcs) { - std::ostringstream cnameString; - cnameString << "a=ssrc:"; - cnameString << ssrc; - cnameString << " cname:stream"; - cnameString << stream.streamId; - appendSdp(sdp, cnameString.str()); - - std::ostringstream msidString; - msidString << "a=ssrc:"; - msidString << ssrc; - msidString << " msid:stream"; - msidString << stream.streamId; - msidString << " video" << stream.streamId; - appendSdp(sdp, msidString.str()); - - std::ostringstream 
mslabelString; - mslabelString << "a=ssrc:"; - mslabelString << ssrc; - mslabelString << " mslabel:video"; - mslabelString << stream.streamId; - appendSdp(sdp, mslabelString.str()); - - std::ostringstream labelString; - labelString << "a=ssrc:"; - labelString << ssrc; - labelString << " label:video"; - labelString << stream.streamId; - appendSdp(sdp, labelString.str()); - } - } - } - } - } - - std::ostringstream result; - for (auto &line : sdp) { - result << line << "\n"; - } - - return result.str(); -} - -static std::string parseJoinResponseIntoSdp(uint32_t sessionId, GroupJoinPayload const &joinPayload, GroupJoinResponsePayload const &payload, SdpType type, std::vector const &allOtherParticipants, absl::optional localVideoMid, absl::optional dataChannelMid, std::vector &bundleStreamsState) { - - std::vector bundleStreams; - - StreamSpec mainStream; - mainStream.mLine = "0"; - mainStream.isMain = true; - mainStream.isOutgoing = true; - mainStream.streamId = 0; - mainStream.ssrc = joinPayload.ssrc; - mainStream.isRemoved = false; - mainStream.isVideo = false; - bundleStreams.push_back(mainStream); - - if (dataChannelMid.has_value() && dataChannelMid.value() == "1") { - StreamSpec dataStream; - dataStream.mLine = dataChannelMid.value(); - dataStream.isMain = false; - dataStream.isOutgoing = true; - dataStream.streamId = 0; - dataStream.ssrc = 0; - dataStream.isRemoved = false; - dataStream.isData = true; - dataStream.isVideo = false; - bundleStreams.push_back(dataStream); - } - - if (localVideoMid.has_value()) { - if (joinPayload.videoSourceGroups.size() != 0) { - StreamSpec mainVideoStream; - mainVideoStream.mLine = localVideoMid.value(); - mainVideoStream.isMain = false; - mainVideoStream.isOutgoing = true; - mainVideoStream.isVideo = true; - mainVideoStream.streamId = joinPayload.videoSourceGroups[0].ssrcs[0]; - mainVideoStream.ssrc = joinPayload.videoSourceGroups[0].ssrcs[0]; - mainVideoStream.videoSourceGroups = joinPayload.videoSourceGroups; - 
mainVideoStream.videoPayloadTypes = joinPayload.videoPayloadTypes; - mainVideoStream.videoExtensionMap = joinPayload.videoExtensionMap; - - mainVideoStream.isRemoved = joinPayload.videoSourceGroups.size() == 0; - bundleStreams.push_back(mainVideoStream); - } - } - - if (dataChannelMid.has_value() && dataChannelMid.value() == "2") { - StreamSpec dataStream; - dataStream.mLine = dataChannelMid.value(); - dataStream.isMain = false; - dataStream.isOutgoing = true; - dataStream.streamId = 0; - dataStream.ssrc = 0; - dataStream.isRemoved = false; - dataStream.isData = true; - dataStream.isVideo = false; - bundleStreams.push_back(dataStream); - } - - for (auto &participant : allOtherParticipants) { - StreamSpec audioStream; - audioStream.isMain = false; - - std::ostringstream audioMLine; - audioMLine << "audio" << participant.audioSsrc; - audioStream.mLine = audioMLine.str(); - audioStream.ssrc = participant.audioSsrc; - audioStream.isRemoved = participant.isRemoved; - audioStream.streamId = participant.audioSsrc; - bundleStreams.push_back(audioStream); - - if (participant.videoPayloadTypes.size() != 0 && participant.videoSourceGroups.size() != 0 ) { - StreamSpec videoStream; - videoStream.isMain = false; - - std::ostringstream videoMLine; - videoMLine << "video" << participant.audioSsrc; - videoStream.mLine = videoMLine.str(); - videoStream.isVideo = true; - videoStream.ssrc = participant.videoSourceGroups[0].ssrcs[0]; - videoStream.isRemoved = participant.isRemoved; - videoStream.streamId = participant.audioSsrc; - videoStream.videoSourceGroups = participant.videoSourceGroups; - videoStream.videoExtensionMap = participant.videoExtensionMap; - videoStream.videoPayloadTypes = participant.videoPayloadTypes; - - bundleStreams.push_back(videoStream); - } - } - - std::vector orderedStreams; - for (auto const &oldStream : bundleStreamsState) { - bool found = false; - for (int i = 0; i < (int)bundleStreams.size(); i++) { - if (bundleStreams[i].mLine == oldStream.mLine) { - 
found = true; - orderedStreams.push_back(bundleStreams[i]); - bundleStreams.erase(bundleStreams.begin() + i); - break; - } - } - if (!found) { - StreamSpec copyStream = oldStream; - copyStream.isRemoved = true; - orderedStreams.push_back(copyStream); - } - } - for (const auto &it : bundleStreams) { - orderedStreams.push_back(it); - } - - bundleStreamsState = orderedStreams; - - return createSdp(sessionId, payload, type, orderedStreams); -} - -VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(VideoCaptureInterface *videoCapture) { - return videoCapture - ? static_cast(videoCapture)->object()->getSyncAssumingSameThread() - : nullptr; -} - -class ErrorParsingLogSink final : public rtc::LogSink { -public: - ErrorParsingLogSink(std::function onMissingSsrc) : - _onMissingSsrc(onMissingSsrc) { - - } - - void OnLogMessage(const std::string &msg, rtc::LoggingSeverity severity, const char *tag) override { - handleMessage(msg); - } - - void OnLogMessage(const std::string &message, rtc::LoggingSeverity severity) override { - handleMessage(message); - } - - void OnLogMessage(const std::string &message) override { - handleMessage(message); - } - -private: - void handleMessage(const std::string &message) { - const std::string pattern = "Failed to demux RTP packet:"; - const std::string ssrcPattern = "SSRC="; - auto index = message.find(pattern); - if (index != std::string::npos) { - index = message.find(ssrcPattern); - if (index != std::string::npos) { - std::string string = message; - string.erase(0, index + ssrcPattern.size()); - - std::istringstream stream(string); - uint32_t ssrc = 0; - stream >> ssrc; - if (ssrc != 0) { - _onMissingSsrc(ssrc); - } - } - return; - } - - const std::string pattern2 = "receive_rtp_config_ lookup failed for ssrc "; - index = message.find(pattern2); - if (index != std::string::npos) { - std::string string = message; - string.erase(0, index + pattern2.size()); - - std::istringstream stream(string); - uint32_t ssrc = 0; - stream >> 
ssrc; - if (ssrc != 0) { - _onMissingSsrc(ssrc); - } - - return; - } - } - -private: - std::function _onMissingSsrc; - -}; - -class PeerConnectionObserverImpl : public webrtc::PeerConnectionObserver { -private: - std::function _discoveredIceCandidate; - std::function _connectionStateChanged; - std::function)> _onTrackAdded; - std::function)> _onTrackRemoved; - std::function _onMissingSsrc; - -public: - PeerConnectionObserverImpl( - std::function discoveredIceCandidate, - std::function connectionStateChanged, - std::function)> onTrackAdded, - std::function)> onTrackRemoved, - std::function onMissingSsrc - ) : - _discoveredIceCandidate(discoveredIceCandidate), - _connectionStateChanged(connectionStateChanged), - _onTrackAdded(onTrackAdded), - _onTrackRemoved(onTrackRemoved), - _onMissingSsrc(onMissingSsrc) { - } - - virtual void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state) override { - } - - virtual void OnAddStream(rtc::scoped_refptr stream) override { - } - - virtual void OnRemoveStream(rtc::scoped_refptr stream) override { - } - - virtual void OnDataChannel(rtc::scoped_refptr data_channel) override { - - } - - virtual void OnRenegotiationNeeded() override { - } - - virtual void OnIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState new_state) override { - bool isConnected = false; - switch (new_state) { - case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionConnected: - case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionCompleted: - isConnected = true; - break; - default: - break; - } - _connectionStateChanged(isConnected); - } - - virtual void OnStandardizedIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState new_state) override { - } - - virtual void OnConnectionChange(webrtc::PeerConnectionInterface::PeerConnectionState new_state) override { - } - - virtual void OnIceGatheringChange(webrtc::PeerConnectionInterface::IceGatheringState new_state) override 
{ - } - - virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override { - std::string sdp; - candidate->ToString(&sdp); - _discoveredIceCandidate(sdp, candidate->sdp_mline_index(), candidate->sdp_mid()); - } - - virtual void OnIceCandidateError(const std::string& host_candidate, const std::string& url, int error_code, const std::string& error_text) override { - } - - virtual void OnIceCandidateError(const std::string& address, - int port, - const std::string& url, - int error_code, - const std::string& error_text) override { - } - - virtual void OnIceCandidatesRemoved(const std::vector& candidates) override { - } - - virtual void OnIceConnectionReceivingChange(bool receiving) override { - } - - virtual void OnIceSelectedCandidatePairChanged(const cricket::CandidatePairChangeEvent& event) override { - } - - virtual void OnAddTrack(rtc::scoped_refptr receiver, const std::vector>& streams) override { - } - - virtual void OnTrack(rtc::scoped_refptr transceiver) override { - _onTrackAdded(transceiver); - } - - virtual void OnRemoveTrack(rtc::scoped_refptr receiver) override { - _onTrackRemoved(receiver); - } - - virtual void OnInterestingUsage(int usage_pattern) override { - } -}; - -class DataChannelObserverImpl : public webrtc::DataChannelObserver { -public: - DataChannelObserverImpl(std::function stateChanged) : - _stateChanged(stateChanged) { - } - - virtual void OnStateChange() override { - RTC_LOG(LS_INFO) << "DataChannel state changed"; - _stateChanged(); - } - - virtual void OnMessage(const webrtc::DataBuffer &buffer) override { - RTC_LOG(LS_INFO) << "DataChannel message received: " << std::string((const char *)buffer.data.data(), buffer.data.size()); - } - - virtual void OnBufferedAmountChange(uint64_t sent_data_size) override { - } - -private: - std::function _stateChanged; -}; - -class RTCStatsCollectorCallbackImpl : public webrtc::RTCStatsCollectorCallback { -public: - RTCStatsCollectorCallbackImpl(std::function &)> completion) : - 
_completion(completion) { - } - - virtual void OnStatsDelivered(const rtc::scoped_refptr &report) override { - _completion(report); - } - -private: - std::function &)> _completion; -}; - -static const int kVadResultHistoryLength = 8; - -class CombinedVad { -private: - webrtc::VadLevelAnalyzer _vadWithLevel; - float _vadResultHistory[kVadResultHistoryLength]; - -public: - CombinedVad() { - for (int i = 0; i < kVadResultHistoryLength; i++) { - _vadResultHistory[i] = 0.0f; - } - } - - ~CombinedVad() { - } - - bool update(webrtc::AudioBuffer *buffer) { - webrtc::AudioFrameView frameView(buffer->channels(), buffer->num_channels(), buffer->num_frames()); - auto result = _vadWithLevel.AnalyzeFrame(frameView); - for (int i = 1; i < kVadResultHistoryLength; i++) { - _vadResultHistory[i - 1] = _vadResultHistory[i]; - } - _vadResultHistory[kVadResultHistoryLength - 1] = result.speech_probability; - - float movingAverage = 0.0f; - for (int i = 0; i < kVadResultHistoryLength; i++) { - movingAverage += _vadResultHistory[i]; - } - movingAverage /= (float)kVadResultHistoryLength; - - bool vadResult = false; - if (movingAverage > 0.8f) { - vadResult = true; - } - - return vadResult; - } -}; - -class AudioTrackSinkInterfaceImpl: public webrtc::AudioTrackSinkInterface { -private: - std::function _update; - - int _peakCount = 0; - uint16_t _peak = 0; - - CombinedVad _vad; - -public: - AudioTrackSinkInterfaceImpl(std::function update) : - _update(update) { - } - - virtual ~AudioTrackSinkInterfaceImpl() { - } - - virtual void OnData(const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames) override { - if (bits_per_sample == 16 && number_of_channels == 1) { - int16_t *samples = (int16_t *)audio_data; - int numberOfSamplesInFrame = (int)number_of_frames; - - webrtc::AudioBuffer buffer(sample_rate, 1, 48000, 1, 48000, 1); - webrtc::StreamConfig config(sample_rate, 1); - buffer.CopyFrom(samples, config); - - bool vadResult = 
_vad.update(&buffer); - - for (int i = 0; i < numberOfSamplesInFrame; i++) { - int16_t sample = samples[i]; - if (sample < 0) { - sample = -sample; - } - if (_peak < sample) { - _peak = sample; - } - _peakCount += 1; - } - - if (_peakCount >= 1200) { - float level = ((float)(_peak)) / 4000.0f; - _peak = 0; - _peakCount = 0; - _update(level, vadResult); - } - } - } -}; - -class CreateSessionDescriptionObserverImpl : public webrtc::CreateSessionDescriptionObserver { -private: - std::function _completion; - -public: - CreateSessionDescriptionObserverImpl(std::function completion) : - _completion(completion) { - } - - virtual void OnSuccess(webrtc::SessionDescriptionInterface* desc) override { - if (desc) { - std::string sdp; - desc->ToString(&sdp); - - _completion(sdp, desc->type()); - } - } - - virtual void OnFailure(webrtc::RTCError error) override { - } -}; - -class SetSessionDescriptionObserverImpl : public webrtc::SetSessionDescriptionObserver { -private: - std::function _completion; - std::function _error; - -public: - SetSessionDescriptionObserverImpl(std::function completion, std::function error) : - _completion(completion), _error(error) { - } - - virtual void OnSuccess() override { - _completion(); - } - - virtual void OnFailure(webrtc::RTCError error) override { - _error(error); - } -}; - -class AudioCaptureAnalyzer : public webrtc::CustomAudioAnalyzer { -private: - void Initialize(int sample_rate_hz, int num_channels) override { - - } - // Analyzes the given capture or render signal. - void Analyze(const webrtc::AudioBuffer* audio) override { - _analyze(audio); - } - // Returns a string representation of the module state. 
- std::string ToString() const override { - return "analyzing"; - } - - std::function _analyze; - -public: - AudioCaptureAnalyzer(std::function analyze) : - _analyze(analyze) { - } - - virtual ~AudioCaptureAnalyzer() = default; -}; - -class WrappedAudioDeviceModule : public webrtc::AudioDeviceModule { -private: - rtc::scoped_refptr _impl; - -public: - WrappedAudioDeviceModule(rtc::scoped_refptr impl) : - _impl(impl) { - } - - virtual ~WrappedAudioDeviceModule() { - } - - virtual int32_t ActiveAudioLayer(AudioLayer *audioLayer) const override { - return _impl->ActiveAudioLayer(audioLayer); - } - - virtual int32_t RegisterAudioCallback(webrtc::AudioTransport *audioCallback) override { - return _impl->RegisterAudioCallback(audioCallback); - } - - virtual int32_t Init() override { - return _impl->Init(); - } - - virtual int32_t Terminate() override { - return _impl->Terminate(); - } - - virtual bool Initialized() const override { - return _impl->Initialized(); - } - - virtual int16_t PlayoutDevices() override { - return _impl->PlayoutDevices(); - } - - virtual int16_t RecordingDevices() override { - return _impl->RecordingDevices(); - } - - virtual int32_t PlayoutDeviceName(uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override { - return _impl->PlayoutDeviceName(index, name, guid); - } - - virtual int32_t RecordingDeviceName(uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override { - return _impl->RecordingDeviceName(index, name, guid); - } - - virtual int32_t SetPlayoutDevice(uint16_t index) override { - return _impl->SetPlayoutDevice(index); - } - - virtual int32_t SetPlayoutDevice(WindowsDeviceType device) override { - return _impl->SetPlayoutDevice(device); - } - - virtual int32_t SetRecordingDevice(uint16_t index) override { - return _impl->SetRecordingDevice(index); - } - - virtual int32_t SetRecordingDevice(WindowsDeviceType device) override { - return 
_impl->SetRecordingDevice(device); - } - - virtual int32_t PlayoutIsAvailable(bool *available) override { - return _impl->PlayoutIsAvailable(available); - } - - virtual int32_t InitPlayout() override { - return _impl->InitPlayout(); - } - - virtual bool PlayoutIsInitialized() const override { - return _impl->PlayoutIsInitialized(); - } - - virtual int32_t RecordingIsAvailable(bool *available) override { - return _impl->RecordingIsAvailable(available); - } - - virtual int32_t InitRecording() override { - return _impl->InitRecording(); - } - - virtual bool RecordingIsInitialized() const override { - return _impl->RecordingIsInitialized(); - } - - virtual int32_t StartPlayout() override { - return _impl->StartPlayout(); - } - - virtual int32_t StopPlayout() override { - return _impl->StopPlayout(); - } - - virtual bool Playing() const override { - return _impl->Playing(); - } - - virtual int32_t StartRecording() override { - return _impl->StartRecording(); - } - - virtual int32_t StopRecording() override { - return _impl->StopRecording(); - } - - virtual bool Recording() const override { - return _impl->Recording(); - } - - virtual int32_t InitSpeaker() override { - return _impl->InitSpeaker(); - } - - virtual bool SpeakerIsInitialized() const override { - return _impl->SpeakerIsInitialized(); - } - - virtual int32_t InitMicrophone() override { - return _impl->InitMicrophone(); - } - - virtual bool MicrophoneIsInitialized() const override { - return _impl->MicrophoneIsInitialized(); - } - - virtual int32_t SpeakerVolumeIsAvailable(bool *available) override { - return _impl->SpeakerVolumeIsAvailable(available); - } - - virtual int32_t SetSpeakerVolume(uint32_t volume) override { - return _impl->SetSpeakerVolume(volume); - } - - virtual int32_t SpeakerVolume(uint32_t* volume) const override { - return _impl->SpeakerVolume(volume); - } - - virtual int32_t MaxSpeakerVolume(uint32_t *maxVolume) const override { - return _impl->MaxSpeakerVolume(maxVolume); - } - - virtual 
int32_t MinSpeakerVolume(uint32_t *minVolume) const override { - return _impl->MinSpeakerVolume(minVolume); - } - - virtual int32_t MicrophoneVolumeIsAvailable(bool *available) override { - return _impl->MicrophoneVolumeIsAvailable(available); - } - - virtual int32_t SetMicrophoneVolume(uint32_t volume) override { - return _impl->SetMicrophoneVolume(volume); - } - - virtual int32_t MicrophoneVolume(uint32_t *volume) const override { - return _impl->MicrophoneVolume(volume); - } - - virtual int32_t MaxMicrophoneVolume(uint32_t *maxVolume) const override { - return _impl->MaxMicrophoneVolume(maxVolume); - } - - virtual int32_t MinMicrophoneVolume(uint32_t *minVolume) const override { - return _impl->MinMicrophoneVolume(minVolume); - } - - virtual int32_t SpeakerMuteIsAvailable(bool *available) override { - return _impl->SpeakerMuteIsAvailable(available); - } - - virtual int32_t SetSpeakerMute(bool enable) override { - return _impl->SetSpeakerMute(enable); - } - - virtual int32_t SpeakerMute(bool *enabled) const override { - return _impl->SpeakerMute(enabled); - } - - virtual int32_t MicrophoneMuteIsAvailable(bool *available) override { - return _impl->MicrophoneMuteIsAvailable(available); - } - - virtual int32_t SetMicrophoneMute(bool enable) override { - return _impl->SetMicrophoneMute(enable); - } - - virtual int32_t MicrophoneMute(bool *enabled) const override { - return _impl->MicrophoneMute(enabled); - } - - virtual int32_t StereoPlayoutIsAvailable(bool *available) const override { - return _impl->StereoPlayoutIsAvailable(available); - } - - virtual int32_t SetStereoPlayout(bool enable) override { - return _impl->SetStereoPlayout(enable); - } - - virtual int32_t StereoPlayout(bool *enabled) const override { - return _impl->StereoPlayout(enabled); - } - - virtual int32_t StereoRecordingIsAvailable(bool *available) const override { - return _impl->StereoRecordingIsAvailable(available); - } - - virtual int32_t SetStereoRecording(bool enable) override { - return 
_impl->SetStereoRecording(enable); - } - - virtual int32_t StereoRecording(bool *enabled) const override { - return _impl->StereoRecording(enabled); - } - - virtual int32_t PlayoutDelay(uint16_t* delayMS) const override { - return _impl->PlayoutDelay(delayMS); - } - - virtual bool BuiltInAECIsAvailable() const override { - return _impl->BuiltInAECIsAvailable(); - } - - virtual bool BuiltInAGCIsAvailable() const override { - return _impl->BuiltInAGCIsAvailable(); - } - - virtual bool BuiltInNSIsAvailable() const override { - return _impl->BuiltInNSIsAvailable(); - } - - virtual int32_t EnableBuiltInAEC(bool enable) override { - return _impl->EnableBuiltInAEC(enable); - } - - virtual int32_t EnableBuiltInAGC(bool enable) override { - return _impl->EnableBuiltInAGC(enable); - } - - virtual int32_t EnableBuiltInNS(bool enable) override { - return _impl->EnableBuiltInNS(enable); - } - - virtual int32_t GetPlayoutUnderrunCount() const override { - return _impl->GetPlayoutUnderrunCount(); - } - -#if defined(WEBRTC_IOS) - virtual int GetPlayoutAudioParameters(webrtc::AudioParameters *params) const override { - return _impl->GetPlayoutAudioParameters(params); - } - virtual int GetRecordAudioParameters(webrtc::AudioParameters *params) const override { - return _impl->GetRecordAudioParameters(params); - } -#endif // WEBRTC_IOS -}; - -template -void split(const std::string &s, char delim, Out result) { - std::istringstream iss(s); - std::string item; - while (std::getline(iss, item, delim)) { - *result++ = item; - } -} - -std::string adjustLocalDescription(const std::string &sdp) { - return sdp; -} - -class CustomVideoSinkInterfaceProxyImpl : public rtc::VideoSinkInterface { -public: - CustomVideoSinkInterfaceProxyImpl() { - } - - virtual ~CustomVideoSinkInterfaceProxyImpl() { - } - - virtual void OnFrame(const webrtc::VideoFrame& frame) override { - //_lastFrame = frame; - for (int i = (int)(_sinks.size()) - 1; i >= 0; i--) { - auto strong = _sinks[i].lock(); - if (!strong) { 
- _sinks.erase(_sinks.begin() + i); - } else { - strong->OnFrame(frame); - } - } - } - - virtual void OnDiscardedFrame() override { - for (int i = (int)(_sinks.size()) - 1; i >= 0; i--) { - auto strong = _sinks[i].lock(); - if (!strong) { - _sinks.erase(_sinks.begin() + i); - } else { - strong->OnDiscardedFrame(); - } - } - } - - void addSink(std::weak_ptr> impl) { - _sinks.push_back(impl); - if (_lastFrame) { - auto strong = impl.lock(); - if (strong) { - strong->OnFrame(_lastFrame.value()); - } - } - } - -private: - std::vector>> _sinks; - absl::optional _lastFrame; -}; - -} // namespace - - - -class GroupInstanceManager : public std::enable_shared_from_this { -public: - GroupInstanceManager(GroupInstanceDescriptor &&descriptor) : - _networkStateUpdated(descriptor.networkStateUpdated), - _audioLevelsUpdated(descriptor.audioLevelsUpdated), - _incomingVideoSourcesUpdated(descriptor.incomingVideoSourcesUpdated), - _participantDescriptionsRequired(descriptor.participantDescriptionsRequired), - _initialInputDeviceId(descriptor.initialInputDeviceId), - _initialOutputDeviceId(descriptor.initialOutputDeviceId), - _createAudioDeviceModule(descriptor.createAudioDeviceModule), - _videoCapture(descriptor.videoCapture), - _platformContext(descriptor.platformContext) { - auto generator = std::mt19937(std::random_device()()); - auto distribution = std::uniform_int_distribution(); - do { - _mainStreamAudioSsrc = distribution(generator); - } while (!_mainStreamAudioSsrc); - } - - ~GroupInstanceManager() { - assert(StaticThreads::getMediaThread()->IsCurrent()); - - destroyAudioDeviceModule(); - if (_peerConnection) { - _peerConnection->Close(); - } - - if (_errorParsingLogSink) { - rtc::LogMessage::RemoveLogToStream(_errorParsingLogSink.get()); - } - } - - void generateAndInsertFakeIncomingSsrc() { - // At least on Windows recording can't be started without playout. - // We keep a fake incoming stream, so that playout is always started. 
- /*auto generator = std::mt19937(std::random_device()()); - auto distribution = std::uniform_int_distribution(); - while (true) { - _fakeIncomingSsrc = distribution(generator); - if (_fakeIncomingSsrc != 0 - && _fakeIncomingSsrc != _mainStreamAudioSsrc - && std::find(_allOtherSsrcs.begin(), _allOtherSsrcs.end(), _fakeIncomingSsrc) == _allOtherSsrcs.end()) { - break; - } - } - _activeOtherSsrcs.emplace(_fakeIncomingSsrc); - _allOtherSsrcs.emplace_back(_fakeIncomingSsrc);*/ - } - - bool createAudioDeviceModule( - const webrtc::PeerConnectionFactoryDependencies &dependencies) { - _adm_thread = dependencies.worker_thread; - if (!_adm_thread) { - return false; - } - _adm_thread->Invoke(RTC_FROM_HERE, [&] { - const auto create = [&](webrtc::AudioDeviceModule::AudioLayer layer) { - return webrtc::AudioDeviceModule::Create( - layer, - dependencies.task_queue_factory.get()); - }; - const auto finalize = [&](const rtc::scoped_refptr &result) { - _adm_use_withAudioDeviceModule = new rtc::RefCountedObject(result); - }; - const auto check = [&](const rtc::scoped_refptr &result) { - if (!result || result->Init() != 0) { - return false; - } - finalize(result); - return true; - }; - if (_createAudioDeviceModule - && check(_createAudioDeviceModule(dependencies.task_queue_factory.get()))) { - return; - } else if (check(create(webrtc::AudioDeviceModule::kPlatformDefaultAudio))) { - return; - } - }); - return (_adm_use_withAudioDeviceModule != nullptr); - } - void destroyAudioDeviceModule() { - if (!_adm_thread) { - return; - } - _adm_thread->Invoke(RTC_FROM_HERE, [&] { - _adm_use_withAudioDeviceModule = nullptr; - }); - } - - void start() { - const auto weak = std::weak_ptr(shared_from_this()); - - _errorParsingLogSink.reset(new ErrorParsingLogSink([weak](uint32_t ssrc) { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - - std::vector ssrcs; - ssrcs.push_back(ssrc); - 
strong->_participantDescriptionsRequired(ssrcs); - }); - })); - rtc::LogMessage::AddLogToStream(_errorParsingLogSink.get(), rtc::LS_WARNING); - - webrtc::field_trial::InitFieldTrialsFromString( - //"WebRTC-Audio-SendSideBwe/Enabled/" - "WebRTC-Audio-Allocation/min:32kbps,max:32kbps/" - "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/" - //"WebRTC-FlexFEC-03/Enabled/" - //"WebRTC-FlexFEC-03-Advertised/Enabled/" - "WebRTC-PcFactoryDefaultBitrates/min:32kbps,start:32kbps,max:32kbps/" - "WebRTC-Video-DiscardPacketsWithUnknownSsrc/Enabled/" - "WebRTC-Video-BufferPacketsWithUnknownSsrc/Enabled/" - ); - - PlatformInterface::SharedInstance()->configurePlatformAudio(); - - webrtc::PeerConnectionFactoryDependencies dependencies; - dependencies.network_thread = StaticThreads::getNetworkThread(); - dependencies.worker_thread = StaticThreads::getWorkerThread(); - dependencies.signaling_thread = StaticThreads::getMediaThread(); - dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); - - if (!createAudioDeviceModule(dependencies)) { - return; - } - - cricket::MediaEngineDependencies mediaDeps; - mediaDeps.task_queue_factory = dependencies.task_queue_factory.get(); - mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); - mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); - mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext); - mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); - mediaDeps.adm = _adm_use_withAudioDeviceModule; - - std::shared_ptr myVad(new CombinedVad()); - - auto analyzer = new AudioCaptureAnalyzer([&, weak, myVad](const webrtc::AudioBuffer* buffer) { - if (!buffer) { - return; - } - if (buffer->num_channels() != 1) { - return; - } - - float peak = 0; - int peakCount = 0; - const float *samples = buffer->channels_const()[0]; - for (int i = 0; i < buffer->num_frames(); i++) { - float 
sample = samples[i]; - if (sample < 0) { - sample = -sample; - } - if (peak < sample) { - peak = sample; - } - peakCount += 1; - } - - bool vadStatus = myVad->update((webrtc::AudioBuffer *)buffer); - - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, peak, peakCount, vadStatus](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - - strong->_myAudioLevelPeakCount += peakCount; - if (strong->_myAudioLevelPeak < peak) { - strong->_myAudioLevelPeak = peak; - } - if (strong->_myAudioLevelPeakCount >= 1200) { - float level = strong->_myAudioLevelPeak / 4000.0f; - if (strong->_isMuted) { - level = 0.0f; - } - strong->_myAudioLevelPeak = 0; - strong->_myAudioLevelPeakCount = 0; - strong->_myAudioLevel = GroupLevelValue{ - level, - vadStatus, - }; - } - }); - }); - - webrtc::AudioProcessingBuilder builder; - builder.SetCaptureAnalyzer(std::unique_ptr(analyzer)); - webrtc::AudioProcessing *apm = builder.Create(); - - webrtc::AudioProcessing::Config audioConfig; - webrtc::AudioProcessing::Config::NoiseSuppression noiseSuppression; - noiseSuppression.enabled = true; - noiseSuppression.level = webrtc::AudioProcessing::Config::NoiseSuppression::kHigh; - audioConfig.noise_suppression = noiseSuppression; - - audioConfig.high_pass_filter.enabled = true; - - audioConfig.voice_detection.enabled = true; - - apm->ApplyConfig(audioConfig); - - mediaDeps.audio_processing = apm; - - /*mediaDeps.onUnknownAudioSsrc = [weak](uint32_t ssrc) { - getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - strong->onMissingSsrc(ssrc); - }); - };*/ - - dependencies.media_engine = cricket::CreateMediaEngine(std::move(mediaDeps)); - dependencies.call_factory = webrtc::CreateCallFactory(); - dependencies.event_log_factory = - std::make_unique(dependencies.task_queue_factory.get()); - dependencies.network_controller_factory = nullptr; - - _nativeFactory = 
webrtc::CreateModularPeerConnectionFactory(std::move(dependencies)); - - webrtc::PeerConnectionFactoryInterface::Options peerConnectionOptions; - peerConnectionOptions.disable_encryption = true; - _nativeFactory->SetOptions(peerConnectionOptions); - - webrtc::PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; - //config.continual_gathering_policy = webrtc::PeerConnectionInterface::ContinualGatheringPolicy::GATHER_CONTINUALLY; - config.audio_jitter_buffer_fast_accelerate = true; - config.prioritize_most_likely_ice_candidate_pairs = true; - config.presume_writable_when_fully_relayed = true; - //config.audio_jitter_buffer_enable_rtx_handling = true; - config.enable_rtp_data_channel = true; - config.enable_dtls_srtp = false; - - /*webrtc::CryptoOptions cryptoOptions; - webrtc::CryptoOptions::SFrame sframe; - sframe.require_frame_encryption = true; - cryptoOptions.sframe = sframe; - config.crypto_options = cryptoOptions;*/ - - _observer.reset(new PeerConnectionObserverImpl( - [weak](std::string sdp, int mid, std::string sdpMid) { - /*getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sdp, mid, sdpMid](){ - auto strong = weak.lock(); - if (strong) { - //strong->emitIceCandidate(sdp, mid, sdpMid); - } - });*/ - }, - [weak](bool isConnected) { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, isConnected](){ - auto strong = weak.lock(); - if (strong) { - strong->updateIsConnected(isConnected); - } - }); - }, - [weak](rtc::scoped_refptr transceiver) { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, transceiver](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - strong->onTrackAdded(transceiver); - }); - }, - [weak](rtc::scoped_refptr receiver) { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, receiver](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - strong->onTrackRemoved(receiver); - }); - }, - [weak](uint32_t ssrc) { - 
StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - strong->onMissingSsrc(ssrc); - }); - } - )); - _peerConnection = _nativeFactory->CreatePeerConnection(config, nullptr, nullptr, _observer.get()); - assert(_peerConnection != nullptr); - - cricket::AudioOptions options; - rtc::scoped_refptr audioSource = _nativeFactory->CreateAudioSource(options); - std::stringstream name; - name << "audio"; - name << 0; - std::vector streamIds; - streamIds.push_back(name.str()); - _localAudioTrack = _nativeFactory->CreateAudioTrack(name.str(), audioSource); - _localAudioTrack->set_enabled(false); - auto addedAudioTrack = _peerConnection->AddTrack(_localAudioTrack, streamIds); - - if (addedAudioTrack.ok()) { - _localAudioTrackSender = addedAudioTrack.value(); - for (auto &it : _peerConnection->GetTransceivers()) { - if (it->media_type() == cricket::MediaType::MEDIA_TYPE_AUDIO) { - if (_localAudioTrackSender.get() == it->sender().get()) { - const auto error = it->SetDirectionWithError(webrtc::RtpTransceiverDirection::kRecvOnly); - (void)error; - } - - break; - } - } - } - - if (_videoCapture && false) { - webrtc::DataChannelInit dataChannelConfig; - _localDataChannel = _peerConnection->CreateDataChannel("1", &dataChannelConfig); - - if (_localDataChannel) { - _localDataChannelMid = "1"; - - _localDataChannelObserver.reset(new DataChannelObserverImpl([weak]() { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - bool isOpen = strong->_localDataChannel->state() == webrtc::DataChannelInterface::DataState::kOpen; - if (strong->_localDataChannelIsOpen != isOpen) { - RTC_LOG(LS_INFO) << "DataChannel isOpen: " << isOpen; - strong->_localDataChannelIsOpen = isOpen; - if (isOpen) { - strong->updateRemoteVideoConstaints(); - } - } - }); - })); - _localDataChannel->RegisterObserver(_localDataChannelObserver.get()); - } - } - - 
updateVideoTrack(false, [](auto result) {}); - - setAudioInputDevice(_initialInputDeviceId); - setAudioOutputDevice(_initialOutputDeviceId); - - // At least on Windows recording doesn't work without started playout. - withAudioDeviceModule([weak](webrtc::AudioDeviceModule *adm) { -#ifdef WEBRTC_WIN - // At least on Windows starting/stopping playout while recording - // is active leads to errors in recording and assertion violation. - adm->EnableBuiltInAEC(false); -#endif // WEBRTC_WIN - - if (adm->InitPlayout() == 0) { - adm->StartPlayout(); - } else { - StaticThreads::getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - strong->withAudioDeviceModule([](webrtc::AudioDeviceModule *adm) { - if (adm->InitPlayout() == 0) { - adm->StartPlayout(); - } - }); - }, 2000); - } - }); - - beginLevelsTimer(50); - //beginTestQualityTimer(2000); - } - - - void setAudioInputDevice(std::string id) { -#if !defined(WEBRTC_IOS) && !defined(WEBRTC_ANDROID) - withAudioDeviceModule([&](webrtc::AudioDeviceModule *adm) { - const auto recording = adm->Recording(); - if (recording) { - adm->StopRecording(); - } - const auto finish = [&] { - if (recording) { - adm->InitRecording(); - adm->StartRecording(); - } - }; - if (id == "default" || id.empty()) { - if (const auto result = adm->SetRecordingDevice(webrtc::AudioDeviceModule::kDefaultCommunicationDevice)) { - RTC_LOG(LS_ERROR) << "setAudioInputDevice(" << id << "): SetRecordingDevice(kDefaultCommunicationDevice) failed: " << result << "."; - } else { - RTC_LOG(LS_INFO) << "setAudioInputDevice(" << id << "): SetRecordingDevice(kDefaultCommunicationDevice) success."; - } - return finish(); - } - const auto count = adm - ? 
adm->RecordingDevices() - : int16_t(-666); - if (count <= 0) { - RTC_LOG(LS_ERROR) << "setAudioInputDevice(" << id << "): Could not get recording devices count: " << count << "."; - return finish(); - } - for (auto i = 0; i != count; ++i) { - char name[webrtc::kAdmMaxDeviceNameSize + 1] = { 0 }; - char guid[webrtc::kAdmMaxGuidSize + 1] = { 0 }; - adm->RecordingDeviceName(i, name, guid); - if (id == guid) { - const auto result = adm->SetRecordingDevice(i); - if (result != 0) { - RTC_LOG(LS_ERROR) << "setAudioInputDevice(" << id << ") name '" << std::string(name) << "' failed: " << result << "."; - } else { - RTC_LOG(LS_INFO) << "setAudioInputDevice(" << id << ") name '" << std::string(name) << "' success."; - } - return finish(); - } - } - RTC_LOG(LS_ERROR) << "setAudioInputDevice(" << id << "): Could not find recording device."; - return finish(); - }); -#endif - } - - void setAudioOutputDevice(std::string id) { -#if !defined(WEBRTC_IOS) && !defined(WEBRTC_ANDROID) - withAudioDeviceModule([&](webrtc::AudioDeviceModule *adm) { - const auto playing = adm->Playing(); - if (playing) { - adm->StopPlayout(); - } - const auto finish = [&] { - if (playing) { - adm->InitPlayout(); - adm->StartPlayout(); - } - }; - if (id == "default" || id.empty()) { - if (const auto result = adm->SetPlayoutDevice(webrtc::AudioDeviceModule::kDefaultCommunicationDevice)) { - RTC_LOG(LS_ERROR) << "setAudioOutputDevice(" << id << "): SetPlayoutDevice(kDefaultCommunicationDevice) failed: " << result << "."; - } else { - RTC_LOG(LS_INFO) << "setAudioOutputDevice(" << id << "): SetPlayoutDevice(kDefaultCommunicationDevice) success."; - } - return finish(); - } - const auto count = adm - ? 
adm->PlayoutDevices() - : int16_t(-666); - if (count <= 0) { - RTC_LOG(LS_ERROR) << "setAudioOutputDevice(" << id << "): Could not get playout devices count: " << count << "."; - return finish(); - } - for (auto i = 0; i != count; ++i) { - char name[webrtc::kAdmMaxDeviceNameSize + 1] = { 0 }; - char guid[webrtc::kAdmMaxGuidSize + 1] = { 0 }; - adm->PlayoutDeviceName(i, name, guid); - if (id == guid) { - const auto result = adm->SetPlayoutDevice(i); - if (result != 0) { - RTC_LOG(LS_ERROR) << "setAudioOutputDevice(" << id << ") name '" << std::string(name) << "' failed: " << result << "."; - } else { - RTC_LOG(LS_INFO) << "setAudioOutputDevice(" << id << ") name '" << std::string(name) << "' success."; - } - return finish(); - } - } - RTC_LOG(LS_ERROR) << "setAudioOutputDevice(" << id << "): Could not find playout device."; - return finish(); - }); -#endif - } - - void addIncomingVideoOutput(uint32_t ssrc, std::weak_ptr> sink) { - auto current = _remoteVideoTrackSinks.find(ssrc); - if (current != _remoteVideoTrackSinks.end()) { - current->second->addSink(sink); - } else { - std::unique_ptr sinkProxy(new CustomVideoSinkInterfaceProxyImpl()); - sinkProxy->addSink(sink); - _remoteVideoTrackSinks[ssrc] = std::move(sinkProxy); - } - } - - void setVolume(uint32_t ssrc, double volume) { - auto current = _audioTrackVolumes.find(ssrc); - bool updated = false; - if (current != _audioTrackVolumes.end()) { - if (abs(current->second - volume) > 0.001) { - updated = true; - } - } else { - if (volume < 1.0 - 0.001) { - updated = true; - } - } - if (updated) { - _audioTrackVolumes[ssrc] = volume; - auto track = _audioTracks.find(ssrc); - if (track != _audioTracks.end()) { - track->second->GetSource()->SetVolume(volume); - } - } - } - - void setFullSizeVideoSsrc(uint32_t ssrc) { - if (_currentFullSizeVideoSsrc == ssrc) { - return; - } - bool update = false; - if (_currentFullSizeVideoSsrc != 0) { - if (setVideoConstraint(_currentFullSizeVideoSsrc, false, false)) { - update = true; - 
} - } - _currentFullSizeVideoSsrc = ssrc; - if (_currentFullSizeVideoSsrc != 0) { - if (setVideoConstraint(_currentFullSizeVideoSsrc, true, false)) { - update = true; - } - } - if (update) { - updateRemoteVideoConstaints(); - } - } - - void updateIsConnected(bool isConnected) { - _isConnected = isConnected; - - auto timestamp = rtc::TimeMillis(); - - _isConnectedUpdateValidTaskId++; - - if (!isConnected && _appliedOfferTimestamp > timestamp - 1000) { - auto taskId = _isConnectedUpdateValidTaskId; - const auto weak = std::weak_ptr(shared_from_this()); - StaticThreads::getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, taskId]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - if (strong->_isConnectedUpdateValidTaskId == taskId) { - strong->_networkStateUpdated(strong->_isConnected); - } - }, 1000); - } else { - _networkStateUpdated(_isConnected); - } - } - - void stop() { - _peerConnection->Close(); - } - - std::string adjustLocalSdp(std::string const &sdp) { - auto lines = splitSdpLines(sdp); - std::vector resultSdp; - - std::ostringstream generatedSsrcStringStream; - generatedSsrcStringStream << _mainStreamAudioSsrc; - auto generatedSsrcString = generatedSsrcStringStream.str(); - - auto bundleLines = getLines(lines, "a=group:BUNDLE "); - std::vector bundleMLines; - if (bundleLines.size() != 0) { - bundleMLines = splitBundleMLines(bundleLines[0]); - } - - bool hasVideo = false; - std::string currentMid; - int insertVideoLinesAtIndex = 0; - for (auto &line : lines) { - auto adjustedLine = line; - - if (adjustedLine.find("a=group:BUNDLE ") == 0) { - std::ostringstream bundleString; - bundleString << "a=group:BUNDLE"; - for (auto &mLine : bundleMLines) { - bundleString << " " << mLine; - } - adjustedLine = bundleString.str(); - } - - if (adjustedLine.find("m=") == 0) { - currentMid = ""; - } - if (adjustedLine.find("a=mid:") == 0) { - currentMid = adjustedLine; - currentMid.replace(0, std::string("a=mid:").size(), ""); - } - - if 
(adjustedLine.find("m=application") == 0) { - insertVideoLinesAtIndex = (int)resultSdp.size(); - } - - if (currentMid == "0") { - if (adjustedLine.find("a=ssrc:") == 0) { - int startIndex = 7; - int i = startIndex; - while (i < adjustedLine.size()) { - if (!isdigit(adjustedLine[i])) { - break; - } - i++; - } - if (i >= startIndex) { - adjustedLine.replace(startIndex, i - startIndex, generatedSsrcString); - } - } - } else if (currentMid == "1") { - if (adjustedLine.find("a=ssrc:") == 0 || adjustedLine.find("a=ssrc-group:") == 0) { - hasVideo = true; - adjustedLine.clear(); - } - } - - if (adjustedLine.find("a=candidate") == 0) { - adjustedLine.clear(); - } - - if (adjustedLine.size() != 0) { - appendSdp(resultSdp, adjustedLine); - if (currentMid == "1") { - insertVideoLinesAtIndex = (int)resultSdp.size(); - } - } - } - - if (hasVideo) { - std::vector videoSourceGroups; - - int ssrcDistance = 1; - - GroupJoinPayloadVideoSourceGroup sim; - sim.semantics = "SIM"; - sim.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 0); - sim.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 2); - sim.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 4); - videoSourceGroups.push_back(sim); - - GroupJoinPayloadVideoSourceGroup fid0; - fid0.semantics = "FID"; - fid0.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 0); - fid0.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 1); - videoSourceGroups.push_back(fid0); - - GroupJoinPayloadVideoSourceGroup fid1; - fid1.semantics = "FID"; - fid1.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 2); - fid1.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 3); - videoSourceGroups.push_back(fid1); - - GroupJoinPayloadVideoSourceGroup fid2; - fid2.semantics = "FID"; - fid2.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 4); - fid2.ssrcs.push_back(_mainStreamAudioSsrc + ssrcDistance + 5); - videoSourceGroups.push_back(fid2); - - std::string streamId = "video0"; - - std::vector ssrcs; - for (auto &group : 
videoSourceGroups) { - std::ostringstream groupString; - groupString << "a=ssrc-group:"; - groupString << group.semantics; - - for (auto ssrc : group.ssrcs) { - groupString << " " << ssrc; - - if (std::find(ssrcs.begin(), ssrcs.end(), ssrc) == ssrcs.end()) { - ssrcs.push_back(ssrc); - } - } - - appendSdp(resultSdp, groupString.str(), insertVideoLinesAtIndex); - insertVideoLinesAtIndex++; - } - - for (auto ssrc : ssrcs) { - std::ostringstream cnameString; - cnameString << "a=ssrc:"; - cnameString << ssrc; - cnameString << " cname:stream"; - cnameString << streamId; - appendSdp(resultSdp, cnameString.str(), insertVideoLinesAtIndex); - insertVideoLinesAtIndex++; - - std::ostringstream msidString; - msidString << "a=ssrc:"; - msidString << ssrc; - msidString << " msid:stream"; - msidString << streamId; - msidString << " video" << streamId; - appendSdp(resultSdp, msidString.str(), insertVideoLinesAtIndex); - insertVideoLinesAtIndex++; - - std::ostringstream mslabelString; - mslabelString << "a=ssrc:"; - mslabelString << ssrc; - mslabelString << " mslabel:video"; - mslabelString << streamId; - appendSdp(resultSdp, mslabelString.str(), insertVideoLinesAtIndex); - insertVideoLinesAtIndex++; - - std::ostringstream labelString; - labelString << "a=ssrc:"; - labelString << ssrc; - labelString << " label:video"; - labelString << streamId; - appendSdp(resultSdp, labelString.str(), insertVideoLinesAtIndex); - insertVideoLinesAtIndex++; - } - } - - std::ostringstream result; - for (auto &line : resultSdp) { - result << line << "\n"; - } - - return result.str(); - } - - void emitJoinPayload(std::function completion) { - const auto weak = std::weak_ptr(shared_from_this()); - webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options; - rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, completion](std::string sdp, std::string type) { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sdp, type, completion](){ - auto strong = weak.lock(); - if (!strong) 
{ - return; - } - - auto adjustedSdp = strong->adjustLocalSdp(sdp); - - RTC_LOG(LoggingSeverity::WARNING) << "----- setLocalDescription join -----"; - RTC_LOG(LoggingSeverity::WARNING) << adjustedSdp; - RTC_LOG(LoggingSeverity::WARNING) << "-----"; - - webrtc::SdpParseError error; - webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(type, adjustLocalDescription(adjustedSdp), &error); - if (sessionDescription != nullptr) { - rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, adjustedSdp, completion]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - - if (strong->_localVideoTrackTransceiver) { - strong->_localVideoMid = strong->_localVideoTrackTransceiver->mid(); - if (strong->_localDataChannel) { - if (strong->_localVideoMid && strong->_localVideoMid.value() == "1") { - strong->_localDataChannelMid = "2"; - } else { - strong->_localDataChannelMid = "1"; - } - } - } else { - strong->_localVideoMid.reset(); - } - - auto payload = parseSdpIntoJoinPayload(adjustedSdp); - if (payload) { - payload->ssrc = strong->_mainStreamAudioSsrc; - strong->_joinPayload = payload; - completion(payload.value()); - } - }, [](webrtc::RTCError error) { - })); - strong->_peerConnection->SetLocalDescription(observer, sessionDescription); - } else { - return; - } - }); - })); - _peerConnection->CreateOffer(observer, options); - } - - void setJoinResponsePayload(GroupJoinResponsePayload payload, std::vector &&participants) { - if (!_joinPayload) { - return; - } - _joinResponsePayload = payload; - - auto sdp = parseJoinResponseIntoSdp(_sessionId, _joinPayload.value(), payload, SdpType::kSdpTypeJoinAnswer, _allOtherParticipants, _localVideoMid, _localDataChannelMid, _bundleStreamsState); - setOfferSdp(sdp, true, true, false); - - addParticipantsInternal(std::move(participants), false); - } - - void removeSsrcs(std::vector ssrcs) { - if (!_joinPayload) { - return; - } - if (!_joinResponsePayload) { - return; - } - - bool updated = 
false; - for (auto ssrc : ssrcs) { - for (auto &participant : _allOtherParticipants) { - if (participant.audioSsrc == ssrc) { - if (!participant.isRemoved) { - participant.isRemoved = true; - updated = true; - } - } - } - } - - if (updated) { - auto sdp = parseJoinResponseIntoSdp(_sessionId, _joinPayload.value(), _joinResponsePayload.value(), SdpType::kSdpTypeRemoteOffer, _allOtherParticipants, _localVideoMid, _localDataChannelMid, _bundleStreamsState); - setOfferSdp(sdp, false, false, false); - } - } - - void addParticipants(std::vector &&participants) { - addParticipantsInternal(std::move(participants), false); - } - - void addParticipantsInternal(std::vector const &participants, bool completeMissingSsrcSetup) { - if (!_joinPayload || !_joinResponsePayload) { - if (completeMissingSsrcSetup) { - completeProcessingMissingSsrcs(); - } - return; - } - - std::vector addedSsrcs; - - for (auto &participant : participants) { - bool found = false; - for (auto &other : _allOtherParticipants) { - if (other.audioSsrc == participant.audioSsrc) { - found = true; - break; - } - } - - if (!found) { - addedSsrcs.push_back(participant.audioSsrc); - _allOtherParticipants.push_back(participant); - //_activeOtherSsrcs.insert(participant.audioSsrc); - } - } - - auto sdp = parseJoinResponseIntoSdp(_sessionId, _joinPayload.value(), _joinResponsePayload.value(), SdpType::kSdpTypeRemoteOffer, _allOtherParticipants, _localVideoMid, _localDataChannelMid, _bundleStreamsState); - setOfferSdp(sdp, false, false, completeMissingSsrcSetup); - - bool updated = false; - for (auto &ssrc : addedSsrcs) { - /*if (setVideoConstraint(ssrc, false, false)) { - updated = true; - }*/ - } - if (updated) { - updateRemoteVideoConstaints(); - } - } - - void applyLocalSdp() { - const auto weak = std::weak_ptr(shared_from_this()); - webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options; - rtc::scoped_refptr observer(new rtc::RefCountedObject([weak](std::string sdp, std::string type) { - 
StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sdp, type](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - - auto adjustedSdp = strong->adjustLocalSdp(sdp); - - RTC_LOG(LoggingSeverity::WARNING) << "----- setLocalDescription applyLocalSdp -----"; - RTC_LOG(LoggingSeverity::WARNING) << adjustedSdp; - RTC_LOG(LoggingSeverity::WARNING) << "-----"; - - webrtc::SdpParseError error; - webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(type, adjustLocalDescription(adjustedSdp), &error); - if (sessionDescription != nullptr) { - rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, adjustedSdp]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - - if (!strong->_joinPayload) { - return; - } - if (!strong->_joinResponsePayload) { - return; - } - - if (strong->_localVideoTrackTransceiver) { - strong->_localVideoMid = strong->_localVideoTrackTransceiver->mid(); - if (strong->_localDataChannel) { - if (strong->_localVideoMid && strong->_localVideoMid.value() == "1") { - strong->_localDataChannelMid = "2"; - } else { - strong->_localDataChannelMid = "1"; - } - } - } else { - strong->_localVideoMid.reset(); - } - - auto sdp = parseJoinResponseIntoSdp(strong->_sessionId, strong->_joinPayload.value(), strong->_joinResponsePayload.value(), SdpType::kSdpTypeJoinAnswer, strong->_allOtherParticipants, strong->_localVideoMid, strong->_localDataChannelMid, strong->_bundleStreamsState); - strong->setOfferSdp(sdp, false, true, false); - }, [](webrtc::RTCError error) { - })); - strong->_peerConnection->SetLocalDescription(observer, sessionDescription); - } else { - return; - } - }); - })); - _peerConnection->CreateOffer(observer, options); - } - - void setOfferSdp(std::string const &offerSdp, bool isInitialJoinAnswer, bool isAnswer, bool completeMissingSsrcSetup) { - if (!isAnswer && _appliedRemoteDescription == offerSdp) { - if (completeMissingSsrcSetup) { - completeProcessingMissingSsrcs(); - } - 
return; - } - - if (_appliedRemoteDescription.size() != 0) { - _appliedOfferTimestamp = rtc::TimeMillis(); - } - - _appliedRemoteDescription = offerSdp; - - RTC_LOG(LoggingSeverity::WARNING) << "----- setOfferSdp " << (isAnswer ? "answer" : "offer") << " -----"; - RTC_LOG(LoggingSeverity::WARNING) << offerSdp; - RTC_LOG(LoggingSeverity::WARNING) << "-----"; - - webrtc::SdpParseError error; - webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(isAnswer ? "answer" : "offer", adjustLocalDescription(offerSdp), &error); - if (!sessionDescription) { - if (completeMissingSsrcSetup) { - completeProcessingMissingSsrcs(); - } - return; - } - - const auto weak = std::weak_ptr(shared_from_this()); - rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, isInitialJoinAnswer, isAnswer, completeMissingSsrcSetup]() { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, isInitialJoinAnswer, isAnswer, completeMissingSsrcSetup](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - if (!isAnswer) { - strong->emitAnswer(completeMissingSsrcSetup); - } else { - if (isInitialJoinAnswer) { - strong->completedInitialSetup(); - } - - if (completeMissingSsrcSetup) { - strong->completeProcessingMissingSsrcs(); - } - } - }); - }, [weak, completeMissingSsrcSetup](webrtc::RTCError error) { - RTC_LOG(LoggingSeverity::LS_ERROR) << "Error: " << error.message(); - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, completeMissingSsrcSetup](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - if (completeMissingSsrcSetup) { - strong->completeProcessingMissingSsrcs(); - } - }); - })); - - _peerConnection->SetRemoteDescription(observer, sessionDescription); - } - - void beginStatsTimer(int timeoutMs) { - const auto weak = std::weak_ptr(shared_from_this()); - StaticThreads::getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak](){ - auto 
strong = weak.lock(); - if (!strong) { - return; - } - strong->collectStats(); - }); - }, timeoutMs); - } - - void beginLevelsTimer(int timeoutMs) { - const auto weak = std::weak_ptr(shared_from_this()); - StaticThreads::getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - - GroupLevelsUpdate levelsUpdate; - levelsUpdate.updates.reserve(strong->_audioLevels.size() + 1); - for (auto &it : strong->_audioLevels) { - if (it.second.level > 0.001f) { - levelsUpdate.updates.push_back(GroupLevelUpdate{ - it.first, - it.second, - }); - } - } - levelsUpdate.updates.push_back(GroupLevelUpdate{ 0, strong->_myAudioLevel }); - - strong->_audioLevels.clear(); - strong->_audioLevelsUpdated(levelsUpdate); - - strong->beginLevelsTimer(50); - }, timeoutMs); - } - - void beginTestQualityTimer(int timeoutMs) { - const auto weak = std::weak_ptr(shared_from_this()); - StaticThreads::getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - - strong->_debugQualityValue = !strong->_debugQualityValue; - strong->updateRemoteVideoConstaints(); - - strong->beginTestQualityTimer(5000); - }, timeoutMs); - } - - void collectStats() { - const auto weak = std::weak_ptr(shared_from_this()); - - rtc::scoped_refptr observer(new rtc::RefCountedObject([weak](const rtc::scoped_refptr &stats) { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, stats](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - strong->reportStats(stats); - strong->beginStatsTimer(100); - }); - })); - _peerConnection->GetStats(observer); - } - - void reportStats(const rtc::scoped_refptr &stats) { - } - - void onTrackAdded(rtc::scoped_refptr transceiver) { - if (transceiver->direction() == webrtc::RtpTransceiverDirection::kRecvOnly && transceiver->media_type() == cricket::MediaType::MEDIA_TYPE_AUDIO) { - if (transceiver->mid()) { - auto streamId = 
transceiver->mid().value(); - if (streamId.find("audio") != 0) { - return; - } - streamId.replace(0, 5, ""); - std::istringstream iss(streamId); - uint32_t ssrc = 0; - iss >> ssrc; - - rtc::scoped_refptr remoteAudioTrack(static_cast(transceiver->receiver()->track().get())); - if (_audioTracks.find(ssrc) == _audioTracks.end()) { - _audioTracks.insert(std::make_pair(ssrc, remoteAudioTrack)); - } - auto currentVolume = _audioTrackVolumes.find(ssrc); - if (currentVolume != _audioTrackVolumes.end()) { - remoteAudioTrack->GetSource()->SetVolume(currentVolume->second); - } - if (_audioTrackSinks.find(ssrc) == _audioTrackSinks.end()) { - const auto weak = std::weak_ptr(shared_from_this()); - std::shared_ptr sink(new AudioTrackSinkInterfaceImpl([weak, ssrc](float level, bool hasSpeech) { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc, level, hasSpeech]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - auto current = strong->_audioLevels.find(ssrc); - if (current != strong->_audioLevels.end()) { - if (current->second.level < level) { - strong->_audioLevels[ssrc] = GroupLevelValue{ - level, - hasSpeech, - }; - } - } else { - strong->_audioLevels.emplace( - ssrc, - GroupLevelValue{ - level, - hasSpeech, - }); - } - }); - })); - _audioTrackSinks[ssrc] = sink; - remoteAudioTrack->AddSink(sink.get()); - //remoteAudioTrack->GetSource()->SetVolume(0.01); - } - } - } else if (transceiver->direction() == webrtc::RtpTransceiverDirection::kRecvOnly && transceiver->media_type() == cricket::MediaType::MEDIA_TYPE_VIDEO) { - auto streamId = transceiver->mid().value(); - if (streamId.find("video") != 0) { - return; - } - streamId.replace(0, 5, ""); - std::istringstream iss(streamId); - uint32_t ssrc = 0; - iss >> ssrc; - - auto remoteVideoTrack = static_cast(transceiver->receiver()->track().get()); - if (_remoteVideoTracks.find(ssrc) == _remoteVideoTracks.end()) { - _remoteVideoTracks[ssrc] = remoteVideoTrack; - auto current = 
_remoteVideoTrackSinks.find(ssrc); - if (current != _remoteVideoTrackSinks.end()) { - remoteVideoTrack->AddOrUpdateSink(current->second.get(), rtc::VideoSinkWants()); - } else { - std::unique_ptr sink(new CustomVideoSinkInterfaceProxyImpl()); - remoteVideoTrack->AddOrUpdateSink(sink.get(), rtc::VideoSinkWants()); - _remoteVideoTrackSinks[ssrc] = std::move(sink); - } - - if (_incomingVideoSourcesUpdated) { - std::vector allSources; - for (auto &it : _remoteVideoTracks) { - allSources.push_back(it.first); - } - _incomingVideoSourcesUpdated(allSources); - } - } - } - } - - void onTrackRemoved(rtc::scoped_refptr receiver) { - for (auto &transceiver : _peerConnection->GetTransceivers()) { - if (transceiver->media_type() == cricket::MediaType::MEDIA_TYPE_VIDEO) { - if (receiver.get() == transceiver->receiver().get()) { - auto remoteVideoTrack = static_cast(transceiver->receiver()->track().get()); - - for (auto &it : _remoteVideoTracks) { - if (it.second.get() == remoteVideoTrack) { - auto sink = _remoteVideoTrackSinks.find(it.first); - if (sink != _remoteVideoTrackSinks.end()) { - remoteVideoTrack->RemoveSink(sink->second.get()); - _remoteVideoTrackSinks.erase(it.first); - } - _remoteVideoTracks.erase(it.first); - - if (_incomingVideoSourcesUpdated) { - std::vector allSources; - for (auto &it : _remoteVideoTracks) { - allSources.push_back(it.first); - } - _incomingVideoSourcesUpdated(allSources); - } - - break; - } - } - - break; - } - } - } - } - - void onMissingSsrc(uint32_t ssrc) { - /*if (_processedMissingSsrcs.find(ssrc) == _processedMissingSsrcs.end()) { - _processedMissingSsrcs.insert(ssrc); - - _missingSsrcQueue.insert(ssrc); - if (!_isProcessingMissingSsrcs) { - beginProcessingMissingSsrcs(); - } - }*/ - } - - void beginProcessingMissingSsrcs() { - if (_isProcessingMissingSsrcs) { - return; - } - _isProcessingMissingSsrcs = true; - auto timestamp = rtc::TimeMillis(); - if (timestamp > _missingSsrcsProcessedTimestamp + 200) { - applyMissingSsrcs(); - } else { - 
const auto weak = std::weak_ptr(shared_from_this()); - StaticThreads::getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - strong->applyMissingSsrcs(); - }, 200); - } - } - - void applyMissingSsrcs() { - assert(_isProcessingMissingSsrcs); - if (_missingSsrcQueue.size() == 0) { - completeProcessingMissingSsrcs(); - return; - } - - std::vector addParticipants; - for (auto ssrc : _missingSsrcQueue) { - GroupParticipantDescription participant; - participant.audioSsrc = ssrc; - addParticipants.push_back(participant); - } - _missingSsrcQueue.clear(); - - const auto weak = std::weak_ptr(shared_from_this()); - - addParticipantsInternal(addParticipants, true); - } - - void completeProcessingMissingSsrcs() { - assert(_isProcessingMissingSsrcs); - _isProcessingMissingSsrcs = false; - _missingSsrcsProcessedTimestamp = rtc::TimeMillis(); - - if (_missingSsrcQueue.size() != 0) { - beginProcessingMissingSsrcs(); - } - } - - void completedInitialSetup() { - //beginDebugSsrcTimer(1000); - } - - uint32_t _nextTestSsrc = 100; - - void beginDebugSsrcTimer(int timeout) { - const auto weak = std::weak_ptr(shared_from_this()); - StaticThreads::getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - - if (strong->_nextTestSsrc >= 100 + 50) { - return; - } - - strong->_nextTestSsrc++; - strong->onMissingSsrc(strong->_nextTestSsrc); - - strong->beginDebugSsrcTimer(20); - }, timeout); - } - - void setIsMuted(bool isMuted) { - if (!_localAudioTrackSender) { - return; - } - if (_isMuted == isMuted) { - return; - } - - for (auto &it : _peerConnection->GetTransceivers()) { - if (it->media_type() == cricket::MediaType::MEDIA_TYPE_AUDIO) { - if (_localAudioTrackSender.get() == it->sender().get()) { - if (isMuted) { - } else { - if (it->direction() != webrtc::RtpTransceiverDirection::kSendOnly) { - const auto error = 
it->SetDirectionWithError(webrtc::RtpTransceiverDirection::kSendOnly); - (void)error; - - applyLocalSdp(); - } - } - break; - } - } - } - - _isMuted = isMuted; - _localAudioTrack->set_enabled(!isMuted); - - RTC_LOG(LoggingSeverity::WARNING) << "setIsMuted: " << isMuted; - } - - void setVideoCapture(std::shared_ptr videoCapture, std::function completion) { - _videoCapture = videoCapture; - - updateVideoTrack(true, completion); - } - - void updateVideoTrack(bool applyNow, std::function completion) { - if (_videoCapture) { - VideoCaptureInterfaceObject *videoCaptureImpl = GetVideoCaptureAssumingSameThread(_videoCapture.get()); - - //_videoCapture->setPreferredAspectRatio(1280.0f / 720.0f); - - _localVideoTrack = _nativeFactory->CreateVideoTrack("video0", videoCaptureImpl->source()); - _localVideoTrack->set_enabled(true); - webrtc::RtpTransceiverInit videoInit; - auto addedTransceiver = _peerConnection->AddTransceiver(_localVideoTrack, videoInit); - if (addedTransceiver.ok()) { - _localVideoTrackTransceiver = addedTransceiver.value(); - for (auto &it : _peerConnection->GetTransceivers()) { - if (it->media_type() == cricket::MediaType::MEDIA_TYPE_VIDEO) { - if (_localVideoTrackTransceiver->sender().get() == it->sender().get()) { - it->SetDirectionWithError(webrtc::RtpTransceiverDirection::kSendOnly); - - auto capabilities = _nativeFactory->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_VIDEO); - - std::vector codecs; - bool hasVP8 = false; - for (auto &codec : capabilities.codecs) { - if (codec.name == cricket::kVp8CodecName) { - if (!hasVP8) { - codecs.insert(codecs.begin(), codec); - hasVP8 = true; - } - } else if (codec.name == cricket::kRtxCodecName) { - codecs.push_back(codec); - } - } - it->SetCodecPreferences(codecs); - - break; - } - } - } - } - } else if (_localVideoTrack && _localVideoTrackTransceiver) { - _localVideoTrack->set_enabled(false); - _localVideoTrackTransceiver->SetDirectionWithError(webrtc::RtpTransceiverDirection::kInactive); - for 
(auto &it : _peerConnection->GetTransceivers()) { - if (it.get() == _localVideoTrackTransceiver.get()) { - _peerConnection->RemoveTrack(it->sender()); - break; - } - } - _localVideoTrack = nullptr; - _localVideoTrackTransceiver = nullptr; - } - - if (applyNow) { - const auto weak = std::weak_ptr(shared_from_this()); - emitJoinPayload([weak, completion](auto result) { - auto strong = weak.lock(); - if (!strong) { - return; - } - - if (!strong->_joinPayload) { - return; - } - if (!strong->_joinResponsePayload) { - return; - } - - auto sdp = parseJoinResponseIntoSdp(strong->_sessionId, strong->_joinPayload.value(), strong->_joinResponsePayload.value(), SdpType::kSdpTypeJoinAnswer, strong->_allOtherParticipants, strong->_localVideoMid, strong->_localDataChannelMid, strong->_bundleStreamsState); - strong->setOfferSdp(sdp, false, true, false); - - completion(result); - }); - } - } - - void emitAnswer(bool completeMissingSsrcSetup) { - const auto weak = std::weak_ptr(shared_from_this()); - - webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options; - rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, completeMissingSsrcSetup](std::string sdp, std::string type) { - StaticThreads::getMediaThread()->PostTask(RTC_FROM_HERE, [weak, sdp, type, completeMissingSsrcSetup](){ - auto strong = weak.lock(); - if (!strong) { - return; - } - - RTC_LOG(LoggingSeverity::WARNING) << "----- setLocalDescription answer -----"; - RTC_LOG(LoggingSeverity::WARNING) << sdp; - RTC_LOG(LoggingSeverity::WARNING) << "-----"; - - webrtc::SdpParseError error; - webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(type, adjustLocalDescription(sdp), &error); - if (sessionDescription != nullptr) { - rtc::scoped_refptr observer(new rtc::RefCountedObject([weak, sdp, completeMissingSsrcSetup]() { - auto strong = weak.lock(); - if (!strong) { - return; - } - - if (completeMissingSsrcSetup) { - strong->completeProcessingMissingSsrcs(); - } - }, [weak, 
completeMissingSsrcSetup](webrtc::RTCError error) { - auto strong = weak.lock(); - if (!strong) { - return; - } - - if (completeMissingSsrcSetup) { - strong->completeProcessingMissingSsrcs(); - } - })); - strong->_peerConnection->SetLocalDescription(observer, sessionDescription); - } else { - if (completeMissingSsrcSetup) { - strong->completeProcessingMissingSsrcs(); - } - } - }); - })); - _peerConnection->CreateAnswer(observer, options); - } - - bool setVideoConstraint(uint32_t ssrc, bool highQuality, bool updateImmediately) { - auto current = _videoConstraints.find(ssrc); - bool updated = false; - if (current != _videoConstraints.end()) { - updated = current->second != highQuality; - } else { - updated = true; - } - - if (updated) { - _videoConstraints[ssrc] = highQuality; - - if (updateImmediately) { - updateRemoteVideoConstaints(); - } - } - return updated; - } - - void updateRemoteVideoConstaints() { - if (!_localDataChannelIsOpen) { - return; - } - - std::vector keys; - for (auto &it : _videoConstraints) { - keys.push_back(it.first); - } - std::sort(keys.begin(), keys.end()); - - std::string pinnedEndpoint; - - std::ostringstream string; - string << "{" << "\n"; - string << " \"colibriClass\": \"ReceiverVideoConstraintsChangedEvent\"," << "\n"; - string << " \"videoConstraints\": [" << "\n"; - bool isFirst = true; - for (size_t i = 0; i < keys.size(); i++) { - auto it = _videoConstraints.find(keys[i]); - int idealHeight = 720; - if (!it->second) { - idealHeight = 180; - } - - std::string endpointId; - for (auto &participant : _allOtherParticipants) { - if (participant.isRemoved) { - continue; - } - if (participant.audioSsrc == keys[i]) { - endpointId = participant.endpointId; - break; - } - } - - if (endpointId.size() == 0) { - continue; - } - - if (isFirst) { - isFirst = false; - } else { - if (i != 0) { - string << ","; - } - } - string << " {\n"; - string << " \"id\": \"" << endpointId << "\",\n"; - string << " \"idealHeight\": " << idealHeight << "\n"; - 
string << " }"; - string << "\n"; - } - string << " ]" << "\n"; - string << "}"; - - std::string result = string.str(); - RTC_LOG(LS_INFO) << "DataChannel send message: " << result; - - webrtc::DataBuffer buffer(result, false); - _localDataChannel->Send(buffer); - - /*if (pinnedEndpoint.size() != 0) { - std::ostringstream string; - string << "{" << "\n"; - string << " \"colibriClass\": \"PinnedEndpointChangedEvent\"," << "\n"; - string << " \"pinnedEndpoint\": \"" << pinnedEndpoint << "\"" << "\n"; - string << "}"; - - std::string result = string.str(); - - RTC_LOG(LS_INFO) << "DataChannel send message: " << result; - - webrtc::DataBuffer buffer(result, false); - _localDataChannel->Send(buffer); - }*/ - } - -private: - void withAudioDeviceModule(std::function callback) { - _adm_thread->Invoke(RTC_FROM_HERE, [&] { - callback(_adm_use_withAudioDeviceModule.get()); - }); - } - - std::function _networkStateUpdated; - std::function _audioLevelsUpdated; - std::function const &)> _incomingVideoSourcesUpdated; - std::function const &)> _participantDescriptionsRequired; - - int32_t _myAudioLevelPeakCount = 0; - float _myAudioLevelPeak = 0; - GroupLevelValue _myAudioLevel; - - std::string _initialInputDeviceId; - std::string _initialOutputDeviceId; - - uint32_t _sessionId = 6543245; - uint32_t _mainStreamAudioSsrc = 0; - absl::optional _joinPayload; - uint32_t _fakeIncomingSsrc = 0; - absl::optional _joinResponsePayload; - - int64_t _appliedOfferTimestamp = 0; - bool _isConnected = false; - int _isConnectedUpdateValidTaskId = 0; - - bool _isMuted = true; - - std::vector _allOtherParticipants; - std::set _processedMissingSsrcs; - - int64_t _missingSsrcsProcessedTimestamp = 0; - bool _isProcessingMissingSsrcs = false; - std::set _missingSsrcQueue; - - std::string _appliedRemoteDescription; - - rtc::scoped_refptr _nativeFactory; - std::unique_ptr _observer; - rtc::scoped_refptr _peerConnection; - std::unique_ptr _localAudioTrackSink; - rtc::scoped_refptr _localAudioTrack; - 
rtc::scoped_refptr _localAudioTrackSender; - - rtc::scoped_refptr _localVideoTrack; - rtc::scoped_refptr _localVideoTrackTransceiver; - - rtc::scoped_refptr _localDataChannel; - absl::optional _localDataChannelMid; - std::unique_ptr _localDataChannelObserver; - bool _localDataChannelIsOpen = false; - - absl::optional _localVideoMid; - - std::vector _bundleStreamsState; - - std::function(webrtc::TaskQueueFactory*)> _createAudioDeviceModule; - rtc::Thread *_adm_thread = nullptr; - rtc::scoped_refptr _adm_use_withAudioDeviceModule; - - std::map> _audioTracks; - std::map _audioTrackVolumes; - std::map> _audioTrackSinks; - std::map _audioLevels; - - std::map _videoConstraints; - uint32_t _currentFullSizeVideoSsrc = 0; - - bool _debugQualityValue = false; - - std::map> _remoteVideoTracks; - std::map> _remoteVideoTrackSinks; - - std::shared_ptr _videoCapture; - - std::unique_ptr _errorParsingLogSink; - - std::shared_ptr _platformContext; -}; - -GroupInstanceImpl::GroupInstanceImpl(GroupInstanceDescriptor &&descriptor) -: _logSink(std::make_unique(descriptor.config.logPath)) { - rtc::LogMessage::LogToDebug(rtc::LS_INFO); - rtc::LogMessage::SetLogToStderr(true); - if (_logSink) { - rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO); - } - - _manager.reset(new ThreadLocalObject(StaticThreads::getMediaThread(), [descriptor = std::move(descriptor)]() mutable { - return new GroupInstanceManager(std::move(descriptor)); - })); - _manager->perform(RTC_FROM_HERE, [](GroupInstanceManager *manager) { - manager->start(); - }); -} - -GroupInstanceImpl::~GroupInstanceImpl() { - if (_logSink) { - rtc::LogMessage::RemoveLogToStream(_logSink.get()); - } - _manager = nullptr; - - // Wait until _manager is destroyed, otherwise there is a race condition - // in destruction of PeerConnection on media thread and network thread. 
- StaticThreads::getMediaThread()->Invoke(RTC_FROM_HERE, [] {}); -} - -void GroupInstanceImpl::stop() { - _manager->perform(RTC_FROM_HERE, [](GroupInstanceManager *manager) { - manager->stop(); - }); -} - -void GroupInstanceImpl::emitJoinPayload(std::function completion) { - _manager->perform(RTC_FROM_HERE, [completion](GroupInstanceManager *manager) { - manager->emitJoinPayload(completion); - }); -} - -void GroupInstanceImpl::setJoinResponsePayload(GroupJoinResponsePayload payload, std::vector &&participants) { - _manager->perform(RTC_FROM_HERE, [payload, participants = std::move(participants)](GroupInstanceManager *manager) mutable { - manager->setJoinResponsePayload(payload, std::move(participants)); - }); -} - -void GroupInstanceImpl::removeSsrcs(std::vector ssrcs) { - _manager->perform(RTC_FROM_HERE, [ssrcs](GroupInstanceManager *manager) { - manager->removeSsrcs(ssrcs); - }); -} - -void GroupInstanceImpl::addParticipants(std::vector &&participants) { - _manager->perform(RTC_FROM_HERE, [participants = std::move(participants)](GroupInstanceManager *manager) mutable { - manager->addParticipants(std::move(participants)); - }); -} - -void GroupInstanceImpl::setIsMuted(bool isMuted) { - _manager->perform(RTC_FROM_HERE, [isMuted](GroupInstanceManager *manager) { - manager->setIsMuted(isMuted); - }); -} - -void GroupInstanceImpl::setVideoCapture(std::shared_ptr videoCapture, std::function completion) { - _manager->perform(RTC_FROM_HERE, [videoCapture, completion = std::move(completion)](GroupInstanceManager *manager) mutable { - manager->setVideoCapture(videoCapture, completion); - }); -} - -void GroupInstanceImpl::setAudioInputDevice(std::string id) { - _manager->perform(RTC_FROM_HERE, [id](GroupInstanceManager *manager) { - manager->setAudioInputDevice(id); - }); -} - -void GroupInstanceImpl::setAudioOutputDevice(std::string id) { - _manager->perform(RTC_FROM_HERE, [id](GroupInstanceManager *manager) { - manager->setAudioOutputDevice(id); - }); -} - -void 
GroupInstanceImpl::addIncomingVideoOutput(uint32_t ssrc, std::weak_ptr> sink) { - _manager->perform(RTC_FROM_HERE, [ssrc, sink](GroupInstanceManager *manager) { - manager->addIncomingVideoOutput(ssrc, sink); - }); -} - -void GroupInstanceImpl::setVolume(uint32_t ssrc, double volume) { - _manager->perform(RTC_FROM_HERE, [ssrc, volume](GroupInstanceManager *manager) { - manager->setVolume(ssrc, volume); - }); -} - -void GroupInstanceImpl::setFullSizeVideoSsrc(uint32_t ssrc) { - _manager->perform(RTC_FROM_HERE, [ssrc](GroupInstanceManager *manager) { - manager->setFullSizeVideoSsrc(ssrc); - }); -} - -} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h index a752652d7..bff07d9bb 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceImpl.h @@ -10,10 +10,12 @@ #include "../Instance.h" #include "../StaticThreads.h" +#include "GroupJoinPayload.h" namespace webrtc { class AudioDeviceModule; class TaskQueueFactory; +class VideoTrackSourceInterface; } namespace rtc { @@ -35,6 +37,7 @@ struct GroupConfig { struct GroupLevelValue { float level = 0.; bool voice = false; + bool isMuted = false; }; struct GroupLevelUpdate { @@ -77,6 +80,53 @@ struct GroupNetworkState { bool isTransitioningFromBroadcastToRtc = false; }; +enum class VideoContentType { + None, + Screencast, + Generic +}; + +enum class VideoCodecName { + VP8, + VP9 +}; + +class RequestMediaChannelDescriptionTask { +public: + virtual ~RequestMediaChannelDescriptionTask() = default; + + virtual void cancel() = 0; +}; + +struct MediaChannelDescription { + enum class Type { + Audio, + Video + }; + + Type type = Type::Audio; + uint32_t audioSsrc = 0; + std::string videoInformation; +}; + +struct MediaSsrcGroup { + std::string semantics; + std::vector ssrcs; +}; + +struct VideoChannelDescription { + enum class Quality { + Thumbnail, + Medium, + Full + }; 
+ uint32_t audioSsrc = 0; + std::string endpointId; + std::vector ssrcGroups; + Quality minQuality = Quality::Thumbnail; + Quality maxQuality = Quality::Thumbnail; +}; + struct GroupInstanceDescriptor { std::shared_ptr threads; GroupConfig config; @@ -88,82 +138,20 @@ struct GroupInstanceDescriptor { bool useDummyChannel{true}; bool disableIncomingChannels{false}; std::function(webrtc::TaskQueueFactory*)> createAudioDeviceModule; - std::shared_ptr videoCapture; - std::function const &)> incomingVideoSourcesUpdated; - std::function const &)> participantDescriptionsRequired; + std::shared_ptr videoCapture; // deprecated + std::function getVideoSource; std::function(std::shared_ptr, int64_t, int64_t, std::function)> requestBroadcastPart; + int outgoingAudioBitrateKbit{32}; + bool disableOutgoingAudioProcessing{false}; + VideoContentType videoContentType{VideoContentType::None}; + bool initialEnableNoiseSuppression{false}; + std::vector videoCodecPreferences; + std::function(std::vector const &, std::function &&)>)> requestMediaChannelDescriptions; + int minOutgoingVideoBitrateKbit{100}; + std::shared_ptr platformContext; }; -struct GroupJoinPayloadFingerprint { - std::string hash; - std::string setup; - std::string fingerprint; -}; - -struct GroupJoinPayloadVideoSourceGroup { - std::vector ssrcs; - std::string semantics; -}; - -struct GroupJoinPayloadVideoPayloadFeedbackType { - std::string type; - std::string subtype; -}; - -struct GroupJoinPayloadVideoPayloadType { - uint32_t id = 0; - std::string name; - uint32_t clockrate = 0; - uint32_t channels = 0; - std::vector feedbackTypes; - std::vector> parameters; -}; - -struct GroupJoinPayload { - std::string ufrag; - std::string pwd; - std::vector fingerprints; - - std::vector videoPayloadTypes; - std::vector> videoExtensionMap; - uint32_t ssrc = 0; - std::vector videoSourceGroups; -}; - -struct GroupParticipantDescription { - std::string endpointId; - uint32_t audioSsrc = 0; - std::vector videoPayloadTypes; - 
std::vector> videoExtensionMap; - std::vector videoSourceGroups; - bool isRemoved = false; -}; - -struct GroupJoinResponseCandidate { - std::string port; - std::string protocol; - std::string network; - std::string generation; - std::string id; - std::string component; - std::string foundation; - std::string priority; - std::string ip; - std::string type; - - std::string tcpType; - std::string relAddr; - std::string relPort; -}; - -struct GroupJoinResponsePayload { - std::string ufrag; - std::string pwd; - std::vector fingerprints; - std::vector candidates; -}; - template class ThreadLocalObject; @@ -178,20 +166,22 @@ public: virtual void setConnectionMode(GroupConnectionMode connectionMode, bool keepBroadcastIfWasEnabled) = 0; - virtual void emitJoinPayload(std::function completion) = 0; - virtual void setJoinResponsePayload(GroupJoinResponsePayload payload, std::vector &&participants) = 0; - virtual void addParticipants(std::vector &&participants) = 0; + virtual void emitJoinPayload(std::function completion) = 0; + virtual void setJoinResponsePayload(std::string const &payload) = 0; virtual void removeSsrcs(std::vector ssrcs) = 0; + virtual void removeIncomingVideoSource(uint32_t ssrc) = 0; virtual void setIsMuted(bool isMuted) = 0; - virtual void setVideoCapture(std::shared_ptr videoCapture, std::function completion) = 0; + virtual void setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled) = 0; + virtual void setVideoCapture(std::shared_ptr videoCapture) = 0; + virtual void setVideoSource(std::function getVideoSource) = 0; virtual void setAudioOutputDevice(std::string id) = 0; virtual void setAudioInputDevice(std::string id) = 0; - virtual void addIncomingVideoOutput(uint32_t ssrc, std::weak_ptr> sink) = 0; + virtual void addIncomingVideoOutput(std::string const &endpointId, std::weak_ptr> sink) = 0; virtual void setVolume(uint32_t ssrc, double volume) = 0; - virtual void setFullSizeVideoSsrc(uint32_t ssrc) = 0; + virtual void 
setRequestedVideoChannels(std::vector &&requestedVideoChannels) = 0; struct AudioDevice { enum class Type {Input, Output}; diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayload.h b/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayload.h new file mode 100644 index 000000000..e3fca4034 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayload.h @@ -0,0 +1,78 @@ +#ifndef TGCALLS_GROUP_JOIN_PAYLOAD_H +#define TGCALLS_GROUP_JOIN_PAYLOAD_H + +#include +#include +#include + +namespace tgcalls { + +struct GroupJoinPayloadVideoSourceGroup { + std::vector ssrcs; + std::string semantics; +}; + +struct GroupJoinPayloadVideoPayloadType { + struct FeedbackType { + std::string type; + std::string subtype; + }; + + uint32_t id = 0; + std::string name; + uint32_t clockrate = 0; + uint32_t channels = 0; + std::vector feedbackTypes; + std::vector> parameters; +}; + +struct GroupJoinTransportDescription { + struct Fingerprint { + std::string hash; + std::string setup; + std::string fingerprint; + }; + + struct Candidate { + std::string port; + std::string protocol; + std::string network; + std::string generation; + std::string id; + std::string component; + std::string foundation; + std::string priority; + std::string ip; + std::string type; + + std::string tcpType; + std::string relAddr; + std::string relPort; + }; + + std::string ufrag; + std::string pwd; + std::vector fingerprints; + std::vector candidates; +}; + +struct GroupJoinVideoInformation { + uint32_t serverVideoBandwidthProbingSsrc = 0; + std::string endpointId; + std::vector payloadTypes; + std::vector> extensionMap; +}; + +struct GroupParticipantVideoInformation { + std::string endpointId; + std::vector ssrcGroups; +}; + +struct GroupJoinPayload { + uint32_t audioSsrc = 0; + std::string json; +}; + +} + +#endif diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayloadInternal.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayloadInternal.cpp new file mode 100644 index 
000000000..fce5c67e9 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayloadInternal.cpp @@ -0,0 +1,373 @@ +#include "GroupJoinPayloadInternal.h" + +#include "third-party/json11.hpp" +#include + +namespace tgcalls { + +namespace { + +absl::optional parseInt(json11::Json::object const &object, std::string const &key) { + const auto value = object.find(key); + if (value == object.end() || !value->second.is_number()) { + return absl::nullopt; + } + return value->second.int_value(); +} + +absl::optional parseString(json11::Json::object const &object, std::string const &key) { + const auto value = object.find(key); + if (value == object.end() || !value->second.is_string()) { + return absl::nullopt; + } + return value->second.string_value(); +} + +template +void splitString(const std::string &s, char delim, Out result) { + std::istringstream iss(s); + std::string item; + while (std::getline(iss, item, delim)) { + *result++ = item; + } +} + +std::vector splitString(const std::string &s, char delim) { + std::vector elems; + splitString(s, delim, std::back_inserter(elems)); + return elems; +} + +absl::optional parseTransportDescription(json11::Json::object const &object) { + GroupJoinTransportDescription result; + + if (const auto pwd = parseString(object, "pwd")) { + result.pwd = pwd.value(); + } else { + return absl::nullopt; + } + + if (const auto ufrag = parseString(object, "ufrag")) { + result.ufrag = ufrag.value(); + } else { + return absl::nullopt; + } + + const auto fingerprints = object.find("fingerprints"); + if (fingerprints == object.end() || !fingerprints->second.is_array()) { + return absl::nullopt; + } + for (const auto &fingerprint : fingerprints->second.array_items()) { + if (!fingerprint.is_object()) { + return absl::nullopt; + } + + GroupJoinTransportDescription::Fingerprint parsedFingerprint; + + if (const auto hash = parseString(fingerprint.object_items(), "hash")) { + parsedFingerprint.hash = hash.value(); + } else { + return 
absl::nullopt; + } + + if (const auto fingerprintValue = parseString(fingerprint.object_items(), "fingerprint")) { + parsedFingerprint.fingerprint = fingerprintValue.value(); + } else { + return absl::nullopt; + } + + if (const auto setup = parseString(fingerprint.object_items(), "setup")) { + parsedFingerprint.setup = setup.value(); + } else { + return absl::nullopt; + } + + result.fingerprints.push_back(std::move(parsedFingerprint)); + } + + const auto candidates = object.find("candidates"); + if (candidates == object.end() || !candidates->second.is_array()) { + return absl::nullopt; + } + for (const auto &candidate : candidates->second.array_items()) { + if (!candidate.is_object()) { + return absl::nullopt; + } + + GroupJoinTransportDescription::Candidate parsedCandidate; + + if (const auto port = parseString(candidate.object_items(), "port")) { + parsedCandidate.port = port.value(); + } else { + return absl::nullopt; + } + + if (const auto protocol = parseString(candidate.object_items(), "protocol")) { + parsedCandidate.protocol = protocol.value(); + } else { + return absl::nullopt; + } + + if (const auto network = parseString(candidate.object_items(), "network")) { + parsedCandidate.network = network.value(); + } else { + return absl::nullopt; + } + + if (const auto generation = parseString(candidate.object_items(), "generation")) { + parsedCandidate.generation = generation.value(); + } else { + return absl::nullopt; + } + + if (const auto id = parseString(candidate.object_items(), "id")) { + parsedCandidate.id = id.value(); + } else { + return absl::nullopt; + } + + if (const auto component = parseString(candidate.object_items(), "component")) { + parsedCandidate.component = component.value(); + } else { + return absl::nullopt; + } + + if (const auto foundation = parseString(candidate.object_items(), "foundation")) { + parsedCandidate.foundation = foundation.value(); + } else { + return absl::nullopt; + } + + if (const auto priority = 
parseString(candidate.object_items(), "priority")) { + parsedCandidate.priority = priority.value(); + } else { + return absl::nullopt; + } + + if (const auto ip = parseString(candidate.object_items(), "ip")) { + parsedCandidate.ip = ip.value(); + } else { + return absl::nullopt; + } + + if (const auto type = parseString(candidate.object_items(), "type")) { + parsedCandidate.type = type.value(); + } else { + return absl::nullopt; + } + + if (const auto tcpType = parseString(candidate.object_items(), "tcptype")) { + parsedCandidate.tcpType = tcpType.value(); + } + + if (const auto relAddr = parseString(candidate.object_items(), "rel-addr")) { + parsedCandidate.relAddr = relAddr.value(); + } + + if (const auto relPort = parseString(candidate.object_items(), "rel-port")) { + parsedCandidate.relPort = relPort.value(); + } + + result.candidates.push_back(std::move(parsedCandidate)); + } + + return result; +} + +absl::optional parsePayloadType(json11::Json::object const &object) { + GroupJoinPayloadVideoPayloadType result; + + if (const auto id = parseInt(object, "id")) { + result.id = (uint32_t)id.value(); + } else { + return absl::nullopt; + } + + if (const auto name = parseString(object, "name")) { + result.name = name.value(); + } else { + return absl::nullopt; + } + + if (const auto clockrate = parseInt(object, "clockrate")) { + result.clockrate = (uint32_t)clockrate.value(); + } else { + result.clockrate = 0; + } + + if (const auto channels = parseInt(object, "channels")) { + result.channels = (uint32_t)channels.value(); + } else { + result.channels = 1; + } + + const auto parameters = object.find("parameters"); + if (parameters != object.end() && parameters->second.is_object()) { + for (const auto ¶meter : parameters->second.object_items()) { + if (parameter.second.is_string()) { + result.parameters.push_back(std::make_pair(parameter.first, parameter.second.string_value())); + } + } + } + + const auto rtcpFbs = object.find("rtcp-fbs"); + if (rtcpFbs != object.end() 
&& rtcpFbs->second.is_array()) { + for (const auto &item : rtcpFbs->second.array_items()) { + if (item.is_object()) { + const auto type = item.object_items().find("type"); + if (type != item.object_items().end() && type->second.is_string()) { + GroupJoinPayloadVideoPayloadType::FeedbackType parsedFeedbackType; + + const auto typeString = type->second.string_value(); + + const auto subtype = item.object_items().find("subtype"); + if (subtype != item.object_items().end() && subtype->second.is_string()) { + parsedFeedbackType.type = typeString; + parsedFeedbackType.subtype = subtype->second.string_value(); + } else { + auto components = splitString(typeString, ' '); + if (components.size() == 1) { + parsedFeedbackType.type = components[0]; + } else if (components.size() == 2) { + parsedFeedbackType.type = components[0]; + parsedFeedbackType.subtype = components[1]; + } else { + continue; + } + } + + result.feedbackTypes.push_back(std::move(parsedFeedbackType)); + } + } + } + } + + return result; +} + +absl::optional parseVideoInformation(json11::Json::object const &object) { + GroupJoinVideoInformation result; + + const auto serverSources = object.find("server_sources"); + if (serverSources != object.end() && serverSources->second.is_array()) { + for (const auto &item : serverSources->second.array_items()) { + if (item.is_number()) { + int32_t value = item.int_value(); + uint32_t unsignedValue = *(uint32_t *)&value; + result.serverVideoBandwidthProbingSsrc = unsignedValue; + } + } + } + + const auto payloadTypes = object.find("payload-types"); + if (payloadTypes != object.end() && payloadTypes->second.is_array()) { + for (const auto &payloadType : payloadTypes->second.array_items()) { + if (payloadType.is_object()) { + if (const auto parsedPayloadType = parsePayloadType(payloadType.object_items())) { + result.payloadTypes.push_back(parsedPayloadType.value()); + } + } + } + } + + const auto rtpHdrexts = object.find("rtp-hdrexts"); + if (rtpHdrexts != object.end() && 
rtpHdrexts->second.is_array()) { + for (const auto &rtpHdrext : rtpHdrexts->second.array_items()) { + if (rtpHdrext.is_object()) { + const auto id = rtpHdrext.object_items().find("id"); + if (id == rtpHdrext.object_items().end() || !id->second.is_number()) { + continue; + } + + const auto uri = rtpHdrext.object_items().find("uri"); + if (uri == rtpHdrext.object_items().end() || !uri->second.is_string()) { + continue; + } + + result.extensionMap.push_back(std::make_pair(id->second.int_value(), uri->second.string_value())); + } + } + } + + const auto endpointId = object.find("endpoint"); + if (endpointId != object.end() && endpointId->second.is_string()) { + result.endpointId = endpointId->second.string_value(); + } + + return result; +} + +} + +std::string GroupJoinInternalPayload::serialize() { + json11::Json::object object; + + int32_t signedSsrc = *(int32_t *)&audioSsrc; + + object.insert(std::make_pair("ssrc", json11::Json(signedSsrc))); + object.insert(std::make_pair("ufrag", json11::Json(transport.ufrag))); + object.insert(std::make_pair("pwd", json11::Json(transport.pwd))); + + json11::Json::array fingerprints; + for (const auto &fingerprint : transport.fingerprints) { + json11::Json::object fingerprintJson; + + fingerprintJson.insert(std::make_pair("hash", json11::Json(fingerprint.hash))); + fingerprintJson.insert(std::make_pair("fingerprint", json11::Json(fingerprint.fingerprint))); + fingerprintJson.insert(std::make_pair("setup", json11::Json(fingerprint.setup))); + + fingerprints.push_back(json11::Json(std::move(fingerprintJson))); + } + object.insert(std::make_pair("fingerprints", json11::Json(std::move(fingerprints)))); + + if (videoInformation) { + json11::Json::array ssrcGroups; + for (const auto &ssrcGroup : videoInformation->ssrcGroups) { + json11::Json::object ssrcGroupJson; + + json11::Json::array ssrcGroupSources; + for (auto ssrc : ssrcGroup.ssrcs) { + int32_t signedValue = *(int32_t *)&ssrc; + 
ssrcGroupSources.push_back(json11::Json(signedValue)); + } + + ssrcGroupJson.insert(std::make_pair("sources", json11::Json(std::move(ssrcGroupSources)))); + ssrcGroupJson.insert(std::make_pair("semantics", json11::Json(ssrcGroup.semantics))); + + ssrcGroups.push_back(json11::Json(std::move(ssrcGroupJson))); + } + object.insert(std::make_pair("ssrc-groups", json11::Json(std::move(ssrcGroups)))); + } + + auto json = json11::Json(std::move(object)); + return json.dump(); +} + +absl::optional GroupJoinResponsePayload::parse(std::string const &data) { + std::string parsingError; + auto json = json11::Json::parse(std::string(data.begin(), data.end()), parsingError); + if (json.type() != json11::Json::OBJECT) { + return absl::nullopt; + } + + tgcalls::GroupJoinResponsePayload result; + + const auto transport = json.object_items().find("transport"); + if (transport == json.object_items().end() || !transport->second.is_object()) { + return absl::nullopt; + } + if (const auto parsedTransport = parseTransportDescription(transport->second.object_items())) { + result.transport = parsedTransport.value(); + } else { + return absl::nullopt; + } + + const auto video = json.object_items().find("video"); + if (video != json.object_items().end() && video->second.is_object()) { + result.videoInformation = parseVideoInformation(video->second.object_items()); + } + + return result; +} + +} diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayloadInternal.h b/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayloadInternal.h new file mode 100644 index 000000000..08e09ad27 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupJoinPayloadInternal.h @@ -0,0 +1,32 @@ +#ifndef TGCALLS_GROUP_JOIN_PAYLOAD_INTERNAL_H +#define TGCALLS_GROUP_JOIN_PAYLOAD_INTERNAL_H + +#include "GroupJoinPayload.h" + +#include +#include +#include + +#include "absl/types/optional.h" + +namespace tgcalls { + +struct GroupJoinResponsePayload { + GroupJoinTransportDescription transport; + absl::optional 
videoInformation; + + static absl::optional parse(std::string const &data); +}; + +struct GroupJoinInternalPayload { + GroupJoinTransportDescription transport; + + uint32_t audioSsrc = 0; + absl::optional videoInformation; + + std::string serialize(); +}; + +} + +#endif diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp index 852c1ec00..0d13513e5 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp @@ -13,6 +13,8 @@ #include "p2p/base/dtls_transport_factory.h" #include "pc/dtls_srtp_transport.h" #include "pc/dtls_transport.h" +#include "media/sctp/sctp_transport_factory.h" +#include "platform/PlatformInterface.h" #include "StaticThreads.h" @@ -137,10 +139,10 @@ public: _dataChannel->OnDataReceived(params, buffer); } - virtual bool SendData(const cricket::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) override { + virtual bool SendData(int sid, const webrtc::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) override { assert(_threads->getNetworkThread()->IsCurrent()); - return _sctpTransport->SendData(params, payload); + return _sctpTransport->SendData(sid, params, payload); } virtual bool ConnectDataChannel(webrtc::SctpDataChannel *data_channel) override { @@ -199,14 +201,12 @@ webrtc::CryptoOptions GroupNetworkManager::getDefaulCryptoOptions() { GroupNetworkManager::GroupNetworkManager( std::function stateUpdated, std::function transportMessageReceived, - std::function rtcpPacketReceived, std::function dataChannelStateUpdated, std::function dataChannelMessageReceived, std::shared_ptr threads) : _threads(std::move(threads)), _stateUpdated(std::move(stateUpdated)), _transportMessageReceived(std::move(transportMessageReceived)), -_rtcpPacketReceived(std::move(rtcpPacketReceived)), 
_dataChannelStateUpdated(dataChannelStateUpdated), _dataChannelMessageReceived(dataChannelMessageReceived) { assert(_threads->getNetworkThread()->IsCurrent()); @@ -214,18 +214,18 @@ _dataChannelMessageReceived(dataChannelMessageReceived) { _localIceParameters = PeerIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH)); _localCertificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(rtc::KT_ECDSA), absl::nullopt); + + _networkMonitorFactory = PlatformInterface::SharedInstance()->createNetworkMonitorFactory(); _socketFactory.reset(new rtc::BasicPacketSocketFactory(_threads->getNetworkThread())); - _networkManager = std::make_unique(); + _networkManager = std::make_unique(_networkMonitorFactory.get()); _asyncResolverFactory = std::make_unique(); _dtlsSrtpTransport = std::make_unique(true); _dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr); _dtlsSrtpTransport->SetActiveResetSrtpParams(false); - _dtlsSrtpTransport->SignalDtlsStateChange.connect(this, &GroupNetworkManager::DtlsStateChanged); _dtlsSrtpTransport->SignalReadyToSend.connect(this, &GroupNetworkManager::DtlsReadyToSend); _dtlsSrtpTransport->SignalRtpPacketReceived.connect(this, &GroupNetworkManager::RtpPacketReceived_n); - _dtlsSrtpTransport->SignalRtcpPacketReceived.connect(this, &GroupNetworkManager::OnRtcpPacketReceived_n); resetDtlsSrtpTransport(); } @@ -281,8 +281,6 @@ void GroupNetworkManager::resetDtlsSrtpTransport() { this, &GroupNetworkManager::OnTransportWritableState_n); _dtlsTransport->SignalReceivingState.connect( this, &GroupNetworkManager::OnTransportReceivingState_n); - _dtlsTransport->SignalDtlsHandshakeError.connect( - this, &GroupNetworkManager::OnDtlsHandshakeError); _dtlsTransport->SetDtlsRole(rtc::SSLRole::SSL_SERVER); _dtlsTransport->SetLocalCertificate(_localCertificate); @@ -293,22 +291,27 @@ void GroupNetworkManager::resetDtlsSrtpTransport() { void GroupNetworkManager::start() { 
_transportChannel->MaybeStartGathering(); - /*const auto weak = std::weak_ptr(shared_from_this()); - _dataChannelInterface.reset(new SctpDataChannelProviderInterfaceImpl(_dtlsTransport.get(), [weak, threads = _threads](bool state) { - assert(threads->getNetworkThread()->IsCurrent()); - const auto strong = weak.lock(); - if (!strong) { - return; - } - strong->_dataChannelStateUpdated(state); - }, [weak, threads = _threads[](std::string const &message) { - assert(threads->getNetworkThread()->IsCurrent()); - const auto strong = weak.lock(); - if (!strong) { - return; - } - strong->_dataChannelMessageReceived(message); - }));*/ + const auto weak = std::weak_ptr(shared_from_this()); + _dataChannelInterface.reset(new SctpDataChannelProviderInterfaceImpl( + _dtlsTransport.get(), + [weak, threads = _threads](bool state) { + assert(threads->getNetworkThread()->IsCurrent()); + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->_dataChannelStateUpdated(state); + }, + [weak, threads = _threads](std::string const &message) { + assert(threads->getNetworkThread()->IsCurrent()); + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->_dataChannelMessageReceived(message); + }, + _threads + )); } void GroupNetworkManager::stop() { @@ -317,7 +320,6 @@ void GroupNetworkManager::stop() { _dtlsTransport->SignalWritableState.disconnect(this); _dtlsTransport->SignalReceivingState.disconnect(this); - _dtlsTransport->SignalDtlsHandshakeError.disconnect(this); _dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr); @@ -416,25 +418,6 @@ void GroupNetworkManager::OnTransportReceivingState_n(rtc::PacketTransportIntern UpdateAggregateStates_n(); } -void GroupNetworkManager::OnDtlsHandshakeError(rtc::SSLHandshakeError error) { - assert(_threads->getNetworkThread()->IsCurrent()); -} - -void GroupNetworkManager::DtlsStateChanged() { - UpdateAggregateStates_n(); - - if (_dtlsTransport->IsDtlsActive()) { - const auto weak = 
std::weak_ptr(shared_from_this()); - _threads->getNetworkThread()->PostTask(RTC_FROM_HERE, [weak]() { - const auto strong = weak.lock(); - if (!strong) { - return; - } - strong->UpdateAggregateStates_n(); - }); - } -} - void GroupNetworkManager::DtlsReadyToSend(bool isReadyToSend) { UpdateAggregateStates_n(); @@ -470,12 +453,6 @@ void GroupNetworkManager::RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, in } } -void GroupNetworkManager::OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us) { - if (_rtcpPacketReceived) { - _rtcpPacketReceived(*packet, packet_time_us); - } -} - void GroupNetworkManager::UpdateAggregateStates_n() { assert(_threads->getNetworkThread()->IsCurrent()); diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.h b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.h index 3477ec795..d62289f39 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.h +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.h @@ -8,9 +8,10 @@ #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/network_monitor_factory.h" #include "api/candidate.h" #include "media/base/media_channel.h" -#include "media/sctp/sctp_transport.h" +#include "rtc_base/ssl_fingerprint.h" #include "pc/sctp_data_channel.h" #include @@ -58,7 +59,6 @@ public: GroupNetworkManager( std::function stateUpdated, std::function transportMessageReceived, - std::function rtcpPacketReceived, std::function dataChannelStateUpdated, std::function dataChannelMessageReceived, std::shared_ptr threads); @@ -82,11 +82,9 @@ private: void candidateGatheringState(cricket::IceTransportInternal *transport); void OnTransportWritableState_n(rtc::PacketTransportInternal *transport); void OnTransportReceivingState_n(rtc::PacketTransportInternal *transport); - void OnDtlsHandshakeError(rtc::SSLHandshakeError error); void transportStateChanged(cricket::IceTransportInternal *transport); void 
transportReadyToSend(cricket::IceTransportInternal *transport); void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t ×tamp, int unused); - void DtlsStateChanged(); void DtlsReadyToSend(bool DtlsReadyToSend); void UpdateAggregateStates_n(); void RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us, bool isUnresolved); @@ -98,10 +96,10 @@ private: std::shared_ptr _threads; std::function _stateUpdated; std::function _transportMessageReceived; - std::function _rtcpPacketReceived; std::function _dataChannelStateUpdated; std::function _dataChannelMessageReceived; + std::unique_ptr _networkMonitorFactory; std::unique_ptr _socketFactory; std::unique_ptr _networkManager; std::unique_ptr _turnCustomizer; diff --git a/TMessagesProj/jni/voip/tgcalls/group/StreamingPart.cpp b/TMessagesProj/jni/voip/tgcalls/group/StreamingPart.cpp index 1b8dbf113..1847e2a35 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/StreamingPart.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/StreamingPart.cpp @@ -12,7 +12,6 @@ extern "C" { #include #include #include -#include namespace tgcalls { @@ -310,26 +309,28 @@ private: } int ret = 0; - - ret = av_read_frame(_inputFormatContext, &_packet); - if (ret < 0) { + do { + ret = av_read_frame(_inputFormatContext, &_packet); + if (ret < 0) { _didReadToEnd = true; return; - } + } - ret = avcodec_send_packet(_codecContext, &_packet); - if (ret < 0) { + ret = avcodec_send_packet(_codecContext, &_packet); + if (ret < 0) { _didReadToEnd = true; return; - } + } - int bytesPerSample = av_get_bytes_per_sample(_codecContext->sample_fmt); - if (bytesPerSample != 2 && bytesPerSample != 4) { + int bytesPerSample = av_get_bytes_per_sample(_codecContext->sample_fmt); + if (bytesPerSample != 2 && bytesPerSample != 4) { _didReadToEnd = true; return; - } + } + + ret = avcodec_receive_frame(_codecContext, _frame); + } while (ret == AVERROR(EAGAIN)); - ret = 
avcodec_receive_frame(_codecContext, _frame); if (ret != 0) { _didReadToEnd = true; return; diff --git a/TMessagesProj/jni/voip/tgcalls/group/StreamingPart.h b/TMessagesProj/jni/voip/tgcalls/group/StreamingPart.h index 08859d65e..6e0812cb5 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/StreamingPart.h +++ b/TMessagesProj/jni/voip/tgcalls/group/StreamingPart.h @@ -3,6 +3,7 @@ #include "absl/types/optional.h" #include +#include namespace tgcalls { diff --git a/TMessagesProj/jni/voip/tgcalls/platform/PlatformInterface.h b/TMessagesProj/jni/voip/tgcalls/platform/PlatformInterface.h index 6a8e8b29f..e3432d089 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/PlatformInterface.h +++ b/TMessagesProj/jni/voip/tgcalls/platform/PlatformInterface.h @@ -5,6 +5,9 @@ #include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/media_stream_interface.h" +#include "rtc_base/network_monitor_factory.h" +#include "modules/audio_device/include/audio_device.h" +#include "rtc_base/ref_counted_object.h" #include namespace tgcalls { @@ -16,6 +19,277 @@ class PlatformContext; struct PlatformCaptureInfo { bool shouldBeAdaptedToReceiverAspectRate = false; + int rotation = 0; +}; + +class WrappedAudioDeviceModule : public webrtc::AudioDeviceModule { +public: + virtual void Stop() = 0; +}; + +class DefaultWrappedAudioDeviceModule : public WrappedAudioDeviceModule { +public: + DefaultWrappedAudioDeviceModule(rtc::scoped_refptr impl) : + _impl(impl) { + } + + virtual ~DefaultWrappedAudioDeviceModule() { + } + + virtual void Stop() override { + } + + virtual int32_t ActiveAudioLayer(AudioLayer *audioLayer) const override { + return _impl->ActiveAudioLayer(audioLayer); + } + + virtual int32_t RegisterAudioCallback(webrtc::AudioTransport *audioCallback) override { + return _impl->RegisterAudioCallback(audioCallback); + } + + virtual int32_t Init() override { + return _impl->Init(); + } + + virtual int32_t Terminate() override { + return 
_impl->Terminate(); + } + + virtual bool Initialized() const override { + return _impl->Initialized(); + } + + virtual int16_t PlayoutDevices() override { + return _impl->PlayoutDevices(); + } + + virtual int16_t RecordingDevices() override { + return _impl->RecordingDevices(); + } + + virtual int32_t PlayoutDeviceName(uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override { + return _impl->PlayoutDeviceName(index, name, guid); + } + + virtual int32_t RecordingDeviceName(uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override { + return _impl->RecordingDeviceName(index, name, guid); + } + + virtual int32_t SetPlayoutDevice(uint16_t index) override { + return _impl->SetPlayoutDevice(index); + } + + virtual int32_t SetPlayoutDevice(WindowsDeviceType device) override { + return _impl->SetPlayoutDevice(device); + } + + virtual int32_t SetRecordingDevice(uint16_t index) override { + return _impl->SetRecordingDevice(index); + } + + virtual int32_t SetRecordingDevice(WindowsDeviceType device) override { + return _impl->SetRecordingDevice(device); + } + + virtual int32_t PlayoutIsAvailable(bool *available) override { + return _impl->PlayoutIsAvailable(available); + } + + virtual int32_t InitPlayout() override { + return _impl->InitPlayout(); + } + + virtual bool PlayoutIsInitialized() const override { + return _impl->PlayoutIsInitialized(); + } + + virtual int32_t RecordingIsAvailable(bool *available) override { + return _impl->RecordingIsAvailable(available); + } + + virtual int32_t InitRecording() override { + return _impl->InitRecording(); + } + + virtual bool RecordingIsInitialized() const override { + return _impl->RecordingIsInitialized(); + } + + virtual int32_t StartPlayout() override { + return _impl->StartPlayout(); + } + + virtual int32_t StopPlayout() override { + return _impl->StopPlayout(); + } + + virtual bool Playing() const override { + return _impl->Playing(); + 
} + + virtual int32_t StartRecording() override { + return _impl->StartRecording(); + } + + virtual int32_t StopRecording() override { + return _impl->StopRecording(); + } + + virtual bool Recording() const override { + return _impl->Recording(); + } + + virtual int32_t InitSpeaker() override { + return _impl->InitSpeaker(); + } + + virtual bool SpeakerIsInitialized() const override { + return _impl->SpeakerIsInitialized(); + } + + virtual int32_t InitMicrophone() override { + return _impl->InitMicrophone(); + } + + virtual bool MicrophoneIsInitialized() const override { + return _impl->MicrophoneIsInitialized(); + } + + virtual int32_t SpeakerVolumeIsAvailable(bool *available) override { + return _impl->SpeakerVolumeIsAvailable(available); + } + + virtual int32_t SetSpeakerVolume(uint32_t volume) override { + return _impl->SetSpeakerVolume(volume); + } + + virtual int32_t SpeakerVolume(uint32_t* volume) const override { + return _impl->SpeakerVolume(volume); + } + + virtual int32_t MaxSpeakerVolume(uint32_t *maxVolume) const override { + return _impl->MaxSpeakerVolume(maxVolume); + } + + virtual int32_t MinSpeakerVolume(uint32_t *minVolume) const override { + return _impl->MinSpeakerVolume(minVolume); + } + + virtual int32_t MicrophoneVolumeIsAvailable(bool *available) override { + return _impl->MicrophoneVolumeIsAvailable(available); + } + + virtual int32_t SetMicrophoneVolume(uint32_t volume) override { + return _impl->SetMicrophoneVolume(volume); + } + + virtual int32_t MicrophoneVolume(uint32_t *volume) const override { + return _impl->MicrophoneVolume(volume); + } + + virtual int32_t MaxMicrophoneVolume(uint32_t *maxVolume) const override { + return _impl->MaxMicrophoneVolume(maxVolume); + } + + virtual int32_t MinMicrophoneVolume(uint32_t *minVolume) const override { + return _impl->MinMicrophoneVolume(minVolume); + } + + virtual int32_t SpeakerMuteIsAvailable(bool *available) override { + return _impl->SpeakerMuteIsAvailable(available); + } + + virtual 
int32_t SetSpeakerMute(bool enable) override { + return _impl->SetSpeakerMute(enable); + } + + virtual int32_t SpeakerMute(bool *enabled) const override { + return _impl->SpeakerMute(enabled); + } + + virtual int32_t MicrophoneMuteIsAvailable(bool *available) override { + return _impl->MicrophoneMuteIsAvailable(available); + } + + virtual int32_t SetMicrophoneMute(bool enable) override { + return _impl->SetMicrophoneMute(enable); + } + + virtual int32_t MicrophoneMute(bool *enabled) const override { + return _impl->MicrophoneMute(enabled); + } + + virtual int32_t StereoPlayoutIsAvailable(bool *available) const override { + return _impl->StereoPlayoutIsAvailable(available); + } + + virtual int32_t SetStereoPlayout(bool enable) override { + return _impl->SetStereoPlayout(enable); + } + + virtual int32_t StereoPlayout(bool *enabled) const override { + return _impl->StereoPlayout(enabled); + } + + virtual int32_t StereoRecordingIsAvailable(bool *available) const override { + return _impl->StereoRecordingIsAvailable(available); + } + + virtual int32_t SetStereoRecording(bool enable) override { + return _impl->SetStereoRecording(enable); + } + + virtual int32_t StereoRecording(bool *enabled) const override { + return _impl->StereoRecording(enabled); + } + + virtual int32_t PlayoutDelay(uint16_t* delayMS) const override { + return _impl->PlayoutDelay(delayMS); + } + + virtual bool BuiltInAECIsAvailable() const override { + return _impl->BuiltInAECIsAvailable(); + } + + virtual bool BuiltInAGCIsAvailable() const override { + return _impl->BuiltInAGCIsAvailable(); + } + + virtual bool BuiltInNSIsAvailable() const override { + return _impl->BuiltInNSIsAvailable(); + } + + virtual int32_t EnableBuiltInAEC(bool enable) override { + return _impl->EnableBuiltInAEC(enable); + } + + virtual int32_t EnableBuiltInAGC(bool enable) override { + return _impl->EnableBuiltInAGC(enable); + } + + virtual int32_t EnableBuiltInNS(bool enable) override { + return 
_impl->EnableBuiltInNS(enable); + } + + virtual int32_t GetPlayoutUnderrunCount() const override { + return _impl->GetPlayoutUnderrunCount(); + } + +#if defined(WEBRTC_IOS) + virtual int GetPlayoutAudioParameters(webrtc::AudioParameters *params) const override { + return _impl->GetPlayoutAudioParameters(params); + } + virtual int GetRecordAudioParameters(webrtc::AudioParameters *params) const override { + return _impl->GetRecordAudioParameters(params); + } +#endif // WEBRTC_IOS + + rtc::scoped_refptr WrappedInstance() { + return _impl; + } + +private: + rtc::scoped_refptr _impl; }; class PlatformInterface { @@ -26,12 +300,19 @@ public: virtual void configurePlatformAudio() { } + virtual std::unique_ptr createNetworkMonitorFactory() { + return nullptr; + } + virtual std::unique_ptr makeVideoEncoderFactory(std::shared_ptr platformContext) = 0; virtual std::unique_ptr makeVideoDecoderFactory(std::shared_ptr platformContext) = 0; virtual bool supportsEncoding(const std::string &codecName, std::shared_ptr platformContext) = 0; - virtual rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) = 0; + virtual rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread, bool screencapture) = 0; virtual void adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) = 0; virtual std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, std::string deviceId, std::function stateUpdated, std::function captureInfoUpdated, std::shared_ptr platformContext, std::pair &outResolution) = 0; + virtual rtc::scoped_refptr wrapAudioDeviceModule(rtc::scoped_refptr module) { + return new rtc::RefCountedObject(module); + } }; diff --git a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.cpp b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.cpp index e49d1ce9f..7cf031dc8 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.cpp +++ 
b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.cpp @@ -5,22 +5,22 @@ namespace tgcalls { -AndroidContext::AndroidContext(JNIEnv *env, jobject instance) { - VideoCameraCapturerClass = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/VideoCameraCapturer")); - jmethodID initMethodId = env->GetMethodID(VideoCameraCapturerClass, "", "()V"); - javaCapturer = env->NewGlobalRef(env->NewObject(VideoCameraCapturerClass, initMethodId)); +AndroidContext::AndroidContext(JNIEnv *env, jobject instance, bool screencast) { + VideoCapturerDeviceClass = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/VideoCapturerDevice")); + jmethodID initMethodId = env->GetMethodID(VideoCapturerDeviceClass, "", "(Z)V"); + javaCapturer = env->NewGlobalRef(env->NewObject(VideoCapturerDeviceClass, initMethodId, screencast)); javaInstance = env->NewGlobalRef(instance); } AndroidContext::~AndroidContext() { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - jmethodID onDestroyMethodId = env->GetMethodID(VideoCameraCapturerClass, "onDestroy", "()V"); + jmethodID onDestroyMethodId = env->GetMethodID(VideoCapturerDeviceClass, "onDestroy", "()V"); env->CallVoidMethod(javaCapturer, onDestroyMethodId); env->DeleteGlobalRef(javaCapturer); javaCapturer = nullptr; - env->DeleteGlobalRef(VideoCameraCapturerClass); + env->DeleteGlobalRef(VideoCapturerDeviceClass); if (javaInstance) { env->DeleteGlobalRef(javaInstance); @@ -40,7 +40,7 @@ jobject AndroidContext::getJavaCapturer() { } jclass AndroidContext::getJavaCapturerClass() { - return VideoCameraCapturerClass; + return VideoCapturerDeviceClass; } } // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.h b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.h index 2a77ef7f3..effdb0ecf 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.h +++ b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidContext.h @@ -10,7 
+10,7 @@ namespace tgcalls { class AndroidContext final : public PlatformContext { public: - AndroidContext(JNIEnv *env, jobject instance); + AndroidContext(JNIEnv *env, jobject instance, bool screencast); ~AndroidContext() override; jobject getJavaCapturer(); @@ -20,9 +20,10 @@ public: void setJavaInstance(JNIEnv *env, jobject instance); std::shared_ptr streamTask; + std::vector> descriptionTasks; private: - jclass VideoCameraCapturerClass = nullptr; + jclass VideoCapturerDeviceClass = nullptr; jobject javaCapturer = nullptr; jobject javaInstance = nullptr; diff --git a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.cpp b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.cpp index b1b57d0d6..78a8e3b7f 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.cpp +++ b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.cpp @@ -3,6 +3,8 @@ #include #include #include +#include +#include #include #include "VideoCapturerInterfaceImpl.h" @@ -55,10 +57,10 @@ void AndroidInterface::adaptVideoSource(rtc::scoped_refptr AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) { +rtc::scoped_refptr AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread, bool screencapture) { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - _source = webrtc::CreateJavaVideoSource(env, signalingThread, false, false); - return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, _source); + _source[screencapture ? 1 : 0] = webrtc::CreateJavaVideoSource(env, signalingThread, false, false); + return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, _source[screencapture ? 
1 : 0]); } bool AndroidInterface::supportsEncoding(const std::string &codecName, std::shared_ptr platformContext) { @@ -84,9 +86,12 @@ bool AndroidInterface::supportsEncoding(const std::string &codecName, std::share } std::unique_ptr AndroidInterface::makeVideoCapturer(rtc::scoped_refptr source, std::string deviceId, std::function stateUpdated, std::function captureInfoUpdated, std::shared_ptr platformContext, std::pair &outResolution) { - return std::make_unique(_source, deviceId, stateUpdated, platformContext); + return std::make_unique(_source[deviceId == "screen" ? 1 : 0], deviceId, stateUpdated, platformContext); } +std::unique_ptr AndroidInterface::createNetworkMonitorFactory() { + return std::make_unique(); +} std::unique_ptr CreatePlatformInterface() { return std::make_unique(); diff --git a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.h b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.h index 0a52e1f5c..32d5fd818 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.h +++ b/TMessagesProj/jni/voip/tgcalls/platform/android/AndroidInterface.h @@ -13,12 +13,13 @@ public: std::unique_ptr makeVideoEncoderFactory(std::shared_ptr platformContext) override; std::unique_ptr makeVideoDecoderFactory(std::shared_ptr platformContext) override; bool supportsEncoding(const std::string &codecName, std::shared_ptr platformContext) override; - rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override; + rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread, bool screencapture) override; void adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) override; std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, std::string deviceId, std::function stateUpdated, std::function captureInfoUpdated, std::shared_ptr platformContext, std::pair &outResolution) override; + std::unique_ptr 
createNetworkMonitorFactory() override; private: - rtc::scoped_refptr _source; + rtc::scoped_refptr _source[2]; std::unique_ptr hardwareVideoEncoderFactory; std::unique_ptr softwareVideoEncoderFactory; diff --git a/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCameraCapturer.cpp b/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCameraCapturer.cpp index 3236622f6..4368f8be5 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCameraCapturer.cpp +++ b/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCameraCapturer.cpp @@ -1,6 +1,6 @@ #include "VideoCameraCapturer.h" -#include +#include #include #include @@ -13,8 +13,8 @@ namespace tgcalls { VideoCameraCapturer::VideoCameraCapturer(rtc::scoped_refptr source, std::string deviceId, std::function stateUpdated, std::shared_ptr platformContext) : _source(source), _stateUpdated(stateUpdated), _platformContext(platformContext) { AndroidContext *context = (AndroidContext *) platformContext.get(); JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "init", "(JZ)V"); - env->CallVoidMethod(context->getJavaCapturer(), methodId, (jlong) (intptr_t) this, (jboolean) (deviceId != "back")); + jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "init", "(JLjava/lang/String;)V"); + env->CallVoidMethod(context->getJavaCapturer(), methodId, (jlong) (intptr_t) this, env->NewStringUTF(deviceId.c_str())); } void VideoCameraCapturer::setState(VideoState state) { @@ -23,7 +23,7 @@ void VideoCameraCapturer::setState(VideoState state) { _stateUpdated(_state); } JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - AndroidContext *context = (AndroidContext *) _platformContext.get(); + auto context = (AndroidContext *) _platformContext.get(); jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "onStateChanged", "(JI)V"); env->CallVoidMethod(context->getJavaCapturer(), methodId, (jlong) (intptr_t) this, 
(jint) state); } @@ -31,7 +31,7 @@ void VideoCameraCapturer::setState(VideoState state) { void VideoCameraCapturer::setPreferredCaptureAspectRatio(float aspectRatio) { _aspectRatio = aspectRatio; JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - AndroidContext *context = (AndroidContext *) _platformContext.get(); + auto context = (AndroidContext *) _platformContext.get(); jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "onAspectRatioRequested", "(F)V"); env->CallVoidMethod(context->getJavaCapturer(), methodId, (jfloat) aspectRatio); } @@ -54,7 +54,7 @@ webrtc::ScopedJavaLocalRef VideoCameraCapturer::GetJavaVideoCapturerObs extern "C" { -JNIEXPORT jobject Java_org_telegram_messenger_voip_VideoCameraCapturer_nativeGetJavaVideoCapturerObserver(JNIEnv *env, jclass clazz, jlong ptr) { +JNIEXPORT jobject Java_org_telegram_messenger_voip_VideoCapturerDevice_nativeGetJavaVideoCapturerObserver(JNIEnv *env, jclass clazz, jlong ptr) { tgcalls::VideoCameraCapturer *capturer = (tgcalls::VideoCameraCapturer *) (intptr_t) ptr; return capturer->GetJavaVideoCapturerObserver(env).Release(); } diff --git a/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCapturerInterfaceImpl.cpp b/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCapturerInterfaceImpl.cpp index 1c7889256..fceedcc0c 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCapturerInterfaceImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCapturerInterfaceImpl.cpp @@ -1,11 +1,13 @@ #include "VideoCapturerInterfaceImpl.h" +#include + #include "VideoCameraCapturer.h" namespace tgcalls { VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr source, std::string deviceId, std::function stateUpdated, std::shared_ptr platformContext) { - _capturer = std::unique_ptr(new VideoCameraCapturer(source, deviceId, stateUpdated, platformContext)); + _capturer = std::make_unique(source, deviceId, stateUpdated, platformContext); } void 
VideoCapturerInterfaceImpl::setState(VideoState state) { @@ -20,4 +22,12 @@ void VideoCapturerInterfaceImpl::setUncroppedOutput(std::shared_ptrsetUncroppedSink(sink); } +int VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl::getRotation() { + return 0; +} + +void VideoCapturerInterfaceImpl::setOnFatalError(std::function error) { + +} + } // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCapturerInterfaceImpl.h b/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCapturerInterfaceImpl.h index 69e1c5177..9079d558c 100644 --- a/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCapturerInterfaceImpl.h +++ b/TMessagesProj/jni/voip/tgcalls/platform/android/VideoCapturerInterfaceImpl.h @@ -14,6 +14,8 @@ public: void setState(VideoState state) override; void setPreferredCaptureAspectRatio(float aspectRatio) override; void setUncroppedOutput(std::shared_ptr> sink) override; + int getRotation() override; + void setOnFatalError(std::function error) override; private: std::unique_ptr _capturer; diff --git a/TMessagesProj/jni/voip/tgcalls/reference/InstanceImplReference.cpp b/TMessagesProj/jni/voip/tgcalls/reference/InstanceImplReference.cpp index 13e0c28b7..999d5616f 100644 --- a/TMessagesProj/jni/voip/tgcalls/reference/InstanceImplReference.cpp +++ b/TMessagesProj/jni/voip/tgcalls/reference/InstanceImplReference.cpp @@ -844,11 +844,15 @@ private: std::vector codecs; for (auto &codec : capabilities.codecs) { +#ifndef WEBRTC_DISABLE_H265 if (codec.name == cricket::kH265CodecName) { codecs.insert(codecs.begin(), codec); } else { codecs.push_back(codec); } +#else + codecs.push_back(codec); +#endif } it->SetCodecPreferences(codecs); diff --git a/TMessagesProj/jni/voip/tgcalls/third-party/json11.cpp b/TMessagesProj/jni/voip/tgcalls/third-party/json11.cpp new file mode 100644 index 000000000..88024e922 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/third-party/json11.cpp @@ -0,0 +1,790 @@ +/* Copyright (c) 2013 Dropbox, Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ + +#include "json11.hpp" +#include +#include +#include +#include +#include + +namespace json11 { + +static const int max_depth = 200; + +using std::string; +using std::vector; +using std::map; +using std::make_shared; +using std::initializer_list; +using std::move; + +/* Helper for representing null - just a do-nothing struct, plus comparison + * operators so the helpers in JsonValue work. We can't use nullptr_t because + * it may not be orderable. 
+ */ +struct NullStruct { + bool operator==(NullStruct) const { return true; } + bool operator<(NullStruct) const { return false; } +}; + +/* * * * * * * * * * * * * * * * * * * * + * Serialization + */ + +static void dump(NullStruct, string &out) { + out += "null"; +} + +static void dump(double value, string &out) { + if (std::isfinite(value)) { + char buf[32]; + snprintf(buf, sizeof buf, "%.17g", value); + out += buf; + } else { + out += "null"; + } +} + +static void dump(int value, string &out) { + char buf[32]; + snprintf(buf, sizeof buf, "%d", value); + out += buf; +} + +static void dump(bool value, string &out) { + out += value ? "true" : "false"; +} + +static void dump(const string &value, string &out) { + out += '"'; + for (size_t i = 0; i < value.length(); i++) { + const char ch = value[i]; + if (ch == '\\') { + out += "\\\\"; + } else if (ch == '"') { + out += "\\\""; + } else if (ch == '\b') { + out += "\\b"; + } else if (ch == '\f') { + out += "\\f"; + } else if (ch == '\n') { + out += "\\n"; + } else if (ch == '\r') { + out += "\\r"; + } else if (ch == '\t') { + out += "\\t"; + } else if (static_cast(ch) <= 0x1f) { + char buf[8]; + snprintf(buf, sizeof buf, "\\u%04x", ch); + out += buf; + } else if (static_cast(ch) == 0xe2 && static_cast(value[i+1]) == 0x80 + && static_cast(value[i+2]) == 0xa8) { + out += "\\u2028"; + i += 2; + } else if (static_cast(ch) == 0xe2 && static_cast(value[i+1]) == 0x80 + && static_cast(value[i+2]) == 0xa9) { + out += "\\u2029"; + i += 2; + } else { + out += ch; + } + } + out += '"'; +} + +static void dump(const Json::array &values, string &out) { + bool first = true; + out += "["; + for (const auto &value : values) { + if (!first) + out += ", "; + value.dump(out); + first = false; + } + out += "]"; +} + +static void dump(const Json::object &values, string &out) { + bool first = true; + out += "{"; + for (const auto &kv : values) { + if (!first) + out += ", "; + dump(kv.first, out); + out += ": "; + kv.second.dump(out); + 
first = false; + } + out += "}"; +} + +void Json::dump(string &out) const { + m_ptr->dump(out); +} + +/* * * * * * * * * * * * * * * * * * * * + * Value wrappers + */ + +template +class Value : public JsonValue { +protected: + + // Constructors + explicit Value(const T &value) : m_value(value) {} + explicit Value(T &&value) : m_value(move(value)) {} + + // Get type tag + Json::Type type() const override { + return tag; + } + + // Comparisons + bool equals(const JsonValue * other) const override { + return m_value == static_cast *>(other)->m_value; + } + bool less(const JsonValue * other) const override { + return m_value < static_cast *>(other)->m_value; + } + + const T m_value; + void dump(string &out) const override { json11::dump(m_value, out); } +}; + +class JsonDouble final : public Value { + double number_value() const override { return m_value; } + int int_value() const override { return static_cast(m_value); } + bool equals(const JsonValue * other) const override { return m_value == other->number_value(); } + bool less(const JsonValue * other) const override { return m_value < other->number_value(); } +public: + explicit JsonDouble(double value) : Value(value) {} +}; + +class JsonInt final : public Value { + double number_value() const override { return m_value; } + int int_value() const override { return m_value; } + bool equals(const JsonValue * other) const override { return m_value == other->number_value(); } + bool less(const JsonValue * other) const override { return m_value < other->number_value(); } +public: + explicit JsonInt(int value) : Value(value) {} +}; + +class JsonBoolean final : public Value { + bool bool_value() const override { return m_value; } +public: + explicit JsonBoolean(bool value) : Value(value) {} +}; + +class JsonString final : public Value { + const string &string_value() const override { return m_value; } +public: + explicit JsonString(const string &value) : Value(value) {} + explicit JsonString(string &&value) : 
Value(move(value)) {} +}; + +class JsonArray final : public Value { + const Json::array &array_items() const override { return m_value; } + const Json & operator[](size_t i) const override; +public: + explicit JsonArray(const Json::array &value) : Value(value) {} + explicit JsonArray(Json::array &&value) : Value(move(value)) {} +}; + +class JsonObject final : public Value { + const Json::object &object_items() const override { return m_value; } + const Json & operator[](const string &key) const override; +public: + explicit JsonObject(const Json::object &value) : Value(value) {} + explicit JsonObject(Json::object &&value) : Value(move(value)) {} +}; + +class JsonNull final : public Value { +public: + JsonNull() : Value({}) {} +}; + +/* * * * * * * * * * * * * * * * * * * * + * Static globals - static-init-safe + */ +struct Statics { + const std::shared_ptr null = make_shared(); + const std::shared_ptr t = make_shared(true); + const std::shared_ptr f = make_shared(false); + const string empty_string; + const vector empty_vector; + const map empty_map; + Statics() {} +}; + +static const Statics & statics() { + static const Statics s {}; + return s; +} + +static const Json & static_null() { + // This has to be separate, not in Statics, because Json() accesses statics().null. + static const Json json_null; + return json_null; +} + +/* * * * * * * * * * * * * * * * * * * * + * Constructors + */ + +Json::Json() noexcept : m_ptr(statics().null) {} +Json::Json(std::nullptr_t) noexcept : m_ptr(statics().null) {} +Json::Json(double value) : m_ptr(make_shared(value)) {} +Json::Json(int value) : m_ptr(make_shared(value)) {} +Json::Json(bool value) : m_ptr(value ? 
statics().t : statics().f) {} +Json::Json(const string &value) : m_ptr(make_shared(value)) {} +Json::Json(string &&value) : m_ptr(make_shared(move(value))) {} +Json::Json(const char * value) : m_ptr(make_shared(value)) {} +Json::Json(const Json::array &values) : m_ptr(make_shared(values)) {} +Json::Json(Json::array &&values) : m_ptr(make_shared(move(values))) {} +Json::Json(const Json::object &values) : m_ptr(make_shared(values)) {} +Json::Json(Json::object &&values) : m_ptr(make_shared(move(values))) {} + +/* * * * * * * * * * * * * * * * * * * * + * Accessors + */ + +Json::Type Json::type() const { return m_ptr->type(); } +double Json::number_value() const { return m_ptr->number_value(); } +int Json::int_value() const { return m_ptr->int_value(); } +bool Json::bool_value() const { return m_ptr->bool_value(); } +const string & Json::string_value() const { return m_ptr->string_value(); } +const vector & Json::array_items() const { return m_ptr->array_items(); } +const map & Json::object_items() const { return m_ptr->object_items(); } +const Json & Json::operator[] (size_t i) const { return (*m_ptr)[i]; } +const Json & Json::operator[] (const string &key) const { return (*m_ptr)[key]; } + +double JsonValue::number_value() const { return 0; } +int JsonValue::int_value() const { return 0; } +bool JsonValue::bool_value() const { return false; } +const string & JsonValue::string_value() const { return statics().empty_string; } +const vector & JsonValue::array_items() const { return statics().empty_vector; } +const map & JsonValue::object_items() const { return statics().empty_map; } +const Json & JsonValue::operator[] (size_t) const { return static_null(); } +const Json & JsonValue::operator[] (const string &) const { return static_null(); } + +const Json & JsonObject::operator[] (const string &key) const { + auto iter = m_value.find(key); + return (iter == m_value.end()) ? 
static_null() : iter->second; +} +const Json & JsonArray::operator[] (size_t i) const { + if (i >= m_value.size()) return static_null(); + else return m_value[i]; +} + +/* * * * * * * * * * * * * * * * * * * * + * Comparison + */ + +bool Json::operator== (const Json &other) const { + if (m_ptr == other.m_ptr) + return true; + if (m_ptr->type() != other.m_ptr->type()) + return false; + + return m_ptr->equals(other.m_ptr.get()); +} + +bool Json::operator< (const Json &other) const { + if (m_ptr == other.m_ptr) + return false; + if (m_ptr->type() != other.m_ptr->type()) + return m_ptr->type() < other.m_ptr->type(); + + return m_ptr->less(other.m_ptr.get()); +} + +/* * * * * * * * * * * * * * * * * * * * + * Parsing + */ + +/* esc(c) + * + * Format char c suitable for printing in an error message. + */ +static inline string esc(char c) { + char buf[12]; + if (static_cast(c) >= 0x20 && static_cast(c) <= 0x7f) { + snprintf(buf, sizeof buf, "'%c' (%d)", c, c); + } else { + snprintf(buf, sizeof buf, "(%d)", c); + } + return string(buf); +} + +static inline bool in_range(long x, long lower, long upper) { + return (x >= lower && x <= upper); +} + +namespace { +/* JsonParser + * + * Object that tracks all state of an in-progress parse. + */ +struct JsonParser final { + + /* State + */ + const string &str; + size_t i; + string &err; + bool failed; + const JsonParse strategy; + + /* fail(msg, err_ret = Json()) + * + * Mark this parse as failed. + */ + Json fail(string &&msg) { + return fail(move(msg), Json()); + } + + template + T fail(string &&msg, const T err_ret) { + if (!failed) + err = std::move(msg); + failed = true; + return err_ret; + } + + /* consume_whitespace() + * + * Advance until the current character is non-whitespace. + */ + void consume_whitespace() { + while (str[i] == ' ' || str[i] == '\r' || str[i] == '\n' || str[i] == '\t') + i++; + } + + /* consume_comment() + * + * Advance comments (c-style inline and multiline). 
+ */ + bool consume_comment() { + bool comment_found = false; + if (str[i] == '/') { + i++; + if (i == str.size()) + return fail("unexpected end of input after start of comment", false); + if (str[i] == '/') { // inline comment + i++; + // advance until next line, or end of input + while (i < str.size() && str[i] != '\n') { + i++; + } + comment_found = true; + } + else if (str[i] == '*') { // multiline comment + i++; + if (i > str.size()-2) + return fail("unexpected end of input inside multi-line comment", false); + // advance until closing tokens + while (!(str[i] == '*' && str[i+1] == '/')) { + i++; + if (i > str.size()-2) + return fail( + "unexpected end of input inside multi-line comment", false); + } + i += 2; + comment_found = true; + } + else + return fail("malformed comment", false); + } + return comment_found; + } + + /* consume_garbage() + * + * Advance until the current character is non-whitespace and non-comment. + */ + void consume_garbage() { + consume_whitespace(); + if(strategy == JsonParse::COMMENTS) { + bool comment_found = false; + do { + comment_found = consume_comment(); + if (failed) return; + consume_whitespace(); + } + while(comment_found); + } + } + + /* get_next_token() + * + * Return the next non-whitespace character. If the end of the input is reached, + * flag an error and return 0. + */ + char get_next_token() { + consume_garbage(); + if (failed) return static_cast(0); + if (i == str.size()) + return fail("unexpected end of input", static_cast(0)); + + return str[i++]; + } + + /* encode_utf8(pt, out) + * + * Encode pt as UTF-8 and add it to out. 
+ */ + void encode_utf8(long pt, string & out) { + if (pt < 0) + return; + + if (pt < 0x80) { + out += static_cast(pt); + } else if (pt < 0x800) { + out += static_cast((pt >> 6) | 0xC0); + out += static_cast((pt & 0x3F) | 0x80); + } else if (pt < 0x10000) { + out += static_cast((pt >> 12) | 0xE0); + out += static_cast(((pt >> 6) & 0x3F) | 0x80); + out += static_cast((pt & 0x3F) | 0x80); + } else { + out += static_cast((pt >> 18) | 0xF0); + out += static_cast(((pt >> 12) & 0x3F) | 0x80); + out += static_cast(((pt >> 6) & 0x3F) | 0x80); + out += static_cast((pt & 0x3F) | 0x80); + } + } + + /* parse_string() + * + * Parse a string, starting at the current position. + */ + string parse_string() { + string out; + long last_escaped_codepoint = -1; + while (true) { + if (i == str.size()) + return fail("unexpected end of input in string", ""); + + char ch = str[i++]; + + if (ch == '"') { + encode_utf8(last_escaped_codepoint, out); + return out; + } + + if (in_range(ch, 0, 0x1f)) + return fail("unescaped " + esc(ch) + " in string", ""); + + // The usual case: non-escaped characters + if (ch != '\\') { + encode_utf8(last_escaped_codepoint, out); + last_escaped_codepoint = -1; + out += ch; + continue; + } + + // Handle escapes + if (i == str.size()) + return fail("unexpected end of input in string", ""); + + ch = str[i++]; + + if (ch == 'u') { + // Extract 4-byte escape sequence + string esc = str.substr(i, 4); + // Explicitly check length of the substring. The following loop + // relies on std::string returning the terminating NUL when + // accessing str[length]. Checking here reduces brittleness. 
+ if (esc.length() < 4) { + return fail("bad \\u escape: " + esc, ""); + } + for (size_t j = 0; j < 4; j++) { + if (!in_range(esc[j], 'a', 'f') && !in_range(esc[j], 'A', 'F') + && !in_range(esc[j], '0', '9')) + return fail("bad \\u escape: " + esc, ""); + } + + long codepoint = strtol(esc.data(), nullptr, 16); + + // JSON specifies that characters outside the BMP shall be encoded as a pair + // of 4-hex-digit \u escapes encoding their surrogate pair components. Check + // whether we're in the middle of such a beast: the previous codepoint was an + // escaped lead (high) surrogate, and this is a trail (low) surrogate. + if (in_range(last_escaped_codepoint, 0xD800, 0xDBFF) + && in_range(codepoint, 0xDC00, 0xDFFF)) { + // Reassemble the two surrogate pairs into one astral-plane character, per + // the UTF-16 algorithm. + encode_utf8((((last_escaped_codepoint - 0xD800) << 10) + | (codepoint - 0xDC00)) + 0x10000, out); + last_escaped_codepoint = -1; + } else { + encode_utf8(last_escaped_codepoint, out); + last_escaped_codepoint = codepoint; + } + + i += 4; + continue; + } + + encode_utf8(last_escaped_codepoint, out); + last_escaped_codepoint = -1; + + if (ch == 'b') { + out += '\b'; + } else if (ch == 'f') { + out += '\f'; + } else if (ch == 'n') { + out += '\n'; + } else if (ch == 'r') { + out += '\r'; + } else if (ch == 't') { + out += '\t'; + } else if (ch == '"' || ch == '\\' || ch == '/') { + out += ch; + } else { + return fail("invalid escape character " + esc(ch), ""); + } + } + } + + /* parse_number() + * + * Parse a double. + */ + Json parse_number() { + size_t start_pos = i; + + if (str[i] == '-') + i++; + + // Integer part + if (str[i] == '0') { + i++; + if (in_range(str[i], '0', '9')) + return fail("leading 0s not permitted in numbers"); + } else if (in_range(str[i], '1', '9')) { + i++; + while (in_range(str[i], '0', '9')) + i++; + } else { + return fail("invalid " + esc(str[i]) + " in number"); + } + + if (str[i] != '.' 
&& str[i] != 'e' && str[i] != 'E' + && (i - start_pos) <= static_cast(std::numeric_limits::digits10)) { + return std::atoi(str.c_str() + start_pos); + } + + // Decimal part + if (str[i] == '.') { + i++; + if (!in_range(str[i], '0', '9')) + return fail("at least one digit required in fractional part"); + + while (in_range(str[i], '0', '9')) + i++; + } + + // Exponent part + if (str[i] == 'e' || str[i] == 'E') { + i++; + + if (str[i] == '+' || str[i] == '-') + i++; + + if (!in_range(str[i], '0', '9')) + return fail("at least one digit required in exponent"); + + while (in_range(str[i], '0', '9')) + i++; + } + + return std::strtod(str.c_str() + start_pos, nullptr); + } + + /* expect(str, res) + * + * Expect that 'str' starts at the character that was just read. If it does, advance + * the input and return res. If not, flag an error. + */ + Json expect(const string &expected, Json res) { + assert(i != 0); + i--; + if (str.compare(i, expected.length(), expected) == 0) { + i += expected.length(); + return res; + } else { + return fail("parse error: expected " + expected + ", got " + str.substr(i, expected.length())); + } + } + + /* parse_json() + * + * Parse a JSON object. 
+ */ + Json parse_json(int depth) { + if (depth > max_depth) { + return fail("exceeded maximum nesting depth"); + } + + char ch = get_next_token(); + if (failed) + return Json(); + + if (ch == '-' || (ch >= '0' && ch <= '9')) { + i--; + return parse_number(); + } + + if (ch == 't') + return expect("true", true); + + if (ch == 'f') + return expect("false", false); + + if (ch == 'n') + return expect("null", Json()); + + if (ch == '"') + return parse_string(); + + if (ch == '{') { + map data; + ch = get_next_token(); + if (ch == '}') + return data; + + while (1) { + if (ch != '"') + return fail("expected '\"' in object, got " + esc(ch)); + + string key = parse_string(); + if (failed) + return Json(); + + ch = get_next_token(); + if (ch != ':') + return fail("expected ':' in object, got " + esc(ch)); + + data[std::move(key)] = parse_json(depth + 1); + if (failed) + return Json(); + + ch = get_next_token(); + if (ch == '}') + break; + if (ch != ',') + return fail("expected ',' in object, got " + esc(ch)); + + ch = get_next_token(); + } + return data; + } + + if (ch == '[') { + vector data; + ch = get_next_token(); + if (ch == ']') + return data; + + while (1) { + i--; + data.push_back(parse_json(depth + 1)); + if (failed) + return Json(); + + ch = get_next_token(); + if (ch == ']') + break; + if (ch != ',') + return fail("expected ',' in list, got " + esc(ch)); + + ch = get_next_token(); + (void)ch; + } + return data; + } + + return fail("expected value, got " + esc(ch)); + } +}; +}//namespace { + +Json Json::parse(const string &in, string &err, JsonParse strategy) { + JsonParser parser { in, 0, err, false, strategy }; + Json result = parser.parse_json(0); + + // Check for any trailing garbage + parser.consume_garbage(); + if (parser.failed) + return Json(); + if (parser.i != in.size()) + return parser.fail("unexpected trailing " + esc(in[parser.i])); + + return result; +} + +// Documented in json11.hpp +vector Json::parse_multi(const string &in, + 
std::string::size_type &parser_stop_pos, + string &err, + JsonParse strategy) { + JsonParser parser { in, 0, err, false, strategy }; + parser_stop_pos = 0; + vector json_vec; + while (parser.i != in.size() && !parser.failed) { + json_vec.push_back(parser.parse_json(0)); + if (parser.failed) + break; + + // Check for another object + parser.consume_garbage(); + if (parser.failed) + break; + parser_stop_pos = parser.i; + } + return json_vec; +} + +/* * * * * * * * * * * * * * * * * * * * + * Shape-checking + */ + +bool Json::has_shape(const shape & types, string & err) const { + if (!is_object()) { + err = "expected JSON object, got " + dump(); + return false; + } + + const auto& obj_items = object_items(); + for (auto & item : types) { + const auto it = obj_items.find(item.first); + if (it == obj_items.cend() || it->second.type() != item.second) { + err = "bad type for " + item.first + " in " + dump(); + return false; + } + } + + return true; +} + +} // namespace json11 diff --git a/TMessagesProj/jni/voip/tgcalls/third-party/json11.hpp b/TMessagesProj/jni/voip/tgcalls/third-party/json11.hpp new file mode 100644 index 000000000..0c47d0509 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/third-party/json11.hpp @@ -0,0 +1,232 @@ +/* json11 + * + * json11 is a tiny JSON library for C++11, providing JSON parsing and serialization. + * + * The core object provided by the library is json11::Json. A Json object represents any JSON + * value: null, bool, number (int or double), string (std::string), array (std::vector), or + * object (std::map). + * + * Json objects act like values: they can be assigned, copied, moved, compared for equality or + * order, etc. There are also helper methods Json::dump, to serialize a Json to a string, and + * Json::parse (static) to parse a std::string as a Json object. + * + * Internally, the various types of Json object are represented by the JsonValue class + * hierarchy. 
+ * + * A note on numbers - JSON specifies the syntax of number formatting but not its semantics, + * so some JSON implementations distinguish between integers and floating-point numbers, while + * some don't. In json11, we choose the latter. Because some JSON implementations (namely + * Javascript itself) treat all numbers as the same type, distinguishing the two leads + * to JSON that will be *silently* changed by a round-trip through those implementations. + * Dangerous! To avoid that risk, json11 stores all numbers as double internally, but also + * provides integer helpers. + * + * Fortunately, double-precision IEEE754 ('double') can precisely store any integer in the + * range +/-2^53, which includes every 'int' on most systems. (Timestamps often use int64 + * or long long to avoid the Y2038K problem; a double storing microseconds since some epoch + * will be exact for +/- 275 years.) + */ + +/* Copyright (c) 2013 Dropbox, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ + +#pragma once + +#include +#include +#include +#include +#include + +#ifdef _MSC_VER + #if _MSC_VER <= 1800 // VS 2013 + #ifndef noexcept + #define noexcept throw() + #endif + + #ifndef snprintf + #define snprintf _snprintf_s + #endif + #endif +#endif + +namespace json11 { + +enum JsonParse { + STANDARD, COMMENTS +}; + +class JsonValue; + +class Json final { +public: + // Types + enum Type { + NUL, NUMBER, BOOL, STRING, ARRAY, OBJECT + }; + + // Array and object typedefs + typedef std::vector array; + typedef std::map object; + + // Constructors for the various types of JSON value. + Json() noexcept; // NUL + Json(std::nullptr_t) noexcept; // NUL + Json(double value); // NUMBER + Json(int value); // NUMBER + Json(bool value); // BOOL + Json(const std::string &value); // STRING + Json(std::string &&value); // STRING + Json(const char * value); // STRING + Json(const array &values); // ARRAY + Json(array &&values); // ARRAY + Json(const object &values); // OBJECT + Json(object &&values); // OBJECT + + // Implicit constructor: anything with a to_json() function. + template + Json(const T & t) : Json(t.to_json()) {} + + // Implicit constructor: map-like objects (std::map, std::unordered_map, etc) + template ().begin()->first)>::value + && std::is_constructible().begin()->second)>::value, + int>::type = 0> + Json(const M & m) : Json(object(m.begin(), m.end())) {} + + // Implicit constructor: vector-like objects (std::list, std::vector, std::set, etc) + template ().begin())>::value, + int>::type = 0> + Json(const V & v) : Json(array(v.begin(), v.end())) {} + + // This prevents Json(some_pointer) from accidentally producing a bool. Use + // Json(bool(some_pointer)) if that behavior is desired. 
+ Json(void *) = delete; + + // Accessors + Type type() const; + + bool is_null() const { return type() == NUL; } + bool is_number() const { return type() == NUMBER; } + bool is_bool() const { return type() == BOOL; } + bool is_string() const { return type() == STRING; } + bool is_array() const { return type() == ARRAY; } + bool is_object() const { return type() == OBJECT; } + + // Return the enclosed value if this is a number, 0 otherwise. Note that json11 does not + // distinguish between integer and non-integer numbers - number_value() and int_value() + // can both be applied to a NUMBER-typed object. + double number_value() const; + int int_value() const; + + // Return the enclosed value if this is a boolean, false otherwise. + bool bool_value() const; + // Return the enclosed string if this is a string, "" otherwise. + const std::string &string_value() const; + // Return the enclosed std::vector if this is an array, or an empty vector otherwise. + const array &array_items() const; + // Return the enclosed std::map if this is an object, or an empty map otherwise. + const object &object_items() const; + + // Return a reference to arr[i] if this is an array, Json() otherwise. + const Json & operator[](size_t i) const; + // Return a reference to obj[key] if this is an object, Json() otherwise. + const Json & operator[](const std::string &key) const; + + // Serialize. + void dump(std::string &out) const; + std::string dump() const { + std::string out; + dump(out); + return out; + } + + // Parse. If parse fails, return Json() and assign an error message to err. 
+ static Json parse(const std::string & in, + std::string & err, + JsonParse strategy = JsonParse::STANDARD); + static Json parse(const char * in, + std::string & err, + JsonParse strategy = JsonParse::STANDARD) { + if (in) { + return parse(std::string(in), err, strategy); + } else { + err = "null input"; + return nullptr; + } + } + // Parse multiple objects, concatenated or separated by whitespace + static std::vector parse_multi( + const std::string & in, + std::string::size_type & parser_stop_pos, + std::string & err, + JsonParse strategy = JsonParse::STANDARD); + + static inline std::vector parse_multi( + const std::string & in, + std::string & err, + JsonParse strategy = JsonParse::STANDARD) { + std::string::size_type parser_stop_pos; + return parse_multi(in, parser_stop_pos, err, strategy); + } + + bool operator== (const Json &rhs) const; + bool operator< (const Json &rhs) const; + bool operator!= (const Json &rhs) const { return !(*this == rhs); } + bool operator<= (const Json &rhs) const { return !(rhs < *this); } + bool operator> (const Json &rhs) const { return (rhs < *this); } + bool operator>= (const Json &rhs) const { return !(*this < rhs); } + + /* has_shape(types, err) + * + * Return true if this is a JSON object and, for each item in types, has a field of + * the given type. If not, return false and set err to a descriptive message. + */ + typedef std::initializer_list> shape; + bool has_shape(const shape & types, std::string & err) const; + +private: + std::shared_ptr m_ptr; +}; + +// Internal class hierarchy - JsonValue objects are not exposed to users of this API. 
+class JsonValue { +protected: + friend class Json; + friend class JsonInt; + friend class JsonDouble; + virtual Json::Type type() const = 0; + virtual bool equals(const JsonValue * other) const = 0; + virtual bool less(const JsonValue * other) const = 0; + virtual void dump(std::string &out) const = 0; + virtual double number_value() const; + virtual int int_value() const; + virtual bool bool_value() const; + virtual const std::string &string_value() const; + virtual const Json::array &array_items() const; + virtual const Json &operator[](size_t i) const; + virtual const Json::object &object_items() const; + virtual const Json &operator[](const std::string &key) const; + virtual ~JsonValue() {} +}; + +} // namespace json11 diff --git a/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.cpp b/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.cpp new file mode 100644 index 000000000..b1ccd8b95 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.cpp @@ -0,0 +1,2008 @@ +#include "v2/InstanceV2Impl.h" + +#include "LogSinkImpl.h" +#include "VideoCaptureInterfaceImpl.h" +#include "VideoCapturerInterface.h" +#include "v2/NativeNetworkingImpl.h" +#include "v2/Signaling.h" + +#include "CodecSelectHelper.h" +#include "platform/PlatformInterface.h" + +#include "api/audio_codecs/audio_decoder_factory_template.h" +#include "api/audio_codecs/audio_encoder_factory_template.h" +#include "api/audio_codecs/opus/audio_decoder_opus.h" +#include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" +#include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/audio_codecs/L16/audio_decoder_L16.h" +#include "api/audio_codecs/L16/audio_encoder_L16.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "media/engine/webrtc_media_engine.h" +#include "system_wrappers/include/field_trial.h" +#include "api/video/builtin_video_bitrate_allocator_factory.h" +#include "call/call.h" +#include "modules/rtp_rtcp/source/rtp_utility.h" +#include 
"api/call/audio_sink.h" +#include "modules/audio_processing/audio_buffer.h" +#include "absl/strings/match.h" +#include "modules/audio_processing/agc2/vad_with_level.h" +#include "pc/channel_manager.h" +#include "media/base/rtp_data_engine.h" +#include "audio/audio_state.h" +#include "modules/audio_coding/neteq/default_neteq_factory.h" +#include "modules/audio_coding/include/audio_coding_module.h" +#include "api/candidate.h" +#include "api/jsep_ice_candidate.h" +#include "media/base/h264_profile_level_id.h" +#include "pc/used_ids.h" + +#include "AudioFrame.h" +#include "ThreadLocalObject.h" +#include "Manager.h" +#include "NetworkManager.h" +#include "VideoCaptureInterfaceImpl.h" +#include "platform/PlatformInterface.h" +#include "LogSinkImpl.h" +#include "CodecSelectHelper.h" +#include "AudioDeviceHelper.h" +#include "SignalingEncryption.h" + +#include +#include + +namespace tgcalls { +namespace { + +static int stringToInt(std::string const &string) { + std::stringstream stringStream(string); + int value = 0; + stringStream >> value; + return value; +} + +static std::string intToString(int value) { + std::ostringstream stringStream; + stringStream << value; + return stringStream.str(); +} + +static VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(VideoCaptureInterface *videoCapture) { + return videoCapture + ? static_cast(videoCapture)->object()->getSyncAssumingSameThread() + : nullptr; +} + +struct OutgoingVideoFormat { + cricket::VideoCodec videoCodec; + absl::optional rtxCodec; +}; + +static void addDefaultFeedbackParams(cricket::VideoCodec *codec) { + // Don't add any feedback params for RED and ULPFEC. 
+ if (codec->name == cricket::kRedCodecName || codec->name == cricket::kUlpfecCodecName) { + return; + } + codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty)); + codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty)); + // Don't add any more feedback params for FLEXFEC. + if (codec->name == cricket::kFlexfecCodecName) { + return; + } + codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)); + codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); + codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli)); +} + +template +static bool IsRtxCodec(const C& codec) { + return absl::EqualsIgnoreCase(codec.name, cricket::kRtxCodecName); +} + +template +static bool ReferencedCodecsMatch(const std::vector& codecs1, + const int codec1_id, + const std::vector& codecs2, + const int codec2_id) { + const C* codec1 = FindCodecById(codecs1, codec1_id); + const C* codec2 = FindCodecById(codecs2, codec2_id); + return codec1 != nullptr && codec2 != nullptr && codec1->Matches(*codec2); +} + +// Finds a codec in |codecs2| that matches |codec_to_match|, which is +// a member of |codecs1|. If |codec_to_match| is an RTX codec, both +// the codecs themselves and their associated codecs must match. +template +static bool FindMatchingCodec(const std::vector& codecs1, + const std::vector& codecs2, + const C& codec_to_match, + C* found_codec) { + // |codec_to_match| should be a member of |codecs1|, in order to look up RTX + // codecs' associated codecs correctly. If not, that's a programming error. 
+ RTC_DCHECK(absl::c_any_of(codecs1, [&codec_to_match](const C& codec) { + return &codec == &codec_to_match; + })); + for (const C& potential_match : codecs2) { + if (potential_match.Matches(codec_to_match)) { + if (IsRtxCodec(codec_to_match)) { + int apt_value_1 = 0; + int apt_value_2 = 0; + if (!codec_to_match.GetParam(cricket::kCodecParamAssociatedPayloadType, + &apt_value_1) || + !potential_match.GetParam(cricket::kCodecParamAssociatedPayloadType, + &apt_value_2)) { + RTC_LOG(LS_WARNING) << "RTX missing associated payload type."; + continue; + } + if (!ReferencedCodecsMatch(codecs1, apt_value_1, codecs2, + apt_value_2)) { + continue; + } + } + if (found_codec) { + *found_codec = potential_match; + } + return true; + } + } + return false; +} + +template +static void NegotiatePacketization(const C& local_codec, + const C& remote_codec, + C* negotiated_codec) {} + +template <> +void NegotiatePacketization(const cricket::VideoCodec& local_codec, + const cricket::VideoCodec& remote_codec, + cricket::VideoCodec* negotiated_codec) { + negotiated_codec->packetization = + cricket::VideoCodec::IntersectPacketization(local_codec, remote_codec); +} + +template +static void NegotiateCodecs(const std::vector& local_codecs, + const std::vector& offered_codecs, + std::vector* negotiated_codecs, + bool keep_offer_order) { + for (const C& ours : local_codecs) { + C theirs; + // Note that we intentionally only find one matching codec for each of our + // local codecs, in case the remote offer contains duplicate codecs. + if (FindMatchingCodec(local_codecs, offered_codecs, ours, &theirs)) { + C negotiated = ours; + NegotiatePacketization(ours, theirs, &negotiated); + negotiated.IntersectFeedbackParams(theirs); + if (IsRtxCodec(negotiated)) { + const auto apt_it = + theirs.params.find(cricket::kCodecParamAssociatedPayloadType); + // FindMatchingCodec shouldn't return something with no apt value. 
+ RTC_DCHECK(apt_it != theirs.params.end()); + negotiated.SetParam(cricket::kCodecParamAssociatedPayloadType, apt_it->second); + } + if (absl::EqualsIgnoreCase(ours.name, cricket::kH264CodecName)) { + webrtc::H264::GenerateProfileLevelIdForAnswer( + ours.params, theirs.params, &negotiated.params); + } + negotiated.id = theirs.id; + negotiated.name = theirs.name; + negotiated_codecs->push_back(std::move(negotiated)); + } + } + if (keep_offer_order) { + // RFC3264: Although the answerer MAY list the formats in their desired + // order of preference, it is RECOMMENDED that unless there is a + // specific reason, the answerer list formats in the same relative order + // they were present in the offer. + // This can be skipped when the transceiver has any codec preferences. + std::unordered_map payload_type_preferences; + int preference = static_cast(offered_codecs.size() + 1); + for (const C& codec : offered_codecs) { + payload_type_preferences[codec.id] = preference--; + } + absl::c_sort(*negotiated_codecs, [&payload_type_preferences](const C& a, + const C& b) { + return payload_type_preferences[a.id] > payload_type_preferences[b.id]; + }); + } +} + +// Find the codec in |codec_list| that |rtx_codec| is associated with. +template +static const C* GetAssociatedCodec(const std::vector& codec_list, + const C& rtx_codec) { + std::string associated_pt_str; + if (!rtx_codec.GetParam(cricket::kCodecParamAssociatedPayloadType, + &associated_pt_str)) { + RTC_LOG(LS_WARNING) << "RTX codec " << rtx_codec.name + << " is missing an associated payload type."; + return nullptr; + } + + int associated_pt; + if (!rtc::FromString(associated_pt_str, &associated_pt)) { + RTC_LOG(LS_WARNING) << "Couldn't convert payload type " << associated_pt_str + << " of RTX codec " << rtx_codec.name + << " to an integer."; + return nullptr; + } + + // Find the associated reference codec for the reference RTX codec. 
+ const C* associated_codec = FindCodecById(codec_list, associated_pt); + if (!associated_codec) { + RTC_LOG(LS_WARNING) << "Couldn't find associated codec with payload type " + << associated_pt << " for RTX codec " << rtx_codec.name + << "."; + } + return associated_codec; +} + +// Adds all codecs from |reference_codecs| to |offered_codecs| that don't +// already exist in |offered_codecs| and ensure the payload types don't +// collide. +template +static void MergeCodecs(const std::vector& reference_codecs, + std::vector* offered_codecs, + cricket::UsedPayloadTypes* used_pltypes) { + // Add all new codecs that are not RTX codecs. + for (const C& reference_codec : reference_codecs) { + if (!IsRtxCodec(reference_codec) && + !FindMatchingCodec(reference_codecs, *offered_codecs, + reference_codec, nullptr)) { + C codec = reference_codec; + used_pltypes->FindAndSetIdUsed(&codec); + offered_codecs->push_back(codec); + } + } + + // Add all new RTX codecs. + for (const C& reference_codec : reference_codecs) { + if (IsRtxCodec(reference_codec) && + !FindMatchingCodec(reference_codecs, *offered_codecs, + reference_codec, nullptr)) { + C rtx_codec = reference_codec; + const C* associated_codec = + GetAssociatedCodec(reference_codecs, rtx_codec); + if (!associated_codec) { + continue; + } + // Find a codec in the offered list that matches the reference codec. + // Its payload type may be different than the reference codec. 
+ C matching_codec; + if (!FindMatchingCodec(reference_codecs, *offered_codecs, + *associated_codec, &matching_codec)) { + RTC_LOG(LS_WARNING) + << "Couldn't find matching " << associated_codec->name << " codec."; + continue; + } + + rtx_codec.params[cricket::kCodecParamAssociatedPayloadType] = + rtc::ToString(matching_codec.id); + used_pltypes->FindAndSetIdUsed(&rtx_codec); + offered_codecs->push_back(rtx_codec); + } + } +} + +static std::vector generateAvailableVideoFormats(std::vector const &formats) { + if (formats.empty()) { + return {}; + } + + constexpr int kFirstDynamicPayloadType = 120; + constexpr int kLastDynamicPayloadType = 127; + + int payload_type = kFirstDynamicPayloadType; + + std::vector result; + + bool codecSelected = false; + + for (const auto &format : formats) { + if (codecSelected) { + break; + } + + OutgoingVideoFormat resultFormat; + + cricket::VideoCodec codec(format); + codec.id = payload_type; + addDefaultFeedbackParams(&codec); + + if (!absl::EqualsIgnoreCase(codec.name, cricket::kVp8CodecName)) { + continue; + } + + resultFormat.videoCodec = codec; + codecSelected = true; + + // Increment payload type. + ++payload_type; + if (payload_type > kLastDynamicPayloadType) { + RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest."; + break; + } + + // Add associated RTX codec for non-FEC codecs. + if (!absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName) && + !absl::EqualsIgnoreCase(codec.name, cricket::kFlexfecCodecName)) { + resultFormat.rtxCodec = cricket::VideoCodec::CreateRtxCodec(payload_type, codec.id); + + // Increment payload type. 
+ ++payload_type; + if (payload_type > kLastDynamicPayloadType) { + RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest."; + break; + } + } + + result.push_back(std::move(resultFormat)); + } + return result; +} + +static void getCodecsFromMediaContent(signaling::MediaContent const &content, std::vector &codecs) { + for (const auto &payloadType : content.payloadTypes) { + cricket::VideoCodec codec(payloadType.id, payloadType.name); + for (const auto &feedbackType : payloadType.feedbackTypes) { + codec.AddFeedbackParam(cricket::FeedbackParam(feedbackType.type, feedbackType.subtype)); + } + for (const auto ¶meter : payloadType.parameters) { + codec.SetParam(parameter.first, parameter.second); + } + codecs.push_back(std::move(codec)); + } +} + +static std::vector getPayloadTypesFromVideoCodecs(std::vector const &codecs) { + std::vector payloadTypes; + + for (const auto &codec : codecs) { + signaling::PayloadType payloadType; + + payloadType.id = codec.id; + payloadType.name = codec.name; + payloadType.clockrate = 90000; + payloadType.channels = 0; + + for (const auto &feedbackParam : codec.feedback_params.params()) { + signaling::FeedbackType feedbackType; + feedbackType.type = feedbackParam.id(); + feedbackType.subtype = feedbackParam.param(); + payloadType.feedbackTypes.push_back(std::move(feedbackType)); + } + + for (const auto ¶m : codec.params) { + payloadType.parameters.push_back(std::make_pair(param.first, param.second)); + } + + payloadTypes.push_back(std::move(payloadType)); + } + + return payloadTypes; +} + +static void getCodecsFromMediaContent(signaling::MediaContent const &content, std::vector &codecs) { + for (const auto &payloadType : content.payloadTypes) { + cricket::AudioCodec codec(payloadType.id, payloadType.name, payloadType.clockrate, 0, payloadType.channels); + for (const auto &feedbackType : payloadType.feedbackTypes) { + codec.AddFeedbackParam(cricket::FeedbackParam(feedbackType.type, feedbackType.subtype)); + } + for (const 
auto ¶meter : payloadType.parameters) { + codec.SetParam(parameter.first, parameter.second); + } + codecs.push_back(std::move(codec)); + } +} + +static std::vector getPayloadTypesFromAudioCodecs(std::vector const &codecs) { + std::vector payloadTypes; + + for (const auto &codec : codecs) { + signaling::PayloadType payloadType; + + payloadType.id = codec.id; + payloadType.name = codec.name; + payloadType.clockrate = codec.clockrate; + payloadType.channels = (uint32_t)codec.channels; + + for (const auto &feedbackParam : codec.feedback_params.params()) { + signaling::FeedbackType feedbackType; + feedbackType.type = feedbackParam.id(); + feedbackType.subtype = feedbackParam.param(); + payloadType.feedbackTypes.push_back(std::move(feedbackType)); + } + + for (const auto ¶m : codec.params) { + payloadType.parameters.push_back(std::make_pair(param.first, param.second)); + } + + payloadTypes.push_back(std::move(payloadType)); + } + + return payloadTypes; +} + +template +struct NegotiatedMediaContent { + uint32_t ssrc = 0; + std::vector ssrcGroups; + std::vector rtpExtensions; + std::vector codecs; +}; + +static bool FindByUri(const cricket::RtpHeaderExtensions& extensions, + const webrtc::RtpExtension& ext_to_match, + webrtc::RtpExtension* found_extension) { + // We assume that all URIs are given in a canonical format. 
+ const webrtc::RtpExtension* found = + webrtc::RtpExtension::FindHeaderExtensionByUri(extensions, + ext_to_match.uri); + if (!found) { + return false; + } + if (found_extension) { + *found_extension = *found; + } + return true; +} + +template +static NegotiatedMediaContent negotiateMediaContent(signaling::MediaContent const &baseMediaContent, signaling::MediaContent const &localContent, signaling::MediaContent const &remoteContent, bool isAnswer) { + std::vector localCodecs; + getCodecsFromMediaContent(localContent, localCodecs); + + std::vector remoteCodecs; + getCodecsFromMediaContent(remoteContent, remoteCodecs); + + std::vector negotiatedCodecs; + + cricket::UsedPayloadTypes usedPayloadTypes; + NegotiateCodecs(localCodecs, remoteCodecs, &negotiatedCodecs, true); + + NegotiatedMediaContent result; + + result.ssrc = baseMediaContent.ssrc; + result.ssrcGroups = baseMediaContent.ssrcGroups; + result.codecs = std::move(negotiatedCodecs); + + cricket::UsedRtpHeaderExtensionIds extensionIds(cricket::UsedRtpHeaderExtensionIds::IdDomain::kOneByteOnly); + + for (const auto &extension : remoteContent.rtpExtensions) { + if (isAnswer) { + webrtc::RtpExtension found; + if (!FindByUri(localContent.rtpExtensions, extension, &found)) { + continue; + } + } + + webrtc::RtpExtension mutableExtension = extension; + extensionIds.FindAndSetIdUsed(&mutableExtension); + result.rtpExtensions.push_back(std::move(mutableExtension)); + } + + if (!isAnswer) { + for (const auto &extension : localContent.rtpExtensions) { + webrtc::RtpExtension found; + if (!FindByUri(result.rtpExtensions, extension, &found)) { + webrtc::RtpExtension mutableExtension = extension; + extensionIds.FindAndSetIdUsed(&mutableExtension); + result.rtpExtensions.push_back(std::move(mutableExtension)); + } + } + } + + return result; +} + +class OutgoingAudioChannel : public sigslot::has_slots<> { +public: + static absl::optional createOutgoingContentDescription() { + signaling::MediaContent mediaContent; + + auto 
generator = std::mt19937(std::random_device()()); + auto distribution = std::uniform_int_distribution(); + do { + mediaContent.ssrc = distribution(generator) & 0x7fffffffU; + } while (!mediaContent.ssrc); + + mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kAudioLevelUri, 1); + mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kAbsSendTimeUri, 2); + mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, 3); + + cricket::AudioCodec opusCodec(109, "opus", 48000, 0, 2); + opusCodec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc)); + opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1); + opusCodec.SetParam(cricket::kCodecParamMinPTime, 60); + + mediaContent.payloadTypes = getPayloadTypesFromAudioCodecs({ opusCodec }); + + return mediaContent; + } + +public: + OutgoingAudioChannel( + webrtc::Call *call, + cricket::ChannelManager *channelManager, + rtc::UniqueRandomIdGenerator *uniqueRandomIdGenerator, + webrtc::LocalAudioSinkAdapter *audioSource, + webrtc::RtpTransport *rtpTransport, + NegotiatedMediaContent const &mediaContent, + std::shared_ptr threads + ) : + _ssrc(mediaContent.ssrc), + _call(call), + _channelManager(channelManager), + _audioSource(audioSource) { + cricket::AudioOptions audioOptions; + bool _disableOutgoingAudioProcessing = false; + + if (_disableOutgoingAudioProcessing) { + audioOptions.echo_cancellation = false; + audioOptions.noise_suppression = false; + audioOptions.auto_gain_control = false; + audioOptions.highpass_filter = false; + audioOptions.typing_detection = false; + audioOptions.experimental_agc = false; + audioOptions.experimental_ns = false; + audioOptions.residual_echo_detector = false; + } else { + audioOptions.echo_cancellation = true; + audioOptions.noise_suppression = true; + } + + std::vector streamIds; + streamIds.push_back("1"); + + _outgoingAudioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, 
threads->getMediaThread(), "audio0", false, NativeNetworkingImpl::getDefaulCryptoOptions(), uniqueRandomIdGenerator, audioOptions); + + std::vector codecs; + for (const auto &codec : mediaContent.codecs) { + if (codec.name == "opus") { + auto mutableCodec = codec; + + const uint8_t opusMinBitrateKbps = 16; + const uint8_t opusMaxBitrateKbps = 32; + const uint8_t opusStartBitrateKbps = 32; + const uint8_t opusPTimeMs = 60; + + mutableCodec.SetParam(cricket::kCodecParamMinBitrate, opusMinBitrateKbps); + mutableCodec.SetParam(cricket::kCodecParamStartBitrate, opusStartBitrateKbps); + mutableCodec.SetParam(cricket::kCodecParamMaxBitrate, opusMaxBitrateKbps); + mutableCodec.SetParam(cricket::kCodecParamUseInbandFec, 1); + mutableCodec.SetParam(cricket::kCodecParamPTime, opusPTimeMs); + + codecs.push_back(std::move(mutableCodec)); + } + } + + auto outgoingAudioDescription = std::make_unique(); + for (const auto &rtpExtension : mediaContent.rtpExtensions) { + outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id)); + } + outgoingAudioDescription->set_rtcp_mux(true); + outgoingAudioDescription->set_rtcp_reduced_size(true); + outgoingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); + outgoingAudioDescription->set_codecs(codecs); + outgoingAudioDescription->set_bandwidth(1032000); + outgoingAudioDescription->AddStream(cricket::StreamParams::CreateLegacy(_ssrc)); + + auto incomingAudioDescription = std::make_unique(); + for (const auto &rtpExtension : mediaContent.rtpExtensions) { + incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id)); + } + incomingAudioDescription->set_rtcp_mux(true); + incomingAudioDescription->set_rtcp_reduced_size(true); + incomingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); + incomingAudioDescription->set_codecs(codecs); + incomingAudioDescription->set_bandwidth(1032000); + + 
_outgoingAudioChannel->SetPayloadTypeDemuxingEnabled(false); + _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); + _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + + _outgoingAudioChannel->SignalSentPacket().connect(this, &OutgoingAudioChannel::OnSentPacket_w); + //_outgoingAudioChannel->UpdateRtpTransport(nullptr); + + setIsMuted(false); + } + + ~OutgoingAudioChannel() { + _outgoingAudioChannel->SignalSentPacket().disconnect(this); + _outgoingAudioChannel->media_channel()->SetAudioSend(_ssrc, false, nullptr, _audioSource); + _outgoingAudioChannel->Enable(false); + _channelManager->DestroyVoiceChannel(_outgoingAudioChannel); + _outgoingAudioChannel = nullptr; + } + + void setIsMuted(bool isMuted) { + if (_isMuted != isMuted) { + _isMuted = false; + + _outgoingAudioChannel->Enable(!_isMuted); + _outgoingAudioChannel->media_channel()->SetAudioSend(_ssrc, !_isMuted, nullptr, _audioSource); + } + } + +private: + void OnSentPacket_w(const rtc::SentPacket& sent_packet) { + _call->OnSentPacket(sent_packet); + } + +private: + uint32_t _ssrc = 0; + webrtc::Call *_call = nullptr; + cricket::ChannelManager *_channelManager = nullptr; + webrtc::LocalAudioSinkAdapter *_audioSource = nullptr; + cricket::VoiceChannel *_outgoingAudioChannel = nullptr; + + bool _isMuted = true; +}; + +class IncomingV2AudioChannel : public sigslot::has_slots<> { +public: + IncomingV2AudioChannel( + cricket::ChannelManager *channelManager, + webrtc::Call *call, + webrtc::RtpTransport *rtpTransport, + rtc::UniqueRandomIdGenerator *randomIdGenerator, + NegotiatedMediaContent const &mediaContent, + std::shared_ptr threads) : + _ssrc(mediaContent.ssrc), + _channelManager(channelManager), + _call(call) { + _creationTimestamp = rtc::TimeMillis(); + + cricket::AudioOptions audioOptions; + audioOptions.audio_jitter_buffer_fast_accelerate = true; + audioOptions.audio_jitter_buffer_min_delay_ms 
= 50; + + std::string streamId = std::string("stream1"); + + _audioChannel = _channelManager->CreateVoiceChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "0", false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, audioOptions); + + auto audioCodecs = mediaContent.codecs; + + auto outgoingAudioDescription = std::make_unique(); + for (const auto &rtpExtension : mediaContent.rtpExtensions) { + outgoingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id)); + } + outgoingAudioDescription->set_rtcp_mux(true); + outgoingAudioDescription->set_rtcp_reduced_size(true); + outgoingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); + outgoingAudioDescription->set_codecs(audioCodecs); + outgoingAudioDescription->set_bandwidth(1032000); + + auto incomingAudioDescription = std::make_unique(); + for (const auto &rtpExtension : mediaContent.rtpExtensions) { + incomingAudioDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id)); + } + incomingAudioDescription->set_rtcp_mux(true); + incomingAudioDescription->set_rtcp_reduced_size(true); + incomingAudioDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); + incomingAudioDescription->set_codecs(audioCodecs); + incomingAudioDescription->set_bandwidth(1032000); + cricket::StreamParams streamParams = cricket::StreamParams::CreateLegacy(mediaContent.ssrc); + streamParams.set_stream_ids({ streamId }); + incomingAudioDescription->AddStream(streamParams); + + _audioChannel->SetPayloadTypeDemuxingEnabled(false); + _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); + _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); + + outgoingAudioDescription.reset(); + incomingAudioDescription.reset(); + + //std::unique_ptr audioLevelSink(new AudioSinkImpl(onAudioLevelUpdated, _ssrc, 
std::move(onAudioFrame))); + //_audioChannel->media_channel()->SetRawAudioSink(ssrc.networkSsrc, std::move(audioLevelSink)); + + _audioChannel->SignalSentPacket().connect(this, &IncomingV2AudioChannel::OnSentPacket_w); + //_audioChannel->UpdateRtpTransport(nullptr); + + _audioChannel->Enable(true); + } + + ~IncomingV2AudioChannel() { + _audioChannel->SignalSentPacket().disconnect(this); + _audioChannel->Enable(false); + _channelManager->DestroyVoiceChannel(_audioChannel); + _audioChannel = nullptr; + } + + void setVolume(double value) { + _audioChannel->media_channel()->SetOutputVolume(_ssrc, value); + } + + void updateActivity() { + _activityTimestamp = rtc::TimeMillis(); + } + + int64_t getActivity() { + return _activityTimestamp; + } + +private: + void OnSentPacket_w(const rtc::SentPacket& sent_packet) { + _call->OnSentPacket(sent_packet); + } + +private: + uint32_t _ssrc = 0; + // Memory is managed by _channelManager + cricket::VoiceChannel *_audioChannel = nullptr; + // Memory is managed externally + cricket::ChannelManager *_channelManager = nullptr; + webrtc::Call *_call = nullptr; + int64_t _creationTimestamp = 0; + int64_t _activityTimestamp = 0; +}; + +class OutgoingVideoChannel : public sigslot::has_slots<>, public std::enable_shared_from_this { +public: + static absl::optional createOutgoingContentDescription(std::vector const &availableVideoFormats) { + signaling::MediaContent mediaContent; + + auto generator = std::mt19937(std::random_device()()); + auto distribution = std::uniform_int_distribution(); + do { + mediaContent.ssrc = distribution(generator) & 0x7fffffffU; + } while (!mediaContent.ssrc); + + mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kAbsSendTimeUri, 2); + mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, 3); + mediaContent.rtpExtensions.emplace_back(webrtc::RtpExtension::kVideoRotationUri, 13); + + signaling::SsrcGroup fidGroup; + fidGroup.semantics = "FID"; + 
fidGroup.ssrcs.push_back(mediaContent.ssrc); + fidGroup.ssrcs.push_back(mediaContent.ssrc + 1); + mediaContent.ssrcGroups.push_back(std::move(fidGroup)); + + const auto videoFormats = generateAvailableVideoFormats(availableVideoFormats); + + for (const auto &format : videoFormats) { + signaling::PayloadType videoPayload; + videoPayload.id = format.videoCodec.id; + videoPayload.name = format.videoCodec.name; + videoPayload.clockrate = format.videoCodec.clockrate; + videoPayload.channels = 0; + + std::vector videoFeedbackTypes; + + signaling::FeedbackType fbGoogRemb; + fbGoogRemb.type = "goog-remb"; + videoFeedbackTypes.push_back(fbGoogRemb); + + signaling::FeedbackType fbTransportCc; + fbTransportCc.type = "transport-cc"; + videoFeedbackTypes.push_back(fbTransportCc); + + signaling::FeedbackType fbCcmFir; + fbCcmFir.type = "ccm"; + fbCcmFir.subtype = "fir"; + videoFeedbackTypes.push_back(fbCcmFir); + + signaling::FeedbackType fbNack; + fbNack.type = "nack"; + videoFeedbackTypes.push_back(fbNack); + + signaling::FeedbackType fbNackPli; + fbNackPli.type = "nack"; + fbNackPli.subtype = "pli"; + videoFeedbackTypes.push_back(fbNackPli); + + videoPayload.feedbackTypes = videoFeedbackTypes; + videoPayload.parameters = {}; + + mediaContent.payloadTypes.push_back(std::move(videoPayload)); + + if (format.rtxCodec) { + signaling::PayloadType rtxPayload; + rtxPayload.id = format.rtxCodec->id; + rtxPayload.name = format.rtxCodec->name; + rtxPayload.clockrate = format.rtxCodec->clockrate; + rtxPayload.parameters.push_back(std::make_pair("apt", intToString(videoPayload.id))); + mediaContent.payloadTypes.push_back(std::move(rtxPayload)); + } + } + + return mediaContent; + } + +public: + OutgoingVideoChannel( + std::shared_ptr threads, + cricket::ChannelManager *channelManager, + webrtc::Call *call, + webrtc::RtpTransport *rtpTransport, + rtc::UniqueRandomIdGenerator *randomIdGenerator, + webrtc::VideoBitrateAllocatorFactory *videoBitrateAllocatorFactory, + std::function 
rotationUpdated, + NegotiatedMediaContent const &mediaContent + ) : + _threads(threads), + _mainSsrc(mediaContent.ssrc), + _call(call), + _channelManager(channelManager), + _rotationUpdated(rotationUpdated) { + _outgoingVideoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "out1", false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), videoBitrateAllocatorFactory); + + auto videoCodecs = mediaContent.codecs; + + auto outgoingVideoDescription = std::make_unique(); + for (const auto &rtpExtension : mediaContent.rtpExtensions) { + outgoingVideoDescription->AddRtpHeaderExtension(rtpExtension); + } + + outgoingVideoDescription->set_rtcp_mux(true); + outgoingVideoDescription->set_rtcp_reduced_size(true); + outgoingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); + outgoingVideoDescription->set_codecs(videoCodecs); + outgoingVideoDescription->set_bandwidth(1032000); + + cricket::StreamParams videoSendStreamParams; + + for (const auto &ssrcGroup : mediaContent.ssrcGroups) { + for (auto ssrc : ssrcGroup.ssrcs) { + videoSendStreamParams.ssrcs.push_back(ssrc); + } + + cricket::SsrcGroup mappedGroup(ssrcGroup.semantics, ssrcGroup.ssrcs); + videoSendStreamParams.ssrc_groups.push_back(std::move(mappedGroup)); + } + + videoSendStreamParams.cname = "cname"; + + outgoingVideoDescription->AddStream(videoSendStreamParams); + + auto incomingVideoDescription = std::make_unique(); + for (const auto &rtpExtension : mediaContent.rtpExtensions) { + incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id)); + } + incomingVideoDescription->set_rtcp_mux(true); + incomingVideoDescription->set_rtcp_reduced_size(true); + incomingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); + incomingVideoDescription->set_codecs(videoCodecs); + incomingVideoDescription->set_bandwidth(1032000); + + 
_outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false); + _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); + _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + + webrtc::RtpParameters rtpParameters = _outgoingVideoChannel->media_channel()->GetRtpSendParameters(mediaContent.ssrc); + + _outgoingVideoChannel->media_channel()->SetRtpSendParameters(mediaContent.ssrc, rtpParameters); + + _outgoingVideoChannel->SignalSentPacket().connect(this, &OutgoingVideoChannel::OnSentPacket_w); + //_outgoingVideoChannel->UpdateRtpTransport(nullptr); + + _outgoingVideoChannel->Enable(false); + _outgoingVideoChannel->media_channel()->SetVideoSend(mediaContent.ssrc, NULL, nullptr); + } + + ~OutgoingVideoChannel() { + _outgoingVideoChannel->SignalSentPacket().disconnect(this); + _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, nullptr, nullptr); + _outgoingVideoChannel->Enable(false); + _channelManager->DestroyVideoChannel(_outgoingVideoChannel); + _outgoingVideoChannel = nullptr; + } + + void setVideoCapture(std::shared_ptr videoCapture) { + _videoCapture = videoCapture; + + if (_videoCapture) { + _outgoingVideoChannel->Enable(true); + auto videoCaptureImpl = GetVideoCaptureAssumingSameThread(_videoCapture.get()); + _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, videoCaptureImpl->source()); + + const auto weak = std::weak_ptr(shared_from_this()); + videoCaptureImpl->setRotationUpdated([threads = _threads, weak](int angle) { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + signaling::MediaStateMessage::VideoRotation videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; + switch (angle) { + case 0: { + videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; + break; + } + case 90: { + videoRotation = 
signaling::MediaStateMessage::VideoRotation::Rotation90; + break; + } + case 180: { + videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation180; + break; + } + case 270: { + videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation270; + break; + } + default: { + videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; + break; + } + } + if (strong->_videoRotation != videoRotation) { + strong->_videoRotation = videoRotation; + strong->_rotationUpdated(); + } + }); + }); + + switch (videoCaptureImpl->getRotation()) { + case 0: { + _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; + break; + } + case 90: { + _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation90; + break; + } + case 180: { + _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation180; + break; + } + case 270: { + _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation270; + break; + } + default: { + _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; + break; + } + } + } else { + _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; + _outgoingVideoChannel->Enable(false); + _outgoingVideoChannel->media_channel()->SetVideoSend(_mainSsrc, NULL, nullptr); + } + } + +public: + std::shared_ptr videoCapture() { + return _videoCapture; + } + + signaling::MediaStateMessage::VideoRotation getRotation() { + return _videoRotation; + } + +private: + void OnSentPacket_w(const rtc::SentPacket& sent_packet) { + _call->OnSentPacket(sent_packet); + } + +private: + std::shared_ptr _threads; + + uint32_t _mainSsrc = 0; + webrtc::Call *_call = nullptr; + cricket::ChannelManager *_channelManager = nullptr; + cricket::VideoChannel *_outgoingVideoChannel = nullptr; + + std::function _rotationUpdated; + + std::shared_ptr _videoCapture; + signaling::MediaStateMessage::VideoRotation _videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; +}; + +class 
VideoSinkImpl : public rtc::VideoSinkInterface { +public: + VideoSinkImpl() { + } + + virtual ~VideoSinkImpl() { + } + + virtual void OnFrame(const webrtc::VideoFrame& frame) override { + //_lastFrame = frame; + for (int i = (int)(_sinks.size()) - 1; i >= 0; i--) { + auto strong = _sinks[i].lock(); + if (!strong) { + _sinks.erase(_sinks.begin() + i); + } else { + strong->OnFrame(frame); + } + } + } + + virtual void OnDiscardedFrame() override { + for (int i = (int)(_sinks.size()) - 1; i >= 0; i--) { + auto strong = _sinks[i].lock(); + if (!strong) { + _sinks.erase(_sinks.begin() + i); + } else { + strong->OnDiscardedFrame(); + } + } + } + + void addSink(std::weak_ptr> impl) { + _sinks.push_back(impl); + if (_lastFrame) { + auto strong = impl.lock(); + if (strong) { + strong->OnFrame(_lastFrame.value()); + } + } + } + +private: + std::vector>> _sinks; + absl::optional _lastFrame; +}; + +class IncomingV2VideoChannel : public sigslot::has_slots<> { +public: + IncomingV2VideoChannel( + cricket::ChannelManager *channelManager, + webrtc::Call *call, + webrtc::RtpTransport *rtpTransport, + rtc::UniqueRandomIdGenerator *randomIdGenerator, + NegotiatedMediaContent const &mediaContent, + std::shared_ptr threads) : + _channelManager(channelManager), + _call(call) { + _videoSink.reset(new VideoSinkImpl()); + + std::string streamId = "1"; + + _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); + + _videoChannel = _channelManager->CreateVideoChannel(call, cricket::MediaConfig(), rtpTransport, threads->getMediaThread(), "1", false, NativeNetworkingImpl::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); + + std::vector videoCodecs = mediaContent.codecs; + + auto outgoingVideoDescription = std::make_unique(); + for (const auto &rtpExtension : mediaContent.rtpExtensions) { + outgoingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id)); + } + 
outgoingVideoDescription->set_rtcp_mux(true); + outgoingVideoDescription->set_rtcp_reduced_size(true); + outgoingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kRecvOnly); + outgoingVideoDescription->set_codecs(videoCodecs); + outgoingVideoDescription->set_bandwidth(1032000); + + cricket::StreamParams videoRecvStreamParams; + + _mainVideoSsrc = mediaContent.ssrc; + + std::vector allSsrcs; + for (const auto &group : mediaContent.ssrcGroups) { + for (auto ssrc : group.ssrcs) { + if (std::find(allSsrcs.begin(), allSsrcs.end(), ssrc) == allSsrcs.end()) { + allSsrcs.push_back(ssrc); + } + } + + cricket::SsrcGroup parsedGroup(group.semantics, group.ssrcs); + videoRecvStreamParams.ssrc_groups.push_back(parsedGroup); + } + videoRecvStreamParams.ssrcs = allSsrcs; + + videoRecvStreamParams.cname = "cname"; + videoRecvStreamParams.set_stream_ids({ streamId }); + + auto incomingVideoDescription = std::make_unique(); + for (const auto &rtpExtension : mediaContent.rtpExtensions) { + incomingVideoDescription->AddRtpHeaderExtension(webrtc::RtpExtension(rtpExtension.uri, rtpExtension.id)); + } + incomingVideoDescription->set_rtcp_mux(true); + incomingVideoDescription->set_rtcp_reduced_size(true); + incomingVideoDescription->set_direction(webrtc::RtpTransceiverDirection::kSendOnly); + incomingVideoDescription->set_codecs(videoCodecs); + incomingVideoDescription->set_bandwidth(1032000); + + incomingVideoDescription->AddStream(videoRecvStreamParams); + + _videoChannel->SetPayloadTypeDemuxingEnabled(false); + _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); + _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + + _videoChannel->media_channel()->SetSink(_mainVideoSsrc, _videoSink.get()); + + _videoChannel->SignalSentPacket().connect(this, &IncomingV2VideoChannel::OnSentPacket_w); + //_videoChannel->UpdateRtpTransport(nullptr); + + _videoChannel->Enable(true); + } + + 
~IncomingV2VideoChannel() { + _videoChannel->Enable(false); + _channelManager->DestroyVideoChannel(_videoChannel); + _videoChannel = nullptr; + } + + void addSink(std::weak_ptr> impl) { + _videoSink->addSink(impl); + } + +private: + void OnSentPacket_w(const rtc::SentPacket& sent_packet) { + _call->OnSentPacket(sent_packet); + } + +private: + uint32_t _mainVideoSsrc = 0; + std::unique_ptr _videoSink; + std::unique_ptr _videoBitrateAllocatorFactory; + // Memory is managed by _channelManager + cricket::VideoChannel *_videoChannel; + // Memory is managed externally + cricket::ChannelManager *_channelManager = nullptr; + webrtc::Call *_call = nullptr; +}; + +} // namespace + +class InstanceV2ImplInternal : public std::enable_shared_from_this { +public: + InstanceV2ImplInternal(Descriptor &&descriptor, std::shared_ptr threads) : + _threads(threads), + _rtcServers(descriptor.rtcServers), + _encryptionKey(std::move(descriptor.encryptionKey)), + _stateUpdated(descriptor.stateUpdated), + _signalBarsUpdated(descriptor.signalBarsUpdated), + _audioLevelUpdated(descriptor.audioLevelUpdated), + _remoteBatteryLevelIsLowUpdated(descriptor.remoteBatteryLevelIsLowUpdated), + _remoteMediaStateUpdated(descriptor.remoteMediaStateUpdated), + _remotePrefferedAspectRatioUpdated(descriptor.remotePrefferedAspectRatioUpdated), + _signalingDataEmitted(descriptor.signalingDataEmitted), + _createAudioDeviceModule(descriptor.createAudioDeviceModule), + _eventLog(std::make_unique()), + _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), + _videoCapture(descriptor.videoCapture) { + } + + ~InstanceV2ImplInternal() { + _networking->perform(RTC_FROM_HERE, [](NativeNetworkingImpl *networking) { + networking->stop(); + }); + _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, []() { + }); + } + + void start() { + const auto weak = std::weak_ptr(shared_from_this()); + + _networking.reset(new ThreadLocalObject(_threads->getNetworkThread(), [weak, threads = _threads, isOutgoing = 
_encryptionKey.isOutgoing, rtcServers = _rtcServers]() { + return new NativeNetworkingImpl((NativeNetworkingImpl::Configuration){ + .isOutgoing = isOutgoing, + .enableStunMarking = false, + .enableTCP = false, + .enableP2P = true, + .rtcServers = rtcServers, + .stateUpdated = [threads, weak](const NativeNetworkingImpl::State &state) { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->onNetworkStateUpdated(state); + }); + }, + .candidateGathered = [threads, weak](const cricket::Candidate &candidate) { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + + strong->sendCandidate(candidate); + }); + }, + .transportMessageReceived = [threads, weak](rtc::CopyOnWriteBuffer const &packet, bool isMissing) { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + }); + }, + .rtcpPacketReceived = [threads, weak](rtc::CopyOnWriteBuffer const &packet, int64_t timestamp) { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->_call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, timestamp); + }); + }, + .dataChannelStateUpdated = [threads, weak](bool isDataChannelOpen) { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->onDataChannelStateUpdated(isDataChannelOpen); + }); + }, + .dataChannelMessageReceived = [threads, weak](std::string const &message) { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->onDataChannelMessage(message); + }); + }, + .threads = threads + }); + })); + + PlatformInterface::SharedInstance()->configurePlatformAudio(); + + cricket::MediaEngineDependencies mediaDeps; + 
mediaDeps.task_queue_factory = _taskQueueFactory.get(); + mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); + mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); + + mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(); + mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(); + + _audioDeviceModule = createAudioDeviceModule(); + if (!_audioDeviceModule) { + return; + } + mediaDeps.adm = _audioDeviceModule; + + _availableVideoFormats = mediaDeps.video_encoder_factory->GetSupportedFormats(); + + std::unique_ptr mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); + + _channelManager = cricket::ChannelManager::Create( + std::move(mediaEngine), + std::make_unique(), + true, + _threads->getMediaThread(), + _threads->getNetworkThread() + ); + + //setAudioInputDevice(_initialInputDeviceId); + //setAudioOutputDevice(_initialOutputDeviceId); + + webrtc::Call::Config callConfig(_eventLog.get()); + callConfig.task_queue_factory = _taskQueueFactory.get(); + callConfig.trials = &_fieldTrials; + callConfig.audio_state = _channelManager->media_engine()->voice().GetAudioState(); + _call.reset(webrtc::Call::Create(callConfig)); + + _uniqueRandomIdGenerator.reset(new rtc::UniqueRandomIdGenerator()); + + _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this]() { + _rtpTransport = _networking->getSyncAssumingSameThread()->getRtpTransport(); + }); + + _videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory(); + + _networking->perform(RTC_FROM_HERE, [](NativeNetworkingImpl *networking) { + networking->start(); + }); + + if (_videoCapture) { + setVideoCapture(_videoCapture); + } + + beginSignaling(); + + adjustBitratePreferences(true); + } + + void sendSignalingMessage(signaling::Message const &message) { + auto data = message.serialize(); + + RTC_LOG(LS_INFO) << "sendSignalingMessage: " << std::string(data.begin(), 
data.end()); + + if (_signalingEncryption) { + if (const auto encryptedData = _signalingEncryption->encryptOutgoing(data)) { + _signalingDataEmitted(std::vector(encryptedData->data(), encryptedData->data() + encryptedData->size())); + } else { + RTC_LOG(LS_ERROR) << "sendSignalingMessage: failed to encrypt payload"; + } + } else { + _signalingDataEmitted(data); + } + } + + void beginSignaling() { + _signalingEncryption.reset(new SignalingEncryption(_encryptionKey)); + + if (_encryptionKey.isOutgoing) { + _outgoingAudioContent = OutgoingAudioChannel::createOutgoingContentDescription(); + _outgoingVideoContent = OutgoingVideoChannel::createOutgoingContentDescription(_availableVideoFormats); + + sendInitialSetup(); + } + } + + void createNegotiatedChannels() { + if (_negotiatedOutgoingVideoContent) { + const auto weak = std::weak_ptr(shared_from_this()); + + _outgoingVideoChannel.reset(new OutgoingVideoChannel( + _threads, + _channelManager.get(), + _call.get(), + _rtpTransport, + _uniqueRandomIdGenerator.get(), + _videoBitrateAllocatorFactory.get(), + [threads = _threads, weak]() { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->sendMediaState(); + }); + }, + _negotiatedOutgoingVideoContent.value() + )); + + if (_videoCapture) { + _outgoingVideoChannel->setVideoCapture(_videoCapture); + } + } + + if (_negotiatedOutgoingAudioContent) { + _outgoingAudioChannel.reset(new OutgoingAudioChannel( + _call.get(), + _channelManager.get(), + _uniqueRandomIdGenerator.get(), + &_audioSource, + _rtpTransport, + _negotiatedOutgoingAudioContent.value(), + _threads + )); + } + + adjustBitratePreferences(true); + } + + void sendInitialSetup() { + const auto weak = std::weak_ptr(shared_from_this()); + + _networking->perform(RTC_FROM_HERE, [weak, threads = _threads, isOutgoing = _encryptionKey.isOutgoing](NativeNetworkingImpl *networking) { + auto localFingerprint = networking->getLocalFingerprint(); 
+ std::string hash = localFingerprint->algorithm; + std::string fingerprint = localFingerprint->GetRfc4572Fingerprint(); + std::string setup; + if (isOutgoing) { + setup = "actpass"; + } else { + setup = "passive"; + } + + auto localIceParams = networking->getLocalIceParameters(); + std::string ufrag = localIceParams.ufrag; + std::string pwd = localIceParams.pwd; + + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ufrag, pwd, hash, fingerprint, setup, localIceParams]() { + const auto strong = weak.lock(); + if (!strong) { + return; + } + + signaling::InitialSetupMessage data; + + if (strong->_outgoingAudioContent) { + data.audio = strong->_outgoingAudioContent.value(); + } + if (strong->_outgoingVideoContent) { + data.video = strong->_outgoingVideoContent.value(); + } + + data.ufrag = ufrag; + data.pwd = pwd; + + signaling::DtlsFingerprint dtlsFingerprint; + dtlsFingerprint.hash = hash; + dtlsFingerprint.fingerprint = fingerprint; + dtlsFingerprint.setup = setup; + data.fingerprints.push_back(std::move(dtlsFingerprint)); + + signaling::Message message; + message.data = std::move(data); + strong->sendSignalingMessage(message); + }); + }); + } + + void receiveSignalingData(const std::vector &data) { + std::vector decryptedData; + + if (_signalingEncryption) { + const auto rawDecryptedData = _signalingEncryption->decryptIncoming(data); + if (!rawDecryptedData) { + RTC_LOG(LS_ERROR) << "receiveSignalingData: could not decrypt payload"; + + return; + } + + decryptedData = std::vector(rawDecryptedData->data(), rawDecryptedData->data() + rawDecryptedData->size()); + } else { + decryptedData = data; + } + + processSignalingData(decryptedData); + } + + void processSignalingData(const std::vector &data) { + RTC_LOG(LS_INFO) << "processSignalingData: " << std::string(data.begin(), data.end()); + + const auto message = signaling::Message::parse(data); + if (!message) { + return; + } + const auto messageData = &message->data; + if (const auto initialSetup = 
absl::get_if(messageData)) { + PeerIceParameters remoteIceParameters; + remoteIceParameters.ufrag = initialSetup->ufrag; + remoteIceParameters.pwd = initialSetup->pwd; + + std::unique_ptr fingerprint; + std::string sslSetup; + if (initialSetup->fingerprints.size() != 0) { + fingerprint = rtc::SSLFingerprint::CreateUniqueFromRfc4572(initialSetup->fingerprints[0].hash, initialSetup->fingerprints[0].fingerprint); + sslSetup = initialSetup->fingerprints[0].setup; + } + + _networking->perform(RTC_FROM_HERE, [threads = _threads, remoteIceParameters = std::move(remoteIceParameters), fingerprint = std::move(fingerprint), sslSetup = std::move(sslSetup)](NativeNetworkingImpl *networking) { + networking->setRemoteParams(remoteIceParameters, fingerprint.get(), sslSetup); + }); + + if (const auto audio = initialSetup->audio) { + if (_encryptionKey.isOutgoing) { + if (_outgoingAudioContent) { + _negotiatedOutgoingAudioContent = negotiateMediaContent(_outgoingAudioContent.value(), _outgoingAudioContent.value(), audio.value(), false); + const auto incomingAudioContent = negotiateMediaContent(audio.value(), _outgoingAudioContent.value(), audio.value(), false); + + signaling::MediaContent outgoingAudioContent; + + outgoingAudioContent.ssrc = _outgoingAudioContent->ssrc; + outgoingAudioContent.ssrcGroups = _outgoingAudioContent->ssrcGroups; + outgoingAudioContent.rtpExtensions = _negotiatedOutgoingAudioContent->rtpExtensions; + outgoingAudioContent.payloadTypes = getPayloadTypesFromAudioCodecs(_negotiatedOutgoingAudioContent->codecs); + + _outgoingAudioContent = std::move(outgoingAudioContent); + + _incomingAudioChannel.reset(new IncomingV2AudioChannel( + _channelManager.get(), + _call.get(), + _rtpTransport, + _uniqueRandomIdGenerator.get(), + incomingAudioContent, + _threads + )); + } + } else { + const auto generatedOutgoingContent = OutgoingAudioChannel::createOutgoingContentDescription(); + + if (generatedOutgoingContent) { + _negotiatedOutgoingAudioContent = 
negotiateMediaContent(generatedOutgoingContent.value(), generatedOutgoingContent.value(), audio.value(), true); + const auto incomingAudioContent = negotiateMediaContent(audio.value(), generatedOutgoingContent.value(), audio.value(), true); + + if (_negotiatedOutgoingAudioContent) { + signaling::MediaContent outgoingAudioContent; + + outgoingAudioContent.ssrc = generatedOutgoingContent->ssrc; + outgoingAudioContent.ssrcGroups = generatedOutgoingContent->ssrcGroups; + outgoingAudioContent.rtpExtensions = _negotiatedOutgoingAudioContent->rtpExtensions; + outgoingAudioContent.payloadTypes = getPayloadTypesFromAudioCodecs(_negotiatedOutgoingAudioContent->codecs); + + _outgoingAudioContent = std::move(outgoingAudioContent); + + _incomingAudioChannel.reset(new IncomingV2AudioChannel( + _channelManager.get(), + _call.get(), + _rtpTransport, + _uniqueRandomIdGenerator.get(), + incomingAudioContent, + _threads + )); + } + } + } + } + + if (const auto video = initialSetup->video) { + if (_encryptionKey.isOutgoing) { + if (_outgoingVideoContent) { + _negotiatedOutgoingVideoContent = negotiateMediaContent(_outgoingVideoContent.value(), _outgoingVideoContent.value(), video.value(), false); + const auto incomingVideoContent = negotiateMediaContent(video.value(), _outgoingVideoContent.value(), video.value(), false); + + signaling::MediaContent outgoingVideoContent; + + outgoingVideoContent.ssrc = _outgoingVideoContent->ssrc; + outgoingVideoContent.ssrcGroups = _outgoingVideoContent->ssrcGroups; + outgoingVideoContent.rtpExtensions = _negotiatedOutgoingVideoContent->rtpExtensions; + outgoingVideoContent.payloadTypes = getPayloadTypesFromVideoCodecs(_negotiatedOutgoingVideoContent->codecs); + + _outgoingVideoContent = std::move(outgoingVideoContent); + + _incomingVideoChannel.reset(new IncomingV2VideoChannel( + _channelManager.get(), + _call.get(), + _rtpTransport, + _uniqueRandomIdGenerator.get(), + incomingVideoContent, + _threads + )); + } + } else { + const auto 
generatedOutgoingContent = OutgoingVideoChannel::createOutgoingContentDescription(_availableVideoFormats); + + if (generatedOutgoingContent) { + _negotiatedOutgoingVideoContent = negotiateMediaContent(generatedOutgoingContent.value(), generatedOutgoingContent.value(), video.value(), true); + const auto incomingVideoContent = negotiateMediaContent(video.value(), generatedOutgoingContent.value(), video.value(), true); + + if (_negotiatedOutgoingVideoContent) { + signaling::MediaContent outgoingVideoContent; + + outgoingVideoContent.ssrc = generatedOutgoingContent->ssrc; + outgoingVideoContent.ssrcGroups = generatedOutgoingContent->ssrcGroups; + outgoingVideoContent.rtpExtensions = _negotiatedOutgoingVideoContent->rtpExtensions; + outgoingVideoContent.payloadTypes = getPayloadTypesFromVideoCodecs(_negotiatedOutgoingVideoContent->codecs); + + _outgoingVideoContent = std::move(outgoingVideoContent); + + _incomingVideoChannel.reset(new IncomingV2VideoChannel( + _channelManager.get(), + _call.get(), + _rtpTransport, + _uniqueRandomIdGenerator.get(), + incomingVideoContent, + _threads + )); + } + } + } + } + + createNegotiatedChannels(); + + if (!_encryptionKey.isOutgoing) { + sendInitialSetup(); + } + + _handshakeCompleted = true; + commitPendingIceCandidates(); + } else if (const auto candidatesList = absl::get_if(messageData)) { + for (const auto &candidate : candidatesList->iceCandidates) { + webrtc::JsepIceCandidate parseCandidate{ std::string(), 0 }; + if (!parseCandidate.Initialize(candidate.sdpString, nullptr)) { + RTC_LOG(LS_ERROR) << "Could not parse candidate: " << candidate.sdpString; + continue; + } + _pendingIceCandidates.push_back(parseCandidate.candidate()); + } + + if (_handshakeCompleted) { + commitPendingIceCandidates(); + } + } else if (const auto mediaState = absl::get_if(messageData)) { + AudioState mappedAudioState; + if (mediaState->isMuted) { + mappedAudioState = AudioState::Muted; + } else { + mappedAudioState = AudioState::Active; + } + + 
VideoState mappedVideoState; + switch (mediaState->videoState) { + case signaling::MediaStateMessage::VideoState::Inactive: { + mappedVideoState = VideoState::Inactive; + break; + } + case signaling::MediaStateMessage::VideoState::Suspended: { + mappedVideoState = VideoState::Paused; + break; + } + case signaling::MediaStateMessage::VideoState::Active: { + mappedVideoState = VideoState::Active; + break; + } + default: { + RTC_FATAL() << "Unknown videoState"; + break; + } + } + + if (_remoteMediaStateUpdated) { + _remoteMediaStateUpdated(mappedAudioState, mappedVideoState); + } + + if (_remoteBatteryLevelIsLowUpdated) { + _remoteBatteryLevelIsLowUpdated(mediaState->isBatteryLow); + } + } + } + + void commitPendingIceCandidates() { + if (_pendingIceCandidates.size() == 0) { + return; + } + _networking->perform(RTC_FROM_HERE, [threads = _threads, parsedCandidates = _pendingIceCandidates](NativeNetworkingImpl *networking) { + networking->addCandidates(parsedCandidates); + }); + _pendingIceCandidates.clear(); + } + + void onNetworkStateUpdated(NativeNetworkingImpl::State const &state) { + State mappedState; + if (state.isReadyToSendData) { + mappedState = State::Established; + } else { + mappedState = State::Reconnecting; + } + _stateUpdated(mappedState); + } + + void onDataChannelStateUpdated(bool isDataChannelOpen) { + if (_isDataChannelOpen != isDataChannelOpen) { + _isDataChannelOpen = isDataChannelOpen; + + if (_isDataChannelOpen) { + sendMediaState(); + } + } + } + + void sendDataChannelMessage(signaling::Message const &message) { + if (!_isDataChannelOpen) { + RTC_LOG(LS_ERROR) << "sendDataChannelMessage called, but data channel is not open"; + return; + } + auto data = message.serialize(); + std::string stringData(data.begin(), data.end()); + RTC_LOG(LS_INFO) << "sendDataChannelMessage: " << stringData; + _networking->perform(RTC_FROM_HERE, [stringData = std::move(stringData)](NativeNetworkingImpl *networking) { + networking->sendDataChannelMessage(stringData); 
+ }); + } + + void onDataChannelMessage(std::string const &message) { + RTC_LOG(LS_INFO) << "dataChannelMessage received: " << message; + std::vector data(message.begin(), message.end()); + processSignalingData(data); + } + + void sendMediaState() { + if (!_isDataChannelOpen) { + return; + } + signaling::Message message; + signaling::MediaStateMessage data; + data.isMuted = _isMicrophoneMuted; + data.isBatteryLow = _isBatteryLow; + if (_outgoingVideoChannel) { + if (_outgoingVideoChannel->videoCapture()) { + data.videoState = signaling::MediaStateMessage::VideoState::Active; + } else{ + data.videoState = signaling::MediaStateMessage::VideoState::Inactive; + } + data.videoRotation = _outgoingVideoChannel->getRotation(); + } else { + data.videoState = signaling::MediaStateMessage::VideoState::Inactive; + data.videoRotation = signaling::MediaStateMessage::VideoRotation::Rotation0; + } + message.data = std::move(data); + sendDataChannelMessage(message); + } + + void sendCandidate(const cricket::Candidate &candidate) { + cricket::Candidate patchedCandidate = candidate; + patchedCandidate.set_component(1); + + signaling::CandidatesMessage data; + + signaling::IceCandidate serializedCandidate; + + webrtc::JsepIceCandidate iceCandidate{ std::string(), 0 }; + iceCandidate.SetCandidate(patchedCandidate); + std::string serialized; + const auto success = iceCandidate.ToString(&serialized); + assert(success); + + serializedCandidate.sdpString = serialized; + + data.iceCandidates.push_back(std::move(serializedCandidate)); + + signaling::Message message; + message.data = std::move(data); + sendSignalingMessage(message); + } + + void setVideoCapture(std::shared_ptr videoCapture) { + _videoCapture = videoCapture; + + if (_outgoingVideoChannel) { + _outgoingVideoChannel->setVideoCapture(videoCapture); + + sendMediaState(); + + adjustBitratePreferences(true); + } + } + + void setRequestedVideoAspect(float aspect) { + + } + + void setNetworkType(NetworkType networkType) { + + } + + 
void setMuteMicrophone(bool muteMicrophone) { + if (_isMicrophoneMuted != muteMicrophone) { + _isMicrophoneMuted = muteMicrophone; + + if (_outgoingAudioChannel) { + _outgoingAudioChannel->setIsMuted(muteMicrophone); + } + + sendMediaState(); + } + } + + void setIncomingVideoOutput(std::shared_ptr> sink) { + if (_incomingVideoChannel) { + _incomingVideoChannel->addSink(sink); + } + } + + void setAudioInputDevice(std::string id) { + + } + + void setAudioOutputDevice(std::string id) { + + } + + void setIsLowBatteryLevel(bool isLowBatteryLevel) { + if (_isBatteryLow != isLowBatteryLevel) { + _isBatteryLow = isLowBatteryLevel; + sendMediaState(); + } + } + + void stop(std::function completion) { + completion({}); + } + + void adjustBitratePreferences(bool resetStartBitrate) { + webrtc::BitrateConstraints preferences; + if (_videoCapture) { + preferences.min_bitrate_bps = 64000; + if (resetStartBitrate) { + preferences.start_bitrate_bps = (100 + 800 + 32 + 100) * 1000; + } + preferences.max_bitrate_bps = (100 + 200 + 800 + 32 + 100) * 1000; + } else { + preferences.min_bitrate_bps = 32000; + if (resetStartBitrate) { + preferences.start_bitrate_bps = 32000; + } + preferences.max_bitrate_bps = 32000; + } + + _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences); + } + +private: + rtc::scoped_refptr createAudioDeviceModule() { + const auto create = [&](webrtc::AudioDeviceModule::AudioLayer layer) { + return webrtc::AudioDeviceModule::Create( + layer, + _taskQueueFactory.get()); + }; + const auto check = [&](const rtc::scoped_refptr &result) { + return (result && result->Init() == 0) ? 
result : nullptr; + }; + if (_createAudioDeviceModule) { + if (const auto result = check(_createAudioDeviceModule(_taskQueueFactory.get()))) { + return result; + } + } + return check(create(webrtc::AudioDeviceModule::kPlatformDefaultAudio)); + } + +private: + std::shared_ptr _threads; + std::vector _rtcServers; + EncryptionKey _encryptionKey; + std::function _stateUpdated; + std::function _signalBarsUpdated; + std::function _audioLevelUpdated; + std::function _remoteBatteryLevelIsLowUpdated; + std::function _remoteMediaStateUpdated; + std::function _remotePrefferedAspectRatioUpdated; + std::function &)> _signalingDataEmitted; + std::function(webrtc::TaskQueueFactory*)> _createAudioDeviceModule; + + std::unique_ptr _signalingEncryption; + + bool _handshakeCompleted = false; + std::vector _pendingIceCandidates; + bool _isDataChannelOpen = false; + + std::unique_ptr _eventLog; + std::unique_ptr _taskQueueFactory; + std::unique_ptr _mediaEngine; + std::unique_ptr _call; + webrtc::FieldTrialBasedConfig _fieldTrials; + webrtc::LocalAudioSinkAdapter _audioSource; + rtc::scoped_refptr _audioDeviceModule; + + std::unique_ptr _uniqueRandomIdGenerator; + webrtc::RtpTransport *_rtpTransport = nullptr; + std::unique_ptr _channelManager; + std::unique_ptr _videoBitrateAllocatorFactory; + + std::shared_ptr> _networking; + + absl::optional _outgoingAudioContent; + absl::optional> _negotiatedOutgoingAudioContent; + + std::unique_ptr _outgoingAudioChannel; + bool _isMicrophoneMuted = false; + + std::vector _availableVideoFormats; + + absl::optional _outgoingVideoContent; + absl::optional> _negotiatedOutgoingVideoContent; + + std::shared_ptr _outgoingVideoChannel; + + bool _isBatteryLow = false; + + std::unique_ptr _incomingAudioChannel; + std::unique_ptr _incomingVideoChannel; + + std::shared_ptr _videoCapture; +}; + +InstanceV2Impl::InstanceV2Impl(Descriptor &&descriptor) { + if (descriptor.config.logPath.data.size() != 0) { + _logSink = std::make_unique(descriptor.config.logPath); 
+ } + rtc::LogMessage::LogToDebug(rtc::LS_INFO); + rtc::LogMessage::SetLogToStderr(false); + if (_logSink) { + rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO); + } + + _threads = StaticThreads::getThreads(); + _internal.reset(new ThreadLocalObject(_threads->getMediaThread(), [descriptor = std::move(descriptor), threads = _threads]() mutable { + return new InstanceV2ImplInternal(std::move(descriptor), threads); + })); + _internal->perform(RTC_FROM_HERE, [](InstanceV2ImplInternal *internal) { + internal->start(); + }); +} + +InstanceV2Impl::~InstanceV2Impl() { + rtc::LogMessage::RemoveLogToStream(_logSink.get()); +} + +void InstanceV2Impl::receiveSignalingData(const std::vector &data) { + _internal->perform(RTC_FROM_HERE, [data](InstanceV2ImplInternal *internal) { + internal->receiveSignalingData(data); + }); +} + +void InstanceV2Impl::setVideoCapture(std::shared_ptr videoCapture) { + _internal->perform(RTC_FROM_HERE, [videoCapture](InstanceV2ImplInternal *internal) { + internal->setVideoCapture(videoCapture); + }); +} + +void InstanceV2Impl::setRequestedVideoAspect(float aspect) { + _internal->perform(RTC_FROM_HERE, [aspect](InstanceV2ImplInternal *internal) { + internal->setRequestedVideoAspect(aspect); + }); +} + +void InstanceV2Impl::setNetworkType(NetworkType networkType) { + _internal->perform(RTC_FROM_HERE, [networkType](InstanceV2ImplInternal *internal) { + internal->setNetworkType(networkType); + }); +} + +void InstanceV2Impl::setMuteMicrophone(bool muteMicrophone) { + _internal->perform(RTC_FROM_HERE, [muteMicrophone](InstanceV2ImplInternal *internal) { + internal->setMuteMicrophone(muteMicrophone); + }); +} + +void InstanceV2Impl::setIncomingVideoOutput(std::shared_ptr> sink) { + _internal->perform(RTC_FROM_HERE, [sink](InstanceV2ImplInternal *internal) { + internal->setIncomingVideoOutput(sink); + }); +} + +void InstanceV2Impl::setAudioInputDevice(std::string id) { + _internal->perform(RTC_FROM_HERE, [id](InstanceV2ImplInternal *internal) { 
+ internal->setAudioInputDevice(id); + }); +} + +void InstanceV2Impl::setAudioOutputDevice(std::string id) { + _internal->perform(RTC_FROM_HERE, [id](InstanceV2ImplInternal *internal) { + internal->setAudioOutputDevice(id); + }); +} + +void InstanceV2Impl::setIsLowBatteryLevel(bool isLowBatteryLevel) { + _internal->perform(RTC_FROM_HERE, [isLowBatteryLevel](InstanceV2ImplInternal *internal) { + internal->setIsLowBatteryLevel(isLowBatteryLevel); + }); +} + +void InstanceV2Impl::setInputVolume(float level) { +} + +void InstanceV2Impl::setOutputVolume(float level) { +} + +void InstanceV2Impl::setAudioOutputDuckingEnabled(bool enabled) { +} + +void InstanceV2Impl::setAudioOutputGainControlEnabled(bool enabled) { +} + +void InstanceV2Impl::setEchoCancellationStrength(int strength) { +} + +std::vector InstanceV2Impl::GetVersions() { + std::vector result; + result.push_back("4.0.0"); + return result; +} + +int InstanceV2Impl::GetConnectionMaxLayer() { + return 92; +} + +std::string InstanceV2Impl::getLastError() { + return ""; +} + +std::string InstanceV2Impl::getDebugInfo() { + return ""; +} + +int64_t InstanceV2Impl::getPreferredRelayId() { + return 0; +} + +TrafficStats InstanceV2Impl::getTrafficStats() { + return {}; +} + +PersistentState InstanceV2Impl::getPersistentState() { + return {}; +} + +void InstanceV2Impl::stop(std::function completion) { + std::string debugLog; + if (_logSink) { + debugLog = _logSink->result(); + } + _internal->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](InstanceV2ImplInternal *internal) mutable { + internal->stop([completion, debugLog = std::move(debugLog)](FinalState finalState) mutable { + finalState.debugLog = debugLog; + completion(finalState); + }); + }); +} + +template <> +bool Register() { + return Meta::RegisterOne(); +} + +} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.h b/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.h new file mode 100644 index 000000000..1a36a24c6 
--- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/InstanceV2Impl.h @@ -0,0 +1,57 @@ +#ifndef TGCALLS_INSTANCEV2_IMPL_H +#define TGCALLS_INSTANCEV2_IMPL_H + +#include "Instance.h" +#include "StaticThreads.h" + +namespace tgcalls { + +class LogSinkImpl; + +class Manager; +template +class ThreadLocalObject; + +class InstanceV2ImplInternal; + +class InstanceV2Impl final : public Instance { +public: + explicit InstanceV2Impl(Descriptor &&descriptor); + ~InstanceV2Impl() override; + + void receiveSignalingData(const std::vector &data) override; + void setVideoCapture(std::shared_ptr videoCapture) override; + void setRequestedVideoAspect(float aspect) override; + void setNetworkType(NetworkType networkType) override; + void setMuteMicrophone(bool muteMicrophone) override; + bool supportsVideo() override { + return true; + } + void setIncomingVideoOutput(std::shared_ptr> sink) override; + void setAudioOutputGainControlEnabled(bool enabled) override; + void setEchoCancellationStrength(int strength) override; + void setAudioInputDevice(std::string id) override; + void setAudioOutputDevice(std::string id) override; + void setInputVolume(float level) override; + void setOutputVolume(float level) override; + void setAudioOutputDuckingEnabled(bool enabled) override; + void setIsLowBatteryLevel(bool isLowBatteryLevel) override; + static std::vector GetVersions(); + static int GetConnectionMaxLayer(); + std::string getLastError() override; + std::string getDebugInfo() override; + int64_t getPreferredRelayId() override; + TrafficStats getTrafficStats() override; + PersistentState getPersistentState() override; + void stop(std::function completion) override; + +private: + std::shared_ptr _threads; + std::unique_ptr> _internal; + std::unique_ptr _logSink; + +}; + +} // namespace tgcalls + +#endif diff --git a/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.cpp b/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.cpp new file mode 100644 index 000000000..fdd43b676 --- 
/dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.cpp @@ -0,0 +1,565 @@ +#include "v2/NativeNetworkingImpl.h" + +#include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/client/basic_port_allocator.h" +#include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/basic_async_resolver_factory.h" +#include "api/packet_socket_factory.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "p2p/base/ice_credentials_iterator.h" +#include "api/jsep_ice_candidate.h" +#include "p2p/base/dtls_transport.h" +#include "p2p/base/dtls_transport_factory.h" +#include "pc/dtls_srtp_transport.h" +#include "pc/dtls_transport.h" + +#include "StaticThreads.h" + +namespace tgcalls { + +class TurnCustomizerImpl : public webrtc::TurnCustomizer { +public: + TurnCustomizerImpl() { + } + + virtual ~TurnCustomizerImpl() { + } + + void MaybeModifyOutgoingStunMessage(cricket::PortInterface* port, + cricket::StunMessage* message) override { + message->AddAttribute(std::make_unique(cricket::STUN_ATTR_SOFTWARE, "Telegram ")); + } + + bool AllowChannelData(cricket::PortInterface* port, const void *data, size_t size, bool payload) override { + return true; + } +}; + +class SctpDataChannelProviderInterfaceImpl : public sigslot::has_slots<>, public webrtc::SctpDataChannelProviderInterface, public webrtc::DataChannelObserver { +public: + SctpDataChannelProviderInterfaceImpl( + cricket::DtlsTransport *transportChannel, + bool isOutgoing, + std::function onStateChanged, + std::function onMessageReceived, + std::shared_ptr threads + ) : + _threads(std::move(threads)), + _onStateChanged(onStateChanged), + _onMessageReceived(onMessageReceived) { + assert(_threads->getNetworkThread()->IsCurrent()); + + _sctpTransportFactory.reset(new cricket::SctpTransportFactory(_threads->getNetworkThread())); + + _sctpTransport = _sctpTransportFactory->CreateSctpTransport(transportChannel); + 
_sctpTransport->SignalReadyToSendData.connect(this, &SctpDataChannelProviderInterfaceImpl::sctpReadyToSendData); + _sctpTransport->SignalDataReceived.connect(this, &SctpDataChannelProviderInterfaceImpl::sctpDataReceived); + + webrtc::InternalDataChannelInit dataChannelInit; + dataChannelInit.id = 0; + dataChannelInit.open_handshake_role = isOutgoing ? webrtc::InternalDataChannelInit::kOpener : webrtc::InternalDataChannelInit::kAcker; + _dataChannel = webrtc::SctpDataChannel::Create( + this, + "data", + dataChannelInit, + _threads->getNetworkThread(), + _threads->getNetworkThread() + ); + + _dataChannel->RegisterObserver(this); + } + + virtual ~SctpDataChannelProviderInterfaceImpl() { + assert(_threads->getNetworkThread()->IsCurrent()); + + _dataChannel->UnregisterObserver(); + _dataChannel->Close(); + _dataChannel = nullptr; + + _sctpTransport = nullptr; + _sctpTransportFactory.reset(); + } + + void sendDataChannelMessage(std::string const &message) { + assert(_threads->getNetworkThread()->IsCurrent()); + + if (_isDataChannelOpen) { + RTC_LOG(LS_INFO) << "Outgoing DataChannel message: " << message; + + webrtc::DataBuffer buffer(message); + _dataChannel->Send(buffer); + } else { + RTC_LOG(LS_INFO) << "Could not send an outgoing DataChannel message: the channel is not open"; + } + } + + virtual void OnStateChange() override { + assert(_threads->getNetworkThread()->IsCurrent()); + + auto state = _dataChannel->state(); + bool isDataChannelOpen = state == webrtc::DataChannelInterface::DataState::kOpen; + if (_isDataChannelOpen != isDataChannelOpen) { + _isDataChannelOpen = isDataChannelOpen; + _onStateChanged(_isDataChannelOpen); + } + } + + virtual void OnMessage(const webrtc::DataBuffer& buffer) override { + assert(_threads->getNetworkThread()->IsCurrent()); + + if (!buffer.binary) { + std::string messageText(buffer.data.data(), buffer.data.data() + buffer.data.size()); + RTC_LOG(LS_INFO) << "Incoming DataChannel message: " << messageText; + + 
_onMessageReceived(messageText); + } + } + + void updateIsConnected(bool isConnected) { + assert(_threads->getNetworkThread()->IsCurrent()); + + if (isConnected) { + if (!_isSctpTransportStarted) { + _isSctpTransportStarted = true; + _sctpTransport->Start(5000, 5000, 262144); + } + } + } + + void sctpReadyToSendData() { + assert(_threads->getNetworkThread()->IsCurrent()); + + _dataChannel->OnTransportReady(true); + } + + void sctpDataReceived(const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) { + assert(_threads->getNetworkThread()->IsCurrent()); + + _dataChannel->OnDataReceived(params, buffer); + } + + virtual bool SendData(const cricket::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) override { + assert(_threads->getNetworkThread()->IsCurrent()); + + return _sctpTransport->SendData(params, payload); + } + + virtual bool ConnectDataChannel(webrtc::SctpDataChannel *data_channel) override { + assert(_threads->getNetworkThread()->IsCurrent()); + + return true; + } + + virtual void DisconnectDataChannel(webrtc::SctpDataChannel* data_channel) override { + assert(_threads->getNetworkThread()->IsCurrent()); + + return; + } + + virtual void AddSctpDataStream(int sid) override { + assert(_threads->getNetworkThread()->IsCurrent()); + + _sctpTransport->OpenStream(sid); + } + + virtual void RemoveSctpDataStream(int sid) override { + assert(_threads->getNetworkThread()->IsCurrent()); + + _threads->getNetworkThread()->Invoke(RTC_FROM_HERE, [this, sid]() { + _sctpTransport->ResetStream(sid); + }); + } + + virtual bool ReadyToSendData() const override { + assert(_threads->getNetworkThread()->IsCurrent()); + + return _sctpTransport->ReadyToSendData(); + } + +private: + std::shared_ptr _threads; + std::function _onStateChanged; + std::function _onMessageReceived; + + std::unique_ptr _sctpTransportFactory; + std::unique_ptr _sctpTransport; + rtc::scoped_refptr _dataChannel; + + bool _isSctpTransportStarted 
= false; + bool _isDataChannelOpen = false; + +}; + +webrtc::CryptoOptions NativeNetworkingImpl::getDefaulCryptoOptions() { + auto options = webrtc::CryptoOptions(); + options.srtp.enable_aes128_sha1_80_crypto_cipher = true; + options.srtp.enable_gcm_crypto_suites = true; + return options; +} + +NativeNetworkingImpl::NativeNetworkingImpl(Configuration &&configuration) : +_threads(std::move(configuration.threads)), +_isOutgoing(configuration.isOutgoing), +_enableStunMarking(configuration.enableStunMarking), +_enableTCP(configuration.enableTCP), +_enableP2P(configuration.enableP2P), +_rtcServers(configuration.rtcServers), +_stateUpdated(std::move(configuration.stateUpdated)), +_candidateGathered(std::move(configuration.candidateGathered)), +_transportMessageReceived(std::move(configuration.transportMessageReceived)), +_rtcpPacketReceived(std::move(configuration.rtcpPacketReceived)), +_dataChannelStateUpdated(configuration.dataChannelStateUpdated), +_dataChannelMessageReceived(configuration.dataChannelMessageReceived) { + assert(_threads->getNetworkThread()->IsCurrent()); + + _localIceParameters = PeerIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH)); + + _localCertificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(rtc::KT_ECDSA), absl::nullopt); + + _socketFactory.reset(new rtc::BasicPacketSocketFactory(_threads->getNetworkThread())); + _networkManager = std::make_unique(); + _asyncResolverFactory = std::make_unique(); + + _dtlsSrtpTransport = std::make_unique(true); + _dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr); + _dtlsSrtpTransport->SetActiveResetSrtpParams(false); + _dtlsSrtpTransport->SignalReadyToSend.connect(this, &NativeNetworkingImpl::DtlsReadyToSend); + _dtlsSrtpTransport->SignalRtpPacketReceived.connect(this, &NativeNetworkingImpl::RtpPacketReceived_n); + _dtlsSrtpTransport->SignalRtcpPacketReceived.connect(this, 
&NativeNetworkingImpl::OnRtcpPacketReceived_n); + + resetDtlsSrtpTransport(); +} + +NativeNetworkingImpl::~NativeNetworkingImpl() { + assert(_threads->getNetworkThread()->IsCurrent()); + + RTC_LOG(LS_INFO) << "NativeNetworkingImpl::~NativeNetworkingImpl()"; + + _dtlsSrtpTransport.reset(); + _dtlsTransport.reset(); + _dataChannelInterface.reset(); + _transportChannel.reset(); + _asyncResolverFactory.reset(); + _portAllocator.reset(); + _networkManager.reset(); + _socketFactory.reset(); +} + +void NativeNetworkingImpl::resetDtlsSrtpTransport() { + if (_enableStunMarking) { + _turnCustomizer.reset(new TurnCustomizerImpl()); + } + + _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), _turnCustomizer.get(), nullptr)); + + uint32_t flags = _portAllocator->flags(); + + flags |= + //cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + cricket::PORTALLOCATOR_ENABLE_IPV6 | + cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI; + + if (!_enableTCP) { + flags |= cricket::PORTALLOCATOR_DISABLE_TCP; + } + if (!_enableP2P) { + flags |= cricket::PORTALLOCATOR_DISABLE_UDP; + flags |= cricket::PORTALLOCATOR_DISABLE_STUN; + uint32_t candidateFilter = _portAllocator->candidate_filter(); + candidateFilter &= ~(cricket::CF_REFLEXIVE); + _portAllocator->SetCandidateFilter(candidateFilter); + } + + _portAllocator->set_step_delay(cricket::kMinimumStepDelay); + + //TODO: figure out the proxy setup + /*if (_proxy) { + rtc::ProxyInfo proxyInfo; + proxyInfo.type = rtc::ProxyType::PROXY_SOCKS5; + proxyInfo.address = rtc::SocketAddress(_proxy->host, _proxy->port); + proxyInfo.username = _proxy->login; + proxyInfo.password = rtc::CryptString(TgCallsCryptStringImpl(_proxy->password)); + _portAllocator->set_proxy("t/1.0", proxyInfo); + }*/ + + _portAllocator->set_flags(flags); + _portAllocator->Initialize(); + + cricket::ServerAddresses stunServers; + std::vector turnServers; + + for (auto &server : _rtcServers) { + if (server.isTurn) { + 
turnServers.push_back(cricket::RelayServerConfig( + rtc::SocketAddress(server.host, server.port), + server.login, + server.password, + cricket::PROTO_UDP + )); + } else { + rtc::SocketAddress stunAddress = rtc::SocketAddress(server.host, server.port); + stunServers.insert(stunAddress); + } + } + + _portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE, _turnCustomizer.get()); + + _transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, _portAllocator.get(), _asyncResolverFactory.get(), nullptr)); + + cricket::IceConfig iceConfig; + iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY; + iceConfig.prioritize_most_likely_candidate_pairs = true; + iceConfig.regather_on_failed_networks_interval = 8000; + _transportChannel->SetIceConfig(iceConfig); + + cricket::IceParameters localIceParameters( + _localIceParameters.ufrag, + _localIceParameters.pwd, + false + ); + + _transportChannel->SetIceParameters(localIceParameters); + _transportChannel->SetIceRole(_isOutgoing ? 
cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED); + _transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL); + + _transportChannel->SignalCandidateGathered.connect(this, &NativeNetworkingImpl::candidateGathered); + _transportChannel->SignalIceTransportStateChanged.connect(this, &NativeNetworkingImpl::transportStateChanged); + _transportChannel->SignalReadPacket.connect(this, &NativeNetworkingImpl::transportPacketReceived); + + webrtc::CryptoOptions cryptoOptions = NativeNetworkingImpl::getDefaulCryptoOptions(); + _dtlsTransport.reset(new cricket::DtlsTransport(_transportChannel.get(), cryptoOptions, nullptr)); + + _dtlsTransport->SignalWritableState.connect( + this, &NativeNetworkingImpl::OnTransportWritableState_n); + _dtlsTransport->SignalReceivingState.connect( + this, &NativeNetworkingImpl::OnTransportReceivingState_n); + + _dtlsTransport->SetLocalCertificate(_localCertificate); + + _dtlsSrtpTransport->SetDtlsTransports(_dtlsTransport.get(), nullptr); +} + +void NativeNetworkingImpl::start() { + _transportChannel->MaybeStartGathering(); + + const auto weak = std::weak_ptr(shared_from_this()); + _dataChannelInterface.reset(new SctpDataChannelProviderInterfaceImpl( + _dtlsTransport.get(), + _isOutgoing, + [weak, threads = _threads](bool state) { + assert(threads->getNetworkThread()->IsCurrent()); + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->_dataChannelStateUpdated(state); + }, + [weak, threads = _threads](std::string const &message) { + assert(threads->getNetworkThread()->IsCurrent()); + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->_dataChannelMessageReceived(message); + }, + _threads + )); +} + +void NativeNetworkingImpl::stop() { + _transportChannel->SignalCandidateGathered.disconnect(this); + _transportChannel->SignalIceTransportStateChanged.disconnect(this); + _transportChannel->SignalReadPacket.disconnect(this); + + _dtlsTransport->SignalWritableState.disconnect(this); + 
_dtlsTransport->SignalReceivingState.disconnect(this); + + _dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr); + + _dataChannelInterface.reset(); + _dtlsTransport.reset(); + _transportChannel.reset(); + _portAllocator.reset(); + + _localIceParameters = PeerIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH)); + + _localCertificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(rtc::KT_ECDSA), absl::nullopt); + + resetDtlsSrtpTransport(); +} + +PeerIceParameters NativeNetworkingImpl::getLocalIceParameters() { + return _localIceParameters; +} + +std::unique_ptr NativeNetworkingImpl::getLocalFingerprint() { + auto certificate = _localCertificate; + if (!certificate) { + return nullptr; + } + return rtc::SSLFingerprint::CreateFromCertificate(*certificate); +} + +void NativeNetworkingImpl::setRemoteParams(PeerIceParameters const &remoteIceParameters, rtc::SSLFingerprint *fingerprint, std::string const &sslSetup) { + _remoteIceParameters = remoteIceParameters; + + cricket::IceParameters parameters( + remoteIceParameters.ufrag, + remoteIceParameters.pwd, + false + ); + + _transportChannel->SetRemoteIceParameters(parameters); + + if (sslSetup == "active") { + _dtlsTransport->SetDtlsRole(rtc::SSLRole::SSL_SERVER); + } else if (sslSetup == "passive") { + _dtlsTransport->SetDtlsRole(rtc::SSLRole::SSL_CLIENT); + } else { + _dtlsTransport->SetDtlsRole(_isOutgoing ? 
rtc::SSLRole::SSL_CLIENT : rtc::SSLRole::SSL_SERVER); + } + + if (fingerprint) { + _dtlsTransport->SetRemoteFingerprint(fingerprint->algorithm, fingerprint->digest.data(), fingerprint->digest.size()); + } +} + +void NativeNetworkingImpl::addCandidates(std::vector const &candidates) { + for (const auto &candidate : candidates) { + _transportChannel->AddRemoteCandidate(candidate); + } +} + +void NativeNetworkingImpl::sendDataChannelMessage(std::string const &message) { + if (_dataChannelInterface) { + _dataChannelInterface->sendDataChannelMessage(message); + } +} + +webrtc::RtpTransport *NativeNetworkingImpl::getRtpTransport() { + return _dtlsSrtpTransport.get(); +} + +void NativeNetworkingImpl::checkConnectionTimeout() { + const auto weak = std::weak_ptr(shared_from_this()); + _threads->getNetworkThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + + int64_t currentTimestamp = rtc::TimeMillis(); + const int64_t maxTimeout = 20000; + + if (strong->_lastNetworkActivityMs + maxTimeout < currentTimestamp) { + NativeNetworkingImpl::State emitState; + emitState.isReadyToSendData = false; + emitState.isFailed = true; + strong->_stateUpdated(emitState); + } + + strong->checkConnectionTimeout(); + }, 1000); +} + +void NativeNetworkingImpl::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) { + assert(_threads->getNetworkThread()->IsCurrent()); + + _candidateGathered(candidate); +} + +void NativeNetworkingImpl::candidateGatheringState(cricket::IceTransportInternal *transport) { + assert(_threads->getNetworkThread()->IsCurrent()); +} + +void NativeNetworkingImpl::OnTransportWritableState_n(rtc::PacketTransportInternal *transport) { + assert(_threads->getNetworkThread()->IsCurrent()); + + UpdateAggregateStates_n(); +} +void NativeNetworkingImpl::OnTransportReceivingState_n(rtc::PacketTransportInternal *transport) { + assert(_threads->getNetworkThread()->IsCurrent()); + + 
UpdateAggregateStates_n(); +} + +void NativeNetworkingImpl::DtlsReadyToSend(bool isReadyToSend) { + UpdateAggregateStates_n(); + + if (isReadyToSend) { + const auto weak = std::weak_ptr(shared_from_this()); + _threads->getNetworkThread()->PostTask(RTC_FROM_HERE, [weak]() { + const auto strong = weak.lock(); + if (!strong) { + return; + } + strong->UpdateAggregateStates_n(); + }); + } +} + +void NativeNetworkingImpl::transportStateChanged(cricket::IceTransportInternal *transport) { + UpdateAggregateStates_n(); +} + +void NativeNetworkingImpl::transportReadyToSend(cricket::IceTransportInternal *transport) { + assert(_threads->getNetworkThread()->IsCurrent()); +} + +void NativeNetworkingImpl::transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t ×tamp, int unused) { + assert(_threads->getNetworkThread()->IsCurrent()); + + _lastNetworkActivityMs = rtc::TimeMillis(); +} + +void NativeNetworkingImpl::RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us, bool isUnresolved) { + if (_transportMessageReceived) { + _transportMessageReceived(*packet, isUnresolved); + } +} + +void NativeNetworkingImpl::OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us) { + if (_rtcpPacketReceived) { + _rtcpPacketReceived(*packet, packet_time_us); + } +} + +void NativeNetworkingImpl::UpdateAggregateStates_n() { + assert(_threads->getNetworkThread()->IsCurrent()); + + auto state = _transportChannel->GetIceTransportState(); + bool isConnected = false; + switch (state) { + case webrtc::IceTransportState::kConnected: + case webrtc::IceTransportState::kCompleted: + isConnected = true; + break; + default: + break; + } + + if (!_dtlsSrtpTransport->IsWritable(false)) { + isConnected = false; + } + + if (_isConnected != isConnected) { + _isConnected = isConnected; + + NativeNetworkingImpl::State emitState; + emitState.isReadyToSendData = isConnected; + _stateUpdated(emitState); + + if 
(_dataChannelInterface) { + _dataChannelInterface->updateIsConnected(isConnected); + } + } +} + +void NativeNetworkingImpl::sctpReadyToSendData() { +} + +void NativeNetworkingImpl::sctpDataReceived(const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) { +} + +} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.h b/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.h new file mode 100644 index 000000000..ead493519 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/NativeNetworkingImpl.h @@ -0,0 +1,142 @@ +#ifndef TGCALLS_NATIVE_NETWORKING_IMPL_H +#define TGCALLS_NATIVE_NETWORKING_IMPL_H + +#ifdef WEBRTC_WIN +// Compiler errors in conflicting Windows headers if not included here. +#include +#endif // WEBRTC_WIN + +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "api/candidate.h" +#include "media/base/media_channel.h" +#include "media/sctp/sctp_transport.h" +#include "pc/sctp_data_channel.h" + +#include +#include + +#include "Message.h" +#include "ThreadLocalObject.h" +#include "Instance.h" + +namespace rtc { +class BasicPacketSocketFactory; +class BasicNetworkManager; +class PacketTransportInternal; +struct NetworkRoute; +} // namespace rtc + +namespace cricket { +class BasicPortAllocator; +class P2PTransportChannel; +class IceTransportInternal; +class DtlsTransport; +} // namespace cricket + +namespace webrtc { +class BasicAsyncResolverFactory; +class TurnCustomizer; +class DtlsSrtpTransport; +class RtpTransport; +} // namespace webrtc + +namespace tgcalls { + +struct Message; +class SctpDataChannelProviderInterfaceImpl; +class Threads; + +class NativeNetworkingImpl : public sigslot::has_slots<>, public std::enable_shared_from_this { +public: + struct State { + bool isReadyToSendData = false; + bool isFailed = false; + }; + + struct Configuration { + bool isOutgoing = false; + bool enableStunMarking = false; + bool enableTCP = false; + bool 
enableP2P = false; + std::vector rtcServers; + std::function stateUpdated; + std::function candidateGathered; + std::function transportMessageReceived; + std::function rtcpPacketReceived; + std::function dataChannelStateUpdated; + std::function dataChannelMessageReceived; + std::shared_ptr threads; + }; + + static webrtc::CryptoOptions getDefaulCryptoOptions(); + + NativeNetworkingImpl(Configuration &&configuration); + ~NativeNetworkingImpl(); + + void start(); + void stop(); + + PeerIceParameters getLocalIceParameters(); + std::unique_ptr getLocalFingerprint(); + void setRemoteParams(PeerIceParameters const &remoteIceParameters, rtc::SSLFingerprint *fingerprint, std::string const &sslSetup); + void addCandidates(std::vector const &candidates); + + void sendDataChannelMessage(std::string const &message); + + webrtc::RtpTransport *getRtpTransport(); + +private: + void resetDtlsSrtpTransport(); + void checkConnectionTimeout(); + void candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate); + void candidateGatheringState(cricket::IceTransportInternal *transport); + void OnTransportWritableState_n(rtc::PacketTransportInternal *transport); + void OnTransportReceivingState_n(rtc::PacketTransportInternal *transport); + void transportStateChanged(cricket::IceTransportInternal *transport); + void transportReadyToSend(cricket::IceTransportInternal *transport); + void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t ×tamp, int unused); + void DtlsReadyToSend(bool DtlsReadyToSend); + void UpdateAggregateStates_n(); + void RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us, bool isUnresolved); + void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us); + + void sctpReadyToSendData(); + void sctpDataReceived(const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer); + + std::shared_ptr _threads; + bool 
_isOutgoing = false; + bool _enableStunMarking = false; + bool _enableTCP = false; + bool _enableP2P = false; + std::vector _rtcServers; + + std::function _stateUpdated; + std::function _candidateGathered; + std::function _transportMessageReceived; + std::function _rtcpPacketReceived; + std::function _dataChannelStateUpdated; + std::function _dataChannelMessageReceived; + + std::unique_ptr _socketFactory; + std::unique_ptr _networkManager; + std::unique_ptr _turnCustomizer; + std::unique_ptr _portAllocator; + std::unique_ptr _asyncResolverFactory; + std::unique_ptr _transportChannel; + std::unique_ptr _dtlsTransport; + std::unique_ptr _dtlsSrtpTransport; + + std::unique_ptr _dataChannelInterface; + + rtc::scoped_refptr _localCertificate; + PeerIceParameters _localIceParameters; + absl::optional _remoteIceParameters; + + bool _isConnected = false; + int64_t _lastNetworkActivityMs = 0; +}; + +} // namespace tgcalls + +#endif diff --git a/TMessagesProj/jni/voip/tgcalls/v2/Signaling.cpp b/TMessagesProj/jni/voip/tgcalls/v2/Signaling.cpp new file mode 100644 index 000000000..0de171cc8 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/Signaling.cpp @@ -0,0 +1,652 @@ +#include "v2/Signaling.h" + +#include "third-party/json11.hpp" + +#include "rtc_base/checks.h" + +#include + +namespace tgcalls { +namespace signaling { + +static std::string uint32ToString(uint32_t value) { + std::ostringstream stringStream; + stringStream << value; + return stringStream.str(); +} + +static uint32_t stringToUInt32(std::string const &string) { + std::stringstream stringStream(string); + uint32_t value = 0; + stringStream >> value; + return value; +} + +json11::Json::object SsrcGroup_serialize(SsrcGroup const &ssrcGroup) { + json11::Json::object object; + + json11::Json::array ssrcs; + for (auto ssrc : ssrcGroup.ssrcs) { + ssrcs.push_back(json11::Json(uint32ToString(ssrc))); + } + object.insert(std::make_pair("semantics", json11::Json(ssrcGroup.semantics))); + 
object.insert(std::make_pair("ssrcs", json11::Json(std::move(ssrcs)))); + + return object; +} + +absl::optional SsrcGroup_parse(json11::Json::object const &object) { + SsrcGroup result; + + const auto semantics = object.find("semantics"); + if (semantics == object.end() || !semantics->second.is_string()) { + return absl::nullopt; + } + result.semantics = semantics->second.string_value(); + + const auto ssrcs = object.find("ssrcs"); + if (ssrcs == object.end() || !ssrcs->second.is_array()) { + return absl::nullopt; + } + for (const auto &ssrc : ssrcs->second.array_items()) { + if (ssrc.is_string()) { + uint32_t parsedSsrc = stringToUInt32(ssrc.string_value()); + if (parsedSsrc == 0) { + return absl::nullopt; + } + result.ssrcs.push_back(parsedSsrc); + } else if (ssrc.is_number()) { + uint32_t parsedSsrc = (uint32_t)ssrc.number_value(); + result.ssrcs.push_back(parsedSsrc); + } else { + return absl::nullopt; + } + } + + return result; +} + +json11::Json::object FeedbackType_serialize(FeedbackType const &feedbackType) { + json11::Json::object object; + + object.insert(std::make_pair("type", json11::Json(feedbackType.type))); + object.insert(std::make_pair("subtype", json11::Json(feedbackType.subtype))); + + return object; +} + +absl::optional FeedbackType_parse(json11::Json::object const &object) { + FeedbackType result; + + const auto type = object.find("type"); + if (type == object.end() || !type->second.is_string()) { + return absl::nullopt; + } + result.type = type->second.string_value(); + + const auto subtype = object.find("subtype"); + if (subtype == object.end() || !subtype->second.is_string()) { + return absl::nullopt; + } + result.subtype = subtype->second.string_value(); + + return result; +} + +json11::Json::object RtpExtension_serialize(webrtc::RtpExtension const &rtpExtension) { + json11::Json::object object; + + object.insert(std::make_pair("id", json11::Json(rtpExtension.id))); + object.insert(std::make_pair("uri", json11::Json(rtpExtension.uri))); + + 
return object; +} + +absl::optional RtpExtension_parse(json11::Json::object const &object) { + const auto id = object.find("id"); + if (id == object.end() || !id->second.is_number()) { + return absl::nullopt; + } + + const auto uri = object.find("uri"); + if (uri == object.end() || !uri->second.is_string()) { + return absl::nullopt; + } + + return webrtc::RtpExtension(uri->second.string_value(), id->second.int_value()); +} + +json11::Json::object PayloadType_serialize(PayloadType const &payloadType) { + json11::Json::object object; + + object.insert(std::make_pair("id", json11::Json((int)payloadType.id))); + object.insert(std::make_pair("name", json11::Json(payloadType.name))); + object.insert(std::make_pair("clockrate", json11::Json((int)payloadType.clockrate))); + object.insert(std::make_pair("channels", json11::Json((int)payloadType.channels))); + + json11::Json::array feedbackTypes; + for (const auto &feedbackType : payloadType.feedbackTypes) { + feedbackTypes.push_back(FeedbackType_serialize(feedbackType)); + } + object.insert(std::make_pair("feedbackTypes", json11::Json(std::move(feedbackTypes)))); + + json11::Json::object parameters; + for (auto it : payloadType.parameters) { + parameters.insert(std::make_pair(it.first, json11::Json(it.second))); + } + object.insert(std::make_pair("parameters", json11::Json(std::move(parameters)))); + + return object; +} + +absl::optional PayloadType_parse(json11::Json::object const &object) { + PayloadType result; + + const auto id = object.find("id"); + if (id == object.end() || !id->second.is_number()) { + return absl::nullopt; + } + result.id = id->second.int_value(); + + const auto name = object.find("name"); + if (name == object.end() || !name->second.is_string()) { + return absl::nullopt; + } + result.name = name->second.string_value(); + + const auto clockrate = object.find("clockrate"); + if (clockrate == object.end() || !clockrate->second.is_number()) { + return absl::nullopt; + } + result.clockrate = 
clockrate->second.int_value(); + + const auto channels = object.find("channels"); + if (channels != object.end()) { + if (!channels->second.is_number()) { + return absl::nullopt; + } + result.channels = channels->second.int_value(); + } + + const auto feedbackTypes = object.find("feedbackTypes"); + if (feedbackTypes != object.end()) { + if (!feedbackTypes->second.is_array()) { + return absl::nullopt; + } + for (const auto &feedbackType : feedbackTypes->second.array_items()) { + if (!feedbackType.is_object()) { + return absl::nullopt; + } + if (const auto parsedFeedbackType = FeedbackType_parse(feedbackType.object_items())) { + result.feedbackTypes.push_back(parsedFeedbackType.value()); + } else { + return absl::nullopt; + } + } + } + + const auto parameters = object.find("parameters"); + if (parameters != object.end()) { + if (!parameters->second.is_object()) { + return absl::nullopt; + } + for (const auto &item : parameters->second.object_items()) { + if (!item.second.is_string()) { + return absl::nullopt; + } + result.parameters.push_back(std::make_pair(item.first, item.second.string_value())); + } + } + + return result; +} + +json11::Json::object MediaContent_serialize(MediaContent const &mediaContent) { + json11::Json::object object; + + object.insert(std::make_pair("ssrc", json11::Json(uint32ToString(mediaContent.ssrc)))); + + if (mediaContent.ssrcGroups.size() != 0) { + json11::Json::array ssrcGroups; + for (const auto &group : mediaContent.ssrcGroups) { + ssrcGroups.push_back(SsrcGroup_serialize(group)); + } + object.insert(std::make_pair("ssrcGroups", json11::Json(std::move(ssrcGroups)))); + } + + if (mediaContent.payloadTypes.size() != 0) { + json11::Json::array payloadTypes; + for (const auto &payloadType : mediaContent.payloadTypes) { + payloadTypes.push_back(PayloadType_serialize(payloadType)); + } + object.insert(std::make_pair("payloadTypes", json11::Json(std::move(payloadTypes)))); + } + + json11::Json::array rtpExtensions; + for (const auto 
&rtpExtension : mediaContent.rtpExtensions) { + rtpExtensions.push_back(RtpExtension_serialize(rtpExtension)); + } + object.insert(std::make_pair("rtpExtensions", json11::Json(std::move(rtpExtensions)))); + + return object; +} + +absl::optional MediaContent_parse(json11::Json::object const &object) { + MediaContent result; + + const auto ssrc = object.find("ssrc"); + if (ssrc == object.end()) { + return absl::nullopt; + } + if (ssrc->second.is_string()) { + result.ssrc = stringToUInt32(ssrc->second.string_value()); + } else if (ssrc->second.is_number()) { + result.ssrc = (uint32_t)ssrc->second.number_value(); + } else { + return absl::nullopt; + } + + const auto ssrcGroups = object.find("ssrcGroups"); + if (ssrcGroups != object.end()) { + if (!ssrcGroups->second.is_array()) { + return absl::nullopt; + } + for (const auto &ssrcGroup : ssrcGroups->second.array_items()) { + if (!ssrcGroup.is_object()) { + return absl::nullopt; + } + if (const auto parsedSsrcGroup = SsrcGroup_parse(ssrcGroup.object_items())) { + result.ssrcGroups.push_back(parsedSsrcGroup.value()); + } else { + return absl::nullopt; + } + } + } + + const auto payloadTypes = object.find("payloadTypes"); + if (payloadTypes != object.end()) { + if (!payloadTypes->second.is_array()) { + return absl::nullopt; + } + for (const auto &payloadType : payloadTypes->second.array_items()) { + if (!payloadType.is_object()) { + return absl::nullopt; + } + if (const auto parsedPayloadType = PayloadType_parse(payloadType.object_items())) { + result.payloadTypes.push_back(parsedPayloadType.value()); + } else { + return absl::nullopt; + } + } + } + + const auto rtpExtensions = object.find("rtpExtensions"); + if (rtpExtensions != object.end()) { + if (!rtpExtensions->second.is_array()) { + return absl::nullopt; + } + for (const auto &rtpExtension : rtpExtensions->second.array_items()) { + if (!rtpExtension.is_object()) { + return absl::nullopt; + } + if (const auto parsedRtpExtension = 
RtpExtension_parse(rtpExtension.object_items())) { + result.rtpExtensions.push_back(parsedRtpExtension.value()); + } else { + return absl::nullopt; + } + } + } + + return result; +} + +std::vector InitialSetupMessage_serialize(const InitialSetupMessage * const message) { + json11::Json::object object; + + object.insert(std::make_pair("@type", json11::Json("InitialSetup"))); + object.insert(std::make_pair("ufrag", json11::Json(message->ufrag))); + object.insert(std::make_pair("pwd", json11::Json(message->pwd))); + + json11::Json::array jsonFingerprints; + for (const auto &fingerprint : message->fingerprints) { + json11::Json::object jsonFingerprint; + jsonFingerprint.insert(std::make_pair("hash", json11::Json(fingerprint.hash))); + jsonFingerprint.insert(std::make_pair("setup", json11::Json(fingerprint.setup))); + jsonFingerprint.insert(std::make_pair("fingerprint", json11::Json(fingerprint.fingerprint))); + jsonFingerprints.emplace_back(std::move(jsonFingerprint)); + } + object.insert(std::make_pair("fingerprints", json11::Json(std::move(jsonFingerprints)))); + + if (const auto audio = message->audio) { + object.insert(std::make_pair("audio", json11::Json(MediaContent_serialize(audio.value())))); + } + + if (const auto video = message->video) { + object.insert(std::make_pair("video", json11::Json(MediaContent_serialize(video.value())))); + } + + auto json = json11::Json(std::move(object)); + std::string result = json.dump(); + return std::vector(result.begin(), result.end()); +} + +absl::optional InitialSetupMessage_parse(json11::Json::object const &object) { + const auto ufrag = object.find("ufrag"); + if (ufrag == object.end() || !ufrag->second.is_string()) { + return absl::nullopt; + } + const auto pwd = object.find("pwd"); + if (pwd == object.end() || !pwd->second.is_string()) { + return absl::nullopt; + } + const auto fingerprints = object.find("fingerprints"); + if (fingerprints == object.end() || !fingerprints->second.is_array()) { + return absl::nullopt; + 
} + std::vector parsedFingerprints; + for (const auto &fingerprintObject : fingerprints->second.array_items()) { + if (!fingerprintObject.is_object()) { + return absl::nullopt; + } + const auto hash = fingerprintObject.object_items().find("hash"); + if (hash == fingerprintObject.object_items().end() || !hash->second.is_string()) { + return absl::nullopt; + } + const auto setup = fingerprintObject.object_items().find("setup"); + if (setup == fingerprintObject.object_items().end() || !setup->second.is_string()) { + return absl::nullopt; + } + const auto fingerprint = fingerprintObject.object_items().find("fingerprint"); + if (fingerprint == fingerprintObject.object_items().end() || !fingerprint->second.is_string()) { + return absl::nullopt; + } + + DtlsFingerprint parsedFingerprint; + parsedFingerprint.hash = hash->second.string_value(); + parsedFingerprint.setup = setup->second.string_value(); + parsedFingerprint.fingerprint = fingerprint->second.string_value(); + + parsedFingerprints.push_back(std::move(parsedFingerprint)); + } + + InitialSetupMessage message; + message.ufrag = ufrag->second.string_value(); + message.pwd = pwd->second.string_value(); + message.fingerprints = std::move(parsedFingerprints); + + const auto audio = object.find("audio"); + if (audio != object.end()) { + if (!audio->second.is_object()) { + return absl::nullopt; + } + if (const auto parsedAudio = MediaContent_parse(audio->second.object_items())) { + message.audio = parsedAudio.value(); + } else { + return absl::nullopt; + } + } + + const auto video = object.find("video"); + if (video != object.end()) { + if (!video->second.is_object()) { + return absl::nullopt; + } + if (const auto parsedVideo = MediaContent_parse(video->second.object_items())) { + message.video = parsedVideo.value(); + } else { + return absl::nullopt; + } + } + + return message; +} + +json11::Json::object ConnectionAddress_serialize(ConnectionAddress const &connectionAddress) { + json11::Json::object object; + + 
object.insert(std::make_pair("ip", json11::Json(connectionAddress.ip))); + object.insert(std::make_pair("port", json11::Json(connectionAddress.port))); + + return object; +} + +absl::optional ConnectionAddress_parse(json11::Json::object const &object) { + const auto ip = object.find("ip"); + if (ip == object.end() || !ip->second.is_string()) { + return absl::nullopt; + } + + const auto port = object.find("port"); + if (port == object.end() || !port->second.is_number()) { + return absl::nullopt; + } + + ConnectionAddress address; + address.ip = ip->second.string_value(); + address.port = port->second.int_value(); + return address; +} + +std::vector CandidatesMessage_serialize(const CandidatesMessage * const message) { + json11::Json::array candidates; + for (const auto &candidate : message->iceCandidates) { + json11::Json::object candidateObject; + + candidateObject.insert(std::make_pair("sdpString", json11::Json(candidate.sdpString))); + + candidates.emplace_back(std::move(candidateObject)); + } + + json11::Json::object object; + + object.insert(std::make_pair("@type", json11::Json("Candidates"))); + object.insert(std::make_pair("candidates", json11::Json(std::move(candidates)))); + + auto json = json11::Json(std::move(object)); + std::string result = json.dump(); + return std::vector(result.begin(), result.end()); +} + +absl::optional CandidatesMessage_parse(json11::Json::object const &object) { + const auto candidates = object.find("candidates"); + if (candidates == object.end() || !candidates->second.is_array()) { + return absl::nullopt; + } + + std::vector parsedCandidates; + for (const auto &candidateObject : candidates->second.array_items()) { + if (!candidateObject.is_object()) { + return absl::nullopt; + } + + IceCandidate candidate; + + const auto sdpString = candidateObject.object_items().find("sdpString"); + if (sdpString == candidateObject.object_items().end() || !sdpString->second.is_string()) { + return absl::nullopt; + } + candidate.sdpString = 
sdpString->second.string_value(); + + parsedCandidates.push_back(std::move(candidate)); + } + + CandidatesMessage message; + message.iceCandidates = std::move(parsedCandidates); + + return message; +} + +std::vector MediaStateMessage_serialize(const MediaStateMessage * const message) { + json11::Json::object object; + + object.insert(std::make_pair("@type", json11::Json("MediaState"))); + object.insert(std::make_pair("muted", json11::Json(message->isMuted))); + object.insert(std::make_pair("lowBattery", json11::Json(message->isBatteryLow))); + + std::string videoStateValue; + switch (message->videoState) { + case MediaStateMessage::VideoState::Inactive: { + videoStateValue = "inactive"; + break; + } + case MediaStateMessage::VideoState::Suspended: { + videoStateValue = "suspended"; + break; + } + case MediaStateMessage::VideoState::Active: { + videoStateValue = "active"; + break; + } + default: { + RTC_FATAL() << "Unknown videoState"; + break; + } + } + object.insert(std::make_pair("videoState", json11::Json(videoStateValue))); + + int videoRotationValue = 0; + switch (message->videoRotation) { + case MediaStateMessage::VideoRotation::Rotation0: { + videoRotationValue = 0; + break; + } + case MediaStateMessage::VideoRotation::Rotation90: { + videoRotationValue = 90; + break; + } + case MediaStateMessage::VideoRotation::Rotation180: { + videoRotationValue = 180; + break; + } + case MediaStateMessage::VideoRotation::Rotation270: { + videoRotationValue = 270; + break; + } + default: { + RTC_FATAL() << "Unknown videoRotation"; + break; + } + } + object.insert(std::make_pair("videoRotation", json11::Json(videoRotationValue))); + + auto json = json11::Json(std::move(object)); + std::string result = json.dump(); + return std::vector(result.begin(), result.end()); +} + +absl::optional MediaStateMessage_parse(json11::Json::object const &object) { + MediaStateMessage message; + + const auto muted = object.find("muted"); + if (muted != object.end()) { + if 
(!muted->second.is_bool()) { + return absl::nullopt; + } + message.isMuted = muted->second.bool_value(); + } + + const auto lowBattery = object.find("lowBattery"); + if (lowBattery != object.end()) { + if (!lowBattery->second.is_bool()) { + return absl::nullopt; + } + message.isBatteryLow = lowBattery->second.bool_value(); + } + + const auto videoState = object.find("videoState"); + if (videoState != object.end()) { + if (!videoState->second.is_string()) { + return absl::nullopt; + } + if (videoState->second.string_value() == "inactive") { + message.videoState = MediaStateMessage::VideoState::Inactive; + } else if (videoState->second.string_value() == "suspended") { + message.videoState = MediaStateMessage::VideoState::Suspended; + } else if (videoState->second.string_value() == "active") { + message.videoState = MediaStateMessage::VideoState::Active; + } + } else { + message.videoState = MediaStateMessage::VideoState::Inactive; + } + + const auto videoRotation = object.find("videoRotation"); + if (videoRotation != object.end()) { + if (!videoRotation->second.is_number()) { + return absl::nullopt; + } + if (videoState->second.int_value() == 0) { + message.videoRotation = MediaStateMessage::VideoRotation::Rotation0; + } else if (videoState->second.int_value() == 90) { + message.videoRotation = MediaStateMessage::VideoRotation::Rotation90; + } else if (videoState->second.int_value() == 180) { + message.videoRotation = MediaStateMessage::VideoRotation::Rotation180; + } else if (videoState->second.int_value() == 270) { + message.videoRotation = MediaStateMessage::VideoRotation::Rotation270; + } else { + message.videoRotation = MediaStateMessage::VideoRotation::Rotation0; + } + } else { + message.videoRotation = MediaStateMessage::VideoRotation::Rotation0; + } + + return message; +} + +std::vector Message::serialize() const { + if (const auto initialSetup = absl::get_if(&data)) { + return InitialSetupMessage_serialize(initialSetup); + } else if (const auto candidates = 
absl::get_if(&data)) { + return CandidatesMessage_serialize(candidates); + } else if (const auto mediaState = absl::get_if(&data)) { + return MediaStateMessage_serialize(mediaState); + } else { + return {}; + } +} + +absl::optional Message::parse(const std::vector &data) { + std::string parsingError; + auto json = json11::Json::parse(std::string(data.begin(), data.end()), parsingError); + if (json.type() != json11::Json::OBJECT) { + return absl::nullopt; + } + + auto type = json.object_items().find("@type"); + if (type == json.object_items().end()) { + return absl::nullopt; + } + if (!type->second.is_string()) { + return absl::nullopt; + } + if (type->second.string_value() == "InitialSetup") { + auto parsed = InitialSetupMessage_parse(json.object_items()); + if (!parsed) { + return absl::nullopt; + } + Message message; + message.data = std::move(parsed.value()); + return message; + } else if (type->second.string_value() == "Candidates") { + auto parsed = CandidatesMessage_parse(json.object_items()); + if (!parsed) { + return absl::nullopt; + } + Message message; + message.data = std::move(parsed.value()); + return message; + } else if (type->second.string_value() == "MediaState") { + auto parsed = MediaStateMessage_parse(json.object_items()); + if (!parsed) { + return absl::nullopt; + } + Message message; + message.data = std::move(parsed.value()); + return message; + } else { + return absl::nullopt; + } +} + +} // namespace signaling + +} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/v2/Signaling.h b/TMessagesProj/jni/voip/tgcalls/v2/Signaling.h new file mode 100644 index 000000000..303d46afa --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/Signaling.h @@ -0,0 +1,103 @@ +#ifndef TGCALLS_SIGNALING_H +#define TGCALLS_SIGNALING_H + +#include +#include + +#include "absl/types/variant.h" +#include "absl/types/optional.h" +#include "api/rtp_parameters.h" + +namespace tgcalls { + +namespace signaling { + +struct DtlsFingerprint { + std::string 
hash; + std::string setup; + std::string fingerprint; +}; + +struct ConnectionAddress { + std::string ip; + int port = 0; +}; + +struct IceCandidate { + std::string sdpString; +}; + +struct SsrcGroup { + std::vector ssrcs; + std::string semantics; +}; + +struct FeedbackType { + std::string type; + std::string subtype; +}; + +struct PayloadType { + uint32_t id = 0; + std::string name; + uint32_t clockrate = 0; + uint32_t channels = 0; + std::vector feedbackTypes; + std::vector> parameters; +}; + +struct MediaContent { + uint32_t ssrc = 0; + std::vector ssrcGroups; + std::vector payloadTypes; + std::vector rtpExtensions; +}; + +struct InitialSetupMessage { + std::string ufrag; + std::string pwd; + std::vector fingerprints; + absl::optional audio; + absl::optional video; +}; + +struct CandidatesMessage { + std::vector iceCandidates; +}; + +struct MediaStateMessage { + enum class VideoState { + Inactive, + Suspended, + Active + }; + + enum class VideoRotation { + Rotation0, + Rotation90, + Rotation180, + Rotation270 + }; + + bool isMuted = false; + VideoState videoState = VideoState::Inactive; + VideoRotation videoRotation = VideoRotation::Rotation0; + bool isBatteryLow = false; + +}; + +struct Message { + absl::variant< + InitialSetupMessage, + CandidatesMessage, + MediaStateMessage> data; + + std::vector serialize() const; + static absl::optional parse(const std::vector &data); +}; + +}; + +} // namespace tgcalls + +#endif diff --git a/TMessagesProj/jni/voip/tgcalls/v2/SignalingEncryption.cpp b/TMessagesProj/jni/voip/tgcalls/v2/SignalingEncryption.cpp new file mode 100644 index 000000000..7ad3304e9 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/SignalingEncryption.cpp @@ -0,0 +1,22 @@ +#include "v2/SignalingEncryption.h" + +namespace tgcalls { + +SignalingEncryption::SignalingEncryption(EncryptionKey const &encryptionKey) { + _connection.reset(new EncryptedConnection(EncryptedConnection::Type::Signaling, encryptionKey, [](int, int) { + })); +} + 
+SignalingEncryption::~SignalingEncryption() { + +} + +absl::optional SignalingEncryption::encryptOutgoing(std::vector const &data) { + return _connection->encryptRawPacket(rtc::CopyOnWriteBuffer(data.data(), data.size())); +} + +absl::optional SignalingEncryption::decryptIncoming(std::vector const &data) { + return _connection->decryptRawPacket(rtc::CopyOnWriteBuffer(data.data(), data.size())); +} + +} // namespace tgcalls diff --git a/TMessagesProj/jni/voip/tgcalls/v2/SignalingEncryption.h b/TMessagesProj/jni/voip/tgcalls/v2/SignalingEncryption.h new file mode 100644 index 000000000..1f9dee6d7 --- /dev/null +++ b/TMessagesProj/jni/voip/tgcalls/v2/SignalingEncryption.h @@ -0,0 +1,23 @@ +#ifndef TGCALLS_SIGNALING_ENCRYPTION_H +#define TGCALLS_SIGNALING_ENCRYPTION_H + +#include "Instance.h" +#include "EncryptedConnection.h" + +namespace tgcalls { + +class SignalingEncryption { +public: + SignalingEncryption(EncryptionKey const &encryptionKey); + ~SignalingEncryption(); + + absl::optional encryptOutgoing(std::vector const &data); + absl::optional decryptIncoming(std::vector const &data); + +private: + std::unique_ptr _connection; +}; + +} // namespace tgcalls + +#endif diff --git a/TMessagesProj/jni/voip/webrtc/api/DESIGN.md b/TMessagesProj/jni/voip/webrtc/api/DESIGN.md deleted file mode 100644 index 0a2f36eb2..000000000 --- a/TMessagesProj/jni/voip/webrtc/api/DESIGN.md +++ /dev/null @@ -1,71 +0,0 @@ -# Design considerations - -The header files in this directory form the API to the WebRTC library -that is intended for client applications' use. - -This API is designed to be used on top of a multithreaded runtime. - -The public API functions are designed to be called from a single thread* -(the "client thread"), and can do internal dispatching to the thread -where activity needs to happen. 
Those threads can be passed in by the -client, typically as arguments to factory constructors, or they can be -created by the library if factory constructors that don't take threads -are used. - -Many of the functions are designed to be used in an asynchronous manner, -where a function is called to initiate an activity, and a callback will -be called when the activity is completed, or a handler function will -be called on an observer object when interesting events happen. - -Note: Often, even functions that look like simple functions (such as -information query functions) will need to jump between threads to perform -their function - which means that things may happen on other threads -between calls; writing "increment(x); increment(x)" is not a safe -way to increment X by exactly two, since the increment function may have -jumped to another thread that already had a queue of things to handle, -causing large amounts of other activity to have intervened between -the two calls. - -(*) The term "thread" is used here to denote any construct that guarantees -sequential execution - other names for such constructs are task runners -and sequenced task queues. - -# Client threads and callbacks - -At the moment, the API does not give any guarantee on which thread* the -callbacks and events are called on. So it's best to write all callback -and event handlers like this (pseudocode): -
-void ObserverClass::Handler(event) {
-  if (!called_on_client_thread()) {
-    dispatch_to_client_thread(bind(handler(event)));
-    return;
-  }
-  // Process event, we're now on the right thread
-}
-
-In the future, the implementation may change to always call the callbacks -and event handlers on the client thread. - -# Implementation considerations - -The C++ classes that are part of the public API are also used to derive -classes that form part of the implementation. - -This should not directly concern users of the API, but may matter if one -wants to look at how the WebRTC library is implemented, or for legacy code -that directly accesses internal APIs. - -Many APIs are defined in terms of a "proxy object", which will do a blocking -dispatch of the function to another thread, and an "implementation object" -which will do the actual -work, but can only be created, invoked and destroyed on its "home thread". - -Usually, the classes are named "xxxInterface" (in api/), "xxxProxy" and -"xxx" (not in api/). WebRTC users should only need to depend on the files -in api/. In many cases, the "xxxProxy" and "xxx" classes are subclasses -of "xxxInterface", but this property is an implementation feature only, -and should not be relied upon. - -The threading properties of these internal APIs are NOT documented in -this note, and need to be understood by inspecting those classes. diff --git a/TMessagesProj/jni/voip/webrtc/api/OWNERS b/TMessagesProj/jni/voip/webrtc/api/OWNERS index 4cf391517..e18667970 100644 --- a/TMessagesProj/jni/voip/webrtc/api/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/api/OWNERS @@ -2,7 +2,6 @@ crodbro@webrtc.org deadbeef@webrtc.org hta@webrtc.org juberti@webrtc.org -kwiberg@webrtc.org magjed@webrtc.org perkj@webrtc.org tkchin@webrtc.org @@ -11,4 +10,16 @@ tommi@webrtc.org per-file peer_connection*=hbos@webrtc.org per-file DEPS=mbonadei@webrtc.org -per-file DEPS=kwiberg@webrtc.org + +# Please keep this list in sync with Chromium's //base/metrics/OWNERS and +# send a CL when you notice any difference. 
+# Even if people in the list below cannot formally grant +1 on WebRTC, it +# is good to get their LGTM before sending the CL to one of the folder OWNERS. +per-file uma_metrics.h=asvitkine@chromium.org +per-file uma_metrics.h=bcwhite@chromium.org +per-file uma_metrics.h=caitlinfischer@google.com +per-file uma_metrics.h=holte@chromium.org +per-file uma_metrics.h=isherman@chromium.org +per-file uma_metrics.h=jwd@chromium.org +per-file uma_metrics.h=mpearson@chromium.org +per-file uma_metrics.h=rkaplow@chromium.org diff --git a/TMessagesProj/jni/voip/webrtc/api/README.md b/TMessagesProj/jni/voip/webrtc/api/README.md index 4cc799362..7c1a27f51 100644 --- a/TMessagesProj/jni/voip/webrtc/api/README.md +++ b/TMessagesProj/jni/voip/webrtc/api/README.md @@ -1,6 +1,6 @@ # How to write code in the `api/` directory -Mostly, just follow the regular [style guide](../style-guide.md), but: +Mostly, just follow the regular [style guide](../g3doc/style-guide.md), but: * Note that `api/` code is not exempt from the “`.h` and `.cc` files come in pairs” rule, so if you declare something in `api/path/to/foo.h`, it should be @@ -17,7 +17,7 @@ it from a `.cc` file, so that users of our API headers won’t transitively For headers in `api/` that need to refer to non-public types, forward declarations are often a lesser evil than including non-public header files. The -usual [rules](../style-guide.md#forward-declarations) still apply, though. +usual [rules](../g3doc/style-guide.md#forward-declarations) still apply, though. `.cc` files in `api/` should preferably be kept reasonably small. If a substantial implementation is needed, consider putting it with our non-public diff --git a/TMessagesProj/jni/voip/webrtc/api/async_dns_resolver.h b/TMessagesProj/jni/voip/webrtc/api/async_dns_resolver.h new file mode 100644 index 000000000..eabb41c11 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/async_dns_resolver.h @@ -0,0 +1,86 @@ +/* + * Copyright 2021 The WebRTC Project Authors. 
All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ASYNC_DNS_RESOLVER_H_ +#define API_ASYNC_DNS_RESOLVER_H_ + +#include +#include + +#include "rtc_base/socket_address.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// This interface defines the methods to resolve a hostname asynchronously. +// The AsyncDnsResolverInterface class encapsulates a single name query. +// +// Usage: +// std::unique_ptr resolver = +// factory->Create(address-to-be-resolved, [r = resolver.get()]() { +// if (r->result.GetResolvedAddress(AF_INET, &addr) { +// // success +// } else { +// // failure +// error = r->result().GetError(); +// } +// // Release resolver. +// resolver_list.erase(std::remove_if(resolver_list.begin(), +// resolver_list.end(), +// [](refptr) { refptr.get() == r; }); +// }); +// resolver_list.push_back(std::move(resolver)); + +class AsyncDnsResolverResult { + public: + virtual ~AsyncDnsResolverResult() = default; + // Returns true iff the address from |Start| was successfully resolved. + // If the address was successfully resolved, sets |addr| to a copy of the + // address from |Start| with the IP address set to the top most resolved + // address of |family| (|addr| will have both hostname and the resolved ip). + virtual bool GetResolvedAddress(int family, + rtc::SocketAddress* addr) const = 0; + // Returns error from resolver. + virtual int GetError() const = 0; +}; + +class RTC_EXPORT AsyncDnsResolverInterface { + public: + virtual ~AsyncDnsResolverInterface() = default; + + // Start address resolution of the hostname in |addr|. 
+ virtual void Start(const rtc::SocketAddress& addr, + std::function callback) = 0; + virtual const AsyncDnsResolverResult& result() const = 0; +}; + +// An abstract factory for creating AsyncDnsResolverInterfaces. This allows +// client applications to provide WebRTC with their own mechanism for +// performing DNS resolution. +class AsyncDnsResolverFactoryInterface { + public: + virtual ~AsyncDnsResolverFactoryInterface() = default; + + // Creates an AsyncDnsResolver and starts resolving the name. The callback + // will be called when resolution is finished. + // The callback will be called on the thread that the caller runs on. + virtual std::unique_ptr CreateAndResolve( + const rtc::SocketAddress& addr, + std::function callback) = 0; + // Creates an AsyncDnsResolver and does not start it. + // For backwards compatibility, will be deprecated and removed. + // One has to do a separate Start() call on the + // resolver to start name resolution. + virtual std::unique_ptr Create() = 0; +}; + +} // namespace webrtc + +#endif // API_ASYNC_DNS_RESOLVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.cc b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.cc index aeb809efa..4cb75043d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.cc @@ -153,6 +153,7 @@ bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) { res = res & Limit(&c->filter.config_change_duration_blocks, 0, 100000); res = res & Limit(&c->filter.initial_state_seconds, 0.f, 100.f); + res = res & Limit(&c->filter.coarse_reset_hangover_blocks, 0, 250000); res = res & Limit(&c->erle.min, 1.f, 100000.f); res = res & Limit(&c->erle.max_l, 1.f, 100000.f); diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h index 3ed11ff8b..d4a04cd2e 100644 --- 
a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h @@ -43,6 +43,7 @@ struct RTC_EXPORT EchoCanceller3Config { size_t hysteresis_limit_blocks = 1; size_t fixed_capture_delay_samples = 0; float delay_estimate_smoothing = 0.7f; + float delay_estimate_smoothing_delay_found = 0.7f; float delay_candidate_detection_threshold = 0.2f; struct DelaySelectionThresholds { int initial; @@ -86,9 +87,11 @@ struct RTC_EXPORT EchoCanceller3Config { size_t config_change_duration_blocks = 250; float initial_state_seconds = 2.5f; + int coarse_reset_hangover_blocks = 25; bool conservative_initial_phase = false; bool enable_coarse_filter_output_usage = true; bool use_linear_filter = true; + bool high_pass_filter_echo_reference = false; bool export_linear_aec_output = false; } filter; @@ -107,6 +110,7 @@ struct RTC_EXPORT EchoCanceller3Config { float default_len = 0.83f; bool echo_can_saturate = true; bool bounded_erl = false; + bool erle_onset_compensation_in_dominant_nearend = false; } ep_strength; struct EchoAudibility { diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc index 907b47271..39713a1fb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config_json.cc @@ -191,6 +191,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.delay.fixed_capture_delay_samples); ReadParam(section, "delay_estimate_smoothing", &cfg.delay.delay_estimate_smoothing); + ReadParam(section, "delay_estimate_smoothing_delay_found", + &cfg.delay.delay_estimate_smoothing_delay_found); ReadParam(section, "delay_candidate_detection_threshold", &cfg.delay.delay_candidate_detection_threshold); @@ -223,11 +225,15 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, 
&cfg.filter.config_change_duration_blocks); ReadParam(section, "initial_state_seconds", &cfg.filter.initial_state_seconds); + ReadParam(section, "coarse_reset_hangover_blocks", + &cfg.filter.coarse_reset_hangover_blocks); ReadParam(section, "conservative_initial_phase", &cfg.filter.conservative_initial_phase); ReadParam(section, "enable_coarse_filter_output_usage", &cfg.filter.enable_coarse_filter_output_usage); ReadParam(section, "use_linear_filter", &cfg.filter.use_linear_filter); + ReadParam(section, "high_pass_filter_echo_reference", + &cfg.filter.high_pass_filter_echo_reference); ReadParam(section, "export_linear_aec_output", &cfg.filter.export_linear_aec_output); } @@ -249,6 +255,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, ReadParam(section, "default_len", &cfg.ep_strength.default_len); ReadParam(section, "echo_can_saturate", &cfg.ep_strength.echo_can_saturate); ReadParam(section, "bounded_erl", &cfg.ep_strength.bounded_erl); + ReadParam(section, "erle_onset_compensation_in_dominant_nearend", + &cfg.ep_strength.erle_onset_compensation_in_dominant_nearend); } if (rtc::GetValueFromJsonObject(aec3_root, "echo_audibility", §ion)) { @@ -419,6 +427,8 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { << config.delay.fixed_capture_delay_samples << ","; ost << "\"delay_estimate_smoothing\": " << config.delay.delay_estimate_smoothing << ","; + ost << "\"delay_estimate_smoothing_delay_found\": " + << config.delay.delay_estimate_smoothing_delay_found << ","; ost << "\"delay_candidate_detection_threshold\": " << config.delay.delay_candidate_detection_threshold << ","; @@ -502,6 +512,8 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { << config.filter.config_change_duration_blocks << ","; ost << "\"initial_state_seconds\": " << config.filter.initial_state_seconds << ","; + ost << "\"coarse_reset_hangover_blocks\": " + << config.filter.coarse_reset_hangover_blocks << ","; ost << 
"\"conservative_initial_phase\": " << (config.filter.conservative_initial_phase ? "true" : "false") << ","; ost << "\"enable_coarse_filter_output_usage\": " @@ -509,6 +521,9 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { << ","; ost << "\"use_linear_filter\": " << (config.filter.use_linear_filter ? "true" : "false") << ","; + ost << "\"high_pass_filter_echo_reference\": " + << (config.filter.high_pass_filter_echo_reference ? "true" : "false") + << ","; ost << "\"export_linear_aec_output\": " << (config.filter.export_linear_aec_output ? "true" : "false"); @@ -533,8 +548,11 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { ost << "\"echo_can_saturate\": " << (config.ep_strength.echo_can_saturate ? "true" : "false") << ","; ost << "\"bounded_erl\": " - << (config.ep_strength.bounded_erl ? "true" : "false"); - + << (config.ep_strength.bounded_erl ? "true" : "false") << ","; + ost << "\"erle_onset_compensation_in_dominant_nearend\": " + << (config.ep_strength.erle_onset_compensation_in_dominant_nearend + ? "true" + : "false"); ost << "},"; ost << "\"echo_audibility\": {"; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_control.h b/TMessagesProj/jni/voip/webrtc/api/audio/echo_control.h index 8d567bf2b..74fbc27b1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_control.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_control.h @@ -48,6 +48,13 @@ class EchoControl { // Provides an optional external estimate of the audio buffer delay. virtual void SetAudioBufferDelay(int delay_ms) = 0; + // Specifies whether the capture output will be used. The purpose of this is + // to allow the echo controller to deactivate some of the processing when the + // resulting output is anyway not used, for instance when the endpoint is + // muted. + // TODO(b/177830919): Make pure virtual. + virtual void SetCaptureOutputUsage(bool capture_output_used) {} + // Returns wheter the signal is altered. 
virtual bool ActiveProcessing() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_detector_creator.cc b/TMessagesProj/jni/voip/webrtc/api/audio/echo_detector_creator.cc index 4c3d9e61f..04215b0de 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_detector_creator.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_detector_creator.cc @@ -15,7 +15,7 @@ namespace webrtc { rtc::scoped_refptr CreateEchoDetector() { - return new rtc::RefCountedObject(); + return rtc::make_ref_counted(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/OWNERS b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/OWNERS index fe417854d..77e9d0022 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/OWNERS @@ -1 +1,2 @@ -kwiberg@webrtc.org +minyue@webrtc.org +henrik.lundin@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h index 557ffe275..ce235946d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h @@ -136,7 +136,7 @@ class AudioDecoder { // with the decoded audio on either side of the concealment. // Note: The default implementation of GeneratePlc will be deleted soon. All // implementations must provide their own, which can be a simple as a no-op. - // TODO(bugs.webrtc.org/9676): Remove default impementation. + // TODO(bugs.webrtc.org/9676): Remove default implementation. 
virtual void GeneratePlc(size_t requested_samples_per_channel, rtc::BufferT* concealment_audio); diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory_template.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory_template.h index e628cb62d..388668d4c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory_template.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder_factory_template.h @@ -123,9 +123,8 @@ rtc::scoped_refptr CreateAudioDecoderFactory() { static_assert(sizeof...(Ts) >= 1, "Caller must give at least one template parameter"); - return rtc::scoped_refptr( - new rtc::RefCountedObject< - audio_decoder_factory_template_impl::AudioDecoderFactoryT>()); + return rtc::make_ref_counted< + audio_decoder_factory_template_impl::AudioDecoderFactoryT>(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder.h index fd2d94886..92e42cf10 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder.h @@ -16,12 +16,12 @@ #include #include +#include "absl/base/attributes.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/call/bitrate_allocation.h" #include "api/units/time_delta.h" #include "rtc_base/buffer.h" -#include "rtc_base/deprecation.h" namespace webrtc { @@ -182,12 +182,11 @@ class AudioEncoder { // implementation does nothing. virtual void SetMaxPlaybackRate(int frequency_hz); - // This is to be deprecated. Please use |OnReceivedTargetAudioBitrate| - // instead. // Tells the encoder what average bitrate we'd like it to produce. The // encoder is free to adjust or disregard the given bitrate (the default // implementation does the latter). 
- RTC_DEPRECATED virtual void SetTargetBitrate(int target_bps); + ABSL_DEPRECATED("Use OnReceivedTargetAudioBitrate instead") + virtual void SetTargetBitrate(int target_bps); // Causes this encoder to let go of any other encoders it contains, and // returns a pointer to an array where they are stored (which is required to @@ -210,7 +209,8 @@ class AudioEncoder { virtual void OnReceivedUplinkPacketLossFraction( float uplink_packet_loss_fraction); - RTC_DEPRECATED virtual void OnReceivedUplinkRecoverablePacketLossFraction( + ABSL_DEPRECATED("") + virtual void OnReceivedUplinkRecoverablePacketLossFraction( float uplink_recoverable_packet_loss_fraction); // Provides target audio bitrate to this encoder to allow it to adapt. diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory_template.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory_template.h index 74cb05342..cdc7defd2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory_template.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_encoder_factory_template.h @@ -142,9 +142,8 @@ rtc::scoped_refptr CreateAudioEncoderFactory() { static_assert(sizeof...(Ts) >= 1, "Caller must give at least one template parameter"); - return rtc::scoped_refptr( - new rtc::RefCountedObject< - audio_encoder_factory_template_impl::AudioEncoderFactoryT>()); + return rtc::make_ref_counted< + audio_encoder_factory_template_impl::AudioEncoderFactoryT>(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc index f01caf11b..0052c429b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc @@ -38,7 +38,7 @@ operator=(const 
AudioEncoderMultiChannelOpusConfig&) = default; bool AudioEncoderMultiChannelOpusConfig::IsOk() const { if (frame_size_ms <= 0 || frame_size_ms % 10 != 0) return false; - if (num_channels < 0 || num_channels >= 255) { + if (num_channels >= 255) { return false; } if (bitrate_bps < kMinBitrateBps || bitrate_bps > kMaxBitrateBps) @@ -47,7 +47,7 @@ bool AudioEncoderMultiChannelOpusConfig::IsOk() const { return false; // Check the lengths: - if (num_channels < 0 || num_streams < 0 || coupled_streams < 0) { + if (num_streams < 0 || coupled_streams < 0) { return false; } if (num_streams < coupled_streams) { diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus_config.cc b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus_config.cc index 2f36d0261..0e6f55ee6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus_config.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/opus/audio_encoder_opus_config.cc @@ -61,7 +61,7 @@ bool AudioEncoderOpusConfig::IsOk() const { // well; we can add support for them when needed.) return false; } - if (num_channels < 0 || num_channels >= 255) { + if (num_channels >= 255) { return false; } if (!bitrate_bps) diff --git a/TMessagesProj/jni/voip/webrtc/api/call/transport.h b/TMessagesProj/jni/voip/webrtc/api/call/transport.h index 2a2a87a5f..8bff28825 100644 --- a/TMessagesProj/jni/voip/webrtc/api/call/transport.h +++ b/TMessagesProj/jni/voip/webrtc/api/call/transport.h @@ -14,7 +14,8 @@ #include #include -#include +#include "api/ref_counted_base.h" +#include "api/scoped_refptr.h" namespace webrtc { @@ -30,7 +31,7 @@ struct PacketOptions { int packet_id = -1; // Additional data bound to the RTP packet for use in application code, // outside of WebRTC. - std::vector application_data; + rtc::scoped_refptr additional_data; // Whether this is a retransmission of an earlier packet. 
bool is_retransmit = false; bool included_in_feedback = false; diff --git a/TMessagesProj/jni/voip/webrtc/api/data_channel_interface.h b/TMessagesProj/jni/voip/webrtc/api/data_channel_interface.h index 5b2b1263a..56bb6c98f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/data_channel_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/data_channel_interface.h @@ -44,11 +44,13 @@ struct DataChannelInit { // // Cannot be set along with |maxRetransmits|. // This is called |maxPacketLifeTime| in the WebRTC JS API. + // Negative values are ignored, and positive values are clamped to [0-65535] absl::optional maxRetransmitTime; // The max number of retransmissions. // // Cannot be set along with |maxRetransmitTime|. + // Negative values are ignored, and positive values are clamped to [0-65535] absl::optional maxRetransmits; // This is set by the application and opaque to the WebRTC implementation. diff --git a/TMessagesProj/jni/voip/webrtc/api/ice_transport_factory.cc b/TMessagesProj/jni/voip/webrtc/api/ice_transport_factory.cc index c32d7d2e1..26ef88bf1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/ice_transport_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/ice_transport_factory.cc @@ -14,6 +14,7 @@ #include #include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/port_allocator.h" #include "rtc_base/thread.h" @@ -41,7 +42,7 @@ class IceTransportWithTransportChannel : public IceTransportInterface { } private: - const rtc::ThreadChecker thread_checker_{}; + const SequenceChecker thread_checker_{}; const std::unique_ptr internal_ RTC_GUARDED_BY(thread_checker_); }; @@ -57,10 +58,18 @@ rtc::scoped_refptr CreateIceTransport( rtc::scoped_refptr CreateIceTransport( IceTransportInit init) { - return new rtc::RefCountedObject( - std::make_unique( - "", 0, init.port_allocator(), init.async_resolver_factory(), - init.event_log())); + if (init.async_resolver_factory()) { + // Backwards 
compatibility mode + return rtc::make_ref_counted( + std::make_unique( + "", cricket::ICE_CANDIDATE_COMPONENT_RTP, init.port_allocator(), + init.async_resolver_factory(), init.event_log())); + } else { + return rtc::make_ref_counted( + cricket::P2PTransportChannel::Create( + "", cricket::ICE_CANDIDATE_COMPONENT_RTP, init.port_allocator(), + init.async_dns_resolver_factory(), init.event_log())); + } } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/ice_transport_interface.h b/TMessagesProj/jni/voip/webrtc/api/ice_transport_interface.h index d2f1edc01..a3b364c87 100644 --- a/TMessagesProj/jni/voip/webrtc/api/ice_transport_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/ice_transport_interface.h @@ -13,6 +13,7 @@ #include +#include "api/async_dns_resolver.h" #include "api/async_resolver_factory.h" #include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log.h" @@ -52,11 +53,21 @@ struct IceTransportInit final { port_allocator_ = port_allocator; } + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory() { + return async_dns_resolver_factory_; + } + void set_async_dns_resolver_factory( + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory) { + RTC_DCHECK(!async_resolver_factory_); + async_dns_resolver_factory_ = async_dns_resolver_factory; + } AsyncResolverFactory* async_resolver_factory() { return async_resolver_factory_; } + ABSL_DEPRECATED("bugs.webrtc.org/12598") void set_async_resolver_factory( AsyncResolverFactory* async_resolver_factory) { + RTC_DCHECK(!async_dns_resolver_factory_); async_resolver_factory_ = async_resolver_factory; } @@ -65,8 +76,11 @@ struct IceTransportInit final { private: cricket::PortAllocator* port_allocator_ = nullptr; + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory_ = nullptr; + // For backwards compatibility. Only one resolver factory can be set. 
AsyncResolverFactory* async_resolver_factory_ = nullptr; RtcEventLog* event_log_ = nullptr; + // TODO(https://crbug.com/webrtc/12657): Redesign to have const members. }; // TODO(qingsi): The factory interface is defined in this file instead of its diff --git a/TMessagesProj/jni/voip/webrtc/api/jsep.h b/TMessagesProj/jni/voip/webrtc/api/jsep.h index cf8aeb0cb..b56cf1d15 100644 --- a/TMessagesProj/jni/voip/webrtc/api/jsep.h +++ b/TMessagesProj/jni/voip/webrtc/api/jsep.h @@ -28,7 +28,6 @@ #include "absl/types/optional.h" #include "api/rtc_error.h" -#include "rtc_base/deprecation.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -136,6 +135,13 @@ class RTC_EXPORT SessionDescriptionInterface { virtual ~SessionDescriptionInterface() {} + // Create a new SessionDescriptionInterface object + // with the same values as the old object. + // TODO(bugs.webrtc.org:12215): Remove default implementation + virtual std::unique_ptr Clone() const { + return nullptr; + } + // Only for use internally. virtual cricket::SessionDescription* description() = 0; virtual const cricket::SessionDescription* description() const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h b/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h index 4ee84cf79..1a4247cb0 100644 --- a/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h +++ b/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h @@ -34,6 +34,8 @@ class RTC_EXPORT JsepIceCandidate : public IceCandidateInterface { JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index, const cricket::Candidate& candidate); + JsepIceCandidate(const JsepIceCandidate&) = delete; + JsepIceCandidate& operator=(const JsepIceCandidate&) = delete; ~JsepIceCandidate() override; // |err| may be null. 
bool Initialize(const std::string& sdp, SdpParseError* err); @@ -53,8 +55,6 @@ class RTC_EXPORT JsepIceCandidate : public IceCandidateInterface { std::string sdp_mid_; int sdp_mline_index_; cricket::Candidate candidate_; - - RTC_DISALLOW_COPY_AND_ASSIGN(JsepIceCandidate); }; // Implementation of IceCandidateCollection which stores JsepIceCandidates. @@ -64,6 +64,8 @@ class JsepCandidateCollection : public IceCandidateCollection { // Move constructor is defined so that a vector of JsepCandidateCollections // can be resized. JsepCandidateCollection(JsepCandidateCollection&& o); + // Returns a copy of the candidate collection. + JsepCandidateCollection Clone() const; size_t count() const override; bool HasCandidate(const IceCandidateInterface* candidate) const override; // Adds and takes ownership of the JsepIceCandidate. diff --git a/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h b/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h index 79e15e21f..70ac9398a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h +++ b/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h @@ -23,7 +23,6 @@ #include "api/jsep.h" #include "api/jsep_ice_candidate.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/deprecation.h" namespace cricket { class SessionDescription; @@ -49,6 +48,8 @@ class JsepSessionDescription : public SessionDescriptionInterface { const std::string& session_id, const std::string& session_version); + virtual std::unique_ptr Clone() const; + virtual cricket::SessionDescription* description() { return description_.get(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h index bd4a2c029..8892ee5a0 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h @@ -216,6 +216,11 @@ class AudioTrackSinkInterface { number_of_frames); } + // Returns the 
number of channels encoded by the sink. This can be less than + // the number_of_channels if down-mixing occur. A value of -1 means an unknown + // number. + virtual int NumPreferredChannels() const { return -1; } + protected: virtual ~AudioTrackSinkInterface() {} }; diff --git a/TMessagesProj/jni/voip/webrtc/api/media_stream_proxy.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_proxy.h index 8ee33ca0e..773c5d8b1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_stream_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_proxy.h @@ -20,8 +20,8 @@ namespace webrtc { // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // are called on is an implementation detail. -BEGIN_SIGNALING_PROXY_MAP(MediaStream) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +BEGIN_PRIMARY_PROXY_MAP(MediaStream) +PROXY_PRIMARY_THREAD_DESTRUCTOR() BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_METHOD0(AudioTrackVector, GetAudioTracks) PROXY_METHOD0(VideoTrackVector, GetVideoTracks) diff --git a/TMessagesProj/jni/voip/webrtc/api/media_stream_track_proxy.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_track_proxy.h index 59dcb7724..a0fe676d5 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_stream_track_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_track_proxy.h @@ -24,8 +24,8 @@ namespace webrtc { // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // are called on is an implementation detail. 
-BEGIN_SIGNALING_PROXY_MAP(AudioTrack) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +BEGIN_PRIMARY_PROXY_MAP(AudioTrack) +PROXY_PRIMARY_THREAD_DESTRUCTOR() BYPASS_PROXY_CONSTMETHOD0(std::string, kind) BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(TrackState, state) @@ -41,7 +41,7 @@ PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) END_PROXY_MAP() BEGIN_PROXY_MAP(VideoTrack) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_PRIMARY_THREAD_DESTRUCTOR() BYPASS_PROXY_CONSTMETHOD0(std::string, kind) BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(TrackState, state) @@ -49,11 +49,11 @@ PROXY_CONSTMETHOD0(bool, enabled) PROXY_METHOD1(bool, set_enabled, bool) PROXY_CONSTMETHOD0(ContentHint, content_hint) PROXY_METHOD1(void, set_content_hint, ContentHint) -PROXY_WORKER_METHOD2(void, - AddOrUpdateSink, - rtc::VideoSinkInterface*, - const rtc::VideoSinkWants&) -PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) +PROXY_SECONDARY_METHOD2(void, + AddOrUpdateSink, + rtc::VideoSinkInterface*, + const rtc::VideoSinkWants&) +PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) diff --git a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h index 2c09c3e15..4c49a0c24 100644 --- a/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h +++ b/TMessagesProj/jni/voip/webrtc/api/neteq/neteq_controller.h @@ -103,6 +103,7 @@ class NetEqController { uint16_t main_sequence_number; bool is_cng_or_dtmf; bool is_dtx; + bool buffer_flush; }; virtual ~NetEqController() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_factory_proxy.h b/TMessagesProj/jni/voip/webrtc/api/peer_connection_factory_proxy.h index e33fb457a..de6250fe9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_factory_proxy.h +++ 
b/TMessagesProj/jni/voip/webrtc/api/peer_connection_factory_proxy.h @@ -17,23 +17,16 @@ #include "api/peer_connection_interface.h" #include "api/proxy.h" -#include "rtc_base/bind.h" namespace webrtc { // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // are called on is an implementation detail. -BEGIN_SIGNALING_PROXY_MAP(PeerConnectionFactory) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +BEGIN_PROXY_MAP(PeerConnectionFactory) +PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD1(void, SetOptions, const Options&) -PROXY_METHOD4(rtc::scoped_refptr, - CreatePeerConnection, - const PeerConnectionInterface::RTCConfiguration&, - std::unique_ptr, - std::unique_ptr, - PeerConnectionObserver*) -PROXY_METHOD2(rtc::scoped_refptr, - CreatePeerConnection, +PROXY_METHOD2(RTCErrorOr>, + CreatePeerConnectionOrError, const PeerConnectionInterface::RTCConfiguration&, PeerConnectionDependencies) PROXY_CONSTMETHOD1(webrtc::RtpCapabilities, @@ -56,8 +49,8 @@ PROXY_METHOD2(rtc::scoped_refptr, CreateAudioTrack, const std::string&, AudioSourceInterface*) -PROXY_METHOD2(bool, StartAecDump, FILE*, int64_t) -PROXY_METHOD0(void, StopAecDump) +PROXY_SECONDARY_METHOD2(bool, StartAecDump, FILE*, int64_t) +PROXY_SECONDARY_METHOD0(void, StopAecDump) END_PROXY_MAP() } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc index f82e84b80..230731c42 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc @@ -10,8 +10,7 @@ #include "api/peer_connection_interface.h" -#include "api/dtls_transport_interface.h" -#include "api/sctp_transport_interface.h" +#include namespace webrtc { @@ -77,14 +76,34 @@ PeerConnectionFactoryInterface::CreatePeerConnection( std::unique_ptr allocator, std::unique_ptr cert_generator, PeerConnectionObserver* observer) { - return nullptr; + PeerConnectionDependencies 
dependencies(observer); + dependencies.allocator = std::move(allocator); + dependencies.cert_generator = std::move(cert_generator); + auto result = + CreatePeerConnectionOrError(configuration, std::move(dependencies)); + if (!result.ok()) { + return nullptr; + } + return result.MoveValue(); } rtc::scoped_refptr PeerConnectionFactoryInterface::CreatePeerConnection( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) { - return nullptr; + auto result = + CreatePeerConnectionOrError(configuration, std::move(dependencies)); + if (!result.ok()) { + return nullptr; + } + return result.MoveValue(); +} + +RTCErrorOr> +PeerConnectionFactoryInterface::CreatePeerConnectionOrError( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies) { + return RTCError(RTCErrorType::INTERNAL_ERROR); } RtpCapabilities PeerConnectionFactoryInterface::GetRtpSenderCapabilities( diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h index c5b04b25e..892e84e19 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h @@ -67,19 +67,25 @@ #ifndef API_PEER_CONNECTION_INTERFACE_H_ #define API_PEER_CONNECTION_INTERFACE_H_ +#include #include +#include #include #include #include +#include "absl/base/attributes.h" +#include "absl/types/optional.h" #include "api/adaptation/resource.h" +#include "api/async_dns_resolver.h" #include "api/async_resolver_factory.h" #include "api/audio/audio_mixer.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_options.h" #include "api/call/call_factory_interface.h" +#include "api/candidate.h" #include "api/crypto/crypto_options.h" #include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" @@ -87,15 +93,18 
@@ #include "api/ice_transport_interface.h" #include "api/jsep.h" #include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" #include "api/packet_socket_factory.h" #include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" #include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" @@ -108,19 +117,25 @@ #include "api/transport/sctp_transport_factory_interface.h" #include "api/transport/webrtc_key_value_config.h" #include "api/turn_customizer.h" +#include "api/video/video_bitrate_allocator_factory.h" #include "media/base/media_config.h" #include "media/base/media_engine.h" // TODO(bugs.webrtc.org/7447): We plan to provide a way to let applications // inject a PacketSocketFactory and/or NetworkManager, and not expose -// PortAllocator in the PeerConnection api. +// PortAllocator in the PeerConnection api. This will let us remove nogncheck. +#include "p2p/base/port.h" // nogncheck #include "p2p/base/port_allocator.h" // nogncheck +#include "rtc_base/network.h" +#include "rtc_base/network_constants.h" #include "rtc_base/network_monitor_factory.h" +#include "rtc_base/ref_count.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/socket_address.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread.h" namespace rtc { class Thread; @@ -403,12 +418,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // from consideration for gathering ICE candidates. 
bool disable_link_local_networks = false; - // If set to true, use RTP data channels instead of SCTP. - // TODO(deadbeef): Remove this. We no longer commit to supporting RTP data - // channels, though some applications are still working on moving off of - // them. - bool enable_rtp_data_channel = false; - // Minimum bitrate at which screencast video tracks will be encoded at. // This means adding padding bits up to this bitrate, which can help // when switching from a static scene to one with motion. @@ -621,12 +630,8 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { absl::optional crypto_options; // Configure if we should include the SDP attribute extmap-allow-mixed in - // our offer. Although we currently do support this, it's not included in - // our offer by default due to a previous bug that caused the SDP parser to - // abort parsing if this attribute was present. This is fixed in Chrome 71. - // TODO(webrtc:9985): Change default to true once sufficient time has - // passed. - bool offer_extmap_allow_mixed = false; + // our offer on session level. + bool offer_extmap_allow_mixed = true; // TURN logging identifier. // This identifier is added to a TURN allocation @@ -643,6 +648,10 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // The delay before doing a usage histogram report for long-lived // PeerConnections. Used for testing only. absl::optional report_usage_pattern_delay_ms; + + // The ping interval (ms) when the connection is stable and writable. This + // parameter overrides the default value in the ICE implementation if set. + absl::optional stable_writable_connection_ping_interval_ms; // // Don't forget to update operator== if adding something. // @@ -1060,7 +1069,10 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Removes a group of remote candidates from the ICE agent. 
Needed mainly for // continual gathering, to avoid an ever-growing list of candidates as - // networks come and go. + // networks come and go. Note that the candidates' transport_name must be set + // to the MID of the m= section that generated the candidate. + // TODO(bugs.webrtc.org/8395): Use IceCandidateInterface instead of + // cricket::Candidate, which would avoid the transport_name oddity. virtual bool RemoveIceCandidates( const std::vector& candidates) = 0; @@ -1296,25 +1308,8 @@ class PeerConnectionObserver { // The heuristics for defining what constitutes "interesting" are // implementation-defined. virtual void OnInterestingUsage(int usage_pattern) {} - - virtual void OnErrorDemuxingPacket(uint32_t ssrc) {} }; -class ErrorDemuxingPacketObserver : public rtc::RefCountInterface { -public: - ErrorDemuxingPacketObserver(PeerConnectionObserver *observer) : - observer_(observer) { - } - - void OnErrorDemuxingPacket(uint32_t ssrc) { - observer_->OnErrorDemuxingPacket(ssrc); - } - -private: - PeerConnectionObserver *observer_ = nullptr; -}; - - // PeerConnectionDependencies holds all of PeerConnections dependencies. // A dependency is distinct from a configuration as it defines significant // executable code that can be provided by a user of the API. @@ -1340,6 +1335,10 @@ struct RTC_EXPORT PeerConnectionDependencies final { // packet_socket_factory, not both. std::unique_ptr allocator; std::unique_ptr packet_socket_factory; + // Factory for creating resolvers that look up hostnames in DNS + std::unique_ptr + async_dns_resolver_factory; + // Deprecated - use async_dns_resolver_factory std::unique_ptr async_resolver_factory; std::unique_ptr ice_transport_factory; std::unique_ptr cert_generator; @@ -1413,10 +1412,6 @@ class RTC_EXPORT PeerConnectionFactoryInterface // testing/debugging. bool disable_encryption = false; - // Deprecated. The only effect of setting this to true is that - // CreateDataChannel will fail, which is not that useful. 
- bool disable_sctp_data_channels = false; - // If set to true, any platform-supported network monitoring capability // won't be used, and instead networks will only be updated via polling. // @@ -1445,6 +1440,13 @@ class RTC_EXPORT PeerConnectionFactoryInterface // configuration and a PeerConnectionDependencies structure. // TODO(benwright): Make pure virtual once downstream mock PC factory classes // are updated. + virtual RTCErrorOr> + CreatePeerConnectionOrError( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies); + // Deprecated creator - does not return an error code on error. + // TODO(bugs.webrtc.org:12238): Deprecate and remove. + ABSL_DEPRECATED("Use CreatePeerConnectionOrError") virtual rtc::scoped_refptr CreatePeerConnection( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies); @@ -1458,6 +1460,7 @@ class RTC_EXPORT PeerConnectionFactoryInterface // responsibility of the caller to delete it. It can be safely deleted after // Close has been called on the returned PeerConnection, which ensures no // more observer callbacks will be invoked. + ABSL_DEPRECATED("Use CreatePeerConnectionOrError") virtual rtc::scoped_refptr CreatePeerConnection( const PeerConnectionInterface::RTCConfiguration& configuration, std::unique_ptr allocator, diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_proxy.h b/TMessagesProj/jni/voip/webrtc/api/peer_connection_proxy.h index 2d4cb5cad..cc9df10ee 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_proxy.h @@ -20,10 +20,13 @@ namespace webrtc { +// PeerConnection proxy objects will be constructed with two thread pointers, +// signaling and network. The proxy macros don't have 'network' specific macros +// and support for a secondary thread is provided via 'SECONDARY' macros. // TODO(deadbeef): Move this to .cc file and out of api/. 
What threads methods // are called on is an implementation detail. -BEGIN_SIGNALING_PROXY_MAP(PeerConnection) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +BEGIN_PROXY_MAP(PeerConnection) +PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD0(rtc::scoped_refptr, local_streams) PROXY_METHOD0(rtc::scoped_refptr, remote_streams) PROXY_METHOD1(bool, AddStream, MediaStreamInterface*) @@ -130,10 +133,15 @@ PROXY_METHOD1(bool, RemoveIceCandidates, const std::vector&) PROXY_METHOD1(RTCError, SetBitrate, const BitrateSettings&) PROXY_METHOD1(void, SetAudioPlayout, bool) PROXY_METHOD1(void, SetAudioRecording, bool) -PROXY_METHOD1(rtc::scoped_refptr, - LookupDtlsTransportByMid, - const std::string&) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetSctpTransport) +// This method will be invoked on the network thread. See +// PeerConnectionFactory::CreatePeerConnectionOrError for more details. +PROXY_SECONDARY_METHOD1(rtc::scoped_refptr, + LookupDtlsTransportByMid, + const std::string&) +// This method will be invoked on the network thread. See +// PeerConnectionFactory::CreatePeerConnectionOrError for more details. +PROXY_SECONDARY_CONSTMETHOD0(rtc::scoped_refptr, + GetSctpTransport) PROXY_METHOD0(SignalingState, signaling_state) PROXY_METHOD0(IceConnectionState, ice_connection_state) PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state) diff --git a/TMessagesProj/jni/voip/webrtc/api/proxy.h b/TMessagesProj/jni/voip/webrtc/api/proxy.h index 05f7414bc..d14e0b2c3 100644 --- a/TMessagesProj/jni/voip/webrtc/api/proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/proxy.h @@ -12,6 +12,13 @@ // PeerConnection classes. // TODO(deadbeef): Move this to pc/; this is part of the implementation. +// The proxied objects are initialized with either one or two thread +// objects that operations can be proxied to: The primary and secondary +// threads. 
+// In common usage, the primary thread will be the PeerConnection's +// signaling thread, and the secondary thread will be either the +// PeerConnection's worker thread or the PeerConnection's network thread. + // // Example usage: // @@ -29,22 +36,22 @@ // }; // // BEGIN_PROXY_MAP(Test) -// PROXY_SIGNALING_THREAD_DESTRUCTOR() +// PROXY_PRIMARY_THREAD_DESTRUCTOR() // PROXY_METHOD0(std::string, FooA) // PROXY_CONSTMETHOD1(std::string, FooB, arg1) -// PROXY_WORKER_METHOD1(std::string, FooC, arg1) +// PROXY_SECONDARY_METHOD1(std::string, FooC, arg1) // END_PROXY_MAP() // -// Where the destructor and first two methods are invoked on the signaling -// thread, and the third is invoked on the worker thread. +// Where the destructor and first two methods are invoked on the primary +// thread, and the third is invoked on the secondary thread. // // The proxy can be created using // // TestProxy::Create(Thread* signaling_thread, Thread* worker_thread, // TestInterface*). // -// The variant defined with BEGIN_SIGNALING_PROXY_MAP is unaware of -// the worker thread, and invokes all methods on the signaling thread. +// The variant defined with BEGIN_PRIMARY_PROXY_MAP is unaware of +// the secondary thread, and invokes all methods on the primary thread. // // The variant defined with BEGIN_OWNED_PROXY_MAP does not use // refcounting, and instead just takes ownership of the object being proxied. 
@@ -195,25 +202,25 @@ class ConstMethodCall : public QueuedTask { }; // clang-format on -#define SIGNALING_PROXY_MAP_BOILERPLATE(c) \ +#define PRIMARY_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + c##ProxyWithInternal(rtc::Thread* primary_thread, INTERNAL_CLASS* c) \ + : primary_thread_(primary_thread), c_(c) {} \ + \ + private: \ + mutable rtc::Thread* primary_thread_; + +#define SECONDARY_PROXY_MAP_BOILERPLATE(c) \ protected: \ - c##ProxyWithInternal(rtc::Thread* signaling_thread, INTERNAL_CLASS* c) \ - : signaling_thread_(signaling_thread), c_(c) {} \ + c##ProxyWithInternal(rtc::Thread* primary_thread, \ + rtc::Thread* secondary_thread, INTERNAL_CLASS* c) \ + : primary_thread_(primary_thread), \ + secondary_thread_(secondary_thread), \ + c_(c) {} \ \ private: \ - mutable rtc::Thread* signaling_thread_; - -#define WORKER_PROXY_MAP_BOILERPLATE(c) \ - protected: \ - c##ProxyWithInternal(rtc::Thread* signaling_thread, \ - rtc::Thread* worker_thread, INTERNAL_CLASS* c) \ - : signaling_thread_(signaling_thread), \ - worker_thread_(worker_thread), \ - c_(c) {} \ - \ - private: \ - mutable rtc::Thread* signaling_thread_; \ - mutable rtc::Thread* worker_thread_; + mutable rtc::Thread* primary_thread_; \ + mutable rtc::Thread* secondary_thread_; // Note that the destructor is protected so that the proxy can only be // destroyed via RefCountInterface. 
@@ -246,89 +253,88 @@ class ConstMethodCall : public QueuedTask { void DestroyInternal() { delete c_; } \ INTERNAL_CLASS* c_; -#define BEGIN_SIGNALING_PROXY_MAP(c) \ +#define BEGIN_PRIMARY_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + PRIMARY_PROXY_MAP_BOILERPLATE(c) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static rtc::scoped_refptr Create( \ + rtc::Thread* primary_thread, INTERNAL_CLASS* c) { \ + return rtc::make_ref_counted(primary_thread, c); \ + } + +#define BEGIN_PROXY_MAP(c) \ PROXY_MAP_BOILERPLATE(c) \ - SIGNALING_PROXY_MAP_BOILERPLATE(c) \ + SECONDARY_PROXY_MAP_BOILERPLATE(c) \ REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ public: \ static rtc::scoped_refptr Create( \ - rtc::Thread* signaling_thread, INTERNAL_CLASS* c) { \ - return new rtc::RefCountedObject(signaling_thread, \ - c); \ - } - -#define BEGIN_PROXY_MAP(c) \ - PROXY_MAP_BOILERPLATE(c) \ - WORKER_PROXY_MAP_BOILERPLATE(c) \ - REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ - public: \ - static rtc::scoped_refptr Create( \ - rtc::Thread* signaling_thread, rtc::Thread* worker_thread, \ - INTERNAL_CLASS* c) { \ - return new rtc::RefCountedObject(signaling_thread, \ - worker_thread, c); \ + rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \ + INTERNAL_CLASS* c) { \ + return rtc::make_ref_counted(primary_thread, \ + secondary_thread, c); \ } #define BEGIN_OWNED_PROXY_MAP(c) \ PROXY_MAP_BOILERPLATE(c) \ - WORKER_PROXY_MAP_BOILERPLATE(c) \ + SECONDARY_PROXY_MAP_BOILERPLATE(c) \ OWNED_PROXY_MAP_BOILERPLATE(c) \ public: \ static std::unique_ptr Create( \ - rtc::Thread* signaling_thread, rtc::Thread* worker_thread, \ + rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \ std::unique_ptr c) { \ return std::unique_ptr(new c##ProxyWithInternal( \ - signaling_thread, worker_thread, c.release())); \ + primary_thread, secondary_thread, c.release())); \ } -#define PROXY_SIGNALING_THREAD_DESTRUCTOR() \ +#define PROXY_PRIMARY_THREAD_DESTRUCTOR() \ + private: \ + rtc::Thread* destructor_thread() 
const { return primary_thread_; } \ + \ + public: // NOLINTNEXTLINE + +#define PROXY_SECONDARY_THREAD_DESTRUCTOR() \ private: \ - rtc::Thread* destructor_thread() const { return signaling_thread_; } \ + rtc::Thread* destructor_thread() const { return secondary_thread_; } \ \ public: // NOLINTNEXTLINE -#define PROXY_WORKER_THREAD_DESTRUCTOR() \ - private: \ - rtc::Thread* destructor_thread() const { return worker_thread_; } \ - \ - public: // NOLINTNEXTLINE - -#define PROXY_METHOD0(r, method) \ - r method() override { \ - MethodCall call(c_, &C::method); \ - return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ +#define PROXY_METHOD0(r, method) \ + r method() override { \ + MethodCall call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } -#define PROXY_CONSTMETHOD0(r, method) \ - r method() const override { \ - ConstMethodCall call(c_, &C::method); \ - return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ +#define PROXY_CONSTMETHOD0(r, method) \ + r method() const override { \ + ConstMethodCall call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } #define PROXY_METHOD1(r, method, t1) \ r method(t1 a1) override { \ MethodCall call(c_, &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } #define PROXY_CONSTMETHOD1(r, method, t1) \ r method(t1 a1) const override { \ ConstMethodCall call(c_, &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } #define PROXY_METHOD2(r, method, t1, t2) \ r method(t1 a1, t2 a2) override { \ MethodCall call(c_, &C::method, std::move(a1), \ std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } #define PROXY_METHOD3(r, method, t1, t2, t3) \ r method(t1 a1, t2 a2, t3 a3) override { \ MethodCall call(c_, 
&C::method, std::move(a1), \ std::move(a2), std::move(a3)); \ - return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } #define PROXY_METHOD4(r, method, t1, t2, t3, t4) \ @@ -336,7 +342,7 @@ class ConstMethodCall : public QueuedTask { MethodCall call(c_, &C::method, std::move(a1), \ std::move(a2), std::move(a3), \ std::move(a4)); \ - return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } #define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \ @@ -344,60 +350,60 @@ class ConstMethodCall : public QueuedTask { MethodCall call(c_, &C::method, std::move(a1), \ std::move(a2), std::move(a3), \ std::move(a4), std::move(a5)); \ - return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } -// Define methods which should be invoked on the worker thread. -#define PROXY_WORKER_METHOD0(r, method) \ - r method() override { \ - MethodCall call(c_, &C::method); \ - return call.Marshal(RTC_FROM_HERE, worker_thread_); \ +// Define methods which should be invoked on the secondary thread. 
+#define PROXY_SECONDARY_METHOD0(r, method) \ + r method() override { \ + MethodCall call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_WORKER_CONSTMETHOD0(r, method) \ - r method() const override { \ - ConstMethodCall call(c_, &C::method); \ - return call.Marshal(RTC_FROM_HERE, worker_thread_); \ +#define PROXY_SECONDARY_CONSTMETHOD0(r, method) \ + r method() const override { \ + ConstMethodCall call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_WORKER_METHOD1(r, method, t1) \ +#define PROXY_SECONDARY_METHOD1(r, method, t1) \ r method(t1 a1) override { \ MethodCall call(c_, &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_WORKER_CONSTMETHOD1(r, method, t1) \ +#define PROXY_SECONDARY_CONSTMETHOD1(r, method, t1) \ r method(t1 a1) const override { \ ConstMethodCall call(c_, &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_WORKER_METHOD2(r, method, t1, t2) \ +#define PROXY_SECONDARY_METHOD2(r, method, t1, t2) \ r method(t1 a1, t2 a2) override { \ MethodCall call(c_, &C::method, std::move(a1), \ std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_WORKER_CONSTMETHOD2(r, method, t1, t2) \ +#define PROXY_SECONDARY_CONSTMETHOD2(r, method, t1, t2) \ r method(t1 a1, t2 a2) const override { \ ConstMethodCall call(c_, &C::method, std::move(a1), \ std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_WORKER_METHOD3(r, method, t1, t2, t3) \ +#define PROXY_SECONDARY_METHOD3(r, method, t1, t2, t3) \ r method(t1 a1, t2 a2, t3 a3) override { \ MethodCall call(c_, &C::method, 
std::move(a1), \ std::move(a2), std::move(a3)); \ - return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_WORKER_CONSTMETHOD3(r, method, t1, t2) \ +#define PROXY_SECONDARY_CONSTMETHOD3(r, method, t1, t2) \ r method(t1 a1, t2 a2, t3 a3) const override { \ ConstMethodCall call(c_, &C::method, std::move(a1), \ std::move(a2), std::move(a3)); \ - return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } // For use when returning purely const state (set during construction). diff --git a/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h b/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h index a1761db85..931cb2076 100644 --- a/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h +++ b/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h @@ -10,8 +10,9 @@ #ifndef API_REF_COUNTED_BASE_H_ #define API_REF_COUNTED_BASE_H_ +#include + #include "rtc_base/constructor_magic.h" -#include "rtc_base/ref_count.h" #include "rtc_base/ref_counter.h" namespace rtc { @@ -30,6 +31,10 @@ class RefCountedBase { } protected: + // Provided for internal webrtc subclasses for corner cases where it's + // necessary to know whether or not a reference is exclusively held. + bool HasOneRef() const { return ref_count_.HasOneRef(); } + virtual ~RefCountedBase() = default; private: @@ -38,6 +43,55 @@ class RefCountedBase { RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedBase); }; +// Template based version of `RefCountedBase` for simple implementations that do +// not need (or want) destruction via virtual destructor or the overhead of a +// vtable. +// +// To use: +// struct MyInt : public rtc::RefCountedNonVirtual { +// int foo_ = 0; +// }; +// +// rtc::scoped_refptr my_int(new MyInt()); +// +// sizeof(MyInt) on a 32 bit system would then be 8, int + refcount and no +// vtable generated. 
+template +class RefCountedNonVirtual { + public: + RefCountedNonVirtual() = default; + + void AddRef() const { ref_count_.IncRef(); } + RefCountReleaseStatus Release() const { + // If you run into this assert, T has virtual methods. There are two + // options: + // 1) The class doesn't actually need virtual methods, the type is complete + // so the virtual attribute(s) can be removed. + // 2) The virtual methods are a part of the design of the class. In this + // case you can consider using `RefCountedBase` instead or alternatively + // use `rtc::RefCountedObject`. + static_assert(!std::is_polymorphic::value, + "T has virtual methods. RefCountedBase is a better fit."); + const auto status = ref_count_.DecRef(); + if (status == RefCountReleaseStatus::kDroppedLastRef) { + delete static_cast(this); + } + return status; + } + + protected: + // Provided for internal webrtc subclasses for corner cases where it's + // necessary to know whether or not a reference is exclusively held. + bool HasOneRef() const { return ref_count_.HasOneRef(); } + + ~RefCountedNonVirtual() = default; + + private: + mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; + + RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedNonVirtual); +}; + } // namespace rtc #endif // API_REF_COUNTED_BASE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_error.h b/TMessagesProj/jni/voip/webrtc/api/rtc_error.h index d24737c25..7cfd89ab7 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_error.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_error.h @@ -11,9 +11,9 @@ #ifndef API_RTC_ERROR_H_ #define API_RTC_ERROR_H_ -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST #include -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST #include #include // For std::move. 
@@ -161,7 +161,7 @@ class RTC_EXPORT RTCError { RTC_EXPORT const char* ToString(RTCErrorType error); RTC_EXPORT const char* ToString(RTCErrorDetailType error); -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) RTCErrorType error) { @@ -173,7 +173,7 @@ inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) RTCErrorDetailType error) { return stream << ToString(error); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST // Helper macro that can be used by implementations to create an error with a // message and log it. |message| should be a string literal or movable diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h b/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h index b9a97c885..cf3d90949 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_headers.h @@ -144,13 +144,12 @@ struct RTPHeaderExtension { VideoPlayoutDelay playout_delay; // For identification of a stream when ssrc is not signaled. See - // https://tools.ietf.org/html/draft-ietf-avtext-rid-09 - // TODO(danilchap): Update url from draft to release version. + // https://tools.ietf.org/html/rfc8852 std::string stream_id; std::string repaired_stream_id; // For identifying the media section used to interpret this RTP packet. 
See - // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-38 + // https://tools.ietf.org/html/rfc8843 std::string mid; absl::optional color_space; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.cc index a9ebd9df4..db818f765 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.cc @@ -16,7 +16,7 @@ namespace webrtc { RtpPacketInfo::RtpPacketInfo() - : ssrc_(0), rtp_timestamp_(0), receive_time_ms_(-1) {} + : ssrc_(0), rtp_timestamp_(0), receive_time_(Timestamp::MinusInfinity()) {} RtpPacketInfo::RtpPacketInfo( uint32_t ssrc, @@ -24,19 +24,19 @@ RtpPacketInfo::RtpPacketInfo( uint32_t rtp_timestamp, absl::optional audio_level, absl::optional absolute_capture_time, - int64_t receive_time_ms) + Timestamp receive_time) : ssrc_(ssrc), csrcs_(std::move(csrcs)), rtp_timestamp_(rtp_timestamp), audio_level_(audio_level), absolute_capture_time_(absolute_capture_time), - receive_time_ms_(receive_time_ms) {} + receive_time_(receive_time) {} RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, - int64_t receive_time_ms) + Timestamp receive_time) : ssrc_(rtp_header.ssrc), rtp_timestamp_(rtp_header.timestamp), - receive_time_ms_(receive_time_ms) { + receive_time_(receive_time) { const auto& extension = rtp_header.extension; const auto csrcs_count = std::min(rtp_header.numCSRCs, kRtpCsrcSize); @@ -49,12 +49,31 @@ RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, absolute_capture_time_ = extension.absolute_capture_time; } +RtpPacketInfo::RtpPacketInfo( + uint32_t ssrc, + std::vector csrcs, + uint32_t rtp_timestamp, + absl::optional audio_level, + absl::optional absolute_capture_time, + int64_t receive_time_ms) + : RtpPacketInfo(ssrc, + csrcs, + rtp_timestamp, + audio_level, + absolute_capture_time, + Timestamp::Millis(receive_time_ms)) {} +RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, + int64_t 
receive_time_ms) + : RtpPacketInfo(rtp_header, Timestamp::Millis(receive_time_ms)) {} + bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs) { return (lhs.ssrc() == rhs.ssrc()) && (lhs.csrcs() == rhs.csrcs()) && (lhs.rtp_timestamp() == rhs.rtp_timestamp()) && (lhs.audio_level() == rhs.audio_level()) && (lhs.absolute_capture_time() == rhs.absolute_capture_time()) && - (lhs.receive_time_ms() == rhs.receive_time_ms()); + (lhs.receive_time() == rhs.receive_time() && + (lhs.local_capture_clock_offset() == + rhs.local_capture_clock_offset())); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.h b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.h index 639ba3277..605620d63 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_info.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/rtp_headers.h" +#include "api/units/timestamp.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -35,8 +36,18 @@ class RTC_EXPORT RtpPacketInfo { uint32_t rtp_timestamp, absl::optional audio_level, absl::optional absolute_capture_time, - int64_t receive_time_ms); + Timestamp receive_time); + RtpPacketInfo(const RTPHeader& rtp_header, Timestamp receive_time); + + // TODO(bugs.webrtc.org/12722): Deprecated, remove once downstream projects + // are updated. 
+ RtpPacketInfo(uint32_t ssrc, + std::vector csrcs, + uint32_t rtp_timestamp, + absl::optional audio_level, + absl::optional absolute_capture_time, + int64_t receive_time_ms); RtpPacketInfo(const RTPHeader& rtp_header, int64_t receive_time_ms); RtpPacketInfo(const RtpPacketInfo& other) = default; @@ -64,8 +75,19 @@ class RTC_EXPORT RtpPacketInfo { absolute_capture_time_ = value; } - int64_t receive_time_ms() const { return receive_time_ms_; } - void set_receive_time_ms(int64_t value) { receive_time_ms_ = value; } + const absl::optional& local_capture_clock_offset() const { + return local_capture_clock_offset_; + } + + void set_local_capture_clock_offset(const absl::optional& value) { + local_capture_clock_offset_ = value; + } + + Timestamp receive_time() const { return receive_time_; } + void set_receive_time(Timestamp value) { receive_time_ = value; } + // TODO(bugs.webrtc.org/12722): Deprecated, remove once downstream projects + // are updated. + int64_t receive_time_ms() const { return receive_time_.ms(); } private: // Fields from the RTP header: @@ -80,10 +102,19 @@ class RTC_EXPORT RtpPacketInfo { // Fields from the Absolute Capture Time header extension: // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time + // To not be confused with |local_capture_clock_offset_|, the + // |estimated_capture_clock_offset| in |absolute_capture_time_| should + // represent the clock offset between a remote sender and the capturer, and + // thus equals to the corresponding values in the received RTP packets, + // subjected to possible interpolations. absl::optional absolute_capture_time_; + // Clock offset against capturer's clock. Should be derived from the estimated + // capture clock offset defined in the Absolute Capture Time header extension. + absl::optional local_capture_clock_offset_; + // Local |webrtc::Clock|-based timestamp of when the packet was received. 
- int64_t receive_time_ms_; + Timestamp receive_time_; }; bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs); diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc index 92f99e9bb..8a18f8983 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc @@ -130,6 +130,7 @@ constexpr char RtpExtension::kColorSpaceUri[]; constexpr char RtpExtension::kMidUri[]; constexpr char RtpExtension::kRidUri[]; constexpr char RtpExtension::kRepairedRidUri[]; +constexpr char RtpExtension::kVideoFrameTrackingIdUri[]; constexpr int RtpExtension::kMinId; constexpr int RtpExtension::kMaxId; @@ -164,7 +165,8 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) { uri == webrtc::RtpExtension::kColorSpaceUri || uri == webrtc::RtpExtension::kRidUri || uri == webrtc::RtpExtension::kRepairedRidUri || - uri == webrtc::RtpExtension::kVideoLayersAllocationUri; + uri == webrtc::RtpExtension::kVideoLayersAllocationUri || + uri == webrtc::RtpExtension::kVideoFrameTrackingIdUri; } bool RtpExtension::IsEncryptionSupported(absl::string_view uri) { diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h index df0e7a93b..7fe9f2bc8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h @@ -353,6 +353,10 @@ struct RTC_EXPORT RtpExtension { static constexpr char kRepairedRidUri[] = "urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id"; + // Header extension to propagate webrtc::VideoFrame id field + static constexpr char kVideoFrameTrackingIdUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-frame-tracking-id"; + // Inclusive min and max IDs for two-byte header extensions and one-byte // header extensions, per RFC8285 Section 4.2-4.3. 
static constexpr int kMinId = 1; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_receiver_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_receiver_interface.h index 786ea3ace..d2645eda8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_receiver_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_receiver_interface.h @@ -26,7 +26,6 @@ #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "api/transport/rtp/rtp_source.h" -#include "rtc_base/deprecation.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -101,11 +100,13 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { // before it is sent across the network. This will decrypt the entire frame // using the user provided decryption mechanism regardless of whether SRTP is // enabled or not. + // TODO(bugs.webrtc.org/12772): Remove. virtual void SetFrameDecryptor( rtc::scoped_refptr frame_decryptor); // Returns a pointer to the frame decryptor set previously by the // user. This can be used to update the state of the object. + // TODO(bugs.webrtc.org/12772): Remove. virtual rtc::scoped_refptr GetFrameDecryptor() const; // Sets a frame transformer between the depacketizer and the decoder to enable @@ -121,27 +122,31 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { // Define proxy for RtpReceiverInterface. // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // are called on is an implementation detail. 
-BEGIN_SIGNALING_PROXY_MAP(RtpReceiver) -PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) +BEGIN_PROXY_MAP(RtpReceiver) +PROXY_PRIMARY_THREAD_DESTRUCTOR() +BYPASS_PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) PROXY_CONSTMETHOD0(std::vector, stream_ids) PROXY_CONSTMETHOD0(std::vector>, streams) BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) BYPASS_PROXY_CONSTMETHOD0(std::string, id) -PROXY_CONSTMETHOD0(RtpParameters, GetParameters) +PROXY_SECONDARY_CONSTMETHOD0(RtpParameters, GetParameters) PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*) -PROXY_METHOD1(void, SetJitterBufferMinimumDelay, absl::optional) -PROXY_CONSTMETHOD0(std::vector, GetSources) -PROXY_METHOD1(void, - SetFrameDecryptor, - rtc::scoped_refptr) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, - GetFrameDecryptor) -PROXY_METHOD1(void, - SetDepacketizerToDecoderFrameTransformer, - rtc::scoped_refptr) +PROXY_SECONDARY_METHOD1(void, + SetJitterBufferMinimumDelay, + absl::optional) +PROXY_SECONDARY_CONSTMETHOD0(std::vector, GetSources) +// TODO(bugs.webrtc.org/12772): Remove. +PROXY_SECONDARY_METHOD1(void, + SetFrameDecryptor, + rtc::scoped_refptr) +// TODO(bugs.webrtc.org/12772): Remove. +PROXY_SECONDARY_CONSTMETHOD0(rtc::scoped_refptr, + GetFrameDecryptor) +PROXY_SECONDARY_METHOD1(void, + SetDepacketizerToDecoderFrameTransformer, + rtc::scoped_refptr) END_PROXY_MAP() } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h index a33b80042..dd93792a0 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_sender_interface.h @@ -104,8 +104,8 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { // Define proxy for RtpSenderInterface. // TODO(deadbeef): Move this to .cc file and out of api/. 
What threads methods // are called on is an implementation detail. -BEGIN_SIGNALING_PROXY_MAP(RtpSender) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +BEGIN_PRIMARY_PROXY_MAP(RtpSender) +PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*) PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc index 1dc0fcc79..fd5085c33 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc @@ -64,6 +64,11 @@ webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions( return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION); } +std::vector +RtpTransceiverInterface::HeaderExtensionsNegotiated() const { + return {}; +} + // TODO(bugs.webrtc.org/11839) Remove default implementations when clients // are updated. void RtpTransceiverInterface::SetDirection( diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h index fd3555fb4..4799c4b15 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h @@ -14,6 +14,7 @@ #include #include +#include "absl/base/attributes.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/media_types.h" @@ -111,8 +112,8 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction // TODO(hta): Deprecate SetDirection without error and rename // SetDirectionWithError to SetDirection, remove default implementations. 
- RTC_DEPRECATED virtual void SetDirection( - RtpTransceiverDirection new_direction); + ABSL_DEPRECATED("Use SetDirectionWithError instead") + virtual void SetDirection(RtpTransceiverDirection new_direction); virtual RTCError SetDirectionWithError(RtpTransceiverDirection new_direction); // The current_direction attribute indicates the current direction negotiated @@ -140,7 +141,7 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // This is an internal function, and is exposed for historical reasons. // https://w3c.github.io/webrtc-pc/#dfn-stop-the-rtcrtptransceiver virtual void StopInternal(); - RTC_DEPRECATED virtual void Stop(); + ABSL_DEPRECATED("Use StopStandard instead") virtual void Stop(); // The SetCodecPreferences method overrides the default codec preferences used // by WebRTC for this transceiver. @@ -156,6 +157,12 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { virtual std::vector HeaderExtensionsToOffer() const; + // Readonly attribute which is either empty if negotation has not yet + // happened, or a vector of the negotiated header extensions. + // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface + virtual std::vector HeaderExtensionsNegotiated() + const; + // The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation // so that it negotiates use of header extensions which are not kStopped. 
// https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface diff --git a/TMessagesProj/jni/voip/webrtc/api/scoped_refptr.h b/TMessagesProj/jni/voip/webrtc/api/scoped_refptr.h index fa4e83dba..4e3f0ebfc 100644 --- a/TMessagesProj/jni/voip/webrtc/api/scoped_refptr.h +++ b/TMessagesProj/jni/voip/webrtc/api/scoped_refptr.h @@ -104,6 +104,7 @@ class scoped_refptr { T* get() const { return ptr_; } operator T*() const { return ptr_; } + T& operator*() const { return *ptr_; } T* operator->() const { return ptr_; } // Returns the (possibly null) raw pointer, and makes the scoped_refptr hold a diff --git a/TMessagesProj/jni/voip/webrtc/api/sctp_transport_interface.h b/TMessagesProj/jni/voip/webrtc/api/sctp_transport_interface.h index 6af0bfce3..7080889fc 100644 --- a/TMessagesProj/jni/voip/webrtc/api/sctp_transport_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/sctp_transport_interface.h @@ -35,6 +35,8 @@ enum class SctpTransportState { // http://w3c.github.io/webrtc-pc/#rtcsctptransport-interface class RTC_EXPORT SctpTransportInformation { public: + SctpTransportInformation() = default; + SctpTransportInformation(const SctpTransportInformation&) = default; explicit SctpTransportInformation(SctpTransportState state); SctpTransportInformation( SctpTransportState state, diff --git a/TMessagesProj/jni/voip/webrtc/api/sequence_checker.h b/TMessagesProj/jni/voip/webrtc/api/sequence_checker.h new file mode 100644 index 000000000..5db7b9e4d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/sequence_checker.h @@ -0,0 +1,116 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_SEQUENCE_CHECKER_H_ +#define API_SEQUENCE_CHECKER_H_ + +#include "rtc_base/checks.h" +#include "rtc_base/synchronization/sequence_checker_internal.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// SequenceChecker is a helper class used to help verify that some methods +// of a class are called on the same task queue or thread. A +// SequenceChecker is bound to a a task queue if the object is +// created on a task queue, or a thread otherwise. +// +// +// Example: +// class MyClass { +// public: +// void Foo() { +// RTC_DCHECK_RUN_ON(&sequence_checker_); +// ... (do stuff) ... +// } +// +// private: +// SequenceChecker sequence_checker_; +// } +// +// In Release mode, IsCurrent will always return true. +class RTC_LOCKABLE SequenceChecker +#if RTC_DCHECK_IS_ON + : public webrtc_sequence_checker_internal::SequenceCheckerImpl { + using Impl = webrtc_sequence_checker_internal::SequenceCheckerImpl; +#else + : public webrtc_sequence_checker_internal::SequenceCheckerDoNothing { + using Impl = webrtc_sequence_checker_internal::SequenceCheckerDoNothing; +#endif + public: + // Returns true if sequence checker is attached to the current sequence. + bool IsCurrent() const { return Impl::IsCurrent(); } + // Detaches checker from sequence to which it is attached. Next attempt + // to do a check with this checker will result in attaching this checker + // to the sequence on which check was performed. + void Detach() { Impl::Detach(); } +}; + +} // namespace webrtc + +// RTC_RUN_ON/RTC_GUARDED_BY/RTC_DCHECK_RUN_ON macros allows to annotate +// variables are accessed from same thread/task queue. +// Using tools designed to check mutexes, it checks at compile time everywhere +// variable is access, there is a run-time dcheck thread/task queue is correct. 
+// +// class SequenceCheckerExample { +// public: +// int CalledFromPacer() RTC_RUN_ON(pacer_sequence_checker_) { +// return var2_; +// } +// +// void CallMeFromPacer() { +// RTC_DCHECK_RUN_ON(&pacer_sequence_checker_) +// << "Should be called from pacer"; +// CalledFromPacer(); +// } +// +// private: +// int pacer_var_ RTC_GUARDED_BY(pacer_sequence_checker_); +// SequenceChecker pacer_sequence_checker_; +// }; +// +// class TaskQueueExample { +// public: +// class Encoder { +// public: +// rtc::TaskQueueBase& Queue() { return encoder_queue_; } +// void Encode() { +// RTC_DCHECK_RUN_ON(&encoder_queue_); +// DoSomething(var_); +// } +// +// private: +// rtc::TaskQueueBase& encoder_queue_; +// Frame var_ RTC_GUARDED_BY(encoder_queue_); +// }; +// +// void Encode() { +// // Will fail at runtime when DCHECK is enabled: +// // encoder_->Encode(); +// // Will work: +// rtc::scoped_refptr encoder = encoder_; +// encoder_->Queue().PostTask([encoder] { encoder->Encode(); }); +// } +// +// private: +// rtc::scoped_refptr encoder_; +// } + +// Document if a function expected to be called from same thread/task queue. 
+#define RTC_RUN_ON(x) \ + RTC_THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(x)) + +#define RTC_DCHECK_RUN_ON(x) \ + webrtc::webrtc_sequence_checker_internal::SequenceCheckerScope \ + seq_check_scope(x); \ + RTC_DCHECK((x)->IsCurrent()) \ + << webrtc::webrtc_sequence_checker_internal::ExpectationToString(x) + +#endif // API_SEQUENCE_CHECKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_collector_callback.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_collector_callback.h index c3e08245e..506cc63e6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_collector_callback.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_collector_callback.h @@ -17,7 +17,7 @@ namespace webrtc { -class RTCStatsCollectorCallback : public virtual rtc::RefCountInterface { +class RTCStatsCollectorCallback : public rtc::RefCountInterface { public: ~RTCStatsCollectorCallback() override = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h index 94bd813b0..0fe5ce91f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtc_stats_report.h @@ -19,9 +19,11 @@ #include #include +#include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/stats/rtc_stats.h" -#include "rtc_base/ref_count.h" +// TODO(tommi): Remove this include after fixing iwyu issue in chromium. +// See: third_party/blink/renderer/platform/peerconnection/rtc_stats.cc #include "rtc_base/ref_counted_object.h" #include "rtc_base/system/rtc_export.h" @@ -29,7 +31,8 @@ namespace webrtc { // A collection of stats. // This is accessible as a map from |RTCStats::id| to |RTCStats|. 
-class RTC_EXPORT RTCStatsReport : public rtc::RefCountInterface { +class RTC_EXPORT RTCStatsReport final + : public rtc::RefCountedNonVirtual { public: typedef std::map> StatsMap; @@ -107,11 +110,11 @@ class RTC_EXPORT RTCStatsReport : public rtc::RefCountInterface { // listing all of its stats objects. std::string ToJson() const; - friend class rtc::RefCountedObject; + protected: + friend class rtc::RefCountedNonVirtual; + ~RTCStatsReport() = default; private: - ~RTCStatsReport() override; - int64_t timestamp_us_; StatsMap stats_; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h index ee3d70727..60ff8c29f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h @@ -161,6 +161,7 @@ class RTC_EXPORT RTCIceCandidatePairStats final : public RTCStats { // TODO(hbos): Support enum types? // "RTCStatsMember"? RTCStatsMember state; + // Obsolete: priority RTCStatsMember priority; RTCStatsMember nominated; // TODO(hbos): Collect this the way the spec describes it. We have a value for @@ -208,9 +209,11 @@ class RTC_EXPORT RTCIceCandidateStats : public RTCStats { ~RTCIceCandidateStats() override; RTCStatsMember transport_id; + // Obsolete: is_remote RTCStatsMember is_remote; RTCStatsMember network_type; RTCStatsMember ip; + RTCStatsMember address; RTCStatsMember port; RTCStatsMember protocol; RTCStatsMember relay_protocol; @@ -219,9 +222,6 @@ class RTC_EXPORT RTCIceCandidateStats : public RTCStats { RTCStatsMember priority; // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/632723 RTCStatsMember url; - // TODO(hbos): |deleted = true| case is not supported by |RTCStatsCollector|. 
- // crbug.com/632723 - RTCStatsMember deleted; // = false protected: RTCIceCandidateStats(const std::string& id, @@ -374,34 +374,64 @@ class RTC_EXPORT RTCRTPStreamStats : public RTCStats { ~RTCRTPStreamStats() override; RTCStatsMember ssrc; - // TODO(hbos): Remote case not supported by |RTCStatsCollector|. - // crbug.com/657855, 657856 - RTCStatsMember is_remote; // = false - RTCStatsMember media_type; // renamed to kind. RTCStatsMember kind; + // Obsolete: track_id RTCStatsMember track_id; RTCStatsMember transport_id; RTCStatsMember codec_id; - // FIR and PLI counts are only defined for |media_type == "video"|. - RTCStatsMember fir_count; - RTCStatsMember pli_count; - // TODO(hbos): NACK count should be collected by |RTCStatsCollector| for both - // audio and video but is only defined in the "video" case. crbug.com/657856 - RTCStatsMember nack_count; - // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/657854 - // SLI count is only defined for |media_type == "video"|. - RTCStatsMember sli_count; - RTCStatsMember qp_sum; + + // Obsolete + RTCStatsMember media_type; // renamed to kind. 
protected: RTCRTPStreamStats(const std::string& id, int64_t timestamp_us); RTCRTPStreamStats(std::string&& id, int64_t timestamp_us); }; +// https://www.w3.org/TR/webrtc-stats/#receivedrtpstats-dict* +class RTC_EXPORT RTCReceivedRtpStreamStats : public RTCRTPStreamStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCReceivedRtpStreamStats(const RTCReceivedRtpStreamStats& other); + ~RTCReceivedRtpStreamStats() override; + + // TODO(hbos) The following fields need to be added and migrated + // both from RTCInboundRtpStreamStats and RTCRemoteInboundRtpStreamStats: + // packetsReceived, packetsDiscarded, packetsRepaired, burstPacketsLost, + // burstPacketDiscarded, burstLossCount, burstDiscardCount, burstLossRate, + // burstDiscardRate, gapLossRate, gapDiscardRate, framesDropped, + // partialFramesLost, fullFramesLost + // crbug.com/webrtc/12532 + RTCStatsMember jitter; + RTCStatsMember packets_lost; // Signed per RFC 3550 + + protected: + RTCReceivedRtpStreamStats(const std::string&& id, int64_t timestamp_us); + RTCReceivedRtpStreamStats(std::string&& id, int64_t timestamp_us); +}; + +// https://www.w3.org/TR/webrtc-stats/#sentrtpstats-dict* +class RTC_EXPORT RTCSentRtpStreamStats : public RTCRTPStreamStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCSentRtpStreamStats(const RTCSentRtpStreamStats& other); + ~RTCSentRtpStreamStats() override; + + RTCStatsMember packets_sent; + RTCStatsMember bytes_sent; + + protected: + RTCSentRtpStreamStats(const std::string&& id, int64_t timestamp_us); + RTCSentRtpStreamStats(std::string&& id, int64_t timestamp_us); +}; + // https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict* // TODO(hbos): Support the remote case |is_remote = true|. 
// https://bugs.webrtc.org/7065 -class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats { +class RTC_EXPORT RTCInboundRTPStreamStats final + : public RTCReceivedRtpStreamStats { public: WEBRTC_RTCSTATS_DECL(); @@ -410,16 +440,13 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats { RTCInboundRTPStreamStats(const RTCInboundRTPStreamStats& other); ~RTCInboundRTPStreamStats() override; + RTCStatsMember remote_id; RTCStatsMember packets_received; RTCStatsMember fec_packets_received; RTCStatsMember fec_packets_discarded; RTCStatsMember bytes_received; RTCStatsMember header_bytes_received; - RTCStatsMember packets_lost; // Signed per RFC 3550 RTCStatsMember last_packet_received_timestamp; - // TODO(hbos): Collect and populate this value for both "audio" and "video", - // currently not collected for "video". https://bugs.webrtc.org/7065 - RTCStatsMember jitter; RTCStatsMember jitter_buffer_delay; RTCStatsMember jitter_buffer_emitted_count; RTCStatsMember total_samples_received; @@ -471,6 +498,13 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats { // TODO(hbos): This is only implemented for video; implement it for audio as // well. RTCStatsMember decoder_implementation; + // FIR and PLI counts are only defined for |media_type == "video"|. + RTCStatsMember fir_count; + RTCStatsMember pli_count; + // TODO(hbos): NACK count should be collected by |RTCStatsCollector| for both + // audio and video but is only defined in the "video" case. crbug.com/657856 + RTCStatsMember nack_count; + RTCStatsMember qp_sum; }; // https://w3c.github.io/webrtc-stats/#outboundrtpstats-dict* @@ -519,18 +553,18 @@ class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { // TODO(hbos): This is only implemented for video; implement it for audio as // well. RTCStatsMember encoder_implementation; + // FIR and PLI counts are only defined for |media_type == "video"|. 
+ RTCStatsMember fir_count; + RTCStatsMember pli_count; + // TODO(hbos): NACK count should be collected by |RTCStatsCollector| for both + // audio and video but is only defined in the "video" case. crbug.com/657856 + RTCStatsMember nack_count; + RTCStatsMember qp_sum; }; -// TODO(https://crbug.com/webrtc/10671): Refactor the stats dictionaries to have -// the same hierarchy as in the spec; implement RTCReceivedRtpStreamStats. -// Several metrics are shared between "outbound-rtp", "remote-inbound-rtp", -// "inbound-rtp" and "remote-outbound-rtp". In the spec there is a hierarchy of -// dictionaries that minimizes defining the same metrics in multiple places. -// From JavaScript this hierarchy is not observable and the spec's hierarchy is -// purely editorial. In C++ non-final classes in the hierarchy could be used to -// refer to different stats objects within the hierarchy. // https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict* -class RTC_EXPORT RTCRemoteInboundRtpStreamStats final : public RTCStats { +class RTC_EXPORT RTCRemoteInboundRtpStreamStats final + : public RTCReceivedRtpStreamStats { public: WEBRTC_RTCSTATS_DECL(); @@ -539,17 +573,6 @@ class RTC_EXPORT RTCRemoteInboundRtpStreamStats final : public RTCStats { RTCRemoteInboundRtpStreamStats(const RTCRemoteInboundRtpStreamStats& other); ~RTCRemoteInboundRtpStreamStats() override; - // In the spec RTCRemoteInboundRtpStreamStats inherits from RTCRtpStreamStats - // and RTCReceivedRtpStreamStats. The members here are listed based on where - // they are defined in the spec. 
- // RTCRtpStreamStats - RTCStatsMember ssrc; - RTCStatsMember kind; - RTCStatsMember transport_id; - RTCStatsMember codec_id; - // RTCReceivedRtpStreamStats - RTCStatsMember packets_lost; - RTCStatsMember jitter; // TODO(hbos): The following RTCReceivedRtpStreamStats metrics should also be // implemented: packetsReceived, packetsDiscarded, packetsRepaired, // burstPacketsLost, burstPacketsDiscarded, burstLossCount, burstDiscardCount, @@ -557,8 +580,25 @@ class RTC_EXPORT RTCRemoteInboundRtpStreamStats final : public RTCStats { // RTCRemoteInboundRtpStreamStats RTCStatsMember local_id; RTCStatsMember round_trip_time; - // TODO(hbos): The following RTCRemoteInboundRtpStreamStats metric should also - // be implemented: fractionLost. + RTCStatsMember fraction_lost; + RTCStatsMember total_round_trip_time; + RTCStatsMember round_trip_time_measurements; +}; + +// https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* +class RTC_EXPORT RTCRemoteOutboundRtpStreamStats final + : public RTCSentRtpStreamStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCRemoteOutboundRtpStreamStats(const std::string& id, int64_t timestamp_us); + RTCRemoteOutboundRtpStreamStats(std::string&& id, int64_t timestamp_us); + RTCRemoteOutboundRtpStreamStats(const RTCRemoteOutboundRtpStreamStats& other); + ~RTCRemoteOutboundRtpStreamStats() override; + + RTCStatsMember local_id; + RTCStatsMember remote_timestamp; + RTCStatsMember reports_sent; }; // https://w3c.github.io/webrtc-stats/#dom-rtcmediasourcestats @@ -604,7 +644,6 @@ class RTC_EXPORT RTCVideoSourceStats final : public RTCMediaSourceStats { RTCStatsMember width; RTCStatsMember height; - // TODO(hbos): Implement this metric. 
RTCStatsMember frames; RTCStatsMember frames_per_second; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/stats_types.h b/TMessagesProj/jni/voip/webrtc/api/stats_types.h index c1922a8a2..f910b4a16 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats_types.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats_types.h @@ -21,11 +21,11 @@ #include #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/string_encode.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -344,7 +344,7 @@ class RTC_EXPORT StatsReport { const StatsValueName name; private: - rtc::ThreadChecker thread_checker_; + webrtc::SequenceChecker thread_checker_; mutable int ref_count_ RTC_GUARDED_BY(thread_checker_) = 0; const Type type_; @@ -447,7 +447,7 @@ class StatsCollection { private: Container list_; - rtc::ThreadChecker thread_checker_; + webrtc::SequenceChecker thread_checker_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h b/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h index 90b1efd31..88419edd8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h @@ -27,12 +27,14 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { // Starts destruction of the task queue. // On return ensures no task are running and no new tasks are able to start // on the task queue. - // Responsible for deallocation. Deallocation may happen syncrhoniously during + // Responsible for deallocation. Deallocation may happen synchronously during // Delete or asynchronously after Delete returns. // Code not running on the TaskQueue should not make any assumption when // TaskQueue is deallocated and thus should not call any methods after Delete. 
// Code running on the TaskQueue should not call Delete, but can assume // TaskQueue still exists and may call other methods, e.g. PostTask. + // Should be called on the same task queue or thread that this task queue + // was created on. virtual void Delete() = 0; // Schedules a task to execute. Tasks are executed in FIFO order. @@ -43,17 +45,20 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { // TaskQueue or it may happen asynchronously after TaskQueue is deleted. // This may vary from one implementation to the next so assumptions about // lifetimes of pending tasks should not be made. + // May be called on any thread or task queue, including this task queue. virtual void PostTask(std::unique_ptr task) = 0; // Schedules a task to execute a specified number of milliseconds from when // the call is made. The precision should be considered as "best effort" // and in some cases, such as on Windows when all high precision timers have // been used up, can be off by as much as 15 millseconds. + // May be called on any thread or task queue, including this task queue. virtual void PostDelayedTask(std::unique_ptr task, uint32_t milliseconds) = 0; // Returns the task queue that is running the current thread. // Returns nullptr if this thread is not associated with any task queue. + // May be called on any thread or task queue, including this task queue. 
static TaskQueueBase* Current(); bool IsCurrent() const { return Current() == this; } diff --git a/TMessagesProj/jni/voip/webrtc/api/test/OWNERS b/TMessagesProj/jni/voip/webrtc/api/test/OWNERS index 770c29956..a7392abe3 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/api/test/OWNERS @@ -1,2 +1,5 @@ +mbonadei@webrtc.org +sprang@webrtc.org srte@webrtc.org +titovartem@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc b/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc index 6f0674299..5ecdcc1eb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/compile_all_headers.cc @@ -30,6 +30,7 @@ #include "api/test/dummy_peer_connection.h" #include "api/test/fake_frame_decryptor.h" #include "api/test/fake_frame_encryptor.h" +#include "api/test/mock_async_dns_resolver.h" #include "api/test/mock_audio_mixer.h" #include "api/test/mock_data_channel.h" #include "api/test/mock_frame_decryptor.h" diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h new file mode 100644 index 000000000..e863cac6e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h @@ -0,0 +1,54 @@ +/* + * Copyright 2021 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_ASYNC_DNS_RESOLVER_H_ +#define API_TEST_MOCK_ASYNC_DNS_RESOLVER_H_ + +#include +#include + +#include "api/async_dns_resolver.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockAsyncDnsResolverResult : public AsyncDnsResolverResult { + public: + MOCK_METHOD(bool, + GetResolvedAddress, + (int, rtc::SocketAddress*), + (const override)); + MOCK_METHOD(int, GetError, (), (const override)); +}; + +class MockAsyncDnsResolver : public AsyncDnsResolverInterface { + public: + MOCK_METHOD(void, + Start, + (const rtc::SocketAddress&, std::function), + (override)); + MOCK_METHOD(AsyncDnsResolverResult&, result, (), (const override)); +}; + +class MockAsyncDnsResolverFactory : public AsyncDnsResolverFactoryInterface { + public: + MOCK_METHOD(std::unique_ptr, + CreateAndResolve, + (const rtc::SocketAddress&, std::function), + (override)); + MOCK_METHOD(std::unique_ptr, + Create, + (), + (override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_ASYNC_DNS_RESOLVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h index 7319cebbc..c2f2435fb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h @@ -32,6 +32,11 @@ class MockPeerConnectionFactoryInterface final (const PeerConnectionInterface::RTCConfiguration&, PeerConnectionDependencies), (override)); + MOCK_METHOD(RTCErrorOr>, + CreatePeerConnectionOrError, + (const PeerConnectionInterface::RTCConfiguration&, + PeerConnectionDependencies), + (override)); MOCK_METHOD(rtc::scoped_refptr, CreatePeerConnection, (const PeerConnectionInterface::RTCConfiguration&, diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/create_cross_traffic.cc b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/create_cross_traffic.cc 
new file mode 100644 index 000000000..36a535cec --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/create_cross_traffic.cc @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/network_emulation/create_cross_traffic.h" + +#include + +#include "rtc_base/task_utils/repeating_task.h" +#include "test/network/cross_traffic.h" + +namespace webrtc { + +std::unique_ptr CreateRandomWalkCrossTraffic( + CrossTrafficRoute* traffic_route, + RandomWalkConfig config) { + return std::make_unique(config, traffic_route); +} + +std::unique_ptr CreatePulsedPeaksCrossTraffic( + CrossTrafficRoute* traffic_route, + PulsedPeaksConfig config) { + return std::make_unique(config, traffic_route); +} + +std::unique_ptr CreateFakeTcpCrossTraffic( + EmulatedRoute* send_route, + EmulatedRoute* ret_route, + FakeTcpConfig config) { + return std::make_unique(config, send_route, + ret_route); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/create_cross_traffic.h b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/create_cross_traffic.h new file mode 100644 index 000000000..42fc85539 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/create_cross_traffic.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TEST_NETWORK_EMULATION_CREATE_CROSS_TRAFFIC_H_ +#define API_TEST_NETWORK_EMULATION_CREATE_CROSS_TRAFFIC_H_ + +#include + +#include "api/test/network_emulation/cross_traffic.h" +#include "api/test/network_emulation_manager.h" + +namespace webrtc { + +// This API is still in development and can be changed without prior notice. + +std::unique_ptr CreateRandomWalkCrossTraffic( + CrossTrafficRoute* traffic_route, + RandomWalkConfig config); + +std::unique_ptr CreatePulsedPeaksCrossTraffic( + CrossTrafficRoute* traffic_route, + PulsedPeaksConfig config); + +std::unique_ptr CreateFakeTcpCrossTraffic( + EmulatedRoute* send_route, + EmulatedRoute* ret_route, + FakeTcpConfig config); + +} // namespace webrtc + +#endif // API_TEST_NETWORK_EMULATION_CREATE_CROSS_TRAFFIC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/cross_traffic.h b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/cross_traffic.h new file mode 100644 index 000000000..85343e44d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/cross_traffic.h @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TEST_NETWORK_EMULATION_CROSS_TRAFFIC_H_ +#define API_TEST_NETWORK_EMULATION_CROSS_TRAFFIC_H_ + +#include "api/task_queue/task_queue_base.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" + +namespace webrtc { + +// This API is still in development and can be changed without prior notice. + +// Represents the endpoint for cross traffic that is going through the network. +// It can be used to emulate unexpected network load. +class CrossTrafficRoute { + public: + virtual ~CrossTrafficRoute() = default; + + // Triggers sending of dummy packets with size |packet_size| bytes. + virtual void TriggerPacketBurst(size_t num_packets, size_t packet_size) = 0; + // Sends a packet over the nodes. The content of the packet is unspecified; + // only the size metter for the emulation purposes. + virtual void SendPacket(size_t packet_size) = 0; + // Sends a packet over the nodes and runs |action| when it has been delivered. + virtual void NetworkDelayedAction(size_t packet_size, + std::function action) = 0; +}; + +// Describes a way of generating cross traffic on some route. Used by +// NetworkEmulationManager to produce cross traffic during some period of time. +class CrossTrafficGenerator { + public: + virtual ~CrossTrafficGenerator() = default; + + // Time between Process calls. + virtual TimeDelta GetProcessInterval() const = 0; + + // Called periodically by NetworkEmulationManager. Generates traffic on the + // route. + virtual void Process(Timestamp at_time) = 0; +}; + +// Config of a cross traffic generator. Generated traffic rises and falls +// randomly. 
+struct RandomWalkConfig { + int random_seed = 1; + DataRate peak_rate = DataRate::KilobitsPerSec(100); + DataSize min_packet_size = DataSize::Bytes(200); + TimeDelta min_packet_interval = TimeDelta::Millis(1); + TimeDelta update_interval = TimeDelta::Millis(200); + double variance = 0.6; + double bias = -0.1; +}; + +// Config of a cross traffic generator. Generated traffic has form of periodic +// peaks alternating with periods of silence. +struct PulsedPeaksConfig { + DataRate peak_rate = DataRate::KilobitsPerSec(100); + DataSize min_packet_size = DataSize::Bytes(200); + TimeDelta min_packet_interval = TimeDelta::Millis(1); + TimeDelta send_duration = TimeDelta::Millis(100); + TimeDelta hold_duration = TimeDelta::Millis(2000); +}; + +struct FakeTcpConfig { + DataSize packet_size = DataSize::Bytes(1200); + DataSize send_limit = DataSize::PlusInfinity(); + TimeDelta process_interval = TimeDelta::Millis(200); + TimeDelta packet_timeout = TimeDelta::Seconds(1); +}; + +} // namespace webrtc + +#endif // API_TEST_NETWORK_EMULATION_CROSS_TRAFFIC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/network_emulation_interfaces.h b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/network_emulation_interfaces.h index 36fb99654..c8e6ed053 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/network_emulation_interfaces.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation/network_emulation_interfaces.h @@ -222,10 +222,23 @@ class EmulatedEndpoint : public EmulatedNetworkReceiverInterface { // |desired_port| != 0 and is free or will be the one, selected by endpoint) // or absl::nullopt if desired_port in used. Also fails if there are no more // free ports to bind to. + // + // The Bind- and Unbind-methods must not be called from within a bound + // receiver's OnPacketReceived method. 
virtual absl::optional BindReceiver( uint16_t desired_port, EmulatedNetworkReceiverInterface* receiver) = 0; + // Unbinds receiver from the specified port. Do nothing if no receiver was + // bound before. After this method returns, no more packets can be delivered + // to the receiver, and it is safe to destroy it. virtual void UnbindReceiver(uint16_t port) = 0; + // Binds receiver that will accept all packets which arrived on any port + // for which there are no bound receiver. + virtual void BindDefaultReceiver( + EmulatedNetworkReceiverInterface* receiver) = 0; + // Unbinds default receiver. Do nothing if no default receiver was bound + // before. + virtual void UnbindDefaultReceiver() = 0; virtual rtc::IPAddress GetPeerLocalAddress() const = 0; private: diff --git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.cc b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.cc index 602c90aac..9c148a069 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.cc @@ -56,18 +56,20 @@ NetworkEmulationManager::SimulatedNetworkNode::Builder::packet_queue_length( } NetworkEmulationManager::SimulatedNetworkNode -NetworkEmulationManager::SimulatedNetworkNode::Builder::Build() const { +NetworkEmulationManager::SimulatedNetworkNode::Builder::Build( + uint64_t random_seed) const { RTC_CHECK(net_); - return Build(net_); + return Build(net_, random_seed); } NetworkEmulationManager::SimulatedNetworkNode NetworkEmulationManager::SimulatedNetworkNode::Builder::Build( - NetworkEmulationManager* net) const { + NetworkEmulationManager* net, + uint64_t random_seed) const { RTC_CHECK(net); RTC_CHECK(net_ == nullptr || net_ == net); SimulatedNetworkNode res; - auto behavior = std::make_unique(config_); + auto behavior = std::make_unique(config_, random_seed); res.simulation = behavior.get(); res.node = net->CreateEmulatedNode(std::move(behavior)); return res; diff 
--git a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h index 8619f3630..ec51b290e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/network_emulation_manager.h @@ -17,6 +17,7 @@ #include #include "api/array_view.h" +#include "api/test/network_emulation/cross_traffic.h" #include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" @@ -55,6 +56,8 @@ struct EmulatedEndpointConfig { kDebug }; + // If specified will be used to name endpoint for logging purposes. + absl::optional name = absl::nullopt; IpAddressFamily generated_ip_family = IpAddressFamily::kIpv4; // If specified will be used as IP address for endpoint node. Must be unique // among all created nodes. @@ -65,6 +68,14 @@ struct EmulatedEndpointConfig { // Network type which will be used to represent endpoint to WebRTC. rtc::AdapterType type = rtc::AdapterType::ADAPTER_TYPE_UNKNOWN; StatsGatheringMode stats_gathering_mode = StatsGatheringMode::kDefault; + // Allow endpoint to send packets specifying source IP address different to + // the current endpoint IP address. If false endpoint will crash if attempt + // to send such packet will be done. + bool allow_send_packet_with_different_source_ip = false; + // Allow endpoint to receive packet with destination IP address different to + // the current endpoint IP address. If false endpoint will crash if such + // packet will arrive. 
+ bool allow_receive_packets_with_different_dest_ip = false; }; struct EmulatedTURNServerConfig { @@ -152,8 +163,9 @@ class NetworkEmulationManager { Builder& capacity_Mbps(int link_capacity_Mbps); Builder& loss(double loss_rate); Builder& packet_queue_length(int max_queue_length_in_packets); - SimulatedNetworkNode Build() const; - SimulatedNetworkNode Build(NetworkEmulationManager* net) const; + SimulatedNetworkNode Build(uint64_t random_seed = 1) const; + SimulatedNetworkNode Build(NetworkEmulationManager* net, + uint64_t random_seed = 1) const; private: NetworkEmulationManager* const net_; @@ -163,11 +175,19 @@ class NetworkEmulationManager { virtual ~NetworkEmulationManager() = default; virtual TimeController* time_controller() = 0; + // Returns a mode in which underlying time controller operates. + virtual TimeMode time_mode() const = 0; // Creates an emulated network node, which represents single network in - // the emulated network layer. + // the emulated network layer. Uses default implementation on network behavior + // which can be configured with |config|. |random_seed| can be provided to + // alter randomization behavior. virtual EmulatedNetworkNode* CreateEmulatedNode( - BuiltInNetworkBehaviorConfig config) = 0; + BuiltInNetworkBehaviorConfig config, + uint64_t random_seed = 1) = 0; + // Creates an emulated network node, which represents single network in + // the emulated network layer. |network_behavior| determines how created node + // will forward incoming packets to the next receiver. virtual EmulatedNetworkNode* CreateEmulatedNode( std::unique_ptr network_behavior) = 0; @@ -214,9 +234,39 @@ class NetworkEmulationManager { virtual EmulatedRoute* CreateRoute( const std::vector& via_nodes) = 0; + // Creates a default route between endpoints going through specified network + // nodes. Default route is used for packet when there is no known route for + // packet's destination IP. 
+ // + // This route is single direction only and describe how traffic that was + // sent by network interface |from| have to be delivered in case if routing + // was unspecified. Return object can be used to remove created route. The + // route must contains at least one network node inside it. + // + // Assume that E{0-9} are endpoints and N{0-9} are network nodes, then + // creation of the route have to follow these rules: + // 1. A route consists of a source endpoint, an ordered list of one or + // more network nodes, and a destination endpoint. + // 2. If (E1, ..., E2) is a route, then E1 != E2. + // In other words, the source and the destination may not be the same. + // 3. Given two simultaneously existing routes (E1, ..., E2) and + // (E3, ..., E4), either E1 != E3 or E2 != E4. + // In other words, there may be at most one route from any given source + // endpoint to any given destination endpoint. + // 4. Given two simultaneously existing routes (E1, ..., N1, ..., E2) + // and (E3, ..., N2, ..., E4), either N1 != N2 or E2 != E4. + // In other words, a network node may not belong to two routes that lead + // to the same destination endpoint. + // 5. Any node N can belong to only one default route. + virtual EmulatedRoute* CreateDefaultRoute( + EmulatedEndpoint* from, + const std::vector& via_nodes, + EmulatedEndpoint* to) = 0; + // Removes route previously created by CreateRoute(...). // Caller mustn't call this function with route, that have been already - // removed earlier. + // removed earlier. Removing a route that is currently in use will lead to + // packets being dropped. virtual void ClearRoute(EmulatedRoute* route) = 0; // Creates a simulated TCP connection using |send_route| for traffic and @@ -226,6 +276,20 @@ class NetworkEmulationManager { virtual TcpMessageRoute* CreateTcpRoute(EmulatedRoute* send_route, EmulatedRoute* ret_route) = 0; + // Creates a route over the given |via_nodes|. 
Returns an object that can be + // used to emulate network load with cross traffic over the created route. + virtual CrossTrafficRoute* CreateCrossTrafficRoute( + const std::vector& via_nodes) = 0; + + // Starts generating cross traffic using given |generator|. Takes ownership + // over the generator. + virtual CrossTrafficGenerator* StartCrossTraffic( + std::unique_ptr generator) = 0; + + // Stops generating cross traffic that was started using given |generator|. + // The |generator| shouldn't be used after and the reference may be invalid. + virtual void StopCrossTraffic(CrossTrafficGenerator* generator) = 0; + // Creates EmulatedNetworkManagerInterface which can be used then to inject // network emulation layer into PeerConnection. |endpoints| - are available // network interfaces for PeerConnection. If endpoint is enabled, it will be @@ -239,7 +303,7 @@ class NetworkEmulationManager { // |stats_callback|. Callback will be executed on network emulation // internal task queue. virtual void GetStats( - rtc::ArrayView endpoints, + rtc::ArrayView endpoints, std::function)> stats_callback) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h b/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h index f37047895..8717e8f73 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h @@ -220,11 +220,19 @@ class PeerConnectionE2EQualityTestFixture { // was captured during the test for this video stream on sender side. // It is useful when generator is used as input. absl::optional input_dump_file_name; + // Used only if |input_dump_file_name| is set. Specifies the module for the + // video frames to be dumped. Modulo equals X means every Xth frame will be + // written to the dump file. The value must be greater than 0. 
+ int input_dump_sampling_modulo = 1; // If specified this file will be used as output on the receiver side for // this stream. If multiple streams will be produced by input stream, // output files will be appended with indexes. The produced files contains // what was rendered for this video stream on receiver side. absl::optional output_dump_file_name; + // Used only if |output_dump_file_name| is set. Specifies the module for the + // video frames to be dumped. Modulo equals X means every Xth frame will be + // written to the dump file. The value must be greater than 0. + int output_dump_sampling_modulo = 1; // If true will display input and output video on the user's screen. bool show_on_screen = false; // If specified, determines a sync group to which this video stream belongs. diff --git a/TMessagesProj/jni/voip/webrtc/api/test/simulated_network.h b/TMessagesProj/jni/voip/webrtc/api/test/simulated_network.h index 3fba61f74..fcac51f4e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/simulated_network.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/simulated_network.h @@ -46,8 +46,7 @@ struct PacketDeliveryInfo { // for built-in network behavior that will be used by WebRTC if no custom // NetworkBehaviorInterface is provided. struct BuiltInNetworkBehaviorConfig { - BuiltInNetworkBehaviorConfig() {} - // Queue length in number of packets. + // Queue length in number of packets. size_t queue_length_packets = 0; // Delay in addition to capacity induced delay. 
int queue_delay_ms = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/video_quality_analyzer_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/video_quality_analyzer_interface.h index c5370a708..4488e5abf 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/video_quality_analyzer_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/video_quality_analyzer_interface.h @@ -138,6 +138,9 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface { absl::string_view pc_label, const rtc::scoped_refptr& report) override {} + // Will be called before test adds new participant in the middle of a call. + virtual void RegisterParticipantInCall(absl::string_view peer_name) {} + // Tells analyzer that analysis complete and it should calculate final // statistics. virtual void Stop() {} diff --git a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_fixture.h b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_fixture.h index 395c5cb80..e0f804fe4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_fixture.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_fixture.h @@ -59,7 +59,7 @@ class VideoCodecTestFixture { class EncodedFrameChecker { public: virtual ~EncodedFrameChecker() = default; - virtual void CheckEncodedFrame(webrtc::VideoCodecType codec, + virtual void CheckEncodedFrame(VideoCodecType codec, const EncodedImage& encoded_frame) const = 0; }; @@ -88,6 +88,17 @@ class VideoCodecTestFixture { // Plain name of YUV file to process without file extension. std::string filename; + // Dimensions of test clip. Falls back to (codec_settings.width/height) if + // not set. + absl::optional clip_width; + absl::optional clip_height; + // Framerate of input clip. Defaults to 30fps if not set. + absl::optional clip_fps; + + // The resolution at which psnr/ssim comparisons should be made. Frames + // will be scaled to this size if different. 
+ absl::optional reference_width; + absl::optional reference_height; // File to process. This must be a video file in the YUV format. std::string filepath; @@ -112,16 +123,16 @@ class VideoCodecTestFixture { bool encode_in_real_time = false; // Codec settings to use. - webrtc::VideoCodec codec_settings; + VideoCodec codec_settings; // Name of the codec being tested. std::string codec_name; // H.264 specific settings. struct H264CodecSettings { - H264::Profile profile = H264::kProfileConstrainedBaseline; + H264Profile profile = H264Profile::kProfileConstrainedBaseline; H264PacketizationMode packetization_mode = - webrtc::H264PacketizationMode::NonInterleaved; + H264PacketizationMode::NonInterleaved; } h264_codec_settings; // Custom checker that will be called for each frame. diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/data_channel_transport_interface.h b/TMessagesProj/jni/voip/webrtc/api/transport/data_channel_transport_interface.h index 7b8c653c3..550fabaac 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/data_channel_transport_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/data_channel_transport_interface.h @@ -47,15 +47,15 @@ struct SendDataParams { // If set, the maximum number of times this message may be // retransmitted by the transport before it is dropped. // Setting this value to zero disables retransmission. - // Must be non-negative. |max_rtx_count| and |max_rtx_ms| may not be set - // simultaneously. + // Valid values are in the range [0-UINT16_MAX]. + // |max_rtx_count| and |max_rtx_ms| may not be set simultaneously. absl::optional max_rtx_count; // If set, the maximum number of milliseconds for which the transport // may retransmit this message before it is dropped. // Setting this value to zero disables retransmission. - // Must be non-negative. |max_rtx_count| and |max_rtx_ms| may not be set - // simultaneously. + // Valid values are in the range [0-UINT16_MAX]. 
+ // |max_rtx_count| and |max_rtx_ms| may not be set simultaneously. absl::optional max_rtx_ms; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/goog_cc_factory.h b/TMessagesProj/jni/voip/webrtc/api/transport/goog_cc_factory.h index b14d6dcd7..e12755d74 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/goog_cc_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/goog_cc_factory.h @@ -12,9 +12,9 @@ #define API_TRANSPORT_GOOG_CC_FACTORY_H_ #include +#include "absl/base/attributes.h" #include "api/network_state_predictor.h" #include "api/transport/network_control.h" -#include "rtc_base/deprecation.h" namespace webrtc { class RtcEventLog; @@ -31,8 +31,8 @@ class GoogCcNetworkControllerFactory : public NetworkControllerFactoryInterface { public: GoogCcNetworkControllerFactory() = default; - explicit RTC_DEPRECATED GoogCcNetworkControllerFactory( - RtcEventLog* event_log); + ABSL_DEPRECATED("") + explicit GoogCcNetworkControllerFactory(RtcEventLog* event_log); explicit GoogCcNetworkControllerFactory( NetworkStatePredictorFactoryInterface* network_state_predictor_factory); @@ -49,7 +49,8 @@ class GoogCcNetworkControllerFactory // Deprecated, use GoogCcFactoryConfig to enable feedback only mode instead. // Factory to create packet feedback only GoogCC, this can be used for // connections providing packet receive time feedback but no other reports. 
-class RTC_DEPRECATED GoogCcFeedbackNetworkControllerFactory +class ABSL_DEPRECATED("use GoogCcFactoryConfig instead") + GoogCcFeedbackNetworkControllerFactory : public GoogCcNetworkControllerFactory { public: explicit GoogCcFeedbackNetworkControllerFactory(RtcEventLog* event_log); diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h index 10fc0beed..38a8917f1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h @@ -19,7 +19,6 @@ #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/deprecation.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc b/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc index c3f589a69..1b5bf0c40 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc +++ b/TMessagesProj/jni/voip/webrtc/api/transport/stun.cc @@ -33,6 +33,7 @@ namespace { const int k127Utf8CharactersLengthInBytes = 508; const int kDefaultMaxAttributeLength = 508; const int kMessageIntegrityAttributeLength = 20; +const int kTheoreticalMaximumAttributeLength = 65535; uint32_t ReduceTransactionId(const std::string& transaction_id) { RTC_DCHECK(transaction_id.length() == cricket::kStunTransactionIdLength || @@ -77,10 +78,10 @@ bool LengthValid(int type, int length) { // No length restriction in RFC; it's the content of an UDP datagram, // which in theory can be up to 65.535 bytes. // TODO(bugs.webrtc.org/12179): Write a test to find the real limit. - return length <= 65535; + return length <= kTheoreticalMaximumAttributeLength; default: // Return an arbitrary restriction for all other types. 
- return length <= kDefaultMaxAttributeLength; + return length <= kTheoreticalMaximumAttributeLength; } RTC_NOTREACHED(); return true; @@ -245,6 +246,31 @@ const StunUInt16ListAttribute* StunMessage::GetUnknownAttributes() const { GetAttribute(STUN_ATTR_UNKNOWN_ATTRIBUTES)); } +StunMessage::IntegrityStatus StunMessage::ValidateMessageIntegrity( + const std::string& password) { + password_ = password; + if (GetByteString(STUN_ATTR_MESSAGE_INTEGRITY)) { + if (ValidateMessageIntegrityOfType( + STUN_ATTR_MESSAGE_INTEGRITY, kStunMessageIntegritySize, + buffer_.c_str(), buffer_.size(), password)) { + integrity_ = IntegrityStatus::kIntegrityOk; + } else { + integrity_ = IntegrityStatus::kIntegrityBad; + } + } else if (GetByteString(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32)) { + if (ValidateMessageIntegrityOfType( + STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32, kStunMessageIntegrity32Size, + buffer_.c_str(), buffer_.size(), password)) { + integrity_ = IntegrityStatus::kIntegrityOk; + } else { + integrity_ = IntegrityStatus::kIntegrityBad; + } + } else { + integrity_ = IntegrityStatus::kNoIntegrity; + } + return integrity_; +} + bool StunMessage::ValidateMessageIntegrity(const char* data, size_t size, const std::string& password) { @@ -352,11 +378,6 @@ bool StunMessage::AddMessageIntegrity(const std::string& password) { password.size()); } -bool StunMessage::AddMessageIntegrity(const char* key, size_t keylen) { - return AddMessageIntegrityOfType(STUN_ATTR_MESSAGE_INTEGRITY, - kStunMessageIntegritySize, key, keylen); -} - bool StunMessage::AddMessageIntegrity32(absl::string_view password) { return AddMessageIntegrityOfType(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32, kStunMessageIntegrity32Size, password.data(), @@ -394,6 +415,8 @@ bool StunMessage::AddMessageIntegrityOfType(int attr_type, // Insert correct HMAC into the attribute. 
msg_integrity_attr->CopyBytes(hmac, attr_size); + password_.assign(key, keylen); + integrity_ = IntegrityStatus::kIntegrityOk; return true; } @@ -472,6 +495,9 @@ bool StunMessage::AddFingerprint() { } bool StunMessage::Read(ByteBufferReader* buf) { + // Keep a copy of the buffer data around for later verification. + buffer_.assign(buf->Data(), buf->Length()); + if (!buf->ReadUInt16(&type_)) { return false; } diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/stun.h b/TMessagesProj/jni/voip/webrtc/api/transport/stun.h index 8893b2a1f..682a17a94 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/stun.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/stun.h @@ -16,6 +16,7 @@ #include #include + #include #include #include @@ -149,15 +150,24 @@ class StunMessage { StunMessage(); virtual ~StunMessage(); + // The verification status of the message. This is checked on parsing, + // or set by AddMessageIntegrity. + enum class IntegrityStatus { + kNotSet, + kNoIntegrity, // Message-integrity attribute missing + kIntegrityOk, // Message-integrity checked OK + kIntegrityBad, // Message-integrity verification failed + }; + int type() const { return type_; } size_t length() const { return length_; } const std::string& transaction_id() const { return transaction_id_; } uint32_t reduced_transaction_id() const { return reduced_transaction_id_; } // Returns true if the message confirms to RFC3489 rather than - // RFC5389. The main difference between two version of the STUN + // RFC5389. The main difference between the two versions of the STUN // protocol is the presence of the magic cookie and different length - // of transaction ID. For outgoing packets version of the protocol + // of transaction ID. For outgoing packets the version of the protocol // is determined by the lengths of the transaction ID. bool IsLegacy() const; @@ -191,19 +201,27 @@ class StunMessage { // Remote all attributes and releases them. 
void ClearAttributes(); - // Validates that a raw STUN message has a correct MESSAGE-INTEGRITY value. - // This can't currently be done on a StunMessage, since it is affected by - // padding data (which we discard when reading a StunMessage). - static bool ValidateMessageIntegrity(const char* data, - size_t size, - const std::string& password); - static bool ValidateMessageIntegrity32(const char* data, - size_t size, - const std::string& password); + // Validates that a STUN message has a correct MESSAGE-INTEGRITY value. + // This uses the buffered raw-format message stored by Read(). + IntegrityStatus ValidateMessageIntegrity(const std::string& password); + + // Returns the current integrity status of the message. + IntegrityStatus integrity() const { return integrity_; } + + // Shortcut for checking if integrity is verified. + bool IntegrityOk() const { + return integrity_ == IntegrityStatus::kIntegrityOk; + } + + // Returns the password attribute used to set or check the integrity. + // Can only be called after adding or checking the integrity. + std::string password() const { + RTC_DCHECK(integrity_ != IntegrityStatus::kNotSet); + return password_; + } // Adds a MESSAGE-INTEGRITY attribute that is valid for the current message. bool AddMessageIntegrity(const std::string& password); - bool AddMessageIntegrity(const char* key, size_t keylen); // Adds a STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 attribute that is valid for the // current message. @@ -244,6 +262,30 @@ class StunMessage { bool EqualAttributes(const StunMessage* other, std::function attribute_type_mask) const; + // Expose raw-buffer ValidateMessageIntegrity function for testing. + static bool ValidateMessageIntegrityForTesting(const char* data, + size_t size, + const std::string& password) { + return ValidateMessageIntegrity(data, size, password); + } + // Expose raw-buffer ValidateMessageIntegrity function for testing. 
+ static bool ValidateMessageIntegrity32ForTesting( + const char* data, + size_t size, + const std::string& password) { + return ValidateMessageIntegrity32(data, size, password); + } + // Validates that a STUN message in byte buffer form + // has a correct MESSAGE-INTEGRITY value. + // These functions are not recommended and will be deprecated; use + // ValidateMessageIntegrity(password) on the parsed form instead. + static bool ValidateMessageIntegrity(const char* data, + size_t size, + const std::string& password); + static bool ValidateMessageIntegrity32(const char* data, + size_t size, + const std::string& password); + protected: // Verifies that the given attribute is allowed for this message. virtual StunAttributeValueType GetAttributeValueType(int type) const; @@ -269,6 +311,10 @@ class StunMessage { std::string transaction_id_; uint32_t reduced_transaction_id_; uint32_t stun_magic_cookie_; + // The original buffer for messages created by Read(). + std::string buffer_; + IntegrityStatus integrity_ = IntegrityStatus::kNotSet; + std::string password_; }; // Base class for all STUN/TURN attributes. diff --git a/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h b/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h index 30543b68b..3e0deb009 100644 --- a/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/api/uma_metrics.h @@ -167,6 +167,41 @@ enum SimulcastApiVersion { kSimulcastApiVersionMax }; +// Metrics for reporting usage of BUNDLE. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. +enum BundleUsage { + // There are no m-lines in the SDP, only a session description. + kBundleUsageEmpty = 0, + // Only a data channel is negotiated but BUNDLE is not negotiated. 
+ kBundleUsageNoBundleDatachannelOnly = 1, + // BUNDLE is not negotiated and there is at most one m-line per media type, + kBundleUsageNoBundleSimple = 2, + // BUNDLE is not negotiated and there are multiple m-lines per media type, + kBundleUsageNoBundleComplex = 3, + // Only a data channel is negotiated and BUNDLE is negotiated. + kBundleUsageBundleDatachannelOnly = 4, + // BUNDLE is negotiated but there is at most one m-line per media type, + kBundleUsageBundleSimple = 5, + // BUNDLE is negotiated and there are multiple m-lines per media type, + kBundleUsageBundleComplex = 6, + // Legacy plan-b metrics. + kBundleUsageNoBundlePlanB = 7, + kBundleUsageBundlePlanB = 8, + kBundleUsageMax +}; + +// Metrics for reporting configured BUNDLE policy, mapping directly to +// https://w3c.github.io/webrtc-pc/#rtcbundlepolicy-enum +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. +enum BundlePolicyUsage { + kBundlePolicyUsageBalanced = 0, + kBundlePolicyUsageMaxBundle = 1, + kBundlePolicyUsageMaxCompat = 2, + kBundlePolicyUsageMax +}; + // When adding new metrics please consider using the style described in // https://chromium.googlesource.com/chromium/src.git/+/HEAD/tools/metrics/histograms/README.md#usage // instead of the legacy enums used above. 
diff --git a/TMessagesProj/jni/voip/webrtc/api/units/data_rate.h b/TMessagesProj/jni/voip/webrtc/api/units/data_rate.h index 5c8a61fd9..98572123c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/units/data_rate.h +++ b/TMessagesProj/jni/voip/webrtc/api/units/data_rate.h @@ -11,9 +11,9 @@ #ifndef API_UNITS_DATA_RATE_H_ #define API_UNITS_DATA_RATE_H_ -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST #include // no-presubmit-check TODO(webrtc:8982) -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST #include #include @@ -142,13 +142,13 @@ inline std::string ToLogString(DataRate value) { return ToString(value); } -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) DataRate value) { return stream << ToString(value); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/units/data_size.h b/TMessagesProj/jni/voip/webrtc/api/units/data_size.h index 27a2a4e4d..6817e24c2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/units/data_size.h +++ b/TMessagesProj/jni/voip/webrtc/api/units/data_size.h @@ -11,9 +11,9 @@ #ifndef API_UNITS_DATA_SIZE_H_ #define API_UNITS_DATA_SIZE_H_ -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST #include // no-presubmit-check TODO(webrtc:8982) -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST #include #include @@ -53,13 +53,13 @@ inline std::string ToLogString(DataSize value) { return ToString(value); } -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) DataSize value) { return stream << ToString(value); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/units/frequency.h b/TMessagesProj/jni/voip/webrtc/api/units/frequency.h index 88912c64d..8e9cc2b5f 100644 --- 
a/TMessagesProj/jni/voip/webrtc/api/units/frequency.h +++ b/TMessagesProj/jni/voip/webrtc/api/units/frequency.h @@ -10,9 +10,9 @@ #ifndef API_UNITS_FREQUENCY_H_ #define API_UNITS_FREQUENCY_H_ -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST #include // no-presubmit-check TODO(webrtc:8982) -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST #include #include @@ -89,13 +89,13 @@ inline std::string ToLogString(Frequency value) { return ToString(value); } -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) Frequency value) { return stream << ToString(value); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST } // namespace webrtc #endif // API_UNITS_FREQUENCY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/units/time_delta.h b/TMessagesProj/jni/voip/webrtc/api/units/time_delta.h index 173affcc5..6f1910379 100644 --- a/TMessagesProj/jni/voip/webrtc/api/units/time_delta.h +++ b/TMessagesProj/jni/voip/webrtc/api/units/time_delta.h @@ -11,9 +11,9 @@ #ifndef API_UNITS_TIME_DELTA_H_ #define API_UNITS_TIME_DELTA_H_ -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST #include // no-presubmit-check TODO(webrtc:8982) -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST #include #include @@ -92,13 +92,13 @@ inline std::string ToLogString(TimeDelta value) { return ToString(value); } -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) TimeDelta value) { return stream << ToString(value); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/units/timestamp.h b/TMessagesProj/jni/voip/webrtc/api/units/timestamp.h index f83477e80..1e9f9d1dc 100644 --- a/TMessagesProj/jni/voip/webrtc/api/units/timestamp.h +++ b/TMessagesProj/jni/voip/webrtc/api/units/timestamp.h @@ -11,9 +11,9 @@ #ifndef 
API_UNITS_TIMESTAMP_H_ #define API_UNITS_TIMESTAMP_H_ -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST #include // no-presubmit-check TODO(webrtc:8982) -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST #include #include @@ -125,13 +125,13 @@ inline std::string ToLogString(Timestamp value) { return ToString(value); } -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) Timestamp value) { return stream << ToString(value); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc index 26a794ec0..42d6b06b8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc @@ -11,11 +11,9 @@ #include "api/video/encoded_frame.h" namespace webrtc { -namespace video_coding { bool EncodedFrame::delayed_by_retransmission() const { return 0; } -} // namespace video_coding } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h index f0a67a1ce..5f046327f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h @@ -17,37 +17,6 @@ #include "modules/video_coding/encoded_frame.h" namespace webrtc { -namespace video_coding { - -// NOTE: This class is still under development and may change without notice. -struct VideoLayerFrameId { - // TODO(philipel): The default ctor is currently used internaly, but have a - // look if we can remove it. 
- VideoLayerFrameId() : picture_id(-1), spatial_layer(0) {} - VideoLayerFrameId(int64_t picture_id, uint8_t spatial_layer) - : picture_id(picture_id), spatial_layer(spatial_layer) {} - - bool operator==(const VideoLayerFrameId& rhs) const { - return picture_id == rhs.picture_id && spatial_layer == rhs.spatial_layer; - } - - bool operator!=(const VideoLayerFrameId& rhs) const { - return !(*this == rhs); - } - - bool operator<(const VideoLayerFrameId& rhs) const { - if (picture_id == rhs.picture_id) - return spatial_layer < rhs.spatial_layer; - return picture_id < rhs.picture_id; - } - - bool operator<=(const VideoLayerFrameId& rhs) const { return !(rhs < *this); } - bool operator>(const VideoLayerFrameId& rhs) const { return rhs < *this; } - bool operator>=(const VideoLayerFrameId& rhs) const { return rhs <= *this; } - - int64_t picture_id; - uint8_t spatial_layer; -}; // TODO(philipel): Remove webrtc::VCMEncodedFrame inheritance. // TODO(philipel): Move transport specific info out of EncodedFrame. @@ -73,19 +42,23 @@ class EncodedFrame : public webrtc::VCMEncodedFrame { bool is_keyframe() const { return num_references == 0; } - VideoLayerFrameId id; + void SetId(int64_t id) { id_ = id; } + int64_t Id() const { return id_; } // TODO(philipel): Add simple modify/access functions to prevent adding too // many |references|. size_t num_references = 0; int64_t references[kMaxFrameReferences]; - bool inter_layer_predicted = false; // Is this subframe the last one in the superframe (In RTP stream that would // mean that the last packet has a marker bit set). bool is_last_spatial_layer = true; + + private: + // The ID of the frame is determined from RTP level information. The IDs are + // used to describe order and dependencies between frames. 
+ int64_t id_ = -1; }; -} // namespace video_coding } // namespace webrtc #endif // API_VIDEO_ENCODED_FRAME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc index 1c73bdabe..fc77b9415 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc @@ -32,13 +32,13 @@ EncodedImageBuffer::~EncodedImageBuffer() { // static rtc::scoped_refptr EncodedImageBuffer::Create(size_t size) { - return new rtc::RefCountedObject(size); + return rtc::make_ref_counted(size); } // static rtc::scoped_refptr EncodedImageBuffer::Create( const uint8_t* data, size_t size) { - return new rtc::RefCountedObject(data, size); + return rtc::make_ref_counted(data, size); } const uint8_t* EncodedImageBuffer::data() const { @@ -66,21 +66,11 @@ EncodedImage::EncodedImage() = default; EncodedImage::EncodedImage(EncodedImage&&) = default; EncodedImage::EncodedImage(const EncodedImage&) = default; -EncodedImage::EncodedImage(uint8_t* buffer, size_t size, size_t capacity) - : size_(size), buffer_(buffer), capacity_(capacity) {} - EncodedImage::~EncodedImage() = default; EncodedImage& EncodedImage::operator=(EncodedImage&&) = default; EncodedImage& EncodedImage::operator=(const EncodedImage&) = default; -void EncodedImage::Retain() { - if (buffer_) { - encoded_data_ = EncodedImageBuffer::Create(buffer_, size_); - buffer_ = nullptr; - } -} - void EncodedImage::SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms) { timing_.encode_start_ms = encode_start_ms; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h index 650766ab6..dae4e3a60 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h @@ -26,7 +26,6 @@ #include "api/video/video_rotation.h" #include "api/video/video_timing.h" #include 
"rtc_base/checks.h" -#include "rtc_base/deprecation.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -73,12 +72,10 @@ class RTC_EXPORT EncodedImage { EncodedImage(); EncodedImage(EncodedImage&&); EncodedImage(const EncodedImage&); - RTC_DEPRECATED EncodedImage(uint8_t* buffer, size_t length, size_t capacity); ~EncodedImage(); EncodedImage& operator=(EncodedImage&&); - // Discouraged: potentially expensive. EncodedImage& operator=(const EncodedImage&); // TODO(nisse): Change style to timestamp(), set_timestamp(), for consistency @@ -112,6 +109,15 @@ class RTC_EXPORT EncodedImage { color_space_ = color_space; } + // These methods along with the private member video_frame_tracking_id_ are + // meant for media quality testing purpose only. + absl::optional VideoFrameTrackingId() const { + return video_frame_tracking_id_; + } + void SetVideoFrameTrackingId(absl::optional tracking_id) { + video_frame_tracking_id_ = tracking_id; + } + const RtpPacketInfos& PacketInfos() const { return packet_infos_; } void SetPacketInfos(RtpPacketInfos packet_infos) { packet_infos_ = std::move(packet_infos); @@ -128,34 +134,26 @@ class RTC_EXPORT EncodedImage { RTC_DCHECK_LE(new_size, new_size == 0 ? 0 : capacity()); size_ = new_size; } + void SetEncodedData( rtc::scoped_refptr encoded_data) { encoded_data_ = encoded_data; size_ = encoded_data->size(); - buffer_ = nullptr; } void ClearEncodedData() { encoded_data_ = nullptr; size_ = 0; - buffer_ = nullptr; - capacity_ = 0; } rtc::scoped_refptr GetEncodedData() const { - RTC_DCHECK(buffer_ == nullptr); return encoded_data_; } const uint8_t* data() const { - return buffer_ ? buffer_ - : (encoded_data_ ? encoded_data_->data() : nullptr); + return encoded_data_ ? encoded_data_->data() : nullptr; } - // Hack to workaround lack of ownership of the encoded data. If we don't - // already own the underlying data, make an owned copy. 
- void Retain(); - uint32_t _encodedWidth = 0; uint32_t _encodedHeight = 0; // NTP time of the capture time in local timebase in milliseconds. @@ -185,22 +183,17 @@ class RTC_EXPORT EncodedImage { } timing_; private: - size_t capacity() const { - return buffer_ ? capacity_ : (encoded_data_ ? encoded_data_->size() : 0); - } + size_t capacity() const { return encoded_data_ ? encoded_data_->size() : 0; } - // TODO(bugs.webrtc.org/9378): We're transitioning to always owning the - // encoded data. rtc::scoped_refptr encoded_data_; size_t size_ = 0; // Size of encoded frame data. - // Non-null when used with an un-owned buffer. - uint8_t* buffer_ = nullptr; - // Allocated size of _buffer; relevant only if it's non-null. - size_t capacity_ = 0; uint32_t timestamp_rtp_ = 0; absl::optional spatial_index_; std::map spatial_layer_frame_size_bytes_; absl::optional color_space_; + // This field is meant for media quality testing purpose only. When enabled it + // carries the webrtc::VideoFrame id field from the sender to the receiver. + absl::optional video_frame_tracking_id_; // Information about packets used to assemble this video frame. This is needed // by |SourceTracker| when the frame is delivered to the RTCRtpReceiver's // MediaStreamTrack, in order to implement getContributingSources(). 
See: diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc index 7286676de..74d37d1b5 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc @@ -56,8 +56,8 @@ I010Buffer::~I010Buffer() {} // static rtc::scoped_refptr I010Buffer::Create(int width, int height) { - return new rtc::RefCountedObject( - width, height, width, (width + 1) / 2, (width + 1) / 2); + return rtc::make_ref_counted(width, height, width, + (width + 1) / 2, (width + 1) / 2); } // static diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc index 312a7cf0c..8783a4a31 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc @@ -60,7 +60,7 @@ I420Buffer::~I420Buffer() {} // static rtc::scoped_refptr I420Buffer::Create(int width, int height) { - return new rtc::RefCountedObject(width, height); + return rtc::make_ref_counted(width, height); } // static @@ -69,8 +69,8 @@ rtc::scoped_refptr I420Buffer::Create(int width, int stride_y, int stride_u, int stride_v) { - return new rtc::RefCountedObject(width, height, stride_y, - stride_u, stride_v); + return rtc::make_ref_counted(width, height, stride_y, stride_u, + stride_v); } // static @@ -118,12 +118,13 @@ rtc::scoped_refptr I420Buffer::Rotate( rtc::scoped_refptr buffer = I420Buffer::Create(rotated_width, rotated_height); - libyuv::I420Rotate( - src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), - src.DataV(), src.StrideV(), buffer->MutableDataY(), - buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), - buffer->MutableDataV(), buffer->StrideV(), src.width(), - src.height(), static_cast(rotation)); + RTC_CHECK_EQ(0, + libyuv::I420Rotate( + src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), + src.DataV(), src.StrideV(), buffer->MutableDataY(), + 
buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), src.width(), + src.height(), static_cast(rotation))); return buffer; } diff --git a/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc index cfa85ac52..974620ba2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc @@ -49,7 +49,7 @@ NV12Buffer::~NV12Buffer() = default; // static rtc::scoped_refptr NV12Buffer::Create(int width, int height) { - return new rtc::RefCountedObject(width, height); + return rtc::make_ref_counted(width, height); } // static @@ -57,8 +57,7 @@ rtc::scoped_refptr NV12Buffer::Create(int width, int height, int stride_y, int stride_uv) { - return new rtc::RefCountedObject(width, height, stride_y, - stride_uv); + return rtc::make_ref_counted(width, height, stride_y, stride_uv); } // static diff --git a/TMessagesProj/jni/voip/webrtc/api/video/recordable_encoded_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/recordable_encoded_frame.h index db59964f2..b4ad83a34 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/recordable_encoded_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/recordable_encoded_frame.h @@ -26,8 +26,10 @@ class RecordableEncodedFrame { public: // Encoded resolution in pixels struct EncodedResolution { - unsigned width; - unsigned height; + bool empty() const { return width == 0 && height == 0; } + + unsigned width = 0; + unsigned height = 0; }; virtual ~RecordableEncodedFrame() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/test/mock_recordable_encoded_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/test/mock_recordable_encoded_frame.h new file mode 100644 index 000000000..2178932d2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video/test/mock_recordable_encoded_frame.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ +#define API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ + +#include "api/video/recordable_encoded_frame.h" +#include "test/gmock.h" + +namespace webrtc { +class MockRecordableEncodedFrame : public RecordableEncodedFrame { + public: + MOCK_METHOD(rtc::scoped_refptr, + encoded_buffer, + (), + (const, override)); + MOCK_METHOD(absl::optional, + color_space, + (), + (const, override)); + MOCK_METHOD(VideoCodecType, codec, (), (const, override)); + MOCK_METHOD(bool, is_key_frame, (), (const, override)); + MOCK_METHOD(EncodedResolution, resolution, (), (const, override)); + MOCK_METHOD(Timestamp, render_time, (), (const, override)); +}; +} // namespace webrtc +#endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h b/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h index e62aae8e5..e073fd5e4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame.h @@ -134,11 +134,11 @@ class RTC_EXPORT VideoFrame { // Get frame size in pixels. uint32_t size() const; - // Get frame ID. Returns 0 if ID is not set. Not guarantee to be transferred - // from the sender to the receiver, but preserved on single side. The id + // Get frame ID. Returns 0 if ID is not set. Not guaranteed to be transferred + // from the sender to the receiver, but preserved on the sender side. The id // should be propagated between all frame modifications during its lifetime // from capturing to sending as encoded image. 
It is intended to be unique - // over a time window of a few minutes for peer connection, to which + // over a time window of a few minutes for the peer connection to which the // corresponding video stream belongs to. uint16_t id() const { return id_; } void set_id(uint16_t id) { id_ = id; } diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc index 64f339448..708501032 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc @@ -11,6 +11,7 @@ #include "api/video/video_frame_buffer.h" #include "api/video/i420_buffer.h" +#include "api/video/nv12_buffer.h" #include "rtc_base/checks.h" namespace webrtc { @@ -139,4 +140,18 @@ int NV12BufferInterface::ChromaWidth() const { int NV12BufferInterface::ChromaHeight() const { return (height() + 1) / 2; } + +rtc::scoped_refptr NV12BufferInterface::CropAndScale( + int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) { + rtc::scoped_refptr result = + NV12Buffer::Create(scaled_width, scaled_height); + result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height); + return result; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h index 67b879732..62adc204f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h @@ -242,6 +242,13 @@ class RTC_EXPORT NV12BufferInterface : public BiplanarYuv8Buffer { int ChromaWidth() const final; int ChromaHeight() const final; + rtc::scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override; + protected: ~NV12BufferInterface() override {} }; diff --git 
a/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h b/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h index b03d7c548..8b5823fc2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h @@ -12,6 +12,7 @@ #define API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ #include +#include #include "absl/types/optional.h" #include "api/video/video_sink_interface.h" @@ -22,6 +23,15 @@ namespace rtc { // VideoSinkWants is used for notifying the source of properties a video frame // should have when it is delivered to a certain sink. struct RTC_EXPORT VideoSinkWants { + struct FrameSize { + FrameSize(int width, int height) : width(width), height(height) {} + FrameSize(const FrameSize&) = default; + ~FrameSize() = default; + + int width; + int height; + }; + VideoSinkWants(); VideoSinkWants(const VideoSinkWants&); ~VideoSinkWants(); @@ -49,8 +59,34 @@ struct RTC_EXPORT VideoSinkWants { // Note that this field is unrelated to any horizontal or vertical stride // requirements the encoder has on the incoming video frame buffers. int resolution_alignment = 1; + + // The resolutions that sink is configured to consume. If the sink is an + // encoder this is what the encoder is configured to encode. In singlecast we + // only encode one resolution, but in simulcast and SVC this can mean multiple + // resolutions per frame. + // + // The sink is always configured to consume a subset of the + // webrtc::VideoFrame's resolution. In the case of encoding, we usually encode + // at webrtc::VideoFrame's resolution but this may not always be the case due + // to scaleResolutionDownBy or turning off simulcast or SVC layers. + // + // For example, we may capture at 720p and due to adaptation (e.g. applying + // |max_pixel_count| constraints) create webrtc::VideoFrames of size 480p, but + // if we do scaleResolutionDownBy:2 then the only resolution we end up + // encoding is 240p. 
In this case we still need to provide webrtc::VideoFrames + // of size 480p but we can optimize internal buffers for 240p, avoiding + // downsampling to 480p if possible. + // + // Note that the |resolutions| can change while frames are in flight and + // should only be used as a hint when constructing the webrtc::VideoFrame. + std::vector resolutions; }; +inline bool operator==(const VideoSinkWants::FrameSize& a, + const VideoSinkWants::FrameSize& b) { + return a.width == b.width && a.height == b.height; +} + template class VideoSourceInterface { public: diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder.h index 4bf8b985c..8d71dd300 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_decoder.h @@ -38,9 +38,7 @@ class VideoStreamDecoderInterface { // Called when the VideoStreamDecoder enters a non-decodable state. virtual void OnNonDecodableState() = 0; - // Called with the last continuous frame. - virtual void OnContinuousUntil( - const video_coding::VideoLayerFrameId& key) = 0; + virtual void OnContinuousUntil(int64_t frame_id) {} virtual void OnDecodedFrame(VideoFrame frame, const FrameInfo& frame_info) = 0; @@ -48,7 +46,7 @@ class VideoStreamDecoderInterface { virtual ~VideoStreamDecoderInterface() = default; - virtual void OnFrame(std::unique_ptr frame) = 0; + virtual void OnFrame(std::unique_ptr frame) = 0; virtual void SetMinPlayoutDelay(TimeDelta min_delay) = 0; virtual void SetMaxPlayoutDelay(TimeDelta max_delay) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_create.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_create.cc deleted file mode 100644 index 3a2ebe79e..000000000 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_create.cc +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/video/video_stream_encoder_create.h" - -#include - -#include "video/adaptation/overuse_frame_detector.h" -#include "video/video_stream_encoder.h" - -namespace webrtc { - -std::unique_ptr CreateVideoStreamEncoder( - Clock* clock, - TaskQueueFactory* task_queue_factory, - uint32_t number_of_cores, - VideoStreamEncoderObserver* encoder_stats_observer, - const VideoStreamEncoderSettings& settings) { - return std::make_unique( - clock, number_of_cores, encoder_stats_observer, settings, - std::make_unique(encoder_stats_observer), - task_queue_factory); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_create.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_create.h deleted file mode 100644 index 3946b95f0..000000000 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_create.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_VIDEO_VIDEO_STREAM_ENCODER_CREATE_H_ -#define API_VIDEO_VIDEO_STREAM_ENCODER_CREATE_H_ - -#include - -#include - -#include "api/task_queue/task_queue_factory.h" -#include "api/video/video_frame.h" -#include "api/video/video_sink_interface.h" -#include "api/video/video_stream_encoder_interface.h" -#include "api/video/video_stream_encoder_observer.h" -#include "api/video/video_stream_encoder_settings.h" - -namespace webrtc { -// TODO(srte): Find a way to avoid this forward declaration. -class Clock; - -std::unique_ptr CreateVideoStreamEncoder( - Clock* clock, - TaskQueueFactory* task_queue_factory, - uint32_t number_of_cores, - VideoStreamEncoderObserver* encoder_stats_observer, - const VideoStreamEncoderSettings& settings); -} // namespace webrtc - -#endif // API_VIDEO_VIDEO_STREAM_ENCODER_CREATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h index cbeed3d07..743524b35 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h @@ -39,12 +39,6 @@ class EncoderSwitchRequestCallback { }; struct VideoStreamEncoderSettings { - enum class BitrateAllocationCallbackType { - kVideoBitrateAllocation, - kVideoBitrateAllocationWhenScreenSharing, - kVideoLayersAllocation - }; - explicit VideoStreamEncoderSettings( const VideoEncoder::Capabilities& capabilities) : capabilities(capabilities) {} @@ -65,11 +59,6 @@ struct VideoStreamEncoderSettings { // Negotiated capabilities which the VideoEncoder may expect the other // side to use. VideoEncoder::Capabilities capabilities; - - // TODO(bugs.webrtc.org/12000): Reporting of VideoBitrateAllocation is beeing - // deprecated. Instead VideoLayersAllocation should be reported. 
- BitrateAllocationCallbackType allocation_cb_type = - BitrateAllocationCallbackType::kVideoBitrateAllocationWhenScreenSharing; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/builtin_video_encoder_factory.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/builtin_video_encoder_factory.cc index 2f722a4a5..9463a9cdf 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/builtin_video_encoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/builtin_video_encoder_factory.cc @@ -26,18 +26,6 @@ namespace webrtc { namespace { -bool IsFormatSupported(const std::vector& supported_formats, - const SdpVideoFormat& format) { - for (const SdpVideoFormat& supported_format : supported_formats) { - if (cricket::IsSameCodec(format.name, format.parameters, - supported_format.name, - supported_format.parameters)) { - return true; - } - } - return false; -} - // This class wraps the internal factory and adds simulcast. class BuiltinVideoEncoderFactory : public VideoEncoderFactory { public: @@ -47,8 +35,8 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory { VideoEncoderFactory::CodecInfo QueryVideoEncoder( const SdpVideoFormat& format) const override { // Format must be one of the internal formats. - RTC_DCHECK(IsFormatSupported( - internal_encoder_factory_->GetSupportedFormats(), format)); + RTC_DCHECK( + format.IsCodecInList(internal_encoder_factory_->GetSupportedFormats())); VideoEncoderFactory::CodecInfo info; return info; } @@ -57,8 +45,8 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory { const SdpVideoFormat& format) override { // Try creating internal encoder. 
std::unique_ptr internal_encoder; - if (IsFormatSupported(internal_encoder_factory_->GetSupportedFormats(), - format)) { + if (format.IsCodecInList( + internal_encoder_factory_->GetSupportedFormats())) { internal_encoder = std::make_unique( internal_encoder_factory_.get(), format); } diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc new file mode 100644 index 000000000..fa4775818 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc @@ -0,0 +1,252 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/h264_profile_level_id.h" + +#include +#include +#include + +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace { + +const char kProfileLevelId[] = "profile-level-id"; + +// For level_idc=11 and profile_idc=0x42, 0x4D, or 0x58, the constraint set3 +// flag specifies if level 1b or level 1.1 is used. +const uint8_t kConstraintSet3Flag = 0x10; + +// Convert a string of 8 characters into a byte where the positions containing +// character c will have their bit set. For example, c = 'x', str = "x1xx0000" +// will return 0b10110000. constexpr is used so that the pattern table in +// kProfilePatterns is statically initialized. 
+constexpr uint8_t ByteMaskString(char c, const char (&str)[9]) { + return (str[0] == c) << 7 | (str[1] == c) << 6 | (str[2] == c) << 5 | + (str[3] == c) << 4 | (str[4] == c) << 3 | (str[5] == c) << 2 | + (str[6] == c) << 1 | (str[7] == c) << 0; +} + +// Class for matching bit patterns such as "x1xx0000" where 'x' is allowed to be +// either 0 or 1. +class BitPattern { + public: + explicit constexpr BitPattern(const char (&str)[9]) + : mask_(~ByteMaskString('x', str)), + masked_value_(ByteMaskString('1', str)) {} + + bool IsMatch(uint8_t value) const { return masked_value_ == (value & mask_); } + + private: + const uint8_t mask_; + const uint8_t masked_value_; +}; + +// Table for converting between profile_idc/profile_iop to H264Profile. +struct ProfilePattern { + const uint8_t profile_idc; + const BitPattern profile_iop; + const H264Profile profile; +}; + +// This is from https://tools.ietf.org/html/rfc6184#section-8.1. +constexpr ProfilePattern kProfilePatterns[] = { + {0x42, BitPattern("x1xx0000"), H264Profile::kProfileConstrainedBaseline}, + {0x4D, BitPattern("1xxx0000"), H264Profile::kProfileConstrainedBaseline}, + {0x58, BitPattern("11xx0000"), H264Profile::kProfileConstrainedBaseline}, + {0x42, BitPattern("x0xx0000"), H264Profile::kProfileBaseline}, + {0x58, BitPattern("10xx0000"), H264Profile::kProfileBaseline}, + {0x4D, BitPattern("0x0x0000"), H264Profile::kProfileMain}, + {0x64, BitPattern("00000000"), H264Profile::kProfileHigh}, + {0x64, BitPattern("00001100"), H264Profile::kProfileConstrainedHigh}}; + +struct LevelConstraint { + const int max_macroblocks_per_second; + const int max_macroblock_frame_size; + const H264Level level; +}; + +// This is from ITU-T H.264 (02/2016) Table A-1 – Level limits. 
+static constexpr LevelConstraint kLevelConstraints[] = { + {1485, 99, H264Level::kLevel1}, + {1485, 99, H264Level::kLevel1_b}, + {3000, 396, H264Level::kLevel1_1}, + {6000, 396, H264Level::kLevel1_2}, + {11880, 396, H264Level::kLevel1_3}, + {11880, 396, H264Level::kLevel2}, + {19800, 792, H264Level::kLevel2_1}, + {20250, 1620, H264Level::kLevel2_2}, + {40500, 1620, H264Level::kLevel3}, + {108000, 3600, H264Level::kLevel3_1}, + {216000, 5120, H264Level::kLevel3_2}, + {245760, 8192, H264Level::kLevel4}, + {245760, 8192, H264Level::kLevel4_1}, + {522240, 8704, H264Level::kLevel4_2}, + {589824, 22080, H264Level::kLevel5}, + {983040, 36864, H264Level::kLevel5_1}, + {2073600, 36864, H264Level::kLevel5_2}, +}; + +} // anonymous namespace + +absl::optional ParseH264ProfileLevelId(const char* str) { + // The string should consist of 3 bytes in hexadecimal format. + if (strlen(str) != 6u) + return absl::nullopt; + const uint32_t profile_level_id_numeric = strtol(str, nullptr, 16); + if (profile_level_id_numeric == 0) + return absl::nullopt; + + // Separate into three bytes. + const uint8_t level_idc = + static_cast(profile_level_id_numeric & 0xFF); + const uint8_t profile_iop = + static_cast((profile_level_id_numeric >> 8) & 0xFF); + const uint8_t profile_idc = + static_cast((profile_level_id_numeric >> 16) & 0xFF); + + // Parse level based on level_idc and constraint set 3 flag. + H264Level level_casted = static_cast(level_idc); + H264Level level; + + switch (level_casted) { + case H264Level::kLevel1_1: + level = (profile_iop & kConstraintSet3Flag) != 0 ? 
H264Level::kLevel1_b + : H264Level::kLevel1_1; + break; + case H264Level::kLevel1: + case H264Level::kLevel1_2: + case H264Level::kLevel1_3: + case H264Level::kLevel2: + case H264Level::kLevel2_1: + case H264Level::kLevel2_2: + case H264Level::kLevel3: + case H264Level::kLevel3_1: + case H264Level::kLevel3_2: + case H264Level::kLevel4: + case H264Level::kLevel4_1: + case H264Level::kLevel4_2: + case H264Level::kLevel5: + case H264Level::kLevel5_1: + case H264Level::kLevel5_2: + level = level_casted; + break; + default: + // Unrecognized level_idc. + return absl::nullopt; + } + + // Parse profile_idc/profile_iop into a Profile enum. + for (const ProfilePattern& pattern : kProfilePatterns) { + if (profile_idc == pattern.profile_idc && + pattern.profile_iop.IsMatch(profile_iop)) { + return H264ProfileLevelId(pattern.profile, level); + } + } + + // Unrecognized profile_idc/profile_iop combination. + return absl::nullopt; +} + +absl::optional H264SupportedLevel(int max_frame_pixel_count, + float max_fps) { + static const int kPixelsPerMacroblock = 16 * 16; + + for (int i = arraysize(kLevelConstraints) - 1; i >= 0; --i) { + const LevelConstraint& level_constraint = kLevelConstraints[i]; + if (level_constraint.max_macroblock_frame_size * kPixelsPerMacroblock <= + max_frame_pixel_count && + level_constraint.max_macroblocks_per_second <= + max_fps * level_constraint.max_macroblock_frame_size) { + return level_constraint.level; + } + } + + // No level supported. + return absl::nullopt; +} + +absl::optional ParseSdpForH264ProfileLevelId( + const SdpVideoFormat::Parameters& params) { + // TODO(magjed): The default should really be kProfileBaseline and kLevel1 + // according to the spec: https://tools.ietf.org/html/rfc6184#section-8.1. In + // order to not break backwards compatibility with older versions of WebRTC + // where external codecs don't have any parameters, use + // kProfileConstrainedBaseline kLevel3_1 instead. 
This workaround will only be + // done in an interim period to allow external clients to update their code. + // http://crbug/webrtc/6337. + static const H264ProfileLevelId kDefaultProfileLevelId( + H264Profile::kProfileConstrainedBaseline, H264Level::kLevel3_1); + + const auto profile_level_id_it = params.find(kProfileLevelId); + return (profile_level_id_it == params.end()) + ? kDefaultProfileLevelId + : ParseH264ProfileLevelId(profile_level_id_it->second.c_str()); +} + +absl::optional H264ProfileLevelIdToString( + const H264ProfileLevelId& profile_level_id) { + // Handle special case level == 1b. + if (profile_level_id.level == H264Level::kLevel1_b) { + switch (profile_level_id.profile) { + case H264Profile::kProfileConstrainedBaseline: + return {"42f00b"}; + case H264Profile::kProfileBaseline: + return {"42100b"}; + case H264Profile::kProfileMain: + return {"4d100b"}; + // Level 1b is not allowed for other profiles. + default: + return absl::nullopt; + } + } + + const char* profile_idc_iop_string; + switch (profile_level_id.profile) { + case H264Profile::kProfileConstrainedBaseline: + profile_idc_iop_string = "42e0"; + break; + case H264Profile::kProfileBaseline: + profile_idc_iop_string = "4200"; + break; + case H264Profile::kProfileMain: + profile_idc_iop_string = "4d00"; + break; + case H264Profile::kProfileConstrainedHigh: + profile_idc_iop_string = "640c"; + break; + case H264Profile::kProfileHigh: + profile_idc_iop_string = "6400"; + break; + // Unrecognized profile. + default: + return absl::nullopt; + } + + char str[7]; + snprintf(str, 7u, "%s%02x", profile_idc_iop_string, profile_level_id.level); + return {str}; +} + +bool H264IsSameProfile(const SdpVideoFormat::Parameters& params1, + const SdpVideoFormat::Parameters& params2) { + const absl::optional profile_level_id = + ParseSdpForH264ProfileLevelId(params1); + const absl::optional other_profile_level_id = + ParseSdpForH264ProfileLevelId(params2); + // Compare H264 profiles, but not levels. 
+ return profile_level_id && other_profile_level_id && + profile_level_id->profile == other_profile_level_id->profile; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h new file mode 100644 index 000000000..51d025cd7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_H264_PROFILE_LEVEL_ID_H_ +#define API_VIDEO_CODECS_H264_PROFILE_LEVEL_ID_H_ + +#include + +#include "absl/types/optional.h" +#include "api/video_codecs/sdp_video_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +enum class H264Profile { + kProfileConstrainedBaseline, + kProfileBaseline, + kProfileMain, + kProfileConstrainedHigh, + kProfileHigh, +}; + +// All values are equal to ten times the level number, except level 1b which is +// special. +enum class H264Level { + kLevel1_b = 0, + kLevel1 = 10, + kLevel1_1 = 11, + kLevel1_2 = 12, + kLevel1_3 = 13, + kLevel2 = 20, + kLevel2_1 = 21, + kLevel2_2 = 22, + kLevel3 = 30, + kLevel3_1 = 31, + kLevel3_2 = 32, + kLevel4 = 40, + kLevel4_1 = 41, + kLevel4_2 = 42, + kLevel5 = 50, + kLevel5_1 = 51, + kLevel5_2 = 52 +}; + +struct H264ProfileLevelId { + constexpr H264ProfileLevelId(H264Profile profile, H264Level level) + : profile(profile), level(level) {} + H264Profile profile; + H264Level level; +}; + +// Parse profile level id that is represented as a string of 3 hex bytes. 
+// Nothing will be returned if the string is not a recognized H264 +// profile level id. +absl::optional ParseH264ProfileLevelId(const char* str); + +// Parse profile level id that is represented as a string of 3 hex bytes +// contained in an SDP key-value map. A default profile level id will be +// returned if the profile-level-id key is missing. Nothing will be returned if +// the key is present but the string is invalid. +RTC_EXPORT absl::optional ParseSdpForH264ProfileLevelId( + const SdpVideoFormat::Parameters& params); + +// Given that a decoder supports up to a given frame size (in pixels) at up to a +// given number of frames per second, return the highest H.264 level where it +// can guarantee that it will be able to support all valid encoded streams that +// are within that level. +RTC_EXPORT absl::optional H264SupportedLevel( + int max_frame_pixel_count, + float max_fps); + +// Returns canonical string representation as three hex bytes of the profile +// level id, or returns nothing for invalid profile level ids. +RTC_EXPORT absl::optional H264ProfileLevelIdToString( + const H264ProfileLevelId& profile_level_id); + +// Returns true if the parameters have the same H264 profile (Baseline, High, +// etc). 
+RTC_EXPORT bool H264IsSameProfile(const SdpVideoFormat::Parameters& params1, + const SdpVideoFormat::Parameters& params2); + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_H264_PROFILE_LEVEL_ID_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.cc index f8901492e..689c337ce 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.cc @@ -10,10 +10,57 @@ #include "api/video_codecs/sdp_video_format.h" +#include "absl/strings/match.h" +#include "api/video_codecs/h264_profile_level_id.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/vp9_profile.h" +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { +namespace { + +std::string H264GetPacketizationModeOrDefault( + const SdpVideoFormat::Parameters& params) { + constexpr char kH264FmtpPacketizationMode[] = "packetization-mode"; + const auto it = params.find(kH264FmtpPacketizationMode); + if (it != params.end()) { + return it->second; + } + // If packetization-mode is not present, default to "0". + // https://tools.ietf.org/html/rfc6184#section-6.2 + return "0"; +} + +bool H264IsSamePacketizationMode(const SdpVideoFormat::Parameters& left, + const SdpVideoFormat::Parameters& right) { + return H264GetPacketizationModeOrDefault(left) == + H264GetPacketizationModeOrDefault(right); +} + +// Some (video) codecs are actually families of codecs and rely on parameters +// to distinguish different incompatible family members. +bool IsSameCodecSpecific(const SdpVideoFormat& format1, + const SdpVideoFormat& format2) { + // The assumption when calling this function is that the two formats have the + // same name. 
+ RTC_DCHECK(absl::EqualsIgnoreCase(format1.name, format2.name)); + + VideoCodecType codec_type = PayloadStringToCodecType(format1.name); + switch (codec_type) { + case kVideoCodecH264: + return H264IsSameProfile(format1.parameters, format2.parameters) && + H264IsSamePacketizationMode(format1.parameters, + format2.parameters); + case kVideoCodecVP9: + return VP9IsSameProfile(format1.parameters, format2.parameters); + default: + return true; + } +} +} // namespace + SdpVideoFormat::SdpVideoFormat(const std::string& name) : name(name) {} SdpVideoFormat::SdpVideoFormat(const std::string& name, @@ -37,6 +84,23 @@ std::string SdpVideoFormat::ToString() const { return builder.str(); } +bool SdpVideoFormat::IsSameCodec(const SdpVideoFormat& other) const { + // Two codecs are considered the same if the name matches (case insensitive) + // and certain codec-specific parameters match. + return absl::EqualsIgnoreCase(name, other.name) && + IsSameCodecSpecific(*this, other); +} + +bool SdpVideoFormat::IsCodecInList( + rtc::ArrayView formats) const { + for (const auto& format : formats) { + if (IsSameCodec(format)) { + return true; + } + } + return false; +} + bool operator==(const SdpVideoFormat& a, const SdpVideoFormat& b) { return a.name == b.name && a.parameters == b.parameters; } diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.h index 97bb75489..a1e23f4f9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/sdp_video_format.h @@ -14,6 +14,7 @@ #include #include +#include "api/array_view.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -32,6 +33,13 @@ struct RTC_EXPORT SdpVideoFormat { ~SdpVideoFormat(); + // Returns true if the SdpVideoFormats have the same names as well as codec + // specific parameters. 
Please note that two SdpVideoFormats can represent the + // same codec even though not all parameters are the same. + bool IsSameCodec(const SdpVideoFormat& other) const; + bool IsCodecInList( + rtc::ArrayView formats) const; + std::string ToString() const; friend RTC_EXPORT bool operator==(const SdpVideoFormat& a, diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder.cc index b18132391..04673e6c3 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder.cc @@ -10,6 +10,8 @@ #include "api/video_codecs/video_decoder.h" +#include "rtc_base/strings/string_builder.h" + namespace webrtc { int32_t DecodedImageCallback::Decoded(VideoFrame& decodedImage, @@ -24,12 +26,31 @@ void DecodedImageCallback::Decoded(VideoFrame& decodedImage, Decoded(decodedImage, decode_time_ms.value_or(-1)); } -bool VideoDecoder::PrefersLateDecoding() const { - return true; +VideoDecoder::DecoderInfo VideoDecoder::GetDecoderInfo() const { + DecoderInfo info; + info.implementation_name = ImplementationName(); + return info; } const char* VideoDecoder::ImplementationName() const { return "unknown"; } +std::string VideoDecoder::DecoderInfo::ToString() const { + char string_buf[2048]; + rtc::SimpleStringBuilder oss(string_buf); + + oss << "DecoderInfo { " + << "prefers_late_decoding = " + << "implementation_name = '" << implementation_name << "', " + << "is_hardware_accelerated = " + << (is_hardware_accelerated ? 
"true" : "false") << " }"; + return oss.str(); +} + +bool VideoDecoder::DecoderInfo::operator==(const DecoderInfo& rhs) const { + return is_hardware_accelerated == rhs.is_hardware_accelerated && + implementation_name == rhs.implementation_name; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder.h index 266d65369..04052de08 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder.h @@ -42,6 +42,18 @@ class RTC_EXPORT DecodedImageCallback { class RTC_EXPORT VideoDecoder { public: + struct DecoderInfo { + // Descriptive name of the decoder implementation. + std::string implementation_name; + + // True if the decoder is backed by hardware acceleration. + bool is_hardware_accelerated = false; + + std::string ToString() const; + bool operator==(const DecoderInfo& rhs) const; + bool operator!=(const DecoderInfo& rhs) const { return !(*this == rhs); } + }; + virtual ~VideoDecoder() {} virtual int32_t InitDecode(const VideoCodec* codec_settings, @@ -56,11 +68,9 @@ class RTC_EXPORT VideoDecoder { virtual int32_t Release() = 0; - // Returns true if the decoder prefer to decode frames late. - // That is, it can not decode infinite number of frames before the decoded - // frame is consumed. - virtual bool PrefersLateDecoding() const; + virtual DecoderInfo GetDecoderInfo() const; + // Deprecated, use GetDecoderInfo().implementation_name instead. 
virtual const char* ImplementationName() const; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory.h index e4d83c246..0b6ea4f9f 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory.h @@ -15,31 +15,51 @@ #include #include +#include "absl/types/optional.h" +#include "api/video_codecs/sdp_video_format.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { class VideoDecoder; -struct SdpVideoFormat; // A factory that creates VideoDecoders. // NOTE: This class is still under development and may change without notice. class RTC_EXPORT VideoDecoderFactory { public: + struct CodecSupport { + bool is_supported = false; + bool is_power_efficient = false; + }; + // Returns a list of supported video formats in order of preference, to use // for signaling etc. virtual std::vector GetSupportedFormats() const = 0; + // Query whether the specifed format is supported or not and if it will be + // power efficient, which is currently interpreted as if there is support for + // hardware acceleration. + // See https://w3c.github.io/webrtc-svc/#scalabilitymodes* for a specification + // of valid values for |scalability_mode|. + // NOTE: QueryCodecSupport is currently an experimental feature that is + // subject to change without notice. + virtual CodecSupport QueryCodecSupport( + const SdpVideoFormat& format, + absl::optional scalability_mode) const { + // Default implementation, query for supported formats and check if the + // specified format is supported. Returns false if scalability_mode is + // specified. + CodecSupport codec_support; + if (!scalability_mode) { + codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); + } + return codec_support; + } + // Creates a VideoDecoder for the specified format. 
virtual std::unique_ptr CreateVideoDecoder( const SdpVideoFormat& format) = 0; - // Note: Do not call or override this method! This method is a legacy - // workaround and is scheduled for removal without notice. - virtual std::unique_ptr LegacyCreateVideoDecoder( - const SdpVideoFormat& format, - const std::string& receive_stream_id); - virtual ~VideoDecoderFactory() {} }; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc index 20b312cc0..bf1bbb26b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc @@ -50,8 +50,8 @@ class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { DecodedImageCallback* callback) override; int32_t Release() override; - bool PrefersLateDecoding() const override; + DecoderInfo GetDecoderInfo() const override; const char* ImplementationName() const override; private: @@ -268,14 +268,23 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Release() { return status; } -bool VideoDecoderSoftwareFallbackWrapper::PrefersLateDecoding() const { - return active_decoder().PrefersLateDecoding(); +VideoDecoder::DecoderInfo VideoDecoderSoftwareFallbackWrapper::GetDecoderInfo() + const { + DecoderInfo info = active_decoder().GetDecoderInfo(); + if (decoder_type_ == DecoderType::kFallback) { + // Cached "A (fallback from B)" string. + info.implementation_name = fallback_implementation_name_; + } + return info; } const char* VideoDecoderSoftwareFallbackWrapper::ImplementationName() const { - return decoder_type_ == DecoderType::kFallback - ? fallback_implementation_name_.c_str() - : hw_decoder_->ImplementationName(); + if (decoder_type_ == DecoderType::kFallback) { + // Cached "A (fallback from B)" string. 
+ return fallback_implementation_name_.c_str(); + } else { + return hw_decoder_->ImplementationName(); + } } VideoDecoder& VideoDecoderSoftwareFallbackWrapper::active_decoder() const { diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h index 3c9c2376a..61110bc24 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder.h @@ -367,7 +367,7 @@ class RTC_EXPORT VideoEncoder { // TODO(bugs.webrtc.org/10720): After updating downstream projects and posting // an announcement to discuss-webrtc, remove the three-parameters variant // and make the two-parameters variant pure-virtual. - /* RTC_DEPRECATED */ virtual int32_t InitEncode( + /* ABSL_DEPRECATED("bugs.webrtc.org/10720") */ virtual int32_t InitEncode( const VideoCodec* codec_settings, int32_t number_of_cores, size_t max_payload_size); diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc index 45d579503..a04f20bde 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.cc @@ -57,7 +57,8 @@ VideoEncoderConfig::VideoEncoderConfig() max_bitrate_bps(0), bitrate_priority(1.0), number_of_streams(0), - legacy_conference_mode(false) {} + legacy_conference_mode(false), + is_quality_scaling_allowed(false) {} VideoEncoderConfig::VideoEncoderConfig(VideoEncoderConfig&&) = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h index 81de62daf..2e6d1c388 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_config.h @@ -194,6 +194,9 @@ class 
VideoEncoderConfig { // Legacy Google conference mode flag for simulcast screenshare bool legacy_conference_mode; + // Indicates whether quality scaling can be used or not. + bool is_quality_scaling_allowed; + private: // Access to the copy constructor is private to force use of the Copy() // method for those exceptional cases where we do use it. diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h index 22430eb19..c2d66cfa8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_factory.h @@ -12,6 +12,7 @@ #define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_H_ #include +#include #include #include "absl/types/optional.h" @@ -36,6 +37,11 @@ class VideoEncoderFactory { bool has_internal_source = false; }; + struct CodecSupport { + bool is_supported = false; + bool is_power_efficient = false; + }; + // An injectable class that is continuously updated with encoding conditions // and selects the best encoder given those conditions. class EncoderSelectorInterface { @@ -78,6 +84,26 @@ class VideoEncoderFactory { return CodecInfo(); } + // Query whether the specifed format is supported or not and if it will be + // power efficient, which is currently interpreted as if there is support for + // hardware acceleration. + // See https://w3c.github.io/webrtc-svc/#scalabilitymodes* for a specification + // of valid values for |scalability_mode|. + // NOTE: QueryCodecSupport is currently an experimental feature that is + // subject to change without notice. + virtual CodecSupport QueryCodecSupport( + const SdpVideoFormat& format, + absl::optional scalability_mode) const { + // Default implementation, query for supported formats and check if the + // specified format is supported. Returns false if scalability_mode is + // specified. 
+ CodecSupport codec_support; + if (!scalability_mode) { + codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); + } + return codec_support; + } + // Creates a VideoEncoder for the specified format. virtual std::unique_ptr CreateVideoEncoder( const SdpVideoFormat& format) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc index 94a18171a..be79c4246 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc @@ -50,7 +50,6 @@ struct ForcedFallbackParams { return enable_resolution_based_switch && codec.codecType == kVideoCodecVP8 && codec.numberOfSimulcastStreams <= 1 && - codec.VP8().numberOfTemporalLayers == 1 && codec.width * codec.height <= max_pixels; } diff --git a/TMessagesProj/jni/voip/webrtc/media/base/vp9_profile.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.cc similarity index 91% rename from TMessagesProj/jni/voip/webrtc/media/base/vp9_profile.cc rename to TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.cc index abf2502fc..d69f566e1 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/vp9_profile.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "media/base/vp9_profile.h" +#include "api/video_codecs/vp9_profile.h" #include #include @@ -59,7 +59,7 @@ absl::optional ParseSdpForVP9Profile( return StringToVP9Profile(profile_str); } -bool IsSameVP9Profile(const SdpVideoFormat::Parameters& params1, +bool VP9IsSameProfile(const SdpVideoFormat::Parameters& params1, const SdpVideoFormat::Parameters& params2) { const absl::optional profile = ParseSdpForVP9Profile(params1); const absl::optional other_profile = diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.h new file mode 100644 index 000000000..e632df437 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/vp9_profile.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VP9_PROFILE_H_ +#define API_VIDEO_CODECS_VP9_PROFILE_H_ + +#include + +#include "absl/types/optional.h" +#include "api/video_codecs/sdp_video_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Profile information for VP9 video. +extern RTC_EXPORT const char kVP9FmtpProfileId[]; + +enum class VP9Profile { + kProfile0, + kProfile1, + kProfile2, +}; + +// Helper functions to convert VP9Profile to std::string. Returns "0" by +// default. +RTC_EXPORT std::string VP9ProfileToString(VP9Profile profile); + +// Helper functions to convert std::string to VP9Profile. Returns null if given +// an invalid profile string. 
+absl::optional StringToVP9Profile(const std::string& str); + +// Parse profile that is represented as a string of single digit contained in an +// SDP key-value map. A default profile(kProfile0) will be returned if the +// profile key is missing. Nothing will be returned if the key is present but +// the string is invalid. +RTC_EXPORT absl::optional ParseSdpForVP9Profile( + const SdpVideoFormat::Parameters& params); + +// Returns true if the parameters have the same VP9 profile, or neither contains +// VP9 profile. +bool VP9IsSameProfile(const SdpVideoFormat::Parameters& params1, + const SdpVideoFormat::Parameters& params2); + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_VP9_PROFILE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy.h b/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy.h index 692ff6493..0b60d20de 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_track_source_proxy.h @@ -21,27 +21,27 @@ namespace webrtc { // TODO(deadbeef): Move this to .cc file and out of api/. What threads methods // are called on is an implementation detail. 
BEGIN_PROXY_MAP(VideoTrackSource) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_CONSTMETHOD0(SourceState, state) BYPASS_PROXY_CONSTMETHOD0(bool, remote) BYPASS_PROXY_CONSTMETHOD0(bool, is_screencast) PROXY_CONSTMETHOD0(absl::optional, needs_denoising) PROXY_METHOD1(bool, GetStats, Stats*) -PROXY_WORKER_METHOD2(void, - AddOrUpdateSink, - rtc::VideoSinkInterface*, - const rtc::VideoSinkWants&) -PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) +PROXY_SECONDARY_METHOD2(void, + AddOrUpdateSink, + rtc::VideoSinkInterface*, + const rtc::VideoSinkWants&) +PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) PROXY_CONSTMETHOD0(bool, SupportsEncodedOutput) -PROXY_WORKER_METHOD0(void, GenerateKeyFrame) -PROXY_WORKER_METHOD1(void, - AddEncodedSink, - rtc::VideoSinkInterface*) -PROXY_WORKER_METHOD1(void, - RemoveEncodedSink, - rtc::VideoSinkInterface*) +PROXY_SECONDARY_METHOD0(void, GenerateKeyFrame) +PROXY_SECONDARY_METHOD1(void, + AddEncodedSink, + rtc::VideoSinkInterface*) +PROXY_SECONDARY_METHOD1(void, + RemoveEncodedSink, + rtc::VideoSinkInterface*) END_PROXY_MAP() } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_base.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_base.h index ef83b51ed..d469ea4bd 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_base.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_base.h @@ -11,6 +11,7 @@ #ifndef API_VOIP_VOIP_BASE_H_ #define API_VOIP_VOIP_BASE_H_ +#include "absl/base/attributes.h" #include "absl/types/optional.h" namespace webrtc { @@ -35,6 +36,21 @@ class Transport; enum class ChannelId : int {}; +enum class ABSL_MUST_USE_RESULT VoipResult { + // kOk indicates the function was successfully invoked with no error. 
+ kOk, + // kInvalidArgument indicates the caller specified an invalid argument, such + // as an invalid ChannelId. + kInvalidArgument, + // kFailedPrecondition indicates that the operation was failed due to not + // satisfying prerequisite such as not setting codec type before sending. + kFailedPrecondition, + // kInternal is used to indicate various internal failures that are not the + // caller's fault. Further detail is commented on each function that uses this + // return value. + kInternal, +}; + class VoipBase { public: // Creates a channel. @@ -46,40 +62,48 @@ class VoipBase { // and injection for incoming RTP from remote endpoint is handled via // VoipNetwork interface. |local_ssrc| is optional and when local_ssrc is not // set, some random value will be used by voip engine. - // Returns value is optional as to indicate the failure to create channel. - virtual absl::optional CreateChannel( - Transport* transport, - absl::optional local_ssrc) = 0; + // Returns a ChannelId created for caller to handle subsequent Channel + // operations. + virtual ChannelId CreateChannel(Transport* transport, + absl::optional local_ssrc) = 0; // Releases |channel_id| that no longer has any use. - virtual void ReleaseChannel(ChannelId channel_id) = 0; + // Returns following VoipResult; + // kOk - |channel_id| is released. + // kInvalidArgument - |channel_id| is invalid. + // kInternal - Fails to stop audio output device. + virtual VoipResult ReleaseChannel(ChannelId channel_id) = 0; - // Starts sending on |channel_id|. This will start microphone if not started - // yet. Returns false if initialization has failed on selected microphone - // device. API is subject to expand to reflect error condition to application - // later. - virtual bool StartSend(ChannelId channel_id) = 0; + // Starts sending on |channel_id|. This starts microphone if not started yet. + // Returns following VoipResult; + // kOk - Channel successfully started to send. 
+ // kInvalidArgument - |channel_id| is invalid. + // kFailedPrecondition - Missing prerequisite on VoipCodec::SetSendCodec. + // kInternal - initialization has failed on selected microphone. + virtual VoipResult StartSend(ChannelId channel_id) = 0; // Stops sending on |channel_id|. If this is the last active channel, it will // stop microphone input from underlying audio platform layer. - // Returns false if termination logic has failed on selected microphone - // device. API is subject to expand to reflect error condition to application - // later. - virtual bool StopSend(ChannelId channel_id) = 0; + // Returns following VoipResult; + // kOk - Channel successfully stopped to send. + // kInvalidArgument - |channel_id| is invalid. + // kInternal - Failed to stop the active microphone device. + virtual VoipResult StopSend(ChannelId channel_id) = 0; // Starts playing on speaker device for |channel_id|. // This will start underlying platform speaker device if not started. - // Returns false if initialization has failed - // on selected speaker device. API is subject to expand to reflect error - // condition to application later. - virtual bool StartPlayout(ChannelId channel_id) = 0; + // Returns following VoipResult; + // kOk - Channel successfully started to play out. + // kInvalidArgument - |channel_id| is invalid. + // kFailedPrecondition - Missing prerequisite on VoipCodec::SetReceiveCodecs. + // kInternal - Failed to initializate the selected speaker device. + virtual VoipResult StartPlayout(ChannelId channel_id) = 0; // Stops playing on speaker device for |channel_id|. - // If this is the last active channel playing, then it will stop speaker - // from the platform layer. - // Returns false if termination logic has failed on selected speaker device. - // API is subject to expand to reflect error condition to application later. 
- virtual bool StopPlayout(ChannelId channel_id) = 0; + // Returns following VoipResult; + // kOk - Channel successfully stopped t play out. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult StopPlayout(ChannelId channel_id) = 0; protected: virtual ~VoipBase() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_codec.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_codec.h index eb42c449d..fec3827db 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_codec.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_codec.h @@ -29,15 +29,21 @@ namespace webrtc { class VoipCodec { public: // Set encoder type here along with its payload type to use. - virtual void SetSendCodec(ChannelId channel_id, - int payload_type, - const SdpAudioFormat& encoder_spec) = 0; + // Returns following VoipResult; + // kOk - sending codec is set as provided. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult SetSendCodec(ChannelId channel_id, + int payload_type, + const SdpAudioFormat& encoder_spec) = 0; // Set decoder payload type here. In typical offer and answer model, // this should be called after payload type has been agreed in media // session. Note that payload type can differ with same codec in each // direction. - virtual void SetReceiveCodecs( + // Returns following VoipResult; + // kOk - receiving codecs are set as provided. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult SetReceiveCodecs( ChannelId channel_id, const std::map& decoder_specs) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_dtmf.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_dtmf.h index 56817bae5..a7367bed5 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_dtmf.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_dtmf.h @@ -43,9 +43,12 @@ class VoipDtmf { // Register the payload type and sample rate for DTMF (RFC 4733) payload. 
// Must be called exactly once prior to calling SendDtmfEvent after payload // type has been negotiated with remote. - virtual void RegisterTelephoneEventType(ChannelId channel_id, - int rtp_payload_type, - int sample_rate_hz) = 0; + // Returns following VoipResult; + // kOk - telephone event type is registered as provided. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) = 0; // Send DTMF named event as specified by // https://tools.ietf.org/html/rfc4733#section-3.2 @@ -53,10 +56,14 @@ class VoipDtmf { // in place of real RTP packets instead. // Must be called after RegisterTelephoneEventType and VoipBase::StartSend // have been called. - // Returns true if the requested DTMF event is successfully scheduled. - virtual bool SendDtmfEvent(ChannelId channel_id, - DtmfEvent dtmf_event, - int duration_ms) = 0; + // Returns following VoipResult; + // kOk - requested DTMF event is successfully scheduled. + // kInvalidArgument - |channel_id| is invalid. + // kFailedPrecondition - Missing prerequisite on RegisterTelephoneEventType + // or sending state. + virtual VoipResult SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) = 0; protected: virtual ~VoipDtmf() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine.h index 69c0a8504..d223f6ad6 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_engine.h @@ -23,7 +23,7 @@ class VoipVolumeControl; // VoipEngine is the main interface serving as the entry point for all VoIP // APIs. A single instance of VoipEngine should suffice the most of the need for // typical VoIP applications as it handles multiple media sessions including a -// specialized session type like ad-hoc mesh conferencing. 
Below example code +// specialized session type like ad-hoc conference. Below example code // describes the typical sequence of API usage. Each API header contains more // description on what the methods are used for. // @@ -38,36 +38,35 @@ class VoipVolumeControl; // config.audio_processing = AudioProcessingBuilder().Create(); // // auto voip_engine = CreateVoipEngine(std::move(config)); -// if (!voip_engine) return some_failure; // // auto& voip_base = voip_engine->Base(); // auto& voip_codec = voip_engine->Codec(); // auto& voip_network = voip_engine->Network(); // -// absl::optional channel = -// voip_base.CreateChannel(&app_transport_); -// if (!channel) return some_failure; +// ChannelId channel = voip_base.CreateChannel(&app_transport_); // // // After SDP offer/answer, set payload type and codecs that have been // // decided through SDP negotiation. -// voip_codec.SetSendCodec(*channel, ...); -// voip_codec.SetReceiveCodecs(*channel, ...); +// // VoipResult handling omitted here. +// voip_codec.SetSendCodec(channel, ...); +// voip_codec.SetReceiveCodecs(channel, ...); // // // Start sending and playing RTP on voip channel. -// voip_base.StartSend(*channel); -// voip_base.StartPlayout(*channel); +// // VoipResult handling omitted here. +// voip_base.StartSend(channel); +// voip_base.StartPlayout(channel); // // // Inject received RTP/RTCP through VoipNetwork interface. -// voip_network.ReceivedRTPPacket(*channel, ...); -// voip_network.ReceivedRTCPPacket(*channel, ...); +// // VoipResult handling omitted here. +// voip_network.ReceivedRTPPacket(channel, ...); +// voip_network.ReceivedRTCPPacket(channel, ...); // // // Stop and release voip channel. -// voip_base.StopSend(*channel); -// voip_base.StopPlayout(*channel); -// voip_base.ReleaseChannel(*channel); +// // VoipResult handling omitted here. 
+// voip_base.StopSend(channel); +// voip_base.StopPlayout(channel); +// voip_base.ReleaseChannel(channel); // -// Current VoipEngine defines three sub-API classes and is subject to expand in -// near future. class VoipEngine { public: virtual ~VoipEngine() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_network.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_network.h index c49c7695b..c820ca04a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_network.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_network.h @@ -18,20 +18,22 @@ namespace webrtc { // VoipNetwork interface provides any network related interfaces such as // processing received RTP/RTCP packet from remote endpoint. This interface -// requires a ChannelId created via VoipBase interface. Note that using invalid -// (previously released) ChannelId will silently fail these API calls as it -// would have released underlying audio components. It's anticipated that caller -// may be using different thread for network I/O where released channel id is -// still used to input incoming RTP packets in which case we should silently -// ignore. The interface is subjected to expand as needed in near future. +// requires a ChannelId created via VoipBase interface. class VoipNetwork { public: // The data received from the network including RTP header is passed here. - virtual void ReceivedRTPPacket(ChannelId channel_id, - rtc::ArrayView rtp_packet) = 0; + // Returns following VoipResult; + // kOk - received RTP packet is processed. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult ReceivedRTPPacket( + ChannelId channel_id, + rtc::ArrayView rtp_packet) = 0; // The data received from the network including RTCP header is passed here. - virtual void ReceivedRTCPPacket( + // Returns following VoipResult; + // kOk - received RTCP packet is processed. + // kInvalidArgument - |channel_id| is invalid. 
+ virtual VoipResult ReceivedRTCPPacket( ChannelId channel_id, rtc::ArrayView rtcp_packet) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_statistics.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_statistics.h index cf01e95e9..1b9b1646b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_statistics.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_statistics.h @@ -26,14 +26,68 @@ struct IngressStatistics { double total_duration = 0.0; }; +// Remote statistics obtained via remote RTCP SR/RR report received. +struct RemoteRtcpStatistics { + // Jitter as defined in RFC 3550 [6.4.1] expressed in seconds. + double jitter = 0.0; + + // Cumulative packets lost as defined in RFC 3550 [6.4.1] + int64_t packets_lost = 0; + + // Fraction lost as defined in RFC 3550 [6.4.1] expressed as a floating + // pointer number. + double fraction_lost = 0.0; + + // https://w3c.github.io/webrtc-stats/#dom-rtcremoteinboundrtpstreamstats-roundtriptime + absl::optional round_trip_time; + + // Last time (not RTP timestamp) when RTCP report received in milliseconds. + int64_t last_report_received_timestamp_ms; +}; + +struct ChannelStatistics { + // https://w3c.github.io/webrtc-stats/#dom-rtcsentrtpstreamstats-packetssent + uint64_t packets_sent = 0; + + // https://w3c.github.io/webrtc-stats/#dom-rtcsentrtpstreamstats-bytessent + uint64_t bytes_sent = 0; + + // https://w3c.github.io/webrtc-stats/#dom-rtcreceivedrtpstreamstats-packetsreceived + uint64_t packets_received = 0; + + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-bytesreceived + uint64_t bytes_received = 0; + + // https://w3c.github.io/webrtc-stats/#dom-rtcreceivedrtpstreamstats-jitter + double jitter = 0.0; + + // https://w3c.github.io/webrtc-stats/#dom-rtcreceivedrtpstreamstats-packetslost + int64_t packets_lost = 0; + + // SSRC from remote media endpoint as indicated either by RTP header in RFC + // 3550 [5.1] or RTCP SSRC of sender in RFC 3550 [6.4.1]. 
+ absl::optional remote_ssrc; + + absl::optional remote_rtcp; +}; + // VoipStatistics interface provides the interfaces for querying metrics around // the jitter buffer (NetEq) performance. class VoipStatistics { public: - // Gets the audio ingress statistics. Returns absl::nullopt when channel_id is - // invalid. - virtual absl::optional GetIngressStatistics( - ChannelId channel_id) = 0; + // Gets the audio ingress statistics by |ingress_stats| reference. + // Returns following VoipResult; + // kOk - successfully set provided IngressStatistics reference. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult GetIngressStatistics(ChannelId channel_id, + IngressStatistics& ingress_stats) = 0; + + // Gets the channel statistics by |channel_stats| reference. + // Returns following VoipResult; + // kOk - successfully set provided ChannelStatistics reference. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult GetChannelStatistics(ChannelId channel_id, + ChannelStatistics& channel_stats) = 0; protected: virtual ~VoipStatistics() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/voip/voip_volume_control.h b/TMessagesProj/jni/voip/webrtc/api/voip/voip_volume_control.h index 54e446715..d91eabc5a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/voip/voip_volume_control.h +++ b/TMessagesProj/jni/voip/webrtc/api/voip/voip_volume_control.h @@ -36,17 +36,24 @@ class VoipVolumeControl { // Mute/unmutes the microphone input sample before encoding process. Note that // mute doesn't affect audio input level and energy values as input sample is // silenced after the measurement. - virtual void SetInputMuted(ChannelId channel_id, bool enable) = 0; + // Returns following VoipResult; + // kOk - input source muted or unmuted as provided by |enable|. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult SetInputMuted(ChannelId channel_id, bool enable) = 0; - // Gets the microphone volume info. 
- // Returns absl::nullopt if |channel_id| is invalid. - virtual absl::optional GetInputVolumeInfo( - ChannelId channel_id) = 0; + // Gets the microphone volume info via |volume_info| reference. + // Returns following VoipResult; + // kOk - successfully set provided input volume info. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult GetInputVolumeInfo(ChannelId channel_id, + VolumeInfo& volume_info) = 0; - // Gets the speaker volume info. - // Returns absl::nullopt if |channel_id| is invalid. - virtual absl::optional GetOutputVolumeInfo( - ChannelId channel_id) = 0; + // Gets the speaker volume info via |volume_info| reference. + // Returns following VoipResult; + // kOk - successfully set provided output volume info. + // kInvalidArgument - |channel_id| is invalid. + virtual VoipResult GetOutputVolumeInfo(ChannelId channel_id, + VolumeInfo& volume_info) = 0; protected: virtual ~VoipVolumeControl() = default; diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc index 54c8a0297..467647be5 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.cc @@ -24,6 +24,7 @@ #include "audio/conversion.h" #include "call/rtp_config.h" #include "call/rtp_stream_receiver_controller_interface.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" @@ -118,21 +119,24 @@ AudioReceiveStream::AudioReceiveStream( webrtc::RtcEventLog* event_log, std::unique_ptr channel_receive) : audio_state_(audio_state), - channel_receive_(std::move(channel_receive)), - source_tracker_(clock) { + source_tracker_(clock), + channel_receive_(std::move(channel_receive)) { RTC_LOG(LS_INFO) << "AudioReceiveStream: " << config.rtp.remote_ssrc; RTC_DCHECK(config.decoder_factory); RTC_DCHECK(config.rtcp_send_transport); 
RTC_DCHECK(audio_state_); RTC_DCHECK(channel_receive_); - module_process_thread_checker_.Detach(); - RTC_DCHECK(receiver_controller); RTC_DCHECK(packet_router); // Configure bandwidth estimation. channel_receive_->RegisterReceiverCongestionControlObjects(packet_router); + // When output is muted, ChannelReceive will directly notify the source + // tracker of "delivered" frames, so RtpReceiver information will continue to + // be updated. + channel_receive_->SetSourceTracker(&source_tracker_); + // Register with transport. rtp_stream_receiver_ = receiver_controller->CreateReceiver( config.rtp.remote_ssrc, channel_receive_.get()); @@ -173,6 +177,11 @@ void AudioReceiveStream::Stop() { audio_state()->RemoveReceivingStream(this); } +bool AudioReceiveStream::IsRunning() const { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return playing_; +} + webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats( bool get_and_clear_legacy_stats) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); @@ -253,6 +262,14 @@ webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats( stats.decoding_plc_cng = ds.decoded_plc_cng; stats.decoding_muted_output = ds.decoded_muted_output; + stats.last_sender_report_timestamp_ms = + call_stats.last_sender_report_timestamp_ms; + stats.last_sender_report_remote_timestamp_ms = + call_stats.last_sender_report_remote_timestamp_ms; + stats.sender_reports_packets_sent = call_stats.sender_reports_packets_sent; + stats.sender_reports_bytes_sent = call_stats.sender_reports_bytes_sent; + stats.sender_reports_reports_count = call_stats.sender_reports_reports_count; + return stats; } @@ -306,14 +323,10 @@ uint32_t AudioReceiveStream::id() const { } absl::optional AudioReceiveStream::GetInfo() const { - RTC_DCHECK_RUN_ON(&module_process_thread_checker_); - absl::optional info = channel_receive_->GetSyncInfo(); - - if (!info) - return absl::nullopt; - - info->current_delay_ms = channel_receive_->GetDelayEstimate(); - return info; + // 
TODO(bugs.webrtc.org/11993): This is called via RtpStreamsSynchronizer, + // expect to be called on the network thread. + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return channel_receive_->GetSyncInfo(); } bool AudioReceiveStream::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, @@ -331,11 +344,14 @@ void AudioReceiveStream::SetEstimatedPlayoutNtpTimestampMs( } bool AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { - RTC_DCHECK_RUN_ON(&module_process_thread_checker_); + // TODO(bugs.webrtc.org/11993): This is called via RtpStreamsSynchronizer, + // expect to be called on the network thread. + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return channel_receive_->SetMinimumPlayoutDelay(delay_ms); } void AudioReceiveStream::AssociateSendStream(AudioSendStream* send_stream) { + // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_receive_->SetAssociatedSendChannel( send_stream ? send_stream->GetChannel() : nullptr); @@ -357,6 +373,8 @@ const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const { const AudioSendStream* AudioReceiveStream::GetAssociatedSendStreamForTesting() const { + // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread or + // remove test method and |associated_send_stream_| variable. 
RTC_DCHECK_RUN_ON(&worker_thread_checker_); return associated_send_stream_; } diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h index 32f8b60d5..a8438c252 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h @@ -17,11 +17,11 @@ #include "api/audio/audio_mixer.h" #include "api/neteq/neteq_factory.h" #include "api/rtp_headers.h" +#include "api/sequence_checker.h" #include "audio/audio_state.h" #include "call/audio_receive_stream.h" #include "call/syncable.h" #include "modules/rtp_rtcp/source/source_tracker.h" -#include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -71,6 +71,8 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, void Reconfigure(const webrtc::AudioReceiveStream::Config& config) override; void Start() override; void Stop() override; + bool IsRunning() const override; + webrtc::AudioReceiveStream::Stats GetStats( bool get_and_clear_legacy_stats) const override; void SetSink(AudioSinkInterface* sink) override; @@ -106,12 +108,11 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream, AudioState* audio_state() const; - rtc::ThreadChecker worker_thread_checker_; - rtc::ThreadChecker module_process_thread_checker_; + SequenceChecker worker_thread_checker_; webrtc::AudioReceiveStream::Config config_; rtc::scoped_refptr audio_state_; - const std::unique_ptr channel_receive_; SourceTracker source_tracker_; + const std::unique_ptr channel_receive_; AudioSendStream* associated_send_stream_ = nullptr; bool playing_ RTC_GUARDED_BY(worker_thread_checker_) = false; diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc index 1c0a32f86..b769569fd 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc +++ 
b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.cc @@ -143,7 +143,6 @@ AudioSendStream::AudioSendStream( std::unique_ptr channel_send) : clock_(clock), worker_queue_(rtp_transport->GetWorkerQueue()), - audio_send_side_bwe_(field_trial::IsEnabled("WebRTC-Audio-SendSideBwe")), allocate_audio_without_feedback_( field_trial::IsEnabled("WebRTC-Audio-ABWENoTWCC")), enable_audio_alr_probing_( @@ -169,13 +168,14 @@ AudioSendStream::AudioSendStream( RTC_DCHECK(rtp_rtcp_module_); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); ConfigureStream(config, true); - + UpdateCachedTargetAudioBitrateConstraints(); pacer_thread_checker_.Detach(); } AudioSendStream::~AudioSendStream() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_LOG(LS_INFO) << "~AudioSendStream: " << config_.rtp.ssrc; RTC_DCHECK(!sending_); channel_send_->ResetSenderCongestionControlObjects(); @@ -187,13 +187,13 @@ AudioSendStream::~AudioSendStream() { } const webrtc::AudioSendStream::Config& AudioSendStream::GetConfig() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return config_; } void AudioSendStream::Reconfigure( const webrtc::AudioSendStream::Config& new_config) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); ConfigureStream(new_config, false); } @@ -289,7 +289,7 @@ void AudioSendStream::ConfigureStream( RtcpBandwidthObserver* bandwidth_observer = nullptr; - if (audio_send_side_bwe_ && !allocate_audio_without_feedback_ && + if (!allocate_audio_without_feedback_ && new_ids.transport_sequence_number != 0) { rtp_rtcp_module_->RegisterRtpHeaderExtension( TransportSequenceNumber::kUri, new_ids.transport_sequence_number); @@ -352,20 +352,22 @@ void AudioSendStream::ConfigureStream( } channel_send_->CallEncoder([this](AudioEncoder* encoder) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (!encoder) { return; } - worker_queue_->PostTask( - [this, 
length_range = encoder->GetFrameLengthRange()] { - RTC_DCHECK_RUN_ON(worker_queue_); - frame_length_range_ = length_range; - }); + frame_length_range_ = encoder->GetFrameLengthRange(); + UpdateCachedTargetAudioBitrateConstraints(); }); if (sending_) { ReconfigureBitrateObserver(new_config); } + config_ = new_config; + if (!first_time) { + UpdateCachedTargetAudioBitrateConstraints(); + } } void AudioSendStream::Start() { @@ -380,13 +382,7 @@ void AudioSendStream::Start() { if (send_side_bwe_with_overhead_) rtp_transport_->IncludeOverheadInPacedSender(); rtp_rtcp_module_->SetAsPartOfAllocation(true); - rtc::Event thread_sync_event; - worker_queue_->PostTask([&] { - RTC_DCHECK_RUN_ON(worker_queue_); - ConfigureBitrateObserver(); - thread_sync_event.Set(); - }); - thread_sync_event.Wait(rtc::Event::kForever); + ConfigureBitrateObserver(); } else { rtp_rtcp_module_->SetAsPartOfAllocation(false); } @@ -397,7 +393,7 @@ void AudioSendStream::Start() { } void AudioSendStream::Stop() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (!sending_) { return; } @@ -432,14 +428,14 @@ bool AudioSendStream::SendTelephoneEvent(int payload_type, int payload_frequency, int event, int duration_ms) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_send_->SetSendTelephoneEventPayloadType(payload_type, payload_frequency); return channel_send_->SendTelephoneEventOutband(event, duration_ms); } void AudioSendStream::SetMuted(bool muted) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_send_->SetInputMute(muted); } @@ -449,7 +445,7 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const { webrtc::AudioSendStream::Stats AudioSendStream::GetStats( bool has_remote_tracks) const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); webrtc::AudioSendStream::Stats stats; 
stats.local_ssrc = config_.rtp.ssrc; stats.target_bitrate_bps = channel_send_->GetBitrate(); @@ -510,12 +506,14 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( void AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_send_->ReceivedRTCPPacket(packet, length); - worker_queue_->PostTask([&]() { + + { // Poll if overhead has changed, which it can do if ack triggers us to stop // sending mid/rid. MutexLock lock(&overhead_per_packet_lock_); UpdateOverheadForEncoder(); - }); + } + UpdateCachedTargetAudioBitrateConstraints(); } uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) { @@ -524,9 +522,11 @@ uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) { // Pick a target bitrate between the constraints. Overrules the allocator if // it 1) allocated a bitrate of zero to disable the stream or 2) allocated a // higher than max to allow for e.g. extra FEC. - auto constraints = GetMinMaxBitrateConstraints(); - update.target_bitrate.Clamp(constraints.min, constraints.max); - update.stable_target_bitrate.Clamp(constraints.min, constraints.max); + RTC_DCHECK(cached_constraints_.has_value()); + update.target_bitrate.Clamp(cached_constraints_->min, + cached_constraints_->max); + update.stable_target_bitrate.Clamp(cached_constraints_->min, + cached_constraints_->max); channel_send_->OnBitrateAllocation(update); @@ -537,13 +537,17 @@ uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) { void AudioSendStream::SetTransportOverhead( int transport_overhead_per_packet_bytes) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); - MutexLock lock(&overhead_per_packet_lock_); - transport_overhead_per_packet_bytes_ = transport_overhead_per_packet_bytes; - UpdateOverheadForEncoder(); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + { + MutexLock lock(&overhead_per_packet_lock_); + transport_overhead_per_packet_bytes_ = 
transport_overhead_per_packet_bytes; + UpdateOverheadForEncoder(); + } + UpdateCachedTargetAudioBitrateConstraints(); } void AudioSendStream::UpdateOverheadForEncoder() { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes(); if (overhead_per_packet_ == overhead_per_packet_bytes) { return; @@ -553,19 +557,11 @@ void AudioSendStream::UpdateOverheadForEncoder() { channel_send_->CallEncoder([&](AudioEncoder* encoder) { encoder->OnReceivedOverhead(overhead_per_packet_bytes); }); - auto update_task = [this, overhead_per_packet_bytes] { - RTC_DCHECK_RUN_ON(worker_queue_); - if (total_packet_overhead_bytes_ != overhead_per_packet_bytes) { - total_packet_overhead_bytes_ = overhead_per_packet_bytes; - if (registered_with_allocator_) { - ConfigureBitrateObserver(); - } + if (total_packet_overhead_bytes_ != overhead_per_packet_bytes) { + total_packet_overhead_bytes_ = overhead_per_packet_bytes; + if (registered_with_allocator_) { + ConfigureBitrateObserver(); } - }; - if (worker_queue_->IsCurrent()) { - update_task(); - } else { - worker_queue_->PostTask(update_task); } } @@ -603,7 +599,6 @@ const internal::AudioState* AudioSendStream::audio_state() const { void AudioSendStream::StoreEncoderProperties(int sample_rate_hz, size_t num_channels) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); encoder_sample_rate_hz_ = sample_rate_hz; encoder_num_channels_ = num_channels; if (sending_) { @@ -801,7 +796,6 @@ void AudioSendStream::ReconfigureCNG(const Config& new_config) { void AudioSendStream::ReconfigureBitrateObserver( const webrtc::AudioSendStream::Config& new_config) { - RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Since the Config's default is for both of these to be -1, this test will // allow us to configure the bitrate observer if the new config has bitrate // limits set, but would only have us call RemoveBitrateObserver if we were @@ -809,8 +803,7 @@ void AudioSendStream::ReconfigureBitrateObserver( if 
(config_.min_bitrate_bps == new_config.min_bitrate_bps && config_.max_bitrate_bps == new_config.max_bitrate_bps && config_.bitrate_priority == new_config.bitrate_priority && - (TransportSeqNumId(config_) == TransportSeqNumId(new_config) || - !audio_send_side_bwe_) && + TransportSeqNumId(config_) == TransportSeqNumId(new_config) && config_.audio_network_adaptor_config == new_config.audio_network_adaptor_config) { return; @@ -821,20 +814,13 @@ void AudioSendStream::ReconfigureBitrateObserver( rtp_transport_->AccountForAudioPacketsInPacedSender(true); if (send_side_bwe_with_overhead_) rtp_transport_->IncludeOverheadInPacedSender(); - rtc::Event thread_sync_event; - worker_queue_->PostTask([&] { - RTC_DCHECK_RUN_ON(worker_queue_); - // We may get a callback immediately as the observer is registered, so - // make - // sure the bitrate limits in config_ are up-to-date. - config_.min_bitrate_bps = new_config.min_bitrate_bps; - config_.max_bitrate_bps = new_config.max_bitrate_bps; + // We may get a callback immediately as the observer is registered, so + // make sure the bitrate limits in config_ are up-to-date. + config_.min_bitrate_bps = new_config.min_bitrate_bps; + config_.max_bitrate_bps = new_config.max_bitrate_bps; - config_.bitrate_priority = new_config.bitrate_priority; - ConfigureBitrateObserver(); - thread_sync_event.Set(); - }); - thread_sync_event.Wait(rtc::Event::kForever); + config_.bitrate_priority = new_config.bitrate_priority; + ConfigureBitrateObserver(); rtp_rtcp_module_->SetAsPartOfAllocation(true); } else { rtp_transport_->AccountForAudioPacketsInPacedSender(false); @@ -847,6 +833,7 @@ void AudioSendStream::ConfigureBitrateObserver() { // This either updates the current observer or adds a new observer. // TODO(srte): Add overhead compensation here. 
auto constraints = GetMinMaxBitrateConstraints(); + RTC_DCHECK(constraints.has_value()); DataRate priority_bitrate = allocation_settings_.priority_bitrate; if (send_side_bwe_with_overhead_) { @@ -868,30 +855,40 @@ void AudioSendStream::ConfigureBitrateObserver() { if (allocation_settings_.priority_bitrate_raw) priority_bitrate = *allocation_settings_.priority_bitrate_raw; - bitrate_allocator_->AddObserver( - this, - MediaStreamAllocationConfig{ - constraints.min.bps(), constraints.max.bps(), 0, - priority_bitrate.bps(), true, - allocation_settings_.bitrate_priority.value_or( - config_.bitrate_priority)}); + worker_queue_->PostTask([this, constraints, priority_bitrate, + config_bitrate_priority = config_.bitrate_priority] { + RTC_DCHECK_RUN_ON(worker_queue_); + bitrate_allocator_->AddObserver( + this, + MediaStreamAllocationConfig{ + constraints->min.bps(), constraints->max.bps(), + 0, priority_bitrate.bps(), true, + allocation_settings_.bitrate_priority.value_or( + config_bitrate_priority)}); + }); registered_with_allocator_ = true; } void AudioSendStream::RemoveBitrateObserver() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + registered_with_allocator_ = false; rtc::Event thread_sync_event; worker_queue_->PostTask([this, &thread_sync_event] { RTC_DCHECK_RUN_ON(worker_queue_); - registered_with_allocator_ = false; bitrate_allocator_->RemoveObserver(this); thread_sync_event.Set(); }); thread_sync_event.Wait(rtc::Event::kForever); } -AudioSendStream::TargetAudioBitrateConstraints +absl::optional AudioSendStream::GetMinMaxBitrateConstraints() const { + if (config_.min_bitrate_bps < 0 || config_.max_bitrate_bps < 0) { + RTC_LOG(LS_WARNING) << "Config is invalid: min_bitrate_bps=" + << config_.min_bitrate_bps + << "; max_bitrate_bps=" << config_.max_bitrate_bps + << "; both expected greater or equal to 0"; + return absl::nullopt; + } TargetAudioBitrateConstraints constraints{ DataRate::BitsPerSec(config_.min_bitrate_bps), 
DataRate::BitsPerSec(config_.max_bitrate_bps)}; @@ -904,7 +901,11 @@ AudioSendStream::GetMinMaxBitrateConstraints() const { RTC_DCHECK_GE(constraints.min, DataRate::Zero()); RTC_DCHECK_GE(constraints.max, DataRate::Zero()); - RTC_DCHECK_GE(constraints.max, constraints.min); + if (constraints.max < constraints.min) { + RTC_LOG(LS_WARNING) << "TargetAudioBitrateConstraints::max is less than " + << "TargetAudioBitrateConstraints::min"; + return absl::nullopt; + } if (send_side_bwe_with_overhead_) { if (use_legacy_overhead_calculation_) { // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) @@ -915,7 +916,10 @@ AudioSendStream::GetMinMaxBitrateConstraints() const { constraints.min += kMinOverhead; constraints.max += kMinOverhead; } else { - RTC_DCHECK(frame_length_range_); + if (!frame_length_range_.has_value()) { + RTC_LOG(LS_WARNING) << "frame_length_range_ is not set"; + return absl::nullopt; + } const DataSize kOverheadPerPacket = DataSize::Bytes(total_packet_overhead_bytes_); constraints.min += kOverheadPerPacket / frame_length_range_->second; @@ -929,5 +933,18 @@ void AudioSendStream::RegisterCngPayloadType(int payload_type, int clockrate_hz) { channel_send_->RegisterCngPayloadType(payload_type, clockrate_hz); } + +void AudioSendStream::UpdateCachedTargetAudioBitrateConstraints() { + absl::optional + new_constraints = GetMinMaxBitrateConstraints(); + if (!new_constraints.has_value()) { + return; + } + worker_queue_->PostTask([this, new_constraints]() { + RTC_DCHECK_RUN_ON(worker_queue_); + cached_constraints_ = new_constraints; + }); +} + } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h index 12fcb9f21..25346ae37 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream.h @@ -15,6 +15,7 @@ #include #include +#include "api/sequence_checker.h" #include 
"audio/audio_level.h" #include "audio/channel_send.h" #include "call/audio_send_stream.h" @@ -25,7 +26,6 @@ #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" -#include "rtc_base/thread_checker.h" namespace webrtc { class RtcEventLog; @@ -121,22 +121,29 @@ class AudioSendStream final : public webrtc::AudioSendStream, internal::AudioState* audio_state(); const internal::AudioState* audio_state() const; - void StoreEncoderProperties(int sample_rate_hz, size_t num_channels); + void StoreEncoderProperties(int sample_rate_hz, size_t num_channels) + RTC_RUN_ON(worker_thread_checker_); - void ConfigureStream(const Config& new_config, bool first_time); - bool SetupSendCodec(const Config& new_config); - bool ReconfigureSendCodec(const Config& new_config); - void ReconfigureANA(const Config& new_config); - void ReconfigureCNG(const Config& new_config); - void ReconfigureBitrateObserver(const Config& new_config); + void ConfigureStream(const Config& new_config, bool first_time) + RTC_RUN_ON(worker_thread_checker_); + bool SetupSendCodec(const Config& new_config) + RTC_RUN_ON(worker_thread_checker_); + bool ReconfigureSendCodec(const Config& new_config) + RTC_RUN_ON(worker_thread_checker_); + void ReconfigureANA(const Config& new_config) + RTC_RUN_ON(worker_thread_checker_); + void ReconfigureCNG(const Config& new_config) + RTC_RUN_ON(worker_thread_checker_); + void ReconfigureBitrateObserver(const Config& new_config) + RTC_RUN_ON(worker_thread_checker_); - void ConfigureBitrateObserver() RTC_RUN_ON(worker_queue_); - void RemoveBitrateObserver(); + void ConfigureBitrateObserver() RTC_RUN_ON(worker_thread_checker_); + void RemoveBitrateObserver() RTC_RUN_ON(worker_thread_checker_); // Returns bitrate constraints, maybe including overhead when enabled by // field trial. 
- TargetAudioBitrateConstraints GetMinMaxBitrateConstraints() const - RTC_RUN_ON(worker_queue_); + absl::optional GetMinMaxBitrateConstraints() + const RTC_RUN_ON(worker_thread_checker_); // Sets per-packet overhead on encoded (for ANA) based on current known values // of transport and packetization overheads. @@ -147,30 +154,35 @@ class AudioSendStream final : public webrtc::AudioSendStream, size_t GetPerPacketOverheadBytes() const RTC_EXCLUSIVE_LOCKS_REQUIRED(overhead_per_packet_lock_); - void RegisterCngPayloadType(int payload_type, int clockrate_hz); + void RegisterCngPayloadType(int payload_type, int clockrate_hz) + RTC_RUN_ON(worker_thread_checker_); + + void UpdateCachedTargetAudioBitrateConstraints() + RTC_RUN_ON(worker_thread_checker_); + Clock* clock_; - rtc::ThreadChecker worker_thread_checker_; - rtc::ThreadChecker pacer_thread_checker_; + SequenceChecker worker_thread_checker_; + SequenceChecker pacer_thread_checker_; rtc::RaceChecker audio_capture_race_checker_; rtc::TaskQueue* worker_queue_; - const bool audio_send_side_bwe_; const bool allocate_audio_without_feedback_; const bool force_no_audio_feedback_ = allocate_audio_without_feedback_; const bool enable_audio_alr_probing_; const bool send_side_bwe_with_overhead_; const AudioAllocationConfig allocation_settings_; - webrtc::AudioSendStream::Config config_; + webrtc::AudioSendStream::Config config_ + RTC_GUARDED_BY(worker_thread_checker_); rtc::scoped_refptr audio_state_; const std::unique_ptr channel_send_; RtcEventLog* const event_log_; const bool use_legacy_overhead_calculation_; - int encoder_sample_rate_hz_ = 0; - size_t encoder_num_channels_ = 0; - bool sending_ = false; + int encoder_sample_rate_hz_ RTC_GUARDED_BY(worker_thread_checker_) = 0; + size_t encoder_num_channels_ RTC_GUARDED_BY(worker_thread_checker_) = 0; + bool sending_ RTC_GUARDED_BY(worker_thread_checker_) = false; mutable Mutex audio_level_lock_; // Keeps track of audio level, total audio energy and total samples duration. 
// https://w3c.github.io/webrtc-stats/#dom-rtcaudiohandlerstats-totalaudioenergy @@ -178,6 +190,9 @@ class AudioSendStream final : public webrtc::AudioSendStream, BitrateAllocatorInterface* const bitrate_allocator_ RTC_GUARDED_BY(worker_queue_); + // Constrains cached to be accessed from |worker_queue_|. + absl::optional + cached_constraints_ RTC_GUARDED_BY(worker_queue_) = absl::nullopt; RtpTransportControllerSendInterface* const rtp_transport_; RtpRtcpInterface* const rtp_rtcp_module_; @@ -206,10 +221,12 @@ class AudioSendStream final : public webrtc::AudioSendStream, size_t transport_overhead_per_packet_bytes_ RTC_GUARDED_BY(overhead_per_packet_lock_) = 0; - bool registered_with_allocator_ RTC_GUARDED_BY(worker_queue_) = false; - size_t total_packet_overhead_bytes_ RTC_GUARDED_BY(worker_queue_) = 0; + bool registered_with_allocator_ RTC_GUARDED_BY(worker_thread_checker_) = + false; + size_t total_packet_overhead_bytes_ RTC_GUARDED_BY(worker_thread_checker_) = + 0; absl::optional> frame_length_range_ - RTC_GUARDED_BY(worker_queue_); + RTC_GUARDED_BY(worker_thread_checker_); }; } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream_tests.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream_tests.cc index d2ea99ce0..e3895039d 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_send_stream_tests.cc @@ -188,17 +188,10 @@ class TransportWideSequenceNumberObserver : public AudioSendTest { }; TEST_F(AudioSendStreamCallTest, SendsTransportWideSequenceNumbersInFieldTrial) { - ScopedFieldTrials field_trials("WebRTC-Audio-SendSideBwe/Enabled/"); TransportWideSequenceNumberObserver test(/*expect_sequence_number=*/true); RunBaseTest(&test); } -TEST_F(AudioSendStreamCallTest, - DoesNotSendTransportWideSequenceNumbersPerDefault) { - TransportWideSequenceNumberObserver test(/*expect_sequence_number=*/false); - RunBaseTest(&test); -} - 
TEST_F(AudioSendStreamCallTest, SendDtmf) { static const uint8_t kDtmfPayloadType = 120; static const int kDtmfPayloadFrequency = 8000; diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc index 566bae131..0e60f0372 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_state.cc @@ -187,6 +187,6 @@ void AudioState::UpdateNullAudioPollerState() { rtc::scoped_refptr AudioState::Create( const AudioState::Config& config) { - return new rtc::RefCountedObject(config); + return rtc::make_ref_counted(config); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_state.h b/TMessagesProj/jni/voip/webrtc/audio/audio_state.h index 5e766428d..89c748dc4 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_state.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_state.h @@ -15,11 +15,11 @@ #include #include +#include "api/sequence_checker.h" #include "audio/audio_transport_impl.h" #include "audio/null_audio_poller.h" #include "call/audio_state.h" #include "rtc_base/ref_count.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -65,8 +65,8 @@ class AudioState : public webrtc::AudioState { void UpdateAudioTransportWithSendingStreams(); void UpdateNullAudioPollerState(); - rtc::ThreadChecker thread_checker_; - rtc::ThreadChecker process_thread_checker_; + SequenceChecker thread_checker_; + SequenceChecker process_thread_checker_; const webrtc::AudioState::Config config_; bool recording_enabled_ = true; bool playout_enabled_ = true; diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc index 2788dacf7..fd33dbdf2 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc @@ -22,6 +22,7 @@ #include "api/crypto/frame_decryptor_interface.h" #include "api/frame_transformer_interface.h" #include 
"api/rtc_event_log/rtc_event_log.h" +#include "api/sequence_checker.h" #include "audio/audio_level.h" #include "audio/channel_receive_frame_transformer_delegate.h" #include "audio/channel_send.h" @@ -33,7 +34,8 @@ #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" -#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h" +#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" +#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" @@ -46,7 +48,6 @@ #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/metrics.h" @@ -162,6 +163,8 @@ class ChannelReceive : public ChannelReceiveInterface { int PreferredSampleRate() const override; + void SetSourceTracker(SourceTracker* source_tracker) override; + // Associate to a send channel. // Used for obtaining RTT for a receive-only channel. void SetAssociatedSendChannel(const ChannelSendInterface* channel) override; @@ -197,8 +200,8 @@ class ChannelReceive : public ChannelReceiveInterface { // we know about. The goal is to eventually split up voe::ChannelReceive into // parts with single-threaded semantics, and thereby reduce the need for // locks. - rtc::ThreadChecker worker_thread_checker_; - rtc::ThreadChecker module_process_thread_checker_; + SequenceChecker worker_thread_checker_; + // Methods accessed from audio and video threads are checked for sequential- // only access. We don't necessarily own and control these threads, so thread // checkers cannot be used. E.g. 
Chromium may transfer "ownership" from one @@ -219,6 +222,7 @@ class ChannelReceive : public ChannelReceiveInterface { std::unique_ptr rtp_receive_statistics_; std::unique_ptr rtp_rtcp_; const uint32_t remote_ssrc_; + SourceTracker* source_tracker_ = nullptr; // Info for GetSyncInfo is updated on network or worker thread, and queried on // the worker thread. @@ -233,6 +237,7 @@ class ChannelReceive : public ChannelReceiveInterface { AudioSinkInterface* audio_sink_ = nullptr; AudioLevel _outputAudioLevel; + Clock* const clock_; RemoteNtpTimeEstimator ntp_estimator_ RTC_GUARDED_BY(ts_stats_lock_); // Timestamp of the audio pulled from NetEq. @@ -257,25 +262,24 @@ class ChannelReceive : public ChannelReceiveInterface { // frame. int64_t capture_start_ntp_time_ms_ RTC_GUARDED_BY(ts_stats_lock_); - // uses - ProcessThread* _moduleProcessThreadPtr; + ProcessThread* const module_process_thread_; AudioDeviceModule* _audioDeviceModulePtr; float _outputGain RTC_GUARDED_BY(volume_settings_mutex_); - // An associated send channel. - mutable Mutex assoc_send_channel_lock_; const ChannelSendInterface* associated_send_channel_ - RTC_GUARDED_BY(assoc_send_channel_lock_); + RTC_GUARDED_BY(worker_thread_checker_); PacketRouter* packet_router_ = nullptr; - rtc::ThreadChecker construction_thread_; + SequenceChecker construction_thread_; // E2EE Audio Frame Decryption rtc::scoped_refptr frame_decryptor_; webrtc::CryptoOptions crypto_options_; - webrtc::AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_; + webrtc::AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_; + + webrtc::CaptureClockOffsetUpdater capture_clock_offset_updater_; rtc::scoped_refptr frame_transformer_delegate_; @@ -287,6 +291,21 @@ void ChannelReceive::OnReceivedPayloadData( if (!Playing()) { // Avoid inserting into NetEQ when we are not playing. Count the // packet as discarded. + + // If we have a source_tracker_, tell it that the frame has been + // "delivered". 
Normally, this happens in AudioReceiveStream when audio + // frames are pulled out, but when playout is muted, nothing is pulling + // frames. The downside of this approach is that frames delivered this way + // won't be delayed for playout, and therefore will be unsynchronized with + // (a) audio delay when playing and (b) any audio/video synchronization. But + // the alternative is that muting playout also stops the SourceTracker from + // updating RtpSource information. + if (source_tracker_) { + RtpPacketInfos::vector_type packet_vector = { + RtpPacketInfo(rtpHeader, clock_->CurrentTime())}; + source_tracker_->OnFrameDelivered(RtpPacketInfos(packet_vector)); + } + return; } @@ -321,7 +340,7 @@ void ChannelReceive::InitFrameTransformerDelegate( OnReceivedPayloadData(packet, header); }; frame_transformer_delegate_ = - new rtc::RefCountedObject( + rtc::make_ref_counted( std::move(receive_audio_callback), std::move(frame_transformer), rtc::Thread::Current()); frame_transformer_delegate_->Init(); @@ -418,6 +437,22 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( } } + // Fill in local capture clock offset in |audio_frame->packet_infos_|. 
+ RtpPacketInfos::vector_type packet_infos; + for (auto& packet_info : audio_frame->packet_infos_) { + absl::optional local_capture_clock_offset; + if (packet_info.absolute_capture_time().has_value()) { + local_capture_clock_offset = + capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset( + packet_info.absolute_capture_time() + ->estimated_capture_clock_offset); + } + RtpPacketInfo new_packet_info(packet_info); + new_packet_info.set_local_capture_clock_offset(local_capture_clock_offset); + packet_infos.push_back(std::move(new_packet_info)); + } + audio_frame->packet_infos_ = RtpPacketInfos(packet_infos); + { RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.TargetJitterBufferDelayMs", acm_receiver_.TargetDelayMs()); @@ -442,6 +477,10 @@ int ChannelReceive::PreferredSampleRate() const { acm_receiver_.last_output_sample_rate_hz()); } +void ChannelReceive::SetSourceTracker(SourceTracker* source_tracker) { + source_tracker_ = source_tracker; +} + ChannelReceive::ChannelReceive( Clock* clock, ProcessThread* module_process_thread, @@ -469,23 +508,21 @@ ChannelReceive::ChannelReceive( jitter_buffer_max_packets, jitter_buffer_fast_playout)), _outputAudioLevel(), + clock_(clock), ntp_estimator_(clock), playout_timestamp_rtp_(0), playout_delay_ms_(0), rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()), capture_start_rtp_time_stamp_(-1), capture_start_ntp_time_ms_(-1), - _moduleProcessThreadPtr(module_process_thread), + module_process_thread_(module_process_thread), _audioDeviceModulePtr(audio_device_module), _outputGain(1.0f), associated_send_channel_(nullptr), frame_decryptor_(frame_decryptor), crypto_options_(crypto_options), - absolute_capture_time_receiver_(clock) { - // TODO(nisse): Use _moduleProcessThreadPtr instead? 
- module_process_thread_checker_.Detach(); - - RTC_DCHECK(module_process_thread); + absolute_capture_time_interpolator_(clock) { + RTC_DCHECK(module_process_thread_); RTC_DCHECK(audio_device_module); acm_receiver_.ResetInitialDelay(); @@ -512,39 +549,43 @@ ChannelReceive::ChannelReceive( rtp_rtcp_->SetSendingMediaStatus(false); rtp_rtcp_->SetRemoteSSRC(remote_ssrc_); - _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); - // Ensure that RTCP is enabled for the created channel. rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); + + // TODO(tommi): This should be an implementation detail of ModuleRtpRtcpImpl2 + // and the pointer to the process thread should be there (which also localizes + // the problem of getting rid of that dependency). + module_process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); } ChannelReceive::~ChannelReceive() { RTC_DCHECK(construction_thread_.IsCurrent()); + // Unregister the module before stopping playout etc, to match the order + // things were set up in the ctor. + module_process_thread_->DeRegisterModule(rtp_rtcp_.get()); + // Resets the delegate's callback to ChannelReceive::OnReceivedPayloadData. 
if (frame_transformer_delegate_) frame_transformer_delegate_->Reset(); StopPlayout(); - - if (_moduleProcessThreadPtr) - _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get()); } void ChannelReceive::SetSink(AudioSinkInterface* sink) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); MutexLock lock(&callback_mutex_); audio_sink_ = sink; } void ChannelReceive::StartPlayout() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); MutexLock lock(&playing_lock_); playing_ = true; } void ChannelReceive::StopPlayout() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); MutexLock lock(&playing_lock_); playing_ = false; _outputAudioLevel.ResetLevelFullRange(); @@ -552,13 +593,13 @@ void ChannelReceive::StopPlayout() { absl::optional> ChannelReceive::GetReceiveCodec() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return acm_receiver_.LastDecoder(); } void ChannelReceive::SetReceiveCodecs( const std::map& codecs) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); for (const auto& kv : codecs) { RTC_DCHECK_GE(kv.second.clockrate_hz, 1000); payload_type_frequencies_[kv.first] = kv.second.clockrate_hz; @@ -566,8 +607,11 @@ void ChannelReceive::SetReceiveCodecs( acm_receiver_.SetCodecs(codecs); } -// May be called on either worker thread or network thread. void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + // TODO(bugs.webrtc.org/11993): Expect to be called exclusively on the + // network thread. Once that's done, the same applies to + // UpdatePlayoutTimestamp and int64_t now_ms = rtc::TimeMillis(); { @@ -593,9 +637,9 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) { // Interpolates absolute capture timestamp RTP header extension. 
header.extension.absolute_capture_time = - absolute_capture_time_receiver_.OnReceivePacket( - AbsoluteCaptureTimeReceiver::GetSource(header.ssrc, - header.arrOfCSRCs), + absolute_capture_time_interpolator_.OnReceivePacket( + AbsoluteCaptureTimeInterpolator::GetSource(header.ssrc, + header.arrOfCSRCs), header.timestamp, rtc::saturated_cast(packet_copy.payload_type_frequency()), header.extension.absolute_capture_time); @@ -654,8 +698,11 @@ void ChannelReceive::ReceivePacket(const uint8_t* packet, } } -// May be called on either worker thread or network thread. void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + // TODO(bugs.webrtc.org/11993): Expect to be called exclusively on the + // network thread. + // Store playout timestamp for the received RTCP packet UpdatePlayoutTimestamp(true, rtc::TimeMillis()); @@ -671,8 +718,10 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { uint32_t ntp_secs = 0; uint32_t ntp_frac = 0; uint32_t rtp_timestamp = 0; - if (0 != - rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, &rtp_timestamp)) { + if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, + /*rtcp_arrival_time_secs=*/nullptr, + /*rtcp_arrival_time_frac=*/nullptr, + &rtp_timestamp) != 0) { // Waiting for RTCP. 
return; } @@ -680,33 +729,39 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { { MutexLock lock(&ts_stats_lock_); ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); + absl::optional remote_to_local_clock_offset_ms = + ntp_estimator_.EstimateRemoteToLocalClockOffsetMs(); + if (remote_to_local_clock_offset_ms.has_value()) { + capture_clock_offset_updater_.SetRemoteToLocalClockOffset( + Int64MsToQ32x32(*remote_to_local_clock_offset_ms)); + } } } int ChannelReceive::GetSpeechOutputLevelFullRange() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return _outputAudioLevel.LevelFullRange(); } double ChannelReceive::GetTotalOutputEnergy() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return _outputAudioLevel.TotalEnergy(); } double ChannelReceive::GetTotalOutputDuration() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return _outputAudioLevel.TotalDuration(); } void ChannelReceive::SetChannelOutputVolumeScaling(float scaling) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); MutexLock lock(&volume_settings_mutex_); _outputGain = scaling; } void ChannelReceive::RegisterReceiverCongestionControlObjects( PacketRouter* packet_router) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(packet_router); RTC_DCHECK(!packet_router_); constexpr bool remb_candidate = false; @@ -715,19 +770,18 @@ void ChannelReceive::RegisterReceiverCongestionControlObjects( } void ChannelReceive::ResetReceiverCongestionControlObjects() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(packet_router_); packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); packet_router_ = nullptr; } CallReceiveStatistics 
ChannelReceive::GetRTCPStatistics() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); - // --- RtcpStatistics + RTC_DCHECK_RUN_ON(&worker_thread_checker_); CallReceiveStatistics stats; - // The jitter statistics is updated for each received RTP packet and is - // based on received packets. + // The jitter statistics is updated for each received RTP packet and is based + // on received packets. RtpReceiveStats rtp_stats; StreamStatistician* statistician = rtp_receive_statistics_->GetStatistician(remote_ssrc_); @@ -738,10 +792,9 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { stats.cumulativeLost = rtp_stats.packets_lost; stats.jitterSamples = rtp_stats.jitter; - // --- RTT stats.rttMs = GetRTT(); - // --- Data counters + // Data counters. if (statistician) { stats.payload_bytes_rcvd = rtp_stats.packet_counter.payload_bytes; @@ -758,16 +811,33 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { stats.last_packet_received_timestamp_ms = absl::nullopt; } - // --- Timestamps + // Timestamps. { MutexLock lock(&ts_stats_lock_); stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; } + + absl::optional rtcp_sr_stats = + rtp_rtcp_->GetSenderReportStats(); + if (rtcp_sr_stats.has_value()) { + // Number of seconds since 1900 January 1 00:00 GMT (see + // https://tools.ietf.org/html/rfc868). 
+ constexpr int64_t kNtpJan1970Millisecs = + 2208988800 * rtc::kNumMillisecsPerSec; + stats.last_sender_report_timestamp_ms = + rtcp_sr_stats->last_arrival_timestamp.ToMs() - kNtpJan1970Millisecs; + stats.last_sender_report_remote_timestamp_ms = + rtcp_sr_stats->last_remote_timestamp.ToMs() - kNtpJan1970Millisecs; + stats.sender_reports_packets_sent = rtcp_sr_stats->packets_sent; + stats.sender_reports_bytes_sent = rtcp_sr_stats->bytes_sent; + stats.sender_reports_reports_count = rtcp_sr_stats->reports_count; + } + return stats; } void ChannelReceive::SetNACKStatus(bool enable, int max_packets) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); // None of these functions can fail. if (enable) { rtp_receive_statistics_->SetMaxReorderingThreshold(max_packets); @@ -787,14 +857,14 @@ int ChannelReceive::ResendPackets(const uint16_t* sequence_numbers, void ChannelReceive::SetAssociatedSendChannel( const ChannelSendInterface* channel) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); - MutexLock lock(&assoc_send_channel_lock_); + // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. + RTC_DCHECK_RUN_ON(&worker_thread_checker_); associated_send_channel_ = channel; } void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Depending on when the channel is created, the transformer might be set // twice. Don't replace the delegate if it was already initialized. 
if (!frame_transformer || frame_transformer_delegate_) @@ -804,28 +874,36 @@ void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( NetworkStatistics ChannelReceive::GetNetworkStatistics( bool get_and_clear_legacy_stats) const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); NetworkStatistics stats; acm_receiver_.GetNetworkStatistics(&stats, get_and_clear_legacy_stats); return stats; } AudioDecodingCallStats ChannelReceive::GetDecodingCallStatistics() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); AudioDecodingCallStats stats; acm_receiver_.GetDecodingCallStatistics(&stats); return stats; } uint32_t ChannelReceive::GetDelayEstimate() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent() || - module_process_thread_checker_.IsCurrent()); - MutexLock lock(&video_sync_lock_); - return acm_receiver_.FilteredCurrentDelayMs() + playout_delay_ms_; + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + + uint32_t playout_delay; + { + MutexLock lock(&video_sync_lock_); + playout_delay = playout_delay_ms_; + } + // Return the current jitter buffer delay + playout delay. + return acm_receiver_.FilteredCurrentDelayMs() + playout_delay; } bool ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) { - RTC_DCHECK(module_process_thread_checker_.IsCurrent()); + // TODO(bugs.webrtc.org/11993): This should run on the network thread. + // We get here via RtpStreamsSynchronizer. Once that's done, many (all?) of + // these locks aren't needed. + RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Limit to range accepted by both VoE and ACM, so we're at least getting as // close as possible, instead of failing. 
delay_ms = rtc::SafeClamp(delay_ms, kVoiceEngineMinMinPlayoutDelayMs, @@ -861,7 +939,7 @@ void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, absl::optional ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs(int64_t now_ms) const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); MutexLock lock(&video_sync_lock_); if (!playout_timestamp_ntp_ || !playout_timestamp_ntp_time_ms_) return absl::nullopt; @@ -879,13 +957,19 @@ int ChannelReceive::GetBaseMinimumPlayoutDelayMs() const { } absl::optional ChannelReceive::GetSyncInfo() const { - RTC_DCHECK(module_process_thread_checker_.IsCurrent()); + // TODO(bugs.webrtc.org/11993): This should run on the network thread. + // We get here via RtpStreamsSynchronizer. Once that's done, many of + // these locks aren't needed. + RTC_DCHECK_RUN_ON(&worker_thread_checker_); Syncable::Info info; if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs, - &info.capture_time_ntp_frac, nullptr, nullptr, + &info.capture_time_ntp_frac, + /*rtcp_arrival_time_secs=*/nullptr, + /*rtcp_arrival_time_frac=*/nullptr, &info.capture_time_source_clock) != 0) { return absl::nullopt; } + { MutexLock lock(&sync_info_lock_); if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) { @@ -894,10 +978,20 @@ absl::optional ChannelReceive::GetSyncInfo() const { info.latest_received_capture_timestamp = *last_received_rtp_timestamp_; info.latest_receive_time_ms = *last_received_rtp_system_time_ms_; } + + int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs(); + { + MutexLock lock(&video_sync_lock_); + info.current_delay_ms = jitter_buffer_delay + playout_delay_ms_; + } + return info; } void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) { + // TODO(bugs.webrtc.org/11993): Expect to be called exclusively on the + // network thread. Once that's done, we won't need video_sync_lock_. 
+ jitter_buffer_playout_timestamp_ = acm_receiver_.GetPlayoutTimestamp(); if (!jitter_buffer_playout_timestamp_) { @@ -945,30 +1039,26 @@ int ChannelReceive::GetRtpTimestampRateHz() const { } int64_t ChannelReceive::GetRTT() const { - std::vector report_blocks; - rtp_rtcp_->RemoteRTCPStat(&report_blocks); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + std::vector report_blocks = + rtp_rtcp_->GetLatestReportBlockData(); - // TODO(nisse): Could we check the return value from the ->RTT() call below, - // instead of checking if we have any report blocks? if (report_blocks.empty()) { - MutexLock lock(&assoc_send_channel_lock_); - // Tries to get RTT from an associated channel. + // Try fall back on an RTT from an associated channel. if (!associated_send_channel_) { return 0; } return associated_send_channel_->GetRTT(); } - int64_t rtt = 0; - int64_t avg_rtt = 0; - int64_t max_rtt = 0; - int64_t min_rtt = 0; // TODO(nisse): This method computes RTT based on sender reports, even though // a receive stream is not supposed to do that. 
- if (rtp_rtcp_->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) != 0) { - return 0; + for (const ReportBlockData& data : report_blocks) { + if (data.report_block().sender_ssrc == remote_ssrc_) { + return data.last_rtt_ms(); + } } - return rtt; + return 0; } } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h index eef2db425..c55968b55 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.h @@ -28,6 +28,7 @@ #include "call/rtp_packet_sink_interface.h" #include "call/syncable.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" +#include "modules/rtp_rtcp/source/source_tracker.h" #include "system_wrappers/include/clock.h" // TODO(solenberg, nisse): This file contains a few NOLINT marks, to silence @@ -57,13 +58,22 @@ struct CallReceiveStatistics { int64_t payload_bytes_rcvd = 0; int64_t header_and_padding_bytes_rcvd = 0; int packetsReceived; - // The capture ntp time (in local timebase) of the first played out audio + // The capture NTP time (in local timebase) of the first played out audio // frame. int64_t capture_start_ntp_time_ms_; // The timestamp at which the last packet was received, i.e. the time of the // local clock when it was received - not the RTP timestamp of that packet. // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-lastpacketreceivedtimestamp absl::optional last_packet_received_timestamp_ms; + // Remote outbound stats derived by the received RTCP sender reports. + // Note that the timestamps below correspond to the time elapsed since the + // Unix epoch. 
+ // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* + absl::optional last_sender_report_timestamp_ms; + absl::optional last_sender_report_remote_timestamp_ms; + uint32_t sender_reports_packets_sent = 0; + uint64_t sender_reports_bytes_sent = 0; + uint64_t sender_reports_reports_count = 0; }; namespace voe { @@ -135,6 +145,10 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { virtual int PreferredSampleRate() const = 0; + // Sets the source tracker to notify about "delivered" packets when output is + // muted. + virtual void SetSourceTracker(SourceTracker* source_tracker) = 0; + // Associate to a send channel. // Used for obtaining RTT for a receive-only channel. virtual void SetAssociatedSendChannel( diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.h b/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.h index 73112d10e..0af748e37 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.h +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.h @@ -14,7 +14,8 @@ #include #include "api/frame_transformer_interface.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "api/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread.h" @@ -61,7 +62,7 @@ class ChannelReceiveFrameTransformerDelegate : public TransformedFrameCallback { ~ChannelReceiveFrameTransformerDelegate() override = default; private: - SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; ReceiveFrameCallback receive_frame_callback_ RTC_GUARDED_BY(sequence_checker_); rtc::scoped_refptr frame_transformer_ diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc index 80e7ab2f4..47afc7982 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc 
+++ b/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc @@ -21,6 +21,7 @@ #include "api/call/transport.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/sequence_checker.h" #include "audio/channel_send_frame_transformer_delegate.h" #include "audio/utility/audio_frame_operations.h" #include "call/rtp_transport_controller_send_interface.h" @@ -41,7 +42,6 @@ #include "rtc_base/rate_limiter.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" -#include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" @@ -179,8 +179,8 @@ class ChannelSend : public ChannelSendInterface, // specific threads we know about. The goal is to eventually split up // voe::Channel into parts with single-threaded semantics, and thereby reduce // the need for locks. - rtc::ThreadChecker worker_thread_checker_; - rtc::ThreadChecker module_process_thread_checker_; + SequenceChecker worker_thread_checker_; + SequenceChecker module_process_thread_checker_; // Methods accessed from audio and video threads are checked for sequential- // only access. We don't necessarily own and control these threads, so thread // checkers cannot be used. E.g. Chromium may transfer "ownership" from one @@ -218,8 +218,7 @@ class ChannelSend : public ChannelSendInterface, const std::unique_ptr rtp_packet_pacer_proxy_; const std::unique_ptr retransmission_rate_limiter_; - rtc::ThreadChecker construction_thread_; - + SequenceChecker construction_thread_; bool encoder_queue_is_active_ RTC_GUARDED_BY(encoder_queue_) = false; @@ -241,6 +240,8 @@ class ChannelSend : public ChannelSendInterface, // Defined last to ensure that there are no running tasks when the other // members are destroyed. 
rtc::TaskQueue encoder_queue_; + + const bool fixing_timestamp_stall_; }; const int kTelephoneEventAttenuationdB = 10; @@ -262,7 +263,7 @@ class RtpPacketSenderProxy : public RtpPacketSender { } private: - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; Mutex mutex_; RtpPacketSender* rtp_packet_pacer_ RTC_GUARDED_BY(&mutex_); }; @@ -471,7 +472,9 @@ ChannelSend::ChannelSend( crypto_options_(crypto_options), encoder_queue_(task_queue_factory->CreateTaskQueue( "AudioEncoder", - TaskQueueFactory::Priority::NORMAL)) { + TaskQueueFactory::Priority::NORMAL)), + fixing_timestamp_stall_( + !field_trial::IsDisabled("WebRTC-Audio-FixTimestampStall")) { RTC_DCHECK(module_process_thread); module_process_thread_checker_.Detach(); @@ -746,25 +749,20 @@ std::vector ChannelSend::GetRemoteRTCPReportBlocks() const { // Get the report blocks from the latest received RTCP Sender or Receiver // Report. Each element in the vector contains the sender's SSRC and a // report block according to RFC 3550. 
- std::vector rtcp_report_blocks; - - int ret = rtp_rtcp_->RemoteRTCPStat(&rtcp_report_blocks); - RTC_DCHECK_EQ(0, ret); - std::vector report_blocks; - - std::vector::const_iterator it = rtcp_report_blocks.begin(); - for (; it != rtcp_report_blocks.end(); ++it) { + for (const ReportBlockData& data : rtp_rtcp_->GetLatestReportBlockData()) { ReportBlock report_block; - report_block.sender_SSRC = it->sender_ssrc; - report_block.source_SSRC = it->source_ssrc; - report_block.fraction_lost = it->fraction_lost; - report_block.cumulative_num_packets_lost = it->packets_lost; + report_block.sender_SSRC = data.report_block().sender_ssrc; + report_block.source_SSRC = data.report_block().source_ssrc; + report_block.fraction_lost = data.report_block().fraction_lost; + report_block.cumulative_num_packets_lost = data.report_block().packets_lost; report_block.extended_highest_sequence_number = - it->extended_highest_sequence_number; - report_block.interarrival_jitter = it->jitter; - report_block.last_SR_timestamp = it->last_sender_report_timestamp; - report_block.delay_since_last_SR = it->delay_since_last_sender_report; + data.report_block().extended_highest_sequence_number; + report_block.interarrival_jitter = data.report_block().jitter; + report_block.last_SR_timestamp = + data.report_block().last_sender_report_timestamp; + report_block.delay_since_last_SR = + data.report_block().delay_since_last_sender_report; report_blocks.push_back(report_block); } return report_blocks; @@ -808,6 +806,10 @@ void ChannelSend::ProcessAndEncodeAudio( [this, audio_frame = std::move(audio_frame)]() mutable { RTC_DCHECK_RUN_ON(&encoder_queue_); if (!encoder_queue_is_active_) { + if (fixing_timestamp_stall_) { + _timeStamp += + static_cast(audio_frame->samples_per_channel_); + } return; } // Measure time between when the audio frame is added to the task queue @@ -861,24 +863,15 @@ RtpRtcpInterface* ChannelSend::GetRtpRtcp() const { } int64_t ChannelSend::GetRTT() const { - std::vector report_blocks; 
- rtp_rtcp_->RemoteRTCPStat(&report_blocks); - + std::vector report_blocks = + rtp_rtcp_->GetLatestReportBlockData(); if (report_blocks.empty()) { return 0; } - int64_t rtt = 0; - int64_t avg_rtt = 0; - int64_t max_rtt = 0; - int64_t min_rtt = 0; // We don't know in advance the remote ssrc used by the other end's receiver - // reports, so use the SSRC of the first report block for calculating the RTT. - if (rtp_rtcp_->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt, &min_rtt, - &max_rtt) != 0) { - return 0; - } - return rtt; + // reports, so use the first report block for the RTT. + return report_blocks.front().last_rtt_ms(); } void ChannelSend::SetFrameEncryptor( @@ -926,7 +919,7 @@ void ChannelSend::InitFrameTransformerDelegate( absolute_capture_timestamp_ms); }; frame_transformer_delegate_ = - new rtc::RefCountedObject( + rtc::make_ref_counted( std::move(send_audio_callback), std::move(frame_transformer), &encoder_queue_); frame_transformer_delegate_->Init(); diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.h b/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.h index 531d1bc11..9b7eb33b5 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.h +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.h @@ -14,10 +14,10 @@ #include #include "api/frame_transformer_interface.h" +#include "api/sequence_checker.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "rtc_base/buffer.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/audio/mock_voe_channel_proxy.h b/TMessagesProj/jni/voip/webrtc/audio/mock_voe_channel_proxy.h index 52e5b2fc8..7f140d400 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/mock_voe_channel_proxy.h +++ 
b/TMessagesProj/jni/voip/webrtc/audio/mock_voe_channel_proxy.h @@ -59,6 +59,7 @@ class MockChannelReceive : public voe::ChannelReceiveInterface { (int sample_rate_hz, AudioFrame*), (override)); MOCK_METHOD(int, PreferredSampleRate, (), (const, override)); + MOCK_METHOD(void, SetSourceTracker, (SourceTracker*), (override)); MOCK_METHOD(void, SetAssociatedSendChannel, (const voe::ChannelSendInterface*), diff --git a/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.h b/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.h index 97cd2c7e6..47e67a91d 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.h +++ b/TMessagesProj/jni/voip/webrtc/audio/null_audio_poller.h @@ -13,9 +13,9 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/message_handler.h" -#include "rtc_base/thread_checker.h" namespace webrtc { namespace internal { @@ -29,7 +29,7 @@ class NullAudioPoller final : public rtc::MessageHandler { void OnMessage(rtc::Message* msg) override; private: - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; AudioTransport* const audio_transport_; int64_t reschedule_at_; }; diff --git a/TMessagesProj/jni/voip/webrtc/audio/remix_resample.cc b/TMessagesProj/jni/voip/webrtc/audio/remix_resample.cc index 3694d34e4..178af622a 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/remix_resample.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/remix_resample.cc @@ -56,9 +56,10 @@ void RemixAndResample(const int16_t* src_data, if (resampler->InitializeIfNeeded(sample_rate_hz, dst_frame->sample_rate_hz_, audio_ptr_num_channels) == -1) { - FATAL() << "InitializeIfNeeded failed: sample_rate_hz = " << sample_rate_hz - << ", dst_frame->sample_rate_hz_ = " << dst_frame->sample_rate_hz_ - << ", audio_ptr_num_channels = " << audio_ptr_num_channels; + RTC_FATAL() << "InitializeIfNeeded failed: sample_rate_hz = " + << sample_rate_hz << ", dst_frame->sample_rate_hz_ = " + << 
dst_frame->sample_rate_hz_ + << ", audio_ptr_num_channels = " << audio_ptr_num_channels; } // TODO(yujo): for muted input frames, don't resample. Either 1) allow @@ -70,9 +71,10 @@ void RemixAndResample(const int16_t* src_data, resampler->Resample(audio_ptr, src_length, dst_frame->mutable_data(), AudioFrame::kMaxDataSizeSamples); if (out_length == -1) { - FATAL() << "Resample failed: audio_ptr = " << audio_ptr - << ", src_length = " << src_length - << ", dst_frame->mutable_data() = " << dst_frame->mutable_data(); + RTC_FATAL() << "Resample failed: audio_ptr = " << audio_ptr + << ", src_length = " << src_length + << ", dst_frame->mutable_data() = " + << dst_frame->mutable_data(); } dst_frame->samples_per_channel_ = out_length / audio_ptr_num_channels; diff --git a/TMessagesProj/jni/voip/webrtc/audio/utility/audio_frame_operations.cc b/TMessagesProj/jni/voip/webrtc/audio/utility/audio_frame_operations.cc index a9d2cf163..e13a09bac 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/utility/audio_frame_operations.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/utility/audio_frame_operations.cc @@ -169,10 +169,10 @@ void AudioFrameOperations::UpmixChannels(size_t target_number_of_channels, if (!frame->muted()) { // Up-mixing done in place. Going backwards through the frame ensure nothing // is irrevocably overwritten. 
+ int16_t* frame_data = frame->mutable_data(); for (int i = frame->samples_per_channel_ - 1; i >= 0; i--) { for (size_t j = 0; j < target_number_of_channels; ++j) { - frame->mutable_data()[target_number_of_channels * i + j] = - frame->data()[i]; + frame_data[target_number_of_channels * i + j] = frame_data[i]; } } } diff --git a/TMessagesProj/jni/voip/webrtc/audio/utility/audio_frame_operations.h b/TMessagesProj/jni/voip/webrtc/audio/utility/audio_frame_operations.h index 65c310c48..2f1540bcf 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/utility/audio_frame_operations.h +++ b/TMessagesProj/jni/voip/webrtc/audio/utility/audio_frame_operations.h @@ -14,8 +14,8 @@ #include #include +#include "absl/base/attributes.h" #include "api/audio/audio_frame.h" -#include "rtc_base/deprecation.h" namespace webrtc { @@ -36,12 +36,14 @@ class AudioFrameOperations { // |frame.num_channels_| will be updated. This version checks for sufficient // buffer size and that |num_channels_| is mono. Use UpmixChannels // instead. TODO(bugs.webrtc.org/8649): remove. - RTC_DEPRECATED static int MonoToStereo(AudioFrame* frame); + ABSL_DEPRECATED("bugs.webrtc.org/8649") + static int MonoToStereo(AudioFrame* frame); // |frame.num_channels_| will be updated. This version checks that // |num_channels_| is stereo. Use DownmixChannels // instead. TODO(bugs.webrtc.org/8649): remove. - RTC_DEPRECATED static int StereoToMono(AudioFrame* frame); + ABSL_DEPRECATED("bugs.webrtc.org/8649") + static int StereoToMono(AudioFrame* frame); // Downmixes 4 channels |src_audio| to stereo |dst_audio|. 
This is an in-place // operation, meaning |src_audio| and |dst_audio| may point to the same diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc index dc53acf3a..d11e6d79f 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.cc @@ -79,6 +79,12 @@ AudioChannel::~AudioChannel() { } audio_mixer_->RemoveSource(ingress_.get()); + + // AudioEgress could hold current global TaskQueueBase that we need to clear + // before ProcessThread::DeRegisterModule. + egress_.reset(); + ingress_.reset(); + process_thread_->DeRegisterModule(rtp_rtcp_.get()); } @@ -159,4 +165,17 @@ IngressStatistics AudioChannel::GetIngressStatistics() { return ingress_stats; } +ChannelStatistics AudioChannel::GetChannelStatistics() { + ChannelStatistics channel_stat = ingress_->GetChannelStatistics(); + + StreamDataCounters rtp_stats, rtx_stats; + rtp_rtcp_->GetSendStreamDataCounters(&rtp_stats, &rtx_stats); + channel_stat.bytes_sent = + rtp_stats.transmitted.payload_bytes + rtx_stats.transmitted.payload_bytes; + channel_stat.packets_sent = + rtp_stats.transmitted.packets + rtx_stats.transmitted.packets; + + return channel_stat; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.h b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.h index 5bc748359..7b9fa6f74 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_channel.h @@ -84,6 +84,7 @@ class AudioChannel : public rtc::RefCountInterface { ingress_->SetReceiveCodecs(codecs); } IngressStatistics GetIngressStatistics(); + ChannelStatistics GetChannelStatistics(); // See comments on the methods used from AudioEgress and AudioIngress. 
// Conversion to double is following what is done in @@ -106,6 +107,12 @@ class AudioChannel : public rtc::RefCountInterface { return ingress_->GetOutputTotalDuration(); } + // Internal API for testing purpose. + void SendRTCPReportForTesting(RTCPPacketType type) { + int32_t result = rtp_rtcp_->SendRTCP(type); + RTC_DCHECK(result == 0); + } + private: // ChannelId that this audio channel belongs for logging purpose. ChannelId id_; diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.h b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.h index fcd9ed0f2..a39c7e225 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_egress.h @@ -15,6 +15,7 @@ #include #include "api/audio_codecs/audio_format.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "audio/audio_level.h" #include "audio/utility/audio_frame_operations.h" @@ -25,7 +26,6 @@ #include "modules/rtp_rtcp/source/rtp_sender_audio.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" -#include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc index 07def9955..8aa552bb2 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.cc @@ -17,6 +17,10 @@ #include "api/audio_codecs/audio_format.h" #include "audio/utility/audio_frame_operations.h" #include "modules/audio_coding/include/audio_coding_module.h" +#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" @@ -153,6 +157,12 @@ void 
AudioIngress::ReceivedRTPPacket(rtc::ArrayView rtp_packet) { rtp_packet_received.set_payload_type_frequency(it->second); } + // Track current remote SSRC. + if (rtp_packet_received.Ssrc() != remote_ssrc_) { + rtp_rtcp_->SetRemoteSSRC(rtp_packet_received.Ssrc()); + remote_ssrc_.store(rtp_packet_received.Ssrc()); + } + rtp_receive_statistics_->OnRtpPacket(rtp_packet_received); RTPHeader header; @@ -181,11 +191,28 @@ void AudioIngress::ReceivedRTPPacket(rtc::ArrayView rtp_packet) { void AudioIngress::ReceivedRTCPPacket( rtc::ArrayView rtcp_packet) { - // Deliver RTCP packet to RTP/RTCP module for parsing. + rtcp::CommonHeader rtcp_header; + if (rtcp_header.Parse(rtcp_packet.data(), rtcp_packet.size()) && + (rtcp_header.type() == rtcp::SenderReport::kPacketType || + rtcp_header.type() == rtcp::ReceiverReport::kPacketType)) { + RTC_DCHECK_GE(rtcp_packet.size(), 8); + + uint32_t sender_ssrc = + ByteReader::ReadBigEndian(rtcp_packet.data() + 4); + + // If we don't have remote ssrc at this point, it's likely that remote + // endpoint is receive-only or it could have restarted the media. + if (sender_ssrc != remote_ssrc_) { + rtp_rtcp_->SetRemoteSSRC(sender_ssrc); + remote_ssrc_.store(sender_ssrc); + } + } + + // Deliver RTCP packet to RTP/RTCP module for parsing and processing. rtp_rtcp_->IncomingRtcpPacket(rtcp_packet.data(), rtcp_packet.size()); - int64_t rtt = GetRoundTripTime(); - if (rtt == -1) { + int64_t rtt = 0; + if (rtp_rtcp_->RTT(remote_ssrc_, &rtt, nullptr, nullptr, nullptr) != 0) { // Waiting for valid RTT. return; } @@ -203,30 +230,65 @@ void AudioIngress::ReceivedRTCPPacket( } } -int64_t AudioIngress::GetRoundTripTime() { +ChannelStatistics AudioIngress::GetChannelStatistics() { + ChannelStatistics channel_stats; + + // Get clockrate for current decoder ahead of jitter calculation. 
+ uint32_t clockrate_hz = 0; + absl::optional> decoder = + acm_receiver_.LastDecoder(); + if (decoder) { + clockrate_hz = decoder->second.clockrate_hz; + } + + StreamStatistician* statistician = + rtp_receive_statistics_->GetStatistician(remote_ssrc_); + if (statistician) { + RtpReceiveStats stats = statistician->GetStats(); + channel_stats.packets_lost = stats.packets_lost; + channel_stats.packets_received = stats.packet_counter.packets; + channel_stats.bytes_received = stats.packet_counter.payload_bytes; + channel_stats.remote_ssrc = remote_ssrc_; + if (clockrate_hz > 0) { + channel_stats.jitter = static_cast(stats.jitter) / clockrate_hz; + } + } + + // Get RTCP report using remote SSRC. const std::vector& report_data = rtp_rtcp_->GetLatestReportBlockData(); + for (const ReportBlockData& block_data : report_data) { + const RTCPReportBlock& rtcp_report = block_data.report_block(); + if (rtp_rtcp_->SSRC() != rtcp_report.source_ssrc || + remote_ssrc_ != rtcp_report.sender_ssrc) { + continue; + } + RemoteRtcpStatistics remote_stat; + remote_stat.packets_lost = rtcp_report.packets_lost; + remote_stat.fraction_lost = + static_cast(rtcp_report.fraction_lost) / (1 << 8); + if (clockrate_hz > 0) { + remote_stat.jitter = + static_cast(rtcp_report.jitter) / clockrate_hz; + } + if (block_data.has_rtt()) { + remote_stat.round_trip_time = + static_cast(block_data.last_rtt_ms()) / + rtc::kNumMillisecsPerSec; + } + remote_stat.last_report_received_timestamp_ms = + block_data.report_block_timestamp_utc_us() / + rtc::kNumMicrosecsPerMillisec; + channel_stats.remote_rtcp = remote_stat; - // If we do not have report block which means remote RTCP hasn't be received - // yet, return -1 as to indicate uninitialized value. - if (report_data.empty()) { - return -1; + // Receive only channel won't send any RTP packets. 
+ if (!channel_stats.remote_ssrc.has_value()) { + channel_stats.remote_ssrc = remote_ssrc_; + } + break; } - // We don't know in advance the remote SSRC used by the other end's receiver - // reports, so use the SSRC of the first report block as remote SSRC for now. - // TODO(natim@webrtc.org): handle the case where remote end is changing ssrc - // and update accordingly here. - const ReportBlockData& block_data = report_data[0]; - - const uint32_t sender_ssrc = block_data.report_block().sender_ssrc; - - if (sender_ssrc != remote_ssrc_.load()) { - remote_ssrc_.store(sender_ssrc); - rtp_rtcp_->SetRemoteSSRC(sender_ssrc); - } - - return (block_data.has_rtt() ? block_data.last_rtt_ms() : -1); + return channel_stats; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.h b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.h index d3680e0f0..9a36a4656 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/audio_ingress.h @@ -17,10 +17,12 @@ #include #include +#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/audio_mixer.h" #include "api/rtp_headers.h" #include "api/scoped_refptr.h" +#include "api/voip/voip_statistics.h" #include "audio/audio_level.h" #include "modules/audio_coding/acm2/acm_receiver.h" #include "modules/audio_coding/include/audio_coding_module.h" @@ -78,10 +80,6 @@ class AudioIngress : public AudioMixer::Source { return output_audio_level_.TotalDuration(); } - // Returns network round trip time (RTT) measued by RTCP exchange with - // remote media endpoint. RTT value -1 indicates that it's not initialized. 
- int64_t GetRoundTripTime(); - NetworkStatistics GetNetworkStatistics() const { NetworkStatistics stats; acm_receiver_.GetNetworkStatistics(&stats, @@ -89,6 +87,8 @@ class AudioIngress : public AudioMixer::Source { return stats; } + ChannelStatistics GetChannelStatistics(); + // Implementation of AudioMixer::Source interface. AudioMixer::Source::AudioFrameInfo GetAudioFrameWithInfo( int sampling_rate, diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/test/mock_task_queue.h b/TMessagesProj/jni/voip/webrtc/audio/voip/test/mock_task_queue.h new file mode 100644 index 000000000..c3553a21e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/test/mock_task_queue.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef AUDIO_VOIP_TEST_MOCK_TASK_QUEUE_H_ +#define AUDIO_VOIP_TEST_MOCK_TASK_QUEUE_H_ + +#include + +#include "api/task_queue/task_queue_factory.h" +#include "test/gmock.h" + +namespace webrtc { + +// MockTaskQueue enables immediate task run from global TaskQueueBase. +// It's necessary for some tests depending on TaskQueueBase internally. +class MockTaskQueue : public TaskQueueBase { + public: + MockTaskQueue() : current_(this) {} + + // Delete is deliberately defined as no-op as MockTaskQueue is expected to + // hold onto current global TaskQueueBase throughout the testing. 
+ void Delete() override {} + + MOCK_METHOD(void, PostTask, (std::unique_ptr), (override)); + MOCK_METHOD(void, + PostDelayedTask, + (std::unique_ptr, uint32_t), + (override)); + + private: + CurrentTaskQueueSetter current_; +}; + +class MockTaskQueueFactory : public TaskQueueFactory { + public: + explicit MockTaskQueueFactory(MockTaskQueue* task_queue) + : task_queue_(task_queue) {} + + std::unique_ptr CreateTaskQueue( + absl::string_view name, + Priority priority) const override { + // Default MockTaskQueue::Delete is no-op, therefore it's safe to pass the + // raw pointer. + return std::unique_ptr(task_queue_); + } + + private: + MockTaskQueue* task_queue_; +}; + +} // namespace webrtc + +#endif // AUDIO_VOIP_TEST_MOCK_TASK_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.cc b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.cc index ac29fbf6d..67ae4c652 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.cc @@ -127,10 +127,9 @@ bool VoipCore::InitializeIfNeeded() { return true; } -absl::optional VoipCore::CreateChannel( - Transport* transport, - absl::optional local_ssrc) { - absl::optional channel_id; +ChannelId VoipCore::CreateChannel(Transport* transport, + absl::optional local_ssrc) { + ChannelId channel_id; // Set local ssrc to random if not set by caller. 
if (!local_ssrc) { @@ -139,7 +138,7 @@ absl::optional VoipCore::CreateChannel( } rtc::scoped_refptr channel = - new rtc::RefCountedObject( + rtc::make_ref_counted( transport, local_ssrc.value(), task_queue_factory_.get(), process_thread_.get(), audio_mixer_.get(), decoder_factory_); @@ -153,7 +152,7 @@ absl::optional VoipCore::CreateChannel( start_process_thread = channels_.empty(); channel_id = static_cast(next_channel_id_); - channels_[*channel_id] = channel; + channels_[channel_id] = channel; next_channel_id_++; if (next_channel_id_ >= kMaxChannelId) { next_channel_id_ = 0; @@ -161,7 +160,7 @@ absl::optional VoipCore::CreateChannel( } // Set ChannelId in audio channel for logging/debugging purpose. - channel->SetId(*channel_id); + channel->SetId(channel_id); if (start_process_thread) { process_thread_->Start(); @@ -170,7 +169,7 @@ absl::optional VoipCore::CreateChannel( return channel_id; } -void VoipCore::ReleaseChannel(ChannelId channel_id) { +VoipResult VoipCore::ReleaseChannel(ChannelId channel_id) { // Destroy channel outside of the lock. 
rtc::scoped_refptr channel; @@ -188,8 +187,10 @@ void VoipCore::ReleaseChannel(ChannelId channel_id) { no_channels_after_release = channels_.empty(); } + VoipResult status_code = VoipResult::kOk; if (!channel) { RTC_LOG(LS_WARNING) << "Channel " << channel_id << " not found"; + status_code = VoipResult::kInvalidArgument; } if (no_channels_after_release) { @@ -201,9 +202,12 @@ void VoipCore::ReleaseChannel(ChannelId channel_id) { if (audio_device_module_->Playing()) { if (audio_device_module_->StopPlayout() != 0) { RTC_LOG(LS_WARNING) << "StopPlayout failed"; + status_code = VoipResult::kInternal; } } } + + return status_code; } rtc::scoped_refptr VoipCore::GetChannel(ChannelId channel_id) { @@ -281,174 +285,232 @@ bool VoipCore::UpdateAudioTransportWithSenders() { return true; } -bool VoipCore::StartSend(ChannelId channel_id) { - rtc::scoped_refptr channel = GetChannel(channel_id); - - if (!channel || !channel->StartSend()) { - return false; - } - - return UpdateAudioTransportWithSenders(); -} - -bool VoipCore::StopSend(ChannelId channel_id) { +VoipResult VoipCore::StartSend(ChannelId channel_id) { rtc::scoped_refptr channel = GetChannel(channel_id); if (!channel) { - return false; + return VoipResult::kInvalidArgument; + } + + if (!channel->StartSend()) { + return VoipResult::kFailedPrecondition; + } + + return UpdateAudioTransportWithSenders() ? VoipResult::kOk + : VoipResult::kInternal; +} + +VoipResult VoipCore::StopSend(ChannelId channel_id) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel) { + return VoipResult::kInvalidArgument; } channel->StopSend(); - return UpdateAudioTransportWithSenders(); + return UpdateAudioTransportWithSenders() ? 
VoipResult::kOk + : VoipResult::kInternal; } -bool VoipCore::StartPlayout(ChannelId channel_id) { +VoipResult VoipCore::StartPlayout(ChannelId channel_id) { rtc::scoped_refptr channel = GetChannel(channel_id); if (!channel) { - return false; + return VoipResult::kInvalidArgument; } if (channel->IsPlaying()) { - return true; + return VoipResult::kOk; } if (!channel->StartPlay()) { - return false; + return VoipResult::kFailedPrecondition; } // Initialize audio device module and default device if needed. if (!InitializeIfNeeded()) { - return false; + return VoipResult::kInternal; } if (!audio_device_module_->Playing()) { if (audio_device_module_->InitPlayout() != 0) { RTC_LOG(LS_ERROR) << "InitPlayout failed"; - return false; + return VoipResult::kInternal; } if (audio_device_module_->StartPlayout() != 0) { RTC_LOG(LS_ERROR) << "StartPlayout failed"; - return false; + return VoipResult::kInternal; } } - return true; + + return VoipResult::kOk; } -bool VoipCore::StopPlayout(ChannelId channel_id) { +VoipResult VoipCore::StopPlayout(ChannelId channel_id) { rtc::scoped_refptr channel = GetChannel(channel_id); if (!channel) { - return false; + return VoipResult::kInvalidArgument; } channel->StopPlay(); - return true; + return VoipResult::kOk; } -void VoipCore::ReceivedRTPPacket(ChannelId channel_id, - rtc::ArrayView rtp_packet) { +VoipResult VoipCore::ReceivedRTPPacket( + ChannelId channel_id, + rtc::ArrayView rtp_packet) { rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - channel->ReceivedRTPPacket(rtp_packet); + if (!channel) { + return VoipResult::kInvalidArgument; } + + channel->ReceivedRTPPacket(rtp_packet); + + return VoipResult::kOk; } -void VoipCore::ReceivedRTCPPacket(ChannelId channel_id, - rtc::ArrayView rtcp_packet) { +VoipResult VoipCore::ReceivedRTCPPacket( + ChannelId channel_id, + rtc::ArrayView rtcp_packet) { rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - channel->ReceivedRTCPPacket(rtcp_packet); + if 
(!channel) { + return VoipResult::kInvalidArgument; } + + channel->ReceivedRTCPPacket(rtcp_packet); + + return VoipResult::kOk; } -void VoipCore::SetSendCodec(ChannelId channel_id, - int payload_type, - const SdpAudioFormat& encoder_format) { +VoipResult VoipCore::SetSendCodec(ChannelId channel_id, + int payload_type, + const SdpAudioFormat& encoder_format) { rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - auto encoder = encoder_factory_->MakeAudioEncoder( - payload_type, encoder_format, absl::nullopt); - channel->SetEncoder(payload_type, encoder_format, std::move(encoder)); + if (!channel) { + return VoipResult::kInvalidArgument; } + + auto encoder = encoder_factory_->MakeAudioEncoder( + payload_type, encoder_format, absl::nullopt); + channel->SetEncoder(payload_type, encoder_format, std::move(encoder)); + + return VoipResult::kOk; } -void VoipCore::SetReceiveCodecs( +VoipResult VoipCore::SetReceiveCodecs( ChannelId channel_id, const std::map& decoder_specs) { rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - channel->SetReceiveCodecs(decoder_specs); + if (!channel) { + return VoipResult::kInvalidArgument; } + + channel->SetReceiveCodecs(decoder_specs); + + return VoipResult::kOk; } -void VoipCore::RegisterTelephoneEventType(ChannelId channel_id, - int rtp_payload_type, - int sample_rate_hz) { +VoipResult VoipCore::RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) { rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - channel->RegisterTelephoneEventType(rtp_payload_type, sample_rate_hz); + if (!channel) { + return VoipResult::kInvalidArgument; } + + channel->RegisterTelephoneEventType(rtp_payload_type, sample_rate_hz); + + return VoipResult::kOk; } -bool VoipCore::SendDtmfEvent(ChannelId channel_id, - DtmfEvent dtmf_event, - int duration_ms) { +VoipResult VoipCore::SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) { 
rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - return channel->SendTelephoneEvent(static_cast(dtmf_event), - duration_ms); + if (!channel) { + return VoipResult::kInvalidArgument; } - return false; + + return (channel->SendTelephoneEvent(static_cast(dtmf_event), duration_ms) + ? VoipResult::kOk + : VoipResult::kFailedPrecondition); } -absl::optional VoipCore::GetIngressStatistics( - ChannelId channel_id) { +VoipResult VoipCore::GetIngressStatistics(ChannelId channel_id, + IngressStatistics& ingress_stats) { rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - return channel->GetIngressStatistics(); + if (!channel) { + return VoipResult::kInvalidArgument; } - return absl::nullopt; + + ingress_stats = channel->GetIngressStatistics(); + + return VoipResult::kOk; } -void VoipCore::SetInputMuted(ChannelId channel_id, bool enable) { +VoipResult VoipCore::GetChannelStatistics(ChannelId channel_id, + ChannelStatistics& channel_stats) { rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - channel->SetMute(enable); + + if (!channel) { + return VoipResult::kInvalidArgument; } + + channel_stats = channel->GetChannelStatistics(); + + return VoipResult::kOk; } -absl::optional VoipCore::GetInputVolumeInfo(ChannelId channel_id) { +VoipResult VoipCore::SetInputMuted(ChannelId channel_id, bool enable) { rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - VolumeInfo input_volume; - input_volume.audio_level = channel->GetInputAudioLevel(); - input_volume.total_energy = channel->GetInputTotalEnergy(); - input_volume.total_duration = channel->GetInputTotalDuration(); - return input_volume; + + if (!channel) { + return VoipResult::kInvalidArgument; } - return absl::nullopt; + + channel->SetMute(enable); + + return VoipResult::kOk; } -absl::optional VoipCore::GetOutputVolumeInfo(ChannelId channel_id) { +VoipResult VoipCore::GetInputVolumeInfo(ChannelId channel_id, + VolumeInfo& input_volume) { 
rtc::scoped_refptr channel = GetChannel(channel_id); - if (channel) { - VolumeInfo output_volume; - output_volume.audio_level = channel->GetOutputAudioLevel(); - output_volume.total_energy = channel->GetOutputTotalEnergy(); - output_volume.total_duration = channel->GetOutputTotalDuration(); - return output_volume; + + if (!channel) { + return VoipResult::kInvalidArgument; } - return absl::nullopt; + + input_volume.audio_level = channel->GetInputAudioLevel(); + input_volume.total_energy = channel->GetInputTotalEnergy(); + input_volume.total_duration = channel->GetInputTotalDuration(); + + return VoipResult::kOk; +} + +VoipResult VoipCore::GetOutputVolumeInfo(ChannelId channel_id, + VolumeInfo& output_volume) { + rtc::scoped_refptr channel = GetChannel(channel_id); + + if (!channel) { + return VoipResult::kInvalidArgument; + } + + output_volume.audio_level = channel->GetOutputAudioLevel(); + output_volume.total_energy = channel->GetOutputTotalEnergy(); + output_volume.total_duration = channel->GetOutputTotalDuration(); + + return VoipResult::kOk; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h index 5ebf4381c..b7c1f2947 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h @@ -74,45 +74,50 @@ class VoipCore : public VoipEngine, VoipVolumeControl& VolumeControl() override { return *this; } // Implements VoipBase interfaces. 
- absl::optional CreateChannel( - Transport* transport, - absl::optional local_ssrc) override; - void ReleaseChannel(ChannelId channel_id) override; - bool StartSend(ChannelId channel_id) override; - bool StopSend(ChannelId channel_id) override; - bool StartPlayout(ChannelId channel_id) override; - bool StopPlayout(ChannelId channel_id) override; + ChannelId CreateChannel(Transport* transport, + absl::optional local_ssrc) override; + VoipResult ReleaseChannel(ChannelId channel_id) override; + VoipResult StartSend(ChannelId channel_id) override; + VoipResult StopSend(ChannelId channel_id) override; + VoipResult StartPlayout(ChannelId channel_id) override; + VoipResult StopPlayout(ChannelId channel_id) override; // Implements VoipNetwork interfaces. - void ReceivedRTPPacket(ChannelId channel_id, - rtc::ArrayView rtp_packet) override; - void ReceivedRTCPPacket(ChannelId channel_id, - rtc::ArrayView rtcp_packet) override; + VoipResult ReceivedRTPPacket( + ChannelId channel_id, + rtc::ArrayView rtp_packet) override; + VoipResult ReceivedRTCPPacket( + ChannelId channel_id, + rtc::ArrayView rtcp_packet) override; // Implements VoipCodec interfaces. - void SetSendCodec(ChannelId channel_id, - int payload_type, - const SdpAudioFormat& encoder_format) override; - void SetReceiveCodecs( + VoipResult SetSendCodec(ChannelId channel_id, + int payload_type, + const SdpAudioFormat& encoder_format) override; + VoipResult SetReceiveCodecs( ChannelId channel_id, const std::map& decoder_specs) override; // Implements VoipDtmf interfaces. 
- void RegisterTelephoneEventType(ChannelId channel_id, - int rtp_payload_type, - int sample_rate_hz) override; - bool SendDtmfEvent(ChannelId channel_id, - DtmfEvent dtmf_event, - int duration_ms) override; + VoipResult RegisterTelephoneEventType(ChannelId channel_id, + int rtp_payload_type, + int sample_rate_hz) override; + VoipResult SendDtmfEvent(ChannelId channel_id, + DtmfEvent dtmf_event, + int duration_ms) override; // Implements VoipStatistics interfaces. - absl::optional GetIngressStatistics( - ChannelId channel_id) override; + VoipResult GetIngressStatistics(ChannelId channel_id, + IngressStatistics& ingress_stats) override; + VoipResult GetChannelStatistics(ChannelId channe_id, + ChannelStatistics& channel_stats) override; // Implements VoipVolumeControl interfaces. - void SetInputMuted(ChannelId channel_id, bool enable) override; - absl::optional GetInputVolumeInfo(ChannelId channel_id) override; - absl::optional GetOutputVolumeInfo(ChannelId channel_id) override; + VoipResult SetInputMuted(ChannelId channel_id, bool enable) override; + VoipResult GetInputVolumeInfo(ChannelId channel_id, + VolumeInfo& volume_info) override; + VoipResult GetOutputVolumeInfo(ChannelId channel_id, + VolumeInfo& volume_info) override; private: // Initialize ADM and default audio device if needed. diff --git a/TMessagesProj/jni/voip/webrtc/base/base_paths_mac.mm b/TMessagesProj/jni/voip/webrtc/base/base_paths_mac.mm deleted file mode 100644 index f10b2c312..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/base_paths_mac.mm +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// Defines base::PathProviderMac which replaces base::PathProviderPosix for Mac -// in base/path_service.cc. 
- -#include -#import -#include -#include - -#include "base/base_paths.h" -#include "base/compiler_specific.h" -#include "base/files/file_path.h" -#include "base/files/file_util.h" -#include "base/logging.h" -#include "base/mac/bundle_locations.h" -#include "base/mac/foundation_util.h" -#include "base/path_service.h" -#include "base/strings/string_util.h" -#include "base/threading/thread_restrictions.h" -#include "build/build_config.h" - -namespace { - -void GetNSExecutablePath(base::FilePath* path) { - DCHECK(path); - // Executable path can have relative references ("..") depending on - // how the app was launched. - uint32_t executable_length = 0; - _NSGetExecutablePath(NULL, &executable_length); - DCHECK_GT(executable_length, 1u); - std::string executable_path; - int rv = _NSGetExecutablePath( - base::WriteInto(&executable_path, executable_length), - &executable_length); - DCHECK_EQ(rv, 0); - - // _NSGetExecutablePath may return paths containing ./ or ../ which makes - // FilePath::DirName() work incorrectly, convert it to absolute path so that - // paths such as DIR_SOURCE_ROOT can work, since we expect absolute paths to - // be returned here. - // TODO(bauerb): http://crbug.com/259796, http://crbug.com/373477 - base::ThreadRestrictions::ScopedAllowIO allow_io; - *path = base::MakeAbsoluteFilePath(base::FilePath(executable_path)); -} - -// Returns true if the module for |address| is found. |path| will contain -// the path to the module. Note that |path| may not be absolute. 
-bool GetModulePathForAddress(base::FilePath* path, - const void* address) WARN_UNUSED_RESULT; - -bool GetModulePathForAddress(base::FilePath* path, const void* address) { - Dl_info info; - if (dladdr(address, &info) == 0) - return false; - *path = base::FilePath(info.dli_fname); - return true; -} - -} // namespace - -namespace base { - -bool PathProviderMac(int key, base::FilePath* result) { - switch (key) { - case base::FILE_EXE: - GetNSExecutablePath(result); - return true; - case base::FILE_MODULE: - return GetModulePathForAddress(result, - reinterpret_cast(&base::PathProviderMac)); - case base::DIR_APP_DATA: { - bool success = base::mac::GetUserDirectory(NSApplicationSupportDirectory, - result); -#if defined(OS_IOS) - // On IOS, this directory does not exist unless it is created explicitly. - if (success && !base::PathExists(*result)) - success = base::CreateDirectory(*result); -#endif // defined(OS_IOS) - return success; - } - case base::DIR_SOURCE_ROOT: - // Go through PathService to catch overrides. - if (!PathService::Get(base::FILE_EXE, result)) - return false; - - // Start with the executable's directory. - *result = result->DirName(); - -#if !defined(OS_IOS) - if (base::mac::AmIBundled()) { - // The bundled app executables (Chromium, TestShell, etc) live five - // levels down, eg: - // src/xcodebuild/{Debug|Release}/Chromium.app/Contents/MacOS/Chromium - *result = result->DirName().DirName().DirName().DirName().DirName(); - } else { - // Unit tests execute two levels deep from the source root, eg: - // src/xcodebuild/{Debug|Release}/base_unittests - *result = result->DirName().DirName(); - } -#endif - return true; - case base::DIR_USER_DESKTOP: -#if defined(OS_IOS) - // iOS does not have desktop directories. - NOTIMPLEMENTED(); - return false; -#else - return base::mac::GetUserDirectory(NSDesktopDirectory, result); -#endif - case base::DIR_ASSETS: -#if defined(OS_IOS) - // TODO(https://crbug.com/957792): Assets live alongside the executable. 
- return PathService::Get(base::DIR_MODULE, result); -#else - if (!base::mac::AmIBundled()) { - return PathService::Get(base::DIR_MODULE, result); - } - *result = base::mac::FrameworkBundlePath().Append( - FILE_PATH_LITERAL("Resources")); - return true; -#endif // !defined(OS_IOS) - case base::DIR_CACHE: - return base::mac::GetUserDirectory(NSCachesDirectory, result); - default: - return false; - } -} - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/base_paths_posix.cc b/TMessagesProj/jni/voip/webrtc/base/base_paths_posix.cc deleted file mode 100644 index 00a15696c..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/base_paths_posix.cc +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// Defines base::PathProviderPosix, default path provider on POSIX OSes that -// don't have their own base_paths_OS.cc implementation (i.e. all but Mac and -// Android). - -#include "base/base_paths.h" - -#include -#include - -#include -#include -#include - -#include "base/environment.h" -#include "base/files/file_path.h" -#include "base/files/file_util.h" -#include "base/logging.h" -#include "base/nix/xdg_util.h" -#include "base/path_service.h" -#include "base/process/process_metrics.h" -#include "build/build_config.h" - -#if defined(OS_FREEBSD) -#include -#include -#elif defined(OS_SOLARIS) || defined(OS_AIX) -#include -#endif - -namespace base { - -bool PathProviderPosix(int key, FilePath* result) { - switch (key) { - case FILE_EXE: - case FILE_MODULE: { // TODO(evanm): is this correct? 
-#if defined(OS_LINUX) - FilePath bin_dir; - if (!ReadSymbolicLink(FilePath(kProcSelfExe), &bin_dir)) { - NOTREACHED() << "Unable to resolve " << kProcSelfExe << "."; - return false; - } - *result = bin_dir; - return true; -#elif defined(OS_FREEBSD) - int name[] = { CTL_KERN, KERN_PROC, KERN_PROC_PATHNAME, -1 }; - char bin_dir[PATH_MAX + 1]; - size_t length = sizeof(bin_dir); - // Upon return, |length| is the number of bytes written to |bin_dir| - // including the string terminator. - int error = sysctl(name, 4, bin_dir, &length, NULL, 0); - if (error < 0 || length <= 1) { - NOTREACHED() << "Unable to resolve path."; - return false; - } - *result = FilePath(FilePath::StringType(bin_dir, length - 1)); - return true; -#elif defined(OS_SOLARIS) - char bin_dir[PATH_MAX + 1]; - if (realpath(getexecname(), bin_dir) == NULL) { - NOTREACHED() << "Unable to resolve " << getexecname() << "."; - return false; - } - *result = FilePath(bin_dir); - return true; -#elif defined(OS_OPENBSD) || defined(OS_AIX) - // There is currently no way to get the executable path on OpenBSD - char* cpath; - if ((cpath = getenv("CHROME_EXE_PATH")) != NULL) - *result = FilePath(cpath); - else - *result = FilePath("/usr/local/chrome/chrome"); - return true; -#endif - } - case DIR_SOURCE_ROOT: { - // Allow passing this in the environment, for more flexibility in build - // tree configurations (sub-project builds, gyp --output_dir, etc.) - std::unique_ptr env(Environment::Create()); - std::string cr_source_root; - FilePath path; - if (env->GetVar("CR_SOURCE_ROOT", &cr_source_root)) { - path = FilePath(cr_source_root); - if (PathExists(path)) { - *result = path; - return true; - } - DLOG(WARNING) << "CR_SOURCE_ROOT is set, but it appears to not " - << "point to a directory."; - } - // On POSIX, unit tests execute two levels deep from the source root. 
- // For example: out/{Debug|Release}/net_unittest - if (PathService::Get(DIR_EXE, &path)) { - *result = path.DirName().DirName(); - return true; - } - - DLOG(ERROR) << "Couldn't find your source root. " - << "Try running from your chromium/src directory."; - return false; - } - case DIR_USER_DESKTOP: - *result = nix::GetXDGUserDirectory("DESKTOP", "Desktop"); - return true; - case DIR_CACHE: { - std::unique_ptr env(Environment::Create()); - FilePath cache_dir( - nix::GetXDGDirectory(env.get(), "XDG_CACHE_HOME", ".cache")); - *result = cache_dir; - return true; - } - } - return false; -} - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/debug/stack_trace_posix.cc b/TMessagesProj/jni/voip/webrtc/base/debug/stack_trace_posix.cc deleted file mode 100644 index 299feb6c6..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/debug/stack_trace_posix.cc +++ /dev/null @@ -1,931 +0,0 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include "base/debug/stack_trace.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include - -#if !defined(USE_SYMBOLIZE) -#include -#endif -#if !defined(__UCLIBC__) && !defined(_AIX) -#include -#endif - -#if defined(OS_MACOSX) -#include -#endif - -#if defined(OS_LINUX) -#include "base/debug/proc_maps_linux.h" -#endif - -#include "base/cfi_buildflags.h" -#include "base/debug/debugger.h" -#include "base/files/scoped_file.h" -#include "base/logging.h" -#include "base/memory/free_deleter.h" -#include "base/memory/singleton.h" -#include "base/numerics/safe_conversions.h" -#include "base/posix/eintr_wrapper.h" -#include "base/stl_util.h" -#include "base/strings/string_number_conversions.h" -#include "base/strings/string_util.h" -#include "build/build_config.h" - -#if defined(USE_SYMBOLIZE) -#include "base/third_party/symbolize/symbolize.h" -#endif - -namespace base { -namespace debug { - -namespace { - -volatile sig_atomic_t in_signal_handler = 0; - -#if !defined(OS_NACL) -bool (*try_handle_signal)(int, siginfo_t*, void*) = nullptr; -#endif - -#if !defined(USE_SYMBOLIZE) -// The prefix used for mangled symbols, per the Itanium C++ ABI: -// http://www.codesourcery.com/cxx-abi/abi.html#mangling -const char kMangledSymbolPrefix[] = "_Z"; - -// Characters that can be used for symbols, generated by Ruby: -// (('a'..'z').to_a+('A'..'Z').to_a+('0'..'9').to_a + ['_']).join -const char kSymbolCharacters[] = - "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"; -#endif // !defined(USE_SYMBOLIZE) - -#if !defined(USE_SYMBOLIZE) -// Demangles C++ symbols in the given text. 
Example: -// -// "out/Debug/base_unittests(_ZN10StackTraceC1Ev+0x20) [0x817778c]" -// => -// "out/Debug/base_unittests(StackTrace::StackTrace()+0x20) [0x817778c]" -void DemangleSymbols(std::string* text) { - // Note: code in this function is NOT async-signal safe (std::string uses - // malloc internally). - -#if !defined(__UCLIBC__) && !defined(_AIX) - std::string::size_type search_from = 0; - while (search_from < text->size()) { - // Look for the start of a mangled symbol, from search_from. - std::string::size_type mangled_start = - text->find(kMangledSymbolPrefix, search_from); - if (mangled_start == std::string::npos) { - break; // Mangled symbol not found. - } - - // Look for the end of the mangled symbol. - std::string::size_type mangled_end = - text->find_first_not_of(kSymbolCharacters, mangled_start); - if (mangled_end == std::string::npos) { - mangled_end = text->size(); - } - std::string mangled_symbol = - text->substr(mangled_start, mangled_end - mangled_start); - - // Try to demangle the mangled symbol candidate. - int status = 0; - std::unique_ptr demangled_symbol( - abi::__cxa_demangle(mangled_symbol.c_str(), nullptr, 0, &status)); - if (status == 0) { // Demangling is successful. - // Remove the mangled symbol. - text->erase(mangled_start, mangled_end - mangled_start); - // Insert the demangled symbol. - text->insert(mangled_start, demangled_symbol.get()); - // Next time, we'll start right after the demangled symbol we inserted. - search_from = mangled_start + strlen(demangled_symbol.get()); - } else { - // Failed to demangle. Retry after the "_Z" we just found. 
- search_from = mangled_start + 2; - } - } -#endif // !defined(__UCLIBC__) && !defined(_AIX) -} -#endif // !defined(USE_SYMBOLIZE) - -class BacktraceOutputHandler { - public: - virtual void HandleOutput(const char* output) = 0; - - protected: - virtual ~BacktraceOutputHandler() = default; -}; - -#if !defined(__UCLIBC__) && !defined(_AIX) -void OutputPointer(void* pointer, BacktraceOutputHandler* handler) { - // This should be more than enough to store a 64-bit number in hex: - // 16 hex digits + 1 for null-terminator. - char buf[17] = { '\0' }; - handler->HandleOutput("0x"); - internal::itoa_r(reinterpret_cast(pointer), - buf, sizeof(buf), 16, 12); - handler->HandleOutput(buf); -} - -#if defined(USE_SYMBOLIZE) -void OutputFrameId(intptr_t frame_id, BacktraceOutputHandler* handler) { - // Max unsigned 64-bit number in decimal has 20 digits (18446744073709551615). - // Hence, 30 digits should be more than enough to represent it in decimal - // (including the null-terminator). - char buf[30] = { '\0' }; - handler->HandleOutput("#"); - internal::itoa_r(frame_id, buf, sizeof(buf), 10, 1); - handler->HandleOutput(buf); -} -#endif // defined(USE_SYMBOLIZE) - -void ProcessBacktrace(void* const* trace, - size_t size, - const char* prefix_string, - BacktraceOutputHandler* handler) { -// NOTE: This code MUST be async-signal safe (it's used by in-process -// stack dumping signal handler). NO malloc or stdio is allowed here. - -#if defined(USE_SYMBOLIZE) - for (size_t i = 0; i < size; ++i) { - if (prefix_string) - handler->HandleOutput(prefix_string); - - OutputFrameId(i, handler); - handler->HandleOutput(" "); - OutputPointer(trace[i], handler); - handler->HandleOutput(" "); - - char buf[1024] = { '\0' }; - - // Subtract by one as return address of function may be in the next - // function when a function is annotated as noreturn. 
- void* address = static_cast(trace[i]) - 1; - if (google::Symbolize(address, buf, sizeof(buf))) - handler->HandleOutput(buf); - else - handler->HandleOutput(""); - - handler->HandleOutput("\n"); - } -#else - bool printed = false; - - // Below part is async-signal unsafe (uses malloc), so execute it only - // when we are not executing the signal handler. - if (in_signal_handler == 0) { - std::unique_ptr trace_symbols( - backtrace_symbols(trace, size)); - if (trace_symbols.get()) { - for (size_t i = 0; i < size; ++i) { - std::string trace_symbol = trace_symbols.get()[i]; - DemangleSymbols(&trace_symbol); - if (prefix_string) - handler->HandleOutput(prefix_string); - handler->HandleOutput(trace_symbol.c_str()); - handler->HandleOutput("\n"); - } - - printed = true; - } - } - - if (!printed) { - for (size_t i = 0; i < size; ++i) { - handler->HandleOutput(" ["); - OutputPointer(trace[i], handler); - handler->HandleOutput("]\n"); - } - } -#endif // defined(USE_SYMBOLIZE) -} -#endif // !defined(__UCLIBC__) && !defined(_AIX) - -void PrintToStderr(const char* output) { - // NOTE: This code MUST be async-signal safe (it's used by in-process - // stack dumping signal handler). NO malloc or stdio is allowed here. - ignore_result(HANDLE_EINTR(write(STDERR_FILENO, output, strlen(output)))); -} - -void StackDumpSignalHandler(int signal, siginfo_t* info, void* void_context) { - // NOTE: This code MUST be async-signal safe. - // NO malloc or stdio is allowed here. - -#if !defined(OS_NACL) - // Give a registered callback a chance to recover from this signal - // - // V8 uses guard regions to guarantee memory safety in WebAssembly. This means - // some signals might be expected if they originate from Wasm code while - // accessing the guard region. We give V8 the chance to handle and recover - // from these signals first. - if (try_handle_signal != nullptr && - try_handle_signal(signal, info, void_context)) { - // The first chance handler took care of this. 
The SA_RESETHAND flag - // replaced this signal handler upon entry, but we want to stay - // installed. Thus, we reinstall ourselves before returning. - struct sigaction action; - memset(&action, 0, sizeof(action)); - action.sa_flags = SA_RESETHAND | SA_SIGINFO; - action.sa_sigaction = &StackDumpSignalHandler; - sigemptyset(&action.sa_mask); - - sigaction(signal, &action, nullptr); - return; - } -#endif - -// Do not take the "in signal handler" code path on Mac in a DCHECK-enabled -// build, as this prevents seeing a useful (symbolized) stack trace on a crash -// or DCHECK() failure. While it may not be fully safe to run the stack symbol -// printing code, in practice it's better to provide meaningful stack traces - -// and the risk is low given we're likely crashing already. -#if !defined(OS_MACOSX) || !DCHECK_IS_ON() - // Record the fact that we are in the signal handler now, so that the rest - // of StackTrace can behave in an async-signal-safe manner. - in_signal_handler = 1; -#endif - - if (BeingDebugged()) - BreakDebugger(); - - PrintToStderr("Received signal "); - char buf[1024] = { 0 }; - internal::itoa_r(signal, buf, sizeof(buf), 10, 0); - PrintToStderr(buf); - if (signal == SIGBUS) { - if (info->si_code == BUS_ADRALN) - PrintToStderr(" BUS_ADRALN "); - else if (info->si_code == BUS_ADRERR) - PrintToStderr(" BUS_ADRERR "); - else if (info->si_code == BUS_OBJERR) - PrintToStderr(" BUS_OBJERR "); - else - PrintToStderr(" "); - } else if (signal == SIGFPE) { - if (info->si_code == FPE_FLTDIV) - PrintToStderr(" FPE_FLTDIV "); - else if (info->si_code == FPE_FLTINV) - PrintToStderr(" FPE_FLTINV "); - else if (info->si_code == FPE_FLTOVF) - PrintToStderr(" FPE_FLTOVF "); - else if (info->si_code == FPE_FLTRES) - PrintToStderr(" FPE_FLTRES "); - else if (info->si_code == FPE_FLTSUB) - PrintToStderr(" FPE_FLTSUB "); - else if (info->si_code == FPE_FLTUND) - PrintToStderr(" FPE_FLTUND "); - else if (info->si_code == FPE_INTDIV) - PrintToStderr(" FPE_INTDIV "); - 
else if (info->si_code == FPE_INTOVF) - PrintToStderr(" FPE_INTOVF "); - else - PrintToStderr(" "); - } else if (signal == SIGILL) { - if (info->si_code == ILL_BADSTK) - PrintToStderr(" ILL_BADSTK "); - else if (info->si_code == ILL_COPROC) - PrintToStderr(" ILL_COPROC "); - else if (info->si_code == ILL_ILLOPN) - PrintToStderr(" ILL_ILLOPN "); - else if (info->si_code == ILL_ILLADR) - PrintToStderr(" ILL_ILLADR "); - else if (info->si_code == ILL_ILLTRP) - PrintToStderr(" ILL_ILLTRP "); - else if (info->si_code == ILL_PRVOPC) - PrintToStderr(" ILL_PRVOPC "); - else if (info->si_code == ILL_PRVREG) - PrintToStderr(" ILL_PRVREG "); - else - PrintToStderr(" "); - } else if (signal == SIGSEGV) { - if (info->si_code == SEGV_MAPERR) - PrintToStderr(" SEGV_MAPERR "); - else if (info->si_code == SEGV_ACCERR) - PrintToStderr(" SEGV_ACCERR "); - else - PrintToStderr(" "); - } - if (signal == SIGBUS || signal == SIGFPE || - signal == SIGILL || signal == SIGSEGV) { - internal::itoa_r(reinterpret_cast(info->si_addr), - buf, sizeof(buf), 16, 12); - PrintToStderr(buf); - } - PrintToStderr("\n"); - -#if BUILDFLAG(CFI_ENFORCEMENT_TRAP) - if (signal == SIGILL && info->si_code == ILL_ILLOPN) { - PrintToStderr( - "CFI: Most likely a control flow integrity violation; for more " - "information see:\n"); - PrintToStderr( - "https://www.chromium.org/developers/testing/control-flow-integrity\n"); - } -#endif // BUILDFLAG(CFI_ENFORCEMENT_TRAP) - - debug::StackTrace().Print(); - -#if defined(OS_LINUX) -#if ARCH_CPU_X86_FAMILY - ucontext_t* context = reinterpret_cast(void_context); - const struct { - const char* label; - greg_t value; - } registers[] = { -#if ARCH_CPU_32_BITS - { " gs: ", context->uc_mcontext.gregs[REG_GS] }, - { " fs: ", context->uc_mcontext.gregs[REG_FS] }, - { " es: ", context->uc_mcontext.gregs[REG_ES] }, - { " ds: ", context->uc_mcontext.gregs[REG_DS] }, - { " edi: ", context->uc_mcontext.gregs[REG_EDI] }, - { " esi: ", context->uc_mcontext.gregs[REG_ESI] }, - { " ebp: 
", context->uc_mcontext.gregs[REG_EBP] }, - { " esp: ", context->uc_mcontext.gregs[REG_ESP] }, - { " ebx: ", context->uc_mcontext.gregs[REG_EBX] }, - { " edx: ", context->uc_mcontext.gregs[REG_EDX] }, - { " ecx: ", context->uc_mcontext.gregs[REG_ECX] }, - { " eax: ", context->uc_mcontext.gregs[REG_EAX] }, - { " trp: ", context->uc_mcontext.gregs[REG_TRAPNO] }, - { " err: ", context->uc_mcontext.gregs[REG_ERR] }, - { " ip: ", context->uc_mcontext.gregs[REG_EIP] }, - { " cs: ", context->uc_mcontext.gregs[REG_CS] }, - { " efl: ", context->uc_mcontext.gregs[REG_EFL] }, - { " usp: ", context->uc_mcontext.gregs[REG_UESP] }, - { " ss: ", context->uc_mcontext.gregs[REG_SS] }, -#elif ARCH_CPU_64_BITS - { " r8: ", context->uc_mcontext.gregs[REG_R8] }, - { " r9: ", context->uc_mcontext.gregs[REG_R9] }, - { " r10: ", context->uc_mcontext.gregs[REG_R10] }, - { " r11: ", context->uc_mcontext.gregs[REG_R11] }, - { " r12: ", context->uc_mcontext.gregs[REG_R12] }, - { " r13: ", context->uc_mcontext.gregs[REG_R13] }, - { " r14: ", context->uc_mcontext.gregs[REG_R14] }, - { " r15: ", context->uc_mcontext.gregs[REG_R15] }, - { " di: ", context->uc_mcontext.gregs[REG_RDI] }, - { " si: ", context->uc_mcontext.gregs[REG_RSI] }, - { " bp: ", context->uc_mcontext.gregs[REG_RBP] }, - { " bx: ", context->uc_mcontext.gregs[REG_RBX] }, - { " dx: ", context->uc_mcontext.gregs[REG_RDX] }, - { " ax: ", context->uc_mcontext.gregs[REG_RAX] }, - { " cx: ", context->uc_mcontext.gregs[REG_RCX] }, - { " sp: ", context->uc_mcontext.gregs[REG_RSP] }, - { " ip: ", context->uc_mcontext.gregs[REG_RIP] }, - { " efl: ", context->uc_mcontext.gregs[REG_EFL] }, - { " cgf: ", context->uc_mcontext.gregs[REG_CSGSFS] }, - { " erf: ", context->uc_mcontext.gregs[REG_ERR] }, - { " trp: ", context->uc_mcontext.gregs[REG_TRAPNO] }, - { " msk: ", context->uc_mcontext.gregs[REG_OLDMASK] }, - { " cr2: ", context->uc_mcontext.gregs[REG_CR2] }, -#endif // ARCH_CPU_32_BITS - }; - -#if ARCH_CPU_32_BITS - const int 
kRegisterPadding = 8; -#elif ARCH_CPU_64_BITS - const int kRegisterPadding = 16; -#endif - - for (size_t i = 0; i < base::size(registers); i++) { - PrintToStderr(registers[i].label); - internal::itoa_r(registers[i].value, buf, sizeof(buf), - 16, kRegisterPadding); - PrintToStderr(buf); - - if ((i + 1) % 4 == 0) - PrintToStderr("\n"); - } - PrintToStderr("\n"); -#endif // ARCH_CPU_X86_FAMILY -#endif // defined(OS_LINUX) - - PrintToStderr("[end of stack trace]\n"); - -#if defined(OS_MACOSX) && !defined(OS_IOS) - if (::signal(signal, SIG_DFL) == SIG_ERR) - _exit(1); -#else - // Non-Mac OSes should probably reraise the signal as well, but the Linux - // sandbox tests break on CrOS devices. - // https://code.google.com/p/chromium/issues/detail?id=551681 - PrintToStderr("Calling _exit(1). Core file will not be generated.\n"); - _exit(1); -#endif // defined(OS_MACOSX) && !defined(OS_IOS) -} - -class PrintBacktraceOutputHandler : public BacktraceOutputHandler { - public: - PrintBacktraceOutputHandler() = default; - - void HandleOutput(const char* output) override { - // NOTE: This code MUST be async-signal safe (it's used by in-process - // stack dumping signal handler). NO malloc or stdio is allowed here. - PrintToStderr(output); - } - - private: - DISALLOW_COPY_AND_ASSIGN(PrintBacktraceOutputHandler); -}; - -class StreamBacktraceOutputHandler : public BacktraceOutputHandler { - public: - explicit StreamBacktraceOutputHandler(std::ostream* os) : os_(os) { - } - - void HandleOutput(const char* output) override { (*os_) << output; } - - private: - std::ostream* os_; - - DISALLOW_COPY_AND_ASSIGN(StreamBacktraceOutputHandler); -}; - -void WarmUpBacktrace() { - // Warm up stack trace infrastructure. It turns out that on the first - // call glibc initializes some internal data structures using pthread_once, - // and even backtrace() can call malloc(), leading to hangs. 
- // - // Example stack trace snippet (with tcmalloc): - // - // #8 0x0000000000a173b5 in tc_malloc - // at ./third_party/tcmalloc/chromium/src/debugallocation.cc:1161 - // #9 0x00007ffff7de7900 in _dl_map_object_deps at dl-deps.c:517 - // #10 0x00007ffff7ded8a9 in dl_open_worker at dl-open.c:262 - // #11 0x00007ffff7de9176 in _dl_catch_error at dl-error.c:178 - // #12 0x00007ffff7ded31a in _dl_open (file=0x7ffff625e298 "libgcc_s.so.1") - // at dl-open.c:639 - // #13 0x00007ffff6215602 in do_dlopen at dl-libc.c:89 - // #14 0x00007ffff7de9176 in _dl_catch_error at dl-error.c:178 - // #15 0x00007ffff62156c4 in dlerror_run at dl-libc.c:48 - // #16 __GI___libc_dlopen_mode at dl-libc.c:165 - // #17 0x00007ffff61ef8f5 in init - // at ../sysdeps/x86_64/../ia64/backtrace.c:53 - // #18 0x00007ffff6aad400 in pthread_once - // at ../nptl/sysdeps/unix/sysv/linux/x86_64/pthread_once.S:104 - // #19 0x00007ffff61efa14 in __GI___backtrace - // at ../sysdeps/x86_64/../ia64/backtrace.c:104 - // #20 0x0000000000752a54 in base::debug::StackTrace::StackTrace - // at base/debug/stack_trace_posix.cc:175 - // #21 0x00000000007a4ae5 in - // base::(anonymous namespace)::StackDumpSignalHandler - // at base/process_util_posix.cc:172 - // #22 - StackTrace stack_trace; -} - -#if defined(USE_SYMBOLIZE) - -// class SandboxSymbolizeHelper. -// -// The purpose of this class is to prepare and install a "file open" callback -// needed by the stack trace symbolization code -// (base/third_party/symbolize/symbolize.h) so that it can function properly -// in a sandboxed process. The caveat is that this class must be instantiated -// before the sandboxing is enabled so that it can get the chance to open all -// the object files that are loaded in the virtual address space of the current -// process. -class SandboxSymbolizeHelper { - public: - // Returns the singleton instance. 
- static SandboxSymbolizeHelper* GetInstance() { - return Singleton>::get(); - } - - private: - friend struct DefaultSingletonTraits; - - SandboxSymbolizeHelper() - : is_initialized_(false) { - Init(); - } - - ~SandboxSymbolizeHelper() { - UnregisterCallback(); - CloseObjectFiles(); - } - - // Returns a O_RDONLY file descriptor for |file_path| if it was opened - // successfully during the initialization. The file is repositioned at - // offset 0. - // IMPORTANT: This function must be async-signal-safe because it can be - // called from a signal handler (symbolizing stack frames for a crash). - int GetFileDescriptor(const char* file_path) { - int fd = -1; - -#if !defined(OFFICIAL_BUILD) || !defined(NO_UNWIND_TABLES) - if (file_path) { - // The assumption here is that iterating over std::map - // using a const_iterator does not allocate dynamic memory, hense it is - // async-signal-safe. - std::map::const_iterator it; - for (it = modules_.begin(); it != modules_.end(); ++it) { - if (strcmp((it->first).c_str(), file_path) == 0) { - // POSIX.1-2004 requires an implementation to guarantee that dup() - // is async-signal-safe. - fd = HANDLE_EINTR(dup(it->second)); - break; - } - } - // POSIX.1-2004 requires an implementation to guarantee that lseek() - // is async-signal-safe. - if (fd >= 0 && lseek(fd, 0, SEEK_SET) < 0) { - // Failed to seek. - fd = -1; - } - } -#endif // !defined(OFFICIAL_BUILD) || !defined(NO_UNWIND_TABLES) - - return fd; - } - - // Searches for the object file (from /proc/self/maps) that contains - // the specified pc. If found, sets |start_address| to the start address - // of where this object file is mapped in memory, sets the module base - // address into |base_address|, copies the object file name into - // |out_file_name|, and attempts to open the object file. If the object - // file is opened successfully, returns the file descriptor. Otherwise, - // returns -1. 
|out_file_name_size| is the size of the file name buffer - // (including the null terminator). - // IMPORTANT: This function must be async-signal-safe because it can be - // called from a signal handler (symbolizing stack frames for a crash). - static int OpenObjectFileContainingPc(uint64_t pc, uint64_t& start_address, - uint64_t& base_address, char* file_path, - int file_path_size) { - // This method can only be called after the singleton is instantiated. - // This is ensured by the following facts: - // * This is the only static method in this class, it is private, and - // the class has no friends (except for the DefaultSingletonTraits). - // The compiler guarantees that it can only be called after the - // singleton is instantiated. - // * This method is used as a callback for the stack tracing code and - // the callback registration is done in the constructor, so logically - // it cannot be called before the singleton is created. - SandboxSymbolizeHelper* instance = GetInstance(); - - // Cannot use STL iterators here, since debug iterators use locks. - // NOLINTNEXTLINE(modernize-loop-convert) - for (size_t i = 0; i < instance->regions_.size(); ++i) { - const MappedMemoryRegion& region = instance->regions_[i]; - if (region.start <= pc && pc < region.end) { - start_address = region.start; - base_address = region.base; - if (file_path && file_path_size > 0) { - strncpy(file_path, region.path.c_str(), file_path_size); - // Ensure null termination. - file_path[file_path_size - 1] = '\0'; - } - return instance->GetFileDescriptor(region.path.c_str()); - } - } - return -1; - } - - // Set the base address for each memory region by reading ELF headers in - // process memory. 
- void SetBaseAddressesForMemoryRegions() { - base::ScopedFD mem_fd( - HANDLE_EINTR(open("/proc/self/mem", O_RDONLY | O_CLOEXEC))); - if (!mem_fd.is_valid()) - return; - - auto safe_memcpy = [&mem_fd](void* dst, uintptr_t src, size_t size) { - return HANDLE_EINTR(pread(mem_fd.get(), dst, size, src)) == ssize_t(size); - }; - - uintptr_t cur_base = 0; - for (auto& r : regions_) { - ElfW(Ehdr) ehdr; - static_assert(SELFMAG <= sizeof(ElfW(Ehdr)), "SELFMAG too large"); - if ((r.permissions & MappedMemoryRegion::READ) && - safe_memcpy(&ehdr, r.start, sizeof(ElfW(Ehdr))) && - memcmp(ehdr.e_ident, ELFMAG, SELFMAG) == 0) { - switch (ehdr.e_type) { - case ET_EXEC: - cur_base = 0; - break; - case ET_DYN: - // Find the segment containing file offset 0. This will correspond - // to the ELF header that we just read. Normally this will have - // virtual address 0, but this is not guaranteed. We must subtract - // the virtual address from the address where the ELF header was - // mapped to get the base address. - // - // If we fail to find a segment for file offset 0, use the address - // of the ELF header as the base address. - cur_base = r.start; - for (unsigned i = 0; i != ehdr.e_phnum; ++i) { - ElfW(Phdr) phdr; - if (safe_memcpy(&phdr, r.start + ehdr.e_phoff + i * sizeof(phdr), - sizeof(phdr)) && - phdr.p_type == PT_LOAD && phdr.p_offset == 0) { - cur_base = r.start - phdr.p_vaddr; - break; - } - } - break; - default: - // ET_REL or ET_CORE. These aren't directly executable, so they - // don't affect the base address. - break; - } - } - - r.base = cur_base; - } - } - - // Parses /proc/self/maps in order to compile a list of all object file names - // for the modules that are loaded in the current process. - // Returns true on success. - bool CacheMemoryRegions() { - // Reads /proc/self/maps. - std::string contents; - if (!ReadProcMaps(&contents)) { - LOG(ERROR) << "Failed to read /proc/self/maps"; - return false; - } - - // Parses /proc/self/maps. 
- if (!ParseProcMaps(contents, ®ions_)) { - LOG(ERROR) << "Failed to parse the contents of /proc/self/maps"; - return false; - } - - SetBaseAddressesForMemoryRegions(); - - is_initialized_ = true; - return true; - } - - // Opens all object files and caches their file descriptors. - void OpenSymbolFiles() { - // Pre-opening and caching the file descriptors of all loaded modules is - // not safe for production builds. Hence it is only done in non-official - // builds. For more details, take a look at: http://crbug.com/341966. -#if !defined(OFFICIAL_BUILD) || !defined(NO_UNWIND_TABLES) - // Open the object files for all read-only executable regions and cache - // their file descriptors. - std::vector::const_iterator it; - for (it = regions_.begin(); it != regions_.end(); ++it) { - const MappedMemoryRegion& region = *it; - // Only interesed in read-only executable regions. - if ((region.permissions & MappedMemoryRegion::READ) == - MappedMemoryRegion::READ && - (region.permissions & MappedMemoryRegion::WRITE) == 0 && - (region.permissions & MappedMemoryRegion::EXECUTE) == - MappedMemoryRegion::EXECUTE) { - if (region.path.empty()) { - // Skip regions with empty file names. - continue; - } - if (region.path[0] == '[') { - // Skip pseudo-paths, like [stack], [vdso], [heap], etc ... - continue; - } - if (base::EndsWith(region.path, " (deleted)", - base::CompareCase::SENSITIVE)) { - // Skip deleted files. - continue; - } - // Avoid duplicates. - if (modules_.find(region.path) == modules_.end()) { - int fd = open(region.path.c_str(), O_RDONLY | O_CLOEXEC); - if (fd >= 0) { - modules_.insert(std::make_pair(region.path, fd)); - } else { - LOG(WARNING) << "Failed to open file: " << region.path - << "\n Error: " << strerror(errno); - } - } - } - } -#endif // !defined(OFFICIAL_BUILD) || !defined(NO_UNWIND_TABLES) - } - - // Initializes and installs the symbolization callback. 
- void Init() { - if (CacheMemoryRegions()) { - OpenSymbolFiles(); - google::InstallSymbolizeOpenObjectFileCallback( - &OpenObjectFileContainingPc); - } - } - - // Unregister symbolization callback. - void UnregisterCallback() { - if (is_initialized_) { - google::InstallSymbolizeOpenObjectFileCallback(nullptr); - is_initialized_ = false; - } - } - - // Closes all file descriptors owned by this instance. - void CloseObjectFiles() { -#if !defined(OFFICIAL_BUILD) || !defined(NO_UNWIND_TABLES) - std::map::iterator it; - for (it = modules_.begin(); it != modules_.end(); ++it) { - int ret = IGNORE_EINTR(close(it->second)); - DCHECK(!ret); - it->second = -1; - } - modules_.clear(); -#endif // !defined(OFFICIAL_BUILD) || !defined(NO_UNWIND_TABLES) - } - - // Set to true upon successful initialization. - bool is_initialized_; - -#if !defined(OFFICIAL_BUILD) || !defined(NO_UNWIND_TABLES) - // Mapping from file name to file descriptor. Includes file descriptors - // for all successfully opened object files and the file descriptor for - // /proc/self/maps. This code is not safe for production builds. - std::map modules_; -#endif // !defined(OFFICIAL_BUILD) || !defined(NO_UNWIND_TABLES) - - // Cache for the process memory regions. Produced by parsing the contents - // of /proc/self/maps cache. - std::vector regions_; - - DISALLOW_COPY_AND_ASSIGN(SandboxSymbolizeHelper); -}; -#endif // USE_SYMBOLIZE - -} // namespace - -bool EnableInProcessStackDumping() { -#if defined(USE_SYMBOLIZE) - SandboxSymbolizeHelper::GetInstance(); -#endif // USE_SYMBOLIZE - - // When running in an application, our code typically expects SIGPIPE - // to be ignored. Therefore, when testing that same code, it should run - // with SIGPIPE ignored as well. 
- struct sigaction sigpipe_action; - memset(&sigpipe_action, 0, sizeof(sigpipe_action)); - sigpipe_action.sa_handler = SIG_IGN; - sigemptyset(&sigpipe_action.sa_mask); - bool success = (sigaction(SIGPIPE, &sigpipe_action, nullptr) == 0); - - // Avoid hangs during backtrace initialization, see above. - WarmUpBacktrace(); - - struct sigaction action; - memset(&action, 0, sizeof(action)); - action.sa_flags = SA_RESETHAND | SA_SIGINFO; - action.sa_sigaction = &StackDumpSignalHandler; - sigemptyset(&action.sa_mask); - - success &= (sigaction(SIGILL, &action, nullptr) == 0); - success &= (sigaction(SIGABRT, &action, nullptr) == 0); - success &= (sigaction(SIGFPE, &action, nullptr) == 0); - success &= (sigaction(SIGBUS, &action, nullptr) == 0); - success &= (sigaction(SIGSEGV, &action, nullptr) == 0); -// On Linux, SIGSYS is reserved by the kernel for seccomp-bpf sandboxing. -#if !defined(OS_LINUX) - success &= (sigaction(SIGSYS, &action, nullptr) == 0); -#endif // !defined(OS_LINUX) - - return success; -} - -#if !defined(OS_NACL) -bool SetStackDumpFirstChanceCallback(bool (*handler)(int, siginfo_t*, void*)) { - DCHECK(try_handle_signal == nullptr || handler == nullptr); - try_handle_signal = handler; - -#if defined(ADDRESS_SANITIZER) || defined(MEMORY_SANITIZER) || \ - defined(THREAD_SANITIZER) || defined(LEAK_SANITIZER) || \ - defined(UNDEFINED_SANITIZER) - struct sigaction installed_handler; - CHECK_EQ(sigaction(SIGSEGV, NULL, &installed_handler), 0); - // If the installed handler does not point to StackDumpSignalHandler, then - // allow_user_segv_handler is 0. - if (installed_handler.sa_sigaction != StackDumpSignalHandler) { - LOG(WARNING) - << "WARNING: sanitizers are preventing signal handler installation. 
" - << "WebAssembly trap handlers are disabled.\n"; - return false; - } -#endif - return true; -} -#endif - -size_t CollectStackTrace(void** trace, size_t count) { - // NOTE: This code MUST be async-signal safe (it's used by in-process - // stack dumping signal handler). NO malloc or stdio is allowed here. - -#if !defined(__UCLIBC__) && !defined(_AIX) - // Though the backtrace API man page does not list any possible negative - // return values, we take no chance. - return base::saturated_cast(backtrace(trace, count)); -#else - return 0; -#endif -} - -void StackTrace::PrintWithPrefix(const char* prefix_string) const { -// NOTE: This code MUST be async-signal safe (it's used by in-process -// stack dumping signal handler). NO malloc or stdio is allowed here. - -#if !defined(__UCLIBC__) && !defined(_AIX) - PrintBacktraceOutputHandler handler; - ProcessBacktrace(trace_, count_, prefix_string, &handler); -#endif -} - -#if !defined(__UCLIBC__) && !defined(_AIX) -void StackTrace::OutputToStreamWithPrefix(std::ostream* os, - const char* prefix_string) const { - StreamBacktraceOutputHandler handler(os); - ProcessBacktrace(trace_, count_, prefix_string, &handler); -} -#endif - -namespace internal { - -// NOTE: code from sandbox/linux/seccomp-bpf/demo.cc. -char* itoa_r(intptr_t i, char* buf, size_t sz, int base, size_t padding) { - // Make sure we can write at least one NUL byte. - size_t n = 1; - if (n > sz) - return nullptr; - - if (base < 2 || base > 16) { - buf[0] = '\000'; - return nullptr; - } - - char* start = buf; - - uintptr_t j = i; - - // Handle negative numbers (only for base 10). - if (i < 0 && base == 10) { - // This does "j = -i" while avoiding integer overflow. - j = static_cast(-(i + 1)) + 1; - - // Make sure we can write the '-' character. - if (++n > sz) { - buf[0] = '\000'; - return nullptr; - } - *start++ = '-'; - } - - // Loop until we have converted the entire number. Output at least one - // character (i.e. '0'). 
- char* ptr = start; - do { - // Make sure there is still enough space left in our output buffer. - if (++n > sz) { - buf[0] = '\000'; - return nullptr; - } - - // Output the next digit. - *ptr++ = "0123456789abcdef"[j % base]; - j /= base; - - if (padding > 0) - padding--; - } while (j > 0 || padding > 0); - - // Terminate the output with a NUL character. - *ptr = '\000'; - - // Conversion to ASCII actually resulted in the digits being in reverse - // order. We can't easily generate them in forward order, as we can't tell - // the number of characters needed until we are done converting. - // So, now, we reverse the string (except for the possible "-" sign). - while (--ptr > start) { - char ch = *ptr; - *ptr = *start; - *start++ = ch; - } - return buf; -} - -} // namespace internal - -} // namespace debug -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/json/json_value_converter.h b/TMessagesProj/jni/voip/webrtc/base/json/json_value_converter.h deleted file mode 100644 index 4e5d7f2e5..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/json/json_value_converter.h +++ /dev/null @@ -1,514 +0,0 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef BASE_JSON_JSON_VALUE_CONVERTER_H_ -#define BASE_JSON_JSON_VALUE_CONVERTER_H_ - -#include - -#include -#include -#include - -#include "base/base_export.h" -#include "base/logging.h" -#include "base/macros.h" -#include "base/memory/ptr_util.h" -#include "base/strings/string16.h" -#include "base/strings/string_piece.h" -#include "base/values.h" - -// JSONValueConverter converts a JSON value into a C++ struct in a -// lightweight way. -// -// Usage: -// For real examples, you may want to refer to _unittest.cc file. 
-// -// Assume that you have a struct like this: -// struct Message { -// int foo; -// std::string bar; -// static void RegisterJSONConverter( -// JSONValueConverter* converter); -// }; -// -// And you want to parse a json data into this struct. First, you -// need to declare RegisterJSONConverter() method in your struct. -// // static -// void Message::RegisterJSONConverter( -// JSONValueConverter* converter) { -// converter->RegisterIntField("foo", &Message::foo); -// converter->RegisterStringField("bar", &Message::bar); -// } -// -// Then, you just instantiate your JSONValueConverter of your type and call -// Convert() method. -// Message message; -// JSONValueConverter converter; -// converter.Convert(json, &message); -// -// Convert() returns false when it fails. Here "fail" means that the value is -// structurally different from expected, such like a string value appears -// for an int field. Do not report failures for missing fields. -// Also note that Convert() will modify the passed |message| even when it -// fails for performance reason. -// -// For nested field, the internal message also has to implement the registration -// method. Then, just use RegisterNestedField() from the containing struct's -// RegisterJSONConverter method. -// struct Nested { -// Message foo; -// static void RegisterJSONConverter(...) { -// ... -// converter->RegisterNestedField("foo", &Nested::foo); -// } -// }; -// -// For repeated field, we just assume std::vector> -// for its container and you can put RegisterRepeatedInt or some other types. -// Use RegisterRepeatedMessage for nested repeated fields. -// -// Sometimes JSON format uses string representations for other types such -// like enum, timestamp, or URL. You can use RegisterCustomField method -// and specify a function to convert a StringPiece to your type. -// bool ConvertFunc(StringPiece s, YourEnum* result) { -// // do something and return true if succeed... -// } -// struct Message { -// YourEnum ye; -// ... 
-// static void RegisterJSONConverter(...) { -// ... -// converter->RegsiterCustomField( -// "your_enum", &Message::ye, &ConvertFunc); -// } -// }; - -namespace base { - -template -class JSONValueConverter; - -namespace internal { - -template -class FieldConverterBase { - public: - explicit FieldConverterBase(const std::string& path) : field_path_(path) {} - virtual ~FieldConverterBase() = default; - virtual bool ConvertField(const base::Value& value, StructType* obj) - const = 0; - const std::string& field_path() const { return field_path_; } - - private: - std::string field_path_; - DISALLOW_COPY_AND_ASSIGN(FieldConverterBase); -}; - -template -class ValueConverter { - public: - virtual ~ValueConverter() = default; - virtual bool Convert(const base::Value& value, FieldType* field) const = 0; -}; - -template -class FieldConverter : public FieldConverterBase { - public: - explicit FieldConverter(const std::string& path, - FieldType StructType::* field, - ValueConverter* converter) - : FieldConverterBase(path), - field_pointer_(field), - value_converter_(converter) { - } - - bool ConvertField(const base::Value& value, StructType* dst) const override { - return value_converter_->Convert(value, &(dst->*field_pointer_)); - } - - private: - FieldType StructType::* field_pointer_; - std::unique_ptr> value_converter_; - DISALLOW_COPY_AND_ASSIGN(FieldConverter); -}; - -template -class BasicValueConverter; - -template <> -class BASE_EXPORT BasicValueConverter : public ValueConverter { - public: - BasicValueConverter() = default; - - bool Convert(const base::Value& value, int* field) const override; - - private: - DISALLOW_COPY_AND_ASSIGN(BasicValueConverter); -}; - -template <> -class BASE_EXPORT BasicValueConverter - : public ValueConverter { - public: - BasicValueConverter() = default; - - bool Convert(const base::Value& value, std::string* field) const override; - - private: - DISALLOW_COPY_AND_ASSIGN(BasicValueConverter); -}; - -template <> -class BASE_EXPORT 
BasicValueConverter - : public ValueConverter { - public: - BasicValueConverter() = default; - - bool Convert(const base::Value& value, string16* field) const override; - - private: - DISALLOW_COPY_AND_ASSIGN(BasicValueConverter); -}; - -template <> -class BASE_EXPORT BasicValueConverter : public ValueConverter { - public: - BasicValueConverter() = default; - - bool Convert(const base::Value& value, double* field) const override; - - private: - DISALLOW_COPY_AND_ASSIGN(BasicValueConverter); -}; - -template <> -class BASE_EXPORT BasicValueConverter : public ValueConverter { - public: - BasicValueConverter() = default; - - bool Convert(const base::Value& value, bool* field) const override; - - private: - DISALLOW_COPY_AND_ASSIGN(BasicValueConverter); -}; - -template -class ValueFieldConverter : public ValueConverter { - public: - typedef bool(*ConvertFunc)(const base::Value* value, FieldType* field); - - explicit ValueFieldConverter(ConvertFunc convert_func) - : convert_func_(convert_func) {} - - bool Convert(const base::Value& value, FieldType* field) const override { - return convert_func_(&value, field); - } - - private: - ConvertFunc convert_func_; - - DISALLOW_COPY_AND_ASSIGN(ValueFieldConverter); -}; - -template -class CustomFieldConverter : public ValueConverter { - public: - typedef bool (*ConvertFunc)(StringPiece value, FieldType* field); - - explicit CustomFieldConverter(ConvertFunc convert_func) - : convert_func_(convert_func) {} - - bool Convert(const base::Value& value, FieldType* field) const override { - std::string string_value; - return value.GetAsString(&string_value) && - convert_func_(string_value, field); - } - - private: - ConvertFunc convert_func_; - - DISALLOW_COPY_AND_ASSIGN(CustomFieldConverter); -}; - -template -class NestedValueConverter : public ValueConverter { - public: - NestedValueConverter() = default; - - bool Convert(const base::Value& value, NestedType* field) const override { - return converter_.Convert(value, field); - } - - 
private: - JSONValueConverter converter_; - DISALLOW_COPY_AND_ASSIGN(NestedValueConverter); -}; - -template -class RepeatedValueConverter - : public ValueConverter>> { - public: - RepeatedValueConverter() = default; - - bool Convert(const base::Value& value, - std::vector>* field) const override { - if (!value.is_list()) { - // The field is not a list. - return false; - } - - field->reserve(value.GetList().size()); - size_t i = 0; - for (const Value& element : value.GetList()) { - auto e = std::make_unique(); - if (basic_converter_.Convert(element, e.get())) { - field->push_back(std::move(e)); - } else { - DVLOG(1) << "failure at " << i << "-th element"; - return false; - } - i++; - } - return true; - } - - private: - BasicValueConverter basic_converter_; - DISALLOW_COPY_AND_ASSIGN(RepeatedValueConverter); -}; - -template -class RepeatedMessageConverter - : public ValueConverter>> { - public: - RepeatedMessageConverter() = default; - - bool Convert(const base::Value& value, - std::vector>* field) const override { - if (!value.is_list()) - return false; - - field->reserve(value.GetList().size()); - size_t i = 0; - for (const Value& element : value.GetList()) { - auto nested = std::make_unique(); - if (converter_.Convert(element, nested.get())) { - field->push_back(std::move(nested)); - } else { - DVLOG(1) << "failure at " << i << "-th element"; - return false; - } - i++; - } - return true; - } - - private: - JSONValueConverter converter_; - DISALLOW_COPY_AND_ASSIGN(RepeatedMessageConverter); -}; - -template -class RepeatedCustomValueConverter - : public ValueConverter>> { - public: - typedef bool(*ConvertFunc)(const base::Value* value, NestedType* field); - - explicit RepeatedCustomValueConverter(ConvertFunc convert_func) - : convert_func_(convert_func) {} - - bool Convert(const base::Value& value, - std::vector>* field) const override { - if (!value.is_list()) - return false; - - field->reserve(value.GetList().size()); - size_t i = 0; - for (const Value& element : 
value.GetList()) { - auto nested = std::make_unique(); - if ((*convert_func_)(&element, nested.get())) { - field->push_back(std::move(nested)); - } else { - DVLOG(1) << "failure at " << i << "-th element"; - return false; - } - i++; - } - return true; - } - - private: - ConvertFunc convert_func_; - DISALLOW_COPY_AND_ASSIGN(RepeatedCustomValueConverter); -}; - - -} // namespace internal - -template -class JSONValueConverter { - public: - JSONValueConverter() { - StructType::RegisterJSONConverter(this); - } - - void RegisterIntField(const std::string& field_name, - int StructType::* field) { - fields_.push_back( - std::make_unique>( - field_name, field, new internal::BasicValueConverter)); - } - - void RegisterStringField(const std::string& field_name, - std::string StructType::* field) { - fields_.push_back( - std::make_unique>( - field_name, field, new internal::BasicValueConverter)); - } - - void RegisterStringField(const std::string& field_name, - string16 StructType::* field) { - fields_.push_back( - std::make_unique>( - field_name, field, new internal::BasicValueConverter)); - } - - void RegisterBoolField(const std::string& field_name, - bool StructType::* field) { - fields_.push_back( - std::make_unique>( - field_name, field, new internal::BasicValueConverter)); - } - - void RegisterDoubleField(const std::string& field_name, - double StructType::* field) { - fields_.push_back( - std::make_unique>( - field_name, field, new internal::BasicValueConverter)); - } - - template - void RegisterNestedField( - const std::string& field_name, NestedType StructType::* field) { - fields_.push_back( - std::make_unique>( - field_name, field, new internal::NestedValueConverter)); - } - - template - void RegisterCustomField(const std::string& field_name, - FieldType StructType::*field, - bool (*convert_func)(StringPiece, FieldType*)) { - fields_.push_back( - std::make_unique>( - field_name, field, - new internal::CustomFieldConverter(convert_func))); - } - - template - void 
RegisterCustomValueField( - const std::string& field_name, - FieldType StructType::* field, - bool (*convert_func)(const base::Value*, FieldType*)) { - fields_.push_back( - std::make_unique>( - field_name, field, - new internal::ValueFieldConverter(convert_func))); - } - - void RegisterRepeatedInt( - const std::string& field_name, - std::vector> StructType::*field) { - fields_.push_back(std::make_unique>>>( - field_name, field, new internal::RepeatedValueConverter)); - } - - void RegisterRepeatedString( - const std::string& field_name, - std::vector> StructType::*field) { - fields_.push_back( - std::make_unique>>>( - field_name, field, - new internal::RepeatedValueConverter)); - } - - void RegisterRepeatedString( - const std::string& field_name, - std::vector> StructType::*field) { - fields_.push_back(std::make_unique>>>( - field_name, field, new internal::RepeatedValueConverter)); - } - - void RegisterRepeatedDouble( - const std::string& field_name, - std::vector> StructType::*field) { - fields_.push_back(std::make_unique>>>( - field_name, field, new internal::RepeatedValueConverter)); - } - - void RegisterRepeatedBool( - const std::string& field_name, - std::vector> StructType::*field) { - fields_.push_back(std::make_unique>>>( - field_name, field, new internal::RepeatedValueConverter)); - } - - template - void RegisterRepeatedCustomValue( - const std::string& field_name, - std::vector> StructType::*field, - bool (*convert_func)(const base::Value*, NestedType*)) { - fields_.push_back( - std::make_unique>>>( - field_name, field, - new internal::RepeatedCustomValueConverter( - convert_func))); - } - - template - void RegisterRepeatedMessage( - const std::string& field_name, - std::vector> StructType::*field) { - fields_.push_back( - std::make_unique>>>( - field_name, field, - new internal::RepeatedMessageConverter)); - } - - bool Convert(const base::Value& value, StructType* output) const { - if (!value.is_dict()) - return false; - - for (size_t i = 0; i < 
fields_.size(); ++i) { - const internal::FieldConverterBase* field_converter = - fields_[i].get(); - const base::Value* field = value.FindPath(field_converter->field_path()); - if (field) { - if (!field_converter->ConvertField(*field, output)) { - DVLOG(1) << "failure at field " << field_converter->field_path(); - return false; - } - } - } - return true; - } - - private: - std::vector>> - fields_; - - DISALLOW_COPY_AND_ASSIGN(JSONValueConverter); -}; - -} // namespace base - -#endif // BASE_JSON_JSON_VALUE_CONVERTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/synchronization/condition_variable.h b/TMessagesProj/jni/voip/webrtc/base/synchronization/condition_variable.h deleted file mode 100644 index d92b73808..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/synchronization/condition_variable.h +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright (c) 2011 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// ConditionVariable wraps pthreads condition variable synchronization or, on -// Windows, simulates it. This functionality is very helpful for having -// several threads wait for an event, as is common with a thread pool managed -// by a master. The meaning of such an event in the (worker) thread pool -// scenario is that additional tasks are now available for processing. It is -// used in Chrome in the DNS prefetching system to notify worker threads that -// a queue now has items (tasks) which need to be tended to. A related use -// would have a pool manager waiting on a ConditionVariable, waiting for a -// thread in the pool to announce (signal) that there is now more room in a -// (bounded size) communications queue for the manager to deposit tasks, or, -// as a second example, that the queue of tasks is completely empty and all -// workers are waiting. 
-// -// USAGE NOTE 1: spurious signal events are possible with this and -// most implementations of condition variables. As a result, be -// *sure* to retest your condition before proceeding. The following -// is a good example of doing this correctly: -// -// while (!work_to_be_done()) Wait(...); -// -// In contrast do NOT do the following: -// -// if (!work_to_be_done()) Wait(...); // Don't do this. -// -// Especially avoid the above if you are relying on some other thread only -// issuing a signal up *if* there is work-to-do. There can/will -// be spurious signals. Recheck state on waiting thread before -// assuming the signal was intentional. Caveat caller ;-). -// -// USAGE NOTE 2: Broadcast() frees up all waiting threads at once, -// which leads to contention for the locks they all held when they -// called Wait(). This results in POOR performance. A much better -// approach to getting a lot of threads out of Wait() is to have each -// thread (upon exiting Wait()) call Signal() to free up another -// Wait'ing thread. Look at condition_variable_unittest.cc for -// both examples. -// -// Broadcast() can be used nicely during teardown, as it gets the job -// done, and leaves no sleeping threads... and performance is less -// critical at that point. -// -// The semantics of Broadcast() are carefully crafted so that *all* -// threads that were waiting when the request was made will indeed -// get signaled. Some implementations mess up, and don't signal them -// all, while others allow the wait to be effectively turned off (for -// a while while waiting threads come around). This implementation -// appears correct, as it will not "lose" any signals, and will guarantee -// that all threads get signaled by Broadcast(). -// -// This implementation offers support for "performance" in its selection of -// which thread to revive. 
Performance, in direct contrast with "fairness," -// assures that the thread that most recently began to Wait() is selected by -// Signal to revive. Fairness would (if publicly supported) assure that the -// thread that has Wait()ed the longest is selected. The default policy -// may improve performance, as the selected thread may have a greater chance of -// having some of its stack data in various CPU caches. - -#ifndef BASE_SYNCHRONIZATION_CONDITION_VARIABLE_H_ -#define BASE_SYNCHRONIZATION_CONDITION_VARIABLE_H_ - -#if defined(OS_POSIX) || defined(OS_FUCHSIA) -#include -#endif - -#include "base/base_export.h" -#include "base/logging.h" -#include "base/macros.h" -#include "base/synchronization/lock.h" -#include "build/build_config.h" - -#if defined(OS_WIN) -#include "base/win/windows_types.h" -#endif - -namespace base { - -class TimeDelta; - -class BASE_EXPORT ConditionVariable { - public: - // Construct a cv for use with ONLY one user lock. - explicit ConditionVariable(Lock* user_lock); - - ~ConditionVariable(); - - // Wait() releases the caller's critical section atomically as it starts to - // sleep, and the reacquires it when it is signaled. The wait functions are - // susceptible to spurious wakeups. (See usage note 1 for more details.) - void Wait(); - void TimedWait(const TimeDelta& max_time); - - // Broadcast() revives all waiting threads. (See usage note 2 for more - // details.) - void Broadcast(); - // Signal() revives one waiting thread. - void Signal(); - - // Declares that this ConditionVariable will only ever be used by a thread - // that is idle at the bottom of its stack and waiting for work (in - // particular, it is not synchronously waiting on this ConditionVariable - // before resuming ongoing work). This is useful to avoid telling - // base-internals that this thread is "blocked" when it's merely idle and - // ready to do work. As such, this is only expected to be used by thread and - // thread pool impls. 
- void declare_only_used_while_idle() { waiting_is_blocking_ = false; } - - private: - -#if defined(OS_WIN) - CHROME_CONDITION_VARIABLE cv_; - CHROME_SRWLOCK* const srwlock_; -#elif defined(OS_POSIX) || defined(OS_FUCHSIA) - pthread_cond_t condition_; - pthread_mutex_t* user_mutex_; -#endif - -#if DCHECK_IS_ON() - base::Lock* const user_lock_; // Needed to adjust shadow lock state on wait. -#endif - - // Whether a thread invoking Wait() on this ConditionalVariable should be - // considered blocked as opposed to idle (and potentially replaced if part of - // a pool). - bool waiting_is_blocking_ = true; - - DISALLOW_COPY_AND_ASSIGN(ConditionVariable); -}; - -} // namespace base - -#endif // BASE_SYNCHRONIZATION_CONDITION_VARIABLE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/threading/thread_collision_warner.h b/TMessagesProj/jni/voip/webrtc/base/threading/thread_collision_warner.h deleted file mode 100644 index 7f7443b21..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/threading/thread_collision_warner.h +++ /dev/null @@ -1,252 +0,0 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef BASE_THREADING_THREAD_COLLISION_WARNER_H_ -#define BASE_THREADING_THREAD_COLLISION_WARNER_H_ - -#include - -#include "base/atomicops.h" -#include "base/base_export.h" -#include "base/compiler_specific.h" -#include "base/macros.h" - -// A helper class alongside macros to be used to verify assumptions about thread -// safety of a class. -// -// Example: Queue implementation non thread-safe but still usable if clients -// are synchronized somehow. -// -// In this case the macro DFAKE_SCOPED_LOCK has to be -// used, it checks that if a thread is inside the push/pop then -// noone else is still inside the pop/push -// -// class NonThreadSafeQueue { -// public: -// ... -// void push(int) { DFAKE_SCOPED_LOCK(push_pop_); ... 
} -// int pop() { DFAKE_SCOPED_LOCK(push_pop_); ... } -// ... -// private: -// DFAKE_MUTEX(push_pop_); -// }; -// -// -// Example: Queue implementation non thread-safe but still usable if clients -// are synchronized somehow, it calls a method to "protect" from -// a "protected" method -// -// In this case the macro DFAKE_SCOPED_RECURSIVE_LOCK -// has to be used, it checks that if a thread is inside the push/pop -// then noone else is still inside the pop/push -// -// class NonThreadSafeQueue { -// public: -// void push(int) { -// DFAKE_SCOPED_LOCK(push_pop_); -// ... -// } -// int pop() { -// DFAKE_SCOPED_RECURSIVE_LOCK(push_pop_); -// bar(); -// ... -// } -// void bar() { DFAKE_SCOPED_RECURSIVE_LOCK(push_pop_); ... } -// ... -// private: -// DFAKE_MUTEX(push_pop_); -// }; -// -// -// Example: Queue implementation not usable even if clients are synchronized, -// so only one thread in the class life cycle can use the two members -// push/pop. -// -// In this case the macro DFAKE_SCOPED_LOCK_THREAD_LOCKED pins the -// specified -// critical section the first time a thread enters push or pop, from -// that time on only that thread is allowed to execute push or pop. -// -// class NonThreadSafeQueue { -// public: -// ... -// void push(int) { DFAKE_SCOPED_LOCK_THREAD_LOCKED(push_pop_); ... } -// int pop() { DFAKE_SCOPED_LOCK_THREAD_LOCKED(push_pop_); ... } -// ... -// private: -// DFAKE_MUTEX(push_pop_); -// }; -// -// -// Example: Class that has to be contructed/destroyed on same thread, it has -// a "shareable" method (with external synchronization) and a not -// shareable method (even with external synchronization). -// -// In this case 3 Critical sections have to be defined -// -// class ExoticClass { -// public: -// ExoticClass() { DFAKE_SCOPED_LOCK_THREAD_LOCKED(ctor_dtor_); ... } -// ~ExoticClass() { DFAKE_SCOPED_LOCK_THREAD_LOCKED(ctor_dtor_); ... } -// -// void Shareable() { DFAKE_SCOPED_LOCK(shareable_section_); ... 
} -// void NotShareable() { DFAKE_SCOPED_LOCK_THREAD_LOCKED(ctor_dtor_); ... } -// ... -// private: -// DFAKE_MUTEX(ctor_dtor_); -// DFAKE_MUTEX(shareable_section_); -// }; - - -#if !defined(NDEBUG) - -#define DFAKE_UNIQUE_VARIABLE_CONCAT(a, b) a##b -// CONCAT1 provides extra level of indirection so that __LINE__ macro expands. -#define DFAKE_UNIQUE_VARIABLE_CONCAT1(a, b) DFAKE_UNIQUE_VARIABLE_CONCAT(a, b) -#define DFAKE_UNIQUE_VARIABLE_NAME(a) DFAKE_UNIQUE_VARIABLE_CONCAT1(a, __LINE__) - -// Defines a class member that acts like a mutex. It is used only as a -// verification tool. -#define DFAKE_MUTEX(obj) \ - mutable base::ThreadCollisionWarner obj -// Asserts the call is never called simultaneously in two threads. Used at -// member function scope. -#define DFAKE_SCOPED_LOCK(obj) \ - base::ThreadCollisionWarner::ScopedCheck DFAKE_UNIQUE_VARIABLE_NAME( \ - s_check_)(&obj) -// Asserts the call is never called simultaneously in two threads. Used at -// member function scope. Same as DFAKE_SCOPED_LOCK but allows recursive locks. -#define DFAKE_SCOPED_RECURSIVE_LOCK(obj) \ - base::ThreadCollisionWarner::ScopedRecursiveCheck \ - DFAKE_UNIQUE_VARIABLE_NAME(sr_check)(&obj) -// Asserts the code is always executed in the same thread. -#define DFAKE_SCOPED_LOCK_THREAD_LOCKED(obj) \ - base::ThreadCollisionWarner::Check DFAKE_UNIQUE_VARIABLE_NAME(check_)(&obj) - -#else - -#define DFAKE_MUTEX(obj) typedef void InternalFakeMutexType##obj -#define DFAKE_SCOPED_LOCK(obj) ((void)0) -#define DFAKE_SCOPED_RECURSIVE_LOCK(obj) ((void)0) -#define DFAKE_SCOPED_LOCK_THREAD_LOCKED(obj) ((void)0) - -#endif - -namespace base { - -// The class ThreadCollisionWarner uses an Asserter to notify the collision -// AsserterBase is the interfaces and DCheckAsserter is the default asserter -// used. 
During the unit tests is used another class that doesn't "DCHECK" -// in case of collision (check thread_collision_warner_unittests.cc) -struct BASE_EXPORT AsserterBase { - virtual ~AsserterBase() = default; - virtual void warn() = 0; -}; - -struct BASE_EXPORT DCheckAsserter : public AsserterBase { - ~DCheckAsserter() override = default; - void warn() override; -}; - -class BASE_EXPORT ThreadCollisionWarner { - public: - // The parameter asserter is there only for test purpose - explicit ThreadCollisionWarner(AsserterBase* asserter = new DCheckAsserter()) - : valid_thread_id_(0), - counter_(0), - asserter_(asserter) {} - - ~ThreadCollisionWarner() { - delete asserter_; - } - - // This class is meant to be used through the macro - // DFAKE_SCOPED_LOCK_THREAD_LOCKED - // it doesn't leave the critical section, as opposed to ScopedCheck, - // because the critical section being pinned is allowed to be used only - // from one thread - class BASE_EXPORT Check { - public: - explicit Check(ThreadCollisionWarner* warner) - : warner_(warner) { - warner_->EnterSelf(); - } - - ~Check() = default; - - private: - ThreadCollisionWarner* warner_; - - DISALLOW_COPY_AND_ASSIGN(Check); - }; - - // This class is meant to be used through the macro - // DFAKE_SCOPED_LOCK - class BASE_EXPORT ScopedCheck { - public: - explicit ScopedCheck(ThreadCollisionWarner* warner) - : warner_(warner) { - warner_->Enter(); - } - - ~ScopedCheck() { - warner_->Leave(); - } - - private: - ThreadCollisionWarner* warner_; - - DISALLOW_COPY_AND_ASSIGN(ScopedCheck); - }; - - // This class is meant to be used through the macro - // DFAKE_SCOPED_RECURSIVE_LOCK - class BASE_EXPORT ScopedRecursiveCheck { - public: - explicit ScopedRecursiveCheck(ThreadCollisionWarner* warner) - : warner_(warner) { - warner_->EnterSelf(); - } - - ~ScopedRecursiveCheck() { - warner_->Leave(); - } - - private: - ThreadCollisionWarner* warner_; - - DISALLOW_COPY_AND_ASSIGN(ScopedRecursiveCheck); - }; - - private: - // This method 
stores the current thread identifier and does a DCHECK - // if a another thread has already done it, it is safe if same thread - // calls this multiple time (recursion allowed). - void EnterSelf(); - - // Same as EnterSelf but recursion is not allowed. - void Enter(); - - // Removes the thread_id stored in order to allow other threads to - // call EnterSelf or Enter. - void Leave(); - - // This stores the thread id that is inside the critical section, if the - // value is 0 then no thread is inside. - volatile subtle::Atomic32 valid_thread_id_; - - // Counter to trace how many time a critical section was "pinned" - // (when allowed) in order to unpin it when counter_ reaches 0. - volatile subtle::Atomic32 counter_; - - // Here only for class unit tests purpose, during the test I need to not - // DCHECK but notify the collision with something else. - AsserterBase* asserter_; - - DISALLOW_COPY_AND_ASSIGN(ThreadCollisionWarner); -}; - -} // namespace base - -#endif // BASE_THREADING_THREAD_COLLISION_WARNER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/timer/lap_timer.h b/TMessagesProj/jni/voip/webrtc/base/timer/lap_timer.h deleted file mode 100644 index c28a0df1d..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/timer/lap_timer.h +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef BASE_TIMER_LAP_TIMER_H_ -#define BASE_TIMER_LAP_TIMER_H_ - -#include "base/base_export.h" -#include "base/macros.h" -#include "base/sequence_checker.h" -#include "base/time/time.h" - -namespace base { - -// LapTimer is used to calculate average times per "Lap" in perf tests. -// NextLap increments the lap counter, used in counting the per lap averages. -// If you initialize the LapTimer with a non zero |warmup_laps|, it will ignore -// the times for that many laps at the start. 
-// If you set the |time_limit| then you can use HasTimeLimitExpired() to see if -// the current accumulated time has crossed that threshold, with an optimization -// that it only tests this every |check_interval| laps. -// -// See base/timer/lap_timer_unittest.cc for a usage example. -// -class BASE_EXPORT LapTimer { - public: - enum class TimerMethod { - // Measures CPU time consumed by the thread running the LapTimer. - kUseThreadTicks, - // Measures elapsed wall time (default). - kUseTimeTicks - }; - - LapTimer(int warmup_laps, - TimeDelta time_limit, - int check_interval, - TimerMethod timing_method = TimerMethod::kUseTimeTicks); - // Create LapTimer with sensible default values. - LapTimer(TimerMethod timing_method = TimerMethod::kUseTimeTicks); - // Sets the timer back to its starting state. - void Reset(); - // Sets the start point to now. - void Start(); - // Returns true if there are no more warmup laps to do. - bool IsWarmedUp() const; - // Advance the lap counter and update the accumulated time. - // The accumulated time is only updated every check_interval laps. - // If accumulating then the start point will also be updated. - void NextLap(); - // Returns true if the stored time has exceeded the time limit specified. - // May cause a call to Store(). - bool HasTimeLimitExpired() const; - // The average time taken per lap. - TimeDelta TimePerLap() const; - // The number of laps per second. - float LapsPerSecond() const; - // The number of laps recorded. - int NumLaps() const; - - private: - // Returns true if all lap times have been timed. Only true every n'th - // lap, where n = check_interval. - bool HasTimedAllLaps() const; - // Returns the current accumulated time. 
- TimeDelta GetAccumulatedTime() const; - - const int warmup_laps_; - const TimeDelta time_limit_; - const int check_interval_; - const TimerMethod method_; - - ThreadTicks start_thread_ticks_; - TimeTicks start_time_ticks_; - - ThreadTicks last_timed_lap_end_thread_ticks_; - TimeTicks last_timed_lap_end_ticks_; - - int num_laps_; - int remaining_warmups_ = 0; - int remaining_no_check_laps_ = 0; - - SEQUENCE_CHECKER(sequence_checker_); - DISALLOW_COPY_AND_ASSIGN(LapTimer); -}; -} // namespace base - -#endif // BASE_TIMER_LAP_TIMER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/timer/timer.h b/TMessagesProj/jni/voip/webrtc/base/timer/timer.h deleted file mode 100644 index 240454f2b..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/timer/timer.h +++ /dev/null @@ -1,392 +0,0 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// OneShotTimer, RepeatingTimer and RetainingOneShotTimer provide a simple timer -// API. As the names suggest, OneShotTimer calls you back once after a time -// delay expires. -// RepeatingTimer on the other hand calls you back periodically with the -// prescribed time interval. -// RetainingOneShotTimer doesn't repeat the task itself like RepeatingTimer, but -// retains the given task after the time out. You can restart it with Reset -// again without giving new task to Start. -// -// All of OneShotTimer, RepeatingTimer and RetainingOneShotTimer cancel the -// timer when they go out of scope, which makes it easy to ensure that you do -// not get called when your object has gone out of scope. Just instantiate a -// timer as a member variable of the class for which you wish to receive timer -// events. 
-// -// Sample RepeatingTimer usage: -// -// class MyClass { -// public: -// void StartDoingStuff() { -// timer_.Start(FROM_HERE, TimeDelta::FromSeconds(1), -// this, &MyClass::DoStuff); -// } -// void StopDoingStuff() { -// timer_.Stop(); -// } -// private: -// void DoStuff() { -// // This method is called every second to do stuff. -// ... -// } -// base::RepeatingTimer timer_; -// }; -// -// Timers also support a Reset method, which allows you to easily defer the -// timer event until the timer delay passes once again. So, in the above -// example, if 0.5 seconds have already passed, calling Reset on |timer_| -// would postpone DoStuff by another 1 second. In other words, Reset is -// shorthand for calling Stop and then Start again with the same arguments. -// -// These APIs are not thread safe. When a method is called (except the -// constructor), all further method calls must be on the same sequence until -// Stop(). -// -// By default, the scheduled tasks will be run on the same sequence that the -// Timer was *started on*. To mock time in unit tests, some old tests used -// SetTaskRunner() to schedule the delay on a test-controlled TaskRunner. The -// modern and preferred approach to mock time is to use TaskEnvironment's -// MOCK_TIME mode. - -#ifndef BASE_TIMER_TIMER_H_ -#define BASE_TIMER_TIMER_H_ - -// IMPORTANT: If you change timer code, make sure that all tests (including -// disabled ones) from timer_unittests.cc pass locally. Some are disabled -// because they're flaky on the buildbot, but when you run them locally you -// should be able to tell the difference. 
- -#include - -#include "base/base_export.h" -#include "base/bind.h" -#include "base/bind_helpers.h" -#include "base/callback.h" -#include "base/location.h" -#include "base/macros.h" -#include "base/sequence_checker_impl.h" -#include "base/sequenced_task_runner.h" -#include "base/time/time.h" - -namespace base { - -class TickClock; - -namespace internal { - -class BaseTimerTaskInternal; - -//----------------------------------------------------------------------------- -// This class wraps TaskRunner::PostDelayedTask to manage delayed and repeating -// tasks. See meta comment above for thread-safety requirements. -// Do not use this class directly. Use one of OneShotTimer, RepeatingTimer or -// RetainingOneShotTimer. -// -class BASE_EXPORT TimerBase { - public: - // Constructs a timer. Start must be called later to set task info. - // If |tick_clock| is provided, it is used instead of TimeTicks::Now() to get - // TimeTicks when scheduling tasks. - TimerBase(); - explicit TimerBase(const TickClock* tick_clock); - - // Construct a timer with task info. - // If |tick_clock| is provided, it is used instead of TimeTicks::Now() to get - // TimeTicks when scheduling tasks. - TimerBase(const Location& posted_from, TimeDelta delay); - TimerBase(const Location& posted_from, - TimeDelta delay, - const TickClock* tick_clock); - - virtual ~TimerBase(); - - // Returns true if the timer is running (i.e., not stopped). - bool IsRunning() const; - - // Returns the current delay for this timer. - TimeDelta GetCurrentDelay() const; - - // Sets the task runner on which the delayed task should be scheduled when - // this Timer is running. This method can only be called while this Timer - // isn't running. This is an alternative (old) approach to mock time in tests. - // The modern and preferred approach is to use - // TaskEnvironment::TimeSource::MOCK_TIME. 
To avoid racy usage of Timer, - // |task_runner| must run tasks on the same sequence which this Timer is bound - // to (started from). TODO(gab): Migrate all callers to - // TaskEnvironment::TimeSource::MOCK_TIME. - virtual void SetTaskRunner(scoped_refptr task_runner); - - // Call this method to stop and cancel the timer. It is a no-op if the timer - // is not running. - virtual void Stop(); - - // Stop running task (if any) and abandon scheduled task (if any). - void AbandonAndStop() { - AbandonScheduledTask(); - - Stop(); - // No more member accesses here: |this| could be deleted at this point. - } - - // Call this method to reset the timer delay. The user task must be set. If - // the timer is not running, this will start it by posting a task. - virtual void Reset(); - - const TimeTicks& desired_run_time() const { return desired_run_time_; } - - protected: - virtual void OnStop() = 0; - virtual void RunUserTask() = 0; - - // Returns the current tick count. - TimeTicks Now() const; - - void set_desired_run_time(TimeTicks desired) { desired_run_time_ = desired; } - void set_is_running(bool running) { is_running_ = running; } - - const Location& posted_from() const { return posted_from_; } - - // The task runner on which the task should be scheduled. If it is null, the - // task runner for the current sequence will be used. - scoped_refptr task_runner_; - - // Timer isn't thread-safe and must only be used on its origin sequence - // (sequence on which it was started). Once fully Stop()'ed it may be - // destroyed or restarted on another sequence. - SequenceChecker origin_sequence_checker_; - - // Allocates a new |scheduled_task_| and posts it on the current sequence with - // the given |delay|. |scheduled_task_| must be null. |scheduled_run_time_| - // and |desired_run_time_| are reset to Now() + delay. 
- void PostNewScheduledTask(TimeDelta delay); - - void StartInternal(const Location& posted_from, TimeDelta delay); - - private: - friend class BaseTimerTaskInternal; - - // Returns the task runner on which the task should be scheduled. If the - // corresponding |task_runner_| field is null, the task runner for the current - // sequence is returned. - scoped_refptr GetTaskRunner(); - - // Disable |scheduled_task_| and abandon it so that it no longer refers back - // to this object. - void AbandonScheduledTask(); - - // Called by BaseTimerTaskInternal when the delayed task fires. - void RunScheduledTask(); - - // When non-null, the |scheduled_task_| was posted to call RunScheduledTask() - // at |scheduled_run_time_|. - BaseTimerTaskInternal* scheduled_task_; - - // Location in user code. - Location posted_from_; - // Delay requested by user. - TimeDelta delay_; - - // The time at which |scheduled_task_| is expected to fire. This time can be a - // "zero" TimeTicks if the task must be run immediately. - TimeTicks scheduled_run_time_; - - // The desired run time of |user_task_|. The user may update this at any time, - // even if their previous request has not run yet. If |desired_run_time_| is - // greater than |scheduled_run_time_|, a continuation task will be posted to - // wait for the remaining time. This allows us to reuse the pending task so as - // not to flood the delayed queues with orphaned tasks when the user code - // excessively Stops and Starts the timer. This time can be a "zero" TimeTicks - // if the task must be run immediately. - TimeTicks desired_run_time_; - - // The tick clock used to calculate the run time for scheduled tasks. - const TickClock* const tick_clock_; - - // If true, |user_task_| is scheduled to run sometime in the future. - bool is_running_; - - DISALLOW_COPY_AND_ASSIGN(TimerBase); -}; - -} // namespace internal - -//----------------------------------------------------------------------------- -// A simple, one-shot timer. 
See usage notes at the top of the file. -class BASE_EXPORT OneShotTimer : public internal::TimerBase { - public: - OneShotTimer(); - explicit OneShotTimer(const TickClock* tick_clock); - ~OneShotTimer() override; - - // Start the timer to run at the given |delay| from now. If the timer is - // already running, it will be replaced to call the given |user_task|. - virtual void Start(const Location& posted_from, - TimeDelta delay, - OnceClosure user_task); - - // Start the timer to run at the given |delay| from now. If the timer is - // already running, it will be replaced to call a task formed from - // |receiver->*method|. - template - void Start(const Location& posted_from, - TimeDelta delay, - Receiver* receiver, - void (Receiver::*method)()) { - Start(posted_from, delay, BindOnce(method, Unretained(receiver))); - } - - // Run the scheduled task immediately, and stop the timer. The timer needs to - // be running. - void FireNow(); - - private: - void OnStop() final; - void RunUserTask() final; - - OnceClosure user_task_; - - DISALLOW_COPY_AND_ASSIGN(OneShotTimer); -}; - -//----------------------------------------------------------------------------- -// A simple, repeating timer. See usage notes at the top of the file. -class BASE_EXPORT RepeatingTimer : public internal::TimerBase { - public: - RepeatingTimer(); - explicit RepeatingTimer(const TickClock* tick_clock); - ~RepeatingTimer() override; - - RepeatingTimer(const Location& posted_from, - TimeDelta delay, - RepeatingClosure user_task); - RepeatingTimer(const Location& posted_from, - TimeDelta delay, - RepeatingClosure user_task, - const TickClock* tick_clock); - - // Start the timer to run at the given |delay| from now. If the timer is - // already running, it will be replaced to call the given |user_task|. - virtual void Start(const Location& posted_from, - TimeDelta delay, - RepeatingClosure user_task); - - // Start the timer to run at the given |delay| from now. 
If the timer is - // already running, it will be replaced to call a task formed from - // |receiver->*method|. - template - void Start(const Location& posted_from, - TimeDelta delay, - Receiver* receiver, - void (Receiver::*method)()) { - Start(posted_from, delay, BindRepeating(method, Unretained(receiver))); - } - - const RepeatingClosure& user_task() const { return user_task_; } - - private: - // Mark this final, so that the destructor can call this safely. - void OnStop() final; - - void RunUserTask() override; - - RepeatingClosure user_task_; - - DISALLOW_COPY_AND_ASSIGN(RepeatingTimer); -}; - -//----------------------------------------------------------------------------- -// A simple, one-shot timer with the retained user_task which is reused for -// multiple invocations of Start(). See usage notes at the top of the file. -class BASE_EXPORT RetainingOneShotTimer : public internal::TimerBase { - public: - RetainingOneShotTimer(); - explicit RetainingOneShotTimer(const TickClock* tick_clock); - ~RetainingOneShotTimer() override; - - RetainingOneShotTimer(const Location& posted_from, - TimeDelta delay, - RepeatingClosure user_task); - RetainingOneShotTimer(const Location& posted_from, - TimeDelta delay, - RepeatingClosure user_task, - const TickClock* tick_clock); - - // Start the timer to run at the given |delay| from now. If the timer is - // already running, it will be replaced to call the given |user_task|. - virtual void Start(const Location& posted_from, - TimeDelta delay, - RepeatingClosure user_task); - - // Start the timer to run at the given |delay| from now. If the timer is - // already running, it will be replaced to call a task formed from - // |receiver->*method|. 
- template - void Start(const Location& posted_from, - TimeDelta delay, - Receiver* receiver, - void (Receiver::*method)()) { - Start(posted_from, delay, BindRepeating(method, Unretained(receiver))); - } - - const RepeatingClosure& user_task() const { return user_task_; } - - protected: - void set_user_task(const RepeatingClosure& task) { user_task_ = task; } - - private: - // Mark this final, so that the destructor can call this safely. - void OnStop() final; - - void RunUserTask() override; - - RepeatingClosure user_task_; - - DISALLOW_COPY_AND_ASSIGN(RetainingOneShotTimer); -}; - -//----------------------------------------------------------------------------- -// A Delay timer is like The Button from Lost. Once started, you have to keep -// calling Reset otherwise it will call the given method on the sequence it was -// initially Reset() from. -// -// Once created, it is inactive until Reset is called. Once |delay| seconds have -// passed since the last call to Reset, the callback is made. Once the callback -// has been made, it's inactive until Reset is called again. -// -// If destroyed, the timeout is canceled and will not occur even if already -// inflight. 
-class DelayTimer { - public: - template - DelayTimer(const Location& posted_from, - TimeDelta delay, - Receiver* receiver, - void (Receiver::*method)()) - : DelayTimer(posted_from, delay, receiver, method, nullptr) {} - - template - DelayTimer(const Location& posted_from, - TimeDelta delay, - Receiver* receiver, - void (Receiver::*method)(), - const TickClock* tick_clock) - : timer_(posted_from, - delay, - BindRepeating(method, Unretained(receiver)), - tick_clock) {} - - void Reset() { timer_.Reset(); } - - private: - RetainingOneShotTimer timer_; - - DISALLOW_COPY_AND_ASSIGN(DelayTimer); -}; - -} // namespace base - -#endif // BASE_TIMER_TIMER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc index 59bd1e0c7..876d4c0bf 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc @@ -83,8 +83,8 @@ BroadcastResourceListener::CreateAdapterResource() { MutexLock lock(&lock_); RTC_DCHECK(is_listening_); rtc::scoped_refptr adapter = - new rtc::RefCountedObject(source_resource_->Name() + - "Adapter"); + rtc::make_ref_counted(source_resource_->Name() + + "Adapter"); adapters_.push_back(adapter); return adapter; } diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc index ac1b1db17..741575ae3 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc @@ -15,12 +15,12 @@ #include #include "absl/algorithm/container.h" +#include "api/sequence_checker.h" #include "api/video/video_adaptation_counters.h" #include "call/adaptation/video_stream_adapter.h" #include "rtc_base/logging.h" #include 
"rtc_base/ref_counted_object.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { @@ -72,7 +72,7 @@ ResourceAdaptationProcessor::ResourceAdaptationProcessor( VideoStreamAdapter* stream_adapter) : task_queue_(nullptr), resource_listener_delegate_( - new rtc::RefCountedObject(this)), + rtc::make_ref_counted(this)), resources_(), stream_adapter_(stream_adapter), last_reported_source_restrictions_(), diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc index 4fc4743a3..6620eff31 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc @@ -16,6 +16,7 @@ #include "absl/types/optional.h" #include "absl/types/variant.h" +#include "api/sequence_checker.h" #include "api/video/video_adaptation_counters.h" #include "api/video/video_adaptation_reason.h" #include "api/video_codecs/video_encoder.h" @@ -25,7 +26,6 @@ #include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/synchronization/sequence_checker.h" namespace webrtc { @@ -45,19 +45,6 @@ int GetHigherFrameRateThan(int fps) { : std::numeric_limits::max(); } -// For resolution, the steps we take are 3/5 (down) and 5/3 (up). -// Notice the asymmetry of which restriction property is set depending on if -// we are adapting up or down: -// - VideoSourceRestrictor::DecreaseResolution() sets the max_pixels_per_frame() -// to the desired target and target_pixels_per_frame() to null. -// - VideoSourceRestrictor::IncreaseResolutionTo() sets the -// target_pixels_per_frame() to the desired target, and max_pixels_per_frame() -// is set according to VideoSourceRestrictor::GetIncreasedMaxPixelsWanted(). 
-int GetLowerResolutionThan(int pixel_count) { - RTC_DCHECK(pixel_count != std::numeric_limits::max()); - return (pixel_count * 3) / 5; -} - int GetIncreasedMaxPixelsWanted(int target_pixels) { if (target_pixels == std::numeric_limits::max()) return std::numeric_limits::max(); @@ -75,13 +62,14 @@ int GetIncreasedMaxPixelsWanted(int target_pixels) { } bool CanDecreaseResolutionTo(int target_pixels, + int target_pixels_min, const VideoStreamInputState& input_state, const VideoSourceRestrictions& restrictions) { int max_pixels_per_frame = rtc::dchecked_cast(restrictions.max_pixels_per_frame().value_or( std::numeric_limits::max())); return target_pixels < max_pixels_per_frame && - target_pixels >= input_state.min_pixels_per_frame(); + target_pixels_min >= input_state.min_pixels_per_frame(); } bool CanIncreaseResolutionTo(int target_pixels, @@ -109,6 +97,11 @@ bool CanIncreaseFrameRateTo(int max_frame_rate, } bool MinPixelLimitReached(const VideoStreamInputState& input_state) { + if (input_state.single_active_stream_pixels().has_value()) { + return GetLowerResolutionThan( + input_state.single_active_stream_pixels().value()) < + input_state.min_pixels_per_frame(); + } return input_state.frame_size_pixels().has_value() && GetLowerResolutionThan(input_state.frame_size_pixels().value()) < input_state.min_pixels_per_frame(); @@ -139,6 +132,19 @@ VideoSourceRestrictions FilterRestrictionsByDegradationPreference( return source_restrictions; } +// For resolution, the steps we take are 3/5 (down) and 5/3 (up). +// Notice the asymmetry of which restriction property is set depending on if +// we are adapting up or down: +// - VideoSourceRestrictor::DecreaseResolution() sets the max_pixels_per_frame() +// to the desired target and target_pixels_per_frame() to null. +// - VideoSourceRestrictor::IncreaseResolutionTo() sets the +// target_pixels_per_frame() to the desired target, and max_pixels_per_frame() +// is set according to VideoSourceRestrictor::GetIncreasedMaxPixelsWanted(). 
+int GetLowerResolutionThan(int pixel_count) { + RTC_DCHECK(pixel_count != std::numeric_limits::max()); + return (pixel_count * 3) / 5; +} + // TODO(hbos): Use absl::optional<> instead? int GetHigherResolutionThan(int pixel_count) { return pixel_count != std::numeric_limits::max() @@ -470,7 +476,11 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution( const RestrictionsWithCounters& current_restrictions) { int target_pixels = GetLowerResolutionThan(input_state.frame_size_pixels().value()); - if (!CanDecreaseResolutionTo(target_pixels, input_state, + // Use single active stream if set, this stream could be lower than the input. + int target_pixels_min = + GetLowerResolutionThan(input_state.single_active_stream_pixels().value_or( + input_state.frame_size_pixels().value())); + if (!CanDecreaseResolutionTo(target_pixels, target_pixels_min, input_state, current_restrictions.restrictions)) { return Adaptation::Status::kLimitReached; } @@ -693,4 +703,27 @@ VideoStreamAdapter::AwaitingFrameSizeChange::AwaitingFrameSizeChange( : pixels_increased(pixels_increased), frame_size_pixels(frame_size_pixels) {} +absl::optional VideoStreamAdapter::GetSingleActiveLayerPixels( + const VideoCodec& codec) { + int num_active = 0; + absl::optional pixels; + if (codec.codecType == VideoCodecType::kVideoCodecVP9) { + for (int i = 0; i < codec.VP9().numberOfSpatialLayers; ++i) { + if (codec.spatialLayers[i].active) { + ++num_active; + pixels = codec.spatialLayers[i].width * codec.spatialLayers[i].height; + } + } + } else { + for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { + if (codec.simulcastStream[i].active) { + ++num_active; + pixels = + codec.simulcastStream[i].width * codec.simulcastStream[i].height; + } + } + } + return (num_active > 1) ? 
absl::nullopt : pixels; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h index 3c3595759..3c876b897 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.h @@ -28,6 +28,7 @@ #include "call/adaptation/video_stream_input_state_provider.h" #include "modules/video_coding/utility/quality_scaler.h" #include "rtc_base/experiments/balanced_degradation_settings.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -56,6 +57,7 @@ VideoSourceRestrictions FilterRestrictionsByDegradationPreference( VideoSourceRestrictions source_restrictions, DegradationPreference degradation_preference); +int GetLowerResolutionThan(int pixel_count); int GetHigherResolutionThan(int pixel_count); // Either represents the next VideoSourceRestrictions the VideoStreamAdapter @@ -161,6 +163,9 @@ class VideoStreamAdapter { VideoAdaptationCounters counters; }; + static absl::optional GetSingleActiveLayerPixels( + const VideoCodec& codec); + private: void BroadcastVideoRestrictionsUpdate( const VideoStreamInputState& input_state, @@ -214,7 +219,8 @@ class VideoStreamAdapter { const VideoStreamInputState& input_state) const RTC_RUN_ON(&sequence_checker_); - SequenceChecker sequence_checker_ RTC_GUARDED_BY(&sequence_checker_); + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_ + RTC_GUARDED_BY(&sequence_checker_); // Gets the input state which is the basis of all adaptations. // Thread safe. 
VideoStreamInputStateProvider* input_state_provider_; diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state.cc index dc3315e6d..9c0d47590 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state.cc @@ -19,7 +19,8 @@ VideoStreamInputState::VideoStreamInputState() frame_size_pixels_(absl::nullopt), frames_per_second_(0), video_codec_type_(VideoCodecType::kVideoCodecGeneric), - min_pixels_per_frame_(kDefaultMinPixelsPerFrame) {} + min_pixels_per_frame_(kDefaultMinPixelsPerFrame), + single_active_stream_pixels_(absl::nullopt) {} void VideoStreamInputState::set_has_input(bool has_input) { has_input_ = has_input; @@ -43,6 +44,11 @@ void VideoStreamInputState::set_min_pixels_per_frame(int min_pixels_per_frame) { min_pixels_per_frame_ = min_pixels_per_frame; } +void VideoStreamInputState::set_single_active_stream_pixels( + absl::optional single_active_stream_pixels) { + single_active_stream_pixels_ = single_active_stream_pixels; +} + bool VideoStreamInputState::has_input() const { return has_input_; } @@ -63,6 +69,10 @@ int VideoStreamInputState::min_pixels_per_frame() const { return min_pixels_per_frame_; } +absl::optional VideoStreamInputState::single_active_stream_pixels() const { + return single_active_stream_pixels_; +} + bool VideoStreamInputState::HasInputFrameSizeAndFramesPerSecond() const { return has_input_ && frame_size_pixels_.has_value(); } diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state.h index af0d7c78e..191e22386 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state.h @@ -27,12 +27,15 @@ class VideoStreamInputState { void 
set_frames_per_second(int frames_per_second); void set_video_codec_type(VideoCodecType video_codec_type); void set_min_pixels_per_frame(int min_pixels_per_frame); + void set_single_active_stream_pixels( + absl::optional single_active_stream_pixels); bool has_input() const; absl::optional frame_size_pixels() const; int frames_per_second() const; VideoCodecType video_codec_type() const; int min_pixels_per_frame() const; + absl::optional single_active_stream_pixels() const; bool HasInputFrameSizeAndFramesPerSecond() const; @@ -42,6 +45,7 @@ class VideoStreamInputState { int frames_per_second_; VideoCodecType video_codec_type_; int min_pixels_per_frame_; + absl::optional single_active_stream_pixels_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state_provider.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state_provider.cc index 3c0a7e3fa..3261af39e 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state_provider.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_input_state_provider.cc @@ -10,6 +10,8 @@ #include "call/adaptation/video_stream_input_state_provider.h" +#include "call/adaptation/video_stream_adapter.h" + namespace webrtc { VideoStreamInputStateProvider::VideoStreamInputStateProvider( @@ -36,6 +38,9 @@ void VideoStreamInputStateProvider::OnEncoderSettingsChanged( encoder_settings.encoder_config().codec_type); input_state_.set_min_pixels_per_frame( encoder_settings.encoder_info().scaling_settings.min_pixels_per_frame); + input_state_.set_single_active_stream_pixels( + VideoStreamAdapter::GetSingleActiveLayerPixels( + encoder_settings.video_codec())); } VideoStreamInputState VideoStreamInputStateProvider::InputState() { diff --git a/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h index eee62e9a8..6f7449292 100644 --- 
a/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/audio_receive_stream.h @@ -90,6 +90,13 @@ class AudioReceiveStream { int32_t total_interruption_duration_ms = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp absl::optional estimated_playout_ntp_timestamp_ms; + // Remote outbound stats derived by the received RTCP sender reports. + // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* + absl::optional last_sender_report_timestamp_ms; + absl::optional last_sender_report_remote_timestamp_ms; + uint32_t sender_reports_packets_sent = 0; + uint64_t sender_reports_bytes_sent = 0; + uint64_t sender_reports_reports_count = 0; }; struct Config { @@ -167,6 +174,9 @@ class AudioReceiveStream { // When a stream is stopped, it can't receive, process or deliver packets. virtual void Stop() = 0; + // Returns true if the stream has been started. + virtual bool IsRunning() const = 0; + virtual Stats GetStats(bool get_and_clear_legacy_stats) const = 0; Stats GetStats() { return GetStats(/*get_and_clear_legacy_stats=*/true); } diff --git a/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.cc b/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.cc index 5acdc9618..9d25b77ba 100644 --- a/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/call/audio_send_stream.cc @@ -27,8 +27,7 @@ AudioSendStream::Config::Config(Transport* send_transport) AudioSendStream::Config::~Config() = default; std::string AudioSendStream::Config::ToString() const { - char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + rtc::StringBuilder ss; ss << "{rtp: " << rtp.ToString(); ss << ", rtcp_report_interval_ms: " << rtcp_report_interval_ms; ss << ", send_transport: " << (send_transport ? "(Transport)" : "null"); @@ -39,8 +38,8 @@ std::string AudioSendStream::Config::ToString() const { ss << ", has_dscp: " << (has_dscp ? 
"true" : "false"); ss << ", send_codec_spec: " << (send_codec_spec ? send_codec_spec->ToString() : ""); - ss << '}'; - return ss.str(); + ss << "}"; + return ss.Release(); } AudioSendStream::Config::Rtp::Rtp() = default; @@ -51,6 +50,12 @@ std::string AudioSendStream::Config::Rtp::ToString() const { char buf[1024]; rtc::SimpleStringBuilder ss(buf); ss << "{ssrc: " << ssrc; + if (!rid.empty()) { + ss << ", rid: " << rid; + } + if (!mid.empty()) { + ss << ", mid: " << mid; + } ss << ", extmap-allow-mixed: " << (extmap_allow_mixed ? "true" : "false"); ss << ", extensions: ["; for (size_t i = 0; i < extensions.size(); ++i) { diff --git a/TMessagesProj/jni/voip/webrtc/call/bitrate_allocator.h b/TMessagesProj/jni/voip/webrtc/call/bitrate_allocator.h index 8d9a1adb0..c0d664b6f 100644 --- a/TMessagesProj/jni/voip/webrtc/call/bitrate_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/call/bitrate_allocator.h @@ -20,8 +20,9 @@ #include #include "api/call/bitrate_allocation.h" +#include "api/sequence_checker.h" #include "api/transport/network_types.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -148,7 +149,7 @@ class BitrateAllocator : public BitrateAllocatorInterface { // video send stream. static uint8_t GetTransmissionMaxBitrateMultiplier(); - SequenceChecker sequenced_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequenced_checker_; LimitObserver* const limit_observer_ RTC_GUARDED_BY(&sequenced_checker_); // Stored in a list to keep track of the insertion order. 
std::vector allocatable_tracks_ diff --git a/TMessagesProj/jni/voip/webrtc/call/call.cc b/TMessagesProj/jni/voip/webrtc/call/call.cc index e814cff5b..a9ae07b60 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call.cc @@ -19,8 +19,10 @@ #include #include +#include "absl/functional/bind_front.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/sequence_checker.h" #include "api/transport/network_control.h" #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" @@ -31,6 +33,7 @@ #include "call/receive_time_calculator.h" #include "call/rtp_stream_receiver_controller.h" #include "call/rtp_transport_controller_send.h" +#include "call/version.h" #include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" @@ -50,7 +53,7 @@ #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" @@ -165,6 +168,34 @@ TaskQueueBase* GetCurrentTaskQueueOrThread() { return current; } +// Called from the destructor of Call to report the collected send histograms. 
+void UpdateSendHistograms(Timestamp now, + Timestamp first_sent_packet, + AvgCounter& estimated_send_bitrate_kbps_counter, + AvgCounter& pacer_bitrate_kbps_counter) { + TimeDelta elapsed = now - first_sent_packet; + if (elapsed.seconds() < metrics::kMinRunTimeInSeconds) + return; + + const int kMinRequiredPeriodicSamples = 5; + AggregatedStats send_bitrate_stats = + estimated_send_bitrate_kbps_counter.ProcessAndGetStats(); + if (send_bitrate_stats.num_samples > kMinRequiredPeriodicSamples) { + RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.EstimatedSendBitrateInKbps", + send_bitrate_stats.average); + RTC_LOG(LS_INFO) << "WebRTC.Call.EstimatedSendBitrateInKbps, " + << send_bitrate_stats.ToString(); + } + AggregatedStats pacer_bitrate_stats = + pacer_bitrate_kbps_counter.ProcessAndGetStats(); + if (pacer_bitrate_stats.num_samples > kMinRequiredPeriodicSamples) { + RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.PacerBitrateInKbps", + pacer_bitrate_stats.average); + RTC_LOG(LS_INFO) << "WebRTC.Call.PacerBitrateInKbps, " + << pacer_bitrate_stats.ToString(); + } +} + } // namespace namespace internal { @@ -262,10 +293,17 @@ class Call final : public webrtc::Call, const WebRtcKeyValueConfig& trials() const override; + TaskQueueBase* network_thread() const override; + TaskQueueBase* worker_thread() const override; + // Implements PacketReceiver. DeliveryStatus DeliverPacket(MediaType media_type, rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) override; + void DeliverPacketAsync(MediaType media_type, + rtc::CopyOnWriteBuffer packet, + int64_t packet_time_us, + PacketCallback callback) override; // Implements RecoveredPacketReceiver. 
void OnRecoveredPacket(const uint8_t* packet, size_t length) override; @@ -302,10 +340,7 @@ class Call final : public webrtc::Call, MediaType media_type) RTC_SHARED_LOCKS_REQUIRED(worker_thread_); - void UpdateSendHistograms(Timestamp first_sent_packet) - RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); void UpdateReceiveHistograms(); - void UpdateHistograms(); void UpdateAggregateNetworkState(); // Ensure that necessary process threads are started, and any required @@ -319,6 +354,7 @@ class Call final : public webrtc::Call, Clock* const clock_; TaskQueueFactory* const task_queue_factory_; TaskQueueBase* const worker_thread_; + TaskQueueBase* const network_thread_; const int num_cpu_cores_; const rtc::scoped_refptr module_process_thread_; @@ -328,15 +364,18 @@ class Call final : public webrtc::Call, NetworkState audio_network_state_; NetworkState video_network_state_; + // TODO(bugs.webrtc.org/11993): Move aggregate_network_up_ over to the + // network thread. bool aggregate_network_up_ RTC_GUARDED_BY(worker_thread_); // Audio, Video, and FlexFEC receive streams are owned by the client that // creates them. + // TODO(bugs.webrtc.org/11993): Move audio_receive_streams_, + // video_receive_streams_ and sync_stream_mapping_ over to the network thread. std::set audio_receive_streams_ RTC_GUARDED_BY(worker_thread_); std::set video_receive_streams_ RTC_GUARDED_BY(worker_thread_); - std::map sync_stream_mapping_ RTC_GUARDED_BY(worker_thread_); @@ -371,6 +410,9 @@ class Call final : public webrtc::Call, // send side BWE are negotiated. const bool use_send_side_bwe; }; + + // TODO(bugs.webrtc.org/11993): Move receive_rtp_config_ over to the + // network thread. 
std::map receive_rtp_config_ RTC_GUARDED_BY(worker_thread_); @@ -532,7 +574,7 @@ class SharedModuleThread::Impl { } private: - SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; mutable int ref_count_ RTC_GUARDED_BY(sequence_checker_) = 0; std::unique_ptr const module_thread_; std::function const on_one_ref_remaining_; @@ -595,6 +637,10 @@ Call::Call(Clock* clock, : clock_(clock), task_queue_factory_(task_queue_factory), worker_thread_(GetCurrentTaskQueueOrThread()), + // If |network_task_queue_| was set to nullptr, network related calls + // must be made on |worker_thread_| (i.e. they're one and the same). + network_thread_(config.network_task_queue_ ? config.network_task_queue_ + : worker_thread_), num_cpu_cores_(CpuInfo::DetectNumberOfCores()), module_process_thread_(std::move(module_process_thread)), call_stats_(new CallStats(clock_, worker_thread_)), @@ -613,7 +659,12 @@ Call::Call(Clock* clock, configured_max_padding_bitrate_bps_(0), estimated_send_bitrate_kbps_counter_(clock_, nullptr, true), pacer_bitrate_kbps_counter_(clock_, nullptr, true), - receive_side_cc_(clock_, transport_send->packet_router()), + receive_side_cc_(clock, + absl::bind_front(&PacketRouter::SendCombinedRtcpPacket, + transport_send->packet_router()), + absl::bind_front(&PacketRouter::SendRemb, + transport_send->packet_router()), + /*network_state_estimator=*/nullptr), receive_time_calculator_(ReceiveTimeCalculator::CreateFromFieldTrial()), video_send_delay_stats_(new SendDelayStats(clock_)), start_ms_(clock_->TimeInMilliseconds()), @@ -621,8 +672,13 @@ Call::Call(Clock* clock, transport_send_(std::move(transport_send)) { RTC_DCHECK(config.event_log != nullptr); RTC_DCHECK(config.trials != nullptr); + RTC_DCHECK(network_thread_); RTC_DCHECK(worker_thread_->IsCurrent()); + // Do not remove this call; it is here to convince the compiler that the + // WebRTC source timestamp string needs to be in the final binary. 
+ LoadWebRTCVersionInRegister(); + call_stats_->RegisterStatsObserver(&receive_side_cc_); module_process_thread_->process_thread()->RegisterModule( @@ -645,17 +701,23 @@ Call::~Call() { module_process_thread_->process_thread()->DeRegisterModule(&receive_side_cc_); call_stats_->DeregisterStatsObserver(&receive_side_cc_); - absl::optional first_sent_packet_ms = + absl::optional first_sent_packet_time = transport_send_->GetFirstPacketTime(); + Timestamp now = clock_->CurrentTime(); + // Only update histograms after process threads have been shut down, so that // they won't try to concurrently update stats. - if (first_sent_packet_ms) { - UpdateSendHistograms(*first_sent_packet_ms); + if (first_sent_packet_time) { + UpdateSendHistograms(now, *first_sent_packet_time, + estimated_send_bitrate_kbps_counter_, + pacer_bitrate_kbps_counter_); } UpdateReceiveHistograms(); - UpdateHistograms(); + + RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.LifetimeInSeconds", + (now.ms() - start_ms_) / 1000); } void Call::EnsureStarted() { @@ -664,6 +726,8 @@ void Call::EnsureStarted() { } is_started_ = true; + call_stats_->EnsureStarted(); + // This call seems to kick off a number of things, so probably better left // off being kicked off on request rather than in the ctor. transport_send_ptr_->RegisterTargetTransferRateObserver(this); @@ -677,37 +741,6 @@ void Call::SetClientBitratePreferences(const BitrateSettings& preferences) { GetTransportControllerSend()->SetClientBitratePreferences(preferences); } -void Call::UpdateHistograms() { - RTC_HISTOGRAM_COUNTS_100000( - "WebRTC.Call.LifetimeInSeconds", - (clock_->TimeInMilliseconds() - start_ms_) / 1000); -} - -// Called from the dtor. 
-void Call::UpdateSendHistograms(Timestamp first_sent_packet) { - int64_t elapsed_sec = - (clock_->TimeInMilliseconds() - first_sent_packet.ms()) / 1000; - if (elapsed_sec < metrics::kMinRunTimeInSeconds) - return; - const int kMinRequiredPeriodicSamples = 5; - AggregatedStats send_bitrate_stats = - estimated_send_bitrate_kbps_counter_.ProcessAndGetStats(); - if (send_bitrate_stats.num_samples > kMinRequiredPeriodicSamples) { - RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.EstimatedSendBitrateInKbps", - send_bitrate_stats.average); - RTC_LOG(LS_INFO) << "WebRTC.Call.EstimatedSendBitrateInKbps, " - << send_bitrate_stats.ToString(); - } - AggregatedStats pacer_bitrate_stats = - pacer_bitrate_kbps_counter_.ProcessAndGetStats(); - if (pacer_bitrate_stats.num_samples > kMinRequiredPeriodicSamples) { - RTC_HISTOGRAM_COUNTS_100000("WebRTC.Call.PacerBitrateInKbps", - pacer_bitrate_stats.average); - RTC_LOG(LS_INFO) << "WebRTC.Call.PacerBitrateInKbps, " - << pacer_bitrate_stats.ToString(); - } -} - void Call::UpdateReceiveHistograms() { if (first_received_rtp_audio_ms_) { RTC_HISTOGRAM_COUNTS_100000( @@ -755,7 +788,6 @@ void Call::UpdateReceiveHistograms() { } PacketReceiver* Call::Receiver() { - RTC_DCHECK_RUN_ON(worker_thread_); return this; } @@ -785,6 +817,8 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( audio_send_ssrcs_.end()); audio_send_ssrcs_[config.rtp.ssrc] = send_stream; + // TODO(bugs.webrtc.org/11993): call AssociateSendStream and + // UpdateAggregateNetworkState asynchronously on the network thread. 
for (AudioReceiveStream* stream : audio_receive_streams_) { if (stream->config().rtp.local_ssrc == config.rtp.ssrc) { stream->AssociateSendStream(send_stream); @@ -792,6 +826,7 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( } UpdateAggregateNetworkState(); + return send_stream; } @@ -810,6 +845,8 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { size_t num_deleted = audio_send_ssrcs_.erase(ssrc); RTC_DCHECK_EQ(1, num_deleted); + // TODO(bugs.webrtc.org/11993): call AssociateSendStream and + // UpdateAggregateNetworkState asynchronously on the network thread. for (AudioReceiveStream* stream : audio_receive_streams_) { if (stream->config().rtp.local_ssrc == ssrc) { stream->AssociateSendStream(nullptr); @@ -817,6 +854,7 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { } UpdateAggregateNetworkState(); + delete send_stream; } @@ -827,11 +865,19 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( EnsureStarted(); event_log_->Log(std::make_unique( CreateRtcLogStreamConfig(config))); + + // TODO(bugs.webrtc.org/11993): Move the registration between |receive_stream| + // and |audio_receiver_controller_| out of AudioReceiveStream construction and + // set it up asynchronously on the network thread (the registration and + // |audio_receiver_controller_| need to live on the network thread). AudioReceiveStream* receive_stream = new AudioReceiveStream( clock_, &audio_receiver_controller_, transport_send_ptr_->packet_router(), module_process_thread_->process_thread(), config_.neteq_factory, config, config_.audio_state, event_log_); + // TODO(bugs.webrtc.org/11993): Update the below on the network thread. + // We could possibly set up the audio_receiver_controller_ association up + // as part of the async setup. 
receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config)); audio_receive_streams_.insert(receive_stream); @@ -858,8 +904,12 @@ void Call::DestroyAudioReceiveStream( uint32_t ssrc = config.rtp.remote_ssrc; receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) ->RemoveStream(ssrc); + + // TODO(bugs.webrtc.org/11993): Access the map, rtp config, call ConfigureSync + // and UpdateAggregateNetworkState on the network thread. audio_receive_streams_.erase(audio_receive_stream); const std::string& sync_group = audio_receive_stream->config().sync_group; + const auto it = sync_stream_mapping_.find(sync_group); if (it != sync_stream_mapping_.end() && it->second == audio_receive_stream) { sync_stream_mapping_.erase(it); @@ -868,6 +918,9 @@ void Call::DestroyAudioReceiveStream( receive_rtp_config_.erase(ssrc); UpdateAggregateNetworkState(); + // TODO(bugs.webrtc.org/11993): Consider if deleting |audio_receive_stream| + // on the network thread would be better or if we'd need to tear down the + // state in two phases. delete audio_receive_stream; } @@ -980,13 +1033,15 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( EnsureStarted(); - TaskQueueBase* current = GetCurrentTaskQueueOrThread(); - RTC_CHECK(current); + // TODO(bugs.webrtc.org/11993): Move the registration between |receive_stream| + // and |video_receiver_controller_| out of VideoReceiveStream2 construction + // and set it up asynchronously on the network thread (the registration and + // |video_receiver_controller_| need to live on the network thread). 
VideoReceiveStream2* receive_stream = new VideoReceiveStream2( - task_queue_factory_, current, &video_receiver_controller_, num_cpu_cores_, - transport_send_ptr_->packet_router(), std::move(configuration), - module_process_thread_->process_thread(), call_stats_.get(), clock_, - new VCMTiming(clock_)); + task_queue_factory_, worker_thread_, &video_receiver_controller_, + num_cpu_cores_, transport_send_ptr_->packet_router(), + std::move(configuration), module_process_thread_->process_thread(), + call_stats_.get(), clock_, new VCMTiming(clock_)); const webrtc::VideoReceiveStream::Config& config = receive_stream->config(); if (config.rtp.rtx_ssrc) { @@ -1118,35 +1173,63 @@ const WebRtcKeyValueConfig& Call::trials() const { return *config_.trials; } -void Call::SignalChannelNetworkState(MediaType media, NetworkState state) { - RTC_DCHECK_RUN_ON(worker_thread_); - switch (media) { - case MediaType::AUDIO: - audio_network_state_ = state; - break; - case MediaType::VIDEO: - video_network_state_ = state; - break; - case MediaType::ANY: - case MediaType::DATA: - RTC_NOTREACHED(); - break; - } +TaskQueueBase* Call::network_thread() const { + return network_thread_; +} - UpdateAggregateNetworkState(); - for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) { - video_receive_stream->SignalNetworkState(video_network_state_); +TaskQueueBase* Call::worker_thread() const { + return worker_thread_; +} + +void Call::SignalChannelNetworkState(MediaType media, NetworkState state) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(media == MediaType::AUDIO || media == MediaType::VIDEO); + + auto closure = [this, media, state]() { + // TODO(bugs.webrtc.org/11993): Move this over to the network thread. 
+ RTC_DCHECK_RUN_ON(worker_thread_); + if (media == MediaType::AUDIO) { + audio_network_state_ = state; + } else { + RTC_DCHECK_EQ(media, MediaType::VIDEO); + video_network_state_ = state; + } + + // TODO(tommi): Is it necessary to always do this, including if there + // was no change in state? + UpdateAggregateNetworkState(); + + // TODO(tommi): Is it right to do this if media == AUDIO? + for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) { + video_receive_stream->SignalNetworkState(video_network_state_); + } + }; + + if (network_thread_ == worker_thread_) { + closure(); + } else { + // TODO(bugs.webrtc.org/11993): Remove workaround when we no longer need to + // post to the worker thread. + worker_thread_->PostTask(ToQueuedTask(task_safety_, std::move(closure))); } } void Call::OnAudioTransportOverheadChanged(int transport_overhead_per_packet) { - RTC_DCHECK_RUN_ON(worker_thread_); - for (auto& kv : audio_send_ssrcs_) { - kv.second->SetTransportOverhead(transport_overhead_per_packet); - } + RTC_DCHECK_RUN_ON(network_thread_); + worker_thread_->PostTask( + ToQueuedTask(task_safety_, [this, transport_overhead_per_packet]() { + // TODO(bugs.webrtc.org/11993): Move this over to the network thread. + RTC_DCHECK_RUN_ON(worker_thread_); + for (auto& kv : audio_send_ssrcs_) { + kv.second->SetTransportOverhead(transport_overhead_per_packet); + } + })); } void Call::UpdateAggregateNetworkState() { + // TODO(bugs.webrtc.org/11993): Move this over to the network thread. + // RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK_RUN_ON(worker_thread_); bool have_audio = @@ -1173,6 +1256,12 @@ void Call::UpdateAggregateNetworkState() { } void Call::OnSentPacket(const rtc::SentPacket& sent_packet) { + // In production and with most tests, this method will be called on the + // network thread. However some test classes such as DirectTransport don't + // incorporate a network thread. 
This means that tests for RtpSenderEgress + // and ModuleRtpRtcpImpl2 that use DirectTransport, will call this method + // on a ProcessThread. This is alright as is since we forward the call to + // implementations that either just do a PostTask or use locking. video_send_delay_stats_->OnSentPacket(sent_packet.packet_id, clock_->TimeInMilliseconds()); transport_send_ptr_->OnSentPacket(sent_packet); @@ -1226,6 +1315,7 @@ void Call::OnAllocationLimitsChanged(BitrateAllocationLimits limits) { } void Call::ConfigureSync(const std::string& sync_group) { + // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. // Set sync only if there was no previous one. if (sync_group.empty()) return; @@ -1279,6 +1369,19 @@ PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type, const uint8_t* packet, size_t length) { TRACE_EVENT0("webrtc", "Call::DeliverRtcp"); + + // TODO(bugs.webrtc.org/11993): This DCHECK is here just to maintain the + // invariant that currently the only call path to this function is via + // `PeerConnection::InitializeRtcpCallback()`. DeliverRtp on the other hand + // gets called via the channel classes and + // WebRtc[Audio|Video]Channel's `OnPacketReceived`. We'll remove the + // PeerConnection involvement as well as + // `JsepTransportController::OnRtcpPacketReceived_n` and `rtcp_handler` + // and make sure that the flow of packets is consistent from the + // `RtpTransport` class, via the *Channel and *Engine classes and into Call. + // This way we'll also know more about the context of the packet. + RTC_DCHECK_EQ(media_type, MediaType::ANY); + // TODO(pbos): Make sure it's a valid packet. // Return DELIVERY_UNKNOWN_SSRC if it can be determined that // there's no receiver of the packet. 
@@ -1325,6 +1428,7 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type, rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) { TRACE_EVENT0("webrtc", "Call::DeliverRtp"); + RTC_DCHECK_NE(media_type, MediaType::ANY); RtpPacketReceived parsed_packet; if (!parsed_packet.Parse(std::move(packet))) @@ -1337,9 +1441,9 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type, packet_time_us = receive_time_calculator_->ReconcileReceiveTimes( packet_time_us, rtc::TimeUTCMicros(), clock_->TimeInMicroseconds()); } - parsed_packet.set_arrival_time_ms((packet_time_us + 500) / 1000); + parsed_packet.set_arrival_time(Timestamp::Micros(packet_time_us)); } else { - parsed_packet.set_arrival_time_ms(clock_->TimeInMilliseconds()); + parsed_packet.set_arrival_time(clock_->CurrentTime()); } // We might get RTP keep-alive packets in accordance with RFC6263 section 4.6. @@ -1375,7 +1479,7 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type, received_audio_bytes_per_second_counter_.Add(length); event_log_->Log( std::make_unique(parsed_packet)); - const int64_t arrival_time_ms = parsed_packet.arrival_time_ms(); + const int64_t arrival_time_ms = parsed_packet.arrival_time().ms(); if (!first_received_rtp_audio_ms_) { first_received_rtp_audio_ms_.emplace(arrival_time_ms); } @@ -1389,7 +1493,7 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type, received_video_bytes_per_second_counter_.Add(length); event_log_->Log( std::make_unique(parsed_packet)); - const int64_t arrival_time_ms = parsed_packet.arrival_time_ms(); + const int64_t arrival_time_ms = parsed_packet.arrival_time().ms(); if (!first_received_rtp_video_ms_) { first_received_rtp_video_ms_.emplace(arrival_time_ms); } @@ -1412,7 +1516,34 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket( return DeliverRtp(media_type, std::move(packet), packet_time_us); } +void Call::DeliverPacketAsync(MediaType media_type, + rtc::CopyOnWriteBuffer packet, + int64_t 
packet_time_us, + PacketCallback callback) { + RTC_DCHECK_RUN_ON(network_thread_); + + TaskQueueBase* network_thread = rtc::Thread::Current(); + RTC_DCHECK(network_thread); + + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [this, network_thread, media_type, p = std::move(packet), + packet_time_us, cb = std::move(callback)] { + RTC_DCHECK_RUN_ON(worker_thread_); + DeliveryStatus status = DeliverPacket(media_type, p, packet_time_us); + if (cb) { + network_thread->PostTask( + ToQueuedTask([cb = std::move(cb), status, media_type, + p = std::move(p), packet_time_us]() { + cb(status, media_type, std::move(p), packet_time_us); + })); + } + })); +} + void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) { + // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. + // This method is called synchronously via |OnRtpPacket()| (see DeliverRtp) + // on the same thread. RTC_DCHECK_RUN_ON(worker_thread_); RtpPacketReceived parsed_packet; if (!parsed_packet.Parse(packet, length)) @@ -1450,7 +1581,7 @@ void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, ReceivedPacket packet_msg; packet_msg.size = DataSize::Bytes(packet.payload_size()); - packet_msg.receive_time = Timestamp::Millis(packet.arrival_time_ms()); + packet_msg.receive_time = packet.arrival_time(); if (header.extension.hasAbsoluteSendTime) { packet_msg.send_time = header.extension.GetAbsoluteSendTimestamp(); } @@ -1470,8 +1601,8 @@ void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, if (media_type == MediaType::VIDEO || (use_send_side_bwe && header.extension.hasTransportSequenceNumber)) { receive_side_cc_.OnReceivedPacket( - packet.arrival_time_ms(), packet.payload_size() + packet.padding_size(), - header); + packet.arrival_time().ms(), + packet.payload_size() + packet.padding_size(), header); } } diff --git a/TMessagesProj/jni/voip/webrtc/call/call.h b/TMessagesProj/jni/voip/webrtc/call/call.h index a2b3b8959..37d784f72 100644 --- 
a/TMessagesProj/jni/voip/webrtc/call/call.h +++ b/TMessagesProj/jni/voip/webrtc/call/call.h @@ -17,6 +17,7 @@ #include "api/adaptation/resource.h" #include "api/media_types.h" +#include "api/task_queue/task_queue_base.h" #include "call/audio_receive_stream.h" #include "call/audio_send_stream.h" #include "call/call_config.h" @@ -158,6 +159,9 @@ class Call { virtual const WebRtcKeyValueConfig& trials() const = 0; + virtual TaskQueueBase* network_thread() const = 0; + virtual TaskQueueBase* worker_thread() const = 0; + virtual ~Call() {} }; diff --git a/TMessagesProj/jni/voip/webrtc/call/call_config.cc b/TMessagesProj/jni/voip/webrtc/call/call_config.cc index b149c889e..8b3c91222 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_config.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call_config.cc @@ -14,7 +14,9 @@ namespace webrtc { -CallConfig::CallConfig(RtcEventLog* event_log) : event_log(event_log) { +CallConfig::CallConfig(RtcEventLog* event_log, + TaskQueueBase* network_task_queue /* = nullptr*/) + : event_log(event_log), network_task_queue_(network_task_queue) { RTC_DCHECK(event_log); } diff --git a/TMessagesProj/jni/voip/webrtc/call/call_config.h b/TMessagesProj/jni/voip/webrtc/call/call_config.h index 205f7a48b..95dad3600 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_config.h +++ b/TMessagesProj/jni/voip/webrtc/call/call_config.h @@ -26,7 +26,11 @@ class AudioProcessing; class RtcEventLog; struct CallConfig { - explicit CallConfig(RtcEventLog* event_log); + // If |network_task_queue| is set to nullptr, Call will assume that network + // related callbacks will be made on the same TQ as the Call instance was + // constructed on. + explicit CallConfig(RtcEventLog* event_log, + TaskQueueBase* network_task_queue = nullptr); CallConfig(const CallConfig&); ~CallConfig(); @@ -42,7 +46,7 @@ struct CallConfig { // RtcEventLog to use for this call. Required. // Use webrtc::RtcEventLog::CreateNull() for a null implementation. 
- RtcEventLog* event_log = nullptr; + RtcEventLog* const event_log = nullptr; // FecController to use for this call. FecControllerFactoryInterface* fec_controller_factory = nullptr; @@ -63,6 +67,8 @@ struct CallConfig { // Key-value mapping of internal configurations to apply, // e.g. field trials. const WebRtcKeyValueConfig* trials = nullptr; + + TaskQueueBase* const network_task_queue_ = nullptr; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/call_factory.h b/TMessagesProj/jni/voip/webrtc/call/call_factory.h index 65c0b6532..469bec39e 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_factory.h +++ b/TMessagesProj/jni/voip/webrtc/call/call_factory.h @@ -12,9 +12,10 @@ #define CALL_CALL_FACTORY_H_ #include "api/call/call_factory_interface.h" +#include "api/sequence_checker.h" #include "call/call.h" #include "call/call_config.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -27,7 +28,7 @@ class CallFactory : public CallFactoryInterface { Call* CreateCall(const CallConfig& config) override; - SequenceChecker call_thread_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker call_thread_; rtc::scoped_refptr module_thread_ RTC_GUARDED_BY(call_thread_); }; diff --git a/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc b/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc index aa8894e9a..c163ab2fe 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call_perf_tests.cc @@ -312,14 +312,18 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, DestroyStreams(); - video_send_transport.reset(); - audio_send_transport.reset(); - receive_transport.reset(); - sender_call_->DestroyAudioSendStream(audio_send_stream); receiver_call_->DestroyAudioReceiveStream(audio_receive_stream); DestroyCalls(); + // Call may post periodic rtcp packet to the transport on the process + // thread, thus transport should be destroyed after the 
call objects. + // Though transports keep pointers to the call objects, transports handle + // packets on the task_queue() and thus wouldn't create a race while current + // destruction happens in the same task as destruction of the call objects. + video_send_transport.reset(); + audio_send_transport.reset(); + receive_transport.reset(); }); observer->PrintResults(); @@ -557,6 +561,18 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { // TODO(sprang): Add integration test for maintain-framerate mode? void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) override { + // The sink wants can change either because an adaptation happened (i.e. + // the pixels or frame rate changed) or for other reasons, such as encoded + // resolutions being communicated (happens whenever we capture a new frame + // size). In this test, we only care about adaptations. + bool did_adapt = + last_wants_.max_pixel_count != wants.max_pixel_count || + last_wants_.target_pixel_count != wants.target_pixel_count || + last_wants_.max_framerate_fps != wants.max_framerate_fps; + last_wants_ = wants; + if (!did_adapt) { + return; + } // At kStart expect CPU overuse. Then expect CPU underuse when the encoder // delay has been decreased. switch (test_phase_) { @@ -621,6 +637,9 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { kAdaptedDown, kAdaptedUp } test_phase_; + + private: + rtc::VideoSinkWants last_wants_; } test; RunBaseTest(&test); @@ -635,7 +654,8 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { static const int kAcceptableBitrateErrorMargin = 15; // +- 7 class BitrateObserver : public test::EndToEndTest { public: - explicit BitrateObserver(bool using_min_transmit_bitrate) + explicit BitrateObserver(bool using_min_transmit_bitrate, + TaskQueueBase* task_queue) : EndToEndTest(kLongTimeoutMs), send_stream_(nullptr), converged_(false), @@ -648,27 +668,31 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { ? 
kMaxAcceptableTransmitBitrate : (kMaxEncodeBitrateKbps + kAcceptableBitrateErrorMargin / 2)), - num_bitrate_observations_in_range_(0) {} + num_bitrate_observations_in_range_(0), + task_queue_(task_queue) {} private: // TODO(holmer): Run this with a timer instead of once per packet. Action OnSendRtp(const uint8_t* packet, size_t length) override { - VideoSendStream::Stats stats = send_stream_->GetStats(); - if (!stats.substreams.empty()) { - RTC_DCHECK_EQ(1, stats.substreams.size()); - int bitrate_kbps = - stats.substreams.begin()->second.total_bitrate_bps / 1000; - if (bitrate_kbps > min_acceptable_bitrate_ && - bitrate_kbps < max_acceptable_bitrate_) { - converged_ = true; - ++num_bitrate_observations_in_range_; - if (num_bitrate_observations_in_range_ == - kNumBitrateObservationsInRange) - observation_complete_.Set(); + task_queue_->PostTask(ToQueuedTask([this]() { + VideoSendStream::Stats stats = send_stream_->GetStats(); + + if (!stats.substreams.empty()) { + RTC_DCHECK_EQ(1, stats.substreams.size()); + int bitrate_kbps = + stats.substreams.begin()->second.total_bitrate_bps / 1000; + if (bitrate_kbps > min_acceptable_bitrate_ && + bitrate_kbps < max_acceptable_bitrate_) { + converged_ = true; + ++num_bitrate_observations_in_range_; + if (num_bitrate_observations_in_range_ == + kNumBitrateObservationsInRange) + observation_complete_.Set(); + } + if (converged_) + bitrate_kbps_list_.push_back(bitrate_kbps); } - if (converged_) - bitrate_kbps_list_.push_back(bitrate_kbps); - } + })); return SEND_PACKET; } @@ -705,7 +729,8 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { const int max_acceptable_bitrate_; int num_bitrate_observations_in_range_; std::vector bitrate_kbps_list_; - } test(pad_to_min_bitrate); + TaskQueueBase* task_queue_; + } test(pad_to_min_bitrate, task_queue()); fake_encoder_max_bitrate_ = kMaxEncodeBitrateKbps; RunBaseTest(&test); @@ -756,7 +781,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { class 
BitrateObserver : public test::EndToEndTest, public test::FakeEncoder { public: - BitrateObserver() + explicit BitrateObserver(TaskQueueBase* task_queue) : EndToEndTest(kDefaultTimeoutMs), FakeEncoder(Clock::GetRealTimeClock()), encoder_inits_(0), @@ -765,7 +790,8 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { frame_generator_(nullptr), encoder_factory_(this), bitrate_allocator_factory_( - CreateBuiltinVideoBitrateAllocatorFactory()) {} + CreateBuiltinVideoBitrateAllocatorFactory()), + task_queue_(task_queue) {} int32_t InitEncode(const VideoCodec* config, const VideoEncoder::Settings& settings) override { @@ -815,7 +841,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { bitrate_allocator_factory_.get(); encoder_config->max_bitrate_bps = 2 * kReconfigureThresholdKbps * 1000; encoder_config->video_stream_factory = - new rtc::RefCountedObject(); + rtc::make_ref_counted(); encoder_config_ = encoder_config->Copy(); } @@ -835,7 +861,9 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { ASSERT_TRUE(time_to_reconfigure_.Wait(kDefaultTimeoutMs)) << "Timed out before receiving an initial high bitrate."; frame_generator_->ChangeResolution(kDefaultWidth * 2, kDefaultHeight * 2); - send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + SendTask(RTC_FROM_HERE, task_queue_, [&]() { + send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + }); EXPECT_TRUE(Wait()) << "Timed out while waiting for a couple of high bitrate estimates " "after reconfiguring the send stream."; @@ -850,7 +878,8 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { test::VideoEncoderProxyFactory encoder_factory_; std::unique_ptr bitrate_allocator_factory_; VideoEncoderConfig encoder_config_; - } test; + TaskQueueBase* task_queue_; + } test(task_queue()); RunBaseTest(&test); } diff --git a/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc b/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc index 
0cd43018a..73c236bc0 100644 --- a/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc +++ b/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc @@ -270,6 +270,14 @@ const WebRtcKeyValueConfig& DegradedCall::trials() const { return call_->trials(); } +TaskQueueBase* DegradedCall::network_thread() const { + return call_->network_thread(); +} + +TaskQueueBase* DegradedCall::worker_thread() const { + return call_->worker_thread(); +} + void DegradedCall::SignalChannelNetworkState(MediaType media, NetworkState state) { call_->SignalChannelNetworkState(media, state); diff --git a/TMessagesProj/jni/voip/webrtc/call/degraded_call.h b/TMessagesProj/jni/voip/webrtc/call/degraded_call.h index d81c65c57..03fc14f28 100644 --- a/TMessagesProj/jni/voip/webrtc/call/degraded_call.h +++ b/TMessagesProj/jni/voip/webrtc/call/degraded_call.h @@ -87,6 +87,9 @@ class DegradedCall : public Call, private PacketReceiver { const WebRtcKeyValueConfig& trials() const override; + TaskQueueBase* network_thread() const override; + TaskQueueBase* worker_thread() const override; + void SignalChannelNetworkState(MediaType media, NetworkState state) override; void OnAudioTransportOverheadChanged( int transport_overhead_per_packet) override; diff --git a/TMessagesProj/jni/voip/webrtc/call/packet_receiver.h b/TMessagesProj/jni/voip/webrtc/call/packet_receiver.h index df57d8f4f..f18ee65c7 100644 --- a/TMessagesProj/jni/voip/webrtc/call/packet_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/call/packet_receiver.h @@ -11,8 +11,10 @@ #define CALL_PACKET_RECEIVER_H_ #include +#include #include #include +#include #include #include "api/media_types.h" @@ -28,6 +30,32 @@ class PacketReceiver { DELIVERY_PACKET_ERROR, }; + // Definition of the callback to execute when packet delivery is complete. + // The callback will be issued on the same thread as called DeliverPacket. 
+ typedef std::function< + void(DeliveryStatus, MediaType, rtc::CopyOnWriteBuffer, int64_t)> + PacketCallback; + + // Asynchronously handle packet delivery and report back to the caller when + // delivery of the packet has completed. + // Note that if the packet is invalid or can be processed without the need of + // asynchronous operations that the |callback| may have been called before + // the function returns. + // TODO(bugs.webrtc.org/11993): This function is meant to be called on the + // network thread exclusively but while the code is being updated to align + // with those goals, it may be called either on the worker or network threads. + // Update docs etc when the work has been completed. Once we're done with the + // updates, we might be able to go back to returning the status from this + // function instead of having to report it via a callback. + virtual void DeliverPacketAsync(MediaType media_type, + rtc::CopyOnWriteBuffer packet, + int64_t packet_time_us, + PacketCallback callback) { + DeliveryStatus status = DeliverPacket(media_type, packet, packet_time_us); + if (callback) + callback(status, media_type, std::move(packet), packet_time_us); + } + virtual DeliveryStatus DeliverPacket(MediaType media_type, rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc index 89fbe3dde..37e3e6c7f 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc @@ -160,7 +160,7 @@ void RampUpTester::ModifyVideoConfigs( encoder_config->number_of_streams = num_video_streams_; encoder_config->max_bitrate_bps = 2000000; encoder_config->video_stream_factory = - new rtc::RefCountedObject(); + rtc::make_ref_counted(); if (num_video_streams_ == 1) { // For single stream rampup until 1mbps expected_bitrate_bps_ = kSingleStreamTargetBps; @@ -370,7 +370,10 @@ void RampUpTester::TriggerTestDone() { 
if (!send_stream_) return; - VideoSendStream::Stats send_stats = send_stream_->GetStats(); + VideoSendStream::Stats send_stats; + SendTask(RTC_FROM_HERE, task_queue_, + [&] { send_stats = send_stream_->GetStats(); }); + send_stream_ = nullptr; // To avoid dereferencing a bad pointer. size_t total_packets_sent = 0; @@ -663,7 +666,6 @@ TEST_F(RampUpTest, DISABLED_UpDownUpTransportSequenceNumberPacketLoss) { UpDownUpAudioVideoTransportSequenceNumberRtx #endif TEST_F(RampUpTest, MAYBE_UpDownUpAudioVideoTransportSequenceNumberRtx) { - test::ScopedFieldTrials field_trials("WebRTC-Audio-SendSideBwe/Enabled/"); std::vector loss_rates = {0, 0, 0, 0}; RampUpDownUpTester test(3, 1, 0, kStartBitrateBps, RtpExtension::kTransportSequenceNumberUri, true, @@ -672,7 +674,6 @@ TEST_F(RampUpTest, MAYBE_UpDownUpAudioVideoTransportSequenceNumberRtx) { } TEST_F(RampUpTest, UpDownUpAudioTransportSequenceNumberRtx) { - test::ScopedFieldTrials field_trials("WebRTC-Audio-SendSideBwe/Enabled/"); std::vector loss_rates = {0, 0, 0, 0}; RampUpDownUpTester test(0, 1, 0, kStartBitrateBps, RtpExtension::kTransportSequenceNumberUri, true, diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc index 9fc4ba1c1..ee9619623 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc @@ -53,6 +53,16 @@ size_t RemoveFromMapByValue(Map* map, const Value& value) { RtpDemuxerCriteria::RtpDemuxerCriteria() = default; RtpDemuxerCriteria::~RtpDemuxerCriteria() = default; +bool RtpDemuxerCriteria::operator==(const RtpDemuxerCriteria& other) const { + return this->mid == other.mid && this->rsid == other.rsid && + this->ssrcs == other.ssrcs && + this->payload_types == other.payload_types; +} + +bool RtpDemuxerCriteria::operator!=(const RtpDemuxerCriteria& other) const { + return !(*this == other); +} + std::string RtpDemuxerCriteria::ToString() const { rtc::StringBuilder sb; sb << "{mid: " << 
(mid.empty() ? "" : mid) @@ -91,7 +101,7 @@ std::string RtpDemuxer::DescribePacket(const RtpPacketReceived& packet) { return sb.Release(); } -RtpDemuxer::RtpDemuxer() = default; +RtpDemuxer::RtpDemuxer(bool use_mid /* = true*/) : use_mid_(use_mid) {} RtpDemuxer::~RtpDemuxer() { RTC_DCHECK(sink_by_mid_.empty()); diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h index 3aa7e9df2..00c050858 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h @@ -14,9 +14,12 @@ #include #include #include +#include #include #include +#include "rtc_base/hash.h" + namespace webrtc { class RtpPacketReceived; @@ -28,6 +31,9 @@ struct RtpDemuxerCriteria { RtpDemuxerCriteria(); ~RtpDemuxerCriteria(); + bool operator==(const RtpDemuxerCriteria& other) const; + bool operator!=(const RtpDemuxerCriteria& other) const; + // If not the empty string, will match packets with this MID. std::string mid; @@ -94,7 +100,7 @@ class RtpDemuxer { // relevant for demuxing. static std::string DescribePacket(const RtpPacketReceived& packet); - RtpDemuxer(); + explicit RtpDemuxer(bool use_mid = true); ~RtpDemuxer(); RtpDemuxer(const RtpDemuxer&) = delete; @@ -132,10 +138,6 @@ class RtpDemuxer { // if the packet was forwarded and false if the packet was dropped. bool OnRtpPacket(const RtpPacketReceived& packet); - // Configure whether to look at the MID header extension when demuxing - // incoming RTP packets. By default this is enabled. - void set_use_mid(bool use_mid) { use_mid_ = use_mid; } - private: // Returns true if adding a sink with the given criteria would cause conflicts // with the existing criteria and should be rejected. @@ -169,12 +171,14 @@ class RtpDemuxer { // Note: Mappings are only modified by AddSink/RemoveSink (except for // SSRC mapping which receives all MID, payload type, or RSID to SSRC bindings // discovered when demuxing packets). 
- std::map sink_by_mid_; - std::map sink_by_ssrc_; - std::multimap sinks_by_pt_; - std::map, RtpPacketSinkInterface*> + std::unordered_map sink_by_mid_; + std::unordered_map sink_by_ssrc_; + std::unordered_multimap sinks_by_pt_; + std::unordered_map, + RtpPacketSinkInterface*, + webrtc::PairHash> sink_by_mid_and_rsid_; - std::map sink_by_rsid_; + std::unordered_map sink_by_rsid_; // Tracks all the MIDs that have been identified in added criteria. Used to // determine if a packet should be dropped right away because the MID is @@ -185,13 +189,13 @@ class RtpDemuxer { // received. // This is stored separately from the sink mappings because if a sink is // removed we want to still remember these associations. - std::map mid_by_ssrc_; - std::map rsid_by_ssrc_; + std::unordered_map mid_by_ssrc_; + std::unordered_map rsid_by_ssrc_; // Adds a binding from the SSRC to the given sink. void AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink); - bool use_mid_ = true; + const bool use_mid_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc index 1f0815547..d6d17dde1 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.cc @@ -139,6 +139,9 @@ RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc, : ssrc_(ssrc), generic_picture_id_experiment_( absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"), + "Enabled")), + simulate_generic_vp9_( + absl::StartsWith(trials.Lookup("WebRTC-Vp9DependencyDescriptor"), "Enabled")) { for (auto& spatial_layer : last_shared_frame_id_) spatial_layer.fill(-1); @@ -164,7 +167,7 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader( PopulateRtpWithCodecSpecifics(*codec_specific_info, image.SpatialIndex(), &rtp_video_header); } - rtp_video_header.frame_type = image._frameType, + rtp_video_header.frame_type = image._frameType; rtp_video_header.rotation = 
image.rotation_; rtp_video_header.content_type = image.content_type_; rtp_video_header.playout_delay = image.playout_delay_; @@ -173,6 +176,7 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader( rtp_video_header.color_space = image.ColorSpace() ? absl::make_optional(*image.ColorSpace()) : absl::nullopt; + rtp_video_header.video_frame_tracking_id = image.VideoFrameTrackingId(); SetVideoTiming(image, &rtp_video_header.video_timing); const bool is_keyframe = image._frameType == VideoFrameType::kVideoFrameKey; @@ -243,12 +247,11 @@ void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header, RTPVideoHeader::GenericDescriptorInfo RtpPayloadParams::GenericDescriptorFromFrameInfo( const GenericFrameInfo& frame_info, - int64_t frame_id, - VideoFrameType frame_type) { + int64_t frame_id) { RTPVideoHeader::GenericDescriptorInfo generic; generic.frame_id = frame_id; generic.dependencies = dependencies_calculator_.FromBuffersUsage( - frame_type, frame_id, frame_info.encoder_buffers); + frame_id, frame_info.encoder_buffers); generic.chain_diffs = chains_calculator_.From(frame_id, frame_info.part_of_chain); generic.spatial_index = frame_info.spatial_id; @@ -269,9 +272,8 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, chains_calculator_.Reset( codec_specific_info->generic_frame_info->part_of_chain); } - rtp_video_header->generic = - GenericDescriptorFromFrameInfo(*codec_specific_info->generic_frame_info, - frame_id, rtp_video_header->frame_type); + rtp_video_header->generic = GenericDescriptorFromFrameInfo( + *codec_specific_info->generic_frame_info, frame_id); return; } @@ -286,8 +288,13 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, } return; case VideoCodecType::kVideoCodecVP9: + if (simulate_generic_vp9_ && codec_specific_info != nullptr) { + Vp9ToGeneric(codec_specific_info->codecSpecific.VP9, frame_id, + *rtp_video_header); + } + return; case VideoCodecType::kVideoCodecAV1: - // 
TODO(philipel): Implement VP9 and AV1 to generic descriptor. + // TODO(philipel): Implement AV1 to generic descriptor. return; case VideoCodecType::kVideoCodecH264: if (codec_specific_info) { @@ -411,6 +418,150 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, } } +FrameDependencyStructure RtpPayloadParams::MinimalisticVp9Structure( + const CodecSpecificInfoVP9& vp9) { + const int num_spatial_layers = vp9.num_spatial_layers; + const int num_temporal_layers = kMaxTemporalStreams; + FrameDependencyStructure structure; + structure.num_decode_targets = num_spatial_layers * num_temporal_layers; + structure.num_chains = num_spatial_layers; + structure.templates.reserve(num_spatial_layers * num_temporal_layers); + for (int sid = 0; sid < num_spatial_layers; ++sid) { + for (int tid = 0; tid < num_temporal_layers; ++tid) { + FrameDependencyTemplate a_template; + a_template.spatial_id = sid; + a_template.temporal_id = tid; + for (int s = 0; s < num_spatial_layers; ++s) { + for (int t = 0; t < num_temporal_layers; ++t) { + // Prefer kSwitch for indication frame is part of the decode target + // because RtpPayloadParams::Vp9ToGeneric uses that indication more + // often that kRequired, increasing chance custom dti need not to + // use more bits in dependency descriptor on the wire. + a_template.decode_target_indications.push_back( + sid <= s && tid <= t ? DecodeTargetIndication::kSwitch + : DecodeTargetIndication::kNotPresent); + } + } + a_template.frame_diffs.push_back(tid == 0 ? 
num_spatial_layers * + num_temporal_layers + : num_spatial_layers); + a_template.chain_diffs.assign(structure.num_chains, 1); + structure.templates.push_back(a_template); + + structure.decode_target_protected_by_chain.push_back(sid); + } + if (vp9.ss_data_available && vp9.spatial_layer_resolution_present) { + structure.resolutions.emplace_back(vp9.width[sid], vp9.height[sid]); + } + } + return structure; +} + +void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, + int64_t shared_frame_id, + RTPVideoHeader& rtp_video_header) { + const auto& vp9_header = + absl::get(rtp_video_header.video_type_header); + const int num_spatial_layers = vp9_header.num_spatial_layers; + const int num_temporal_layers = kMaxTemporalStreams; + + int spatial_index = + vp9_header.spatial_idx != kNoSpatialIdx ? vp9_header.spatial_idx : 0; + int temporal_index = + vp9_header.temporal_idx != kNoTemporalIdx ? vp9_header.temporal_idx : 0; + + if (spatial_index >= num_spatial_layers || + temporal_index >= num_temporal_layers || + num_spatial_layers > RtpGenericFrameDescriptor::kMaxSpatialLayers) { + // Prefer to generate no generic layering than an inconsistent one. + return; + } + + RTPVideoHeader::GenericDescriptorInfo& result = + rtp_video_header.generic.emplace(); + + result.frame_id = shared_frame_id; + result.spatial_index = spatial_index; + result.temporal_index = temporal_index; + + result.decode_target_indications.reserve(num_spatial_layers * + num_temporal_layers); + for (int sid = 0; sid < num_spatial_layers; ++sid) { + for (int tid = 0; tid < num_temporal_layers; ++tid) { + DecodeTargetIndication dti; + if (sid < spatial_index || tid < temporal_index) { + dti = DecodeTargetIndication::kNotPresent; + } else if (spatial_index != sid && + vp9_header.non_ref_for_inter_layer_pred) { + dti = DecodeTargetIndication::kNotPresent; + } else if (sid == spatial_index && tid == temporal_index) { + // Assume that if frame is decodable, all of its own layer is decodable. 
+ dti = DecodeTargetIndication::kSwitch; + } else if (sid == spatial_index && vp9_header.temporal_up_switch) { + dti = DecodeTargetIndication::kSwitch; + } else if (!vp9_header.inter_pic_predicted) { + // Key frame or spatial upswitch + dti = DecodeTargetIndication::kSwitch; + } else { + // Make no other assumptions. That should be safe, though suboptimal. + // To provide more accurate dti, encoder wrapper should fill in + // CodecSpecificInfo::generic_frame_info + dti = DecodeTargetIndication::kRequired; + } + result.decode_target_indications.push_back(dti); + } + } + + // Calculate frame dependencies. + static constexpr int kPictureDiffLimit = 128; + if (last_vp9_frame_id_.empty()) { + // Create the array only if it is ever used. + last_vp9_frame_id_.resize(kPictureDiffLimit); + } + if (vp9_header.inter_layer_predicted && spatial_index > 0) { + result.dependencies.push_back( + last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit] + [spatial_index - 1]); + } + if (vp9_header.inter_pic_predicted) { + for (size_t i = 0; i < vp9_header.num_ref_pics; ++i) { + // picture_id is 15 bit number that wraps around. Though undeflow may + // produce picture that exceeds 2^15, it is ok because in this + // code block only last 7 bits of the picture_id are used. + uint16_t depend_on = vp9_header.picture_id - vp9_header.pid_diff[i]; + result.dependencies.push_back( + last_vp9_frame_id_[depend_on % kPictureDiffLimit][spatial_index]); + } + } + last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit][spatial_index] = + shared_frame_id; + + // Calculate chains, asuming chain includes all frames with temporal_id = 0 + if (!vp9_header.inter_pic_predicted && !vp9_header.inter_layer_predicted) { + // Assume frames without dependencies also reset chains. 
+ for (int sid = spatial_index; sid < num_spatial_layers; ++sid) { + chain_last_frame_id_[sid] = -1; + } + } + result.chain_diffs.resize(num_spatial_layers); + for (int sid = 0; sid < num_spatial_layers; ++sid) { + if (chain_last_frame_id_[sid] == -1) { + result.chain_diffs[sid] = 0; + continue; + } + result.chain_diffs[sid] = shared_frame_id - chain_last_frame_id_[sid]; + } + + if (temporal_index == 0) { + chain_last_frame_id_[spatial_index] = shared_frame_id; + if (!vp9_header.non_ref_for_inter_layer_pred) { + for (int sid = spatial_index + 1; sid < num_spatial_layers; ++sid) { + chain_last_frame_id_[sid] = shared_frame_id; + } + } + } +} + void RtpPayloadParams::SetDependenciesVp8Deprecated( const CodecSpecificInfoVP8& vp8_info, int64_t shared_frame_id, diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h index 2e0faeb5c..da53cbc5c 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h @@ -12,6 +12,7 @@ #define CALL_RTP_PAYLOAD_PARAMS_H_ #include +#include #include "absl/types/optional.h" #include "api/transport/webrtc_key_value_config.h" @@ -41,6 +42,14 @@ class RtpPayloadParams final { const CodecSpecificInfo* codec_specific_info, int64_t shared_frame_id); + // Returns structure that aligns with simulated generic info for VP9. + // The templates allow to produce valid dependency descriptor for any vp9 + // stream with up to 4 temporal layers. The set of the templates is not tuned + // for any paricular structure thus dependency descriptor would use more bytes + // on the wire than with tuned templates. 
+ static FrameDependencyStructure MinimalisticVp9Structure( + const CodecSpecificInfoVP9& vp9); + uint32_t ssrc() const; RtpPayloadState state() const; @@ -50,8 +59,7 @@ class RtpPayloadParams final { bool first_frame_in_picture); RTPVideoHeader::GenericDescriptorInfo GenericDescriptorFromFrameInfo( const GenericFrameInfo& frame_info, - int64_t frame_id, - VideoFrameType frame_type); + int64_t frame_id); void SetGeneric(const CodecSpecificInfo* codec_specific_info, int64_t frame_id, bool is_keyframe, @@ -62,6 +70,10 @@ class RtpPayloadParams final { bool is_keyframe, RTPVideoHeader* rtp_video_header); + void Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, + int64_t shared_frame_id, + RTPVideoHeader& rtp_video_header); + void H264ToGeneric(const CodecSpecificInfoH264& h264_info, int64_t shared_frame_id, bool is_keyframe, @@ -95,6 +107,13 @@ class RtpPayloadParams final { std::array, RtpGenericFrameDescriptor::kMaxSpatialLayers> last_shared_frame_id_; + // circular buffer of frame ids for the last 128 vp9 pictures. + // ids for the `picture_id` are stored at the index `picture_id % 128`. + std::vector> + last_vp9_frame_id_; + // Last frame id for each chain + std::array + chain_last_frame_id_; // TODO(eladalon): When additional codecs are supported, // set kMaxCodecBuffersCount to the max() of these codecs' buffer count. 
@@ -114,6 +133,7 @@ class RtpPayloadParams final { RtpPayloadState state_; const bool generic_picture_id_experiment_; + const bool simulate_generic_vp9_; }; } // namespace webrtc #endif // CALL_RTP_PAYLOAD_PARAMS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.cc index f440b426d..7150b34bd 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.cc @@ -37,11 +37,7 @@ RtpStreamReceiverController::Receiver::~Receiver() { controller_->RemoveSink(sink_); } -RtpStreamReceiverController::RtpStreamReceiverController() { - // At this level the demuxer is only configured to demux by SSRC, so don't - // worry about MIDs (MIDs are handled by upper layers). - demuxer_.set_use_mid(false); -} +RtpStreamReceiverController::RtpStreamReceiverController() {} RtpStreamReceiverController::~RtpStreamReceiverController() = default; @@ -52,19 +48,19 @@ RtpStreamReceiverController::CreateReceiver(uint32_t ssrc, } bool RtpStreamReceiverController::OnRtpPacket(const RtpPacketReceived& packet) { - rtc::CritScope cs(&lock_); + RTC_DCHECK_RUN_ON(&demuxer_sequence_); return demuxer_.OnRtpPacket(packet); } bool RtpStreamReceiverController::AddSink(uint32_t ssrc, RtpPacketSinkInterface* sink) { - rtc::CritScope cs(&lock_); + RTC_DCHECK_RUN_ON(&demuxer_sequence_); return demuxer_.AddSink(ssrc, sink); } size_t RtpStreamReceiverController::RemoveSink( const RtpPacketSinkInterface* sink) { - rtc::CritScope cs(&lock_); + RTC_DCHECK_RUN_ON(&demuxer_sequence_); return demuxer_.RemoveSink(sink); } diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.h b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.h index 62447aa52..284c9fa12 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.h +++ 
b/TMessagesProj/jni/voip/webrtc/call/rtp_stream_receiver_controller.h @@ -12,9 +12,9 @@ #include +#include "api/sequence_checker.h" #include "call/rtp_demuxer.h" #include "call/rtp_stream_receiver_controller_interface.h" -#include "rtc_base/deprecated/recursive_critical_section.h" namespace webrtc { @@ -58,13 +58,18 @@ class RtpStreamReceiverController RtpPacketSinkInterface* const sink_; }; - // TODO(nisse): Move to a TaskQueue for synchronization. When used - // by Call, we expect construction and all methods but OnRtpPacket - // to be called on the same thread, and OnRtpPacket to be called - // by a single, but possibly distinct, thread. But applications not - // using Call may have use threads differently. - rtc::RecursiveCriticalSection lock_; - RtpDemuxer demuxer_ RTC_GUARDED_BY(&lock_); + // TODO(bugs.webrtc.org/11993): We expect construction and all methods to be + // called on the same thread/tq. Currently this is the worker thread + // (including OnRtpPacket) but a more natural fit would be the network thread. + // Using a sequence checker to ensure that usage is correct but at the same + // time not require a specific thread/tq, an instance of this class + the + // associated functionality should be easily moved from one execution context + // to another (i.e. when network packets don't hop to the worker thread inside + // of Call). + SequenceChecker demuxer_sequence_; + // At this level the demuxer is only configured to demux by SSRC, so don't + // worry about MIDs (MIDs are handled by upper layers). 
+ RtpDemuxer demuxer_ RTC_GUARDED_BY(&demuxer_sequence_){false /*use_mid*/}; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc index f5adae68a..d743a0bf4 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc @@ -87,7 +87,7 @@ RtpTransportControllerSend::RtpTransportControllerSend( : clock_(clock), event_log_(event_log), bitrate_configurator_(bitrate_config), - process_thread_started_(false), + pacer_started_(false), process_thread_(std::move(process_thread)), use_task_queue_pacer_(IsEnabled(trials, "WebRTC-TaskQueuePacer")), process_thread_pacer_(use_task_queue_pacer_ @@ -496,9 +496,13 @@ void RtpTransportControllerSend::IncludeOverheadInPacedSender() { } void RtpTransportControllerSend::EnsureStarted() { - if (!use_task_queue_pacer_ && !process_thread_started_) { - process_thread_started_ = true; - process_thread_->Start(); + if (!pacer_started_) { + pacer_started_ = true; + if (use_task_queue_pacer_) { + task_queue_pacer_->EnsureStarted(); + } else { + process_thread_->Start(); + } } } diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h index 7025b0331..f0f74c9f2 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h @@ -152,7 +152,7 @@ class RtpTransportControllerSend final std::vector> video_rtp_senders_; RtpBitrateConfigurator bitrate_configurator_; std::map network_routes_; - bool process_thread_started_; + bool pacer_started_; const std::unique_ptr process_thread_; const bool use_task_queue_pacer_; std::unique_ptr process_thread_pacer_; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h 
b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h index 602908e2a..2aa6d739d 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send_interface.h @@ -40,25 +40,18 @@ class TaskQueue; } // namespace rtc namespace webrtc { -class CallStatsObserver; class FrameEncryptorInterface; class TargetTransferRateObserver; class Transport; -class Module; -class PacedSender; class PacketRouter; class RtpVideoSenderInterface; -class RateLimiter; class RtcpBandwidthObserver; class RtpPacketSender; -class SendDelayStats; -class SendStatisticsProxy; struct RtpSenderObservers { RtcpRttStats* rtcp_rtt_stats; RtcpIntraFrameObserver* intra_frame_callback; RtcpLossNotificationObserver* rtcp_loss_notification_observer; - RtcpStatisticsCallback* rtcp_stats; ReportBlockDataObserver* report_block_data_observer; StreamDataCountersCallback* rtp_stats; BitrateStatisticsObserver* bitrate_observer; @@ -141,7 +134,13 @@ class RtpTransportControllerSendInterface { virtual int64_t GetPacerQueuingDelayMs() const = 0; virtual absl::optional GetFirstPacketTime() const = 0; virtual void EnablePeriodicAlrProbing(bool enable) = 0; + + // Called when a packet has been sent. + // The call should arrive on the network thread, but may not in all cases + // (some tests don't adhere to this). Implementations today should not block + // the calling thread or make assumptions about the thread context. 
virtual void OnSentPacket(const rtc::SentPacket& sent_packet) = 0; + virtual void OnReceivedPacket(const ReceivedPacket& received_packet) = 0; virtual void SetSdpBitrateParameters( diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc index 9dad424c8..c2a6a564f 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc @@ -216,7 +216,6 @@ std::vector CreateRtpStreamSenders( configuration.rtt_stats = observers.rtcp_rtt_stats; configuration.rtcp_packet_type_counter_observer = observers.rtcp_type_observer; - configuration.rtcp_statistics_callback = observers.rtcp_stats; configuration.report_block_data_observer = observers.report_block_data_observer; configuration.paced_sender = transport->packet_sender(); @@ -290,15 +289,6 @@ std::vector CreateRtpStreamSenders( return rtp_streams; } -DataRate CalculateOverheadRate(DataRate data_rate, - DataSize packet_size, - DataSize overhead_per_packet) { - Frequency packet_rate = data_rate / packet_size; - // TOSO(srte): We should not need to round to nearest whole packet per second - // rate here. - return packet_rate.RoundUpTo(Frequency::Hertz(1)) * overhead_per_packet; -} - absl::optional GetVideoCodecType(const RtpConfig& config) { if (config.raw_payload) { return absl::nullopt; @@ -310,6 +300,48 @@ bool TransportSeqNumExtensionConfigured(const RtpConfig& config) { return ext.uri == RtpExtension::kTransportSequenceNumberUri; }); } + +// Returns true when some coded video sequence can be decoded starting with +// this frame without requiring any previous frames. +// e.g. it is the same as a key frame when spatial scalability is not used. +// When spatial scalability is used, then it is true for layer frames of +// a key frame without inter-layer dependencies. 
+bool IsFirstFrameOfACodedVideoSequence( + const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) { + if (encoded_image._frameType != VideoFrameType::kVideoFrameKey) { + return false; + } + + if (codec_specific_info != nullptr) { + if (codec_specific_info->generic_frame_info.has_value()) { + // This function is used before + // `codec_specific_info->generic_frame_info->frame_diffs` are calculated, + // so need to use a more complicated way to check for presence of the + // dependencies. + return absl::c_none_of( + codec_specific_info->generic_frame_info->encoder_buffers, + [](const CodecBufferUsage& buffer) { return buffer.referenced; }); + } + + if (codec_specific_info->codecType == VideoCodecType::kVideoCodecVP8 || + codec_specific_info->codecType == VideoCodecType::kVideoCodecH264 || + codec_specific_info->codecType == VideoCodecType::kVideoCodecGeneric) { + // These codecs do not support intra picture dependencies, so a frame + // marked as a key frame should be a key frame. + return true; + } + } + + // Without depenedencies described in generic format do an educated guess. + // It might be wrong for VP9 with spatial layer 0 skipped or higher spatial + // layer not depending on the spatial layer 0. This corner case is unimportant + // for current usage of this helper function. + + // Use <= to accept both 0 (i.e. the first) and nullopt (i.e. the only). 
+ return encoded_image.SpatialIndex() <= 0; +} + } // namespace RtpVideoSender::RtpVideoSender( @@ -330,7 +362,13 @@ RtpVideoSender::RtpVideoSender( : send_side_bwe_with_overhead_(!absl::StartsWith( field_trials_.Lookup("WebRTC-SendSideBwe-WithOverhead"), "Disabled")), + use_frame_rate_for_overhead_(absl::StartsWith( + field_trials_.Lookup("WebRTC-Video-UseFrameRateForOverhead"), + "Enabled")), has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)), + simulate_vp9_structure_(absl::StartsWith( + field_trials_.Lookup("WebRTC-Vp9DependencyDescriptor"), + "Enabled")), active_(false), module_process_thread_(nullptr), suspended_ssrcs_(std::move(suspended_ssrcs)), @@ -375,18 +413,6 @@ RtpVideoSender::RtpVideoSender( // RTP/RTCP initialization. - // We add the highest spatial layer first to ensure it'll be prioritized - // when sending padding, with the hope that the packet rate will be smaller, - // and that it's more important to protect than the lower layers. - - // TODO(nisse): Consider moving registration with PacketRouter last, after the - // modules are fully configured. 
- for (const RtpStreamSender& stream : rtp_streams_) { - constexpr bool remb_candidate = true; - transport->packet_router()->AddSendRtpModule(stream.rtp_rtcp.get(), - remb_candidate); - } - for (size_t i = 0; i < rtp_config_.extensions.size(); ++i) { const std::string& extension = rtp_config_.extensions[i].uri; int id = rtp_config_.extensions[i].id; @@ -427,9 +453,8 @@ RtpVideoSender::RtpVideoSender( } RtpVideoSender::~RtpVideoSender() { - for (const RtpStreamSender& stream : rtp_streams_) { - transport_->packet_router()->RemoveSendRtpModule(stream.rtp_rtcp.get()); - } + SetActiveModulesLocked( + std::vector(rtp_streams_.size(), /*active=*/false)); transport_->GetStreamFeedbackProvider()->DeRegisterStreamFeedbackObserver( this); } @@ -473,10 +498,29 @@ void RtpVideoSender::SetActiveModulesLocked( if (active_modules[i]) { active_ = true; } + + RtpRtcpInterface& rtp_module = *rtp_streams_[i].rtp_rtcp; + const bool was_active = rtp_module.SendingMedia(); + const bool should_be_active = active_modules[i]; + // Sends a kRtcpByeCode when going from true to false. - rtp_streams_[i].rtp_rtcp->SetSendingStatus(active_modules[i]); + rtp_module.SetSendingStatus(active_modules[i]); + + if (was_active && !should_be_active) { + // Disabling media, remove from packet router map to reduce size and + // prevent any stray packets in the pacer from asynchronously arriving + // to a disabled module. + transport_->packet_router()->RemoveSendRtpModule(&rtp_module); + } + // If set to false this module won't send media. - rtp_streams_[i].rtp_rtcp->SetSendingMediaStatus(active_modules[i]); + rtp_module.SetSendingMediaStatus(active_modules[i]); + + if (!was_active && should_be_active) { + // Turning on media, register with packet router. 
+ transport_->packet_router()->AddSendRtpModule(&rtp_module, + /*remb_candidate=*/true); + } } } @@ -532,14 +576,22 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( rtp_streams_[stream_index].rtp_rtcp->ExpectedRetransmissionTimeMs(); } - if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) { + if (IsFirstFrameOfACodedVideoSequence(encoded_image, codec_specific_info)) { // If encoder adapter produce FrameDependencyStructure, pass it so that // dependency descriptor rtp header extension can be used. // If not supported, disable using dependency descriptor by passing nullptr. - rtp_streams_[stream_index].sender_video->SetVideoStructure( - (codec_specific_info && codec_specific_info->template_structure) - ? &*codec_specific_info->template_structure - : nullptr); + RTPSenderVideo& sender_video = *rtp_streams_[stream_index].sender_video; + if (codec_specific_info && codec_specific_info->template_structure) { + sender_video.SetVideoStructure(&*codec_specific_info->template_structure); + } else if (simulate_vp9_structure_ && codec_specific_info && + codec_specific_info->codecType == kVideoCodecVP9) { + FrameDependencyStructure structure = + RtpPayloadParams::MinimalisticVp9Structure( + codec_specific_info->codecSpecific.VP9); + sender_video.SetVideoStructure(&structure); + } else { + sender_video.SetVideoStructure(nullptr); + } } bool send_result = rtp_streams_[stream_index].sender_video->SendEncodedImage( @@ -766,8 +818,9 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, rtp_config_.max_packet_size + transport_overhead_bytes_per_packet_); uint32_t payload_bitrate_bps = update.target_bitrate.bps(); if (send_side_bwe_with_overhead_ && has_packet_feedback_) { - DataRate overhead_rate = CalculateOverheadRate( - update.target_bitrate, max_total_packet_size, packet_overhead); + DataRate overhead_rate = + CalculateOverheadRate(update.target_bitrate, max_total_packet_size, + packet_overhead, Frequency::Hertz(framerate)); // 
TODO(srte): We probably should not accept 0 payload bitrate here. payload_bitrate_bps = rtc::saturated_cast(payload_bitrate_bps - overhead_rate.bps()); @@ -806,7 +859,7 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, DataRate encoder_overhead_rate = CalculateOverheadRate( DataRate::BitsPerSec(encoder_target_rate_bps_), max_total_packet_size - DataSize::Bytes(overhead_bytes_per_packet), - packet_overhead); + packet_overhead, Frequency::Hertz(framerate)); encoder_overhead_rate_bps = std::min( encoder_overhead_rate.bps(), update.target_bitrate.bps() - encoder_target_rate_bps_); @@ -927,4 +980,19 @@ void RtpVideoSender::SetEncodingData(size_t width, fec_controller_->SetEncodingData(width, height, num_temporal_layers, rtp_config_.max_packet_size); } + +DataRate RtpVideoSender::CalculateOverheadRate(DataRate data_rate, + DataSize packet_size, + DataSize overhead_per_packet, + Frequency framerate) const { + Frequency packet_rate = data_rate / packet_size; + if (use_frame_rate_for_overhead_) { + framerate = std::max(framerate, Frequency::Hertz(1)); + DataSize frame_size = data_rate / framerate; + int packets_per_frame = ceil(frame_size / packet_size); + packet_rate = packets_per_frame * framerate; + } + return packet_rate.RoundUpTo(Frequency::Hertz(1)) * overhead_per_packet; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h index 49fd3cc0d..611edc6b2 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h @@ -22,6 +22,7 @@ #include "api/fec_controller.h" #include "api/fec_controller_override.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/sequence_checker.h" #include "api/transport/field_trial_based_config.h" #include "api/video_codecs/video_encoder.h" #include "call/rtp_config.h" @@ -39,7 +40,6 @@ #include "rtc_base/rate_limiter.h" #include 
"rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -169,10 +169,16 @@ class RtpVideoSender : public RtpVideoSenderInterface, void ConfigureRids(); bool NackEnabled() const; uint32_t GetPacketizationOverheadRate() const; + DataRate CalculateOverheadRate(DataRate data_rate, + DataSize packet_size, + DataSize overhead_per_packet, + Frequency framerate) const; const FieldTrialBasedConfig field_trials_; const bool send_side_bwe_with_overhead_; + const bool use_frame_rate_for_overhead_; const bool has_packet_feedback_; + const bool simulate_vp9_structure_; // TODO(holmer): Remove mutex_ once RtpVideoSender runs on the // transport task queue. @@ -180,7 +186,7 @@ class RtpVideoSender : public RtpVideoSenderInterface, bool active_ RTC_GUARDED_BY(mutex_); ProcessThread* module_process_thread_; - rtc::ThreadChecker module_process_thread_checker_; + SequenceChecker module_process_thread_checker_; std::map suspended_ssrcs_; const std::unique_ptr fec_controller_; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.cc index 9e4a41bc8..c0b138b41 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtx_receive_stream.cc @@ -64,7 +64,7 @@ void RtxReceiveStream::OnRtpPacket(const RtpPacketReceived& rtx_packet) { media_packet.SetSequenceNumber((payload[0] << 8) + payload[1]); media_packet.SetPayloadType(it->second); media_packet.set_recovered(true); - media_packet.set_arrival_time_ms(rtx_packet.arrival_time_ms()); + media_packet.set_arrival_time(rtx_packet.arrival_time()); // Skip the RTX header. 
rtc::ArrayView rtx_payload = payload.subview(kRtxHeaderSize); diff --git a/TMessagesProj/jni/voip/webrtc/call/simulated_network.h b/TMessagesProj/jni/voip/webrtc/call/simulated_network.h index b53ecc0dd..68d066cb8 100644 --- a/TMessagesProj/jni/voip/webrtc/call/simulated_network.h +++ b/TMessagesProj/jni/voip/webrtc/call/simulated_network.h @@ -17,6 +17,7 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/test/simulated_network.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" @@ -24,7 +25,6 @@ #include "rtc_base/random.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" namespace webrtc { // Implementation of the CoDel active queue management algorithm. Loosely based diff --git a/TMessagesProj/jni/voip/webrtc/call/version.cc b/TMessagesProj/jni/voip/webrtc/call/version.cc new file mode 100644 index 000000000..bc64f0796 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/call/version.cc @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/version.h" + +namespace webrtc { + +// The timestamp is always in UTC. +const char* const kSourceTimestamp = "WebRTC source stamp 2021-05-20T04:01:58"; + +void LoadWebRTCVersionInRegister() { + // Using volatile to instruct the compiler to not optimize `p` away even + // if it looks unused. 
+ const char* volatile p = kSourceTimestamp; + static_cast(p); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/version.h b/TMessagesProj/jni/voip/webrtc/call/version.h new file mode 100644 index 000000000..d476e0e10 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/call/version.h @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_VERSION_H_ +#define CALL_VERSION_H_ + +// LoadWebRTCVersionInRegistry is a helper function that loads the pointer to +// the WebRTC version string into a register. While this function doesn't do +// anything useful, it is needed in order to avoid that compiler optimizations +// remove the WebRTC version string from the final binary. + +namespace webrtc { + +void LoadWebRTCVersionInRegister(); + +} // namespace webrtc + +#endif // CALL_VERSION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h index 7a6803d9e..4a0a0dcae 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h @@ -215,6 +215,10 @@ class VideoReceiveStream { // Set if the stream is protected using FlexFEC. bool protected_by_flexfec = false; + // Optional callback sink to support additional packet handlsers such as + // FlexFec. + RtpPacketSinkInterface* packet_sink_ = nullptr; + // Map from rtx payload type -> media payload type. // For RTX to be enabled, both an SSRC and this mapping are needed. 
std::map rtx_associated_payload_types; @@ -252,10 +256,6 @@ class VideoReceiveStream { // used for streaming instead of a real-time call. int target_delay_ms = 0; - // TODO(nisse): Used with VideoDecoderFactory::LegacyCreateVideoDecoder. - // Delete when that method is retired. - std::string stream_id; - // An optional custom frame decryptor that allows the entire frame to be // decrypted in whatever way the caller choses. This is not required by // default. @@ -277,13 +277,6 @@ class VideoReceiveStream { // TODO(pbos): Add info on currently-received codec to Stats. virtual Stats GetStats() const = 0; - // RtpDemuxer only forwards a given RTP packet to one sink. However, some - // sinks, such as FlexFEC, might wish to be informed of all of the packets - // a given sink receives (or any set of sinks). They may do so by registering - // themselves as secondary sinks. - virtual void AddSecondarySink(RtpPacketSinkInterface* sink) = 0; - virtual void RemoveSecondarySink(const RtpPacketSinkInterface* sink) = 0; - virtual std::vector GetSources() const = 0; // Sets a base minimum for the playout delay. Base minimum delay sets lower @@ -324,6 +317,16 @@ class VideoReceiveStream { virtual ~VideoReceiveStream() {} }; +class DEPRECATED_VideoReceiveStream : public VideoReceiveStream { + public: + // RtpDemuxer only forwards a given RTP packet to one sink. However, some + // sinks, such as FlexFEC, might wish to be informed of all of the packets + // a given sink receives (or any set of sinks). They may do so by registering + // themselves as secondary sinks. 
+ virtual void AddSecondarySink(RtpPacketSinkInterface* sink) = 0; + virtual void RemoveSecondarySink(const RtpPacketSinkInterface* sink) = 0; +}; + } // namespace webrtc #endif // CALL_VIDEO_RECEIVE_STREAM_H_ diff --git a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.cc b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.cc index 244d78089..25513e4e4 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.cc @@ -51,8 +51,13 @@ std::string VideoSendStream::StreamStats::ToString() const { ss << "retransmit_bps: " << retransmit_bitrate_bps << ", "; ss << "avg_delay_ms: " << avg_delay_ms << ", "; ss << "max_delay_ms: " << max_delay_ms << ", "; - ss << "cum_loss: " << rtcp_stats.packets_lost << ", "; - ss << "max_ext_seq: " << rtcp_stats.extended_highest_sequence_number << ", "; + if (report_block_data) { + ss << "cum_loss: " << report_block_data->report_block().packets_lost + << ", "; + ss << "max_ext_seq: " + << report_block_data->report_block().extended_highest_sequence_number + << ", "; + } ss << "nack: " << rtcp_packet_type_counts.nack_packets << ", "; ss << "fir: " << rtcp_packet_type_counts.fir_packets << ", "; ss << "pli: " << rtcp_packet_type_counts.pli_packets; diff --git a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h index 0df9e6ce0..fd7a101b0 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h @@ -82,7 +82,6 @@ class VideoSendStream { uint64_t total_packet_send_delay_ms = 0; StreamDataCounters rtp_stats; RtcpPacketTypeCounter rtcp_packet_type_counts; - RtcpStatistics rtcp_stats; // A snapshot of the most recent Report Block with additional data of // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. 
absl::optional report_block_data; @@ -108,6 +107,7 @@ class VideoSendStream { uint64_t total_encode_time_ms = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalencodedbytestarget uint64_t total_encoded_bytes_target = 0; + uint32_t frames = 0; uint32_t frames_dropped_by_capturer = 0; uint32_t frames_dropped_by_encoder_queue = 0; uint32_t frames_dropped_by_rate_limiter = 0; diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/OWNERS b/TMessagesProj/jni/voip/webrtc/common_audio/OWNERS index ba1c8b11f..4cb53169b 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/common_audio/OWNERS @@ -1,3 +1,3 @@ henrik.lundin@webrtc.org -kwiberg@webrtc.org +minyue@webrtc.org peah@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/division_operations.c b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/division_operations.c index c6195e799..4764ddfcc 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/division_operations.c +++ b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/division_operations.c @@ -98,8 +98,7 @@ int32_t WebRtcSpl_DivResultInQ31(int32_t num, int32_t den) return div; } -int32_t RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/5486 -WebRtcSpl_DivW32HiLow(int32_t num, int16_t den_hi, int16_t den_low) +int32_t WebRtcSpl_DivW32HiLow(int32_t num, int16_t den_hi, int16_t den_low) { int16_t approx, tmp_hi, tmp_low, num_hi, num_low; int32_t tmpW32; @@ -111,8 +110,8 @@ WebRtcSpl_DivW32HiLow(int32_t num, int16_t den_hi, int16_t den_low) tmpW32 = (den_hi * approx << 1) + ((den_low * approx >> 15) << 1); // tmpW32 = den * approx - tmpW32 = (int32_t)0x7fffffffL - tmpW32; // result in Q30 (tmpW32 = 2.0-(den*approx)) - // UBSan: 2147483647 - -2 cannot be represented in type 'int' + // result in Q30 (tmpW32 = 2.0-(den*approx)) + tmpW32 = (int32_t)((int64_t)0x7fffffffL - tmpW32); // Store tmpW32 in hi and low 
format tmp_hi = (int16_t)(tmpW32 >> 16); diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/signal_processing_library.h b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/signal_processing_library.h index 4ad92c4c2..0c13071a2 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/signal_processing_library.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/include/signal_processing_library.h @@ -228,6 +228,25 @@ int32_t WebRtcSpl_MinValueW32Neon(const int32_t* vector, size_t length); int32_t WebRtcSpl_MinValueW32_mips(const int32_t* vector, size_t length); #endif +// Returns both the minimum and maximum values of a 16-bit vector. +// +// Input: +// - vector : 16-bit input vector. +// - length : Number of samples in vector. +// Ouput: +// - max_val : Maximum sample value in |vector|. +// - min_val : Minimum sample value in |vector|. +void WebRtcSpl_MinMaxW16(const int16_t* vector, + size_t length, + int16_t* min_val, + int16_t* max_val); +#if defined(WEBRTC_HAS_NEON) +void WebRtcSpl_MinMaxW16Neon(const int16_t* vector, + size_t length, + int16_t* min_val, + int16_t* max_val); +#endif + // Returns the vector index to the largest absolute value of a 16-bit vector. // // Input: @@ -240,6 +259,17 @@ int32_t WebRtcSpl_MinValueW32_mips(const int32_t* vector, size_t length); // -32768 presenting an int16 absolute value of 32767). size_t WebRtcSpl_MaxAbsIndexW16(const int16_t* vector, size_t length); +// Returns the element with the largest absolute value of a 16-bit vector. Note +// that this function can return a negative value. +// +// Input: +// - vector : 16-bit input vector. +// - length : Number of samples in vector. +// +// Return value : The element with the largest absolute value. Note that this +// may be a negative value. 
+int16_t WebRtcSpl_MaxAbsElementW16(const int16_t* vector, size_t length); + // Returns the vector index to the maximum sample value of a 16-bit vector. // // Input: diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/min_max_operations.c b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/min_max_operations.c index d249a02d4..1b9542e7e 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/min_max_operations.c +++ b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/min_max_operations.c @@ -155,6 +155,15 @@ size_t WebRtcSpl_MaxAbsIndexW16(const int16_t* vector, size_t length) { return index; } +int16_t WebRtcSpl_MaxAbsElementW16(const int16_t* vector, size_t length) { + int16_t min_val, max_val; + WebRtcSpl_MinMaxW16(vector, length, &min_val, &max_val); + if (min_val == max_val || min_val < -max_val) { + return min_val; + } + return max_val; +} + // Index of maximum value in a word16 vector. size_t WebRtcSpl_MaxIndexW16(const int16_t* vector, size_t length) { size_t i = 0, index = 0; @@ -222,3 +231,26 @@ size_t WebRtcSpl_MinIndexW32(const int32_t* vector, size_t length) { return index; } + +// Finds both the minimum and maximum elements in an array of 16-bit integers. 
+void WebRtcSpl_MinMaxW16(const int16_t* vector, size_t length, + int16_t* min_val, int16_t* max_val) { +#if defined(WEBRTC_HAS_NEON) + return WebRtcSpl_MinMaxW16Neon(vector, length, min_val, max_val); +#else + int16_t minimum = WEBRTC_SPL_WORD16_MAX; + int16_t maximum = WEBRTC_SPL_WORD16_MIN; + size_t i = 0; + + RTC_DCHECK_GT(length, 0); + + for (i = 0; i < length; i++) { + if (vector[i] < minimum) + minimum = vector[i]; + if (vector[i] > maximum) + maximum = vector[i]; + } + *min_val = minimum; + *max_val = maximum; +#endif +} diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/min_max_operations_neon.c b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/min_max_operations_neon.c index 53217df7b..e5b4b7c71 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/min_max_operations_neon.c +++ b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/min_max_operations_neon.c @@ -281,3 +281,53 @@ int32_t WebRtcSpl_MinValueW32Neon(const int32_t* vector, size_t length) { return minimum; } +// Finds both the minimum and maximum elements in an array of 16-bit integers. +void WebRtcSpl_MinMaxW16Neon(const int16_t* vector, size_t length, + int16_t* min_val, int16_t* max_val) { + int16_t minimum = WEBRTC_SPL_WORD16_MAX; + int16_t maximum = WEBRTC_SPL_WORD16_MIN; + size_t i = 0; + size_t residual = length & 0x7; + + RTC_DCHECK_GT(length, 0); + + const int16_t* p_start = vector; + int16x8_t min16x8 = vdupq_n_s16(WEBRTC_SPL_WORD16_MAX); + int16x8_t max16x8 = vdupq_n_s16(WEBRTC_SPL_WORD16_MIN); + + // First part, unroll the loop 8 times. 
+ for (i = 0; i < length - residual; i += 8) { + int16x8_t in16x8 = vld1q_s16(p_start); + min16x8 = vminq_s16(min16x8, in16x8); + max16x8 = vmaxq_s16(max16x8, in16x8); + p_start += 8; + } + +#if defined(WEBRTC_ARCH_ARM64) + minimum = vminvq_s16(min16x8); + maximum = vmaxvq_s16(max16x8); +#else + int16x4_t min16x4 = vmin_s16(vget_low_s16(min16x8), vget_high_s16(min16x8)); + min16x4 = vpmin_s16(min16x4, min16x4); + min16x4 = vpmin_s16(min16x4, min16x4); + + minimum = vget_lane_s16(min16x4, 0); + + int16x4_t max16x4 = vmax_s16(vget_low_s16(max16x8), vget_high_s16(max16x8)); + max16x4 = vpmax_s16(max16x4, max16x4); + max16x4 = vpmax_s16(max16x4, max16x4); + + maximum = vget_lane_s16(max16x4, 0); +#endif + + // Second part, do the remaining iterations (if any). + for (i = residual; i > 0; i--) { + if (*p_start < minimum) + minimum = *p_start; + if (*p_start > maximum) + maximum = *p_start; + p_start++; + } + *min_val = minimum; + *max_val = maximum; +} diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/splitting_filter.c b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/splitting_filter.c index 399433f0f..b0d83f138 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/splitting_filter.c +++ b/TMessagesProj/jni/voip/webrtc/common_audio/signal_processing/splitting_filter.c @@ -44,9 +44,11 @@ static const uint16_t WebRtcSpl_kAllPassFilter2[3] = {21333, 49062, 63010}; // |data_length| // -void WebRtcSpl_AllPassQMF(int32_t* in_data, size_t data_length, - int32_t* out_data, const uint16_t* filter_coefficients, - int32_t* filter_state) +static void WebRtcSpl_AllPassQMF(int32_t* in_data, + size_t data_length, + int32_t* out_data, + const uint16_t* filter_coefficients, + int32_t* filter_state) { // The procedure is to filter the input with three first order all pass filters // (cascade operations). 
diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_unittest.h b/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_unittest.h deleted file mode 100644 index ee642063a..000000000 --- a/TMessagesProj/jni/voip/webrtc/common_audio/vad/vad_unittest.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef COMMON_AUDIO_VAD_VAD_UNITTEST_H_ -#define COMMON_AUDIO_VAD_VAD_UNITTEST_H_ - -#include // size_t - -#include "test/gtest.h" - -namespace webrtc { -namespace test { - -// Modes we support -const int kModes[] = {0, 1, 2, 3}; -const size_t kModesSize = sizeof(kModes) / sizeof(*kModes); - -// Rates we support. -const int kRates[] = {8000, 12000, 16000, 24000, 32000, 48000}; -const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates); - -// Frame lengths we support. -const size_t kMaxFrameLength = 1440; -const size_t kFrameLengths[] = { - 80, 120, 160, 240, 320, 480, 640, 960, kMaxFrameLength}; -const size_t kFrameLengthsSize = sizeof(kFrameLengths) / sizeof(*kFrameLengths); - -} // namespace test -} // namespace webrtc - -class VadTest : public ::testing::Test { - protected: - VadTest(); - void SetUp() override; - void TearDown() override; - - // Returns true if the rate and frame length combination is valid. 
- bool ValidRatesAndFrameLengths(int rate, size_t frame_length); -}; - -#endif // COMMON_AUDIO_VAD_VAD_UNITTEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/h264_bitstream_parser.cc b/TMessagesProj/jni/voip/webrtc/common_video/h264/h264_bitstream_parser.cc index 5a75f48f8..3b41599fa 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h264/h264_bitstream_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h264/h264_bitstream_parser.cc @@ -28,11 +28,13 @@ const int kMaxQpValue = 51; namespace webrtc { -#define RETURN_ON_FAIL(x, res) \ - if (!(x)) { \ - RTC_LOG_F(LS_ERROR) << "FAILED: " #x; \ - return res; \ - } +#define RETURN_ON_FAIL(x, res) \ + do { \ + if (!(x)) { \ + RTC_LOG_F(LS_ERROR) << "FAILED: " #x; \ + return res; \ + } \ + } while (0) #define RETURN_INV_ON_FAIL(x) RETURN_ON_FAIL(x, kInvalidStream) @@ -62,64 +64,63 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( uint32_t bits_tmp; // first_mb_in_slice: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); // slice_type: ue(v) uint32_t slice_type; - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&slice_type)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(slice_type)); // slice_type's 5..9 range is used to indicate that all slices of a picture // have the same value of slice_type % 5, we don't care about that, so we map // to the corresponding 0..4 range. slice_type %= 5; // pic_parameter_set_id: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); if (sps_->separate_colour_plane_flag == 1) { // colour_plane_id - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 2)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(2, bits_tmp)); } // frame_num: u(v) // Represented by log2_max_frame_num bits. 
- RETURN_INV_ON_FAIL( - slice_reader.ReadBits(&bits_tmp, sps_->log2_max_frame_num)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(sps_->log2_max_frame_num, bits_tmp)); uint32_t field_pic_flag = 0; if (sps_->frame_mbs_only_flag == 0) { // field_pic_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&field_pic_flag, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, field_pic_flag)); if (field_pic_flag != 0) { // bottom_field_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); } } if (is_idr) { // idr_pic_id: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } // pic_order_cnt_lsb: u(v) // Represented by sps_.log2_max_pic_order_cnt_lsb bits. if (sps_->pic_order_cnt_type == 0) { RETURN_INV_ON_FAIL( - slice_reader.ReadBits(&bits_tmp, sps_->log2_max_pic_order_cnt_lsb)); + slice_reader.ReadBits(sps_->log2_max_pic_order_cnt_lsb, bits_tmp)); if (pps_->bottom_field_pic_order_in_frame_present_flag && field_pic_flag == 0) { // delta_pic_order_cnt_bottom: se(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } } if (sps_->pic_order_cnt_type == 1 && !sps_->delta_pic_order_always_zero_flag) { // delta_pic_order_cnt[0]: se(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); if (pps_->bottom_field_pic_order_in_frame_present_flag && !field_pic_flag) { // delta_pic_order_cnt[1]: se(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } } if (pps_->redundant_pic_cnt_present_flag) { // redundant_pic_cnt: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + 
RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } if (slice_type == H264::SliceType::kB) { // direct_spatial_mv_pred_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); } switch (slice_type) { case H264::SliceType::kP: @@ -128,13 +129,13 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( uint32_t num_ref_idx_active_override_flag; // num_ref_idx_active_override_flag: u(1) RETURN_INV_ON_FAIL( - slice_reader.ReadBits(&num_ref_idx_active_override_flag, 1)); + slice_reader.ReadBits(1, num_ref_idx_active_override_flag)); if (num_ref_idx_active_override_flag != 0) { // num_ref_idx_l0_active_minus1: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); if (slice_type == H264::SliceType::kB) { // num_ref_idx_l1_active_minus1: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } } break; @@ -158,20 +159,20 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( // ref_pic_list_modification_flag_l0: u(1) uint32_t ref_pic_list_modification_flag_l0; RETURN_INV_ON_FAIL( - slice_reader.ReadBits(&ref_pic_list_modification_flag_l0, 1)); + slice_reader.ReadBits(1, ref_pic_list_modification_flag_l0)); if (ref_pic_list_modification_flag_l0) { uint32_t modification_of_pic_nums_idc; do { // modification_of_pic_nums_idc: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb( - &modification_of_pic_nums_idc)); + RETURN_INV_ON_FAIL( + slice_reader.ReadExponentialGolomb(modification_of_pic_nums_idc)); if (modification_of_pic_nums_idc == 0 || modification_of_pic_nums_idc == 1) { // abs_diff_pic_num_minus1: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } 
else if (modification_of_pic_nums_idc == 2) { // long_term_pic_num: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } } while (modification_of_pic_nums_idc != 3); } @@ -180,20 +181,20 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( // ref_pic_list_modification_flag_l1: u(1) uint32_t ref_pic_list_modification_flag_l1; RETURN_INV_ON_FAIL( - slice_reader.ReadBits(&ref_pic_list_modification_flag_l1, 1)); + slice_reader.ReadBits(1, ref_pic_list_modification_flag_l1)); if (ref_pic_list_modification_flag_l1) { uint32_t modification_of_pic_nums_idc; do { // modification_of_pic_nums_idc: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb( - &modification_of_pic_nums_idc)); + RETURN_INV_ON_FAIL( + slice_reader.ReadExponentialGolomb(modification_of_pic_nums_idc)); if (modification_of_pic_nums_idc == 0 || modification_of_pic_nums_idc == 1) { // abs_diff_pic_num_minus1: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } else if (modification_of_pic_nums_idc == 2) { // long_term_pic_num: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } } while (modification_of_pic_nums_idc != 3); } @@ -215,35 +216,35 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( if (is_idr) { // no_output_of_prior_pics_flag: u(1) // long_term_reference_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 2)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(2, bits_tmp)); } else { // adaptive_ref_pic_marking_mode_flag: u(1) uint32_t adaptive_ref_pic_marking_mode_flag; RETURN_INV_ON_FAIL( - slice_reader.ReadBits(&adaptive_ref_pic_marking_mode_flag, 1)); + slice_reader.ReadBits(1, adaptive_ref_pic_marking_mode_flag)); if 
(adaptive_ref_pic_marking_mode_flag) { uint32_t memory_management_control_operation; do { // memory_management_control_operation: ue(v) RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb( - &memory_management_control_operation)); + memory_management_control_operation)); if (memory_management_control_operation == 1 || memory_management_control_operation == 3) { // difference_of_pic_nums_minus1: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } if (memory_management_control_operation == 2) { // long_term_pic_num: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } if (memory_management_control_operation == 3 || memory_management_control_operation == 6) { // long_term_frame_idx: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } if (memory_management_control_operation == 4) { // max_long_term_frame_idx_plus1: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } } while (memory_management_control_operation != 0); } @@ -252,12 +253,12 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( if (pps_->entropy_coding_mode_flag && slice_type != H264::SliceType::kI && slice_type != H264::SliceType::kSi) { // cabac_init_idc: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } int32_t last_slice_qp_delta; RETURN_INV_ON_FAIL( - slice_reader.ReadSignedExponentialGolomb(&last_slice_qp_delta)); + slice_reader.ReadSignedExponentialGolomb(last_slice_qp_delta)); if (abs(last_slice_qp_delta) > kMaxAbsQpDeltaValue) { // Something has gone wrong, and the parsed value is invalid. 
RTC_LOG(LS_WARNING) << "Parsed QP value out of range."; @@ -275,14 +276,14 @@ void H264BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { sps_ = SpsParser::ParseSps(slice + H264::kNaluTypeSize, length - H264::kNaluTypeSize); if (!sps_) - RTC_LOG(LS_WARNING) << "Unable to parse SPS from H264 bitstream."; + RTC_DLOG(LS_WARNING) << "Unable to parse SPS from H264 bitstream."; break; } case H264::NaluType::kPps: { pps_ = PpsParser::ParsePps(slice + H264::kNaluTypeSize, length - H264::kNaluTypeSize); if (!pps_) - RTC_LOG(LS_WARNING) << "Unable to parse PPS from H264 bitstream."; + RTC_DLOG(LS_WARNING) << "Unable to parse PPS from H264 bitstream."; break; } case H264::NaluType::kAud: @@ -291,40 +292,29 @@ void H264BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { default: Result res = ParseNonParameterSetNalu(slice, length, nalu_type); if (res != kOk) - RTC_LOG(LS_INFO) << "Failed to parse bitstream. Error: " << res; + RTC_DLOG(LS_INFO) << "Failed to parse bitstream. 
Error: " << res; break; } } -void H264BitstreamParser::ParseBitstream(const uint8_t* bitstream, - size_t length) { - std::vector nalu_indices = - H264::FindNaluIndices(bitstream, length); - for (const H264::NaluIndex& index : nalu_indices) - ParseSlice(&bitstream[index.payload_start_offset], index.payload_size); -} - -bool H264BitstreamParser::GetLastSliceQp(int* qp) const { - if (!last_slice_qp_delta_ || !pps_) - return false; - const int parsed_qp = 26 + pps_->pic_init_qp_minus26 + *last_slice_qp_delta_; - if (parsed_qp < kMinQpValue || parsed_qp > kMaxQpValue) { - RTC_LOG(LS_ERROR) << "Parsed invalid QP from bitstream."; - return false; - } - *qp = parsed_qp; - return true; -} - void H264BitstreamParser::ParseBitstream( rtc::ArrayView bitstream) { - ParseBitstream(bitstream.data(), bitstream.size()); + std::vector nalu_indices = + H264::FindNaluIndices(bitstream.data(), bitstream.size()); + for (const H264::NaluIndex& index : nalu_indices) + ParseSlice(bitstream.data() + index.payload_start_offset, + index.payload_size); } absl::optional H264BitstreamParser::GetLastSliceQp() const { - int qp; - bool success = GetLastSliceQp(&qp); - return success ? 
absl::optional(qp) : absl::nullopt; + if (!last_slice_qp_delta_ || !pps_) + return absl::nullopt; + const int qp = 26 + pps_->pic_init_qp_minus26 + *last_slice_qp_delta_; + if (qp < kMinQpValue || qp > kMaxQpValue) { + RTC_LOG(LS_ERROR) << "Parsed invalid QP from bitstream."; + return absl::nullopt; + } + return qp; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/h264_bitstream_parser.h b/TMessagesProj/jni/voip/webrtc/common_video/h264/h264_bitstream_parser.h index 48190665f..05427825a 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h264/h264_bitstream_parser.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/h264/h264_bitstream_parser.h @@ -31,11 +31,6 @@ class H264BitstreamParser : public BitstreamParser { H264BitstreamParser(); ~H264BitstreamParser() override; - // These are here for backwards-compatability for the time being. - void ParseBitstream(const uint8_t* bitstream, size_t length); - bool GetLastSliceQp(int* qp) const; - - // New interface. 
void ParseBitstream(rtc::ArrayView bitstream) override; absl::optional GetLastSliceQp() const override; diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/pps_parser.cc b/TMessagesProj/jni/voip/webrtc/common_video/h264/pps_parser.cc index ae0165218..3d3725f95 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h264/pps_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h264/pps_parser.cc @@ -18,9 +18,11 @@ #include "rtc_base/checks.h" #define RETURN_EMPTY_ON_FAIL(x) \ - if (!(x)) { \ - return absl::nullopt; \ - } + do { \ + if (!(x)) { \ + return absl::nullopt; \ + } \ + } while (0) namespace { const int kMaxPicInitQpDeltaValue = 25; @@ -64,14 +66,14 @@ absl::optional PpsParser::ParsePpsIdFromSlice(const uint8_t* data, uint32_t golomb_tmp; // first_mb_in_slice: ue(v) - if (!slice_reader.ReadExponentialGolomb(&golomb_tmp)) + if (!slice_reader.ReadExponentialGolomb(golomb_tmp)) return absl::nullopt; // slice_type: ue(v) - if (!slice_reader.ReadExponentialGolomb(&golomb_tmp)) + if (!slice_reader.ReadExponentialGolomb(golomb_tmp)) return absl::nullopt; // pic_parameter_set_id: ue(v) uint32_t slice_pps_id; - if (!slice_reader.ReadExponentialGolomb(&slice_pps_id)) + if (!slice_reader.ReadExponentialGolomb(slice_pps_id)) return absl::nullopt; return slice_pps_id; } @@ -86,30 +88,29 @@ absl::optional PpsParser::ParseInternal( uint32_t golomb_ignored; // entropy_coding_mode_flag: u(1) uint32_t entropy_coding_mode_flag; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&entropy_coding_mode_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, entropy_coding_mode_flag)); pps.entropy_coding_mode_flag = entropy_coding_mode_flag != 0; // bottom_field_pic_order_in_frame_present_flag: u(1) uint32_t bottom_field_pic_order_in_frame_present_flag; RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadBits(&bottom_field_pic_order_in_frame_present_flag, 1)); + bit_buffer->ReadBits(1, bottom_field_pic_order_in_frame_present_flag)); pps.bottom_field_pic_order_in_frame_present_flag = 
bottom_field_pic_order_in_frame_present_flag != 0; // num_slice_groups_minus1: ue(v) uint32_t num_slice_groups_minus1; RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadExponentialGolomb(&num_slice_groups_minus1)); + bit_buffer->ReadExponentialGolomb(num_slice_groups_minus1)); if (num_slice_groups_minus1 > 0) { uint32_t slice_group_map_type; // slice_group_map_type: ue(v) RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadExponentialGolomb(&slice_group_map_type)); + bit_buffer->ReadExponentialGolomb(slice_group_map_type)); if (slice_group_map_type == 0) { for (uint32_t i_group = 0; i_group <= num_slice_groups_minus1; ++i_group) { // run_length_minus1[iGroup]: ue(v) - RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); } } else if (slice_group_map_type == 1) { // TODO(sprang): Implement support for dispersed slice group map type. @@ -118,23 +119,21 @@ absl::optional PpsParser::ParseInternal( for (uint32_t i_group = 0; i_group <= num_slice_groups_minus1; ++i_group) { // top_left[iGroup]: ue(v) - RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); // bottom_right[iGroup]: ue(v) - RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); } } else if (slice_group_map_type == 3 || slice_group_map_type == 4 || slice_group_map_type == 5) { // slice_group_change_direction_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); // slice_group_change_rate_minus1: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); } else if (slice_group_map_type == 6) { // pic_size_in_map_units_minus1: ue(v) uint32_t pic_size_in_map_units_minus1; 
RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadExponentialGolomb(&pic_size_in_map_units_minus1)); + bit_buffer->ReadExponentialGolomb(pic_size_in_map_units_minus1)); uint32_t slice_group_id_bits = 0; uint32_t num_slice_groups = num_slice_groups_minus1 + 1; // If num_slice_groups is not a power of two an additional bit is required @@ -149,39 +148,39 @@ absl::optional PpsParser::ParseInternal( // slice_group_id[i]: u(v) // Represented by ceil(log2(num_slice_groups_minus1 + 1)) bits. RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadBits(&bits_tmp, slice_group_id_bits)); + bit_buffer->ReadBits(slice_group_id_bits, bits_tmp)); } } } // num_ref_idx_l0_default_active_minus1: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); // num_ref_idx_l1_default_active_minus1: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); // weighted_pred_flag: u(1) uint32_t weighted_pred_flag; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&weighted_pred_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, weighted_pred_flag)); pps.weighted_pred_flag = weighted_pred_flag != 0; // weighted_bipred_idc: u(2) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.weighted_bipred_idc, 2)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(2, pps.weighted_bipred_idc)); // pic_init_qp_minus26: se(v) RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadSignedExponentialGolomb(&pps.pic_init_qp_minus26)); + bit_buffer->ReadSignedExponentialGolomb(pps.pic_init_qp_minus26)); // Sanity-check parsed value if (pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue || pps.pic_init_qp_minus26 < kMinPicInitQpDeltaValue) { RETURN_EMPTY_ON_FAIL(false); } // pic_init_qs_minus26: se(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); // 
chroma_qp_index_offset: se(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); // deblocking_filter_control_present_flag: u(1) // constrained_intra_pred_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 2)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(2, bits_tmp)); // redundant_pic_cnt_present_flag: u(1) RETURN_EMPTY_ON_FAIL( - bit_buffer->ReadBits(&pps.redundant_pic_cnt_present_flag, 1)); + bit_buffer->ReadBits(1, pps.redundant_pic_cnt_present_flag)); return pps; } @@ -189,11 +188,15 @@ absl::optional PpsParser::ParseInternal( bool PpsParser::ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer, uint32_t* pps_id, uint32_t* sps_id) { + if (pps_id == nullptr) + return false; // pic_parameter_set_id: ue(v) - if (!bit_buffer->ReadExponentialGolomb(pps_id)) + if (!bit_buffer->ReadExponentialGolomb(*pps_id)) + return false; + if (sps_id == nullptr) return false; // seq_parameter_set_id: ue(v) - if (!bit_buffer->ReadExponentialGolomb(sps_id)) + if (!bit_buffer->ReadExponentialGolomb(*sps_id)) return false; return true; } diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/profile_level_id.h b/TMessagesProj/jni/voip/webrtc/common_video/h264/profile_level_id.h deleted file mode 100644 index 07b49e57c..000000000 --- a/TMessagesProj/jni/voip/webrtc/common_video/h264/profile_level_id.h +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef COMMON_VIDEO_H264_PROFILE_LEVEL_ID_H_ -#define COMMON_VIDEO_H264_PROFILE_LEVEL_ID_H_ - -#include "media/base/h264_profile_level_id.h" - -// TODO(zhihuang): Delete this file once dependent applications switch to -// including "webrtc/media/base/h264_profile_level_id.h" directly. - -#endif // COMMON_VIDEO_H264_PROFILE_LEVEL_ID_H_ diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_parser.cc b/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_parser.cc index 3d78184e7..f505928f2 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_parser.cc @@ -71,14 +71,14 @@ absl::optional SpsParser::ParseSpsUpToVui( // profile_idc: u(8). We need it to determine if we need to read/skip chroma // formats. uint8_t profile_idc; - RETURN_EMPTY_ON_FAIL(buffer->ReadUInt8(&profile_idc)); + RETURN_EMPTY_ON_FAIL(buffer->ReadUInt8(profile_idc)); // constraint_set0_flag through constraint_set5_flag + reserved_zero_2bits // 1 bit each for the flags + 2 bits = 8 bits = 1 byte. RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1)); // level_idc: u(8) RETURN_EMPTY_ON_FAIL(buffer->ConsumeBytes(1)); // seq_parameter_set_id: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.id)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.id)); sps.separate_colour_plane_flag = 0; // See if profile_idc has chroma format information. 
if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 || @@ -86,21 +86,20 @@ absl::optional SpsParser::ParseSpsUpToVui( profile_idc == 86 || profile_idc == 118 || profile_idc == 128 || profile_idc == 138 || profile_idc == 139 || profile_idc == 134) { // chroma_format_idc: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&chroma_format_idc)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(chroma_format_idc)); if (chroma_format_idc == 3) { // separate_colour_plane_flag: u(1) - RETURN_EMPTY_ON_FAIL( - buffer->ReadBits(&sps.separate_colour_plane_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.separate_colour_plane_flag)); } // bit_depth_luma_minus8: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // bit_depth_chroma_minus8: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // qpprime_y_zero_transform_bypass_flag: u(1) RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); // seq_scaling_matrix_present_flag: u(1) uint32_t seq_scaling_matrix_present_flag; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&seq_scaling_matrix_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, seq_scaling_matrix_present_flag)); if (seq_scaling_matrix_present_flag) { // Process the scaling lists just enough to be able to properly // skip over them, so we can still read the resolution on streams @@ -110,7 +109,7 @@ absl::optional SpsParser::ParseSpsUpToVui( // seq_scaling_list_present_flag[i] : u(1) uint32_t seq_scaling_list_present_flags; RETURN_EMPTY_ON_FAIL( - buffer->ReadBits(&seq_scaling_list_present_flags, 1)); + buffer->ReadBits(1, seq_scaling_list_present_flags)); if (seq_scaling_list_present_flags != 0) { int last_scale = 8; int next_scale = 8; @@ -120,7 +119,7 @@ absl::optional SpsParser::ParseSpsUpToVui( int32_t delta_scale; // delta_scale: se(v) 
RETURN_EMPTY_ON_FAIL( - buffer->ReadSignedExponentialGolomb(&delta_scale)); + buffer->ReadSignedExponentialGolomb(delta_scale)); RETURN_EMPTY_ON_FAIL(delta_scale >= kScalingDeltaMin && delta_scale <= kScaldingDeltaMax); next_scale = (last_scale + delta_scale + 256) % 256; @@ -140,18 +139,18 @@ absl::optional SpsParser::ParseSpsUpToVui( // log2_max_frame_num_minus4: ue(v) uint32_t log2_max_frame_num_minus4; - if (!buffer->ReadExponentialGolomb(&log2_max_frame_num_minus4) || + if (!buffer->ReadExponentialGolomb(log2_max_frame_num_minus4) || log2_max_frame_num_minus4 > kMaxLog2Minus4) { return OptionalSps(); } sps.log2_max_frame_num = log2_max_frame_num_minus4 + 4; // pic_order_cnt_type: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.pic_order_cnt_type)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.pic_order_cnt_type)); if (sps.pic_order_cnt_type == 0) { // log2_max_pic_order_cnt_lsb_minus4: ue(v) uint32_t log2_max_pic_order_cnt_lsb_minus4; - if (!buffer->ReadExponentialGolomb(&log2_max_pic_order_cnt_lsb_minus4) || + if (!buffer->ReadExponentialGolomb(log2_max_pic_order_cnt_lsb_minus4) || log2_max_pic_order_cnt_lsb_minus4 > kMaxLog2Minus4) { return OptionalSps(); } @@ -159,22 +158,22 @@ absl::optional SpsParser::ParseSpsUpToVui( } else if (sps.pic_order_cnt_type == 1) { // delta_pic_order_always_zero_flag: u(1) RETURN_EMPTY_ON_FAIL( - buffer->ReadBits(&sps.delta_pic_order_always_zero_flag, 1)); + buffer->ReadBits(1, sps.delta_pic_order_always_zero_flag)); // offset_for_non_ref_pic: se(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // offset_for_top_to_bottom_field: se(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // num_ref_frames_in_pic_order_cnt_cycle: ue(v) uint32_t num_ref_frames_in_pic_order_cnt_cycle; RETURN_EMPTY_ON_FAIL( - 
buffer->ReadExponentialGolomb(&num_ref_frames_in_pic_order_cnt_cycle)); + buffer->ReadExponentialGolomb(num_ref_frames_in_pic_order_cnt_cycle)); for (size_t i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i) { // offset_for_ref_frame[i]: se(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); } } // max_num_ref_frames: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.max_num_ref_frames)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.max_num_ref_frames)); // gaps_in_frame_num_value_allowed_flag: u(1) RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); // @@ -185,13 +184,13 @@ absl::optional SpsParser::ParseSpsUpToVui( // // pic_width_in_mbs_minus1: ue(v) uint32_t pic_width_in_mbs_minus1; - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&pic_width_in_mbs_minus1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(pic_width_in_mbs_minus1)); // pic_height_in_map_units_minus1: ue(v) uint32_t pic_height_in_map_units_minus1; RETURN_EMPTY_ON_FAIL( - buffer->ReadExponentialGolomb(&pic_height_in_map_units_minus1)); + buffer->ReadExponentialGolomb(pic_height_in_map_units_minus1)); // frame_mbs_only_flag: u(1) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.frame_mbs_only_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.frame_mbs_only_flag)); if (!sps.frame_mbs_only_flag) { // mb_adaptive_frame_field_flag: u(1) RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); @@ -207,19 +206,18 @@ absl::optional SpsParser::ParseSpsUpToVui( uint32_t frame_crop_right_offset = 0; uint32_t frame_crop_top_offset = 0; uint32_t frame_crop_bottom_offset = 0; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&frame_cropping_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, frame_cropping_flag)); if (frame_cropping_flag) { // frame_crop_{left, right, top, bottom}_offset: ue(v) + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(frame_crop_left_offset)); RETURN_EMPTY_ON_FAIL( - 
buffer->ReadExponentialGolomb(&frame_crop_left_offset)); + buffer->ReadExponentialGolomb(frame_crop_right_offset)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(frame_crop_top_offset)); RETURN_EMPTY_ON_FAIL( - buffer->ReadExponentialGolomb(&frame_crop_right_offset)); - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&frame_crop_top_offset)); - RETURN_EMPTY_ON_FAIL( - buffer->ReadExponentialGolomb(&frame_crop_bottom_offset)); + buffer->ReadExponentialGolomb(frame_crop_bottom_offset)); } // vui_parameters_present_flag: u(1) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.vui_params_present, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.vui_params_present)); // Far enough! We don't use the rest of the SPS. diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.cc b/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.cc index 0d16be825..856b012b3 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h264/sps_vui_rewriter.cc @@ -45,29 +45,31 @@ enum SpsValidEvent { kSpsRewrittenMax = 8 }; -#define RETURN_FALSE_ON_FAIL(x) \ - if (!(x)) { \ - RTC_LOG_F(LS_ERROR) << " (line:" << __LINE__ << ") FAILED: " #x; \ - return false; \ - } +#define RETURN_FALSE_ON_FAIL(x) \ + do { \ + if (!(x)) { \ + RTC_LOG_F(LS_ERROR) << " (line:" << __LINE__ << ") FAILED: " #x; \ + return false; \ + } \ + } while (0) #define COPY_UINT8(src, dest, tmp) \ do { \ - RETURN_FALSE_ON_FAIL((src)->ReadUInt8(&tmp)); \ + RETURN_FALSE_ON_FAIL((src)->ReadUInt8(tmp)); \ if (dest) \ RETURN_FALSE_ON_FAIL((dest)->WriteUInt8(tmp)); \ } while (0) #define COPY_EXP_GOLOMB(src, dest, tmp) \ do { \ - RETURN_FALSE_ON_FAIL((src)->ReadExponentialGolomb(&tmp)); \ + RETURN_FALSE_ON_FAIL((src)->ReadExponentialGolomb(tmp)); \ if (dest) \ RETURN_FALSE_ON_FAIL((dest)->WriteExponentialGolomb(tmp)); \ } while (0) #define COPY_BITS(src, dest, tmp, bits) \ do { \ - 
RETURN_FALSE_ON_FAIL((src)->ReadBits(&tmp, bits)); \ + RETURN_FALSE_ON_FAIL((src)->ReadBits(bits, tmp)); \ if (dest) \ RETURN_FALSE_ON_FAIL((dest)->WriteBits(tmp, bits)); \ } while (0) @@ -369,7 +371,7 @@ bool CopyAndRewriteVui(const SpsParser::SpsState& sps, // bitstream_restriction_flag: u(1) uint32_t bitstream_restriction_flag; - RETURN_FALSE_ON_FAIL(source->ReadBits(&bitstream_restriction_flag, 1)); + RETURN_FALSE_ON_FAIL(source->ReadBits(1, bitstream_restriction_flag)); RETURN_FALSE_ON_FAIL(destination->WriteBits(1, 1)); if (bitstream_restriction_flag == 0) { // We're adding one from scratch. @@ -396,9 +398,9 @@ bool CopyAndRewriteVui(const SpsParser::SpsState& sps, // want, then we don't need to be rewriting. uint32_t max_num_reorder_frames, max_dec_frame_buffering; RETURN_FALSE_ON_FAIL( - source->ReadExponentialGolomb(&max_num_reorder_frames)); + source->ReadExponentialGolomb(max_num_reorder_frames)); RETURN_FALSE_ON_FAIL( - source->ReadExponentialGolomb(&max_dec_frame_buffering)); + source->ReadExponentialGolomb(max_dec_frame_buffering)); RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(0)); RETURN_FALSE_ON_FAIL( destination->WriteExponentialGolomb(sps.max_num_ref_frames)); @@ -511,15 +513,15 @@ bool CopyOrRewriteVideoSignalTypeInfo( uint8_t colour_primaries = 3; // H264 default: unspecified uint8_t transfer_characteristics = 3; // H264 default: unspecified uint8_t matrix_coefficients = 3; // H264 default: unspecified - RETURN_FALSE_ON_FAIL(source->ReadBits(&video_signal_type_present_flag, 1)); + RETURN_FALSE_ON_FAIL(source->ReadBits(1, video_signal_type_present_flag)); if (video_signal_type_present_flag) { - RETURN_FALSE_ON_FAIL(source->ReadBits(&video_format, 3)); - RETURN_FALSE_ON_FAIL(source->ReadBits(&video_full_range_flag, 1)); - RETURN_FALSE_ON_FAIL(source->ReadBits(&colour_description_present_flag, 1)); + RETURN_FALSE_ON_FAIL(source->ReadBits(3, video_format)); + RETURN_FALSE_ON_FAIL(source->ReadBits(1, video_full_range_flag)); + 
RETURN_FALSE_ON_FAIL(source->ReadBits(1, colour_description_present_flag)); if (colour_description_present_flag) { - RETURN_FALSE_ON_FAIL(source->ReadUInt8(&colour_primaries)); - RETURN_FALSE_ON_FAIL(source->ReadUInt8(&transfer_characteristics)); - RETURN_FALSE_ON_FAIL(source->ReadUInt8(&matrix_coefficients)); + RETURN_FALSE_ON_FAIL(source->ReadUInt8(colour_primaries)); + RETURN_FALSE_ON_FAIL(source->ReadUInt8(transfer_characteristics)); + RETURN_FALSE_ON_FAIL(source->ReadUInt8(matrix_coefficients)); } } diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_bitstream_parser.cc b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_bitstream_parser.cc index 4c766e67f..a23d01571 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_bitstream_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_bitstream_parser.cc @@ -63,19 +63,19 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( // first_slice_segment_in_pic_flag: u(1) uint32_t first_slice_segment_in_pic_flag = 0; - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&first_slice_segment_in_pic_flag, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, first_slice_segment_in_pic_flag)); if (H265::NaluType::kBlaWLp <= nalu_type && nalu_type <= H265::NaluType::kRsvIrapVcl23) { // no_output_of_prior_pics_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); } // slice_pic_parameter_set_id: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); uint32_t dependent_slice_segment_flag = 0; if (first_slice_segment_in_pic_flag == 0) { if (pps_->dependent_slice_segments_enabled_flag) { // dependent_slice_segment_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&dependent_slice_segment_flag, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, dependent_slice_segment_flag)); } // slice_segment_address: u(v) 
@@ -90,24 +90,24 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( pic_height_in_ctbs_y++; uint32_t slice_segment_address_bits = H265::Log2(pic_height_in_ctbs_y * pic_width_in_ctbs_y); - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, slice_segment_address_bits)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(slice_segment_address_bits, bits_tmp)); } if (dependent_slice_segment_flag == 0) { for (uint32_t i = 0; i < pps_->num_extra_slice_header_bits; i++) { // slice_reserved_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); } // slice_type: ue(v) uint32_t slice_type = 0; - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&slice_type)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(slice_type)); if (pps_->output_flag_present_flag) { // pic_output_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); } if (sps_->separate_colour_plane_flag) { // colour_plane_id: u(2) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 2)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(2, bits_tmp)); } uint32_t num_long_term_sps = 0; uint32_t num_long_term_pics = 0; @@ -120,9 +120,9 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( if (nalu_type != H265::NaluType::kIdrWRadl && nalu_type != H265::NaluType::kIdrNLp) { // slice_pic_order_cnt_lsb: u(v) uint32_t slice_pic_order_cnt_lsb_bits = sps_->log2_max_pic_order_cnt_lsb_minus4 + 4; - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, slice_pic_order_cnt_lsb_bits)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(slice_pic_order_cnt_lsb_bits, bits_tmp)); // short_term_ref_pic_set_sps_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&short_term_ref_pic_set_sps_flag, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, short_term_ref_pic_set_sps_flag)); if (!short_term_ref_pic_set_sps_flag) { absl::optional ref_pic_set = 
H265SpsParser::ParseShortTermRefPicSet(sps_->num_short_term_ref_pic_sets, @@ -139,16 +139,16 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( short_term_ref_pic_set_idx_bits++; } if (short_term_ref_pic_set_idx_bits > 0) { - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&short_term_ref_pic_set_idx, short_term_ref_pic_set_idx_bits)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(short_term_ref_pic_set_idx_bits, short_term_ref_pic_set_idx)); } } if (sps_->long_term_ref_pics_present_flag) { if (sps_->num_long_term_ref_pics_sps > 0) { // num_long_term_sps: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&num_long_term_sps)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(num_long_term_sps)); } // num_long_term_sps: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&num_long_term_pics)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(num_long_term_pics)); lt_idx_sps.resize(num_long_term_sps + num_long_term_pics, 0); used_by_curr_pic_lt_flag.resize(num_long_term_sps + num_long_term_pics, 0); for (uint32_t i = 0; i < num_long_term_sps + num_long_term_pics; i++) { @@ -156,52 +156,52 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( if (sps_->num_long_term_ref_pics_sps > 1) { // lt_idx_sps: u(v) uint32_t lt_idx_sps_bits = H265::Log2(sps_->num_long_term_ref_pics_sps); - RETURN_INV_ON_FAIL(slice_reader.ReadBits(<_idx_sps[i], lt_idx_sps_bits)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(lt_idx_sps_bits, lt_idx_sps[i])); } } else { // poc_lsb_lt: u(v) uint32_t poc_lsb_lt_bits = sps_->log2_max_pic_order_cnt_lsb_minus4 + 4; - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, poc_lsb_lt_bits)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(poc_lsb_lt_bits, bits_tmp)); // used_by_curr_pic_lt_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&used_by_curr_pic_lt_flag[i], 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, used_by_curr_pic_lt_flag[i])); } // delta_poc_msb_present_flag: u(1) 
uint32_t delta_poc_msb_present_flag = 0; - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&delta_poc_msb_present_flag, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, delta_poc_msb_present_flag)); if (delta_poc_msb_present_flag) { // delta_poc_msb_cycle_lt: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } } } if (sps_->sps_temporal_mvp_enabled_flag) { // slice_temporal_mvp_enabled_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&slice_temporal_mvp_enabled_flag, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, slice_temporal_mvp_enabled_flag)); } } if (sps_->sample_adaptive_offset_enabled_flag) { // slice_sao_luma_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); uint32_t chroma_array_type = sps_->separate_colour_plane_flag == 0 ? sps_->chroma_format_idc : 0; if (chroma_array_type != 0) { // slice_sao_chroma_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); } } if (slice_type == H265::SliceType::kP || slice_type == H265::SliceType::kB) { // num_ref_idx_active_override_flag: u(1) uint32_t num_ref_idx_active_override_flag = 0; - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&num_ref_idx_active_override_flag, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, num_ref_idx_active_override_flag)); uint32_t num_ref_idx_l0_active_minus1 = pps_->num_ref_idx_l0_default_active_minus1; uint32_t num_ref_idx_l1_active_minus1 = pps_->num_ref_idx_l1_default_active_minus1; if (num_ref_idx_active_override_flag) { // num_ref_idx_l0_active_minus1: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&num_ref_idx_l0_active_minus1)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(num_ref_idx_l0_active_minus1)); if (slice_type == H265::SliceType::kB) { // num_ref_idx_l1_active_minus1: ue(v) - 
RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&num_ref_idx_l1_active_minus1)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(num_ref_idx_l1_active_minus1)); } } uint32_t num_pic_total_curr = CalcNumPocTotalCurr( @@ -216,43 +216,43 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( } // ref_pic_list_modification_flag_l0: u(1) uint32_t ref_pic_list_modification_flag_l0 = 0; - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&ref_pic_list_modification_flag_l0, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, ref_pic_list_modification_flag_l0)); if (ref_pic_list_modification_flag_l0) { for (uint32_t i = 0; i < num_ref_idx_l0_active_minus1; i++) { // list_entry_l0: u(v) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, list_entry_bits)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(list_entry_bits, bits_tmp)); } } if (slice_type == H265::SliceType::kB) { // ref_pic_list_modification_flag_l1: u(1) uint32_t ref_pic_list_modification_flag_l1 = 0; - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&ref_pic_list_modification_flag_l1, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, ref_pic_list_modification_flag_l1)); if (ref_pic_list_modification_flag_l1) { for (uint32_t i = 0; i < num_ref_idx_l1_active_minus1; i++) { // list_entry_l1: u(v) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, list_entry_bits)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(list_entry_bits, bits_tmp)); } } } } if (slice_type == H265::SliceType::kB) { // mvd_l1_zero_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); } if (pps_->cabac_init_present_flag) { // cabac_init_flag: u(1) - RETURN_INV_ON_FAIL(slice_reader.ReadBits(&bits_tmp, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, bits_tmp)); } if (slice_temporal_mvp_enabled_flag) { uint32_t collocated_from_l0_flag = 0; if (slice_type == H265::SliceType::kB) { // collocated_from_l0_flag: u(1) - 
RETURN_INV_ON_FAIL(slice_reader.ReadBits(&collocated_from_l0_flag, 1)); + RETURN_INV_ON_FAIL(slice_reader.ReadBits(1, collocated_from_l0_flag)); } if ((collocated_from_l0_flag && num_ref_idx_l0_active_minus1 > 0) || (!collocated_from_l0_flag && num_ref_idx_l1_active_minus1 > 0)) { // collocated_ref_idx: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); } } if ((pps_->weighted_pred_flag && slice_type == H265::SliceType::kP) @@ -263,7 +263,7 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( return kUnsupportedStream; } // five_minus_max_num_merge_cand: ue(v) - RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(&golomb_tmp)); + RETURN_INV_ON_FAIL(slice_reader.ReadExponentialGolomb(golomb_tmp)); // TODO(piasy): motion_vector_resolution_control_idc? } } @@ -271,7 +271,7 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( // slice_qp_delta: se(v) int32_t last_slice_qp_delta; RETURN_INV_ON_FAIL( - slice_reader.ReadSignedExponentialGolomb(&last_slice_qp_delta)); + slice_reader.ReadSignedExponentialGolomb(last_slice_qp_delta)); if (abs(last_slice_qp_delta) > kMaxAbsQpDeltaValue) { // Something has gone wrong, and the parsed value is invalid. 
RTC_LOG(LS_WARNING) << "Parsed QP value out of range."; diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_pps_parser.cc b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_pps_parser.cc index 165cdb653..ec44211a2 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_pps_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_pps_parser.cc @@ -68,7 +68,7 @@ absl::optional H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( // first_slice_segment_in_pic_flag: u(1) uint32_t first_slice_segment_in_pic_flag = 0; RETURN_EMPTY_ON_FAIL( - slice_reader.ReadBits(&first_slice_segment_in_pic_flag, 1)); + slice_reader.ReadBits(1, first_slice_segment_in_pic_flag)); if (nalu_type >= H265::NaluType::kBlaWLp && nalu_type <= H265::NaluType::kRsvIrapVcl23) { @@ -78,7 +78,7 @@ absl::optional H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( // slice_pic_parameter_set_id: ue(v) uint32_t slice_pic_parameter_set_id = 0; - if (!slice_reader.ReadExponentialGolomb(&slice_pic_parameter_set_id)) + if (!slice_reader.ReadExponentialGolomb(slice_pic_parameter_set_id)) return absl::nullopt; return slice_pic_parameter_set_id; @@ -94,98 +94,98 @@ absl::optional H265PpsParser::ParseInternal( uint32_t golomb_ignored; int32_t signed_golomb_ignored; // dependent_slice_segments_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.dependent_slice_segments_enabled_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, pps.dependent_slice_segments_enabled_flag)); // output_flag_present_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.output_flag_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, pps.output_flag_present_flag)); // num_extra_slice_header_bits: u(3) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.num_extra_slice_header_bits, 3)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(3, pps.num_extra_slice_header_bits)); // sign_data_hiding_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 
1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); // cabac_init_present_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.cabac_init_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, pps.cabac_init_present_flag)); // num_ref_idx_l0_default_active_minus1: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&pps.num_ref_idx_l0_default_active_minus1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(pps.num_ref_idx_l0_default_active_minus1)); // num_ref_idx_l1_default_active_minus1: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&pps.num_ref_idx_l1_default_active_minus1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(pps.num_ref_idx_l1_default_active_minus1)); // init_qp_minus26: se(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&pps.pic_init_qp_minus26)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(pps.pic_init_qp_minus26)); // Sanity-check parsed value if (pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue || pps.pic_init_qp_minus26 < kMinPicInitQpDeltaValue) { RETURN_EMPTY_ON_FAIL(false); } // constrained_intra_pred_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); // transform_skip_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); // cu_qp_delta_enabled_flag: u(1) uint32_t cu_qp_delta_enabled_flag = 0; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&cu_qp_delta_enabled_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, cu_qp_delta_enabled_flag)); if (cu_qp_delta_enabled_flag) { // diff_cu_qp_delta_depth: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); } // pps_cb_qp_offset: se(v) - 
RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&signed_golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(signed_golomb_ignored)); // pps_cr_qp_offset: se(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&signed_golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(signed_golomb_ignored)); // pps_slice_chroma_qp_offsets_present_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); // weighted_pred_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.weighted_pred_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, pps.weighted_pred_flag)); // weighted_bipred_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.weighted_bipred_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, pps.weighted_bipred_flag)); // transquant_bypass_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); // tiles_enabled_flag: u(1) uint32_t tiles_enabled_flag = 0; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&tiles_enabled_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, tiles_enabled_flag)); // entropy_coding_sync_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); if (tiles_enabled_flag) { // num_tile_columns_minus1: ue(v) uint32_t num_tile_columns_minus1 = 0; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&num_tile_columns_minus1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(num_tile_columns_minus1)); // num_tile_rows_minus1: ue(v) uint32_t num_tile_rows_minus1 = 0; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&num_tile_rows_minus1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(num_tile_rows_minus1)); // uniform_spacing_flag: u(1) uint32_t uniform_spacing_flag = 0; - 
RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&uniform_spacing_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, uniform_spacing_flag)); if (!uniform_spacing_flag) { for (uint32_t i = 0; i < num_tile_columns_minus1; i++) { // column_width_minus1: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); } for (uint32_t i = 0; i < num_tile_rows_minus1; i++) { // row_height_minus1: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); } // loop_filter_across_tiles_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); } } // pps_loop_filter_across_slices_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); // deblocking_filter_control_present_flag: u(1) uint32_t deblocking_filter_control_present_flag = 0; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&deblocking_filter_control_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, deblocking_filter_control_present_flag)); if (deblocking_filter_control_present_flag) { // deblocking_filter_override_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); // pps_deblocking_filter_disabled_flag: u(1) uint32_t pps_deblocking_filter_disabled_flag = 0; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps_deblocking_filter_disabled_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, pps_deblocking_filter_disabled_flag)); if (!pps_deblocking_filter_disabled_flag) { // pps_beta_offset_div2: se(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&signed_golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(signed_golomb_ignored)); 
// pps_tc_offset_div2: se(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(&signed_golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadSignedExponentialGolomb(signed_golomb_ignored)); } } // pps_scaling_list_data_present_flag: u(1) uint32_t pps_scaling_list_data_present_flag = 0; - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps_scaling_list_data_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, pps_scaling_list_data_present_flag)); if (pps_scaling_list_data_present_flag) { // scaling_list_data() if (!H265SpsParser::ParseScalingListData(bit_buffer)) { @@ -193,11 +193,11 @@ absl::optional H265PpsParser::ParseInternal( } } // lists_modification_present_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&pps.lists_modification_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, pps.lists_modification_present_flag)); // log2_parallel_merge_level_minus2: ue(v) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadExponentialGolomb(golomb_ignored)); // slice_segment_header_extension_present_flag: u(1) - RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(&bits_tmp, 1)); + RETURN_EMPTY_ON_FAIL(bit_buffer->ReadBits(1, bits_tmp)); return pps; } @@ -206,10 +206,10 @@ bool H265PpsParser::ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer, uint32_t* pps_id, uint32_t* sps_id) { // pic_parameter_set_id: ue(v) - if (!bit_buffer->ReadExponentialGolomb(pps_id)) + if (!bit_buffer->ReadExponentialGolomb(*pps_id)) return false; // seq_parameter_set_id: ue(v) - if (!bit_buffer->ReadExponentialGolomb(sps_id)) + if (!bit_buffer->ReadExponentialGolomb(*sps_id)) return false; return true; } diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_sps_parser.cc b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_sps_parser.cc index eec0a1953..a9041a71a 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_sps_parser.cc +++ 
b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_sps_parser.cc @@ -63,22 +63,22 @@ bool H265SpsParser::ParseScalingListData(rtc::BitBuffer* buffer) { for (int size_id = 0; size_id < 4; size_id++) { for (int matrix_id = 0; matrix_id < 6; matrix_id += (size_id == 3) ? 3 : 1) { // scaling_list_pred_mode_flag: u(1) - RETURN_FALSE_ON_FAIL(buffer->ReadBits(&scaling_list_pred_mode_flag[size_id][matrix_id], 1)); + RETURN_FALSE_ON_FAIL(buffer->ReadBits(1, scaling_list_pred_mode_flag[size_id][matrix_id])); if (!scaling_list_pred_mode_flag[size_id][matrix_id]) { // scaling_list_pred_matrix_id_delta: ue(v) - RETURN_FALSE_ON_FAIL(buffer->ReadExponentialGolomb(&scaling_list_pred_matrix_id_delta[size_id][matrix_id])); + RETURN_FALSE_ON_FAIL(buffer->ReadExponentialGolomb(scaling_list_pred_matrix_id_delta[size_id][matrix_id])); } else { int32_t next_coef = 8; uint32_t coef_num = std::min(64, 1 << (4 + (size_id << 1))); if (size_id > 1) { // scaling_list_dc_coef_minus8: se(v) - RETURN_FALSE_ON_FAIL(buffer->ReadSignedExponentialGolomb(&scaling_list_dc_coef_minus8[size_id - 2][matrix_id])); + RETURN_FALSE_ON_FAIL(buffer->ReadSignedExponentialGolomb(scaling_list_dc_coef_minus8[size_id - 2][matrix_id])); next_coef = scaling_list_dc_coef_minus8[size_id - 2][matrix_id]; } for (uint32_t i = 0; i < coef_num; i++) { // scaling_list_delta_coef: se(v) int32_t scaling_list_delta_coef = 0; - RETURN_FALSE_ON_FAIL(buffer->ReadSignedExponentialGolomb(&scaling_list_delta_coef)); + RETURN_FALSE_ON_FAIL(buffer->ReadSignedExponentialGolomb(scaling_list_delta_coef)); next_coef = (next_coef + scaling_list_delta_coef + 256) % 256; scaling_list[size_id][matrix_id][i] = next_coef; } @@ -97,20 +97,20 @@ absl::optional H265SpsParser::ParseShortTermR uint32_t inter_ref_pic_set_prediction_flag = 0; if (st_rps_idx != 0) { // inter_ref_pic_set_prediction_flag: u(1) - RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&inter_ref_pic_set_prediction_flag, 1)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(1, 
inter_ref_pic_set_prediction_flag)); } if (inter_ref_pic_set_prediction_flag) { uint32_t delta_idx_minus1 = 0; if (st_rps_idx == num_short_term_ref_pic_sets) { // delta_idx_minus1: ue(v) - RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&delta_idx_minus1)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(delta_idx_minus1)); } // delta_rps_sign: u(1) uint32_t delta_rps_sign = 0; - RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&delta_rps_sign, 1)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(1, delta_rps_sign)); // abs_delta_rps_minus1: ue(v) uint32_t abs_delta_rps_minus1 = 0; - RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&abs_delta_rps_minus1)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(abs_delta_rps_minus1)); uint32_t ref_rps_idx = st_rps_idx - (delta_idx_minus1 + 1); uint32_t num_delta_pocs = 0; if (short_term_ref_pic_set[ref_rps_idx].inter_ref_pic_set_prediction_flag) { @@ -131,33 +131,33 @@ absl::optional H265SpsParser::ParseShortTermR ref_pic_set.use_delta_flag.resize(num_delta_pocs + 1, 1); for (uint32_t j = 0; j <= num_delta_pocs; j++) { // used_by_curr_pic_flag: u(1) - RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&ref_pic_set.used_by_curr_pic_flag[j], 1)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(1, ref_pic_set.used_by_curr_pic_flag[j])); if (!ref_pic_set.used_by_curr_pic_flag[j]) { // use_delta_flag: u(1) - RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&ref_pic_set.use_delta_flag[j], 1)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(1, ref_pic_set.use_delta_flag[j])); } } } else { // num_negative_pics: ue(v) - RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&ref_pic_set.num_negative_pics)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(ref_pic_set.num_negative_pics)); // num_positive_pics: ue(v) - RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&ref_pic_set.num_positive_pics)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(ref_pic_set.num_positive_pics)); ref_pic_set.delta_poc_s0_minus1.resize(ref_pic_set.num_negative_pics, 0); 
ref_pic_set.used_by_curr_pic_s0_flag.resize(ref_pic_set.num_negative_pics, 0); for (uint32_t i = 0; i < ref_pic_set.num_negative_pics; i++) { // delta_poc_s0_minus1: ue(v) - RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&ref_pic_set.delta_poc_s0_minus1[i])); + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(ref_pic_set.delta_poc_s0_minus1[i])); // used_by_curr_pic_s0_flag: u(1) - RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&ref_pic_set.used_by_curr_pic_s0_flag[i], 1)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(1, ref_pic_set.used_by_curr_pic_s0_flag[i])); } ref_pic_set.delta_poc_s1_minus1.resize(ref_pic_set.num_positive_pics, 0); ref_pic_set.used_by_curr_pic_s1_flag.resize(ref_pic_set.num_positive_pics, 0); for (uint32_t i = 0; i < ref_pic_set.num_positive_pics; i++) { // delta_poc_s1_minus1: ue(v) - RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(&ref_pic_set.delta_poc_s1_minus1[i])); + RETURN_EMPTY2_ON_FAIL(buffer->ReadExponentialGolomb(ref_pic_set.delta_poc_s1_minus1[i])); // used_by_curr_pic_s1_flag: u(1) - RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(&ref_pic_set.used_by_curr_pic_s1_flag[i], 1)); + RETURN_EMPTY2_ON_FAIL(buffer->ReadBits(1, ref_pic_set.used_by_curr_pic_s1_flag[i])); } } @@ -184,10 +184,10 @@ absl::optional H265SpsParser::ParseSpsInternal( // sps_video_parameter_set_id: u(4) uint32_t sps_video_parameter_set_id = 0; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps_video_parameter_set_id, 4)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(4, sps_video_parameter_set_id)); // sps_max_sub_layers_minus1: u(3) uint32_t sps_max_sub_layers_minus1 = 0; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps_max_sub_layers_minus1, 3)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(3, sps_max_sub_layers_minus1)); sps.sps_max_sub_layers_minus1 = sps_max_sub_layers_minus1; sps.sps_max_dec_pic_buffering_minus1.resize(sps_max_sub_layers_minus1 + 1, 0); // sps_temporal_id_nesting_flag: u(1) @@ -214,8 +214,8 @@ absl::optional H265SpsParser::ParseSpsInternal( uint32_t 
sub_layer_level_present = 0; for (uint32_t i = 0; i < sps_max_sub_layers_minus1; i++) { // sublayer_profile_present_flag and sublayer_level_presnet_flag: u(2) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sub_layer_profile_present, 1)); - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sub_layer_level_present, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sub_layer_profile_present)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sub_layer_level_present)); sub_layer_profile_present_flags.push_back(sub_layer_profile_present); sub_layer_level_present_flags.push_back(sub_layer_level_present); } @@ -246,24 +246,24 @@ absl::optional H265SpsParser::ParseSpsInternal( } } // sps_seq_parameter_set_id: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.id)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.id)); // chrome_format_idc: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.chroma_format_idc)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.chroma_format_idc)); if (sps.chroma_format_idc == 3) { // seperate_colour_plane_flag: u(1) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.separate_colour_plane_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.separate_colour_plane_flag)); } uint32_t pic_width_in_luma_samples = 0; uint32_t pic_height_in_luma_samples = 0; // pic_width_in_luma_samples: ue(v) RETURN_EMPTY_ON_FAIL( - buffer->ReadExponentialGolomb(&pic_width_in_luma_samples)); + buffer->ReadExponentialGolomb(pic_width_in_luma_samples)); // pic_height_in_luma_samples: ue(v) RETURN_EMPTY_ON_FAIL( - buffer->ReadExponentialGolomb(&pic_height_in_luma_samples)); + buffer->ReadExponentialGolomb(pic_height_in_luma_samples)); // conformance_window_flag: u(1) uint32_t conformance_window_flag = 0; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&conformance_window_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, conformance_window_flag)); uint32_t conf_win_left_offset = 0; uint32_t conf_win_right_offset = 0; @@ -271,53 +271,53 @@ absl::optional 
H265SpsParser::ParseSpsInternal( uint32_t conf_win_bottom_offset = 0; if (conformance_window_flag) { // conf_win_left_offset: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&conf_win_left_offset)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(conf_win_left_offset)); // conf_win_right_offset: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&conf_win_right_offset)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(conf_win_right_offset)); // conf_win_top_offset: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&conf_win_top_offset)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(conf_win_top_offset)); // conf_win_bottom_offset: ue(v) RETURN_EMPTY_ON_FAIL( - buffer->ReadExponentialGolomb(&conf_win_bottom_offset)); + buffer->ReadExponentialGolomb(conf_win_bottom_offset)); } // bit_depth_luma_minus8: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // bit_depth_chroma_minus8: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // log2_max_pic_order_cnt_lsb_minus4: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.log2_max_pic_order_cnt_lsb_minus4)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.log2_max_pic_order_cnt_lsb_minus4)); uint32_t sps_sub_layer_ordering_info_present_flag = 0; // sps_sub_layer_ordering_info_present_flag: u(1) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps_sub_layer_ordering_info_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps_sub_layer_ordering_info_present_flag)); for (uint32_t i = (sps_sub_layer_ordering_info_present_flag != 0) ? 
0 : sps_max_sub_layers_minus1; i <= sps_max_sub_layers_minus1; i++) { // sps_max_dec_pic_buffering_minus1: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.sps_max_dec_pic_buffering_minus1[i])); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.sps_max_dec_pic_buffering_minus1[i])); // sps_max_num_reorder_pics: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // sps_max_latency_increase_plus1: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); } // log2_min_luma_coding_block_size_minus3: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.log2_min_luma_coding_block_size_minus3)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.log2_min_luma_coding_block_size_minus3)); // log2_diff_max_min_luma_coding_block_size: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.log2_diff_max_min_luma_coding_block_size)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.log2_diff_max_min_luma_coding_block_size)); // log2_min_luma_transform_block_size_minus2: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // log2_diff_max_min_luma_transform_block_size: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // max_transform_hierarchy_depth_inter: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // max_transform_hierarchy_depth_intra: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // scaling_list_enabled_flag: u(1) uint32_t 
scaling_list_enabled_flag = 0; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&scaling_list_enabled_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, scaling_list_enabled_flag)); if (scaling_list_enabled_flag) { // sps_scaling_list_data_present_flag: u(1) uint32_t sps_scaling_list_data_present_flag = 0; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps_scaling_list_data_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps_scaling_list_data_present_flag)); if (sps_scaling_list_data_present_flag) { // scaling_list_data() if (!ParseScalingListData(buffer)) { @@ -329,25 +329,25 @@ absl::optional H265SpsParser::ParseSpsInternal( // amp_enabled_flag: u(1) RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); // sample_adaptive_offset_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.sample_adaptive_offset_enabled_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.sample_adaptive_offset_enabled_flag)); // pcm_enabled_flag: u(1) uint32_t pcm_enabled_flag = 0; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&pcm_enabled_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, pcm_enabled_flag)); if (pcm_enabled_flag) { // pcm_sample_bit_depth_luma_minus1: u(4) RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(4)); // pcm_sample_bit_depth_chroma_minus1: u(4) RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(4)); // log2_min_pcm_luma_coding_block_size_minus3: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // log2_diff_max_min_pcm_luma_coding_block_size: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&golomb_ignored)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(golomb_ignored)); // pcm_loop_filter_disabled_flag: u(1) RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(1)); } // num_short_term_ref_pic_sets: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.num_short_term_ref_pic_sets)); + 
RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.num_short_term_ref_pic_sets)); sps.short_term_ref_pic_set.resize(sps.num_short_term_ref_pic_sets); for (uint32_t st_rps_idx = 0; st_rps_idx < sps.num_short_term_ref_pic_sets; st_rps_idx++) { // st_ref_pic_set() @@ -361,22 +361,22 @@ absl::optional H265SpsParser::ParseSpsInternal( } // long_term_ref_pics_present_flag: u(1) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.long_term_ref_pics_present_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.long_term_ref_pics_present_flag)); if (sps.long_term_ref_pics_present_flag) { // num_long_term_ref_pics_sps: ue(v) - RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(&sps.num_long_term_ref_pics_sps)); + RETURN_EMPTY_ON_FAIL(buffer->ReadExponentialGolomb(sps.num_long_term_ref_pics_sps)); sps.used_by_curr_pic_lt_sps_flag.resize(sps.num_long_term_ref_pics_sps, 0); for (uint32_t i = 0; i < sps.num_long_term_ref_pics_sps; i++) { // lt_ref_pic_poc_lsb_sps: u(v) uint32_t lt_ref_pic_poc_lsb_sps_bits = sps.log2_max_pic_order_cnt_lsb_minus4 + 4; RETURN_EMPTY_ON_FAIL(buffer->ConsumeBits(lt_ref_pic_poc_lsb_sps_bits)); // used_by_curr_pic_lt_sps_flag: u(1) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.used_by_curr_pic_lt_sps_flag[i], 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.used_by_curr_pic_lt_sps_flag[i])); } } // sps_temporal_mvp_enabled_flag: u(1) - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&sps.sps_temporal_mvp_enabled_flag, 1)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(1, sps.sps_temporal_mvp_enabled_flag)); // Far enough! We don't use the rest of the SPS. 
diff --git a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_vps_parser.cc b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_vps_parser.cc index 002aabb1d..8c85b6501 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_vps_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/h265/h265_vps_parser.cc @@ -52,7 +52,7 @@ absl::optional H265VpsParser::ParseInternal( // vps_video_parameter_set_id: u(4) vps.id = 0; - RETURN_EMPTY_ON_FAIL(buffer->ReadBits(&vps.id, 4)); + RETURN_EMPTY_ON_FAIL(buffer->ReadBits(4, vps.id)); return OptionalVps(vps); } diff --git a/TMessagesProj/jni/voip/webrtc/common_video/include/incoming_video_stream.h b/TMessagesProj/jni/voip/webrtc/common_video/include/incoming_video_stream.h index 0dcd4efcb..d616c5a2e 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/include/incoming_video_stream.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/include/incoming_video_stream.h @@ -13,13 +13,14 @@ #include +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "common_video/video_render_frames.h" #include "rtc_base/race_checker.h" #include "rtc_base/task_queue.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -34,10 +35,10 @@ class IncomingVideoStream : public rtc::VideoSinkInterface { void OnFrame(const VideoFrame& video_frame) override; void Dequeue(); - rtc::ThreadChecker main_thread_checker_; + SequenceChecker main_thread_checker_; rtc::RaceChecker decoder_race_checker_; - VideoRenderFrames render_buffers_; // Only touched on the TaskQueue. 
+ VideoRenderFrames render_buffers_ RTC_GUARDED_BY(&incoming_render_queue_); rtc::VideoSinkInterface* const callback_; rtc::TaskQueue incoming_render_queue_; }; diff --git a/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer.h b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer.h index bc70f34ec..593464abe 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer.h @@ -12,10 +12,10 @@ #define COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_ #include +#include #include "api/scoped_refptr.h" #include "api/video/video_frame_buffer.h" -#include "rtc_base/callback.h" #include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -29,7 +29,7 @@ rtc::scoped_refptr WrapI420Buffer( int u_stride, const uint8_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used); + std::function no_longer_used); rtc::scoped_refptr WrapI444Buffer( int width, @@ -40,7 +40,7 @@ rtc::scoped_refptr WrapI444Buffer( int u_stride, const uint8_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used); + std::function no_longer_used); rtc::scoped_refptr WrapI420ABuffer( int width, @@ -53,7 +53,7 @@ rtc::scoped_refptr WrapI420ABuffer( int v_stride, const uint8_t* a_plane, int a_stride, - const rtc::Callback0& no_longer_used); + std::function no_longer_used); rtc::scoped_refptr WrapYuvBuffer( VideoFrameBuffer::Type type, @@ -65,7 +65,7 @@ rtc::scoped_refptr WrapYuvBuffer( int u_stride, const uint8_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used); + std::function no_longer_used); rtc::scoped_refptr WrapI010Buffer( int width, @@ -76,7 +76,7 @@ rtc::scoped_refptr WrapI010Buffer( int u_stride, const uint16_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used); + std::function no_longer_used); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc 
b/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc index d1f8beac5..15c668e78 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc @@ -42,7 +42,7 @@ void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) { // into the lambda instead of copying it, but it doesn't work unless we change // OnFrame to take its frame argument by value instead of const reference. incoming_render_queue_.PostTask([this, video_frame = video_frame]() mutable { - RTC_DCHECK(incoming_render_queue_.IsCurrent()); + RTC_DCHECK_RUN_ON(&incoming_render_queue_); if (render_buffers_.AddFrame(std::move(video_frame)) == 1) Dequeue(); }); @@ -50,7 +50,7 @@ void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) { void IncomingVideoStream::Dequeue() { TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue"); - RTC_DCHECK(incoming_render_queue_.IsCurrent()); + RTC_DCHECK_RUN_ON(&incoming_render_queue_); absl::optional frame_to_render = render_buffers_.FrameToRender(); if (frame_to_render) callback_->OnFrame(*frame_to_render); diff --git a/TMessagesProj/jni/voip/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/TMessagesProj/jni/voip/webrtc/common_video/libyuv/include/webrtc_libyuv.h index d27250a93..03c9ff49c 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/libyuv/include/webrtc_libyuv.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/libyuv/include/webrtc_libyuv.h @@ -32,17 +32,12 @@ enum class VideoType { kI420, kIYUV, kRGB24, - kABGR, kARGB, - kARGB4444, kRGB565, - kARGB1555, kYUY2, kYV12, kUYVY, kMJPEG, - kNV21, - kNV12, kBGRA, }; diff --git a/TMessagesProj/jni/voip/webrtc/common_video/libyuv/webrtc_libyuv.cc b/TMessagesProj/jni/voip/webrtc/common_video/libyuv/webrtc_libyuv.cc index c7613cefd..cc6a71a61 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/libyuv/webrtc_libyuv.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/libyuv/webrtc_libyuv.cc 
@@ -14,7 +14,6 @@ #include "api/video/i420_buffer.h" #include "common_video/include/video_frame_buffer.h" -#include "rtc_base/bind.h" #include "rtc_base/checks.h" #include "third_party/libyuv/include/libyuv.h" @@ -26,8 +25,6 @@ size_t CalcBufferSize(VideoType type, int width, int height) { size_t buffer_size = 0; switch (type) { case VideoType::kI420: - case VideoType::kNV12: - case VideoType::kNV21: case VideoType::kIYUV: case VideoType::kYV12: { int half_width = (width + 1) >> 1; @@ -35,9 +32,7 @@ size_t CalcBufferSize(VideoType type, int width, int height) { buffer_size = width * height + half_width * half_height * 2; break; } - case VideoType::kARGB4444: case VideoType::kRGB565: - case VideoType::kARGB1555: case VideoType::kYUY2: case VideoType::kUYVY: buffer_size = width * height * 2; @@ -98,8 +93,6 @@ int ConvertVideoType(VideoType video_type) { return libyuv::FOURCC_YV12; case VideoType::kRGB24: return libyuv::FOURCC_24BG; - case VideoType::kABGR: - return libyuv::FOURCC_ABGR; case VideoType::kRGB565: return libyuv::FOURCC_RGBP; case VideoType::kYUY2: @@ -108,18 +101,10 @@ int ConvertVideoType(VideoType video_type) { return libyuv::FOURCC_UYVY; case VideoType::kMJPEG: return libyuv::FOURCC_MJPG; - case VideoType::kNV21: - return libyuv::FOURCC_NV21; - case VideoType::kNV12: - return libyuv::FOURCC_NV12; case VideoType::kARGB: return libyuv::FOURCC_ARGB; case VideoType::kBGRA: return libyuv::FOURCC_BGRA; - case VideoType::kARGB4444: - return libyuv::FOURCC_R444; - case VideoType::kARGB1555: - return libyuv::FOURCC_RGBO; } RTC_NOTREACHED(); return libyuv::FOURCC_ANY; @@ -138,10 +123,6 @@ int ConvertFromI420(const VideoFrame& src_frame, ConvertVideoType(dst_video_type)); } -// Helper functions for keeping references alive. 
-void KeepBufferRefs(rtc::scoped_refptr, - rtc::scoped_refptr) {} - rtc::scoped_refptr ScaleI420ABuffer( const I420ABufferInterface& buffer, int target_width, @@ -160,7 +141,8 @@ rtc::scoped_refptr ScaleI420ABuffer( yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(), yuv_buffer->DataV(), yuv_buffer->StrideV(), axx_buffer->DataY(), axx_buffer->StrideY(), - rtc::Bind(&KeepBufferRefs, yuv_buffer, axx_buffer)); + // To keep references alive. + [yuv_buffer, axx_buffer] {}); return merged_buffer; } diff --git a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer.cc b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer.cc index 823c5ad7a..78a126419 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer.cc @@ -30,7 +30,7 @@ class WrappedYuvBuffer : public Base { int u_stride, const uint8_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used) + std::function no_longer_used) : width_(width), height_(height), y_plane_(y_plane), @@ -70,7 +70,7 @@ class WrappedYuvBuffer : public Base { const int y_stride_; const int u_stride_; const int v_stride_; - rtc::Callback0 no_longer_used_cb_; + std::function no_longer_used_cb_; }; // Template to implement a wrapped buffer for a I4??BufferInterface. 
@@ -87,7 +87,7 @@ class WrappedYuvaBuffer : public WrappedYuvBuffer { int v_stride, const uint8_t* a_plane, int a_stride, - const rtc::Callback0& no_longer_used) + std::function no_longer_used) : WrappedYuvBuffer(width, height, y_plane, @@ -136,7 +136,7 @@ class WrappedYuv16BBuffer : public Base { int u_stride, const uint16_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used) + std::function no_longer_used) : width_(width), height_(height), y_plane_(y_plane), @@ -176,7 +176,7 @@ class WrappedYuv16BBuffer : public Base { const int y_stride_; const int u_stride_; const int v_stride_; - rtc::Callback0 no_longer_used_cb_; + std::function no_longer_used_cb_; }; class I010BufferBase : public I010BufferInterface { @@ -206,9 +206,9 @@ rtc::scoped_refptr WrapI420Buffer( int u_stride, const uint8_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used) { + std::function no_longer_used) { return rtc::scoped_refptr( - new rtc::RefCountedObject>( + rtc::make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } @@ -224,9 +224,9 @@ rtc::scoped_refptr WrapI420ABuffer( int v_stride, const uint8_t* a_plane, int a_stride, - const rtc::Callback0& no_longer_used) { + std::function no_longer_used) { return rtc::scoped_refptr( - new rtc::RefCountedObject>( + rtc::make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, a_plane, a_stride, no_longer_used)); } @@ -240,9 +240,9 @@ rtc::scoped_refptr WrapI444Buffer( int u_stride, const uint8_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used) { + std::function no_longer_used) { return rtc::scoped_refptr( - new rtc::RefCountedObject>( + rtc::make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } @@ -257,7 +257,7 @@ rtc::scoped_refptr WrapYuvBuffer( int u_stride, const uint8_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used) { + std::function 
no_longer_used) { switch (type) { case VideoFrameBuffer::Type::kI420: return WrapI420Buffer(width, height, y_plane, y_stride, u_plane, u_stride, @@ -266,8 +266,7 @@ rtc::scoped_refptr WrapYuvBuffer( return WrapI444Buffer(width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used); default: - FATAL() << "Unexpected frame buffer type."; - return nullptr; + RTC_CHECK_NOTREACHED(); } } @@ -280,9 +279,9 @@ rtc::scoped_refptr WrapI010Buffer( int u_stride, const uint16_t* v_plane, int v_stride, - const rtc::Callback0& no_longer_used) { + std::function no_longer_used) { return rtc::scoped_refptr( - new rtc::RefCountedObject>( + rtc::make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } diff --git a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc index 6df240d9f..d225370a4 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc @@ -107,7 +107,7 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateI420Buffer( return nullptr; // Allocate new buffer. rtc::scoped_refptr buffer = - new rtc::RefCountedObject(width, height); + rtc::make_ref_counted(width, height); if (zero_initialize_) buffer->InitializeData(); @@ -138,7 +138,7 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( return nullptr; // Allocate new buffer. 
rtc::scoped_refptr buffer = - new rtc::RefCountedObject(width, height); + rtc::make_ref_counted(width, height); if (zero_initialize_) buffer->InitializeData(); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/blob_encoding.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/blob_encoding.cc index 48316b052..96699dc96 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/blob_encoding.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/blob_encoding.cc @@ -58,49 +58,30 @@ std::vector DecodeBlobs(absl::string_view encoded_blobs, return std::vector(); } - size_t read_idx = 0; - // Read the lengths of all blobs. std::vector lengths(num_of_blobs); for (size_t i = 0; i < num_of_blobs; ++i) { - if (read_idx >= encoded_blobs.length()) { - RTC_DCHECK_EQ(read_idx, encoded_blobs.length()); - RTC_LOG(LS_WARNING) << "Corrupt input; excessive number of blobs."; - return std::vector(); - } - - const size_t read_bytes = - DecodeVarInt(encoded_blobs.substr(read_idx), &lengths[i]); - if (read_bytes == 0) { + bool success = false; + std::tie(success, encoded_blobs) = DecodeVarInt(encoded_blobs, &lengths[i]); + if (!success) { RTC_LOG(LS_WARNING) << "Corrupt input; varint decoding failed."; return std::vector(); } - - read_idx += read_bytes; - - // Note: It might be that read_idx == encoded_blobs.length(), if this - // is the last iteration, and all of the blobs are the empty string. - RTC_DCHECK_LE(read_idx, encoded_blobs.length()); } // Read the blobs themselves. std::vector blobs(num_of_blobs); for (size_t i = 0; i < num_of_blobs; ++i) { - if (read_idx + lengths[i] < read_idx) { // Wrap-around detection. 
- RTC_LOG(LS_WARNING) << "Corrupt input; unreasonably large blob sequence."; - return std::vector(); - } - - if (read_idx + lengths[i] > encoded_blobs.length()) { + if (lengths[i] > encoded_blobs.length()) { RTC_LOG(LS_WARNING) << "Corrupt input; blob sizes exceed input size."; return std::vector(); } - blobs[i] = encoded_blobs.substr(read_idx, lengths[i]); - read_idx += lengths[i]; + blobs[i] = encoded_blobs.substr(0, lengths[i]); + encoded_blobs = encoded_blobs.substr(lengths[i]); } - if (read_idx != encoded_blobs.length()) { + if (!encoded_blobs.empty()) { RTC_LOG(LS_WARNING) << "Corrupt input; unrecognized trailer."; return std::vector(); } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc index 022fb9c16..7bccdabdc 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc @@ -693,7 +693,7 @@ bool FixedLengthDeltaDecoder::IsSuitableDecoderFor(const std::string& input) { uint32_t encoding_type_bits; const bool result = - reader.ReadBits(&encoding_type_bits, kBitsInHeaderForEncodingType); + reader.ReadBits(kBitsInHeaderForEncodingType, encoding_type_bits); RTC_DCHECK(result); const auto encoding_type = static_cast(encoding_type_bits); @@ -729,7 +729,7 @@ std::unique_ptr FixedLengthDeltaDecoder::Create( // Encoding type uint32_t encoding_type_bits; const bool result = - reader->ReadBits(&encoding_type_bits, kBitsInHeaderForEncodingType); + reader->ReadBits(kBitsInHeaderForEncodingType, encoding_type_bits); RTC_DCHECK(result); const EncodingType encoding = static_cast(encoding_type_bits); if (encoding != EncodingType::kFixedSizeUnsignedDeltasNoEarlyWrapNoOpt && @@ -742,7 +742,7 @@ std::unique_ptr FixedLengthDeltaDecoder::Create( uint32_t read_buffer; // delta_width_bits - if (!reader->ReadBits(&read_buffer, 
kBitsInHeaderForDeltaWidthBits)) { + if (!reader->ReadBits(kBitsInHeaderForDeltaWidthBits, read_buffer)) { return nullptr; } RTC_DCHECK_LE(read_buffer, 64 - 1); // See encoding for -1's rationale. @@ -759,20 +759,20 @@ std::unique_ptr FixedLengthDeltaDecoder::Create( value_width_bits = kDefaultValueWidthBits; } else { // signed_deltas - if (!reader->ReadBits(&read_buffer, kBitsInHeaderForSignedDeltas)) { + if (!reader->ReadBits(kBitsInHeaderForSignedDeltas, read_buffer)) { return nullptr; } signed_deltas = rtc::dchecked_cast(read_buffer); // values_optional - if (!reader->ReadBits(&read_buffer, kBitsInHeaderForValuesOptional)) { + if (!reader->ReadBits(kBitsInHeaderForValuesOptional, read_buffer)) { return nullptr; } RTC_DCHECK_LE(read_buffer, 1); values_optional = rtc::dchecked_cast(read_buffer); // value_width_bits - if (!reader->ReadBits(&read_buffer, kBitsInHeaderForValueWidthBits)) { + if (!reader->ReadBits(kBitsInHeaderForValueWidthBits, read_buffer)) { return nullptr; } RTC_DCHECK_LE(read_buffer, 64 - 1); // See encoding for -1's rationale. 
@@ -813,7 +813,7 @@ std::vector> FixedLengthDeltaDecoder::Decode() { if (params_.values_optional()) { for (size_t i = 0; i < num_of_deltas_; ++i) { uint32_t exists; - if (!reader_->ReadBits(&exists, 1u)) { + if (!reader_->ReadBits(1u, exists)) { RTC_LOG(LS_WARNING) << "Failed to read existence-indicating bit."; return std::vector>(); } @@ -877,7 +877,7 @@ bool FixedLengthDeltaDecoder::ParseDelta(uint64_t* delta) { uint32_t higher_bits; if (higher_bit_count > 0) { - if (!reader_->ReadBits(&higher_bits, higher_bit_count)) { + if (!reader_->ReadBits(higher_bit_count, higher_bits)) { RTC_LOG(LS_WARNING) << "Failed to read higher half of delta."; return false; } @@ -885,7 +885,7 @@ bool FixedLengthDeltaDecoder::ParseDelta(uint64_t* delta) { higher_bits = 0; } - if (!reader_->ReadBits(&lower_bits, lower_bit_count)) { + if (!reader_->ReadBits(lower_bit_count, lower_bits)) { RTC_LOG(LS_WARNING) << "Failed to read lower half of delta."; return false; } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc index 143df8821..2bd750785 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc @@ -15,6 +15,8 @@ #include #include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/network_state_predictor.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" #include "api/transport/network_types.h" @@ -38,7 +40,6 @@ #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include 
"modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" @@ -593,14 +594,14 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacketOutgoing( std::string RtcEventLogEncoderLegacy::EncodeRtpPacketIncoming( const RtcEventRtpPacketIncoming& event) { - return EncodeRtpPacket(event.timestamp_us(), event.header(), + return EncodeRtpPacket(event.timestamp_us(), event.RawHeader(), event.packet_length(), PacedPacketInfo::kNotAProbe, true); } std::string RtcEventLogEncoderLegacy::EncodeRtpPacketOutgoing( const RtcEventRtpPacketOutgoing& event) { - return EncodeRtpPacket(event.timestamp_us(), event.header(), + return EncodeRtpPacket(event.timestamp_us(), event.RawHeader(), event.packet_length(), event.probe_cluster_id(), false); } @@ -736,7 +737,7 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket( std::string RtcEventLogEncoderLegacy::EncodeRtpPacket( int64_t timestamp_us, - const webrtc::RtpPacket& header, + rtc::ArrayView header, size_t packet_length, int probe_cluster_id, bool is_incoming) { diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h index 3105dc1e6..37296e797 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h @@ -15,6 +15,7 @@ #include #include +#include "api/array_view.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" #include "rtc_base/buffer.h" @@ -94,7 +95,7 @@ class RtcEventLogEncoderLegacy final : public RtcEventLogEncoder { const rtc::Buffer& packet, bool is_incoming); std::string EncodeRtpPacket(int64_t timestamp_us, - const RtpPacket& header, + rtc::ArrayView header, size_t packet_length, int probe_cluster_id, bool is_incoming); diff --git 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc index 7f1b0c7d6..947f2dfe8 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc @@ -12,6 +12,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/network_state_predictor.h" #include "logging/rtc_event_log/encoder/blob_encoding.h" #include "logging/rtc_event_log/encoder/delta_encoding.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" @@ -43,7 +44,6 @@ #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/app.h" @@ -291,7 +291,7 @@ rtclog2::IceCandidatePairEvent::IceCandidatePairEventType ConvertToProtoFormat( // Copies all RTCP blocks except APP, SDES and unknown from |packet| to // |buffer|. |buffer| must have space for at least |packet.size()| bytes. -size_t RemoveNonWhitelistedRtcpBlocks(const rtc::Buffer& packet, +size_t RemoveNonAllowlistedRtcpBlocks(const rtc::Buffer& packet, uint8_t* buffer) { RTC_DCHECK(buffer != nullptr); rtcp::CommonHeader header; @@ -318,7 +318,7 @@ size_t RemoveNonWhitelistedRtcpBlocks(const rtc::Buffer& packet, // inter-arrival jitter, third-party loss reports, payload-specific // feedback and extended reports. // TODO(terelius): As an optimization, don't copy anything if all blocks - // in the packet are whitelisted types. 
+ // in the packet are allowlisted types. memcpy(buffer + buffer_length, block_begin, block_size); buffer_length += block_size; break; @@ -348,7 +348,7 @@ void EncodeRtcpPacket(rtc::ArrayView batch, { std::vector buffer(base_event->packet().size()); size_t buffer_length = - RemoveNonWhitelistedRtcpBlocks(base_event->packet(), buffer.data()); + RemoveNonAllowlistedRtcpBlocks(base_event->packet(), buffer.data()); proto_batch->set_raw_packet(buffer.data(), buffer_length); } @@ -377,7 +377,7 @@ void EncodeRtcpPacket(rtc::ArrayView batch, const EventType* event = batch[i + 1]; scrubed_packets[i].resize(event->packet().size()); static_assert(sizeof(std::string::value_type) == sizeof(uint8_t), ""); - const size_t buffer_length = RemoveNonWhitelistedRtcpBlocks( + const size_t buffer_length = RemoveNonAllowlistedRtcpBlocks( event->packet(), reinterpret_cast(&scrubed_packets[i][0])); if (buffer_length < event->packet().size()) { scrubed_packets[i].resize(buffer_length); @@ -396,12 +396,12 @@ void EncodeRtpPacket(const std::vector& batch, // Base event const EventType* const base_event = batch[0]; proto_batch->set_timestamp_ms(base_event->timestamp_ms()); - proto_batch->set_marker(base_event->header().Marker()); + proto_batch->set_marker(base_event->Marker()); // TODO(terelius): Is payload type needed? 
- proto_batch->set_payload_type(base_event->header().PayloadType()); - proto_batch->set_sequence_number(base_event->header().SequenceNumber()); - proto_batch->set_rtp_timestamp(base_event->header().Timestamp()); - proto_batch->set_ssrc(base_event->header().Ssrc()); + proto_batch->set_payload_type(base_event->PayloadType()); + proto_batch->set_sequence_number(base_event->SequenceNumber()); + proto_batch->set_rtp_timestamp(base_event->Timestamp()); + proto_batch->set_ssrc(base_event->Ssrc()); proto_batch->set_payload_size(base_event->payload_length()); proto_batch->set_header_size(base_event->header_length()); proto_batch->set_padding_size(base_event->padding_length()); @@ -410,8 +410,7 @@ void EncodeRtpPacket(const std::vector& batch, absl::optional base_transport_sequence_number; { uint16_t seqnum; - if (base_event->header().template GetExtension( - &seqnum)) { + if (base_event->template GetExtension(&seqnum)) { proto_batch->set_transport_sequence_number(seqnum); base_transport_sequence_number = seqnum; } @@ -420,8 +419,7 @@ void EncodeRtpPacket(const std::vector& batch, absl::optional unsigned_base_transmission_time_offset; { int32_t offset; - if (base_event->header().template GetExtension( - &offset)) { + if (base_event->template GetExtension(&offset)) { proto_batch->set_transmission_time_offset(offset); unsigned_base_transmission_time_offset = ToUnsigned(offset); } @@ -430,8 +428,7 @@ void EncodeRtpPacket(const std::vector& batch, absl::optional base_absolute_send_time; { uint32_t sendtime; - if (base_event->header().template GetExtension( - &sendtime)) { + if (base_event->template GetExtension(&sendtime)) { proto_batch->set_absolute_send_time(sendtime); base_absolute_send_time = sendtime; } @@ -440,8 +437,7 @@ void EncodeRtpPacket(const std::vector& batch, absl::optional base_video_rotation; { VideoRotation video_rotation; - if (base_event->header().template GetExtension( - &video_rotation)) { + if (base_event->template GetExtension(&video_rotation)) { 
proto_batch->set_video_rotation( ConvertVideoRotationToCVOByte(video_rotation)); base_video_rotation = ConvertVideoRotationToCVOByte(video_rotation); @@ -453,8 +449,8 @@ void EncodeRtpPacket(const std::vector& batch, { bool voice_activity; uint8_t audio_level; - if (base_event->header().template GetExtension(&voice_activity, - &audio_level)) { + if (base_event->template GetExtension(&voice_activity, + &audio_level)) { RTC_DCHECK_LE(audio_level, 0x7Fu); base_audio_level = audio_level; proto_batch->set_audio_level(audio_level); @@ -486,9 +482,9 @@ void EncodeRtpPacket(const std::vector& batch, // marker (RTP base) for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; - values[i] = event->header().Marker(); + values[i] = event->Marker(); } - encoded_deltas = EncodeDeltas(base_event->header().Marker(), values); + encoded_deltas = EncodeDeltas(base_event->Marker(), values); if (!encoded_deltas.empty()) { proto_batch->set_marker_deltas(encoded_deltas); } @@ -496,9 +492,9 @@ void EncodeRtpPacket(const std::vector& batch, // payload_type (RTP base) for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; - values[i] = event->header().PayloadType(); + values[i] = event->PayloadType(); } - encoded_deltas = EncodeDeltas(base_event->header().PayloadType(), values); + encoded_deltas = EncodeDeltas(base_event->PayloadType(), values); if (!encoded_deltas.empty()) { proto_batch->set_payload_type_deltas(encoded_deltas); } @@ -506,9 +502,9 @@ void EncodeRtpPacket(const std::vector& batch, // sequence_number (RTP base) for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; - values[i] = event->header().SequenceNumber(); + values[i] = event->SequenceNumber(); } - encoded_deltas = EncodeDeltas(base_event->header().SequenceNumber(), values); + encoded_deltas = EncodeDeltas(base_event->SequenceNumber(), values); if (!encoded_deltas.empty()) { proto_batch->set_sequence_number_deltas(encoded_deltas); } @@ 
-516,9 +512,9 @@ void EncodeRtpPacket(const std::vector& batch, // rtp_timestamp (RTP base) for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; - values[i] = event->header().Timestamp(); + values[i] = event->Timestamp(); } - encoded_deltas = EncodeDeltas(base_event->header().Timestamp(), values); + encoded_deltas = EncodeDeltas(base_event->Timestamp(), values); if (!encoded_deltas.empty()) { proto_batch->set_rtp_timestamp_deltas(encoded_deltas); } @@ -526,9 +522,9 @@ void EncodeRtpPacket(const std::vector& batch, // ssrc (RTP base) for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; - values[i] = event->header().Ssrc(); + values[i] = event->Ssrc(); } - encoded_deltas = EncodeDeltas(base_event->header().Ssrc(), values); + encoded_deltas = EncodeDeltas(base_event->Ssrc(), values); if (!encoded_deltas.empty()) { proto_batch->set_ssrc_deltas(encoded_deltas); } @@ -567,8 +563,7 @@ void EncodeRtpPacket(const std::vector& batch, for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; uint16_t seqnum; - if (event->header().template GetExtension( - &seqnum)) { + if (event->template GetExtension(&seqnum)) { values[i] = seqnum; } else { values[i].reset(); @@ -583,7 +578,7 @@ void EncodeRtpPacket(const std::vector& batch, for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; int32_t offset; - if (event->header().template GetExtension(&offset)) { + if (event->template GetExtension(&offset)) { values[i] = ToUnsigned(offset); } else { values[i].reset(); @@ -598,7 +593,7 @@ void EncodeRtpPacket(const std::vector& batch, for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; uint32_t sendtime; - if (event->header().template GetExtension(&sendtime)) { + if (event->template GetExtension(&sendtime)) { values[i] = sendtime; } else { values[i].reset(); @@ -613,8 +608,7 @@ void EncodeRtpPacket(const std::vector& batch, for (size_t i = 0; i 
< values.size(); ++i) { const EventType* event = batch[i + 1]; VideoRotation video_rotation; - if (event->header().template GetExtension( - &video_rotation)) { + if (event->template GetExtension(&video_rotation)) { values[i] = ConvertVideoRotationToCVOByte(video_rotation); } else { values[i].reset(); @@ -630,8 +624,8 @@ void EncodeRtpPacket(const std::vector& batch, const EventType* event = batch[i + 1]; bool voice_activity; uint8_t audio_level; - if (event->header().template GetExtension(&voice_activity, - &audio_level)) { + if (event->template GetExtension(&voice_activity, + &audio_level)) { RTC_DCHECK_LE(audio_level, 0x7Fu); values[i] = audio_level; } else { @@ -648,8 +642,8 @@ void EncodeRtpPacket(const std::vector& batch, const EventType* event = batch[i + 1]; bool voice_activity; uint8_t audio_level; - if (event->header().template GetExtension(&voice_activity, - &audio_level)) { + if (event->template GetExtension(&voice_activity, + &audio_level)) { RTC_DCHECK_LE(audio_level, 0x7Fu); values[i] = voice_activity; } else { @@ -825,14 +819,14 @@ std::string RtcEventLogEncoderNewFormat::EncodeBatch( case RtcEvent::Type::RtpPacketIncoming: { auto* rtc_event = static_cast(it->get()); - auto& v = incoming_rtp_packets[rtc_event->header().Ssrc()]; + auto& v = incoming_rtp_packets[rtc_event->Ssrc()]; v.emplace_back(rtc_event); break; } case RtcEvent::Type::RtpPacketOutgoing: { auto* rtc_event = static_cast(it->get()); - auto& v = outgoing_rtp_packets[rtc_event->header().Ssrc()]; + auto& v = outgoing_rtp_packets[rtc_event->Ssrc()]; v.emplace_back(rtc_event); break; } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/var_int.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/var_int.cc index b2c695ee7..f2819c0c7 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/var_int.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/var_int.cc @@ -39,7 +39,8 @@ std::string EncodeVarInt(uint64_t input) { // 
There is some code duplication between the flavors of this function. // For performance's sake, it's best to just keep it. -size_t DecodeVarInt(absl::string_view input, uint64_t* output) { +std::pair DecodeVarInt(absl::string_view input, + uint64_t* output) { RTC_DCHECK(output); uint64_t decoded = 0; @@ -48,11 +49,11 @@ size_t DecodeVarInt(absl::string_view input, uint64_t* output) { << static_cast(7 * i)); if (!(input[i] & 0x80)) { *output = decoded; - return i + 1; + return {true, input.substr(i + 1)}; } } - return 0; + return {false, input}; } // There is some code duplication between the flavors of this function. @@ -63,7 +64,7 @@ size_t DecodeVarInt(rtc::BitBuffer* input, uint64_t* output) { uint64_t decoded = 0; for (size_t i = 0; i < kMaxVarIntLengthBytes; ++i) { uint8_t byte; - if (!input->ReadUInt8(&byte)) { + if (!input->ReadUInt8(byte)) { return 0; } decoded += diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/var_int.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/var_int.h index 178c9cec1..dbe1f1103 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/var_int.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/var_int.h @@ -15,6 +15,7 @@ #include #include +#include #include "absl/strings/string_view.h" #include "rtc_base/bit_buffer.h" @@ -26,20 +27,23 @@ extern const size_t kMaxVarIntLengthBytes; // Encode a given uint64_t as a varint. From least to most significant, // each batch of seven bits are put into the lower bits of a byte, and the last // remaining bit in that byte (the highest one) marks whether additional bytes -// follow (which happens if and only if there are other bits in |input| which +// follow (which happens if and only if there are other bits in `input` which // are non-zero). // Notes: If input == 0, one byte is used. If input is uint64_t::max, exactly // kMaxVarIntLengthBytes are used. std::string EncodeVarInt(uint64_t input); // Inverse of EncodeVarInt(). 
-// If decoding is successful, a non-zero number is returned, indicating the -// number of bytes read from |input|, and the decoded varint is written -// into |output|. -// If not successful, 0 is returned, and |output| is not modified. -size_t DecodeVarInt(absl::string_view input, uint64_t* output); +// Returns true and the remaining (unread) slice of the input if decoding +// succeeds. Returns false otherwise and `output` is not modified. +std::pair DecodeVarInt(absl::string_view input, + uint64_t* output); // Same as other version, but uses a rtc::BitBuffer for input. +// If decoding is successful, a non-zero number is returned, indicating the +// number of bytes read from `input`, and the decoded varint is written +// into `output`. +// If not successful, 0 is returned, and `output` is not modified. // Some bits may be consumed even if a varint fails to be read. size_t DecodeVarInt(rtc::BitBuffer* input, uint64_t* output); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc index 8ab7f798c..3c307b9ca 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc @@ -21,14 +21,6 @@ RtcEventAlrState::RtcEventAlrState(const RtcEventAlrState& other) RtcEventAlrState::~RtcEventAlrState() = default; -RtcEvent::Type RtcEventAlrState::GetType() const { - return RtcEvent::Type::AlrStateEvent; -} - -bool RtcEventAlrState::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventAlrState::Copy() const { return absl::WrapUnique(new RtcEventAlrState(*this)); } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h index 0869aa4d9..3ad0f005f 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h @@ -19,12 +19,13 @@ namespace webrtc { class RtcEventAlrState final : public RtcEvent { public: + static constexpr Type kType = Type::AlrStateEvent; + explicit RtcEventAlrState(bool in_alr); ~RtcEventAlrState() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -36,5 +37,17 @@ class RtcEventAlrState final : public RtcEvent { const bool in_alr_; }; +struct LoggedAlrStateEvent { + LoggedAlrStateEvent() = default; + LoggedAlrStateEvent(int64_t timestamp_us, bool in_alr) + : timestamp_us(timestamp_us), in_alr(in_alr) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + bool in_alr; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ALR_STATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc index 73783167a..5f2d55c35 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc @@ -31,14 +31,6 @@ RtcEventAudioNetworkAdaptation::RtcEventAudioNetworkAdaptation( RtcEventAudioNetworkAdaptation::~RtcEventAudioNetworkAdaptation() = default; -RtcEvent::Type RtcEventAudioNetworkAdaptation::GetType() const { - return RtcEvent::Type::AudioNetworkAdaptation; -} - -bool RtcEventAudioNetworkAdaptation::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventAudioNetworkAdaptation::Copy() const { return 
absl::WrapUnique(new RtcEventAudioNetworkAdaptation(*this)); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h index 7c5005456..2b183bb30 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h @@ -14,6 +14,7 @@ #include #include "api/rtc_event_log/rtc_event.h" +#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" namespace webrtc { @@ -21,13 +22,14 @@ struct AudioEncoderRuntimeConfig; class RtcEventAudioNetworkAdaptation final : public RtcEvent { public: + static constexpr Type kType = Type::AudioNetworkAdaptation; + explicit RtcEventAudioNetworkAdaptation( std::unique_ptr config); ~RtcEventAudioNetworkAdaptation() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -39,6 +41,19 @@ class RtcEventAudioNetworkAdaptation final : public RtcEvent { const std::unique_ptr config_; }; +struct LoggedAudioNetworkAdaptationEvent { + LoggedAudioNetworkAdaptationEvent() = default; + LoggedAudioNetworkAdaptationEvent(int64_t timestamp_us, + const AudioEncoderRuntimeConfig& config) + : timestamp_us(timestamp_us), config(config) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + AudioEncoderRuntimeConfig config; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_NETWORK_ADAPTATION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc index 6c4aa98d3..dae61c4df 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc @@ -19,14 +19,6 @@ RtcEventAudioPlayout::RtcEventAudioPlayout(uint32_t ssrc) : ssrc_(ssrc) {} RtcEventAudioPlayout::RtcEventAudioPlayout(const RtcEventAudioPlayout& other) : RtcEvent(other.timestamp_us_), ssrc_(other.ssrc_) {} -RtcEvent::Type RtcEventAudioPlayout::GetType() const { - return RtcEvent::Type::AudioPlayout; -} - -bool RtcEventAudioPlayout::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventAudioPlayout::Copy() const { return absl::WrapUnique( new RtcEventAudioPlayout(*this)); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h index 45836b79e..83825217a 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h @@ -21,12 +21,13 @@ namespace webrtc { class RtcEventAudioPlayout final : public RtcEvent { public: + static constexpr Type kType = Type::AudioPlayout; + explicit RtcEventAudioPlayout(uint32_t ssrc); ~RtcEventAudioPlayout() override = default; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -38,6 +39,18 @@ class RtcEventAudioPlayout final : public RtcEvent { const uint32_t ssrc_; }; +struct LoggedAudioPlayoutEvent { + LoggedAudioPlayoutEvent() = default; + LoggedAudioPlayoutEvent(int64_t timestamp_us, uint32_t ssrc) + : timestamp_us(timestamp_us), ssrc(ssrc) {} + + int64_t log_time_us() const { return 
timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + uint32_t ssrc; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_PLAYOUT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc index 5cdfb473b..87caaff09 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc @@ -31,14 +31,6 @@ RtcEventAudioReceiveStreamConfig::RtcEventAudioReceiveStreamConfig( RtcEventAudioReceiveStreamConfig::~RtcEventAudioReceiveStreamConfig() = default; -RtcEvent::Type RtcEventAudioReceiveStreamConfig::GetType() const { - return RtcEvent::Type::AudioReceiveStreamConfig; -} - -bool RtcEventAudioReceiveStreamConfig::IsConfigEvent() const { - return true; -} - std::unique_ptr RtcEventAudioReceiveStreamConfig::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h index 67f28602f..1edd8e1e4 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h @@ -14,22 +14,20 @@ #include #include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { -namespace rtclog { -struct StreamConfig; -} // namespace rtclog - class RtcEventAudioReceiveStreamConfig final : public RtcEvent { public: + static constexpr Type kType = Type::AudioReceiveStreamConfig; + explicit RtcEventAudioReceiveStreamConfig( 
std::unique_ptr config); ~RtcEventAudioReceiveStreamConfig() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return true; } std::unique_ptr Copy() const; @@ -42,6 +40,18 @@ class RtcEventAudioReceiveStreamConfig final : public RtcEvent { const std::unique_ptr config_; }; +struct LoggedAudioRecvConfig { + LoggedAudioRecvConfig() = default; + LoggedAudioRecvConfig(int64_t timestamp_us, const rtclog::StreamConfig config) + : timestamp_us(timestamp_us), config(config) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + rtclog::StreamConfig config; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_RECEIVE_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc index f4403afdd..681ae11e6 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc @@ -31,14 +31,6 @@ RtcEventAudioSendStreamConfig::RtcEventAudioSendStreamConfig( RtcEventAudioSendStreamConfig::~RtcEventAudioSendStreamConfig() = default; -RtcEvent::Type RtcEventAudioSendStreamConfig::GetType() const { - return RtcEvent::Type::AudioSendStreamConfig; -} - -bool RtcEventAudioSendStreamConfig::IsConfigEvent() const { - return true; -} - std::unique_ptr RtcEventAudioSendStreamConfig::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h index 
8617b950e..d3c60683b 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h @@ -14,22 +14,20 @@ #include #include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { -namespace rtclog { -struct StreamConfig; -} // namespace rtclog - class RtcEventAudioSendStreamConfig final : public RtcEvent { public: + static constexpr Type kType = Type::AudioSendStreamConfig; + explicit RtcEventAudioSendStreamConfig( std::unique_ptr config); ~RtcEventAudioSendStreamConfig() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return true; } std::unique_ptr Copy() const; @@ -41,6 +39,17 @@ class RtcEventAudioSendStreamConfig final : public RtcEvent { const std::unique_ptr config_; }; +struct LoggedAudioSendConfig { + LoggedAudioSendConfig() = default; + LoggedAudioSendConfig(int64_t timestamp_us, const rtclog::StreamConfig config) + : timestamp_us(timestamp_us), config(config) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + rtclog::StreamConfig config; +}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_SEND_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc index dcc87421f..f3f12192c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc @@ -11,7 +11,7 @@ #include 
"logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" #include "absl/memory/memory.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "api/network_state_predictor.h" namespace webrtc { @@ -28,14 +28,6 @@ RtcEventBweUpdateDelayBased::RtcEventBweUpdateDelayBased( RtcEventBweUpdateDelayBased::~RtcEventBweUpdateDelayBased() = default; -RtcEvent::Type RtcEventBweUpdateDelayBased::GetType() const { - return RtcEvent::Type::BweUpdateDelayBased; -} - -bool RtcEventBweUpdateDelayBased::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventBweUpdateDelayBased::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h index 8908ce2be..a83ea8b69 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h @@ -15,21 +15,21 @@ #include +#include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event.h" namespace webrtc { -enum class BandwidthUsage; - class RtcEventBweUpdateDelayBased final : public RtcEvent { public: + static constexpr Type kType = Type::BweUpdateDelayBased; + RtcEventBweUpdateDelayBased(int32_t bitrate_bps, BandwidthUsage detector_state); ~RtcEventBweUpdateDelayBased() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -43,6 +43,23 @@ class RtcEventBweUpdateDelayBased final : public RtcEvent { const BandwidthUsage detector_state_; }; +struct LoggedBweDelayBasedUpdate { + LoggedBweDelayBasedUpdate() = default; + LoggedBweDelayBasedUpdate(int64_t timestamp_us, + int32_t bitrate_bps, + BandwidthUsage 
detector_state) + : timestamp_us(timestamp_us), + bitrate_bps(bitrate_bps), + detector_state(detector_state) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + int32_t bitrate_bps; + BandwidthUsage detector_state; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BWE_UPDATE_DELAY_BASED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc index 8453238cf..44524ab03 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc @@ -30,14 +30,6 @@ RtcEventBweUpdateLossBased::RtcEventBweUpdateLossBased( RtcEventBweUpdateLossBased::~RtcEventBweUpdateLossBased() = default; -RtcEvent::Type RtcEventBweUpdateLossBased::GetType() const { - return RtcEvent::Type::BweUpdateLossBased; -} - -bool RtcEventBweUpdateLossBased::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventBweUpdateLossBased::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h index 78829a958..b638f1ac1 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h @@ -21,14 +21,15 @@ namespace webrtc { class RtcEventBweUpdateLossBased final : public RtcEvent { public: + static constexpr Type kType = Type::BweUpdateLossBased; + RtcEventBweUpdateLossBased(int32_t bitrate_bps_, uint8_t fraction_loss_, int32_t total_packets_); ~RtcEventBweUpdateLossBased() 
override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -44,6 +45,26 @@ class RtcEventBweUpdateLossBased final : public RtcEvent { const int32_t total_packets_; }; +struct LoggedBweLossBasedUpdate { + LoggedBweLossBasedUpdate() = default; + LoggedBweLossBasedUpdate(int64_t timestamp_us, + int32_t bitrate_bps, + uint8_t fraction_lost, + int32_t expected_packets) + : timestamp_us(timestamp_us), + bitrate_bps(bitrate_bps), + fraction_lost(fraction_lost), + expected_packets(expected_packets) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + int32_t bitrate_bps; + uint8_t fraction_lost; + int32_t expected_packets; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BWE_UPDATE_LOSS_BASED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc index ac8e642da..f00342df7 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc @@ -24,14 +24,6 @@ RtcEventDtlsTransportState::RtcEventDtlsTransportState( RtcEventDtlsTransportState::~RtcEventDtlsTransportState() = default; -RtcEvent::Type RtcEventDtlsTransportState::GetType() const { - return RtcEvent::Type::DtlsTransportState; -} - -bool RtcEventDtlsTransportState::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventDtlsTransportState::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h index 4fbe5a793..af35a3f3b 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h @@ -20,11 +20,13 @@ namespace webrtc { class RtcEventDtlsTransportState : public RtcEvent { public: + static constexpr Type kType = Type::DtlsTransportState; + explicit RtcEventDtlsTransportState(DtlsTransportState state); ~RtcEventDtlsTransportState() override; - Type GetType() const override; - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -38,6 +40,14 @@ class RtcEventDtlsTransportState : public RtcEvent { const DtlsTransportState dtls_transport_state_; }; +struct LoggedDtlsTransportState { + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + DtlsTransportState dtls_transport_state; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_TRANSPORT_STATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc index 16c1e7b8e..d4cb093ce 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc @@ -23,14 +23,6 @@ RtcEventDtlsWritableState::RtcEventDtlsWritableState( RtcEventDtlsWritableState::~RtcEventDtlsWritableState() = default; -RtcEvent::Type RtcEventDtlsWritableState::GetType() const { - return RtcEvent::Type::DtlsWritableState; -} - -bool RtcEventDtlsWritableState::IsConfigEvent() const { - return false; -} - 
std::unique_ptr RtcEventDtlsWritableState::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h index 06a7f9db7..c3ecce00e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h @@ -19,11 +19,13 @@ namespace webrtc { class RtcEventDtlsWritableState : public RtcEvent { public: + static constexpr Type kType = Type::DtlsWritableState; + explicit RtcEventDtlsWritableState(bool writable); ~RtcEventDtlsWritableState() override; - Type GetType() const override; - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -35,6 +37,17 @@ class RtcEventDtlsWritableState : public RtcEvent { const bool writable_; }; +struct LoggedDtlsWritableState { + LoggedDtlsWritableState() = default; + explicit LoggedDtlsWritableState(bool writable) : writable(writable) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + bool writable; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_WRITABLE_STATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.cc index 19929e77f..cde412e6c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.cc @@ -36,14 +36,6 @@ RtcEventFrameDecoded::RtcEventFrameDecoded(const RtcEventFrameDecoded& other) codec_(other.codec_), 
qp_(other.qp_) {} -RtcEvent::Type RtcEventFrameDecoded::GetType() const { - return RtcEvent::Type::FrameDecoded; -} - -bool RtcEventFrameDecoded::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventFrameDecoded::Copy() const { return absl::WrapUnique( new RtcEventFrameDecoded(*this)); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h index ebc0522c9..c549aa883 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h @@ -22,6 +22,8 @@ namespace webrtc { class RtcEventFrameDecoded final : public RtcEvent { public: + static constexpr Type kType = Type::FrameDecoded; + RtcEventFrameDecoded(int64_t render_time_ms, uint32_t ssrc, int width, @@ -30,9 +32,8 @@ class RtcEventFrameDecoded final : public RtcEvent { uint8_t qp); ~RtcEventFrameDecoded() override = default; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -54,6 +55,19 @@ class RtcEventFrameDecoded final : public RtcEvent { const uint8_t qp_; }; +struct LoggedFrameDecoded { + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + int64_t render_time_ms; + uint32_t ssrc; + int width; + int height; + VideoCodecType codec; + uint8_t qp; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FRAME_DECODED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc index 0ffe62259..ba18d50ab 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc @@ -23,6 +23,7 @@ RtcEventGenericAckReceived::CreateLogs( const std::vector& acked_packets) { std::vector> result; int64_t time_us = rtc::TimeMicros(); + result.reserve(acked_packets.size()); for (const AckedPacket& packet : acked_packets) { result.emplace_back(new RtcEventGenericAckReceived( time_us, packet_number, packet.packet_number, @@ -51,12 +52,4 @@ RtcEventGenericAckReceived::RtcEventGenericAckReceived( RtcEventGenericAckReceived::~RtcEventGenericAckReceived() = default; -RtcEvent::Type RtcEventGenericAckReceived::GetType() const { - return RtcEvent::Type::GenericAckReceived; -} - -bool RtcEventGenericAckReceived::IsConfigEvent() const { - return false; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h index 689c12461..76e3cc24c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h @@ -30,6 +30,8 @@ struct AckedPacket { class RtcEventGenericAckReceived final : public RtcEvent { public: + static constexpr Type kType = Type::GenericAckReceived; + // For a collection of acked packets, it creates a vector of logs to log with // the same timestamp. static std::vector> CreateLogs( @@ -40,9 +42,8 @@ class RtcEventGenericAckReceived final : public RtcEvent { std::unique_ptr Copy() const; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } // An identifier of the packet which contained an ack. 
int64_t packet_number() const { return packet_number_; } @@ -74,6 +75,26 @@ class RtcEventGenericAckReceived final : public RtcEvent { const absl::optional receive_acked_packet_time_ms_; }; +struct LoggedGenericAckReceived { + LoggedGenericAckReceived() = default; + LoggedGenericAckReceived(int64_t timestamp_us, + int64_t packet_number, + int64_t acked_packet_number, + absl::optional receive_acked_packet_time_ms) + : timestamp_us(timestamp_us), + packet_number(packet_number), + acked_packet_number(acked_packet_number), + receive_acked_packet_time_ms(receive_acked_packet_time_ms) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + int64_t packet_number; + int64_t acked_packet_number; + absl::optional receive_acked_packet_time_ms; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_ACK_RECEIVED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc index 92558b29d..0bdc4dd50 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc @@ -28,12 +28,5 @@ std::unique_ptr RtcEventGenericPacketReceived::Copy() const { return absl::WrapUnique(new RtcEventGenericPacketReceived(*this)); } -RtcEvent::Type RtcEventGenericPacketReceived::GetType() const { - return RtcEvent::Type::GenericPacketReceived; -} - -bool RtcEventGenericPacketReceived::IsConfigEvent() const { - return false; -} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h index 1034826a4..45e5e4cc4 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h @@ -19,14 +19,15 @@ namespace webrtc { class RtcEventGenericPacketReceived final : public RtcEvent { public: + static constexpr Type kType = Type::GenericPacketReceived; + RtcEventGenericPacketReceived(int64_t packet_number, size_t packet_length); ~RtcEventGenericPacketReceived() override; std::unique_ptr Copy() const; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } // An identifier of the packet. int64_t packet_number() const { return packet_number_; } @@ -42,6 +43,23 @@ class RtcEventGenericPacketReceived final : public RtcEvent { const size_t packet_length_; }; +struct LoggedGenericPacketReceived { + LoggedGenericPacketReceived() = default; + LoggedGenericPacketReceived(int64_t timestamp_us, + int64_t packet_number, + int packet_length) + : timestamp_us(timestamp_us), + packet_number(packet_number), + packet_length(packet_length) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + int64_t packet_number; + int packet_length; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_RECEIVED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc index ef761d5e9..e8335624b 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc @@ -33,12 +33,4 @@ std::unique_ptr RtcEventGenericPacketSent::Copy() return absl::WrapUnique(new 
RtcEventGenericPacketSent(*this)); } -RtcEvent::Type RtcEventGenericPacketSent::GetType() const { - return RtcEvent::Type::GenericPacketSent; -} - -bool RtcEventGenericPacketSent::IsConfigEvent() const { - return false; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h index d87a54cbe..9ebafbe2e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h @@ -19,6 +19,8 @@ namespace webrtc { class RtcEventGenericPacketSent final : public RtcEvent { public: + static constexpr Type kType = Type::GenericPacketSent; + RtcEventGenericPacketSent(int64_t packet_number, size_t overhead_length, size_t payload_length, @@ -27,9 +29,8 @@ class RtcEventGenericPacketSent final : public RtcEvent { std::unique_ptr Copy() const; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } // An identifier of the packet. 
int64_t packet_number() const { return packet_number_; } @@ -59,6 +60,31 @@ class RtcEventGenericPacketSent final : public RtcEvent { const size_t padding_length_; }; +struct LoggedGenericPacketSent { + LoggedGenericPacketSent() = default; + LoggedGenericPacketSent(int64_t timestamp_us, + int64_t packet_number, + size_t overhead_length, + size_t payload_length, + size_t padding_length) + : timestamp_us(timestamp_us), + packet_number(packet_number), + overhead_length(overhead_length), + payload_length(payload_length), + padding_length(padding_length) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + size_t packet_length() const { + return payload_length + padding_length + overhead_length; + } + int64_t timestamp_us; + int64_t packet_number; + size_t overhead_length; + size_t payload_length; + size_t padding_length; +}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_SENT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc index 225362d9e..2b4b5ba76 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc @@ -31,14 +31,6 @@ RtcEventIceCandidatePair::RtcEventIceCandidatePair( RtcEventIceCandidatePair::~RtcEventIceCandidatePair() = default; -RtcEvent::Type RtcEventIceCandidatePair::GetType() const { - return RtcEvent::Type::IceCandidatePairEvent; -} - -bool RtcEventIceCandidatePair::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventIceCandidatePair::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h index 88b8c8268..717ddf360 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h @@ -29,15 +29,16 @@ enum class IceCandidatePairEventType { class RtcEventIceCandidatePair final : public RtcEvent { public: + static constexpr Type kType = Type::IceCandidatePairEvent; + RtcEventIceCandidatePair(IceCandidatePairEventType type, uint32_t candidate_pair_id, uint32_t transaction_id); ~RtcEventIceCandidatePair() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -53,6 +54,26 @@ class RtcEventIceCandidatePair final : public RtcEvent { const uint32_t transaction_id_; }; +struct LoggedIceCandidatePairEvent { + LoggedIceCandidatePairEvent() = default; + LoggedIceCandidatePairEvent(int64_t timestamp_us, + IceCandidatePairEventType type, + uint32_t candidate_pair_id, + uint32_t transaction_id) + : timestamp_us(timestamp_us), + type(type), + candidate_pair_id(candidate_pair_id), + transaction_id(transaction_id) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + IceCandidatePairEventType type; + uint32_t candidate_pair_id; + uint32_t transaction_id; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ICE_CANDIDATE_PAIR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc index fbb8a73df..eb458c464 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc @@ -54,16 +54,6 @@ RtcEventIceCandidatePairConfig::RtcEventIceCandidatePairConfig( RtcEventIceCandidatePairConfig::~RtcEventIceCandidatePairConfig() = default; -RtcEvent::Type RtcEventIceCandidatePairConfig::GetType() const { - return RtcEvent::Type::IceCandidatePairConfig; -} - -// The ICE candidate pair config event is not equivalent to a RtcEventLog config -// event. -bool RtcEventIceCandidatePairConfig::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventIceCandidatePairConfig::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h index 338942acb..ab2eaf242 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h @@ -83,6 +83,8 @@ class IceCandidatePairDescription { class RtcEventIceCandidatePairConfig final : public RtcEvent { public: + static constexpr Type kType = Type::IceCandidatePairConfig; + RtcEventIceCandidatePairConfig( IceCandidatePairConfigType type, uint32_t candidate_pair_id, @@ -90,9 +92,9 @@ class RtcEventIceCandidatePairConfig final : public RtcEvent { ~RtcEventIceCandidatePairConfig() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + // N.B. An ICE config event is not considered an RtcEventLog config event. 
+ bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -110,6 +112,22 @@ class RtcEventIceCandidatePairConfig final : public RtcEvent { const IceCandidatePairDescription candidate_pair_desc_; }; +struct LoggedIceCandidatePairConfig { + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + IceCandidatePairConfigType type; + uint32_t candidate_pair_id; + IceCandidateType local_candidate_type; + IceCandidatePairProtocol local_relay_protocol; + IceCandidateNetworkType local_network_type; + IceCandidatePairAddressFamily local_address_family; + IceCandidateType remote_candidate_type; + IceCandidatePairAddressFamily remote_address_family; + IceCandidatePairProtocol candidate_pair_protocol; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ICE_CANDIDATE_PAIR_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc index c11a6ce78..c3d9e59b4 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc @@ -31,14 +31,6 @@ RtcEventProbeClusterCreated::RtcEventProbeClusterCreated( min_probes_(other.min_probes_), min_bytes_(other.min_bytes_) {} -RtcEvent::Type RtcEventProbeClusterCreated::GetType() const { - return RtcEvent::Type::ProbeClusterCreated; -} - -bool RtcEventProbeClusterCreated::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventProbeClusterCreated::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h index 
7bfe6a252..f3221b91f 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h @@ -21,15 +21,16 @@ namespace webrtc { class RtcEventProbeClusterCreated final : public RtcEvent { public: + static constexpr Type kType = Type::ProbeClusterCreated; + RtcEventProbeClusterCreated(int32_t id, int32_t bitrate_bps, uint32_t min_probes, uint32_t min_bytes); ~RtcEventProbeClusterCreated() override = default; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -47,6 +48,29 @@ class RtcEventProbeClusterCreated final : public RtcEvent { const uint32_t min_bytes_; }; +struct LoggedBweProbeClusterCreatedEvent { + LoggedBweProbeClusterCreatedEvent() = default; + LoggedBweProbeClusterCreatedEvent(int64_t timestamp_us, + int32_t id, + int32_t bitrate_bps, + uint32_t min_packets, + uint32_t min_bytes) + : timestamp_us(timestamp_us), + id(id), + bitrate_bps(bitrate_bps), + min_packets(min_packets), + min_bytes(min_bytes) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + int32_t id; + int32_t bitrate_bps; + uint32_t min_packets; + uint32_t min_bytes; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_CLUSTER_CREATED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc index 295003ae8..a79b0c173 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc @@ -25,14 
+25,6 @@ RtcEventProbeResultFailure::RtcEventProbeResultFailure( id_(other.id_), failure_reason_(other.failure_reason_) {} -RtcEvent::Type RtcEventProbeResultFailure::GetType() const { - return RtcEvent::Type::ProbeResultFailure; -} - -bool RtcEventProbeResultFailure::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventProbeResultFailure::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h index a493de8ec..868c30b61 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h @@ -28,12 +28,13 @@ enum class ProbeFailureReason { class RtcEventProbeResultFailure final : public RtcEvent { public: + static constexpr Type kType = Type::ProbeResultFailure; + RtcEventProbeResultFailure(int32_t id, ProbeFailureReason failure_reason); ~RtcEventProbeResultFailure() override = default; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -47,6 +48,21 @@ class RtcEventProbeResultFailure final : public RtcEvent { const ProbeFailureReason failure_reason_; }; +struct LoggedBweProbeFailureEvent { + LoggedBweProbeFailureEvent() = default; + LoggedBweProbeFailureEvent(int64_t timestamp_us, + int32_t id, + ProbeFailureReason failure_reason) + : timestamp_us(timestamp_us), id(id), failure_reason(failure_reason) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + int32_t id; + ProbeFailureReason failure_reason; +}; + } // namespace webrtc #endif // 
LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_RESULT_FAILURE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.cc index d5f9e2f78..e7bc7c25d 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.cc @@ -24,14 +24,6 @@ RtcEventProbeResultSuccess::RtcEventProbeResultSuccess( id_(other.id_), bitrate_bps_(other.bitrate_bps_) {} -RtcEvent::Type RtcEventProbeResultSuccess::GetType() const { - return RtcEvent::Type::ProbeResultSuccess; -} - -bool RtcEventProbeResultSuccess::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventProbeResultSuccess::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h index e0aba982a..e3746681f 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h @@ -21,12 +21,13 @@ namespace webrtc { class RtcEventProbeResultSuccess final : public RtcEvent { public: + static constexpr Type kType = Type::ProbeResultSuccess; + RtcEventProbeResultSuccess(int32_t id, int32_t bitrate_bps); ~RtcEventProbeResultSuccess() override = default; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -40,6 +41,21 @@ class RtcEventProbeResultSuccess final : public RtcEvent { const int32_t bitrate_bps_; }; +struct LoggedBweProbeSuccessEvent { + LoggedBweProbeSuccessEvent() = 
default; + LoggedBweProbeSuccessEvent(int64_t timestamp_us, + int32_t id, + int32_t bitrate_bps) + : timestamp_us(timestamp_us), id(id), bitrate_bps(bitrate_bps) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + int32_t id; + int32_t bitrate_bps; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_RESULT_SUCCESS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h index b7919483c..29b0c4719 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h @@ -20,15 +20,29 @@ namespace webrtc { class RtcEventRemoteEstimate final : public RtcEvent { public: + static constexpr Type kType = Type::RemoteEstimateEvent; + RtcEventRemoteEstimate(DataRate link_capacity_lower, DataRate link_capacity_upper) : link_capacity_lower_(link_capacity_lower), link_capacity_upper_(link_capacity_upper) {} - Type GetType() const override { return RtcEvent::Type::RemoteEstimateEvent; } + + Type GetType() const override { return kType; } bool IsConfigEvent() const override { return false; } const DataRate link_capacity_lower_; const DataRate link_capacity_upper_; }; + +struct LoggedRemoteEstimateEvent { + LoggedRemoteEstimateEvent() = default; + + int64_t log_time_us() const { return timestamp_ms * 1000; } + int64_t log_time_ms() const { return timestamp_ms; } + + int64_t timestamp_ms; + absl::optional link_capacity_lower; + absl::optional link_capacity_upper; +}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_REMOTE_ESTIMATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.cc 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.cc index 1ea63e2dc..71bd78b34 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.cc @@ -24,14 +24,6 @@ RtcEventRouteChange::RtcEventRouteChange(const RtcEventRouteChange& other) RtcEventRouteChange::~RtcEventRouteChange() = default; -RtcEvent::Type RtcEventRouteChange::GetType() const { - return RtcEvent::Type::RouteChangeEvent; -} - -bool RtcEventRouteChange::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventRouteChange::Copy() const { return absl::WrapUnique(new RtcEventRouteChange(*this)); } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h index 09fb31c57..455a83214 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h @@ -19,12 +19,13 @@ namespace webrtc { class RtcEventRouteChange final : public RtcEvent { public: + static constexpr Type kType = Type::RouteChangeEvent; + RtcEventRouteChange(bool connected, uint32_t overhead); ~RtcEventRouteChange() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; @@ -38,5 +39,20 @@ class RtcEventRouteChange final : public RtcEvent { const uint32_t overhead_; }; +struct LoggedRouteChangeEvent { + LoggedRouteChangeEvent() = default; + LoggedRouteChangeEvent(int64_t timestamp_ms, + bool connected, + uint32_t overhead) + : timestamp_ms(timestamp_ms), connected(connected), overhead(overhead) {} + + int64_t log_time_us() const { return timestamp_ms * 1000; } + int64_t 
log_time_ms() const { return timestamp_ms; } + + int64_t timestamp_ms; + bool connected; + uint32_t overhead; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ROUTE_CHANGE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc index 45a418f1f..0ea700a02 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc @@ -25,14 +25,6 @@ RtcEventRtcpPacketIncoming::RtcEventRtcpPacketIncoming( RtcEventRtcpPacketIncoming::~RtcEventRtcpPacketIncoming() = default; -RtcEvent::Type RtcEventRtcpPacketIncoming::GetType() const { - return RtcEvent::Type::RtcpPacketIncoming; -} - -bool RtcEventRtcpPacketIncoming::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventRtcpPacketIncoming::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h index 8237afea9..1cbac7712 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h @@ -23,12 +23,13 @@ namespace webrtc { class RtcEventRtcpPacketIncoming final : public RtcEvent { public: + static constexpr Type kType = Type::RtcpPacketIncoming; + explicit RtcEventRtcpPacketIncoming(rtc::ArrayView packet); ~RtcEventRtcpPacketIncoming() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; diff --git 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc index b583e5614..b6a41ac03 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc @@ -25,14 +25,6 @@ RtcEventRtcpPacketOutgoing::RtcEventRtcpPacketOutgoing( RtcEventRtcpPacketOutgoing::~RtcEventRtcpPacketOutgoing() = default; -RtcEvent::Type RtcEventRtcpPacketOutgoing::GetType() const { - return RtcEvent::Type::RtcpPacketOutgoing; -} - -bool RtcEventRtcpPacketOutgoing::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventRtcpPacketOutgoing::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h index f451c7301..0ecccbeaa 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h @@ -23,12 +23,13 @@ namespace webrtc { class RtcEventRtcpPacketOutgoing final : public RtcEvent { public: + static constexpr Type kType = Type::RtcpPacketOutgoing; + explicit RtcEventRtcpPacketOutgoing(rtc::ArrayView packet); ~RtcEventRtcpPacketOutgoing() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc index 898c0aaf8..4cf33a238 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc @@ -18,33 +18,14 @@ namespace webrtc { RtcEventRtpPacketIncoming::RtcEventRtpPacketIncoming( const RtpPacketReceived& packet) - : payload_length_(packet.payload_size()), - header_length_(packet.headers_size()), - padding_length_(packet.padding_size()) { - header_.CopyHeaderFrom(packet); - RTC_DCHECK_EQ(packet.size(), - payload_length_ + header_length_ + padding_length_); -} + : packet_(packet) {} RtcEventRtpPacketIncoming::RtcEventRtpPacketIncoming( const RtcEventRtpPacketIncoming& other) - : RtcEvent(other.timestamp_us_), - payload_length_(other.payload_length_), - header_length_(other.header_length_), - padding_length_(other.padding_length_) { - header_.CopyHeaderFrom(other.header_); -} + : RtcEvent(other.timestamp_us_), packet_(other.packet_) {} RtcEventRtpPacketIncoming::~RtcEventRtpPacketIncoming() = default; -RtcEvent::Type RtcEventRtpPacketIncoming::GetType() const { - return RtcEvent::Type::RtpPacketIncoming; -} - -bool RtcEventRtpPacketIncoming::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventRtpPacketIncoming::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h index 7e30d6d7c..ee48fa360 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h @@ -11,8 +11,12 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_RTP_PACKET_INCOMING_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_RTP_PACKET_INCOMING_H_ +#include +#include #include +#include +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include 
"modules/rtp_rtcp/source/rtp_packet.h" @@ -22,31 +26,43 @@ class RtpPacketReceived; class RtcEventRtpPacketIncoming final : public RtcEvent { public: + static constexpr Type kType = Type::RtpPacketIncoming; + explicit RtcEventRtpPacketIncoming(const RtpPacketReceived& packet); ~RtcEventRtpPacketIncoming() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; - size_t packet_length() const { - return payload_length_ + header_length_ + padding_length_; + size_t packet_length() const { return packet_.size(); } + + rtc::ArrayView RawHeader() const { + return rtc::MakeArrayView(packet_.data(), header_length()); + } + uint32_t Ssrc() const { return packet_.Ssrc(); } + uint32_t Timestamp() const { return packet_.Timestamp(); } + uint16_t SequenceNumber() const { return packet_.SequenceNumber(); } + uint8_t PayloadType() const { return packet_.PayloadType(); } + bool Marker() const { return packet_.Marker(); } + template + bool GetExtension(Args&&... args) const { + return packet_.GetExtension(std::forward(args)...); + } + template + bool HasExtension() const { + return packet_.HasExtension(); } - const RtpPacket& header() const { return header_; } - size_t payload_length() const { return payload_length_; } - size_t header_length() const { return header_length_; } - size_t padding_length() const { return padding_length_; } + size_t payload_length() const { return packet_.payload_size(); } + size_t header_length() const { return packet_.headers_size(); } + size_t padding_length() const { return packet_.padding_size(); } private: RtcEventRtpPacketIncoming(const RtcEventRtpPacketIncoming& other); - RtpPacket header_; // Only the packet's header will be stored here. - const size_t payload_length_; // Media payload, excluding header and padding. - const size_t header_length_; // RTP header. 
- const size_t padding_length_; // RTP padding. + const RtpPacket packet_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc index 050474edd..a6a4d9970 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc @@ -19,35 +19,16 @@ namespace webrtc { RtcEventRtpPacketOutgoing::RtcEventRtpPacketOutgoing( const RtpPacketToSend& packet, int probe_cluster_id) - : payload_length_(packet.payload_size()), - header_length_(packet.headers_size()), - padding_length_(packet.padding_size()), - probe_cluster_id_(probe_cluster_id) { - header_.CopyHeaderFrom(packet); - RTC_DCHECK_EQ(packet.size(), - payload_length_ + header_length_ + padding_length_); -} + : packet_(packet), probe_cluster_id_(probe_cluster_id) {} RtcEventRtpPacketOutgoing::RtcEventRtpPacketOutgoing( const RtcEventRtpPacketOutgoing& other) : RtcEvent(other.timestamp_us_), - payload_length_(other.payload_length_), - header_length_(other.header_length_), - padding_length_(other.padding_length_), - probe_cluster_id_(other.probe_cluster_id_) { - header_.CopyHeaderFrom(other.header_); -} + packet_(other.packet_), + probe_cluster_id_(other.probe_cluster_id_) {} RtcEventRtpPacketOutgoing::~RtcEventRtpPacketOutgoing() = default; -RtcEvent::Type RtcEventRtpPacketOutgoing::GetType() const { - return RtcEvent::Type::RtpPacketOutgoing; -} - -bool RtcEventRtpPacketOutgoing::IsConfigEvent() const { - return false; -} - std::unique_ptr RtcEventRtpPacketOutgoing::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h index 
9211367bf..9ef5b1afd 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h @@ -11,8 +11,12 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_RTP_PACKET_OUTGOING_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_RTP_PACKET_OUTGOING_H_ +#include +#include #include +#include +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "modules/rtp_rtcp/source/rtp_packet.h" @@ -22,33 +26,45 @@ class RtpPacketToSend; class RtcEventRtpPacketOutgoing final : public RtcEvent { public: + static constexpr Type kType = Type::RtpPacketOutgoing; + RtcEventRtpPacketOutgoing(const RtpPacketToSend& packet, int probe_cluster_id); ~RtcEventRtpPacketOutgoing() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return false; } std::unique_ptr Copy() const; - size_t packet_length() const { - return payload_length_ + header_length_ + padding_length_; + size_t packet_length() const { return packet_.size(); } + + rtc::ArrayView RawHeader() const { + return rtc::MakeArrayView(packet_.data(), header_length()); + } + uint32_t Ssrc() const { return packet_.Ssrc(); } + uint32_t Timestamp() const { return packet_.Timestamp(); } + uint16_t SequenceNumber() const { return packet_.SequenceNumber(); } + uint8_t PayloadType() const { return packet_.PayloadType(); } + bool Marker() const { return packet_.Marker(); } + template + bool GetExtension(Args&&... 
args) const { + return packet_.GetExtension(std::forward(args)...); + } + template + bool HasExtension() const { + return packet_.HasExtension(); } - const RtpPacket& header() const { return header_; } - size_t payload_length() const { return payload_length_; } - size_t header_length() const { return header_length_; } - size_t padding_length() const { return padding_length_; } + size_t payload_length() const { return packet_.payload_size(); } + size_t header_length() const { return packet_.headers_size(); } + size_t padding_length() const { return packet_.padding_size(); } int probe_cluster_id() const { return probe_cluster_id_; } private: RtcEventRtpPacketOutgoing(const RtcEventRtpPacketOutgoing& other); - RtpPacket header_; // Only the packet's header will be stored here. - const size_t payload_length_; // Media payload, excluding header and padding. - const size_t header_length_; // RTP header. - const size_t padding_length_; // RTP padding. + const RtpPacket packet_; // TODO(eladalon): Delete |probe_cluster_id_| along with legacy encoding. 
const int probe_cluster_id_; }; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc index 8942f8a64..90ab8185a 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc @@ -30,14 +30,6 @@ RtcEventVideoReceiveStreamConfig::RtcEventVideoReceiveStreamConfig( RtcEventVideoReceiveStreamConfig::~RtcEventVideoReceiveStreamConfig() = default; -RtcEvent::Type RtcEventVideoReceiveStreamConfig::GetType() const { - return Type::VideoReceiveStreamConfig; -} - -bool RtcEventVideoReceiveStreamConfig::IsConfigEvent() const { - return true; -} - std::unique_ptr RtcEventVideoReceiveStreamConfig::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h index fe5099d11..2bf52476a 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h @@ -20,13 +20,14 @@ namespace webrtc { class RtcEventVideoReceiveStreamConfig final : public RtcEvent { public: + static constexpr Type kType = Type::VideoReceiveStreamConfig; + explicit RtcEventVideoReceiveStreamConfig( std::unique_ptr config); ~RtcEventVideoReceiveStreamConfig() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return true; } std::unique_ptr Copy() const; @@ -39,6 +40,18 @@ class RtcEventVideoReceiveStreamConfig final : public RtcEvent { const 
std::unique_ptr config_; }; +struct LoggedVideoRecvConfig { + LoggedVideoRecvConfig() = default; + LoggedVideoRecvConfig(int64_t timestamp_us, const rtclog::StreamConfig config) + : timestamp_us(timestamp_us), config(config) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + rtclog::StreamConfig config; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_RECEIVE_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc index 2c33466ab..c28a476d0 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc @@ -27,14 +27,6 @@ RtcEventVideoSendStreamConfig::RtcEventVideoSendStreamConfig( RtcEventVideoSendStreamConfig::~RtcEventVideoSendStreamConfig() = default; -RtcEvent::Type RtcEventVideoSendStreamConfig::GetType() const { - return RtcEvent::Type::VideoSendStreamConfig; -} - -bool RtcEventVideoSendStreamConfig::IsConfigEvent() const { - return true; -} - std::unique_ptr RtcEventVideoSendStreamConfig::Copy() const { return absl::WrapUnique( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h index 11dd148b9..cf95afc4d 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h @@ -20,13 +20,14 @@ namespace webrtc { class RtcEventVideoSendStreamConfig final : public RtcEvent { public: + static constexpr Type kType = 
Type::VideoSendStreamConfig; + explicit RtcEventVideoSendStreamConfig( std::unique_ptr config); ~RtcEventVideoSendStreamConfig() override; - Type GetType() const override; - - bool IsConfigEvent() const override; + Type GetType() const override { return kType; } + bool IsConfigEvent() const override { return true; } std::unique_ptr Copy() const; @@ -38,6 +39,17 @@ class RtcEventVideoSendStreamConfig final : public RtcEvent { const std::unique_ptr config_; }; +struct LoggedVideoSendConfig { + LoggedVideoSendConfig() = default; + LoggedVideoSendConfig(int64_t timestamp_us, const rtclog::StreamConfig config) + : timestamp_us(timestamp_us), config(config) {} + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + rtclog::StreamConfig config; +}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_SEND_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log.cc index 55f4b582c..5a44b0069 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log.cc @@ -10,32 +10,29 @@ #include "logging/rtc_event_log/fake_rtc_event_log.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" -#include "rtc_base/bind.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" +#include +#include + +#include "api/rtc_event_log/rtc_event_log.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { -FakeRtcEventLog::FakeRtcEventLog(rtc::Thread* thread) : thread_(thread) { - RTC_DCHECK(thread_); -} -FakeRtcEventLog::~FakeRtcEventLog() = default; - bool FakeRtcEventLog::StartLogging(std::unique_ptr output, int64_t output_period_ms) { return true; } -void FakeRtcEventLog::StopLogging() { - invoker_.Flush(thread_); -} +void 
FakeRtcEventLog::StopLogging() {} void FakeRtcEventLog::Log(std::unique_ptr event) { - RtcEvent::Type rtc_event_type = event->GetType(); - invoker_.AsyncInvoke( - RTC_FROM_HERE, thread_, - rtc::Bind(&FakeRtcEventLog::IncrementEventCount, this, rtc_event_type)); + MutexLock lock(&mu_); + ++count_[event->GetType()]; +} + +int FakeRtcEventLog::GetEventCount(RtcEvent::Type event_type) { + MutexLock lock(&mu_); + return count_[event_type]; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log.h index fb0e6ff4d..effa7507f 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log.h @@ -16,26 +16,25 @@ #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "rtc_base/async_invoker.h" -#include "rtc_base/thread.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { class FakeRtcEventLog : public RtcEventLog { public: - explicit FakeRtcEventLog(rtc::Thread* thread); - ~FakeRtcEventLog() override; + FakeRtcEventLog() = default; + ~FakeRtcEventLog() override = default; + bool StartLogging(std::unique_ptr output, int64_t output_period_ms) override; void StopLogging() override; void Log(std::unique_ptr event) override; - int GetEventCount(RtcEvent::Type event_type) { return count_[event_type]; } + int GetEventCount(RtcEvent::Type event_type); private: - void IncrementEventCount(RtcEvent::Type event_type) { ++count_[event_type]; } - std::map count_; - rtc::Thread* thread_; - rtc::AsyncInvoker invoker_; + Mutex mu_; + std::map count_ RTC_GUARDED_BY(mu_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.cc 
index f84f74fdb..f663ec5ab 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.cc @@ -10,14 +10,16 @@ #include "logging/rtc_event_log/fake_rtc_event_log_factory.h" +#include + #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/fake_rtc_event_log.h" namespace webrtc { std::unique_ptr FakeRtcEventLogFactory::CreateRtcEventLog( - RtcEventLog::EncodingType encoding_type) { - std::unique_ptr fake_event_log(new FakeRtcEventLog(thread())); + RtcEventLog::EncodingType /*encoding_type*/) { + auto fake_event_log = std::make_unique(); last_log_created_ = fake_event_log.get(); return fake_event_log; } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.h index 873e50efd..114c3e632 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/fake_rtc_event_log_factory.h @@ -15,24 +15,21 @@ #include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "logging/rtc_event_log/fake_rtc_event_log.h" -#include "rtc_base/thread.h" namespace webrtc { class FakeRtcEventLogFactory : public RtcEventLogFactoryInterface { public: - explicit FakeRtcEventLogFactory(rtc::Thread* thread) : thread_(thread) {} - ~FakeRtcEventLogFactory() override {} + FakeRtcEventLogFactory() = default; + ~FakeRtcEventLogFactory() override = default; std::unique_ptr CreateRtcEventLog( RtcEventLog::EncodingType encoding_type) override; - webrtc::RtcEventLog* last_log_created() { return last_log_created_; } - rtc::Thread* thread() { return thread_; } + webrtc::FakeRtcEventLog* last_log_created() { return last_log_created_; } private: - webrtc::RtcEventLog* last_log_created_; - rtc::Thread* thread_; + webrtc::FakeRtcEventLog* last_log_created_; 
}; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h index 192f7cf81..da7653d39 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h @@ -15,17 +15,9 @@ #include "absl/types/optional.h" #include "api/rtp_headers.h" -#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "api/video/video_codec_type.h" -#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" -#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" -#include "logging/rtc_event_log/rtc_stream_config.h" -#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" #include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" @@ -43,226 +35,6 @@ namespace webrtc { // possible by having a base class (containing e.g. the log time) are not // considered to outweigh the added memory and runtime overhead incurred by // adding a vptr. 
-struct LoggedAlrStateEvent { - LoggedAlrStateEvent() = default; - LoggedAlrStateEvent(int64_t timestamp_us, bool in_alr) - : timestamp_us(timestamp_us), in_alr(in_alr) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - bool in_alr; -}; - -struct LoggedAudioPlayoutEvent { - LoggedAudioPlayoutEvent() = default; - LoggedAudioPlayoutEvent(int64_t timestamp_us, uint32_t ssrc) - : timestamp_us(timestamp_us), ssrc(ssrc) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - uint32_t ssrc; -}; - -struct LoggedAudioNetworkAdaptationEvent { - LoggedAudioNetworkAdaptationEvent() = default; - LoggedAudioNetworkAdaptationEvent(int64_t timestamp_us, - const AudioEncoderRuntimeConfig& config) - : timestamp_us(timestamp_us), config(config) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - AudioEncoderRuntimeConfig config; -}; - -struct LoggedBweDelayBasedUpdate { - LoggedBweDelayBasedUpdate() = default; - LoggedBweDelayBasedUpdate(int64_t timestamp_us, - int32_t bitrate_bps, - BandwidthUsage detector_state) - : timestamp_us(timestamp_us), - bitrate_bps(bitrate_bps), - detector_state(detector_state) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - int32_t bitrate_bps; - BandwidthUsage detector_state; -}; - -struct LoggedBweLossBasedUpdate { - LoggedBweLossBasedUpdate() = default; - LoggedBweLossBasedUpdate(int64_t timestamp_us, - int32_t bitrate_bps, - uint8_t fraction_lost, - int32_t expected_packets) - : timestamp_us(timestamp_us), - bitrate_bps(bitrate_bps), - fraction_lost(fraction_lost), - expected_packets(expected_packets) {} - - int64_t log_time_us() const { return timestamp_us; } - 
int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - int32_t bitrate_bps; - uint8_t fraction_lost; - int32_t expected_packets; -}; - -struct LoggedDtlsTransportState { - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - DtlsTransportState dtls_transport_state; -}; - -struct LoggedDtlsWritableState { - LoggedDtlsWritableState() = default; - explicit LoggedDtlsWritableState(bool writable) : writable(writable) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - bool writable; -}; - -struct LoggedBweProbeClusterCreatedEvent { - LoggedBweProbeClusterCreatedEvent() = default; - LoggedBweProbeClusterCreatedEvent(int64_t timestamp_us, - int32_t id, - int32_t bitrate_bps, - uint32_t min_packets, - uint32_t min_bytes) - : timestamp_us(timestamp_us), - id(id), - bitrate_bps(bitrate_bps), - min_packets(min_packets), - min_bytes(min_bytes) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - int32_t id; - int32_t bitrate_bps; - uint32_t min_packets; - uint32_t min_bytes; -}; - -struct LoggedBweProbeSuccessEvent { - LoggedBweProbeSuccessEvent() = default; - LoggedBweProbeSuccessEvent(int64_t timestamp_us, - int32_t id, - int32_t bitrate_bps) - : timestamp_us(timestamp_us), id(id), bitrate_bps(bitrate_bps) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - int32_t id; - int32_t bitrate_bps; -}; - -struct LoggedBweProbeFailureEvent { - LoggedBweProbeFailureEvent() = default; - LoggedBweProbeFailureEvent(int64_t timestamp_us, - int32_t id, - ProbeFailureReason failure_reason) - : timestamp_us(timestamp_us), id(id), failure_reason(failure_reason) {} - - int64_t 
log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - int32_t id; - ProbeFailureReason failure_reason; -}; - -struct LoggedFrameDecoded { - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - int64_t render_time_ms; - uint32_t ssrc; - int width; - int height; - VideoCodecType codec; - uint8_t qp; -}; - -struct LoggedIceCandidatePairConfig { - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - IceCandidatePairConfigType type; - uint32_t candidate_pair_id; - IceCandidateType local_candidate_type; - IceCandidatePairProtocol local_relay_protocol; - IceCandidateNetworkType local_network_type; - IceCandidatePairAddressFamily local_address_family; - IceCandidateType remote_candidate_type; - IceCandidatePairAddressFamily remote_address_family; - IceCandidatePairProtocol candidate_pair_protocol; -}; - -struct LoggedIceCandidatePairEvent { - LoggedIceCandidatePairEvent() = default; - LoggedIceCandidatePairEvent(int64_t timestamp_us, - IceCandidatePairEventType type, - uint32_t candidate_pair_id, - uint32_t transaction_id) - : timestamp_us(timestamp_us), - type(type), - candidate_pair_id(candidate_pair_id), - transaction_id(transaction_id) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - IceCandidatePairEventType type; - uint32_t candidate_pair_id; - uint32_t transaction_id; -}; - -struct LoggedRouteChangeEvent { - LoggedRouteChangeEvent() = default; - LoggedRouteChangeEvent(int64_t timestamp_ms, - bool connected, - uint32_t overhead) - : timestamp_ms(timestamp_ms), connected(connected), overhead(overhead) {} - - int64_t log_time_us() const { return timestamp_ms * 1000; } - int64_t log_time_ms() const { return 
timestamp_ms; } - - int64_t timestamp_ms; - bool connected; - uint32_t overhead; -}; - -struct LoggedRemoteEstimateEvent { - LoggedRemoteEstimateEvent() = default; - - int64_t log_time_us() const { return timestamp_ms * 1000; } - int64_t log_time_ms() const { return timestamp_ms; } - - int64_t timestamp_ms; - absl::optional link_capacity_lower; - absl::optional link_capacity_upper; -}; struct LoggedRtpPacket { LoggedRtpPacket(int64_t timestamp_us, @@ -457,6 +229,16 @@ struct LoggedRtcpPacketLossNotification { rtcp::LossNotification loss_notification; }; +struct LoggedRtcpPacketBye { + LoggedRtcpPacketBye() = default; + + int64_t log_time_us() const { return timestamp_us; } + int64_t log_time_ms() const { return timestamp_us / 1000; } + + int64_t timestamp_us; + rtcp::Bye bye; +}; + struct LoggedStartEvent { explicit LoggedStartEvent(int64_t timestamp_us) : LoggedStartEvent(timestamp_us, timestamp_us / 1000) {} @@ -480,54 +262,6 @@ struct LoggedStopEvent { int64_t timestamp_us; }; -struct LoggedAudioRecvConfig { - LoggedAudioRecvConfig() = default; - LoggedAudioRecvConfig(int64_t timestamp_us, const rtclog::StreamConfig config) - : timestamp_us(timestamp_us), config(config) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - rtclog::StreamConfig config; -}; - -struct LoggedAudioSendConfig { - LoggedAudioSendConfig() = default; - LoggedAudioSendConfig(int64_t timestamp_us, const rtclog::StreamConfig config) - : timestamp_us(timestamp_us), config(config) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - rtclog::StreamConfig config; -}; - -struct LoggedVideoRecvConfig { - LoggedVideoRecvConfig() = default; - LoggedVideoRecvConfig(int64_t timestamp_us, const rtclog::StreamConfig config) - : timestamp_us(timestamp_us), config(config) {} - - int64_t log_time_us() const { return 
timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - rtclog::StreamConfig config; -}; - -struct LoggedVideoSendConfig { - LoggedVideoSendConfig() = default; - LoggedVideoSendConfig(int64_t timestamp_us, const rtclog::StreamConfig config) - : timestamp_us(timestamp_us), config(config) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - rtclog::StreamConfig config; -}; - struct InferredRouteChangeEvent { int64_t log_time_ms() const { return log_time.ms(); } int64_t log_time_us() const { return log_time.us(); } @@ -602,68 +336,9 @@ struct LoggedIceEvent { LoggedIceEventType event_type; }; -struct LoggedGenericPacketSent { - LoggedGenericPacketSent() = default; - LoggedGenericPacketSent(int64_t timestamp_us, - int64_t packet_number, - size_t overhead_length, - size_t payload_length, - size_t padding_length) - : timestamp_us(timestamp_us), - packet_number(packet_number), - overhead_length(overhead_length), - payload_length(payload_length), - padding_length(padding_length) {} - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - size_t packet_length() const { - return payload_length + padding_length + overhead_length; - } - int64_t timestamp_us; - int64_t packet_number; - size_t overhead_length; - size_t payload_length; - size_t padding_length; -}; -struct LoggedGenericPacketReceived { - LoggedGenericPacketReceived() = default; - LoggedGenericPacketReceived(int64_t timestamp_us, - int64_t packet_number, - int packet_length) - : timestamp_us(timestamp_us), - packet_number(packet_number), - packet_length(packet_length) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - int64_t packet_number; - int packet_length; -}; - -struct LoggedGenericAckReceived { - 
LoggedGenericAckReceived() = default; - LoggedGenericAckReceived(int64_t timestamp_us, - int64_t packet_number, - int64_t acked_packet_number, - absl::optional receive_acked_packet_time_ms) - : timestamp_us(timestamp_us), - packet_number(packet_number), - acked_packet_number(acked_packet_number), - receive_acked_packet_time_ms(receive_acked_packet_time_ms) {} - - int64_t log_time_us() const { return timestamp_us; } - int64_t log_time_ms() const { return timestamp_us / 1000; } - - int64_t timestamp_us; - int64_t packet_number; - int64_t acked_packet_number; - absl::optional receive_acked_packet_time_ms; -}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_LOGGED_EVENTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.h index 9c7aae669..0b6a71b24 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.h @@ -21,9 +21,10 @@ #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log_output.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" @@ -78,7 +79,7 @@ class RtcEventLogImpl final : public RtcEventLog { int64_t last_output_ms_ RTC_GUARDED_BY(*task_queue_); bool output_scheduled_ RTC_GUARDED_BY(*task_queue_); - SequenceChecker logging_state_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker logging_state_checker_; bool logging_state_started_ RTC_GUARDED_BY(logging_state_checker_); // Since we are posting tasks bound to |this|, it is critical that the event diff --git 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc index 24d5962aa..99d3a0f72 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc @@ -14,25 +14,23 @@ #include #include -#include -#include // no-presubmit-check TODO(webrtc:8982) #include #include #include #include "absl/memory/memory.h" #include "absl/types/optional.h" +#include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" #include "logging/rtc_event_log/encoder/blob_encoding.h" #include "logging/rtc_event_log/encoder/delta_encoding.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" +#include "logging/rtc_event_log/encoder/var_int.h" #include "logging/rtc_event_log/rtc_event_processor.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" -#include "modules/include/module_common_types.h" #include "modules/include/module_common_types_public.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" @@ -43,6 +41,7 @@ #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/protobuf_utils.h" +#include "rtc_base/system/file_wrapper.h" // These macros were added to convert existing code using RTC_CHECKs // to returning a Status object instead. 
Macros are necessary (over @@ -99,6 +98,8 @@ using webrtc_event_logging::ToUnsigned; namespace webrtc { namespace { +constexpr int64_t kMaxLogSize = 250000000; + constexpr size_t kIpv4Overhead = 20; constexpr size_t kIpv6Overhead = 40; constexpr size_t kUdpOverhead = 8; @@ -316,33 +317,6 @@ VideoCodecType GetRuntimeCodecType(rtclog2::FrameDecodedEvents::Codec codec) { return VideoCodecType::kVideoCodecMultiplex; } -// Reads a VarInt from |stream| and returns it. Also writes the read bytes to -// |buffer| starting |bytes_written| bytes into the buffer. |bytes_written| is -// incremented for each written byte. -ParsedRtcEventLog::ParseStatusOr ParseVarInt( - std::istream& stream, // no-presubmit-check TODO(webrtc:8982) - char* buffer, - size_t* bytes_written) { - uint64_t varint = 0; - for (size_t bytes_read = 0; bytes_read < 10; ++bytes_read) { - // The most significant bit of each byte is 0 if it is the last byte in - // the varint and 1 otherwise. Thus, we take the 7 least significant bits - // of each byte and shift them 7 bits for each byte read previously to get - // the (unsigned) integer. 
- int byte = stream.get(); - RTC_PARSE_CHECK_OR_RETURN(!stream.eof()); - RTC_DCHECK_GE(byte, 0); - RTC_DCHECK_LE(byte, 255); - varint |= static_cast(byte & 0x7F) << (7 * bytes_read); - buffer[*bytes_written] = byte; - *bytes_written += 1; - if ((byte & 0x80) == 0) { - return varint; - } - } - RTC_PARSE_CHECK_OR_RETURN(false); -} - ParsedRtcEventLog::ParseStatus GetHeaderExtensions( std::vector* header_extensions, const RepeatedPtrField& @@ -697,6 +671,7 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks( std::vector* nack_list, std::vector* fir_list, std::vector* pli_list, + std::vector* bye_list, std::vector* transport_feedback_list, std::vector* loss_notification_list) { rtcp::CommonHeader header; @@ -741,7 +716,13 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks( if (parsed_block.pli.Parse(header)) { pli_list->push_back(std::move(parsed_block)); } - } else if (header.type() == rtcp::Remb::kPacketType && + } else if (header.type() == rtcp::Bye::kPacketType) { + LoggedRtcpPacketBye parsed_block; + parsed_block.timestamp_us = timestamp_us; + if (parsed_block.bye.Parse(header)) { + bye_list->push_back(std::move(parsed_block)); + } + } else if (header.type() == rtcp::Psfb::kPacketType && header.fmt() == rtcp::Psfb::kAfbMessageType) { bool type_found = false; if (!type_found) { @@ -1105,27 +1086,39 @@ void ParsedRtcEventLog::Clear() { ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseFile( const std::string& filename) { - std::ifstream file( // no-presubmit-check TODO(webrtc:8982) - filename, std::ios_base::in | std::ios_base::binary); - if (!file.good() || !file.is_open()) { - RTC_LOG(LS_WARNING) << "Could not open file for reading."; - RTC_PARSE_CHECK_OR_RETURN(file.good() && file.is_open()); + FileWrapper file = FileWrapper::OpenReadOnly(filename); + if (!file.is_open()) { + RTC_LOG(LS_WARNING) << "Could not open file " << filename + << " for reading."; + RTC_PARSE_CHECK_OR_RETURN(file.is_open()); } - return ParseStream(file); + // Compute file size. 
+ long signed_filesize = file.FileSize(); // NOLINT(runtime/int) + RTC_PARSE_CHECK_OR_RETURN_GE(signed_filesize, 0); + RTC_PARSE_CHECK_OR_RETURN_LE(signed_filesize, kMaxLogSize); + size_t filesize = rtc::checked_cast(signed_filesize); + + // Read file into memory. + std::string buffer(filesize, '\0'); + size_t bytes_read = file.Read(&buffer[0], buffer.size()); + if (bytes_read != filesize) { + RTC_LOG(LS_WARNING) << "Failed to read file " << filename; + RTC_PARSE_CHECK_OR_RETURN_EQ(bytes_read, filesize); + } + + return ParseStream(buffer); } ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseString( const std::string& s) { - std::istringstream stream( // no-presubmit-check TODO(webrtc:8982) - s, std::ios_base::in | std::ios_base::binary); - return ParseStream(stream); + return ParseStream(s); } ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( - std::istream& stream) { // no-presubmit-check TODO(webrtc:8982) + const std::string& s) { Clear(); - ParseStatus status = ParseStreamInternal(stream); + ParseStatus status = ParseStreamInternal(s); // Cache the configured SSRCs. 
for (const auto& video_recv_config : video_recv_configs()) { @@ -1185,7 +1178,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( auto status = StoreRtcpBlocks( timestamp_us, packet_begin, packet_end, &incoming_sr_, &incoming_rr_, &incoming_xr_, &incoming_remb_, &incoming_nack_, &incoming_fir_, - &incoming_pli_, &incoming_transport_feedback_, + &incoming_pli_, &incoming_bye_, &incoming_transport_feedback_, &incoming_loss_notification_); RTC_RETURN_IF_ERROR(status); } @@ -1197,7 +1190,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( auto status = StoreRtcpBlocks( timestamp_us, packet_begin, packet_end, &outgoing_sr_, &outgoing_rr_, &outgoing_xr_, &outgoing_remb_, &outgoing_nack_, &outgoing_fir_, - &outgoing_pli_, &outgoing_transport_feedback_, + &outgoing_pli_, &outgoing_bye_, &outgoing_transport_feedback_, &outgoing_loss_notification_); RTC_RETURN_IF_ERROR(status); } @@ -1276,17 +1269,12 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( } ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( - std::istream& stream) { // no-presubmit-check TODO(webrtc:8982) + absl::string_view s) { constexpr uint64_t kMaxEventSize = 10000000; // Sanity check. - std::vector buffer(0xFFFF); - RTC_DCHECK(stream.good()); - while (1) { - // Check whether we have reached end of file. - stream.peek(); - if (stream.eof()) { - break; - } + while (!s.empty()) { + absl::string_view event_start = s; + bool success = false; // Read the next message tag. Protobuf defines the message tag as // (field_number << 3) | wire_type. In the legacy encoding, the field number @@ -1294,18 +1282,18 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( // In the new encoding we still expect the wire type to be 2, but the field // number will be greater than 1. 
constexpr uint64_t kExpectedV1Tag = (1 << 3) | 2; - size_t bytes_written = 0; - ParsedRtcEventLog::ParseStatusOr tag = - ParseVarInt(stream, buffer.data(), &bytes_written); - if (!tag.ok()) { + uint64_t tag = 0; + std::tie(success, s) = DecodeVarInt(s, &tag); + if (!success) { RTC_LOG(LS_WARNING) - << "Missing field tag from beginning of protobuf event."; + << "Failed to read field tag from beginning of protobuf event."; RTC_PARSE_WARN_AND_RETURN_SUCCESS_IF(allow_incomplete_logs_, kIncompleteLogError); - return tag.status(); + return ParseStatus::Error("Failed to read field tag varint", __FILE__, + __LINE__); } constexpr uint64_t kWireTypeMask = 0x07; - const uint64_t wire_type = tag.value() & kWireTypeMask; + const uint64_t wire_type = tag & kWireTypeMask; if (wire_type != 2) { RTC_LOG(LS_WARNING) << "Expected field tag with wire type 2 (length " "delimited message). Found wire type " @@ -1316,36 +1304,32 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( } // Read the length field. - ParsedRtcEventLog::ParseStatusOr message_length = - ParseVarInt(stream, buffer.data(), &bytes_written); - if (!message_length.ok()) { + uint64_t message_length = 0; + std::tie(success, s) = DecodeVarInt(s, &message_length); + if (!success) { RTC_LOG(LS_WARNING) << "Missing message length after protobuf field tag."; RTC_PARSE_WARN_AND_RETURN_SUCCESS_IF(allow_incomplete_logs_, kIncompleteLogError); - return message_length.status(); - } else if (message_length.value() > kMaxEventSize) { + return ParseStatus::Error("Failed to read message length varint", + __FILE__, __LINE__); + } + + if (message_length > s.size()) { RTC_LOG(LS_WARNING) << "Protobuf message length is too large."; RTC_PARSE_WARN_AND_RETURN_SUCCESS_IF(allow_incomplete_logs_, kIncompleteLogError); - RTC_PARSE_CHECK_OR_RETURN_LE(message_length.value(), kMaxEventSize); + RTC_PARSE_CHECK_OR_RETURN_LE(message_length, kMaxEventSize); } - // Read the next protobuf event to a temporary char buffer. 
- if (buffer.size() < bytes_written + message_length.value()) - buffer.resize(bytes_written + message_length.value()); - stream.read(buffer.data() + bytes_written, message_length.value()); - if (stream.gcount() != static_cast(message_length.value())) { - RTC_LOG(LS_WARNING) << "Failed to read protobuf message."; - RTC_PARSE_WARN_AND_RETURN_SUCCESS_IF(allow_incomplete_logs_, - kIncompleteLogError); - RTC_PARSE_CHECK_OR_RETURN(false); - } - size_t buffer_size = bytes_written + message_length.value(); + // Skip forward to the start of the next event. + s = s.substr(message_length); + size_t total_event_size = event_start.size() - s.size(); + RTC_CHECK_LE(total_event_size, event_start.size()); - if (tag.value() == kExpectedV1Tag) { + if (tag == kExpectedV1Tag) { // Parse the protobuf event from the buffer. rtclog::EventStream event_stream; - if (!event_stream.ParseFromArray(buffer.data(), buffer_size)) { + if (!event_stream.ParseFromArray(event_start.data(), total_event_size)) { RTC_LOG(LS_WARNING) << "Failed to parse legacy-format protobuf message."; RTC_PARSE_WARN_AND_RETURN_SUCCESS_IF(allow_incomplete_logs_, @@ -1359,7 +1343,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( } else { // Parse the protobuf event from the buffer. 
rtclog2::EventStream event_stream; - if (!event_stream.ParseFromArray(buffer.data(), buffer_size)) { + if (!event_stream.ParseFromArray(event_start.data(), total_event_size)) { RTC_LOG(LS_WARNING) << "Failed to parse new-format protobuf message."; RTC_PARSE_WARN_AND_RETURN_SUCCESS_IF(allow_incomplete_logs_, kIncompleteLogError); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h index dce075aff..67e1a09ff 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h @@ -14,14 +14,36 @@ #include #include #include -#include // no-presubmit-check TODO(webrtc:8982) #include #include // pair #include +#include "absl/base/attributes.h" #include "api/rtc_event_log/rtc_event_log.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" +#include "logging/rtc_event_log/events/rtc_event_alr_state.h" +#include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" +#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" +#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" +#include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" +#include 
"logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" #include "logging/rtc_event_log/logged_events.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" @@ -274,7 +296,7 @@ class ParsedRtcEventLog { return error_ + " failed at " + file_ + " line " + std::to_string(line_); } - RTC_DEPRECATED operator bool() const { return ok(); } + ABSL_DEPRECATED("Use ok() instead") operator bool() const { return ok(); } private: ParseStatus() : error_(), file_(), line_(0) {} @@ -366,9 +388,8 @@ class ParsedRtcEventLog { // Reads an RtcEventLog from a string and returns success if successful. ParseStatus ParseString(const std::string& s); - // Reads an RtcEventLog from an istream and returns success if successful. - ParseStatus ParseStream( - std::istream& stream); // no-presubmit-check TODO(webrtc:8982) + // Reads an RtcEventLog from an string and returns success if successful. 
+ ParseStatus ParseStream(const std::string& s); MediaType GetMediaType(uint32_t ssrc, PacketDirection direction) const; @@ -581,6 +602,15 @@ class ParsedRtcEventLog { } } + const std::vector& byes( + PacketDirection direction) const { + if (direction == kIncomingPacket) { + return incoming_bye_; + } else { + return outgoing_bye_; + } + } + const std::vector& transport_feedbacks( PacketDirection direction) const { if (direction == kIncomingPacket) { @@ -635,8 +665,7 @@ class ParsedRtcEventLog { std::vector GetRouteChanges() const; private: - ABSL_MUST_USE_RESULT ParseStatus ParseStreamInternal( - std::istream& stream); // no-presubmit-check TODO(webrtc:8982) + ABSL_MUST_USE_RESULT ParseStatus ParseStreamInternal(absl::string_view s); ABSL_MUST_USE_RESULT ParseStatus StoreParsedLegacyEvent(const rtclog::Event& event); @@ -827,6 +856,8 @@ class ParsedRtcEventLog { std::vector outgoing_fir_; std::vector incoming_pli_; std::vector outgoing_pli_; + std::vector incoming_bye_; + std::vector outgoing_bye_; std::vector incoming_transport_feedback_; std::vector outgoing_transport_feedback_; std::vector incoming_loss_notification_; diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc deleted file mode 100644 index 1c13cc639..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.cc +++ /dev/null @@ -1,1313 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "logging/rtc_event_log/rtc_event_log_unittest_helper.h" - -#include // memcmp - -#include -#include -#include -#include -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/rtp_headers.h" -#include "api/rtp_parameters.h" -#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" -#include "modules/rtp_rtcp/include/rtp_cvo.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" -#include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" -#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/buffer.h" -#include "rtc_base/checks.h" -#include "rtc_base/time_utils.h" -#include "system_wrappers/include/ntp_time.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace test { - -namespace { - -struct ExtensionPair { - RTPExtensionType type; - const char* name; -}; - -constexpr int kMaxCsrcs = 3; - -// Maximum serialized size of a header extension, including 1 byte ID. 
-constexpr int kMaxExtensionSizeBytes = 4; -constexpr int kMaxNumExtensions = 5; - -constexpr ExtensionPair kExtensions[kMaxNumExtensions] = { - {RTPExtensionType::kRtpExtensionTransmissionTimeOffset, - RtpExtension::kTimestampOffsetUri}, - {RTPExtensionType::kRtpExtensionAbsoluteSendTime, - RtpExtension::kAbsSendTimeUri}, - {RTPExtensionType::kRtpExtensionTransportSequenceNumber, - RtpExtension::kTransportSequenceNumberUri}, - {RTPExtensionType::kRtpExtensionAudioLevel, RtpExtension::kAudioLevelUri}, - {RTPExtensionType::kRtpExtensionVideoRotation, - RtpExtension::kVideoRotationUri}}; - -template -void ShuffleInPlace(Random* prng, rtc::ArrayView array) { - RTC_DCHECK_LE(array.size(), std::numeric_limits::max()); - for (uint32_t i = 0; i + 1 < array.size(); i++) { - uint32_t other = prng->Rand(i, static_cast(array.size() - 1)); - std::swap(array[i], array[other]); - } -} - -absl::optional GetExtensionId(const std::vector& extensions, - const std::string& uri) { - for (const auto& extension : extensions) { - if (extension.uri == uri) - return extension.id; - } - return absl::nullopt; -} - -} // namespace - -std::unique_ptr EventGenerator::NewAlrState() { - return std::make_unique(prng_.Rand()); -} - -std::unique_ptr EventGenerator::NewAudioPlayout( - uint32_t ssrc) { - return std::make_unique(ssrc); -} - -std::unique_ptr -EventGenerator::NewAudioNetworkAdaptation() { - std::unique_ptr config = - std::make_unique(); - - config->bitrate_bps = prng_.Rand(0, 3000000); - config->enable_fec = prng_.Rand(); - config->enable_dtx = prng_.Rand(); - config->frame_length_ms = prng_.Rand(10, 120); - config->num_channels = prng_.Rand(1, 2); - config->uplink_packet_loss_fraction = prng_.Rand(); - - return std::make_unique(std::move(config)); -} - -std::unique_ptr -EventGenerator::NewBweUpdateDelayBased() { - constexpr int32_t kMaxBweBps = 20000000; - int32_t bitrate_bps = prng_.Rand(0, kMaxBweBps); - BandwidthUsage state = static_cast( - 
prng_.Rand(static_cast(BandwidthUsage::kLast) - 1)); - return std::make_unique(bitrate_bps, state); -} - -std::unique_ptr -EventGenerator::NewBweUpdateLossBased() { - constexpr int32_t kMaxBweBps = 20000000; - constexpr int32_t kMaxPackets = 1000; - int32_t bitrate_bps = prng_.Rand(0, kMaxBweBps); - uint8_t fraction_lost = prng_.Rand(); - int32_t total_packets = prng_.Rand(1, kMaxPackets); - - return std::make_unique( - bitrate_bps, fraction_lost, total_packets); -} - -std::unique_ptr -EventGenerator::NewDtlsTransportState() { - DtlsTransportState state = static_cast( - prng_.Rand(static_cast(DtlsTransportState::kNumValues) - 1)); - - return std::make_unique(state); -} - -std::unique_ptr -EventGenerator::NewDtlsWritableState() { - bool writable = prng_.Rand(); - return std::make_unique(writable); -} - -std::unique_ptr EventGenerator::NewFrameDecodedEvent( - uint32_t ssrc) { - constexpr int kMinRenderDelayMs = 1; - constexpr int kMaxRenderDelayMs = 2000000; - constexpr int kMaxWidth = 15360; - constexpr int kMaxHeight = 8640; - constexpr int kMinWidth = 16; - constexpr int kMinHeight = 16; - constexpr int kNumCodecTypes = 5; - - constexpr VideoCodecType kCodecList[kNumCodecTypes] = { - kVideoCodecGeneric, kVideoCodecVP8, kVideoCodecVP9, kVideoCodecAV1, - kVideoCodecH264}; - const int64_t render_time_ms = - rtc::TimeMillis() + prng_.Rand(kMinRenderDelayMs, kMaxRenderDelayMs); - const int width = prng_.Rand(kMinWidth, kMaxWidth); - const int height = prng_.Rand(kMinHeight, kMaxHeight); - const VideoCodecType codec = kCodecList[prng_.Rand(0, kNumCodecTypes - 1)]; - const uint8_t qp = prng_.Rand(); - return std::make_unique(render_time_ms, ssrc, width, - height, codec, qp); -} - -std::unique_ptr -EventGenerator::NewProbeClusterCreated() { - constexpr int kMaxBweBps = 20000000; - constexpr int kMaxNumProbes = 10000; - int id = prng_.Rand(1, kMaxNumProbes); - int bitrate_bps = prng_.Rand(0, kMaxBweBps); - int min_probes = prng_.Rand(5, 50); - int min_bytes = 
prng_.Rand(500, 50000); - - return std::make_unique(id, bitrate_bps, - min_probes, min_bytes); -} - -std::unique_ptr -EventGenerator::NewProbeResultFailure() { - constexpr int kMaxNumProbes = 10000; - int id = prng_.Rand(1, kMaxNumProbes); - ProbeFailureReason reason = static_cast( - prng_.Rand(static_cast(ProbeFailureReason::kLast) - 1)); - - return std::make_unique(id, reason); -} - -std::unique_ptr -EventGenerator::NewProbeResultSuccess() { - constexpr int kMaxBweBps = 20000000; - constexpr int kMaxNumProbes = 10000; - int id = prng_.Rand(1, kMaxNumProbes); - int bitrate_bps = prng_.Rand(0, kMaxBweBps); - - return std::make_unique(id, bitrate_bps); -} - -std::unique_ptr -EventGenerator::NewIceCandidatePairConfig() { - IceCandidateType local_candidate_type = static_cast( - prng_.Rand(static_cast(IceCandidateType::kNumValues) - 1)); - IceCandidateNetworkType local_network_type = - static_cast(prng_.Rand( - static_cast(IceCandidateNetworkType::kNumValues) - 1)); - IceCandidatePairAddressFamily local_address_family = - static_cast(prng_.Rand( - static_cast(IceCandidatePairAddressFamily::kNumValues) - - 1)); - IceCandidateType remote_candidate_type = static_cast( - prng_.Rand(static_cast(IceCandidateType::kNumValues) - 1)); - IceCandidatePairAddressFamily remote_address_family = - static_cast(prng_.Rand( - static_cast(IceCandidatePairAddressFamily::kNumValues) - - 1)); - IceCandidatePairProtocol protocol_type = - static_cast(prng_.Rand( - static_cast(IceCandidatePairProtocol::kNumValues) - 1)); - - IceCandidatePairDescription desc; - desc.local_candidate_type = local_candidate_type; - desc.local_relay_protocol = protocol_type; - desc.local_network_type = local_network_type; - desc.local_address_family = local_address_family; - desc.remote_candidate_type = remote_candidate_type; - desc.remote_address_family = remote_address_family; - desc.candidate_pair_protocol = protocol_type; - - IceCandidatePairConfigType type = - static_cast(prng_.Rand( - 
static_cast(IceCandidatePairConfigType::kNumValues) - 1)); - uint32_t pair_id = prng_.Rand(); - return std::make_unique(type, pair_id, desc); -} - -std::unique_ptr -EventGenerator::NewIceCandidatePair() { - IceCandidatePairEventType type = - static_cast(prng_.Rand( - static_cast(IceCandidatePairEventType::kNumValues) - 1)); - uint32_t pair_id = prng_.Rand(); - uint32_t transaction_id = prng_.Rand(); - - return std::make_unique(type, pair_id, - transaction_id); -} - -rtcp::ReportBlock EventGenerator::NewReportBlock() { - rtcp::ReportBlock report_block; - report_block.SetMediaSsrc(prng_.Rand()); - report_block.SetFractionLost(prng_.Rand()); - // cumulative_lost is a 3-byte signed value. - RTC_DCHECK(report_block.SetCumulativeLost( - prng_.Rand(-(1 << 23) + 1, (1 << 23) - 1))); - report_block.SetExtHighestSeqNum(prng_.Rand()); - report_block.SetJitter(prng_.Rand()); - report_block.SetLastSr(prng_.Rand()); - report_block.SetDelayLastSr(prng_.Rand()); - return report_block; -} - -rtcp::SenderReport EventGenerator::NewSenderReport() { - rtcp::SenderReport sender_report; - sender_report.SetSenderSsrc(prng_.Rand()); - sender_report.SetNtp(NtpTime(prng_.Rand(), prng_.Rand())); - sender_report.SetRtpTimestamp(prng_.Rand()); - sender_report.SetPacketCount(prng_.Rand()); - sender_report.SetOctetCount(prng_.Rand()); - sender_report.AddReportBlock(NewReportBlock()); - return sender_report; -} - -rtcp::ReceiverReport EventGenerator::NewReceiverReport() { - rtcp::ReceiverReport receiver_report; - receiver_report.SetSenderSsrc(prng_.Rand()); - receiver_report.AddReportBlock(NewReportBlock()); - return receiver_report; -} - -rtcp::ExtendedReports EventGenerator::NewExtendedReports() { - rtcp::ExtendedReports extended_report; - extended_report.SetSenderSsrc(prng_.Rand()); - - rtcp::Rrtr rrtr; - rrtr.SetNtp(NtpTime(prng_.Rand(), prng_.Rand())); - extended_report.SetRrtr(rrtr); - - rtcp::ReceiveTimeInfo time_info( - prng_.Rand(), prng_.Rand(), prng_.Rand()); - 
extended_report.AddDlrrItem(time_info); - - rtcp::TargetBitrate target_bitrate; - target_bitrate.AddTargetBitrate(/*spatial layer*/ prng_.Rand(0, 3), - /*temporal layer*/ prng_.Rand(0, 3), - /*bitrate kbps*/ prng_.Rand(0, 50000)); - target_bitrate.AddTargetBitrate(/*spatial layer*/ prng_.Rand(4, 7), - /*temporal layer*/ prng_.Rand(4, 7), - /*bitrate kbps*/ prng_.Rand(0, 50000)); - extended_report.SetTargetBitrate(target_bitrate); - return extended_report; -} - -rtcp::Nack EventGenerator::NewNack() { - rtcp::Nack nack; - uint16_t base_seq_no = prng_.Rand(); - std::vector nack_list; - nack_list.push_back(base_seq_no); - for (uint16_t i = 1u; i < 10u; i++) { - if (prng_.Rand()) - nack_list.push_back(base_seq_no + i); - } - nack.SetPacketIds(nack_list); - return nack; -} - -rtcp::Fir EventGenerator::NewFir() { - rtcp::Fir fir; - fir.SetSenderSsrc(prng_.Rand()); - fir.AddRequestTo(/*ssrc*/ prng_.Rand(), - /*seq num*/ prng_.Rand()); - fir.AddRequestTo(/*ssrc*/ prng_.Rand(), - /*seq num*/ prng_.Rand()); - return fir; -} - -rtcp::Pli EventGenerator::NewPli() { - rtcp::Pli pli; - pli.SetSenderSsrc(prng_.Rand()); - pli.SetMediaSsrc(prng_.Rand()); - return pli; -} - -rtcp::TransportFeedback EventGenerator::NewTransportFeedback() { - rtcp::TransportFeedback transport_feedback; - uint16_t base_seq_no = prng_.Rand(); - int64_t base_time_us = prng_.Rand(); - transport_feedback.SetBase(base_seq_no, base_time_us); - transport_feedback.AddReceivedPacket(base_seq_no, base_time_us); - int64_t time_us = base_time_us; - for (uint16_t i = 1u; i < 10u; i++) { - time_us += prng_.Rand(0, 100000); - if (prng_.Rand()) { - transport_feedback.AddReceivedPacket(base_seq_no + i, time_us); - } - } - return transport_feedback; -} - -rtcp::Remb EventGenerator::NewRemb() { - rtcp::Remb remb; - // The remb bitrate is transported as a 16-bit mantissa and an 8-bit exponent. 
- uint64_t bitrate_bps = prng_.Rand(0, (1 << 16) - 1) << prng_.Rand(7); - std::vector ssrcs{prng_.Rand(), prng_.Rand()}; - remb.SetSsrcs(ssrcs); - remb.SetBitrateBps(bitrate_bps); - return remb; -} - -rtcp::LossNotification EventGenerator::NewLossNotification() { - rtcp::LossNotification loss_notification; - const uint16_t last_decoded = prng_.Rand(); - const uint16_t last_received = - last_decoded + (prng_.Rand() & 0x7fff); - const bool decodability_flag = prng_.Rand(); - EXPECT_TRUE( - loss_notification.Set(last_decoded, last_received, decodability_flag)); - return loss_notification; -} - -std::unique_ptr EventGenerator::NewRouteChange() { - return std::make_unique(prng_.Rand(), - prng_.Rand(0, 128)); -} - -std::unique_ptr EventGenerator::NewRemoteEstimate() { - return std::make_unique( - DataRate::KilobitsPerSec(prng_.Rand(0, 100000)), - DataRate::KilobitsPerSec(prng_.Rand(0, 100000))); -} - -std::unique_ptr -EventGenerator::NewRtcpPacketIncoming() { - enum class SupportedRtcpTypes { - kSenderReport = 0, - kReceiverReport, - kExtendedReports, - kFir, - kPli, - kNack, - kRemb, - kTransportFeedback, - kNumValues - }; - SupportedRtcpTypes type = static_cast( - prng_.Rand(0, static_cast(SupportedRtcpTypes::kNumValues) - 1)); - switch (type) { - case SupportedRtcpTypes::kSenderReport: { - rtcp::SenderReport sender_report = NewSenderReport(); - rtc::Buffer buffer = sender_report.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kReceiverReport: { - rtcp::ReceiverReport receiver_report = NewReceiverReport(); - rtc::Buffer buffer = receiver_report.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kExtendedReports: { - rtcp::ExtendedReports extended_report = NewExtendedReports(); - rtc::Buffer buffer = extended_report.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kFir: { - rtcp::Fir fir = NewFir(); - rtc::Buffer buffer = fir.Build(); - return std::make_unique(buffer); - } - case 
SupportedRtcpTypes::kPli: { - rtcp::Pli pli = NewPli(); - rtc::Buffer buffer = pli.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kNack: { - rtcp::Nack nack = NewNack(); - rtc::Buffer buffer = nack.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kRemb: { - rtcp::Remb remb = NewRemb(); - rtc::Buffer buffer = remb.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kTransportFeedback: { - rtcp::TransportFeedback transport_feedback = NewTransportFeedback(); - rtc::Buffer buffer = transport_feedback.Build(); - return std::make_unique(buffer); - } - default: - RTC_NOTREACHED(); - rtc::Buffer buffer; - return std::make_unique(buffer); - } -} - -std::unique_ptr -EventGenerator::NewRtcpPacketOutgoing() { - enum class SupportedRtcpTypes { - kSenderReport = 0, - kReceiverReport, - kExtendedReports, - kFir, - kPli, - kNack, - kRemb, - kTransportFeedback, - kNumValues - }; - SupportedRtcpTypes type = static_cast( - prng_.Rand(0, static_cast(SupportedRtcpTypes::kNumValues) - 1)); - switch (type) { - case SupportedRtcpTypes::kSenderReport: { - rtcp::SenderReport sender_report = NewSenderReport(); - rtc::Buffer buffer = sender_report.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kReceiverReport: { - rtcp::ReceiverReport receiver_report = NewReceiverReport(); - rtc::Buffer buffer = receiver_report.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kExtendedReports: { - rtcp::ExtendedReports extended_report = NewExtendedReports(); - rtc::Buffer buffer = extended_report.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kFir: { - rtcp::Fir fir = NewFir(); - rtc::Buffer buffer = fir.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kPli: { - rtcp::Pli pli = NewPli(); - rtc::Buffer buffer = pli.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kNack: { - rtcp::Nack nack = NewNack(); 
- rtc::Buffer buffer = nack.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kRemb: { - rtcp::Remb remb = NewRemb(); - rtc::Buffer buffer = remb.Build(); - return std::make_unique(buffer); - } - case SupportedRtcpTypes::kTransportFeedback: { - rtcp::TransportFeedback transport_feedback = NewTransportFeedback(); - rtc::Buffer buffer = transport_feedback.Build(); - return std::make_unique(buffer); - } - default: - RTC_NOTREACHED(); - rtc::Buffer buffer; - return std::make_unique(buffer); - } -} - -std::unique_ptr -EventGenerator::NewGenericPacketSent() { - return std::make_unique( - sent_packet_number_++, prng_.Rand(40, 50), prng_.Rand(0, 150), - prng_.Rand(0, 1000)); -} -std::unique_ptr -EventGenerator::NewGenericPacketReceived() { - return std::make_unique( - received_packet_number_++, prng_.Rand(40, 250)); -} -std::unique_ptr -EventGenerator::NewGenericAckReceived() { - absl::optional receive_timestamp = absl::nullopt; - if (prng_.Rand(0, 2) > 0) { - receive_timestamp = prng_.Rand(0, 100000); - } - AckedPacket packet = {prng_.Rand(40, 250), receive_timestamp}; - return std::move(RtcEventGenericAckReceived::CreateLogs( - received_packet_number_++, std::vector{packet})[0]); -} - -void EventGenerator::RandomizeRtpPacket( - size_t payload_size, - size_t padding_size, - uint32_t ssrc, - const RtpHeaderExtensionMap& extension_map, - RtpPacket* rtp_packet, - bool all_configured_exts) { - constexpr int kMaxPayloadType = 127; - rtp_packet->SetPayloadType(prng_.Rand(kMaxPayloadType)); - rtp_packet->SetMarker(prng_.Rand()); - rtp_packet->SetSequenceNumber(prng_.Rand()); - rtp_packet->SetSsrc(ssrc); - rtp_packet->SetTimestamp(prng_.Rand()); - - uint32_t csrcs_count = prng_.Rand(0, kMaxCsrcs); - std::vector csrcs; - for (size_t i = 0; i < csrcs_count; i++) { - csrcs.push_back(prng_.Rand()); - } - rtp_packet->SetCsrcs(csrcs); - - if (extension_map.IsRegistered(TransmissionOffset::kId) && - (all_configured_exts || prng_.Rand())) { - 
rtp_packet->SetExtension(prng_.Rand(0x00ffffff)); - } - - if (extension_map.IsRegistered(AudioLevel::kId) && - (all_configured_exts || prng_.Rand())) { - rtp_packet->SetExtension(prng_.Rand(), prng_.Rand(127)); - } - - if (extension_map.IsRegistered(AbsoluteSendTime::kId) && - (all_configured_exts || prng_.Rand())) { - rtp_packet->SetExtension(prng_.Rand(0x00ffffff)); - } - - if (extension_map.IsRegistered(VideoOrientation::kId) && - (all_configured_exts || prng_.Rand())) { - rtp_packet->SetExtension(prng_.Rand(3)); - } - - if (extension_map.IsRegistered(TransportSequenceNumber::kId) && - (all_configured_exts || prng_.Rand())) { - rtp_packet->SetExtension(prng_.Rand()); - } - - RTC_CHECK_LE(rtp_packet->headers_size() + payload_size, IP_PACKET_SIZE); - - uint8_t* payload = rtp_packet->AllocatePayload(payload_size); - RTC_DCHECK(payload != nullptr); - for (size_t i = 0; i < payload_size; i++) { - payload[i] = prng_.Rand(); - } - RTC_CHECK(rtp_packet->SetPadding(padding_size)); -} - -std::unique_ptr EventGenerator::NewRtpPacketIncoming( - uint32_t ssrc, - const RtpHeaderExtensionMap& extension_map, - bool all_configured_exts) { - constexpr size_t kMaxPaddingLength = 224; - const bool padding = prng_.Rand(0, 9) == 0; // Let padding be 10% probable. - const size_t padding_size = !padding ? 0u : prng_.Rand(0u, kMaxPaddingLength); - - // 12 bytes RTP header, 4 bytes for 0xBEDE + alignment, 4 bytes per CSRC. - constexpr size_t kMaxHeaderSize = - 16 + 4 * kMaxCsrcs + kMaxExtensionSizeBytes * kMaxNumExtensions; - - // In principle, a packet can contain both padding and other payload. - // Currently, RTC eventlog encoder-parser can only maintain padding length if - // packet is full padding. - // TODO(webrtc:9730): Remove the deterministic logic for padding_size > 0. - size_t payload_size = - padding_size > 0 ? 
0 - : prng_.Rand(0u, static_cast(IP_PACKET_SIZE - - 1 - padding_size - - kMaxHeaderSize)); - - RtpPacketReceived rtp_packet(&extension_map); - RandomizeRtpPacket(payload_size, padding_size, ssrc, extension_map, - &rtp_packet, all_configured_exts); - - return std::make_unique(rtp_packet); -} - -std::unique_ptr EventGenerator::NewRtpPacketOutgoing( - uint32_t ssrc, - const RtpHeaderExtensionMap& extension_map, - bool all_configured_exts) { - constexpr size_t kMaxPaddingLength = 224; - const bool padding = prng_.Rand(0, 9) == 0; // Let padding be 10% probable. - const size_t padding_size = !padding ? 0u : prng_.Rand(0u, kMaxPaddingLength); - - // 12 bytes RTP header, 4 bytes for 0xBEDE + alignment, 4 bytes per CSRC. - constexpr size_t kMaxHeaderSize = - 16 + 4 * kMaxCsrcs + kMaxExtensionSizeBytes * kMaxNumExtensions; - - // In principle,a packet can contain both padding and other payload. - // Currently, RTC eventlog encoder-parser can only maintain padding length if - // packet is full padding. - // TODO(webrtc:9730): Remove the deterministic logic for padding_size > 0. - size_t payload_size = - padding_size > 0 ? 
0 - : prng_.Rand(0u, static_cast(IP_PACKET_SIZE - - 1 - padding_size - - kMaxHeaderSize)); - - RtpPacketToSend rtp_packet(&extension_map, - kMaxHeaderSize + payload_size + padding_size); - RandomizeRtpPacket(payload_size, padding_size, ssrc, extension_map, - &rtp_packet, all_configured_exts); - - int probe_cluster_id = prng_.Rand(0, 100000); - return std::make_unique(rtp_packet, - probe_cluster_id); -} - -RtpHeaderExtensionMap EventGenerator::NewRtpHeaderExtensionMap( - bool configure_all) { - RtpHeaderExtensionMap extension_map; - std::vector id(RtpExtension::kOneByteHeaderExtensionMaxId - - RtpExtension::kMinId + 1); - std::iota(id.begin(), id.end(), RtpExtension::kMinId); - ShuffleInPlace(&prng_, rtc::ArrayView(id)); - - if (configure_all || prng_.Rand()) { - extension_map.Register(id[0]); - } - if (configure_all || prng_.Rand()) { - extension_map.Register(id[1]); - } - if (configure_all || prng_.Rand()) { - extension_map.Register(id[2]); - } - if (configure_all || prng_.Rand()) { - extension_map.Register(id[3]); - } - if (configure_all || prng_.Rand()) { - extension_map.Register(id[4]); - } - - return extension_map; -} - -std::unique_ptr -EventGenerator::NewAudioReceiveStreamConfig( - uint32_t ssrc, - const RtpHeaderExtensionMap& extensions) { - auto config = std::make_unique(); - // Add SSRCs for the stream. - config->remote_ssrc = ssrc; - config->local_ssrc = prng_.Rand(); - // Add header extensions. - for (size_t i = 0; i < kMaxNumExtensions; i++) { - uint8_t id = extensions.GetId(kExtensions[i].type); - if (id != RtpHeaderExtensionMap::kInvalidId) { - config->rtp_extensions.emplace_back(kExtensions[i].name, id); - } - } - - return std::make_unique(std::move(config)); -} - -std::unique_ptr -EventGenerator::NewAudioSendStreamConfig( - uint32_t ssrc, - const RtpHeaderExtensionMap& extensions) { - auto config = std::make_unique(); - // Add SSRC to the stream. - config->local_ssrc = ssrc; - // Add header extensions. 
- for (size_t i = 0; i < kMaxNumExtensions; i++) { - uint8_t id = extensions.GetId(kExtensions[i].type); - if (id != RtpHeaderExtensionMap::kInvalidId) { - config->rtp_extensions.emplace_back(kExtensions[i].name, id); - } - } - return std::make_unique(std::move(config)); -} - -std::unique_ptr -EventGenerator::NewVideoReceiveStreamConfig( - uint32_t ssrc, - const RtpHeaderExtensionMap& extensions) { - auto config = std::make_unique(); - - // Add SSRCs for the stream. - config->remote_ssrc = ssrc; - config->local_ssrc = prng_.Rand(); - // Add extensions and settings for RTCP. - config->rtcp_mode = - prng_.Rand() ? RtcpMode::kCompound : RtcpMode::kReducedSize; - config->remb = prng_.Rand(); - config->rtx_ssrc = prng_.Rand(); - config->codecs.emplace_back(prng_.Rand() ? "VP8" : "H264", - prng_.Rand(127), prng_.Rand(127)); - // Add header extensions. - for (size_t i = 0; i < kMaxNumExtensions; i++) { - uint8_t id = extensions.GetId(kExtensions[i].type); - if (id != RtpHeaderExtensionMap::kInvalidId) { - config->rtp_extensions.emplace_back(kExtensions[i].name, id); - } - } - return std::make_unique(std::move(config)); -} - -std::unique_ptr -EventGenerator::NewVideoSendStreamConfig( - uint32_t ssrc, - const RtpHeaderExtensionMap& extensions) { - auto config = std::make_unique(); - - config->codecs.emplace_back(prng_.Rand() ? "VP8" : "H264", - prng_.Rand(127), prng_.Rand(127)); - config->local_ssrc = ssrc; - config->rtx_ssrc = prng_.Rand(); - // Add header extensions. 
- for (size_t i = 0; i < kMaxNumExtensions; i++) { - uint8_t id = extensions.GetId(kExtensions[i].type); - if (id != RtpHeaderExtensionMap::kInvalidId) { - config->rtp_extensions.emplace_back(kExtensions[i].name, id); - } - } - return std::make_unique(std::move(config)); -} - -void EventVerifier::VerifyLoggedAlrStateEvent( - const RtcEventAlrState& original_event, - const LoggedAlrStateEvent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.in_alr(), logged_event.in_alr); -} - -void EventVerifier::VerifyLoggedAudioPlayoutEvent( - const RtcEventAudioPlayout& original_event, - const LoggedAudioPlayoutEvent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.ssrc(), logged_event.ssrc); -} - -void EventVerifier::VerifyLoggedAudioNetworkAdaptationEvent( - const RtcEventAudioNetworkAdaptation& original_event, - const LoggedAudioNetworkAdaptationEvent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - - EXPECT_EQ(original_event.config().bitrate_bps, - logged_event.config.bitrate_bps); - EXPECT_EQ(original_event.config().enable_dtx, logged_event.config.enable_dtx); - EXPECT_EQ(original_event.config().enable_fec, logged_event.config.enable_fec); - EXPECT_EQ(original_event.config().frame_length_ms, - logged_event.config.frame_length_ms); - EXPECT_EQ(original_event.config().num_channels, - logged_event.config.num_channels); - - // uplink_packet_loss_fraction - ASSERT_EQ(original_event.config().uplink_packet_loss_fraction.has_value(), - logged_event.config.uplink_packet_loss_fraction.has_value()); - if (original_event.config().uplink_packet_loss_fraction.has_value()) { - const float original = - original_event.config().uplink_packet_loss_fraction.value(); - const float logged = - logged_event.config.uplink_packet_loss_fraction.value(); - const float uplink_packet_loss_fraction_delta = 
std::abs(original - logged); - EXPECT_LE(uplink_packet_loss_fraction_delta, 0.0001f); - } -} - -void EventVerifier::VerifyLoggedBweDelayBasedUpdate( - const RtcEventBweUpdateDelayBased& original_event, - const LoggedBweDelayBasedUpdate& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.bitrate_bps(), logged_event.bitrate_bps); - EXPECT_EQ(original_event.detector_state(), logged_event.detector_state); -} - -void EventVerifier::VerifyLoggedBweLossBasedUpdate( - const RtcEventBweUpdateLossBased& original_event, - const LoggedBweLossBasedUpdate& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.bitrate_bps(), logged_event.bitrate_bps); - EXPECT_EQ(original_event.fraction_loss(), logged_event.fraction_lost); - EXPECT_EQ(original_event.total_packets(), logged_event.expected_packets); -} - -void EventVerifier::VerifyLoggedBweProbeClusterCreatedEvent( - const RtcEventProbeClusterCreated& original_event, - const LoggedBweProbeClusterCreatedEvent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.id(), logged_event.id); - EXPECT_EQ(original_event.bitrate_bps(), logged_event.bitrate_bps); - EXPECT_EQ(original_event.min_probes(), logged_event.min_packets); - EXPECT_EQ(original_event.min_bytes(), logged_event.min_bytes); -} - -void EventVerifier::VerifyLoggedBweProbeFailureEvent( - const RtcEventProbeResultFailure& original_event, - const LoggedBweProbeFailureEvent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.id(), logged_event.id); - EXPECT_EQ(original_event.failure_reason(), logged_event.failure_reason); -} - -void EventVerifier::VerifyLoggedBweProbeSuccessEvent( - const RtcEventProbeResultSuccess& original_event, - const LoggedBweProbeSuccessEvent& logged_event) const { - 
EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.id(), logged_event.id); - EXPECT_EQ(original_event.bitrate_bps(), logged_event.bitrate_bps); -} - -void EventVerifier::VerifyLoggedDtlsTransportState( - const RtcEventDtlsTransportState& original_event, - const LoggedDtlsTransportState& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.dtls_transport_state(), - logged_event.dtls_transport_state); -} - -void EventVerifier::VerifyLoggedDtlsWritableState( - const RtcEventDtlsWritableState& original_event, - const LoggedDtlsWritableState& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.writable(), logged_event.writable); -} - -void EventVerifier::VerifyLoggedFrameDecoded( - const RtcEventFrameDecoded& original_event, - const LoggedFrameDecoded& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.ssrc(), logged_event.ssrc); - EXPECT_EQ(original_event.render_time_ms(), logged_event.render_time_ms); - EXPECT_EQ(original_event.width(), logged_event.width); - EXPECT_EQ(original_event.height(), logged_event.height); - EXPECT_EQ(original_event.codec(), logged_event.codec); - EXPECT_EQ(original_event.qp(), logged_event.qp); -} - -void EventVerifier::VerifyLoggedIceCandidatePairConfig( - const RtcEventIceCandidatePairConfig& original_event, - const LoggedIceCandidatePairConfig& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - - EXPECT_EQ(original_event.type(), logged_event.type); - EXPECT_EQ(original_event.candidate_pair_id(), logged_event.candidate_pair_id); - EXPECT_EQ(original_event.candidate_pair_desc().local_candidate_type, - logged_event.local_candidate_type); - EXPECT_EQ(original_event.candidate_pair_desc().local_relay_protocol, - logged_event.local_relay_protocol); - 
EXPECT_EQ(original_event.candidate_pair_desc().local_network_type, - logged_event.local_network_type); - EXPECT_EQ(original_event.candidate_pair_desc().local_address_family, - logged_event.local_address_family); - EXPECT_EQ(original_event.candidate_pair_desc().remote_candidate_type, - logged_event.remote_candidate_type); - EXPECT_EQ(original_event.candidate_pair_desc().remote_address_family, - logged_event.remote_address_family); - EXPECT_EQ(original_event.candidate_pair_desc().candidate_pair_protocol, - logged_event.candidate_pair_protocol); -} - -void EventVerifier::VerifyLoggedIceCandidatePairEvent( - const RtcEventIceCandidatePair& original_event, - const LoggedIceCandidatePairEvent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - - EXPECT_EQ(original_event.type(), logged_event.type); - EXPECT_EQ(original_event.candidate_pair_id(), logged_event.candidate_pair_id); - if (encoding_type_ == RtcEventLog::EncodingType::NewFormat) { - EXPECT_EQ(original_event.transaction_id(), logged_event.transaction_id); - } -} - -void VerifyLoggedRtpHeader(const RtpPacket& original_header, - const RTPHeader& logged_header) { - // Standard RTP header. - EXPECT_EQ(original_header.Marker(), logged_header.markerBit); - EXPECT_EQ(original_header.PayloadType(), logged_header.payloadType); - EXPECT_EQ(original_header.SequenceNumber(), logged_header.sequenceNumber); - EXPECT_EQ(original_header.Timestamp(), logged_header.timestamp); - EXPECT_EQ(original_header.Ssrc(), logged_header.ssrc); - - EXPECT_EQ(original_header.headers_size(), logged_header.headerLength); - - // TransmissionOffset header extension. - ASSERT_EQ(original_header.HasExtension(), - logged_header.extension.hasTransmissionTimeOffset); - if (logged_header.extension.hasTransmissionTimeOffset) { - int32_t offset; - ASSERT_TRUE(original_header.GetExtension(&offset)); - EXPECT_EQ(offset, logged_header.extension.transmissionTimeOffset); - } - - // AbsoluteSendTime header extension. 
- ASSERT_EQ(original_header.HasExtension(), - logged_header.extension.hasAbsoluteSendTime); - if (logged_header.extension.hasAbsoluteSendTime) { - uint32_t sendtime; - ASSERT_TRUE(original_header.GetExtension(&sendtime)); - EXPECT_EQ(sendtime, logged_header.extension.absoluteSendTime); - } - - // TransportSequenceNumber header extension. - ASSERT_EQ(original_header.HasExtension(), - logged_header.extension.hasTransportSequenceNumber); - if (logged_header.extension.hasTransportSequenceNumber) { - uint16_t seqnum; - ASSERT_TRUE(original_header.GetExtension(&seqnum)); - EXPECT_EQ(seqnum, logged_header.extension.transportSequenceNumber); - } - - // AudioLevel header extension. - ASSERT_EQ(original_header.HasExtension(), - logged_header.extension.hasAudioLevel); - if (logged_header.extension.hasAudioLevel) { - bool voice_activity; - uint8_t audio_level; - ASSERT_TRUE(original_header.GetExtension(&voice_activity, - &audio_level)); - EXPECT_EQ(voice_activity, logged_header.extension.voiceActivity); - EXPECT_EQ(audio_level, logged_header.extension.audioLevel); - } - - // VideoOrientation header extension. 
- ASSERT_EQ(original_header.HasExtension(), - logged_header.extension.hasVideoRotation); - if (logged_header.extension.hasVideoRotation) { - uint8_t rotation; - ASSERT_TRUE(original_header.GetExtension(&rotation)); - EXPECT_EQ(ConvertCVOByteToVideoRotation(rotation), - logged_header.extension.videoRotation); - } -} - -void EventVerifier::VerifyLoggedRouteChangeEvent( - const RtcEventRouteChange& original_event, - const LoggedRouteChangeEvent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.connected(), logged_event.connected); - EXPECT_EQ(original_event.overhead(), logged_event.overhead); -} - -void EventVerifier::VerifyLoggedRemoteEstimateEvent( - const RtcEventRemoteEstimate& original_event, - const LoggedRemoteEstimateEvent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.link_capacity_lower_, - logged_event.link_capacity_lower); - EXPECT_EQ(original_event.link_capacity_upper_, - logged_event.link_capacity_upper); -} - -void EventVerifier::VerifyLoggedRtpPacketIncoming( - const RtcEventRtpPacketIncoming& original_event, - const LoggedRtpPacketIncoming& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - - EXPECT_EQ(original_event.header().headers_size(), - logged_event.rtp.header_length); - - EXPECT_EQ(original_event.packet_length(), logged_event.rtp.total_length); - - // Currently, RTC eventlog encoder-parser can only maintain padding length - // if packet is full padding. 
- EXPECT_EQ(original_event.padding_length(), - logged_event.rtp.header.paddingLength); - - VerifyLoggedRtpHeader(original_event.header(), logged_event.rtp.header); -} - -void EventVerifier::VerifyLoggedRtpPacketOutgoing( - const RtcEventRtpPacketOutgoing& original_event, - const LoggedRtpPacketOutgoing& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - - EXPECT_EQ(original_event.header().headers_size(), - logged_event.rtp.header_length); - - EXPECT_EQ(original_event.packet_length(), logged_event.rtp.total_length); - - // Currently, RTC eventlog encoder-parser can only maintain padding length - // if packet is full padding. - EXPECT_EQ(original_event.padding_length(), - logged_event.rtp.header.paddingLength); - - // TODO(terelius): Probe cluster ID isn't parsed, used or tested. Unless - // someone has a strong reason to keep it, it'll be removed. - - VerifyLoggedRtpHeader(original_event.header(), logged_event.rtp.header); -} - -void EventVerifier::VerifyLoggedGenericPacketSent( - const RtcEventGenericPacketSent& original_event, - const LoggedGenericPacketSent& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.packet_number(), logged_event.packet_number); - EXPECT_EQ(original_event.overhead_length(), logged_event.overhead_length); - EXPECT_EQ(original_event.payload_length(), logged_event.payload_length); - EXPECT_EQ(original_event.padding_length(), logged_event.padding_length); -} - -void EventVerifier::VerifyLoggedGenericPacketReceived( - const RtcEventGenericPacketReceived& original_event, - const LoggedGenericPacketReceived& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.packet_number(), logged_event.packet_number); - EXPECT_EQ(static_cast(original_event.packet_length()), - logged_event.packet_length); -} - -void EventVerifier::VerifyLoggedGenericAckReceived( - const 
RtcEventGenericAckReceived& original_event, - const LoggedGenericAckReceived& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - EXPECT_EQ(original_event.packet_number(), logged_event.packet_number); - EXPECT_EQ(original_event.acked_packet_number(), - logged_event.acked_packet_number); - EXPECT_EQ(original_event.receive_acked_packet_time_ms(), - logged_event.receive_acked_packet_time_ms); -} - -void EventVerifier::VerifyLoggedRtcpPacketIncoming( - const RtcEventRtcpPacketIncoming& original_event, - const LoggedRtcpPacketIncoming& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - - ASSERT_EQ(original_event.packet().size(), logged_event.rtcp.raw_data.size()); - EXPECT_EQ( - memcmp(original_event.packet().data(), logged_event.rtcp.raw_data.data(), - original_event.packet().size()), - 0); -} - -void EventVerifier::VerifyLoggedRtcpPacketOutgoing( - const RtcEventRtcpPacketOutgoing& original_event, - const LoggedRtcpPacketOutgoing& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - - ASSERT_EQ(original_event.packet().size(), logged_event.rtcp.raw_data.size()); - EXPECT_EQ( - memcmp(original_event.packet().data(), logged_event.rtcp.raw_data.data(), - original_event.packet().size()), - 0); -} - -void EventVerifier::VerifyReportBlock( - const rtcp::ReportBlock& original_report_block, - const rtcp::ReportBlock& logged_report_block) { - EXPECT_EQ(original_report_block.source_ssrc(), - logged_report_block.source_ssrc()); - EXPECT_EQ(original_report_block.fraction_lost(), - logged_report_block.fraction_lost()); - EXPECT_EQ(original_report_block.cumulative_lost_signed(), - logged_report_block.cumulative_lost_signed()); - EXPECT_EQ(original_report_block.extended_high_seq_num(), - logged_report_block.extended_high_seq_num()); - EXPECT_EQ(original_report_block.jitter(), logged_report_block.jitter()); - EXPECT_EQ(original_report_block.last_sr(), 
logged_report_block.last_sr()); - EXPECT_EQ(original_report_block.delay_since_last_sr(), - logged_report_block.delay_since_last_sr()); -} - -void EventVerifier::VerifyLoggedSenderReport( - int64_t log_time_us, - const rtcp::SenderReport& original_sr, - const LoggedRtcpPacketSenderReport& logged_sr) { - EXPECT_EQ(log_time_us, logged_sr.log_time_us()); - EXPECT_EQ(original_sr.sender_ssrc(), logged_sr.sr.sender_ssrc()); - EXPECT_EQ(original_sr.ntp(), logged_sr.sr.ntp()); - EXPECT_EQ(original_sr.rtp_timestamp(), logged_sr.sr.rtp_timestamp()); - EXPECT_EQ(original_sr.sender_packet_count(), - logged_sr.sr.sender_packet_count()); - EXPECT_EQ(original_sr.sender_octet_count(), - logged_sr.sr.sender_octet_count()); - ASSERT_EQ(original_sr.report_blocks().size(), - logged_sr.sr.report_blocks().size()); - for (size_t i = 0; i < original_sr.report_blocks().size(); i++) { - VerifyReportBlock(original_sr.report_blocks()[i], - logged_sr.sr.report_blocks()[i]); - } -} - -void EventVerifier::VerifyLoggedReceiverReport( - int64_t log_time_us, - const rtcp::ReceiverReport& original_rr, - const LoggedRtcpPacketReceiverReport& logged_rr) { - EXPECT_EQ(log_time_us, logged_rr.log_time_us()); - EXPECT_EQ(original_rr.sender_ssrc(), logged_rr.rr.sender_ssrc()); - ASSERT_EQ(original_rr.report_blocks().size(), - logged_rr.rr.report_blocks().size()); - for (size_t i = 0; i < original_rr.report_blocks().size(); i++) { - VerifyReportBlock(original_rr.report_blocks()[i], - logged_rr.rr.report_blocks()[i]); - } -} - -void EventVerifier::VerifyLoggedExtendedReports( - int64_t log_time_us, - const rtcp::ExtendedReports& original_xr, - const LoggedRtcpPacketExtendedReports& logged_xr) { - EXPECT_EQ(original_xr.sender_ssrc(), logged_xr.xr.sender_ssrc()); - - EXPECT_EQ(original_xr.rrtr().has_value(), logged_xr.xr.rrtr().has_value()); - if (original_xr.rrtr().has_value() && logged_xr.xr.rrtr().has_value()) { - EXPECT_EQ(original_xr.rrtr()->ntp(), logged_xr.xr.rrtr()->ntp()); - } - - const auto& 
original_subblocks = original_xr.dlrr().sub_blocks(); - const auto& logged_subblocks = logged_xr.xr.dlrr().sub_blocks(); - ASSERT_EQ(original_subblocks.size(), logged_subblocks.size()); - for (size_t i = 0; i < original_subblocks.size(); i++) { - EXPECT_EQ(original_subblocks[i].ssrc, logged_subblocks[i].ssrc); - EXPECT_EQ(original_subblocks[i].last_rr, logged_subblocks[i].last_rr); - EXPECT_EQ(original_subblocks[i].delay_since_last_rr, - logged_subblocks[i].delay_since_last_rr); - } - - EXPECT_EQ(original_xr.target_bitrate().has_value(), - logged_xr.xr.target_bitrate().has_value()); - if (original_xr.target_bitrate().has_value() && - logged_xr.xr.target_bitrate().has_value()) { - const auto& original_bitrates = - original_xr.target_bitrate()->GetTargetBitrates(); - const auto& logged_bitrates = - logged_xr.xr.target_bitrate()->GetTargetBitrates(); - ASSERT_EQ(original_bitrates.size(), logged_bitrates.size()); - for (size_t i = 0; i < original_bitrates.size(); i++) { - EXPECT_EQ(original_bitrates[i].spatial_layer, - logged_bitrates[i].spatial_layer); - EXPECT_EQ(original_bitrates[i].temporal_layer, - logged_bitrates[i].temporal_layer); - EXPECT_EQ(original_bitrates[i].target_bitrate_kbps, - logged_bitrates[i].target_bitrate_kbps); - } - } -} - -void EventVerifier::VerifyLoggedFir(int64_t log_time_us, - const rtcp::Fir& original_fir, - const LoggedRtcpPacketFir& logged_fir) { - EXPECT_EQ(original_fir.sender_ssrc(), logged_fir.fir.sender_ssrc()); - - const auto& original_requests = original_fir.requests(); - const auto& logged_requests = logged_fir.fir.requests(); - ASSERT_EQ(original_requests.size(), logged_requests.size()); - for (size_t i = 0; i < original_requests.size(); i++) { - EXPECT_EQ(original_requests[i].ssrc, logged_requests[i].ssrc); - EXPECT_EQ(original_requests[i].seq_nr, logged_requests[i].seq_nr); - } -} - -void EventVerifier::VerifyLoggedPli(int64_t log_time_us, - const rtcp::Pli& original_pli, - const LoggedRtcpPacketPli& logged_pli) { - 
EXPECT_EQ(original_pli.sender_ssrc(), logged_pli.pli.sender_ssrc()); - EXPECT_EQ(original_pli.media_ssrc(), logged_pli.pli.media_ssrc()); -} - -void EventVerifier::VerifyLoggedNack(int64_t log_time_us, - const rtcp::Nack& original_nack, - const LoggedRtcpPacketNack& logged_nack) { - EXPECT_EQ(log_time_us, logged_nack.log_time_us()); - EXPECT_EQ(original_nack.packet_ids(), logged_nack.nack.packet_ids()); -} - -void EventVerifier::VerifyLoggedTransportFeedback( - int64_t log_time_us, - const rtcp::TransportFeedback& original_transport_feedback, - const LoggedRtcpPacketTransportFeedback& logged_transport_feedback) { - EXPECT_EQ(log_time_us, logged_transport_feedback.log_time_us()); - ASSERT_EQ( - original_transport_feedback.GetReceivedPackets().size(), - logged_transport_feedback.transport_feedback.GetReceivedPackets().size()); - for (size_t i = 0; - i < original_transport_feedback.GetReceivedPackets().size(); i++) { - EXPECT_EQ( - original_transport_feedback.GetReceivedPackets()[i].sequence_number(), - logged_transport_feedback.transport_feedback.GetReceivedPackets()[i] - .sequence_number()); - EXPECT_EQ( - original_transport_feedback.GetReceivedPackets()[i].delta_us(), - logged_transport_feedback.transport_feedback.GetReceivedPackets()[i] - .delta_us()); - } -} - -void EventVerifier::VerifyLoggedRemb(int64_t log_time_us, - const rtcp::Remb& original_remb, - const LoggedRtcpPacketRemb& logged_remb) { - EXPECT_EQ(log_time_us, logged_remb.log_time_us()); - EXPECT_EQ(original_remb.ssrcs(), logged_remb.remb.ssrcs()); - EXPECT_EQ(original_remb.bitrate_bps(), logged_remb.remb.bitrate_bps()); -} - -void EventVerifier::VerifyLoggedLossNotification( - int64_t log_time_us, - const rtcp::LossNotification& original_loss_notification, - const LoggedRtcpPacketLossNotification& logged_loss_notification) { - EXPECT_EQ(log_time_us, logged_loss_notification.log_time_us()); - EXPECT_EQ(original_loss_notification.last_decoded(), - 
logged_loss_notification.loss_notification.last_decoded()); - EXPECT_EQ(original_loss_notification.last_received(), - logged_loss_notification.loss_notification.last_received()); - EXPECT_EQ(original_loss_notification.decodability_flag(), - logged_loss_notification.loss_notification.decodability_flag()); -} - -void EventVerifier::VerifyLoggedStartEvent( - int64_t start_time_us, - int64_t utc_start_time_us, - const LoggedStartEvent& logged_event) const { - EXPECT_EQ(start_time_us / 1000, logged_event.log_time_ms()); - if (encoding_type_ == RtcEventLog::EncodingType::NewFormat) { - EXPECT_EQ(utc_start_time_us / 1000, logged_event.utc_start_time_ms); - } -} - -void EventVerifier::VerifyLoggedStopEvent( - int64_t stop_time_us, - const LoggedStopEvent& logged_event) const { - EXPECT_EQ(stop_time_us / 1000, logged_event.log_time_ms()); -} - -void VerifyLoggedStreamConfig(const rtclog::StreamConfig& original_config, - const rtclog::StreamConfig& logged_config) { - EXPECT_EQ(original_config.local_ssrc, logged_config.local_ssrc); - EXPECT_EQ(original_config.remote_ssrc, logged_config.remote_ssrc); - EXPECT_EQ(original_config.rtx_ssrc, logged_config.rtx_ssrc); - - EXPECT_EQ(original_config.rtp_extensions.size(), - logged_config.rtp_extensions.size()); - size_t recognized_extensions = 0; - for (size_t i = 0; i < kMaxNumExtensions; i++) { - auto original_id = - GetExtensionId(original_config.rtp_extensions, kExtensions[i].name); - auto logged_id = - GetExtensionId(logged_config.rtp_extensions, kExtensions[i].name); - EXPECT_EQ(original_id, logged_id) - << "IDs for " << kExtensions[i].name << " don't match. Original ID " - << original_id.value_or(-1) << ". 
Parsed ID " << logged_id.value_or(-1) - << "."; - if (original_id) { - recognized_extensions++; - } - } - EXPECT_EQ(recognized_extensions, original_config.rtp_extensions.size()); -} - -void EventVerifier::VerifyLoggedAudioRecvConfig( - const RtcEventAudioReceiveStreamConfig& original_event, - const LoggedAudioRecvConfig& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - VerifyLoggedStreamConfig(original_event.config(), logged_event.config); -} - -void EventVerifier::VerifyLoggedAudioSendConfig( - const RtcEventAudioSendStreamConfig& original_event, - const LoggedAudioSendConfig& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - VerifyLoggedStreamConfig(original_event.config(), logged_event.config); -} - -void EventVerifier::VerifyLoggedVideoRecvConfig( - const RtcEventVideoReceiveStreamConfig& original_event, - const LoggedVideoRecvConfig& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - VerifyLoggedStreamConfig(original_event.config(), logged_event.config); -} - -void EventVerifier::VerifyLoggedVideoSendConfig( - const RtcEventVideoSendStreamConfig& original_event, - const LoggedVideoSendConfig& logged_event) const { - EXPECT_EQ(original_event.timestamp_ms(), logged_event.log_time_ms()); - VerifyLoggedStreamConfig(original_event.config(), logged_event.config); -} - -} // namespace test -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.h deleted file mode 100644 index 8f564ff9d..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_unittest_helper.h +++ /dev/null @@ -1,325 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_UNITTEST_HELPER_H_ -#define LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_UNITTEST_HELPER_H_ - -#include -#include - -#include - -#include "logging/rtc_event_log/events/rtc_event_alr_state.h" -#include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" -#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" -#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" -#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" -#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" -#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" -#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" -#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" -#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" -#include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" -#include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" -#include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" -#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" -#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" -#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" -#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" -#include "logging/rtc_event_log/events/rtc_event_route_change.h" -#include 
"logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" -#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" -#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" -#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" -#include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" -#include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" -#include "logging/rtc_event_log/rtc_event_log_parser.h" -#include "logging/rtc_event_log/rtc_stream_config.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" -#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" -#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" -#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" -#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" -#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" -#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" -#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" -#include "modules/rtp_rtcp/source/rtp_packet.h" -#include "rtc_base/random.h" - -namespace webrtc { - -namespace test { - -class EventGenerator { - public: - explicit EventGenerator(uint64_t seed) : prng_(seed) {} - - std::unique_ptr NewAlrState(); - std::unique_ptr NewAudioNetworkAdaptation(); - std::unique_ptr NewAudioPlayout(uint32_t ssrc); - std::unique_ptr NewBweUpdateDelayBased(); - std::unique_ptr NewBweUpdateLossBased(); - std::unique_ptr NewDtlsTransportState(); - std::unique_ptr NewDtlsWritableState(); - std::unique_ptr NewFrameDecodedEvent(uint32_t ssrc); - std::unique_ptr NewGenericAckReceived(); - std::unique_ptr NewGenericPacketReceived(); - std::unique_ptr NewGenericPacketSent(); - std::unique_ptr NewIceCandidatePair(); - std::unique_ptr NewIceCandidatePairConfig(); - std::unique_ptr 
NewProbeClusterCreated(); - std::unique_ptr NewProbeResultFailure(); - std::unique_ptr NewProbeResultSuccess(); - std::unique_ptr NewRouteChange(); - std::unique_ptr NewRemoteEstimate(); - std::unique_ptr NewRtcpPacketIncoming(); - std::unique_ptr NewRtcpPacketOutgoing(); - - rtcp::SenderReport NewSenderReport(); - rtcp::ReceiverReport NewReceiverReport(); - rtcp::ExtendedReports NewExtendedReports(); - rtcp::Nack NewNack(); - rtcp::Remb NewRemb(); - rtcp::Fir NewFir(); - rtcp::Pli NewPli(); - rtcp::TransportFeedback NewTransportFeedback(); - rtcp::LossNotification NewLossNotification(); - - // |all_configured_exts| determines whether the RTP packet exhibits all - // configured extensions, or a random subset thereof. - void RandomizeRtpPacket(size_t payload_size, - size_t padding_size, - uint32_t ssrc, - const RtpHeaderExtensionMap& extension_map, - RtpPacket* rtp_packet, - bool all_configured_exts); - - // |all_configured_exts| determines whether the RTP packet exhibits all - // configured extensions, or a random subset thereof. - std::unique_ptr NewRtpPacketIncoming( - uint32_t ssrc, - const RtpHeaderExtensionMap& extension_map, - bool all_configured_exts = true); - - // |all_configured_exts| determines whether the RTP packet exhibits all - // configured extensions, or a random subset thereof. - std::unique_ptr NewRtpPacketOutgoing( - uint32_t ssrc, - const RtpHeaderExtensionMap& extension_map, - bool all_configured_exts = true); - - // |configure_all| determines whether all supported extensions are configured, - // or a random subset. 
- RtpHeaderExtensionMap NewRtpHeaderExtensionMap(bool configure_all = false); - - std::unique_ptr NewAudioReceiveStreamConfig( - uint32_t ssrc, - const RtpHeaderExtensionMap& extensions); - - std::unique_ptr NewAudioSendStreamConfig( - uint32_t ssrc, - const RtpHeaderExtensionMap& extensions); - - std::unique_ptr NewVideoReceiveStreamConfig( - uint32_t ssrc, - const RtpHeaderExtensionMap& extensions); - - std::unique_ptr NewVideoSendStreamConfig( - uint32_t ssrc, - const RtpHeaderExtensionMap& extensions); - - private: - rtcp::ReportBlock NewReportBlock(); - int sent_packet_number_ = 0; - int received_packet_number_ = 0; - - Random prng_; -}; - -class EventVerifier { - public: - explicit EventVerifier(RtcEventLog::EncodingType encoding_type) - : encoding_type_(encoding_type) {} - - void VerifyLoggedAlrStateEvent(const RtcEventAlrState& original_event, - const LoggedAlrStateEvent& logged_event) const; - - void VerifyLoggedAudioPlayoutEvent( - const RtcEventAudioPlayout& original_event, - const LoggedAudioPlayoutEvent& logged_event) const; - - void VerifyLoggedAudioNetworkAdaptationEvent( - const RtcEventAudioNetworkAdaptation& original_event, - const LoggedAudioNetworkAdaptationEvent& logged_event) const; - - void VerifyLoggedBweDelayBasedUpdate( - const RtcEventBweUpdateDelayBased& original_event, - const LoggedBweDelayBasedUpdate& logged_event) const; - - void VerifyLoggedBweLossBasedUpdate( - const RtcEventBweUpdateLossBased& original_event, - const LoggedBweLossBasedUpdate& logged_event) const; - - void VerifyLoggedBweProbeClusterCreatedEvent( - const RtcEventProbeClusterCreated& original_event, - const LoggedBweProbeClusterCreatedEvent& logged_event) const; - - void VerifyLoggedBweProbeFailureEvent( - const RtcEventProbeResultFailure& original_event, - const LoggedBweProbeFailureEvent& logged_event) const; - - void VerifyLoggedBweProbeSuccessEvent( - const RtcEventProbeResultSuccess& original_event, - const LoggedBweProbeSuccessEvent& logged_event) const; - - 
void VerifyLoggedDtlsTransportState( - const RtcEventDtlsTransportState& original_event, - const LoggedDtlsTransportState& logged_event) const; - - void VerifyLoggedDtlsWritableState( - const RtcEventDtlsWritableState& original_event, - const LoggedDtlsWritableState& logged_event) const; - - void VerifyLoggedFrameDecoded(const RtcEventFrameDecoded& original_event, - const LoggedFrameDecoded& logged_event) const; - - void VerifyLoggedIceCandidatePairConfig( - const RtcEventIceCandidatePairConfig& original_event, - const LoggedIceCandidatePairConfig& logged_event) const; - - void VerifyLoggedIceCandidatePairEvent( - const RtcEventIceCandidatePair& original_event, - const LoggedIceCandidatePairEvent& logged_event) const; - - void VerifyLoggedRouteChangeEvent( - const RtcEventRouteChange& original_event, - const LoggedRouteChangeEvent& logged_event) const; - - void VerifyLoggedRemoteEstimateEvent( - const RtcEventRemoteEstimate& original_event, - const LoggedRemoteEstimateEvent& logged_event) const; - - void VerifyLoggedRtpPacketIncoming( - const RtcEventRtpPacketIncoming& original_event, - const LoggedRtpPacketIncoming& logged_event) const; - - void VerifyLoggedRtpPacketOutgoing( - const RtcEventRtpPacketOutgoing& original_event, - const LoggedRtpPacketOutgoing& logged_event) const; - - void VerifyLoggedGenericPacketSent( - const RtcEventGenericPacketSent& original_event, - const LoggedGenericPacketSent& logged_event) const; - - void VerifyLoggedGenericPacketReceived( - const RtcEventGenericPacketReceived& original_event, - const LoggedGenericPacketReceived& logged_event) const; - - void VerifyLoggedGenericAckReceived( - const RtcEventGenericAckReceived& original_event, - const LoggedGenericAckReceived& logged_event) const; - - template - void VerifyLoggedRtpPacket(const EventType& original_event, - const ParsedType& logged_event) { - static_assert(sizeof(ParsedType) == 0, - "You have to use one of the two defined template " - "specializations of 
VerifyLoggedRtpPacket"); - } - - template - void VerifyLoggedRtpPacket(const RtcEventRtpPacketIncoming& original_event, - const LoggedRtpPacketIncoming& logged_event) { - VerifyLoggedRtpPacketIncoming(original_event, logged_event); - } - - template - void VerifyLoggedRtpPacket(const RtcEventRtpPacketOutgoing& original_event, - const LoggedRtpPacketOutgoing& logged_event) { - VerifyLoggedRtpPacketOutgoing(original_event, logged_event); - } - - void VerifyLoggedRtcpPacketIncoming( - const RtcEventRtcpPacketIncoming& original_event, - const LoggedRtcpPacketIncoming& logged_event) const; - - void VerifyLoggedRtcpPacketOutgoing( - const RtcEventRtcpPacketOutgoing& original_event, - const LoggedRtcpPacketOutgoing& logged_event) const; - - void VerifyLoggedSenderReport(int64_t log_time_us, - const rtcp::SenderReport& original_sr, - const LoggedRtcpPacketSenderReport& logged_sr); - void VerifyLoggedReceiverReport( - int64_t log_time_us, - const rtcp::ReceiverReport& original_rr, - const LoggedRtcpPacketReceiverReport& logged_rr); - void VerifyLoggedExtendedReports( - int64_t log_time_us, - const rtcp::ExtendedReports& original_xr, - const LoggedRtcpPacketExtendedReports& logged_xr); - void VerifyLoggedFir(int64_t log_time_us, - const rtcp::Fir& original_fir, - const LoggedRtcpPacketFir& logged_fir); - void VerifyLoggedPli(int64_t log_time_us, - const rtcp::Pli& original_pli, - const LoggedRtcpPacketPli& logged_pli); - void VerifyLoggedNack(int64_t log_time_us, - const rtcp::Nack& original_nack, - const LoggedRtcpPacketNack& logged_nack); - void VerifyLoggedTransportFeedback( - int64_t log_time_us, - const rtcp::TransportFeedback& original_transport_feedback, - const LoggedRtcpPacketTransportFeedback& logged_transport_feedback); - void VerifyLoggedRemb(int64_t log_time_us, - const rtcp::Remb& original_remb, - const LoggedRtcpPacketRemb& logged_remb); - void VerifyLoggedLossNotification( - int64_t log_time_us, - const rtcp::LossNotification& original_loss_notification, - 
const LoggedRtcpPacketLossNotification& logged_loss_notification); - - void VerifyLoggedStartEvent(int64_t start_time_us, - int64_t utc_start_time_us, - const LoggedStartEvent& logged_event) const; - void VerifyLoggedStopEvent(int64_t stop_time_us, - const LoggedStopEvent& logged_event) const; - - void VerifyLoggedAudioRecvConfig( - const RtcEventAudioReceiveStreamConfig& original_event, - const LoggedAudioRecvConfig& logged_event) const; - - void VerifyLoggedAudioSendConfig( - const RtcEventAudioSendStreamConfig& original_event, - const LoggedAudioSendConfig& logged_event) const; - - void VerifyLoggedVideoRecvConfig( - const RtcEventVideoReceiveStreamConfig& original_event, - const LoggedVideoRecvConfig& logged_event) const; - - void VerifyLoggedVideoSendConfig( - const RtcEventVideoSendStreamConfig& original_event, - const LoggedVideoSendConfig& logged_event) const; - - private: - void VerifyReportBlock(const rtcp::ReportBlock& original_report_block, - const rtcp::ReportBlock& logged_report_block); - - RtcEventLog::EncodingType encoding_type_; -}; - -} // namespace test -} // namespace webrtc - -#endif // LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_UNITTEST_HELPER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/audio_source.h b/TMessagesProj/jni/voip/webrtc/media/base/audio_source.h index 8a8796800..51fe0e13e 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/audio_source.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/audio_source.h @@ -36,6 +36,11 @@ class AudioSource { // Called when the AudioSource is going away. virtual void OnClose() = 0; + // Returns the number of channels encoded by the sink. This can be less than + // the number_of_channels if down-mixing occur. A value of -1 means an + // unknown number. 
+ virtual int NumPreferredChannels() const = 0; + protected: virtual ~Sink() {} }; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/codec.cc b/TMessagesProj/jni/voip/webrtc/media/base/codec.cc index 6b9a052da..cb6913e76 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/codec.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/codec.cc @@ -12,12 +12,13 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" -#include "media/base/h264_profile_level_id.h" -#include "media/base/vp9_profile.h" +#include "api/video_codecs/h264_profile_level_id.h" +#include "api/video_codecs/vp9_profile.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/field_trial.h" namespace cricket { namespace { @@ -50,25 +51,13 @@ bool IsSameCodecSpecific(const std::string& name1, absl::EqualsIgnoreCase(name, name2); }; if (either_name_matches(kH264CodecName)) - return webrtc::H264::IsSameH264Profile(params1, params2) && + return webrtc::H264IsSameProfile(params1, params2) && IsSameH264PacketizationMode(params1, params2); if (either_name_matches(kVp9CodecName)) - return webrtc::IsSameVP9Profile(params1, params2); + return webrtc::VP9IsSameProfile(params1, params2); return true; } -bool IsCodecInList( - const webrtc::SdpVideoFormat& format, - const std::vector& existing_formats) { - for (auto existing_format : existing_formats) { - if (IsSameCodec(format.name, format.parameters, existing_format.name, - existing_format.parameters)) { - return true; - } - } - return false; -} - } // namespace FeedbackParams::FeedbackParams() = default; @@ -143,10 +132,32 @@ bool Codec::operator==(const Codec& c) const { bool Codec::Matches(const Codec& codec) const { // Match the codec id/name based on the typical static/dynamic name rules. // Matching is case-insensitive. 
- const int kMaxStaticPayloadId = 95; - return (id <= kMaxStaticPayloadId || codec.id <= kMaxStaticPayloadId) - ? (id == codec.id) - : (absl::EqualsIgnoreCase(name, codec.name)); + + // Legacy behaviour with killswitch. + if (webrtc::field_trial::IsDisabled( + "WebRTC-PayloadTypes-Lower-Dynamic-Range")) { + const int kMaxStaticPayloadId = 95; + return (id <= kMaxStaticPayloadId || codec.id <= kMaxStaticPayloadId) + ? (id == codec.id) + : (absl::EqualsIgnoreCase(name, codec.name)); + } + // We support the ranges [96, 127] and more recently [35, 65]. + // https://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml#rtp-parameters-1 + // Within those ranges we match by codec name, outside by codec id. + const int kLowerDynamicRangeMin = 35; + const int kLowerDynamicRangeMax = 65; + const int kUpperDynamicRangeMin = 96; + const int kUpperDynamicRangeMax = 127; + const bool is_id_in_dynamic_range = + (id >= kLowerDynamicRangeMin && id <= kLowerDynamicRangeMax) || + (id >= kUpperDynamicRangeMin && id <= kUpperDynamicRangeMax); + const bool is_codec_id_in_dynamic_range = + (codec.id >= kLowerDynamicRangeMin && + codec.id <= kLowerDynamicRangeMax) || + (codec.id >= kUpperDynamicRangeMin && codec.id <= kUpperDynamicRangeMax); + return is_id_in_dynamic_range && is_codec_id_in_dynamic_range + ? 
(absl::EqualsIgnoreCase(name, codec.name)) + : (id == codec.id); } bool Codec::MatchesCapability( @@ -262,7 +273,11 @@ webrtc::RtpCodecParameters AudioCodec::ToCodecParameters() const { std::string VideoCodec::ToString() const { char buf[256]; rtc::SimpleStringBuilder sb(buf); - sb << "VideoCodec[" << id << ":" << name << "]"; + sb << "VideoCodec[" << id << ":" << name; + if (packetization.has_value()) { + sb << ":" << *packetization; + } + sb << "]"; return sb.str(); } @@ -369,25 +384,6 @@ bool VideoCodec::ValidateCodecFormat() const { return true; } -RtpDataCodec::RtpDataCodec(int id, const std::string& name) - : Codec(id, name, kDataCodecClockrate) {} - -RtpDataCodec::RtpDataCodec() : Codec() { - clockrate = kDataCodecClockrate; -} - -RtpDataCodec::RtpDataCodec(const RtpDataCodec& c) = default; -RtpDataCodec::RtpDataCodec(RtpDataCodec&& c) = default; -RtpDataCodec& RtpDataCodec::operator=(const RtpDataCodec& c) = default; -RtpDataCodec& RtpDataCodec::operator=(RtpDataCodec&& c) = default; - -std::string RtpDataCodec::ToString() const { - char buf[256]; - rtc::SimpleStringBuilder sb(buf); - sb << "RtpDataCodec[" << id << ":" << name << "]"; - return sb.str(); -} - bool HasLntf(const Codec& codec) { return codec.HasFeedbackParam( FeedbackParam(kRtcpFbParamLntf, kParamValueEmpty)); @@ -425,6 +421,8 @@ const VideoCodec* FindMatchingCodec( return nullptr; } +// TODO(crbug.com/1187565): Remove once downstream projects stopped using this +// method in favor of SdpVideoFormat::IsSameCodec(). 
bool IsSameCodec(const std::string& name1, const CodecParameterMap& params1, const std::string& name2, @@ -446,15 +444,16 @@ void AddH264ConstrainedBaselineProfileToSupportedFormats( for (auto it = supported_formats->cbegin(); it != supported_formats->cend(); ++it) { if (it->name == cricket::kH264CodecName) { - const absl::optional profile_level_id = - webrtc::H264::ParseSdpProfileLevelId(it->parameters); - if (profile_level_id && profile_level_id->profile != - webrtc::H264::kProfileConstrainedBaseline) { + const absl::optional profile_level_id = + webrtc::ParseSdpForH264ProfileLevelId(it->parameters); + if (profile_level_id && + profile_level_id->profile != + webrtc::H264Profile::kProfileConstrainedBaseline) { webrtc::SdpVideoFormat cbp_format = *it; - webrtc::H264::ProfileLevelId cbp_profile = *profile_level_id; - cbp_profile.profile = webrtc::H264::kProfileConstrainedBaseline; + webrtc::H264ProfileLevelId cbp_profile = *profile_level_id; + cbp_profile.profile = webrtc::H264Profile::kProfileConstrainedBaseline; cbp_format.parameters[cricket::kH264FmtpProfileLevelId] = - *webrtc::H264::ProfileLevelIdToString(cbp_profile); + *webrtc::H264ProfileLevelIdToString(cbp_profile); cbr_supported_formats.push_back(cbp_format); } } @@ -465,7 +464,7 @@ void AddH264ConstrainedBaselineProfileToSupportedFormats( std::copy_if(cbr_supported_formats.begin(), cbr_supported_formats.end(), std::back_inserter(*supported_formats), [supported_formats](const webrtc::SdpVideoFormat& format) { - return !IsCodecInList(format, *supported_formats); + return !format.IsCodecInList(*supported_formats); }); if (supported_formats->size() > original_size) { diff --git a/TMessagesProj/jni/voip/webrtc/media/base/codec.h b/TMessagesProj/jni/voip/webrtc/media/base/codec.h index c3be2334c..c7c99bf73 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/codec.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/codec.h @@ -202,23 +202,6 @@ struct RTC_EXPORT VideoCodec : public Codec { void 
SetDefaultParameters(); }; -struct RtpDataCodec : public Codec { - RtpDataCodec(int id, const std::string& name); - RtpDataCodec(); - RtpDataCodec(const RtpDataCodec& c); - RtpDataCodec(RtpDataCodec&& c); - ~RtpDataCodec() override = default; - - RtpDataCodec& operator=(const RtpDataCodec& c); - RtpDataCodec& operator=(RtpDataCodec&& c); - - std::string ToString() const; -}; - -// For backwards compatibility -// TODO(bugs.webrtc.org/10597): Remove when no longer needed. -typedef RtpDataCodec DataCodec; - // Get the codec setting associated with |payload_type|. If there // is no codec associated with that payload type it returns nullptr. template diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc index 734a30be7..aa8e2325b 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.cc @@ -18,6 +18,7 @@ #include "rtc_base/checks.h" namespace cricket { +using webrtc::TaskQueueBase; FakeVoiceMediaChannel::DtmfInfo::DtmfInfo(uint32_t ssrc, int event_code, @@ -49,8 +50,11 @@ AudioSource* FakeVoiceMediaChannel::VoiceChannelAudioSink::source() const { } FakeVoiceMediaChannel::FakeVoiceMediaChannel(FakeVoiceEngine* engine, - const AudioOptions& options) - : engine_(engine), max_bps_(-1) { + const AudioOptions& options, + TaskQueueBase* network_thread) + : RtpHelper(network_thread), + engine_(engine), + max_bps_(-1) { output_scalings_[0] = 1.0; // For default channel. 
SetOptions(options); } @@ -253,8 +257,11 @@ bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info, } FakeVideoMediaChannel::FakeVideoMediaChannel(FakeVideoEngine* engine, - const VideoOptions& options) - : engine_(engine), max_bps_(-1) { + const VideoOptions& options, + TaskQueueBase* network_thread) + : RtpHelper(network_thread), + engine_(engine), + max_bps_(-1) { SetOptions(options); } FakeVideoMediaChannel::~FakeVideoMediaChannel() { @@ -422,93 +429,6 @@ void FakeVideoMediaChannel::ClearRecordableEncodedFrameCallback(uint32_t ssrc) { void FakeVideoMediaChannel::GenerateKeyFrame(uint32_t ssrc) {} -FakeDataMediaChannel::FakeDataMediaChannel(void* unused, - const DataOptions& options) - : send_blocked_(false), max_bps_(-1) {} -FakeDataMediaChannel::~FakeDataMediaChannel() {} -const std::vector& FakeDataMediaChannel::recv_codecs() const { - return recv_codecs_; -} -const std::vector& FakeDataMediaChannel::send_codecs() const { - return send_codecs_; -} -const std::vector& FakeDataMediaChannel::codecs() const { - return send_codecs(); -} -int FakeDataMediaChannel::max_bps() const { - return max_bps_; -} -bool FakeDataMediaChannel::SetSendParameters(const DataSendParameters& params) { - set_send_rtcp_parameters(params.rtcp); - return (SetSendCodecs(params.codecs) && - SetMaxSendBandwidth(params.max_bandwidth_bps)); -} -bool FakeDataMediaChannel::SetRecvParameters(const DataRecvParameters& params) { - set_recv_rtcp_parameters(params.rtcp); - return SetRecvCodecs(params.codecs); -} -bool FakeDataMediaChannel::SetSend(bool send) { - return set_sending(send); -} -bool FakeDataMediaChannel::SetReceive(bool receive) { - set_playout(receive); - return true; -} -bool FakeDataMediaChannel::AddRecvStream(const StreamParams& sp) { - if (!RtpHelper::AddRecvStream(sp)) - return false; - return true; -} -bool FakeDataMediaChannel::RemoveRecvStream(uint32_t ssrc) { - if (!RtpHelper::RemoveRecvStream(ssrc)) - return false; - return true; -} -bool 
FakeDataMediaChannel::SendData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result) { - if (send_blocked_) { - *result = SDR_BLOCK; - return false; - } else { - last_sent_data_params_ = params; - last_sent_data_ = std::string(payload.data(), payload.size()); - return true; - } -} -SendDataParams FakeDataMediaChannel::last_sent_data_params() { - return last_sent_data_params_; -} -std::string FakeDataMediaChannel::last_sent_data() { - return last_sent_data_; -} -bool FakeDataMediaChannel::is_send_blocked() { - return send_blocked_; -} -void FakeDataMediaChannel::set_send_blocked(bool blocked) { - send_blocked_ = blocked; -} -bool FakeDataMediaChannel::SetRecvCodecs(const std::vector& codecs) { - if (fail_set_recv_codecs()) { - // Fake the failure in SetRecvCodecs. - return false; - } - recv_codecs_ = codecs; - return true; -} -bool FakeDataMediaChannel::SetSendCodecs(const std::vector& codecs) { - if (fail_set_send_codecs()) { - // Fake the failure in SetSendCodecs. - return false; - } - send_codecs_ = codecs; - return true; -} -bool FakeDataMediaChannel::SetMaxSendBandwidth(int bps) { - max_bps_ = bps; - return true; -} - FakeVoiceEngine::FakeVoiceEngine() : fail_create_channel_(false) { // Add a fake audio codec. Note that the name must not be "" as there are // sanity checks against that. 
@@ -527,7 +447,8 @@ VoiceMediaChannel* FakeVoiceEngine::CreateMediaChannel( return nullptr; } - FakeVoiceMediaChannel* ch = new FakeVoiceMediaChannel(this, options); + FakeVoiceMediaChannel* ch = + new FakeVoiceMediaChannel(this, options, call->network_thread()); channels_.push_back(ch); return ch; } @@ -593,7 +514,8 @@ VideoMediaChannel* FakeVideoEngine::CreateMediaChannel( return nullptr; } - FakeVideoMediaChannel* ch = new FakeVideoMediaChannel(this, options); + FakeVideoMediaChannel* ch = + new FakeVideoMediaChannel(this, options, call->network_thread()); channels_.emplace_back(ch); return ch; } @@ -668,22 +590,4 @@ void FakeMediaEngine::set_fail_create_channel(bool fail) { video_->fail_create_channel_ = fail; } -DataMediaChannel* FakeDataEngine::CreateChannel(const MediaConfig& config) { - FakeDataMediaChannel* ch = new FakeDataMediaChannel(this, DataOptions()); - channels_.push_back(ch); - return ch; -} -FakeDataMediaChannel* FakeDataEngine::GetChannel(size_t index) { - return (channels_.size() > index) ? 
channels_[index] : NULL; -} -void FakeDataEngine::UnregisterChannel(DataMediaChannel* channel) { - channels_.erase(absl::c_find(channels_, channel)); -} -void FakeDataEngine::SetDataCodecs(const std::vector& data_codecs) { - data_codecs_ = data_codecs; -} -const std::vector& FakeDataEngine::data_codecs() { - return data_codecs_; -} - } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h index 1751dd8bf..e4f7b6659 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h @@ -11,6 +11,7 @@ #ifndef MEDIA_BASE_FAKE_MEDIA_ENGINE_H_ #define MEDIA_BASE_FAKE_MEDIA_ENGINE_H_ +#include #include #include #include @@ -42,8 +43,9 @@ class FakeVoiceEngine; template class RtpHelper : public Base { public: - RtpHelper() - : sending_(false), + explicit RtpHelper(webrtc::TaskQueueBase* network_thread) + : Base(network_thread), + sending_(false), playout_(false), fail_set_send_codecs_(false), fail_set_recv_codecs_(false), @@ -118,6 +120,8 @@ class RtpHelper : public Base { return RemoveStreamBySsrc(&send_streams_, ssrc); } virtual void ResetUnsignaledRecvStream() {} + virtual void OnDemuxerCriteriaUpdatePending() {} + virtual void OnDemuxerCriteriaUpdateComplete() {} virtual bool AddRecvStream(const StreamParams& sp) { if (absl::c_linear_search(receive_streams_, sp)) { @@ -265,14 +269,14 @@ class RtpHelper : public Base { void set_recv_rtcp_parameters(const RtcpParameters& params) { recv_rtcp_parameters_ = params; } - virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us) { + void OnPacketReceived(rtc::CopyOnWriteBuffer packet, + int64_t packet_time_us) override { rtp_packets_.push_back(std::string(packet.cdata(), packet.size())); } - virtual void OnReadyToSend(bool ready) { ready_to_send_ = ready; } - - virtual void OnNetworkRouteChanged(const std::string& 
transport_name, - const rtc::NetworkRoute& network_route) { + void OnPacketSent(const rtc::SentPacket& sent_packet) override {} + void OnReadyToSend(bool ready) override { ready_to_send_ = ready; } + void OnNetworkRouteChanged(const std::string& transport_name, + const rtc::NetworkRoute& network_route) override { last_network_route_ = network_route; ++num_network_route_changes_; transport_overhead_per_packet_ = network_route.packet_overhead; @@ -281,7 +285,10 @@ class RtpHelper : public Base { bool fail_set_recv_codecs() const { return fail_set_recv_codecs_; } private: - bool sending_; + // TODO(bugs.webrtc.org/12783): This flag is used from more than one thread. + // As a workaround for tsan, it's currently std::atomic but that might not + // be the appropriate fix. + std::atomic sending_; bool playout_; std::vector recv_extensions_; std::vector send_extensions_; @@ -312,8 +319,9 @@ class FakeVoiceMediaChannel : public RtpHelper { int event_code; int duration; }; - explicit FakeVoiceMediaChannel(FakeVoiceEngine* engine, - const AudioOptions& options); + FakeVoiceMediaChannel(FakeVoiceEngine* engine, + const AudioOptions& options, + webrtc::TaskQueueBase* network_thread); ~FakeVoiceMediaChannel(); const std::vector& recv_codecs() const; const std::vector& send_codecs() const; @@ -371,6 +379,7 @@ class FakeVoiceMediaChannel : public RtpHelper { size_t number_of_frames, absl::optional absolute_capture_timestamp_ms) override; void OnClose() override; + int NumPreferredChannels() const override { return -1; } AudioSource* source() const; private: @@ -403,7 +412,9 @@ bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info, class FakeVideoMediaChannel : public RtpHelper { public: - FakeVideoMediaChannel(FakeVideoEngine* engine, const VideoOptions& options); + FakeVideoMediaChannel(FakeVideoEngine* engine, + const VideoOptions& options, + webrtc::TaskQueueBase* network_thread); ~FakeVideoMediaChannel(); @@ -469,48 +480,6 @@ class FakeVideoMediaChannel : public 
RtpHelper { int max_bps_; }; -// Dummy option class, needed for the DataTraits abstraction in -// channel_unittest.c. -class DataOptions {}; - -class FakeDataMediaChannel : public RtpHelper { - public: - explicit FakeDataMediaChannel(void* unused, const DataOptions& options); - ~FakeDataMediaChannel(); - const std::vector& recv_codecs() const; - const std::vector& send_codecs() const; - const std::vector& codecs() const; - int max_bps() const; - - bool SetSendParameters(const DataSendParameters& params) override; - bool SetRecvParameters(const DataRecvParameters& params) override; - bool SetSend(bool send) override; - bool SetReceive(bool receive) override; - bool AddRecvStream(const StreamParams& sp) override; - bool RemoveRecvStream(uint32_t ssrc) override; - - bool SendData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result) override; - - SendDataParams last_sent_data_params(); - std::string last_sent_data(); - bool is_send_blocked(); - void set_send_blocked(bool blocked); - - private: - bool SetRecvCodecs(const std::vector& codecs); - bool SetSendCodecs(const std::vector& codecs); - bool SetMaxSendBandwidth(int bps); - - std::vector recv_codecs_; - std::vector send_codecs_; - SendDataParams last_sent_data_params_; - std::string last_sent_data_; - bool send_blocked_; - int max_bps_; -}; - class FakeVoiceEngine : public VoiceEngineInterface { public: FakeVoiceEngine(); @@ -606,25 +575,6 @@ class FakeMediaEngine : public CompositeMediaEngine { FakeVideoEngine* const video_; }; -// Have to come afterwards due to declaration order - -class FakeDataEngine : public DataEngineInterface { - public: - DataMediaChannel* CreateChannel(const MediaConfig& config) override; - - FakeDataMediaChannel* GetChannel(size_t index); - - void UnregisterChannel(DataMediaChannel* channel); - - void SetDataCodecs(const std::vector& data_codecs); - - const std::vector& data_codecs() override; - - private: - std::vector channels_; - std::vector 
data_codecs_; -}; - } // namespace cricket #endif // MEDIA_BASE_FAKE_MEDIA_ENGINE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h index 3df83edce..02d53f678 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_network_interface.h @@ -129,7 +129,8 @@ class FakeNetworkInterface : public MediaChannel::NetworkInterface, rtp_packets_.push_back(*packet); if (conf_) { for (size_t i = 0; i < conf_sent_ssrcs_.size(); ++i) { - if (!SetRtpSsrc(packet->data(), packet->size(), conf_sent_ssrcs_[i])) { + if (!SetRtpSsrc(packet->MutableData(), packet->size(), + conf_sent_ssrcs_[i])) { return false; } PostMessage(ST_RTP, *packet); diff --git a/TMessagesProj/jni/voip/webrtc/media/base/h264_profile_level_id.cc b/TMessagesProj/jni/voip/webrtc/media/base/h264_profile_level_id.cc index 32fa02c14..6f9fa4669 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/h264_profile_level_id.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/h264_profile_level_id.cc @@ -10,301 +10,33 @@ #include "media/base/h264_profile_level_id.h" -#include -#include -#include - -#include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" +// TODO(crbug.com/1187565): Remove this file once downstream projects stop +// depend on it. namespace webrtc { namespace H264 { -namespace { - -const char kProfileLevelId[] = "profile-level-id"; -const char kLevelAsymmetryAllowed[] = "level-asymmetry-allowed"; - -// For level_idc=11 and profile_idc=0x42, 0x4D, or 0x58, the constraint set3 -// flag specifies if level 1b or level 1.1 is used. -const uint8_t kConstraintSet3Flag = 0x10; - -// Convert a string of 8 characters into a byte where the positions containing -// character c will have their bit set. For example, c = 'x', str = "x1xx0000" -// will return 0b10110000. 
constexpr is used so that the pattern table in -// kProfilePatterns is statically initialized. -constexpr uint8_t ByteMaskString(char c, const char (&str)[9]) { - return (str[0] == c) << 7 | (str[1] == c) << 6 | (str[2] == c) << 5 | - (str[3] == c) << 4 | (str[4] == c) << 3 | (str[5] == c) << 2 | - (str[6] == c) << 1 | (str[7] == c) << 0; -} - -// Class for matching bit patterns such as "x1xx0000" where 'x' is allowed to be -// either 0 or 1. -class BitPattern { - public: - explicit constexpr BitPattern(const char (&str)[9]) - : mask_(~ByteMaskString('x', str)), - masked_value_(ByteMaskString('1', str)) {} - - bool IsMatch(uint8_t value) const { return masked_value_ == (value & mask_); } - - private: - const uint8_t mask_; - const uint8_t masked_value_; -}; - -// Table for converting between profile_idc/profile_iop to H264::Profile. -struct ProfilePattern { - const uint8_t profile_idc; - const BitPattern profile_iop; - const Profile profile; -}; - -// This is from https://tools.ietf.org/html/rfc6184#section-8.1. -constexpr ProfilePattern kProfilePatterns[] = { - {0x42, BitPattern("x1xx0000"), kProfileConstrainedBaseline}, - {0x4D, BitPattern("1xxx0000"), kProfileConstrainedBaseline}, - {0x58, BitPattern("11xx0000"), kProfileConstrainedBaseline}, - {0x42, BitPattern("x0xx0000"), kProfileBaseline}, - {0x58, BitPattern("10xx0000"), kProfileBaseline}, - {0x4D, BitPattern("0x0x0000"), kProfileMain}, - {0x64, BitPattern("00000000"), kProfileHigh}, - {0x64, BitPattern("00001100"), kProfileConstrainedHigh}}; - -// Compare H264 levels and handle the level 1b case. -bool IsLess(Level a, Level b) { - if (a == kLevel1_b) - return b != kLevel1 && b != kLevel1_b; - if (b == kLevel1_b) - return a == kLevel1; - return a < b; -} - -Level Min(Level a, Level b) { - return IsLess(a, b) ? 
a : b; -} - -bool IsLevelAsymmetryAllowed(const CodecParameterMap& params) { - const auto it = params.find(kLevelAsymmetryAllowed); - return it != params.end() && strcmp(it->second.c_str(), "1") == 0; -} - -struct LevelConstraint { - const int max_macroblocks_per_second; - const int max_macroblock_frame_size; - const webrtc::H264::Level level; -}; - -// This is from ITU-T H.264 (02/2016) Table A-1 – Level limits. -static constexpr LevelConstraint kLevelConstraints[] = { - {1485, 99, webrtc::H264::kLevel1}, - {1485, 99, webrtc::H264::kLevel1_b}, - {3000, 396, webrtc::H264::kLevel1_1}, - {6000, 396, webrtc::H264::kLevel1_2}, - {11880, 396, webrtc::H264::kLevel1_3}, - {11880, 396, webrtc::H264::kLevel2}, - {19800, 792, webrtc::H264::kLevel2_1}, - {20250, 1620, webrtc::H264::kLevel2_2}, - {40500, 1620, webrtc::H264::kLevel3}, - {108000, 3600, webrtc::H264::kLevel3_1}, - {216000, 5120, webrtc::H264::kLevel3_2}, - {245760, 8192, webrtc::H264::kLevel4}, - {245760, 8192, webrtc::H264::kLevel4_1}, - {522240, 8704, webrtc::H264::kLevel4_2}, - {589824, 22080, webrtc::H264::kLevel5}, - {983040, 36864, webrtc::H264::kLevel5_1}, - {2073600, 36864, webrtc::H264::kLevel5_2}, -}; - -} // anonymous namespace - absl::optional ParseProfileLevelId(const char* str) { - // The string should consist of 3 bytes in hexadecimal format. - if (strlen(str) != 6u) - return absl::nullopt; - const uint32_t profile_level_id_numeric = strtol(str, nullptr, 16); - if (profile_level_id_numeric == 0) - return absl::nullopt; - - // Separate into three bytes. - const uint8_t level_idc = - static_cast(profile_level_id_numeric & 0xFF); - const uint8_t profile_iop = - static_cast((profile_level_id_numeric >> 8) & 0xFF); - const uint8_t profile_idc = - static_cast((profile_level_id_numeric >> 16) & 0xFF); - - // Parse level based on level_idc and constraint set 3 flag. - Level level; - switch (level_idc) { - case kLevel1_1: - level = (profile_iop & kConstraintSet3Flag) != 0 ? 
kLevel1_b : kLevel1_1; - break; - case kLevel1: - case kLevel1_2: - case kLevel1_3: - case kLevel2: - case kLevel2_1: - case kLevel2_2: - case kLevel3: - case kLevel3_1: - case kLevel3_2: - case kLevel4: - case kLevel4_1: - case kLevel4_2: - case kLevel5: - case kLevel5_1: - case kLevel5_2: - level = static_cast(level_idc); - break; - default: - // Unrecognized level_idc. - return absl::nullopt; - } - - // Parse profile_idc/profile_iop into a Profile enum. - for (const ProfilePattern& pattern : kProfilePatterns) { - if (profile_idc == pattern.profile_idc && - pattern.profile_iop.IsMatch(profile_iop)) { - return ProfileLevelId(pattern.profile, level); - } - } - - // Unrecognized profile_idc/profile_iop combination. - return absl::nullopt; -} - -absl::optional SupportedLevel(int max_frame_pixel_count, float max_fps) { - static const int kPixelsPerMacroblock = 16 * 16; - - for (int i = arraysize(kLevelConstraints) - 1; i >= 0; --i) { - const LevelConstraint& level_constraint = kLevelConstraints[i]; - if (level_constraint.max_macroblock_frame_size * kPixelsPerMacroblock <= - max_frame_pixel_count && - level_constraint.max_macroblocks_per_second <= - max_fps * level_constraint.max_macroblock_frame_size) { - return level_constraint.level; - } - } - - // No level supported. - return absl::nullopt; + return webrtc::ParseH264ProfileLevelId(str); } absl::optional ParseSdpProfileLevelId( - const CodecParameterMap& params) { - // TODO(magjed): The default should really be kProfileBaseline and kLevel1 - // according to the spec: https://tools.ietf.org/html/rfc6184#section-8.1. In - // order to not break backwards compatibility with older versions of WebRTC - // where external codecs don't have any parameters, use - // kProfileConstrainedBaseline kLevel3_1 instead. This workaround will only be - // done in an interim period to allow external clients to update their code. - // http://crbug/webrtc/6337. 
- static const ProfileLevelId kDefaultProfileLevelId( - kProfileConstrainedBaseline, kLevel3_1); + const SdpVideoFormat::Parameters& params) { + return webrtc::ParseSdpForH264ProfileLevelId(params); +} - const auto profile_level_id_it = params.find(kProfileLevelId); - return (profile_level_id_it == params.end()) - ? kDefaultProfileLevelId - : ParseProfileLevelId(profile_level_id_it->second.c_str()); +absl::optional SupportedLevel(int max_frame_pixel_count, float max_fps) { + return webrtc::H264SupportedLevel(max_frame_pixel_count, max_fps); } absl::optional ProfileLevelIdToString( const ProfileLevelId& profile_level_id) { - // Handle special case level == 1b. - if (profile_level_id.level == kLevel1_b) { - switch (profile_level_id.profile) { - case kProfileConstrainedBaseline: - return {"42f00b"}; - case kProfileBaseline: - return {"42100b"}; - case kProfileMain: - return {"4d100b"}; - // Level 1b is not allowed for other profiles. - default: - return absl::nullopt; - } - } - - const char* profile_idc_iop_string; - switch (profile_level_id.profile) { - case kProfileConstrainedBaseline: - profile_idc_iop_string = "42e0"; - break; - case kProfileBaseline: - profile_idc_iop_string = "4200"; - break; - case kProfileMain: - profile_idc_iop_string = "4d00"; - break; - case kProfileConstrainedHigh: - profile_idc_iop_string = "640c"; - break; - case kProfileHigh: - profile_idc_iop_string = "6400"; - break; - // Unrecognized profile. - default: - return absl::nullopt; - } - - char str[7]; - snprintf(str, 7u, "%s%02x", profile_idc_iop_string, profile_level_id.level); - return {str}; + return webrtc::H264ProfileLevelIdToString(profile_level_id); } -// Set level according to https://tools.ietf.org/html/rfc6184#section-8.2.2. 
-void GenerateProfileLevelIdForAnswer( - const CodecParameterMap& local_supported_params, - const CodecParameterMap& remote_offered_params, - CodecParameterMap* answer_params) { - // If both local and remote haven't set profile-level-id, they are both using - // the default profile. In this case, don't set profile-level-id in answer - // either. - if (!local_supported_params.count(kProfileLevelId) && - !remote_offered_params.count(kProfileLevelId)) { - return; - } - - // Parse profile-level-ids. - const absl::optional local_profile_level_id = - ParseSdpProfileLevelId(local_supported_params); - const absl::optional remote_profile_level_id = - ParseSdpProfileLevelId(remote_offered_params); - // The local and remote codec must have valid and equal H264 Profiles. - RTC_DCHECK(local_profile_level_id); - RTC_DCHECK(remote_profile_level_id); - RTC_DCHECK_EQ(local_profile_level_id->profile, - remote_profile_level_id->profile); - - // Parse level information. - const bool level_asymmetry_allowed = - IsLevelAsymmetryAllowed(local_supported_params) && - IsLevelAsymmetryAllowed(remote_offered_params); - const Level local_level = local_profile_level_id->level; - const Level remote_level = remote_profile_level_id->level; - const Level min_level = Min(local_level, remote_level); - - // Determine answer level. When level asymmetry is not allowed, level upgrade - // is not allowed, i.e., the level in the answer must be equal to or lower - // than the level in the offer. - const Level answer_level = level_asymmetry_allowed ? local_level : min_level; - - // Set the resulting profile-level-id in the answer parameters. 
- (*answer_params)[kProfileLevelId] = *ProfileLevelIdToString( - ProfileLevelId(local_profile_level_id->profile, answer_level)); -} - -bool IsSameH264Profile(const CodecParameterMap& params1, - const CodecParameterMap& params2) { - const absl::optional profile_level_id = - webrtc::H264::ParseSdpProfileLevelId(params1); - const absl::optional other_profile_level_id = - webrtc::H264::ParseSdpProfileLevelId(params2); - // Compare H264 profiles, but not levels. - return profile_level_id && other_profile_level_id && - profile_level_id->profile == other_profile_level_id->profile; +bool IsSameH264Profile(const SdpVideoFormat::Parameters& params1, + const SdpVideoFormat::Parameters& params2) { + return webrtc::H264IsSameProfile(params1, params2); } } // namespace H264 diff --git a/TMessagesProj/jni/voip/webrtc/media/base/h264_profile_level_id.h b/TMessagesProj/jni/voip/webrtc/media/base/h264_profile_level_id.h index f0f7928a3..c85709faa 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/h264_profile_level_id.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/h264_profile_level_id.h @@ -11,54 +11,45 @@ #ifndef MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_ #define MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_ -#include #include -#include "absl/types/optional.h" -#include "rtc_base/system/rtc_export.h" +#include "api/video_codecs/h264_profile_level_id.h" + +// TODO(crbug.com/1187565): Remove this file once downstream projects stop +// depend on it. namespace webrtc { namespace H264 { -enum Profile { - kProfileConstrainedBaseline, - kProfileBaseline, - kProfileMain, - kProfileConstrainedHigh, - kProfileHigh, -}; +typedef H264Profile Profile; +typedef H264Level Level; +typedef H264ProfileLevelId ProfileLevelId; -// Map containting SDP codec parameters. 
-typedef std::map CodecParameterMap; +constexpr H264Profile kProfileConstrainedBaseline = + H264Profile::kProfileConstrainedBaseline; +constexpr H264Profile kProfileBaseline = H264Profile::kProfileBaseline; +constexpr H264Profile kProfileMain = H264Profile::kProfileMain; +constexpr H264Profile kProfileConstrainedHigh = + H264Profile::kProfileConstrainedHigh; +constexpr H264Profile kProfileHigh = H264Profile::kProfileHigh; -// All values are equal to ten times the level number, except level 1b which is -// special. -enum Level { - kLevel1_b = 0, - kLevel1 = 10, - kLevel1_1 = 11, - kLevel1_2 = 12, - kLevel1_3 = 13, - kLevel2 = 20, - kLevel2_1 = 21, - kLevel2_2 = 22, - kLevel3 = 30, - kLevel3_1 = 31, - kLevel3_2 = 32, - kLevel4 = 40, - kLevel4_1 = 41, - kLevel4_2 = 42, - kLevel5 = 50, - kLevel5_1 = 51, - kLevel5_2 = 52 -}; - -struct ProfileLevelId { - constexpr ProfileLevelId(Profile profile, Level level) - : profile(profile), level(level) {} - Profile profile; - Level level; -}; +constexpr H264Level kLevel1_b = H264Level::kLevel1_b; +constexpr H264Level kLevel1 = H264Level::kLevel1; +constexpr H264Level kLevel1_1 = H264Level::kLevel1_1; +constexpr H264Level kLevel1_2 = H264Level::kLevel1_2; +constexpr H264Level kLevel1_3 = H264Level::kLevel1_3; +constexpr H264Level kLevel2 = H264Level::kLevel2; +constexpr H264Level kLevel2_1 = H264Level::kLevel2_1; +constexpr H264Level kLevel2_2 = H264Level::kLevel2_2; +constexpr H264Level kLevel3 = H264Level::kLevel3; +constexpr H264Level kLevel3_1 = H264Level::kLevel3_1; +constexpr H264Level kLevel3_2 = H264Level::kLevel3_2; +constexpr H264Level kLevel4 = H264Level::kLevel4; +constexpr H264Level kLevel4_1 = H264Level::kLevel4_1; +constexpr H264Level kLevel4_2 = H264Level::kLevel4_2; +constexpr H264Level kLevel5 = H264Level::kLevel5; +constexpr H264Level kLevel5_1 = H264Level::kLevel5_1; +constexpr H264Level kLevel5_2 = H264Level::kLevel5_2; // Parse profile level id that is represented as a string of 3 hex bytes. 
// Nothing will be returned if the string is not a recognized H264 @@ -70,7 +61,7 @@ absl::optional ParseProfileLevelId(const char* str); // returned if the profile-level-id key is missing. Nothing will be returned if // the key is present but the string is invalid. RTC_EXPORT absl::optional ParseSdpProfileLevelId( - const CodecParameterMap& params); + const SdpVideoFormat::Parameters& params); // Given that a decoder supports up to a given frame size (in pixels) at up to a // given number of frames per second, return the highest H.264 level where it @@ -84,33 +75,11 @@ RTC_EXPORT absl::optional SupportedLevel(int max_frame_pixel_count, RTC_EXPORT absl::optional ProfileLevelIdToString( const ProfileLevelId& profile_level_id); -// Generate codec parameters that will be used as answer in an SDP negotiation -// based on local supported parameters and remote offered parameters. Both -// |local_supported_params|, |remote_offered_params|, and |answer_params| -// represent sendrecv media descriptions, i.e they are a mix of both encode and -// decode capabilities. In theory, when the profile in |local_supported_params| -// represent a strict superset of the profile in |remote_offered_params|, we -// could limit the profile in |answer_params| to the profile in -// |remote_offered_params|. However, to simplify the code, each supported H264 -// profile should be listed explicitly in the list of local supported codecs, -// even if they are redundant. Then each local codec in the list should be -// tested one at a time against the remote codec, and only when the profiles are -// equal should this function be called. Therefore, this function does not need -// to handle profile intersection, and the profile of |local_supported_params| -// and |remote_offered_params| must be equal before calling this function. The -// parameters that are used when negotiating are the level part of -// profile-level-id and level-asymmetry-allowed. 
-void GenerateProfileLevelIdForAnswer( - const CodecParameterMap& local_supported_params, - const CodecParameterMap& remote_offered_params, - CodecParameterMap* answer_params); - // Returns true if the parameters have the same H264 profile, i.e. the same // H264::Profile (Baseline, High, etc). -bool IsSameH264Profile(const CodecParameterMap& params1, - const CodecParameterMap& params2); +RTC_EXPORT bool IsSameH264Profile(const SdpVideoFormat::Parameters& params1, + const SdpVideoFormat::Parameters& params2); } // namespace H264 } // namespace webrtc - #endif // MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc index 0cef36e2b..01b043b82 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc @@ -10,21 +10,40 @@ #include "media/base/media_channel.h" +#include "media/base/rtp_utils.h" +#include "rtc_base/task_utils/to_queued_task.h" + namespace cricket { +using webrtc::FrameDecryptorInterface; +using webrtc::FrameEncryptorInterface; +using webrtc::FrameTransformerInterface; +using webrtc::PendingTaskSafetyFlag; +using webrtc::TaskQueueBase; +using webrtc::ToQueuedTask; +using webrtc::VideoTrackInterface; VideoOptions::VideoOptions() - : content_hint(webrtc::VideoTrackInterface::ContentHint::kNone) {} + : content_hint(VideoTrackInterface::ContentHint::kNone) {} VideoOptions::~VideoOptions() = default; -MediaChannel::MediaChannel(const MediaConfig& config) - : enable_dscp_(config.enable_dscp) {} +MediaChannel::MediaChannel(const MediaConfig& config, + TaskQueueBase* network_thread) + : enable_dscp_(config.enable_dscp), + network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()), + network_thread_(network_thread) {} -MediaChannel::MediaChannel() : enable_dscp_(false) {} +MediaChannel::MediaChannel(TaskQueueBase* network_thread) + : enable_dscp_(false), + 
network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()), + network_thread_(network_thread) {} -MediaChannel::~MediaChannel() {} +MediaChannel::~MediaChannel() { + RTC_DCHECK(!network_interface_); +} void MediaChannel::SetInterface(NetworkInterface* iface) { - webrtc::MutexLock lock(&network_interface_mutex_); + RTC_DCHECK_RUN_ON(network_thread_); + iface ? network_safety_->SetAlive() : network_safety_->SetNotAlive(); network_interface_ = iface; UpdateDscp(); } @@ -35,24 +54,163 @@ int MediaChannel::GetRtpSendTimeExtnId() const { void MediaChannel::SetFrameEncryptor( uint32_t ssrc, - rtc::scoped_refptr frame_encryptor) { + rtc::scoped_refptr frame_encryptor) { // Placeholder should be pure virtual once internal supports it. } void MediaChannel::SetFrameDecryptor( uint32_t ssrc, - rtc::scoped_refptr frame_decryptor) { + rtc::scoped_refptr frame_decryptor) { // Placeholder should be pure virtual once internal supports it. } void MediaChannel::SetVideoCodecSwitchingEnabled(bool enabled) {} +bool MediaChannel::SendPacket(rtc::CopyOnWriteBuffer* packet, + const rtc::PacketOptions& options) { + return DoSendPacket(packet, false, options); +} + +bool MediaChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet, + const rtc::PacketOptions& options) { + return DoSendPacket(packet, true, options); +} + +int MediaChannel::SetOption(NetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option) { + RTC_DCHECK_RUN_ON(network_thread_); + return SetOptionLocked(type, opt, option); +} + +// Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. +// Set to true if it's allowed to mix one- and two-byte RTP header extensions +// in the same stream. The setter and getter must only be called from +// worker_thread. 
+void MediaChannel::SetExtmapAllowMixed(bool extmap_allow_mixed) { + extmap_allow_mixed_ = extmap_allow_mixed; +} + +bool MediaChannel::ExtmapAllowMixed() const { + return extmap_allow_mixed_; +} + void MediaChannel::SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) {} + rtc::scoped_refptr frame_transformer) {} + void MediaChannel::SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) {} + rtc::scoped_refptr frame_transformer) {} + +int MediaChannel::SetOptionLocked(NetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option) { + if (!network_interface_) + return -1; + return network_interface_->SetOption(type, opt, option); +} + +bool MediaChannel::DscpEnabled() const { + return enable_dscp_; +} + +// This is the DSCP value used for both RTP and RTCP channels if DSCP is +// enabled. It can be changed at any time via |SetPreferredDscp|. +rtc::DiffServCodePoint MediaChannel::PreferredDscp() const { + RTC_DCHECK_RUN_ON(network_thread_); + return preferred_dscp_; +} + +void MediaChannel::SetPreferredDscp(rtc::DiffServCodePoint new_dscp) { + if (!network_thread_->IsCurrent()) { + // This is currently the common path as the derived channel classes + // get called on the worker thread. There are still some tests though + // that call directly on the network thread. + network_thread_->PostTask(ToQueuedTask( + network_safety_, [this, new_dscp]() { SetPreferredDscp(new_dscp); })); + return; + } + + RTC_DCHECK_RUN_ON(network_thread_); + if (new_dscp == preferred_dscp_) + return; + + preferred_dscp_ = new_dscp; + UpdateDscp(); +} + +rtc::scoped_refptr MediaChannel::network_safety() { + return network_safety_; +} + +void MediaChannel::UpdateDscp() { + rtc::DiffServCodePoint value = + enable_dscp_ ? 
preferred_dscp_ : rtc::DSCP_DEFAULT; + int ret = + SetOptionLocked(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value); + if (ret == 0) + SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP, value); +} + +bool MediaChannel::DoSendPacket(rtc::CopyOnWriteBuffer* packet, + bool rtcp, + const rtc::PacketOptions& options) { + RTC_DCHECK_RUN_ON(network_thread_); + if (!network_interface_) + return false; + + return (!rtcp) ? network_interface_->SendPacket(packet, options) + : network_interface_->SendRtcp(packet, options); +} + +void MediaChannel::SendRtp(const uint8_t* data, + size_t len, + const webrtc::PacketOptions& options) { + auto send = + [this, packet_id = options.packet_id, + included_in_feedback = options.included_in_feedback, + included_in_allocation = options.included_in_allocation, + packet = rtc::CopyOnWriteBuffer(data, len, kMaxRtpPacketLen)]() mutable { + rtc::PacketOptions rtc_options; + rtc_options.packet_id = packet_id; + if (DscpEnabled()) { + rtc_options.dscp = PreferredDscp(); + } + rtc_options.info_signaled_after_sent.included_in_feedback = + included_in_feedback; + rtc_options.info_signaled_after_sent.included_in_allocation = + included_in_allocation; + SendPacket(&packet, rtc_options); + }; + + // TODO(bugs.webrtc.org/11993): ModuleRtpRtcpImpl2 and related classes (e.g. + // RTCPSender) aren't aware of the network thread and may trigger calls to + // this function from different threads. Update those classes to keep + // network traffic on the network thread. 
+ if (network_thread_->IsCurrent()) { + send(); + } else { + network_thread_->PostTask(ToQueuedTask(network_safety_, std::move(send))); + } +} + +void MediaChannel::SendRtcp(const uint8_t* data, size_t len) { + auto send = [this, packet = rtc::CopyOnWriteBuffer( + data, len, kMaxRtpPacketLen)]() mutable { + rtc::PacketOptions rtc_options; + if (DscpEnabled()) { + rtc_options.dscp = PreferredDscp(); + } + SendRtcp(&packet, rtc_options); + }; + + if (network_thread_->IsCurrent()) { + send(); + } else { + network_thread_->PostTask(ToQueuedTask(network_safety_, std::move(send))); + } +} MediaSenderInfo::MediaSenderInfo() = default; MediaSenderInfo::~MediaSenderInfo() = default; @@ -78,9 +236,6 @@ VoiceMediaInfo::~VoiceMediaInfo() = default; VideoMediaInfo::VideoMediaInfo() = default; VideoMediaInfo::~VideoMediaInfo() = default; -DataMediaInfo::DataMediaInfo() = default; -DataMediaInfo::~DataMediaInfo() = default; - AudioSendParameters::AudioSendParameters() = default; AudioSendParameters::~AudioSendParameters() = default; @@ -107,31 +262,4 @@ cricket::MediaType VideoMediaChannel::media_type() const { return cricket::MediaType::MEDIA_TYPE_VIDEO; } -DataMediaChannel::DataMediaChannel() = default; -DataMediaChannel::DataMediaChannel(const MediaConfig& config) - : MediaChannel(config) {} -DataMediaChannel::~DataMediaChannel() = default; - -webrtc::RtpParameters DataMediaChannel::GetRtpSendParameters( - uint32_t ssrc) const { - // GetRtpSendParameters is not supported for DataMediaChannel. - RTC_NOTREACHED(); - return webrtc::RtpParameters(); -} -webrtc::RTCError DataMediaChannel::SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters) { - // SetRtpSendParameters is not supported for DataMediaChannel. 
- RTC_NOTREACHED(); - return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION); -} - -cricket::MediaType DataMediaChannel::media_type() const { - return cricket::MediaType::MEDIA_TYPE_DATA; -} - -bool DataMediaChannel::GetStats(DataMediaInfo* info) { - return true; -} - } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h index a947b4799..a4a925e91 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h @@ -26,6 +26,7 @@ #include "api/media_stream_interface.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/transport/data_channel_transport_interface.h" #include "api/transport/rtp/rtp_source.h" #include "api/video/video_content_type.h" #include "api/video/video_sink_interface.h" @@ -43,7 +44,6 @@ #include "modules/rtp_rtcp/include/report_block_data.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/buffer.h" -#include "rtc_base/callback.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/dscp.h" #include "rtc_base/logging.h" @@ -51,8 +51,7 @@ #include "rtc_base/socket.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace rtc { class Timing; @@ -155,7 +154,7 @@ struct VideoOptions { } }; -class MediaChannel : public sigslot::has_slots<> { +class MediaChannel { public: class NetworkInterface { public: @@ -170,18 +169,21 @@ class MediaChannel : public sigslot::has_slots<> { virtual ~NetworkInterface() {} }; - explicit MediaChannel(const MediaConfig& config); - MediaChannel(); - ~MediaChannel() override; + MediaChannel(const MediaConfig& config, + webrtc::TaskQueueBase* network_thread); + explicit MediaChannel(webrtc::TaskQueueBase* 
network_thread); + virtual ~MediaChannel(); virtual cricket::MediaType media_type() const = 0; // Sets the abstract interface class for sending RTP/RTCP data. - virtual void SetInterface(NetworkInterface* iface) - RTC_LOCKS_EXCLUDED(network_interface_mutex_); - // Called when a RTP packet is received. + virtual void SetInterface(NetworkInterface* iface); + // Called on the network when an RTP packet is received. virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) = 0; + // Called on the network thread after a transport has finished sending a + // packet. + virtual void OnPacketSent(const rtc::SentPacket& sent_packet) = 0; // Called when the socket's ability to send has changed. virtual void OnReadyToSend(bool ready) = 0; // Called when the network route used for sending packets changed. @@ -207,6 +209,17 @@ class MediaChannel : public sigslot::has_slots<> { // Resets any cached StreamParams for an unsignaled RecvStream, and removes // any existing unsignaled streams. virtual void ResetUnsignaledRecvStream() = 0; + // Informs the media channel when the transport's demuxer criteria is updated. + // * OnDemuxerCriteriaUpdatePending() happens on the same thread that the + // channel's streams are added and removed (worker thread). + // * OnDemuxerCriteriaUpdateComplete() happens on the thread where the demuxer + // lives (network thread). + // Because the demuxer is updated asynchronously, there is a window of time + // where packets are arriving to the channel for streams that have already + // been removed on the worker thread. It is important NOT to treat these as + // new unsignalled ssrcs. + virtual void OnDemuxerCriteriaUpdatePending() = 0; + virtual void OnDemuxerCriteriaUpdateComplete() = 0; // Returns the absoulte sendtime extension id value from media channel. virtual int GetRtpSendTimeExtnId() const; // Set the frame encryptor to use on all outgoing frames. This is optional. 
@@ -229,30 +242,21 @@ class MediaChannel : public sigslot::has_slots<> { // Base method to send packet using NetworkInterface. bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) { - return DoSendPacket(packet, false, options); - } + const rtc::PacketOptions& options); bool SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) { - return DoSendPacket(packet, true, options); - } + const rtc::PacketOptions& options); int SetOption(NetworkInterface::SocketType type, rtc::Socket::Option opt, - int option) RTC_LOCKS_EXCLUDED(network_interface_mutex_) { - webrtc::MutexLock lock(&network_interface_mutex_); - return SetOptionLocked(type, opt, option); - } + int option); // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. // Set to true if it's allowed to mix one- and two-byte RTP header extensions // in the same stream. The setter and getter must only be called from // worker_thread. - void SetExtmapAllowMixed(bool extmap_allow_mixed) { - extmap_allow_mixed_ = extmap_allow_mixed; - } - bool ExtmapAllowMixed() const { return extmap_allow_mixed_; } + void SetExtmapAllowMixed(bool extmap_allow_mixed); + bool ExtmapAllowMixed() const; virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0; virtual webrtc::RTCError SetRtpSendParameters( @@ -269,69 +273,42 @@ class MediaChannel : public sigslot::has_slots<> { protected: int SetOptionLocked(NetworkInterface::SocketType type, rtc::Socket::Option opt, - int option) - RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_mutex_) { - if (!network_interface_) - return -1; - return network_interface_->SetOption(type, opt, option); - } + int option) RTC_RUN_ON(network_thread_); - bool DscpEnabled() const { return enable_dscp_; } + bool DscpEnabled() const; // This is the DSCP value used for both RTP and RTCP channels if DSCP is // enabled. It can be changed at any time via |SetPreferredDscp|. 
- rtc::DiffServCodePoint PreferredDscp() const - RTC_LOCKS_EXCLUDED(network_interface_mutex_) { - webrtc::MutexLock lock(&network_interface_mutex_); - return preferred_dscp_; - } + rtc::DiffServCodePoint PreferredDscp() const; + void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); - int SetPreferredDscp(rtc::DiffServCodePoint preferred_dscp) - RTC_LOCKS_EXCLUDED(network_interface_mutex_) { - webrtc::MutexLock lock(&network_interface_mutex_); - if (preferred_dscp == preferred_dscp_) { - return 0; - } - preferred_dscp_ = preferred_dscp; - return UpdateDscp(); - } + rtc::scoped_refptr network_safety(); + + // Utility implementation for derived classes (video/voice) that applies + // the packet options and passes the data onwards to `SendPacket`. + void SendRtp(const uint8_t* data, + size_t len, + const webrtc::PacketOptions& options); + + void SendRtcp(const uint8_t* data, size_t len); private: // Apply the preferred DSCP setting to the underlying network interface RTP // and RTCP channels. If DSCP is disabled, then apply the default DSCP value. - int UpdateDscp() RTC_EXCLUSIVE_LOCKS_REQUIRED(network_interface_mutex_) { - rtc::DiffServCodePoint value = - enable_dscp_ ? preferred_dscp_ : rtc::DSCP_DEFAULT; - int ret = - SetOptionLocked(NetworkInterface::ST_RTP, rtc::Socket::OPT_DSCP, value); - if (ret == 0) { - ret = SetOptionLocked(NetworkInterface::ST_RTCP, rtc::Socket::OPT_DSCP, - value); - } - return ret; - } + void UpdateDscp() RTC_RUN_ON(network_thread_); bool DoSendPacket(rtc::CopyOnWriteBuffer* packet, bool rtcp, - const rtc::PacketOptions& options) - RTC_LOCKS_EXCLUDED(network_interface_mutex_) { - webrtc::MutexLock lock(&network_interface_mutex_); - if (!network_interface_) - return false; - - return (!rtcp) ? 
network_interface_->SendPacket(packet, options) - : network_interface_->SendRtcp(packet, options); - } + const rtc::PacketOptions& options); const bool enable_dscp_; - // |network_interface_| can be accessed from the worker_thread and - // from any MediaEngine threads. This critical section is to protect accessing - // of network_interface_ object. - mutable webrtc::Mutex network_interface_mutex_; - NetworkInterface* network_interface_ - RTC_GUARDED_BY(network_interface_mutex_) = nullptr; - rtc::DiffServCodePoint preferred_dscp_ - RTC_GUARDED_BY(network_interface_mutex_) = rtc::DSCP_DEFAULT; + const rtc::scoped_refptr network_safety_ + RTC_PT_GUARDED_BY(network_thread_); + webrtc::TaskQueueBase* const network_thread_; + NetworkInterface* network_interface_ RTC_GUARDED_BY(network_thread_) = + nullptr; + rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = + rtc::DSCP_DEFAULT; bool extmap_allow_mixed_ = false; }; @@ -537,6 +514,13 @@ struct VoiceReceiverInfo : public MediaReceiverInfo { // longer than 150 ms). int32_t interruption_count = 0; int32_t total_interruption_duration_ms = 0; + // Remote outbound stats derived by the received RTCP sender reports. 
+ // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* + absl::optional last_sender_report_timestamp_ms; + absl::optional last_sender_report_remote_timestamp_ms; + uint32_t sender_reports_packets_sent = 0; + uint64_t sender_reports_bytes_sent = 0; + uint64_t sender_reports_reports_count = 0; }; struct VideoSenderInfo : public MediaSenderInfo { @@ -549,6 +533,7 @@ struct VideoSenderInfo : public MediaSenderInfo { int nacks_rcvd = 0; int send_frame_width = 0; int send_frame_height = 0; + int frames = 0; int framerate_input = 0; int framerate_sent = 0; int aggregated_framerate_sent = 0; @@ -617,6 +602,7 @@ struct VideoReceiverInfo : public MediaReceiverInfo { uint32_t total_pauses_duration_ms = 0; uint32_t total_frames_duration_ms = 0; double sum_squared_frame_durations = 0.0; + uint32_t jitter_ms = 0; webrtc::VideoContentType content_type = webrtc::VideoContentType::UNSPECIFIED; @@ -657,14 +643,6 @@ struct VideoReceiverInfo : public MediaReceiverInfo { absl::optional timing_frame_info; }; -struct DataSenderInfo : public MediaSenderInfo { - uint32_t ssrc = 0; -}; - -struct DataReceiverInfo : public MediaReceiverInfo { - uint32_t ssrc = 0; -}; - struct BandwidthEstimationInfo { int available_send_bandwidth = 0; int available_recv_bandwidth = 0; @@ -718,17 +696,6 @@ struct VideoMediaInfo { RtpCodecParametersMap receive_codecs; }; -struct DataMediaInfo { - DataMediaInfo(); - ~DataMediaInfo(); - void Clear() { - senders.clear(); - receivers.clear(); - } - std::vector senders; - std::vector receivers; -}; - struct RtcpParameters { bool reduced_size = false; bool remote_estimate = false; @@ -799,9 +766,11 @@ struct AudioRecvParameters : RtpParameters {}; class VoiceMediaChannel : public MediaChannel, public Delayable { public: - VoiceMediaChannel() {} - explicit VoiceMediaChannel(const MediaConfig& config) - : MediaChannel(config) {} + explicit VoiceMediaChannel(webrtc::TaskQueueBase* network_thread) + : MediaChannel(network_thread) {} + 
VoiceMediaChannel(const MediaConfig& config, + webrtc::TaskQueueBase* network_thread) + : MediaChannel(config, network_thread) {} ~VoiceMediaChannel() override {} cricket::MediaType media_type() const override; @@ -869,9 +838,11 @@ struct VideoRecvParameters : RtpParameters {}; class VideoMediaChannel : public MediaChannel, public Delayable { public: - VideoMediaChannel() {} - explicit VideoMediaChannel(const MediaConfig& config) - : MediaChannel(config) {} + explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread) + : MediaChannel(network_thread) {} + VideoMediaChannel(const MediaConfig& config, + webrtc::TaskQueueBase* network_thread) + : MediaChannel(config, network_thread) {} ~VideoMediaChannel() override {} cricket::MediaType media_type() const override; @@ -922,102 +893,21 @@ class VideoMediaChannel : public MediaChannel, public Delayable { virtual std::vector GetSources(uint32_t ssrc) const = 0; }; -enum DataMessageType { - // Chrome-Internal use only. See SctpDataMediaChannel for the actual PPID - // values. - DMT_NONE = 0, - DMT_CONTROL = 1, - DMT_BINARY = 2, - DMT_TEXT = 3, -}; - // Info about data received in DataMediaChannel. For use in // DataMediaChannel::SignalDataReceived and in all of the signals that // signal fires, on up the chain. struct ReceiveDataParams { // The in-packet stream indentifier. - // RTP data channels use SSRCs, SCTP data channels use SIDs. - union { - uint32_t ssrc; - int sid = 0; - }; + // SCTP data channels use SIDs. + int sid = 0; // The type of message (binary, text, or control). - DataMessageType type = DMT_TEXT; + webrtc::DataMessageType type = webrtc::DataMessageType::kText; // A per-stream value incremented per packet in the stream. int seq_num = 0; - // A per-stream value monotonically increasing with time. - int timestamp = 0; -}; - -struct SendDataParams { - // The in-packet stream indentifier. - // RTP data channels use SSRCs, SCTP data channels use SIDs. 
- union { - uint32_t ssrc; - int sid = 0; - }; - // The type of message (binary, text, or control). - DataMessageType type = DMT_TEXT; - - // TODO(pthatcher): Make |ordered| and |reliable| true by default? - // For SCTP, whether to send messages flagged as ordered or not. - // If false, messages can be received out of order. - bool ordered = false; - // For SCTP, whether the messages are sent reliably or not. - // If false, messages may be lost. - bool reliable = false; - // For SCTP, if reliable == false, provide partial reliability by - // resending up to this many times. Either count or millis - // is supported, not both at the same time. - int max_rtx_count = 0; - // For SCTP, if reliable == false, provide partial reliability by - // resending for up to this many milliseconds. Either count or millis - // is supported, not both at the same time. - int max_rtx_ms = 0; }; enum SendDataResult { SDR_SUCCESS, SDR_ERROR, SDR_BLOCK }; -struct DataSendParameters : RtpSendParameters {}; - -struct DataRecvParameters : RtpParameters {}; - -class DataMediaChannel : public MediaChannel { - public: - DataMediaChannel(); - explicit DataMediaChannel(const MediaConfig& config); - ~DataMediaChannel() override; - - cricket::MediaType media_type() const override; - virtual bool SetSendParameters(const DataSendParameters& params) = 0; - virtual bool SetRecvParameters(const DataRecvParameters& params) = 0; - - // RtpParameter methods are not supported for Data channel. - webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; - webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters) override; - - // TODO(pthatcher): Implement this. 
- virtual bool GetStats(DataMediaInfo* info); - - virtual bool SetSend(bool send) = 0; - virtual bool SetReceive(bool receive) = 0; - - void OnNetworkRouteChanged(const std::string& transport_name, - const rtc::NetworkRoute& network_route) override {} - - virtual bool SendData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result = NULL) = 0; - // Signals when data is received (params, data, len) - sigslot::signal3 - SignalDataReceived; - // Signal when the media channel is ready to send the stream. Arguments are: - // writable(bool) - sigslot::signal1 SignalReadyToSend; -}; - } // namespace cricket #endif // MEDIA_BASE_MEDIA_CHANNEL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc index 840da1d9b..35cb8543f 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.cc @@ -13,14 +13,15 @@ namespace cricket { const int kVideoCodecClockrate = 90000; -const int kDataCodecClockrate = 90000; -const int kRtpDataMaxBandwidth = 30720; // bps + +const int kVideoMtu = 1200; +const int kVideoRtpSendBufferSize = 65536; +const int kVideoRtpRecvBufferSize = 262144; const float kHighSystemCpuThreshold = 0.85f; const float kLowSystemCpuThreshold = 0.65f; const float kProcessCpuThreshold = 0.10f; -const char kRtxCodecName[] = "rtx"; const char kRedCodecName[] = "red"; const char kUlpfecCodecName[] = "ulpfec"; const char kMultiplexCodecName[] = "multiplex"; @@ -32,7 +33,11 @@ const char kFlexfecCodecName[] = "flexfec-03"; // draft-ietf-payload-flexible-fec-scheme-02.txt const char kFlexfecFmtpRepairWindow[] = "repair-window"; +// RFC 4588 RTP Retransmission Payload Format +const char kRtxCodecName[] = "rtx"; +const char kCodecParamRtxTime[] = "rtx-time"; const char kCodecParamAssociatedPayloadType[] = "apt"; + const char kCodecParamAssociatedCodecName[] = "acn"; const char 
kOpusCodecName[] = "opus"; @@ -56,9 +61,6 @@ const char kCodecParamUseDtx[] = "usedtx"; const char kCodecParamMaxAverageBitrate[] = "maxaveragebitrate"; const char kCodecParamMaxPlaybackRate[] = "maxplaybackrate"; -const char kCodecParamSctpProtocol[] = "protocol"; -const char kCodecParamSctpStreams[] = "streams"; - const char kParamValueTrue[] = "1"; const char kParamValueEmpty[] = ""; @@ -92,11 +94,6 @@ const char kCodecParamMaxBitrate[] = "x-google-max-bitrate"; const char kCodecParamMinBitrate[] = "x-google-min-bitrate"; const char kCodecParamStartBitrate[] = "x-google-start-bitrate"; const char kCodecParamMaxQuantization[] = "x-google-max-quantization"; -const char kCodecParamPort[] = "x-google-port"; -const char kCodecParamMaxMessageSize[] = "x-google-max-message-size"; - -const int kGoogleRtpDataCodecPlType = 109; -const char kGoogleRtpDataCodecName[] = "google-data"; const char kComfortNoiseCodecName[] = "CN"; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h index 2f5733dfb..ab1194b30 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_constants.h @@ -20,15 +20,16 @@ namespace cricket { extern const int kVideoCodecClockrate; -extern const int kDataCodecClockrate; -extern const int kRtpDataMaxBandwidth; // bps + +extern const int kVideoMtu; +extern const int kVideoRtpSendBufferSize; +extern const int kVideoRtpRecvBufferSize; // Default CPU thresholds. 
extern const float kHighSystemCpuThreshold; extern const float kLowSystemCpuThreshold; extern const float kProcessCpuThreshold; -extern const char kRtxCodecName[]; extern const char kRedCodecName[]; extern const char kUlpfecCodecName[]; extern const char kFlexfecCodecName[]; @@ -36,8 +37,10 @@ extern const char kMultiplexCodecName[]; extern const char kFlexfecFmtpRepairWindow[]; -// Codec parameters +extern const char kRtxCodecName[]; +extern const char kCodecParamRtxTime[]; extern const char kCodecParamAssociatedPayloadType[]; + extern const char kCodecParamAssociatedCodecName[]; extern const char kOpusCodecName[]; @@ -61,8 +64,6 @@ extern const char kCodecParamUseInbandFec[]; extern const char kCodecParamUseDtx[]; extern const char kCodecParamMaxAverageBitrate[]; extern const char kCodecParamMaxPlaybackRate[]; -extern const char kCodecParamSctpProtocol[]; -extern const char kCodecParamSctpStreams[]; extern const char kParamValueTrue[]; // Parameters are stored as parameter/value pairs. For parameters who do not @@ -115,14 +116,6 @@ extern const char kCodecParamMaxBitrate[]; extern const char kCodecParamMinBitrate[]; extern const char kCodecParamStartBitrate[]; extern const char kCodecParamMaxQuantization[]; -extern const char kCodecParamPort[]; -extern const char kCodecParamMaxMessageSize[]; - -// We put the data codec names here so callers of DataEngine::CreateChannel -// don't have to import rtpdataengine.h to get the codec names they want to -// pass in. 
-extern const int kGoogleRtpDataCodecPlType; -extern const char kGoogleRtpDataCodecName[]; extern const char kComfortNoiseCodecName[]; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h index 1d8917cfc..6f47127f3 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h @@ -121,9 +121,9 @@ class MediaEngineInterface { public: virtual ~MediaEngineInterface() {} - // Initialization - // Starts the engine. + // Initialization. Needs to be called on the worker thread. virtual bool Init() = 0; + virtual VoiceEngineInterface& voice() = 0; virtual VideoEngineInterface& video() = 0; virtual const VoiceEngineInterface& voice() const = 0; @@ -141,6 +141,8 @@ class CompositeMediaEngine : public MediaEngineInterface { CompositeMediaEngine(std::unique_ptr audio_engine, std::unique_ptr video_engine); ~CompositeMediaEngine() override; + + // Always succeeds. 
bool Init() override; VoiceEngineInterface& voice() override; @@ -150,21 +152,8 @@ class CompositeMediaEngine : public MediaEngineInterface { private: const std::unique_ptr trials_; - std::unique_ptr voice_engine_; - std::unique_ptr video_engine_; -}; - -enum DataChannelType { - DCT_NONE = 0, - DCT_RTP = 1, - DCT_SCTP = 2, -}; - -class DataEngineInterface { - public: - virtual ~DataEngineInterface() {} - virtual DataMediaChannel* CreateChannel(const MediaConfig& config) = 0; - virtual const std::vector& data_codecs() = 0; + const std::unique_ptr voice_engine_; + const std::unique_ptr video_engine_; }; webrtc::RtpParameters CreateRtpParametersWithOneEncoding(); diff --git a/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.cc b/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.cc deleted file mode 100644 index f05dffabf..000000000 --- a/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.cc +++ /dev/null @@ -1,338 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "media/base/rtp_data_engine.h" - -#include - -#include "absl/strings/match.h" -#include "media/base/codec.h" -#include "media/base/media_constants.h" -#include "media/base/rtp_utils.h" -#include "media/base/stream_params.h" -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/data_rate_limiter.h" -#include "rtc_base/helpers.h" -#include "rtc_base/logging.h" -#include "rtc_base/sanitizer.h" - -namespace cricket { - -// We want to avoid IP fragmentation. -static const size_t kDataMaxRtpPacketLen = 1200U; -// We reserve space after the RTP header for future wiggle room. 
-static const unsigned char kReservedSpace[] = {0x00, 0x00, 0x00, 0x00}; - -// Amount of overhead SRTP may take. We need to leave room in the -// buffer for it, otherwise SRTP will fail later. If SRTP ever uses -// more than this, we need to increase this number. -static const size_t kMaxSrtpHmacOverhead = 16; - -RtpDataEngine::RtpDataEngine() { - data_codecs_.push_back( - DataCodec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName)); -} - -DataMediaChannel* RtpDataEngine::CreateChannel(const MediaConfig& config) { - return new RtpDataMediaChannel(config); -} - -static const DataCodec* FindCodecByName(const std::vector& codecs, - const std::string& name) { - for (const DataCodec& codec : codecs) { - if (absl::EqualsIgnoreCase(name, codec.name)) - return &codec; - } - return nullptr; -} - -RtpDataMediaChannel::RtpDataMediaChannel(const MediaConfig& config) - : DataMediaChannel(config) { - Construct(); - SetPreferredDscp(rtc::DSCP_AF41); -} - -void RtpDataMediaChannel::Construct() { - sending_ = false; - receiving_ = false; - send_limiter_.reset(new rtc::DataRateLimiter(kRtpDataMaxBandwidth / 8, 1.0)); -} - -RtpDataMediaChannel::~RtpDataMediaChannel() { - std::map::const_iterator iter; - for (iter = rtp_clock_by_send_ssrc_.begin(); - iter != rtp_clock_by_send_ssrc_.end(); ++iter) { - delete iter->second; - } -} - -void RTC_NO_SANITIZE("float-cast-overflow") // bugs.webrtc.org/8204 - RtpClock::Tick(double now, int* seq_num, uint32_t* timestamp) { - *seq_num = ++last_seq_num_; - *timestamp = timestamp_offset_ + static_cast(now * clockrate_); - // UBSan: 5.92374e+10 is outside the range of representable values of type - // 'unsigned int' -} - -const DataCodec* FindUnknownCodec(const std::vector& codecs) { - DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName); - std::vector::const_iterator iter; - for (iter = codecs.begin(); iter != codecs.end(); ++iter) { - if (!iter->Matches(data_codec)) { - return &(*iter); - } - } - return NULL; -} - -const 
DataCodec* FindKnownCodec(const std::vector& codecs) { - DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName); - std::vector::const_iterator iter; - for (iter = codecs.begin(); iter != codecs.end(); ++iter) { - if (iter->Matches(data_codec)) { - return &(*iter); - } - } - return NULL; -} - -bool RtpDataMediaChannel::SetRecvCodecs(const std::vector& codecs) { - const DataCodec* unknown_codec = FindUnknownCodec(codecs); - if (unknown_codec) { - RTC_LOG(LS_WARNING) << "Failed to SetRecvCodecs because of unknown codec: " - << unknown_codec->ToString(); - return false; - } - - recv_codecs_ = codecs; - return true; -} - -bool RtpDataMediaChannel::SetSendCodecs(const std::vector& codecs) { - const DataCodec* known_codec = FindKnownCodec(codecs); - if (!known_codec) { - RTC_LOG(LS_WARNING) - << "Failed to SetSendCodecs because there is no known codec."; - return false; - } - - send_codecs_ = codecs; - return true; -} - -bool RtpDataMediaChannel::SetSendParameters(const DataSendParameters& params) { - return (SetSendCodecs(params.codecs) && - SetMaxSendBandwidth(params.max_bandwidth_bps)); -} - -bool RtpDataMediaChannel::SetRecvParameters(const DataRecvParameters& params) { - return SetRecvCodecs(params.codecs); -} - -bool RtpDataMediaChannel::AddSendStream(const StreamParams& stream) { - if (!stream.has_ssrcs()) { - return false; - } - - if (GetStreamBySsrc(send_streams_, stream.first_ssrc())) { - RTC_LOG(LS_WARNING) << "Not adding data send stream '" << stream.id - << "' with ssrc=" << stream.first_ssrc() - << " because stream already exists."; - return false; - } - - send_streams_.push_back(stream); - // TODO(pthatcher): This should be per-stream, not per-ssrc. - // And we should probably allow more than one per stream. 
- rtp_clock_by_send_ssrc_[stream.first_ssrc()] = - new RtpClock(kDataCodecClockrate, rtc::CreateRandomNonZeroId(), - rtc::CreateRandomNonZeroId()); - - RTC_LOG(LS_INFO) << "Added data send stream '" << stream.id - << "' with ssrc=" << stream.first_ssrc(); - return true; -} - -bool RtpDataMediaChannel::RemoveSendStream(uint32_t ssrc) { - if (!GetStreamBySsrc(send_streams_, ssrc)) { - return false; - } - - RemoveStreamBySsrc(&send_streams_, ssrc); - delete rtp_clock_by_send_ssrc_[ssrc]; - rtp_clock_by_send_ssrc_.erase(ssrc); - return true; -} - -bool RtpDataMediaChannel::AddRecvStream(const StreamParams& stream) { - if (!stream.has_ssrcs()) { - return false; - } - - if (GetStreamBySsrc(recv_streams_, stream.first_ssrc())) { - RTC_LOG(LS_WARNING) << "Not adding data recv stream '" << stream.id - << "' with ssrc=" << stream.first_ssrc() - << " because stream already exists."; - return false; - } - - recv_streams_.push_back(stream); - RTC_LOG(LS_INFO) << "Added data recv stream '" << stream.id - << "' with ssrc=" << stream.first_ssrc(); - return true; -} - -bool RtpDataMediaChannel::RemoveRecvStream(uint32_t ssrc) { - RemoveStreamBySsrc(&recv_streams_, ssrc); - return true; -} - -// Not implemented. 
-void RtpDataMediaChannel::ResetUnsignaledRecvStream() {} - -void RtpDataMediaChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t /* packet_time_us */) { - RtpHeader header; - if (!GetRtpHeader(packet.cdata(), packet.size(), &header)) { - return; - } - - size_t header_length; - if (!GetRtpHeaderLen(packet.cdata(), packet.size(), &header_length)) { - return; - } - const char* data = - packet.cdata() + header_length + sizeof(kReservedSpace); - size_t data_len = packet.size() - header_length - sizeof(kReservedSpace); - - if (!receiving_) { - RTC_LOG(LS_WARNING) << "Not receiving packet " << header.ssrc << ":" - << header.seq_num << " before SetReceive(true) called."; - return; - } - - if (!FindCodecById(recv_codecs_, header.payload_type)) { - return; - } - - if (!GetStreamBySsrc(recv_streams_, header.ssrc)) { - RTC_LOG(LS_WARNING) << "Received packet for unknown ssrc: " << header.ssrc; - return; - } - - // Uncomment this for easy debugging. - // const auto* found_stream = GetStreamBySsrc(recv_streams_, header.ssrc); - // RTC_LOG(LS_INFO) << "Received packet" - // << " groupid=" << found_stream.groupid - // << ", ssrc=" << header.ssrc - // << ", seqnum=" << header.seq_num - // << ", timestamp=" << header.timestamp - // << ", len=" << data_len; - - ReceiveDataParams params; - params.ssrc = header.ssrc; - params.seq_num = header.seq_num; - params.timestamp = header.timestamp; - SignalDataReceived(params, data, data_len); -} - -bool RtpDataMediaChannel::SetMaxSendBandwidth(int bps) { - if (bps <= 0) { - bps = kRtpDataMaxBandwidth; - } - send_limiter_.reset(new rtc::DataRateLimiter(bps / 8, 1.0)); - RTC_LOG(LS_INFO) << "RtpDataMediaChannel::SetSendBandwidth to " << bps - << "bps."; - return true; -} - -bool RtpDataMediaChannel::SendData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result) { - if (result) { - // If we return true, we'll set this to SDR_SUCCESS. 
- *result = SDR_ERROR; - } - if (!sending_) { - RTC_LOG(LS_WARNING) << "Not sending packet with ssrc=" << params.ssrc - << " len=" << payload.size() - << " before SetSend(true)."; - return false; - } - - if (params.type != cricket::DMT_TEXT) { - RTC_LOG(LS_WARNING) - << "Not sending data because binary type is unsupported."; - return false; - } - - const StreamParams* found_stream = - GetStreamBySsrc(send_streams_, params.ssrc); - if (!found_stream) { - RTC_LOG(LS_WARNING) << "Not sending data because ssrc is unknown: " - << params.ssrc; - return false; - } - - const DataCodec* found_codec = - FindCodecByName(send_codecs_, kGoogleRtpDataCodecName); - if (!found_codec) { - RTC_LOG(LS_WARNING) << "Not sending data because codec is unknown: " - << kGoogleRtpDataCodecName; - return false; - } - - size_t packet_len = (kMinRtpPacketLen + sizeof(kReservedSpace) + - payload.size() + kMaxSrtpHmacOverhead); - if (packet_len > kDataMaxRtpPacketLen) { - return false; - } - - double now = - rtc::TimeMicros() / static_cast(rtc::kNumMicrosecsPerSec); - - if (!send_limiter_->CanUse(packet_len, now)) { - RTC_LOG(LS_VERBOSE) << "Dropped data packet of len=" << packet_len - << "; already sent " << send_limiter_->used_in_period() - << "/" << send_limiter_->max_per_period(); - return false; - } - - RtpHeader header; - header.payload_type = found_codec->id; - header.ssrc = params.ssrc; - rtp_clock_by_send_ssrc_[header.ssrc]->Tick(now, &header.seq_num, - &header.timestamp); - - rtc::CopyOnWriteBuffer packet(kMinRtpPacketLen, packet_len); - if (!SetRtpHeader(packet.data(), packet.size(), header)) { - return false; - } - packet.AppendData(kReservedSpace); - packet.AppendData(payload); - - RTC_LOG(LS_VERBOSE) << "Sent RTP data packet: " - " stream=" - << found_stream->id << " ssrc=" << header.ssrc - << ", seqnum=" << header.seq_num - << ", timestamp=" << header.timestamp - << ", len=" << payload.size(); - - rtc::PacketOptions options; - options.info_signaled_after_sent.packet_type = 
rtc::PacketType::kData; - MediaChannel::SendPacket(&packet, options); - send_limiter_->Use(packet_len, now); - if (result) { - *result = SDR_SUCCESS; - } - return true; -} - -} // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.h deleted file mode 100644 index e5f071d5a..000000000 --- a/TMessagesProj/jni/voip/webrtc/media/base/rtp_data_engine.h +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MEDIA_BASE_RTP_DATA_ENGINE_H_ -#define MEDIA_BASE_RTP_DATA_ENGINE_H_ - -#include -#include -#include -#include - -#include "media/base/codec.h" -#include "media/base/media_channel.h" -#include "media/base/media_constants.h" -#include "media/base/media_engine.h" - -namespace rtc { -class DataRateLimiter; -} - -namespace cricket { - -class RtpDataEngine : public DataEngineInterface { - public: - RtpDataEngine(); - - virtual DataMediaChannel* CreateChannel(const MediaConfig& config); - - virtual const std::vector& data_codecs() { return data_codecs_; } - - private: - std::vector data_codecs_; -}; - -// Keep track of sequence number and timestamp of an RTP stream. The -// sequence number starts with a "random" value and increments. The -// timestamp starts with a "random" value and increases monotonically -// according to the clockrate. 
-class RtpClock { - public: - RtpClock(int clockrate, uint16_t first_seq_num, uint32_t timestamp_offset) - : clockrate_(clockrate), - last_seq_num_(first_seq_num), - timestamp_offset_(timestamp_offset) {} - - // Given the current time (in number of seconds which must be - // monotonically increasing), Return the next sequence number and - // timestamp. - void Tick(double now, int* seq_num, uint32_t* timestamp); - - private: - int clockrate_; - uint16_t last_seq_num_; - uint32_t timestamp_offset_; -}; - -class RtpDataMediaChannel : public DataMediaChannel { - public: - explicit RtpDataMediaChannel(const MediaConfig& config); - virtual ~RtpDataMediaChannel(); - - virtual bool SetSendParameters(const DataSendParameters& params); - virtual bool SetRecvParameters(const DataRecvParameters& params); - virtual bool AddSendStream(const StreamParams& sp); - virtual bool RemoveSendStream(uint32_t ssrc); - virtual bool AddRecvStream(const StreamParams& sp); - virtual bool RemoveRecvStream(uint32_t ssrc); - virtual void ResetUnsignaledRecvStream(); - virtual bool SetSend(bool send) { - sending_ = send; - return true; - } - virtual bool SetReceive(bool receive) { - receiving_ = receive; - return true; - } - virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us); - virtual void OnReadyToSend(bool ready) {} - virtual bool SendData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result); - - private: - void Construct(); - bool SetMaxSendBandwidth(int bps); - bool SetSendCodecs(const std::vector& codecs); - bool SetRecvCodecs(const std::vector& codecs); - - bool sending_; - bool receiving_; - std::vector send_codecs_; - std::vector recv_codecs_; - std::vector send_streams_; - std::vector recv_streams_; - std::map rtp_clock_by_send_ssrc_; - std::unique_ptr send_limiter_; -}; - -} // namespace cricket - -#endif // MEDIA_BASE_RTP_DATA_ENGINE_H_ diff --git 
a/TMessagesProj/jni/voip/webrtc/media/base/sdp_fmtp_utils.cc b/TMessagesProj/jni/voip/webrtc/media/base/sdp_fmtp_utils.cc deleted file mode 100644 index 4ffc3b969..000000000 --- a/TMessagesProj/jni/voip/webrtc/media/base/sdp_fmtp_utils.cc +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "media/base/sdp_fmtp_utils.h" - -#include -#include - -#include "rtc_base/string_to_number.h" - -namespace webrtc { -namespace { -// Max frame rate for VP8 and VP9 video. -const char kVPxFmtpMaxFrameRate[] = "max-fr"; -// Max frame size for VP8 and VP9 video. -const char kVPxFmtpMaxFrameSize[] = "max-fs"; -const int kVPxFmtpFrameSizeSubBlockPixels = 256; - -absl::optional ParsePositiveNumberFromParams( - const SdpVideoFormat::Parameters& params, - const char* parameter_name) { - const auto max_frame_rate_it = params.find(parameter_name); - if (max_frame_rate_it == params.end()) - return absl::nullopt; - - const absl::optional i = - rtc::StringToNumber(max_frame_rate_it->second); - if (!i.has_value() || i.value() <= 0) - return absl::nullopt; - return i; -} - -} // namespace - -absl::optional ParseSdpForVPxMaxFrameRate( - const SdpVideoFormat::Parameters& params) { - return ParsePositiveNumberFromParams(params, kVPxFmtpMaxFrameRate); -} - -absl::optional ParseSdpForVPxMaxFrameSize( - const SdpVideoFormat::Parameters& params) { - const absl::optional i = - ParsePositiveNumberFromParams(params, kVPxFmtpMaxFrameSize); - return i ? 
absl::make_optional(i.value() * kVPxFmtpFrameSizeSubBlockPixels) - : absl::nullopt; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/media/base/sdp_fmtp_utils.h b/TMessagesProj/jni/voip/webrtc/media/base/sdp_fmtp_utils.h deleted file mode 100644 index 04e918361..000000000 --- a/TMessagesProj/jni/voip/webrtc/media/base/sdp_fmtp_utils.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MEDIA_BASE_SDP_FMTP_UTILS_H_ -#define MEDIA_BASE_SDP_FMTP_UTILS_H_ - -#include "absl/types/optional.h" -#include "api/video_codecs/sdp_video_format.h" - -namespace webrtc { - -// Parse max frame rate from SDP FMTP line. absl::nullopt is returned if the -// field is missing or not a number. -absl::optional ParseSdpForVPxMaxFrameRate( - const SdpVideoFormat::Parameters& params); - -// Parse max frame size from SDP FMTP line. absl::nullopt is returned if the -// field is missing or not a number. Please note that the value is stored in sub -// blocks but the returned value is in total number of pixels. -absl::optional ParseSdpForVPxMaxFrameSize( - const SdpVideoFormat::Parameters& params); - -} // namespace webrtc - -#endif // MEDIA_BASE_SDP_FMTP_UTILS_H__ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/sdp_video_format_utils.cc b/TMessagesProj/jni/voip/webrtc/media/base/sdp_video_format_utils.cc new file mode 100644 index 000000000..a156afdc0 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/media/base/sdp_video_format_utils.cc @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "media/base/sdp_video_format_utils.h" + +#include +#include +#include + +#include "api/video_codecs/h264_profile_level_id.h" +#include "rtc_base/checks.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { +namespace { +const char kProfileLevelId[] = "profile-level-id"; +const char kH264LevelAsymmetryAllowed[] = "level-asymmetry-allowed"; +// Max frame rate for VP8 and VP9 video. +const char kVPxFmtpMaxFrameRate[] = "max-fr"; +// Max frame size for VP8 and VP9 video. +const char kVPxFmtpMaxFrameSize[] = "max-fs"; +const int kVPxFmtpFrameSizeSubBlockPixels = 256; + +bool IsH264LevelAsymmetryAllowed(const SdpVideoFormat::Parameters& params) { + const auto it = params.find(kH264LevelAsymmetryAllowed); + return it != params.end() && strcmp(it->second.c_str(), "1") == 0; +} + +// Compare H264 levels and handle the level 1b case. +bool H264LevelIsLess(H264Level a, H264Level b) { + if (a == H264Level::kLevel1_b) + return b != H264Level::kLevel1 && b != H264Level::kLevel1_b; + if (b == H264Level::kLevel1_b) + return a == H264Level::kLevel1; + return a < b; +} + +H264Level H264LevelMin(H264Level a, H264Level b) { + return H264LevelIsLess(a, b) ? 
a : b; +} + +absl::optional ParsePositiveNumberFromParams( + const SdpVideoFormat::Parameters& params, + const char* parameter_name) { + const auto max_frame_rate_it = params.find(parameter_name); + if (max_frame_rate_it == params.end()) + return absl::nullopt; + + const absl::optional i = + rtc::StringToNumber(max_frame_rate_it->second); + if (!i.has_value() || i.value() <= 0) + return absl::nullopt; + return i; +} + +} // namespace + +// Set level according to https://tools.ietf.org/html/rfc6184#section-8.2.2. +void H264GenerateProfileLevelIdForAnswer( + const SdpVideoFormat::Parameters& local_supported_params, + const SdpVideoFormat::Parameters& remote_offered_params, + SdpVideoFormat::Parameters* answer_params) { + // If both local and remote haven't set profile-level-id, they are both using + // the default profile. In this case, don't set profile-level-id in answer + // either. + if (!local_supported_params.count(kProfileLevelId) && + !remote_offered_params.count(kProfileLevelId)) { + return; + } + + // Parse profile-level-ids. + const absl::optional local_profile_level_id = + ParseSdpForH264ProfileLevelId(local_supported_params); + const absl::optional remote_profile_level_id = + ParseSdpForH264ProfileLevelId(remote_offered_params); + // The local and remote codec must have valid and equal H264 Profiles. + RTC_DCHECK(local_profile_level_id); + RTC_DCHECK(remote_profile_level_id); + RTC_DCHECK_EQ(local_profile_level_id->profile, + remote_profile_level_id->profile); + + // Parse level information. + const bool level_asymmetry_allowed = + IsH264LevelAsymmetryAllowed(local_supported_params) && + IsH264LevelAsymmetryAllowed(remote_offered_params); + const H264Level local_level = local_profile_level_id->level; + const H264Level remote_level = remote_profile_level_id->level; + const H264Level min_level = H264LevelMin(local_level, remote_level); + + // Determine answer level. 
When level asymmetry is not allowed, level upgrade + // is not allowed, i.e., the level in the answer must be equal to or lower + // than the level in the offer. + const H264Level answer_level = + level_asymmetry_allowed ? local_level : min_level; + + // Set the resulting profile-level-id in the answer parameters. + (*answer_params)[kProfileLevelId] = *H264ProfileLevelIdToString( + H264ProfileLevelId(local_profile_level_id->profile, answer_level)); +} + +absl::optional ParseSdpForVPxMaxFrameRate( + const SdpVideoFormat::Parameters& params) { + return ParsePositiveNumberFromParams(params, kVPxFmtpMaxFrameRate); +} + +absl::optional ParseSdpForVPxMaxFrameSize( + const SdpVideoFormat::Parameters& params) { + const absl::optional i = + ParsePositiveNumberFromParams(params, kVPxFmtpMaxFrameSize); + return i ? absl::make_optional(i.value() * kVPxFmtpFrameSizeSubBlockPixels) + : absl::nullopt; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/media/base/sdp_video_format_utils.h b/TMessagesProj/jni/voip/webrtc/media/base/sdp_video_format_utils.h new file mode 100644 index 000000000..6671c182a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/media/base/sdp_video_format_utils.h @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MEDIA_BASE_SDP_VIDEO_FORMAT_UTILS_H_ +#define MEDIA_BASE_SDP_VIDEO_FORMAT_UTILS_H_ + +#include "absl/types/optional.h" +#include "api/video_codecs/sdp_video_format.h" + +namespace webrtc { +// Generate codec parameters that will be used as answer in an SDP negotiation +// based on local supported parameters and remote offered parameters. 
Both +// |local_supported_params|, |remote_offered_params|, and |answer_params| +// represent sendrecv media descriptions, i.e they are a mix of both encode and +// decode capabilities. In theory, when the profile in |local_supported_params| +// represent a strict superset of the profile in |remote_offered_params|, we +// could limit the profile in |answer_params| to the profile in +// |remote_offered_params|. However, to simplify the code, each supported H264 +// profile should be listed explicitly in the list of local supported codecs, +// even if they are redundant. Then each local codec in the list should be +// tested one at a time against the remote codec, and only when the profiles are +// equal should this function be called. Therefore, this function does not need +// to handle profile intersection, and the profile of |local_supported_params| +// and |remote_offered_params| must be equal before calling this function. The +// parameters that are used when negotiating are the level part of +// profile-level-id and level-asymmetry-allowed. +void H264GenerateProfileLevelIdForAnswer( + const SdpVideoFormat::Parameters& local_supported_params, + const SdpVideoFormat::Parameters& remote_offered_params, + SdpVideoFormat::Parameters* answer_params); + +// Parse max frame rate from SDP FMTP line. absl::nullopt is returned if the +// field is missing or not a number. +absl::optional ParseSdpForVPxMaxFrameRate( + const SdpVideoFormat::Parameters& params); + +// Parse max frame size from SDP FMTP line. absl::nullopt is returned if the +// field is missing or not a number. Please note that the value is stored in sub +// blocks but the returned value is in total number of pixels. 
+absl::optional ParseSdpForVPxMaxFrameSize( + const SdpVideoFormat::Parameters& params); + +} // namespace webrtc + +#endif // MEDIA_BASE_SDP_VIDEO_FORMAT_UTILS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/turn_utils.h b/TMessagesProj/jni/voip/webrtc/media/base/turn_utils.h index ed8e282ba..82e492c02 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/turn_utils.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/turn_utils.h @@ -18,8 +18,6 @@ namespace cricket { -struct PacketOptions; - // Finds data location within a TURN Channel Message or TURN Send Indication // message. bool RTC_EXPORT UnwrapTurnPacket(const uint8_t* packet, diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.cc b/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.cc index e6a91368f..3c20eca96 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.cc @@ -94,6 +94,7 @@ void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) { } void VideoBroadcaster::OnDiscardedFrame() { + webrtc::MutexLock lock(&sinks_and_wants_lock_); for (auto& sink_pair : sink_pairs()) { sink_pair.sink->OnDiscardedFrame(); } diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.h b/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.h index 0703862c4..2f4e57822 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_broadcaster.h @@ -12,12 +12,12 @@ #define MEDIA_BASE_VIDEO_BROADCASTER_H_ #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_source_interface.h" #include "media/base/video_source_base.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" namespace rtc { diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_source_base.h 
b/TMessagesProj/jni/voip/webrtc/media/base/video_source_base.h index 507fa1064..59b7dab16 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_source_base.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_source_base.h @@ -13,10 +13,10 @@ #include +#include "api/sequence_checker.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" -#include "rtc_base/thread_checker.h" namespace rtc { diff --git a/TMessagesProj/jni/voip/webrtc/media/base/vp9_profile.h b/TMessagesProj/jni/voip/webrtc/media/base/vp9_profile.h index e47204fed..d44a7998d 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/vp9_profile.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/vp9_profile.h @@ -11,43 +11,9 @@ #ifndef MEDIA_BASE_VP9_PROFILE_H_ #define MEDIA_BASE_VP9_PROFILE_H_ -#include +#include "api/video_codecs/vp9_profile.h" -#include "absl/types/optional.h" -#include "api/video_codecs/sdp_video_format.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// Profile information for VP9 video. -extern RTC_EXPORT const char kVP9FmtpProfileId[]; - -enum class VP9Profile { - kProfile0, - kProfile1, - kProfile2, -}; - -// Helper functions to convert VP9Profile to std::string. Returns "0" by -// default. -RTC_EXPORT std::string VP9ProfileToString(VP9Profile profile); - -// Helper functions to convert std::string to VP9Profile. Returns null if given -// an invalid profile string. -absl::optional StringToVP9Profile(const std::string& str); - -// Parse profile that is represented as a string of single digit contained in an -// SDP key-value map. A default profile(kProfile0) will be returned if the -// profile key is missing. Nothing will be returned if the key is present but -// the string is invalid. -RTC_EXPORT absl::optional ParseSdpForVP9Profile( - const SdpVideoFormat::Parameters& params); - -// Returns true if the parameters have the same VP9 profile, or neither contains -// VP9 profile. 
-bool IsSameVP9Profile(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2); - -} // namespace webrtc +// TODO(crbug.com/1187565): Remove this file once downstream projects stop +// depend on it. #endif // MEDIA_BASE_VP9_PROFILE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/constants.cc b/TMessagesProj/jni/voip/webrtc/media/engine/constants.cc deleted file mode 100644 index 12d6ddde5..000000000 --- a/TMessagesProj/jni/voip/webrtc/media/engine/constants.cc +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "media/engine/constants.h" - -namespace cricket { - -const int kVideoMtu = 1200; -const int kVideoRtpSendBufferSize = 65536; -const int kVideoRtpRecvBufferSize = 262144; - -} // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/constants.h b/TMessagesProj/jni/voip/webrtc/media/engine/constants.h deleted file mode 100644 index 9a421d987..000000000 --- a/TMessagesProj/jni/voip/webrtc/media/engine/constants.h +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MEDIA_ENGINE_CONSTANTS_H_ -#define MEDIA_ENGINE_CONSTANTS_H_ - -namespace cricket { - -extern const int kVideoMtu; -extern const int kVideoRtpSendBufferSize; -extern const int kVideoRtpRecvBufferSize; - -} // namespace cricket - -#endif // MEDIA_ENGINE_CONSTANTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc index e320880b2..76a70aaa5 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.cc @@ -17,6 +17,7 @@ #include "media/base/rtp_utils.h" #include "rtc_base/checks.h" #include "rtc_base/gunit.h" +#include "rtc_base/thread.h" namespace cricket { FakeAudioSendStream::FakeAudioSendStream( @@ -326,10 +327,7 @@ void FakeVideoSendStream::InjectVideoSinkWants( FakeVideoReceiveStream::FakeVideoReceiveStream( webrtc::VideoReceiveStream::Config config) - : config_(std::move(config)), - receiving_(false), - num_added_secondary_sinks_(0), - num_removed_secondary_sinks_(0) {} + : config_(std::move(config)), receiving_(false) {} const webrtc::VideoReceiveStream::Config& FakeVideoReceiveStream::GetConfig() const { @@ -361,24 +359,6 @@ void FakeVideoReceiveStream::SetStats( stats_ = stats; } -void FakeVideoReceiveStream::AddSecondarySink( - webrtc::RtpPacketSinkInterface* sink) { - ++num_added_secondary_sinks_; -} - -void FakeVideoReceiveStream::RemoveSecondarySink( - const webrtc::RtpPacketSinkInterface* sink) { - ++num_removed_secondary_sinks_; -} - -int FakeVideoReceiveStream::GetNumAddedSecondarySinks() const { - return num_added_secondary_sinks_; -} - -int FakeVideoReceiveStream::GetNumRemovedSecondarySinks() const { - return num_removed_secondary_sinks_; -} - FakeFlexfecReceiveStream::FakeFlexfecReceiveStream( const webrtc::FlexfecReceiveStream::Config& config) : config_(config) {} @@ -398,7 +378,13 @@ void FakeFlexfecReceiveStream::OnRtpPacket(const 
webrtc::RtpPacketReceived&) { } FakeCall::FakeCall() - : audio_network_state_(webrtc::kNetworkUp), + : FakeCall(rtc::Thread::Current(), rtc::Thread::Current()) {} + +FakeCall::FakeCall(webrtc::TaskQueueBase* worker_thread, + webrtc::TaskQueueBase* network_thread) + : network_thread_(network_thread), + worker_thread_(worker_thread), + audio_network_state_(webrtc::kNetworkUp), video_network_state_(webrtc::kNetworkUp), num_created_send_streams_(0), num_created_receive_streams_(0) {} @@ -599,14 +585,17 @@ FakeCall::DeliveryStatus FakeCall::DeliverPacket(webrtc::MediaType media_type, if (media_type == webrtc::MediaType::VIDEO) { for (auto receiver : video_receive_streams_) { - if (receiver->GetConfig().rtp.remote_ssrc == ssrc) + if (receiver->GetConfig().rtp.remote_ssrc == ssrc) { + ++delivered_packets_by_ssrc_[ssrc]; return DELIVERY_OK; + } } } if (media_type == webrtc::MediaType::AUDIO) { for (auto receiver : audio_receive_streams_) { if (receiver->GetConfig().rtp.remote_ssrc == ssrc) { receiver->DeliverRtp(packet.cdata(), packet.size(), packet_time_us); + ++delivered_packets_by_ssrc_[ssrc]; return DELIVERY_OK; } } @@ -630,6 +619,14 @@ webrtc::Call::Stats FakeCall::GetStats() const { return stats_; } +webrtc::TaskQueueBase* FakeCall::network_thread() const { + return network_thread_; +} + +webrtc::TaskQueueBase* FakeCall::worker_thread() const { + return worker_thread_; +} + void FakeCall::SignalChannelNetworkState(webrtc::MediaType media, webrtc::NetworkState state) { switch (media) { diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h index 385bbcd76..fd383dadd 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h @@ -20,6 +20,7 @@ #ifndef MEDIA_ENGINE_FAKE_WEBRTC_CALL_H_ #define MEDIA_ENGINE_FAKE_WEBRTC_CALL_H_ +#include #include #include #include @@ -104,6 +105,7 @@ class 
FakeAudioReceiveStream final : public webrtc::AudioReceiveStream { void Reconfigure(const webrtc::AudioReceiveStream::Config& config) override; void Start() override { started_ = true; } void Stop() override { started_ = false; } + bool IsRunning() const override { return started_; } webrtc::AudioReceiveStream::Stats GetStats( bool get_and_clear_legacy_stats) const override; @@ -218,12 +220,6 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream { void SetStats(const webrtc::VideoReceiveStream::Stats& stats); - void AddSecondarySink(webrtc::RtpPacketSinkInterface* sink) override; - void RemoveSecondarySink(const webrtc::RtpPacketSinkInterface* sink) override; - - int GetNumAddedSecondarySinks() const; - int GetNumRemovedSecondarySinks() const; - std::vector GetSources() const override { return std::vector(); } @@ -266,9 +262,6 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream { webrtc::VideoReceiveStream::Stats stats_; int base_mininum_playout_delay_ms_ = 0; - - int num_added_secondary_sinks_; - int num_removed_secondary_sinks_; }; class FakeFlexfecReceiveStream final : public webrtc::FlexfecReceiveStream { @@ -289,6 +282,8 @@ class FakeFlexfecReceiveStream final : public webrtc::FlexfecReceiveStream { class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { public: FakeCall(); + FakeCall(webrtc::TaskQueueBase* worker_thread, + webrtc::TaskQueueBase* network_thread); ~FakeCall() override; webrtc::MockRtpTransportControllerSend* GetMockTransportControllerSend() { @@ -307,6 +302,10 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { const std::vector& GetFlexfecReceiveStreams(); rtc::SentPacket last_sent_packet() const { return last_sent_packet_; } + size_t GetDeliveredPacketsForSsrc(uint32_t ssrc) const { + auto it = delivered_packets_by_ssrc_.find(ssrc); + return it != delivered_packets_by_ssrc_.end() ? 
it->second : 0u; + } // This is useful if we care about the last media packet (with id populated) // but not the last ICE packet (with -1 ID). @@ -367,12 +366,18 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { return trials_; } + webrtc::TaskQueueBase* network_thread() const override; + webrtc::TaskQueueBase* worker_thread() const override; + void SignalChannelNetworkState(webrtc::MediaType media, webrtc::NetworkState state) override; void OnAudioTransportOverheadChanged( int transport_overhead_per_packet) override; void OnSentPacket(const rtc::SentPacket& sent_packet) override; + webrtc::TaskQueueBase* const network_thread_; + webrtc::TaskQueueBase* const worker_thread_; + ::testing::NiceMock transport_controller_send_; @@ -387,6 +392,7 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { std::vector video_receive_streams_; std::vector audio_receive_streams_; std::vector flexfec_receive_streams_; + std::map delivered_packets_by_ssrc_; int num_created_send_streams_; int num_created_receive_streams_; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc b/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc index d512b731a..a8d1f0000 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc @@ -23,23 +23,6 @@ namespace webrtc { -namespace { - -bool IsFormatSupported( - const std::vector& supported_formats, - const webrtc::SdpVideoFormat& format) { - for (const webrtc::SdpVideoFormat& supported_format : supported_formats) { - if (cricket::IsSameCodec(format.name, format.parameters, - supported_format.name, - supported_format.parameters)) { - return true; - } - } - return false; -} - -} // namespace - std::vector InternalDecoderFactory::GetSupportedFormats() const { std::vector formats; @@ -55,8 +38,9 @@ std::vector InternalDecoderFactory::GetSupportedFormats() 
std::unique_ptr InternalDecoderFactory::CreateVideoDecoder( const SdpVideoFormat& format) { - if (!IsFormatSupported(GetSupportedFormats(), format)) { - RTC_LOG(LS_ERROR) << "Trying to create decoder for unsupported format"; + if (!format.IsCodecInList(GetSupportedFormats())) { + RTC_LOG(LS_WARNING) << "Trying to create decoder for unsupported format. " + << format.ToString(); return nullptr; } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/payload_type_mapper.cc b/TMessagesProj/jni/voip/webrtc/media/engine/payload_type_mapper.cc index e9f863ca6..4c4697599 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/payload_type_mapper.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/payload_type_mapper.cc @@ -32,18 +32,18 @@ PayloadTypeMapper::PayloadTypeMapper() max_payload_type_(127), mappings_( {// Static payload type assignments according to RFC 3551. - {{"PCMU", 8000, 1}, 0}, + {{kPcmuCodecName, 8000, 1}, 0}, {{"GSM", 8000, 1}, 3}, {{"G723", 8000, 1}, 4}, {{"DVI4", 8000, 1}, 5}, {{"DVI4", 16000, 1}, 6}, {{"LPC", 8000, 1}, 7}, - {{"PCMA", 8000, 1}, 8}, - {{"G722", 8000, 1}, 9}, - {{"L16", 44100, 2}, 10}, - {{"L16", 44100, 1}, 11}, + {{kPcmaCodecName, 8000, 1}, 8}, + {{kG722CodecName, 8000, 1}, 9}, + {{kL16CodecName, 44100, 2}, 10}, + {{kL16CodecName, 44100, 1}, 11}, {{"QCELP", 8000, 1}, 12}, - {{"CN", 8000, 1}, 13}, + {{kCnCodecName, 8000, 1}, 13}, // RFC 4566 is a bit ambiguous on the contents of the "encoding // parameters" field, which, for audio, encodes the number of // channels. It is "optional and may be omitted if the number of @@ -61,7 +61,6 @@ PayloadTypeMapper::PayloadTypeMapper() // Payload type assignments currently used by WebRTC. 
// Includes data to reduce collisions (and thus reassignments) - {{kGoogleRtpDataCodecName, 0, 0}, kGoogleRtpDataCodecPlType}, {{kIlbcCodecName, 8000, 1}, 102}, {{kIsacCodecName, 16000, 1}, 103}, {{kIsacCodecName, 32000, 1}, 104}, @@ -70,7 +69,8 @@ PayloadTypeMapper::PayloadTypeMapper() {{kOpusCodecName, 48000, 2, - {{"minptime", "10"}, {"useinbandfec", "1"}}}, + {{kCodecParamMinPTime, "10"}, + {kCodecParamUseInbandFec, kParamValueTrue}}}, 111}, // TODO(solenberg): Remove the hard coded 16k,32k,48k DTMF once we // assign payload types dynamically for send side as well. diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc index f74d4adfb..ebc6a240f 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast.cc @@ -15,14 +15,15 @@ #include #include +#include #include "absl/strings/match.h" #include "absl/types/optional.h" #include "api/video/video_codec_constants.h" #include "media/base/media_constants.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" -#include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/min_video_bitrate_experiment.h" #include "rtc_base/experiments/normalize_simulcast_size_experiment.h" #include "rtc_base/experiments/rate_control_settings.h" @@ -41,6 +42,15 @@ constexpr webrtc::DataRate Interpolate(const webrtc::DataRate& a, constexpr char kUseLegacySimulcastLayerLimitFieldTrial[] = "WebRTC-LegacySimulcastLayerLimit"; +constexpr double kDefaultMaxRoundupRate = 0.1; + +// TODO(webrtc:12415): Flip this to a kill switch when this feature launches. +bool EnableLowresBitrateInterpolation( + const webrtc::WebRtcKeyValueConfig& trials) { + return absl::StartsWith( + trials.Lookup("WebRTC-LowresSimulcastBitrateInterpolation"), "Enabled"); +} + // Limits for legacy conference screensharing mode. 
Currently used for the // lower of the two simulcast streams. constexpr webrtc::DataRate kScreenshareDefaultTl0Bitrate = @@ -61,7 +71,7 @@ struct SimulcastFormat { int width; int height; // The maximum number of simulcast layers can be used for - // resolutions at |widthxheigh| for legacy applications. + // resolutions at |widthxheight| for legacy applications. size_t max_layers; // The maximum bitrate for encoding stream at |widthxheight|, when we are // not sending the next higher spatial stream. @@ -96,10 +106,29 @@ constexpr const SimulcastFormat kSimulcastFormats[] = { {320, 180, 1, webrtc::DataRate::KilobitsPerSec(200), webrtc::DataRate::KilobitsPerSec(150), webrtc::DataRate::KilobitsPerSec(30)}, - {0, 0, 1, webrtc::DataRate::KilobitsPerSec(200), - webrtc::DataRate::KilobitsPerSec(150), + // As the resolution goes down, interpolate the target and max bitrates down + // towards zero. The min bitrate is still limited at 30 kbps and the target + // and the max will be capped from below accordingly. + {0, 0, 1, webrtc::DataRate::KilobitsPerSec(0), + webrtc::DataRate::KilobitsPerSec(0), webrtc::DataRate::KilobitsPerSec(30)}}; +std::vector GetSimulcastFormats( + bool enable_lowres_bitrate_interpolation) { + std::vector formats; + formats.insert(formats.begin(), std::begin(kSimulcastFormats), + std::end(kSimulcastFormats)); + if (!enable_lowres_bitrate_interpolation) { + RTC_CHECK_GE(formats.size(), 2u); + SimulcastFormat& format0x0 = formats[formats.size() - 1]; + const SimulcastFormat& format_prev = formats[formats.size() - 2]; + format0x0.max_bitrate = format_prev.max_bitrate; + format0x0.target_bitrate = format_prev.target_bitrate; + format0x0.min_bitrate = format_prev.min_bitrate; + } + return formats; +} + const int kMaxScreenshareSimulcastLayers = 2; // Multiway: Number of temporal layers for each simulcast stream. 
@@ -135,12 +164,14 @@ int DefaultNumberOfTemporalLayers(int simulcast_id, return default_num_temporal_layers; } -int FindSimulcastFormatIndex(int width, int height) { +int FindSimulcastFormatIndex(int width, + int height, + bool enable_lowres_bitrate_interpolation) { RTC_DCHECK_GE(width, 0); RTC_DCHECK_GE(height, 0); - for (uint32_t i = 0; i < arraysize(kSimulcastFormats); ++i) { - if (width * height >= - kSimulcastFormats[i].width * kSimulcastFormats[i].height) { + const auto formats = GetSimulcastFormats(enable_lowres_bitrate_interpolation); + for (uint32_t i = 0; i < formats.size(); ++i) { + if (width * height >= formats[i].width * formats[i].height) { return i; } } @@ -162,42 +193,70 @@ int NormalizeSimulcastSize(int size, size_t simulcast_layers) { return ((size >> base2_exponent) << base2_exponent); } -SimulcastFormat InterpolateSimulcastFormat(int width, int height) { - const int index = FindSimulcastFormatIndex(width, height); +SimulcastFormat InterpolateSimulcastFormat( + int width, + int height, + absl::optional max_roundup_rate, + bool enable_lowres_bitrate_interpolation) { + const auto formats = GetSimulcastFormats(enable_lowres_bitrate_interpolation); + const int index = FindSimulcastFormatIndex( + width, height, enable_lowres_bitrate_interpolation); if (index == 0) - return kSimulcastFormats[index]; + return formats[index]; const int total_pixels_up = - kSimulcastFormats[index - 1].width * kSimulcastFormats[index - 1].height; - const int total_pixels_down = - kSimulcastFormats[index].width * kSimulcastFormats[index].height; + formats[index - 1].width * formats[index - 1].height; + const int total_pixels_down = formats[index].width * formats[index].height; const int total_pixels = width * height; const float rate = (total_pixels_up - total_pixels) / static_cast(total_pixels_up - total_pixels_down); - size_t max_layers = kSimulcastFormats[index].max_layers; - webrtc::DataRate max_bitrate = - Interpolate(kSimulcastFormats[index - 1].max_bitrate, - 
kSimulcastFormats[index].max_bitrate, rate); - webrtc::DataRate target_bitrate = - Interpolate(kSimulcastFormats[index - 1].target_bitrate, - kSimulcastFormats[index].target_bitrate, rate); - webrtc::DataRate min_bitrate = - Interpolate(kSimulcastFormats[index - 1].min_bitrate, - kSimulcastFormats[index].min_bitrate, rate); + // Use upper resolution if |rate| is below the configured threshold. + size_t max_layers = (rate < max_roundup_rate.value_or(kDefaultMaxRoundupRate)) + ? formats[index - 1].max_layers + : formats[index].max_layers; + webrtc::DataRate max_bitrate = Interpolate(formats[index - 1].max_bitrate, + formats[index].max_bitrate, rate); + webrtc::DataRate target_bitrate = Interpolate( + formats[index - 1].target_bitrate, formats[index].target_bitrate, rate); + webrtc::DataRate min_bitrate = Interpolate(formats[index - 1].min_bitrate, + formats[index].min_bitrate, rate); return {width, height, max_layers, max_bitrate, target_bitrate, min_bitrate}; } -webrtc::DataRate FindSimulcastMaxBitrate(int width, int height) { - return InterpolateSimulcastFormat(width, height).max_bitrate; +SimulcastFormat InterpolateSimulcastFormat( + int width, + int height, + bool enable_lowres_bitrate_interpolation) { + return InterpolateSimulcastFormat(width, height, absl::nullopt, + enable_lowres_bitrate_interpolation); } -webrtc::DataRate FindSimulcastTargetBitrate(int width, int height) { - return InterpolateSimulcastFormat(width, height).target_bitrate; +webrtc::DataRate FindSimulcastMaxBitrate( + int width, + int height, + bool enable_lowres_bitrate_interpolation) { + return InterpolateSimulcastFormat(width, height, + enable_lowres_bitrate_interpolation) + .max_bitrate; } -webrtc::DataRate FindSimulcastMinBitrate(int width, int height) { - return InterpolateSimulcastFormat(width, height).min_bitrate; +webrtc::DataRate FindSimulcastTargetBitrate( + int width, + int height, + bool enable_lowres_bitrate_interpolation) { + return InterpolateSimulcastFormat(width, height, + 
enable_lowres_bitrate_interpolation) + .target_bitrate; +} + +webrtc::DataRate FindSimulcastMinBitrate( + int width, + int height, + bool enable_lowres_bitrate_interpolation) { + return InterpolateSimulcastFormat(width, height, + enable_lowres_bitrate_interpolation) + .min_bitrate; } void BoostMaxSimulcastLayer(webrtc::DataRate max_bitrate, @@ -235,9 +294,21 @@ size_t LimitSimulcastLayerCount(int width, const webrtc::WebRtcKeyValueConfig& trials) { if (!absl::StartsWith(trials.Lookup(kUseLegacySimulcastLayerLimitFieldTrial), "Disabled")) { + // Max layers from one higher resolution in kSimulcastFormats will be used + // if the ratio (pixels_up - pixels) / (pixels_up - pixels_down) is less + // than configured |max_ratio|. pixels_down is the selected index in + // kSimulcastFormats based on pixels. + webrtc::FieldTrialOptional max_ratio("max_ratio"); + webrtc::ParseFieldTrial({&max_ratio}, + trials.Lookup("WebRTC-SimulcastLayerLimitRoundUp")); + + const bool enable_lowres_bitrate_interpolation = + EnableLowresBitrateInterpolation(trials); size_t adaptive_layer_count = std::max( need_layers, - kSimulcastFormats[FindSimulcastFormatIndex(width, height)].max_layers); + InterpolateSimulcastFormat(width, height, max_ratio.GetOptional(), + enable_lowres_bitrate_interpolation) + .max_layers); if (layer_count > adaptive_layer_count) { RTC_LOG(LS_WARNING) << "Reducing simulcast layer count from " << layer_count << " to " << adaptive_layer_count; @@ -291,6 +362,9 @@ std::vector GetNormalSimulcastLayers( const webrtc::WebRtcKeyValueConfig& trials) { std::vector layers(layer_count); + const bool enable_lowres_bitrate_interpolation = + EnableLowresBitrateInterpolation(trials); + // Format width and height has to be divisible by |2 ^ num_simulcast_layers - // 1|. width = NormalizeSimulcastSize(width, layer_count); @@ -306,9 +380,14 @@ std::vector GetNormalSimulcastLayers( temporal_layers_supported ? 
DefaultNumberOfTemporalLayers(s, false, trials) : 1; - layers[s].max_bitrate_bps = FindSimulcastMaxBitrate(width, height).bps(); + layers[s].max_bitrate_bps = + FindSimulcastMaxBitrate(width, height, + enable_lowres_bitrate_interpolation) + .bps(); layers[s].target_bitrate_bps = - FindSimulcastTargetBitrate(width, height).bps(); + FindSimulcastTargetBitrate(width, height, + enable_lowres_bitrate_interpolation) + .bps(); int num_temporal_layers = DefaultNumberOfTemporalLayers(s, false, trials); if (s == 0) { // If alternative temporal rate allocation is selected, adjust the @@ -335,7 +414,17 @@ std::vector GetNormalSimulcastLayers( layers[s].target_bitrate_bps = static_cast(layers[s].target_bitrate_bps * rate_factor); } - layers[s].min_bitrate_bps = FindSimulcastMinBitrate(width, height).bps(); + layers[s].min_bitrate_bps = + FindSimulcastMinBitrate(width, height, + enable_lowres_bitrate_interpolation) + .bps(); + + // Ensure consistency. + layers[s].max_bitrate_bps = + std::max(layers[s].min_bitrate_bps, layers[s].max_bitrate_bps); + layers[s].target_bitrate_bps = + std::max(layers[s].min_bitrate_bps, layers[s].target_bitrate_bps); + layers[s].max_framerate = kDefaultVideoMaxFramerate; width /= 2; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc index e0c0ff7bc..bee7b23c4 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/algorithm/container.h" #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" #include "api/video/video_codec_constants.h" @@ -61,25 +62,29 @@ uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) { return bitrate_sum; } -int NumberOfStreams(const webrtc::VideoCodec& codec) { - int streams = +int CountAllStreams(const webrtc::VideoCodec& codec) { 
+ int total_streams_count = codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams; - uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec); + uint32_t simulcast_max_bitrate = + SumStreamMaxBitrate(total_streams_count, codec); if (simulcast_max_bitrate == 0) { - streams = 1; + total_streams_count = 1; } - return streams; + return total_streams_count; } -int NumActiveStreams(const webrtc::VideoCodec& codec) { - int num_configured_streams = NumberOfStreams(codec); - int num_active_streams = 0; - for (int i = 0; i < num_configured_streams; ++i) { +int CountActiveStreams(const webrtc::VideoCodec& codec) { + if (codec.numberOfSimulcastStreams < 1) { + return 1; + } + int total_streams_count = CountAllStreams(codec); + int active_streams_count = 0; + for (int i = 0; i < total_streams_count; ++i) { if (codec.simulcastStream[i].active) { - ++num_active_streams; + ++active_streams_count; } } - return num_active_streams; + return active_streams_count; } int VerifyCodec(const webrtc::VideoCodec* inst) { @@ -97,42 +102,146 @@ int VerifyCodec(const webrtc::VideoCodec* inst) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } if (inst->codecType == webrtc::kVideoCodecVP8 && - inst->VP8().automaticResizeOn && NumActiveStreams(*inst) > 1) { + inst->VP8().automaticResizeOn && CountActiveStreams(*inst) > 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } return WEBRTC_VIDEO_CODEC_OK; } -bool StreamResolutionCompare(const webrtc::SpatialLayer& a, - const webrtc::SpatialLayer& b) { +bool StreamQualityCompare(const webrtc::SpatialLayer& a, + const webrtc::SpatialLayer& b) { return std::tie(a.height, a.width, a.maxBitrate, a.maxFramerate) < std::tie(b.height, b.width, b.maxBitrate, b.maxFramerate); } -// An EncodedImageCallback implementation that forwards on calls to a -// SimulcastEncoderAdapter, but with the stream index it's registered with as -// the first parameter to Encoded. 
-class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback { - public: - AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter, - size_t stream_idx) - : adapter_(adapter), stream_idx_(stream_idx) {} +void GetLowestAndHighestQualityStreamIndixes( + rtc::ArrayView streams, + int* lowest_quality_stream_idx, + int* highest_quality_stream_idx) { + const auto lowest_highest_quality_streams = + absl::c_minmax_element(streams, StreamQualityCompare); + *lowest_quality_stream_idx = + std::distance(streams.begin(), lowest_highest_quality_streams.first); + *highest_quality_stream_idx = + std::distance(streams.begin(), lowest_highest_quality_streams.second); +} - EncodedImageCallback::Result OnEncodedImage( - const webrtc::EncodedImage& encoded_image, - const webrtc::CodecSpecificInfo* codec_specific_info) override { - return adapter_->OnEncodedImage(stream_idx_, encoded_image, - codec_specific_info); +std::vector GetStreamStartBitratesKbps( + const webrtc::VideoCodec& codec) { + std::vector start_bitrates; + std::unique_ptr rate_allocator = + std::make_unique(codec); + webrtc::VideoBitrateAllocation allocation = + rate_allocator->Allocate(webrtc::VideoBitrateAllocationParameters( + codec.startBitrate * 1000, codec.maxFramerate)); + + int total_streams_count = CountAllStreams(codec); + for (int i = 0; i < total_streams_count; ++i) { + uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000; + start_bitrates.push_back(stream_bitrate); } + return start_bitrates; +} - private: - webrtc::SimulcastEncoderAdapter* const adapter_; - const size_t stream_idx_; -}; } // namespace namespace webrtc { +SimulcastEncoderAdapter::EncoderContext::EncoderContext( + std::unique_ptr encoder, + bool prefer_temporal_support) + : encoder_(std::move(encoder)), + prefer_temporal_support_(prefer_temporal_support) {} + +void SimulcastEncoderAdapter::EncoderContext::Release() { + if (encoder_) { + encoder_->RegisterEncodeCompleteCallback(nullptr); + 
encoder_->Release(); + } +} + +SimulcastEncoderAdapter::StreamContext::StreamContext( + SimulcastEncoderAdapter* parent, + std::unique_ptr encoder_context, + std::unique_ptr framerate_controller, + int stream_idx, + uint16_t width, + uint16_t height, + bool is_paused) + : parent_(parent), + encoder_context_(std::move(encoder_context)), + framerate_controller_(std::move(framerate_controller)), + stream_idx_(stream_idx), + width_(width), + height_(height), + is_keyframe_needed_(false), + is_paused_(is_paused) { + if (parent_) { + encoder_context_->encoder().RegisterEncodeCompleteCallback(this); + } +} + +SimulcastEncoderAdapter::StreamContext::StreamContext(StreamContext&& rhs) + : parent_(rhs.parent_), + encoder_context_(std::move(rhs.encoder_context_)), + framerate_controller_(std::move(rhs.framerate_controller_)), + stream_idx_(rhs.stream_idx_), + width_(rhs.width_), + height_(rhs.height_), + is_keyframe_needed_(rhs.is_keyframe_needed_), + is_paused_(rhs.is_paused_) { + if (parent_) { + encoder_context_->encoder().RegisterEncodeCompleteCallback(this); + } +} + +SimulcastEncoderAdapter::StreamContext::~StreamContext() { + if (encoder_context_) { + encoder_context_->Release(); + } +} + +std::unique_ptr +SimulcastEncoderAdapter::StreamContext::ReleaseEncoderContext() && { + encoder_context_->Release(); + return std::move(encoder_context_); +} + +void SimulcastEncoderAdapter::StreamContext::OnKeyframe(Timestamp timestamp) { + is_keyframe_needed_ = false; + if (framerate_controller_) { + framerate_controller_->AddFrame(timestamp.ms()); + } +} + +bool SimulcastEncoderAdapter::StreamContext::ShouldDropFrame( + Timestamp timestamp) { + if (!framerate_controller_) { + return false; + } + + if (framerate_controller_->DropFrame(timestamp.ms())) { + return true; + } + framerate_controller_->AddFrame(timestamp.ms()); + return false; +} + +EncodedImageCallback::Result +SimulcastEncoderAdapter::StreamContext::OnEncodedImage( + const EncodedImage& encoded_image, + const 
CodecSpecificInfo* codec_specific_info) { + RTC_CHECK(parent_); // If null, this method should never be called. + return parent_->OnEncodedImage(stream_idx_, encoded_image, + codec_specific_info); +} + +void SimulcastEncoderAdapter::StreamContext::OnDroppedFrame( + DropReason /*reason*/) { + RTC_CHECK(parent_); // If null, this method should never be called. + parent_->OnDroppedFrame(stream_idx_); +} + SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory, const SdpVideoFormat& format) : SimulcastEncoderAdapter(factory, nullptr, format) {} @@ -145,6 +254,8 @@ SimulcastEncoderAdapter::SimulcastEncoderAdapter( primary_encoder_factory_(primary_factory), fallback_encoder_factory_(fallback_factory), video_format_(format), + total_streams_count_(0), + bypass_mode_(false), encoded_complete_callback_(nullptr), experimental_boosted_screenshare_qp_(GetScreenshareBoostedQpValue()), boost_base_layer_quality_(RateControlSettings::ParseFromFieldTrials() @@ -164,25 +275,23 @@ SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { } void SimulcastEncoderAdapter::SetFecControllerOverride( - FecControllerOverride* fec_controller_override) { + FecControllerOverride* /*fec_controller_override*/) { // Ignored. } int SimulcastEncoderAdapter::Release() { RTC_DCHECK_RUN_ON(&encoder_queue_); - while (!streaminfos_.empty()) { - std::unique_ptr encoder = - std::move(streaminfos_.back().encoder); - // Even though it seems very unlikely, there are no guarantees that the - // encoder will not call back after being Release()'d. Therefore, we first - // disable the callbacks here. - encoder->RegisterEncodeCompleteCallback(nullptr); - encoder->Release(); - streaminfos_.pop_back(); // Deletes callback adapter. - stored_encoders_.push(std::move(encoder)); + while (!stream_contexts_.empty()) { + // Move the encoder instances and put it on the |cached_encoder_contexts_| + // where it may possibly be reused from (ordering does not matter). 
+ cached_encoder_contexts_.push_front( + std::move(stream_contexts_.back()).ReleaseEncoderContext()); + stream_contexts_.pop_back(); } + bypass_mode_ = false; + // It's legal to move the encoder to another queue now. encoder_queue_.Detach(); @@ -191,7 +300,6 @@ int SimulcastEncoderAdapter::Release() { return WEBRTC_VIDEO_CODEC_OK; } -// TODO(eladalon): s/inst/codec_settings/g. int SimulcastEncoderAdapter::InitEncode( const VideoCodec* inst, const VideoEncoder::Settings& settings) { @@ -206,136 +314,114 @@ int SimulcastEncoderAdapter::InitEncode( return ret; } - ret = Release(); - if (ret < 0) { - return ret; - } - - int number_of_streams = NumberOfStreams(*inst); - RTC_DCHECK_LE(number_of_streams, kMaxSimulcastStreams); - bool doing_simulcast_using_adapter = (number_of_streams > 1); - int num_active_streams = NumActiveStreams(*inst); + Release(); codec_ = *inst; - SimulcastRateAllocator rate_allocator(codec_); - VideoBitrateAllocation allocation = - rate_allocator.Allocate(VideoBitrateAllocationParameters( - codec_.startBitrate * 1000, codec_.maxFramerate)); - std::vector start_bitrates; - for (int i = 0; i < kMaxSimulcastStreams; ++i) { - uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000; - start_bitrates.push_back(stream_bitrate); + total_streams_count_ = CountAllStreams(*inst); + + // TODO(ronghuawu): Remove once this is handled in LibvpxVp8Encoder. + if (codec_.qpMax < kDefaultMinQp) { + codec_.qpMax = kDefaultMaxQp; } - // Create |number_of_streams| of encoder instances and init them. 
- const auto minmax = std::minmax_element( - std::begin(codec_.simulcastStream), - std::begin(codec_.simulcastStream) + number_of_streams, - StreamResolutionCompare); - const auto lowest_resolution_stream_index = - std::distance(std::begin(codec_.simulcastStream), minmax.first); - const auto highest_resolution_stream_index = - std::distance(std::begin(codec_.simulcastStream), minmax.second); + bool is_legacy_singlecast = codec_.numberOfSimulcastStreams == 0; + int lowest_quality_stream_idx = 0; + int highest_quality_stream_idx = 0; + if (!is_legacy_singlecast) { + GetLowestAndHighestQualityStreamIndixes( + rtc::ArrayView(codec_.simulcastStream, + total_streams_count_), + &lowest_quality_stream_idx, &highest_quality_stream_idx); + } - RTC_DCHECK_LT(lowest_resolution_stream_index, number_of_streams); - RTC_DCHECK_LT(highest_resolution_stream_index, number_of_streams); + std::unique_ptr encoder_context = FetchOrCreateEncoderContext( + /*is_lowest_quality_stream=*/( + is_legacy_singlecast || + codec_.simulcastStream[lowest_quality_stream_idx].active)); + if (encoder_context == nullptr) { + return WEBRTC_VIDEO_CODEC_MEMORY; + } - for (int i = 0; i < number_of_streams; ++i) { - // If an existing encoder instance exists, reuse it. - // TODO(brandtr): Set initial RTP state (e.g., picture_id/tl0_pic_idx) here, - // when we start storing that state outside the encoder wrappers. 
- std::unique_ptr encoder; - if (!stored_encoders_.empty()) { - encoder = std::move(stored_encoders_.top()); - stored_encoders_.pop(); - } else { - encoder = primary_encoder_factory_->CreateVideoEncoder(video_format_); - if (fallback_encoder_factory_ != nullptr) { - encoder = CreateVideoEncoderSoftwareFallbackWrapper( - fallback_encoder_factory_->CreateVideoEncoder(video_format_), - std::move(encoder), - i == lowest_resolution_stream_index && - prefer_temporal_support_on_base_layer_); - } + // Two distinct scenarios: + // * Singlecast (total_streams_count == 1) or simulcast with simulcast-capable + // underlaying encoder implementation. SEA operates in bypass mode: original + // settings are passed to the underlaying encoder, frame encode complete + // callback is not intercepted. + // * Multi-encoder simulcast or singlecast if layers are deactivated + // (total_streams_count > 1 and active_streams_count >= 1). SEA creates + // N=active_streams_count encoders and configures each to produce a single + // stream. + + // Singlecast or simulcast with simulcast-capable underlaying encoder. 
+ if (total_streams_count_ == 1 || + encoder_context->encoder().GetEncoderInfo().supports_simulcast) { + int ret = encoder_context->encoder().InitEncode(&codec_, settings); + if (ret >= 0) { + int active_streams_count = CountActiveStreams(*inst); + stream_contexts_.emplace_back( + /*parent=*/nullptr, std::move(encoder_context), + /*framerate_controller=*/nullptr, /*stream_idx=*/0, codec_.width, + codec_.height, /*is_paused=*/active_streams_count == 0); + bypass_mode_ = true; + + DestroyStoredEncoders(); + rtc::AtomicOps::ReleaseStore(&inited_, 1); + return WEBRTC_VIDEO_CODEC_OK; } - bool encoder_initialized = false; - if (doing_simulcast_using_adapter && i == 0 && - encoder->GetEncoderInfo().supports_simulcast) { - ret = encoder->InitEncode(&codec_, settings); - if (ret < 0) { - encoder->Release(); - } else { - doing_simulcast_using_adapter = false; - number_of_streams = 1; - encoder_initialized = true; - } + encoder_context->Release(); + if (total_streams_count_ == 1) { + // Failed to initialize singlecast encoder. + return ret; + } + } + + // Multi-encoder simulcast or singlecast (deactivated layers). + std::vector stream_start_bitrate_kbps = + GetStreamStartBitratesKbps(codec_); + + for (int stream_idx = 0; stream_idx < total_streams_count_; ++stream_idx) { + if (!is_legacy_singlecast && !codec_.simulcastStream[stream_idx].active) { + continue; } - VideoCodec stream_codec; - uint32_t start_bitrate_kbps = start_bitrates[i]; - const bool send_stream = doing_simulcast_using_adapter - ? start_bitrate_kbps > 0 - : num_active_streams > 0; - if (!doing_simulcast_using_adapter) { - stream_codec = codec_; - stream_codec.numberOfSimulcastStreams = - std::max(1, stream_codec.numberOfSimulcastStreams); - } else { - // Cap start bitrate to the min bitrate in order to avoid strange codec - // behavior. Since sending will be false, this should not matter. - StreamResolution stream_resolution = - i == highest_resolution_stream_index - ? 
StreamResolution::HIGHEST - : i == lowest_resolution_stream_index ? StreamResolution::LOWEST - : StreamResolution::OTHER; - - start_bitrate_kbps = - std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps); - PopulateStreamCodec(codec_, i, start_bitrate_kbps, stream_resolution, - &stream_codec); + if (encoder_context == nullptr) { + encoder_context = FetchOrCreateEncoderContext( + /*is_lowest_quality_stream=*/stream_idx == lowest_quality_stream_idx); + } + if (encoder_context == nullptr) { + Release(); + return WEBRTC_VIDEO_CODEC_MEMORY; } - // TODO(ronghuawu): Remove once this is handled in LibvpxVp8Encoder. - if (stream_codec.qpMax < kDefaultMinQp) { - stream_codec.qpMax = kDefaultMaxQp; + VideoCodec stream_codec = MakeStreamCodec( + codec_, stream_idx, stream_start_bitrate_kbps[stream_idx], + /*is_lowest_quality_stream=*/stream_idx == lowest_quality_stream_idx, + /*is_highest_quality_stream=*/stream_idx == highest_quality_stream_idx); + + int ret = encoder_context->encoder().InitEncode(&stream_codec, settings); + if (ret < 0) { + encoder_context.reset(); + Release(); + return ret; } - if (!encoder_initialized) { - ret = encoder->InitEncode(&stream_codec, settings); - if (ret < 0) { - // Explicitly destroy the current encoder; because we haven't registered - // a StreamInfo for it yet, Release won't do anything about it. - encoder.reset(); - Release(); - return ret; - } - } + // Intercept frame encode complete callback only for upper streams, where + // we need to set a correct stream index. Set |parent| to nullptr for the + // lowest stream to bypass the callback. + SimulcastEncoderAdapter* parent = stream_idx > 0 ? this : nullptr; - if (!doing_simulcast_using_adapter) { - // Without simulcast, just pass through the encoder info from the one - // active encoder. 
- encoder->RegisterEncodeCompleteCallback(encoded_complete_callback_); - streaminfos_.emplace_back( - std::move(encoder), nullptr, - std::make_unique(stream_codec.maxFramerate), - stream_codec.width, stream_codec.height, send_stream); - } else { - std::unique_ptr callback( - new AdapterEncodedImageCallback(this, i)); - encoder->RegisterEncodeCompleteCallback(callback.get()); - streaminfos_.emplace_back( - std::move(encoder), std::move(callback), - std::make_unique(stream_codec.maxFramerate), - stream_codec.width, stream_codec.height, send_stream); - } + bool is_paused = stream_start_bitrate_kbps[stream_idx] == 0; + stream_contexts_.emplace_back( + parent, std::move(encoder_context), + std::make_unique(stream_codec.maxFramerate), + stream_idx, stream_codec.width, stream_codec.height, is_paused); } // To save memory, don't store encoders that we don't use. DestroyStoredEncoders(); rtc::AtomicOps::ReleaseStore(&inited_, 1); - return WEBRTC_VIDEO_CODEC_OK; } @@ -351,22 +437,46 @@ int SimulcastEncoderAdapter::Encode( return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } + if (encoder_info_override_.requested_resolution_alignment()) { + const int alignment = + *encoder_info_override_.requested_resolution_alignment(); + if (input_image.width() % alignment != 0 || + input_image.height() % alignment != 0) { + RTC_LOG(LS_WARNING) << "Frame " << input_image.width() << "x" + << input_image.height() << " not divisible by " + << alignment; + return WEBRTC_VIDEO_CODEC_ERROR; + } + if (encoder_info_override_.apply_alignment_to_all_simulcast_layers()) { + for (const auto& layer : stream_contexts_) { + if (layer.width() % alignment != 0 || layer.height() % alignment != 0) { + RTC_LOG(LS_WARNING) + << "Codec " << layer.width() << "x" << layer.height() + << " not divisible by " << alignment; + return WEBRTC_VIDEO_CODEC_ERROR; + } + } + } + } + // All active streams should generate a key frame if // a key frame is requested by any stream. 
- bool send_key_frame = false; + bool is_keyframe_needed = false; if (frame_types) { - for (size_t i = 0; i < frame_types->size(); ++i) { - if (frame_types->at(i) == VideoFrameType::kVideoFrameKey) { - send_key_frame = true; + for (const auto& frame_type : *frame_types) { + if (frame_type == VideoFrameType::kVideoFrameKey) { + is_keyframe_needed = true; break; } } } - for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { - if (streaminfos_[stream_idx].key_frame_request && - streaminfos_[stream_idx].send_stream) { - send_key_frame = true; - break; + + if (!is_keyframe_needed) { + for (const auto& layer : stream_contexts_) { + if (layer.is_keyframe_needed()) { + is_keyframe_needed = true; + break; + } } } @@ -374,36 +484,34 @@ int SimulcastEncoderAdapter::Encode( rtc::scoped_refptr src_buffer; int src_width = input_image.width(); int src_height = input_image.height(); - for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { + + for (auto& layer : stream_contexts_) { // Don't encode frames in resolutions that we don't intend to send. - if (!streaminfos_[stream_idx].send_stream) { + if (layer.is_paused()) { continue; } - const uint32_t frame_timestamp_ms = - 1000 * input_image.timestamp() / 90000; // kVideoPayloadTypeFrequency; + // Convert timestamp from RTP 90kHz clock. + const Timestamp frame_timestamp = + Timestamp::Micros((1000 * input_image.timestamp()) / 90); // If adapter is passed through and only one sw encoder does simulcast, // frame types for all streams should be passed to the encoder unchanged. // Otherwise a single per-encoder frame type is passed. std::vector stream_frame_types( - streaminfos_.size() == 1 ? NumberOfStreams(codec_) : 1); - if (send_key_frame) { + bypass_mode_ ? 
total_streams_count_ : 1); + if (is_keyframe_needed) { std::fill(stream_frame_types.begin(), stream_frame_types.end(), VideoFrameType::kVideoFrameKey); - streaminfos_[stream_idx].key_frame_request = false; + layer.OnKeyframe(frame_timestamp); } else { - if (streaminfos_[stream_idx].framerate_controller->DropFrame( - frame_timestamp_ms)) { + if (layer.ShouldDropFrame(frame_timestamp)) { continue; } std::fill(stream_frame_types.begin(), stream_frame_types.end(), VideoFrameType::kVideoFrameDelta); } - streaminfos_[stream_idx].framerate_controller->AddFrame(frame_timestamp_ms); - int dst_width = streaminfos_[stream_idx].width; - int dst_height = streaminfos_[stream_idx].height; // If scaling isn't required, because the input resolution // matches the destination or the input image is empty (e.g. // a keyframe request for encoders with internal camera @@ -414,14 +522,11 @@ int SimulcastEncoderAdapter::Encode( // correctly sample/scale the source texture. // TODO(perkj): ensure that works going forward, and figure out how this // affects webrtc:5683. 
- if ((dst_width == src_width && dst_height == src_height) || + if ((layer.width() == src_width && layer.height() == src_height) || (input_image.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative && - streaminfos_[stream_idx] - .encoder->GetEncoderInfo() - .supports_native_handle)) { - int ret = streaminfos_[stream_idx].encoder->Encode(input_image, - &stream_frame_types); + layer.encoder().GetEncoderInfo().supports_native_handle)) { + int ret = layer.encoder().Encode(input_image, &stream_frame_types); if (ret != WEBRTC_VIDEO_CODEC_OK) { return ret; } @@ -430,7 +535,7 @@ int SimulcastEncoderAdapter::Encode( src_buffer = input_image.video_frame_buffer(); } rtc::scoped_refptr dst_buffer = - src_buffer->Scale(dst_width, dst_height); + src_buffer->Scale(layer.width(), layer.height()); if (!dst_buffer) { RTC_LOG(LS_ERROR) << "Failed to scale video frame"; return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; @@ -443,8 +548,7 @@ int SimulcastEncoderAdapter::Encode( frame.set_rotation(webrtc::kVideoRotation_0); frame.set_update_rect( VideoFrame::UpdateRect{0, 0, frame.width(), frame.height()}); - int ret = - streaminfos_[stream_idx].encoder->Encode(frame, &stream_frame_types); + int ret = layer.encoder().Encode(frame, &stream_frame_types); if (ret != WEBRTC_VIDEO_CODEC_OK) { return ret; } @@ -458,8 +562,10 @@ int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback( EncodedImageCallback* callback) { RTC_DCHECK_RUN_ON(&encoder_queue_); encoded_complete_callback_ = callback; - if (streaminfos_.size() == 1) { - streaminfos_[0].encoder->RegisterEncodeCompleteCallback(callback); + if (!stream_contexts_.empty() && stream_contexts_.front().stream_idx() == 0) { + // Bypass frame encode complete callback for the lowest layer since there is + // no need to override frame's spatial index. 
+ stream_contexts_.front().encoder().RegisterEncodeCompleteCallback(callback); } return WEBRTC_VIDEO_CODEC_OK; } @@ -480,21 +586,21 @@ void SimulcastEncoderAdapter::SetRates( codec_.maxFramerate = static_cast(parameters.framerate_fps + 0.5); - if (streaminfos_.size() == 1) { - // Not doing simulcast. - streaminfos_[0].encoder->SetRates(parameters); + if (bypass_mode_) { + stream_contexts_.front().encoder().SetRates(parameters); return; } - for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) { + for (StreamContext& layer_context : stream_contexts_) { + int stream_idx = layer_context.stream_idx(); uint32_t stream_bitrate_kbps = parameters.bitrate.GetSpatialLayerSum(stream_idx) / 1000; // Need a key frame if we have not sent this stream before. - if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) { - streaminfos_[stream_idx].key_frame_request = true; + if (stream_bitrate_kbps > 0 && layer_context.is_paused()) { + layer_context.set_is_keyframe_needed(); } - streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0; + layer_context.set_is_paused(stream_bitrate_kbps == 0); // Slice the temporal layers out of the full allocation and pass it on to // the encoder handling the current simulcast stream. 
@@ -524,28 +630,28 @@ void SimulcastEncoderAdapter::SetRates( stream_parameters.framerate_fps = std::min( parameters.framerate_fps, - streaminfos_[stream_idx].framerate_controller->GetTargetRate()); + layer_context.target_fps().value_or(parameters.framerate_fps)); - streaminfos_[stream_idx].encoder->SetRates(stream_parameters); + layer_context.encoder().SetRates(stream_parameters); } } void SimulcastEncoderAdapter::OnPacketLossRateUpdate(float packet_loss_rate) { - for (StreamInfo& info : streaminfos_) { - info.encoder->OnPacketLossRateUpdate(packet_loss_rate); + for (auto& c : stream_contexts_) { + c.encoder().OnPacketLossRateUpdate(packet_loss_rate); } } void SimulcastEncoderAdapter::OnRttUpdate(int64_t rtt_ms) { - for (StreamInfo& info : streaminfos_) { - info.encoder->OnRttUpdate(rtt_ms); + for (auto& c : stream_contexts_) { + c.encoder().OnRttUpdate(rtt_ms); } } void SimulcastEncoderAdapter::OnLossNotification( const LossNotification& loss_notification) { - for (StreamInfo& info : streaminfos_) { - info.encoder->OnLossNotification(loss_notification); + for (auto& c : stream_contexts_) { + c.encoder().OnLossNotification(loss_notification); } } @@ -564,59 +670,8 @@ EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( &stream_codec_specific); } -void SimulcastEncoderAdapter::PopulateStreamCodec( - const webrtc::VideoCodec& inst, - int stream_index, - uint32_t start_bitrate_kbps, - StreamResolution stream_resolution, - webrtc::VideoCodec* stream_codec) { - *stream_codec = inst; - - // Stream specific settings. 
- stream_codec->numberOfSimulcastStreams = 0; - stream_codec->width = inst.simulcastStream[stream_index].width; - stream_codec->height = inst.simulcastStream[stream_index].height; - stream_codec->maxBitrate = inst.simulcastStream[stream_index].maxBitrate; - stream_codec->minBitrate = inst.simulcastStream[stream_index].minBitrate; - stream_codec->maxFramerate = inst.simulcastStream[stream_index].maxFramerate; - stream_codec->qpMax = inst.simulcastStream[stream_index].qpMax; - stream_codec->active = inst.simulcastStream[stream_index].active; - // Settings that are based on stream/resolution. - if (stream_resolution == StreamResolution::LOWEST) { - // Settings for lowest spatial resolutions. - if (inst.mode == VideoCodecMode::kScreensharing) { - if (experimental_boosted_screenshare_qp_) { - stream_codec->qpMax = *experimental_boosted_screenshare_qp_; - } - } else if (boost_base_layer_quality_) { - stream_codec->qpMax = kLowestResMaxQp; - } - } - if (inst.codecType == webrtc::kVideoCodecVP8) { - stream_codec->VP8()->numberOfTemporalLayers = - inst.simulcastStream[stream_index].numberOfTemporalLayers; - if (stream_resolution != StreamResolution::HIGHEST) { - // For resolutions below CIF, set the codec |complexity| parameter to - // kComplexityHigher, which maps to cpu_used = -4. - int pixels_per_frame = stream_codec->width * stream_codec->height; - if (pixels_per_frame < 352 * 288) { - stream_codec->VP8()->complexity = - webrtc::VideoCodecComplexity::kComplexityHigher; - } - // Turn off denoising for all streams but the highest resolution. - stream_codec->VP8()->denoisingOn = false; - } - } else if (inst.codecType == webrtc::kVideoCodecH264) { - stream_codec->H264()->numberOfTemporalLayers = - inst.simulcastStream[stream_index].numberOfTemporalLayers; - } - // TODO(ronghuawu): what to do with targetBitrate. 
- - stream_codec->startBitrate = start_bitrate_kbps; - - // Legacy screenshare mode is only enabled for the first simulcast layer - stream_codec->legacy_conference_mode = - inst.legacy_conference_mode && stream_index == 0; +void SimulcastEncoderAdapter::OnDroppedFrame(size_t stream_idx) { + // Not yet implemented. } bool SimulcastEncoderAdapter::Initialized() const { @@ -624,15 +679,130 @@ bool SimulcastEncoderAdapter::Initialized() const { } void SimulcastEncoderAdapter::DestroyStoredEncoders() { - while (!stored_encoders_.empty()) { - stored_encoders_.pop(); + while (!cached_encoder_contexts_.empty()) { + cached_encoder_contexts_.pop_back(); + } +} + +std::unique_ptr +SimulcastEncoderAdapter::FetchOrCreateEncoderContext( + bool is_lowest_quality_stream) { + bool prefer_temporal_support = fallback_encoder_factory_ != nullptr && + is_lowest_quality_stream && + prefer_temporal_support_on_base_layer_; + + // Toggling of |prefer_temporal_support| requires encoder recreation. Find + // and reuse encoder with desired |prefer_temporal_support|. Otherwise, if + // there is no such encoder in the cache, create a new instance. 
+ auto encoder_context_iter = + std::find_if(cached_encoder_contexts_.begin(), + cached_encoder_contexts_.end(), [&](auto& encoder_context) { + return encoder_context->prefer_temporal_support() == + prefer_temporal_support; + }); + + std::unique_ptr encoder_context; + if (encoder_context_iter != cached_encoder_contexts_.end()) { + encoder_context = std::move(*encoder_context_iter); + cached_encoder_contexts_.erase(encoder_context_iter); + } else { + std::unique_ptr encoder = + primary_encoder_factory_->CreateVideoEncoder(video_format_); + if (fallback_encoder_factory_ != nullptr) { + encoder = CreateVideoEncoderSoftwareFallbackWrapper( + fallback_encoder_factory_->CreateVideoEncoder(video_format_), + std::move(encoder), prefer_temporal_support); + } + + encoder_context = std::make_unique( + std::move(encoder), prefer_temporal_support); + } + + encoder_context->encoder().RegisterEncodeCompleteCallback( + encoded_complete_callback_); + return encoder_context; +} + +webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec( + const webrtc::VideoCodec& codec, + int stream_idx, + uint32_t start_bitrate_kbps, + bool is_lowest_quality_stream, + bool is_highest_quality_stream) { + webrtc::VideoCodec codec_params = codec; + const SpatialLayer& stream_params = codec.simulcastStream[stream_idx]; + + codec_params.numberOfSimulcastStreams = 0; + codec_params.width = stream_params.width; + codec_params.height = stream_params.height; + codec_params.maxBitrate = stream_params.maxBitrate; + codec_params.minBitrate = stream_params.minBitrate; + codec_params.maxFramerate = stream_params.maxFramerate; + codec_params.qpMax = stream_params.qpMax; + codec_params.active = stream_params.active; + // Settings that are based on stream/resolution. + if (is_lowest_quality_stream) { + // Settings for lowest spatial resolutions. 
+ if (codec.mode == VideoCodecMode::kScreensharing) { + if (experimental_boosted_screenshare_qp_) { + codec_params.qpMax = *experimental_boosted_screenshare_qp_; + } + } else if (boost_base_layer_quality_) { + codec_params.qpMax = kLowestResMaxQp; + } + } + if (codec.codecType == webrtc::kVideoCodecVP8) { + codec_params.VP8()->numberOfTemporalLayers = + stream_params.numberOfTemporalLayers; + if (!is_highest_quality_stream) { + // For resolutions below CIF, set the codec |complexity| parameter to + // kComplexityHigher, which maps to cpu_used = -4. + int pixels_per_frame = codec_params.width * codec_params.height; + if (pixels_per_frame < 352 * 288) { + codec_params.VP8()->complexity = + webrtc::VideoCodecComplexity::kComplexityHigher; + } + // Turn off denoising for all streams but the highest resolution. + codec_params.VP8()->denoisingOn = false; + } + } else if (codec.codecType == webrtc::kVideoCodecH264) { + codec_params.H264()->numberOfTemporalLayers = + stream_params.numberOfTemporalLayers; + } + + // Cap start bitrate to the min bitrate in order to avoid strange codec + // behavior. 
+ codec_params.startBitrate = + std::max(stream_params.minBitrate, start_bitrate_kbps); + + // Legacy screenshare mode is only enabled for the first simulcast layer + codec_params.legacy_conference_mode = + codec.legacy_conference_mode && stream_idx == 0; + + return codec_params; +} + +void SimulcastEncoderAdapter::OverrideFromFieldTrial( + VideoEncoder::EncoderInfo* info) const { + if (encoder_info_override_.requested_resolution_alignment()) { + info->requested_resolution_alignment = + *encoder_info_override_.requested_resolution_alignment(); + info->apply_alignment_to_all_simulcast_layers = + encoder_info_override_.apply_alignment_to_all_simulcast_layers(); + } + if (!encoder_info_override_.resolution_bitrate_limits().empty()) { + info->resolution_bitrate_limits = + encoder_info_override_.resolution_bitrate_limits(); } } VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { - if (streaminfos_.size() == 1) { + if (stream_contexts_.size() == 1) { // Not using simulcast adapting functionality, just pass through. 
- return streaminfos_[0].encoder->GetEncoderInfo(); + VideoEncoder::EncoderInfo info = + stream_contexts_.front().encoder().GetEncoderInfo(); + OverrideFromFieldTrial(&info); + return info; } VideoEncoder::EncoderInfo encoder_info; @@ -641,16 +811,16 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { encoder_info.apply_alignment_to_all_simulcast_layers = false; encoder_info.supports_native_handle = true; encoder_info.scaling_settings.thresholds = absl::nullopt; - if (streaminfos_.empty()) { + if (stream_contexts_.empty()) { + OverrideFromFieldTrial(&encoder_info); return encoder_info; } encoder_info.scaling_settings = VideoEncoder::ScalingSettings::kOff; - int num_active_streams = NumActiveStreams(codec_); - for (size_t i = 0; i < streaminfos_.size(); ++i) { + for (size_t i = 0; i < stream_contexts_.size(); ++i) { VideoEncoder::EncoderInfo encoder_impl_info = - streaminfos_[i].encoder->GetEncoderInfo(); + stream_contexts_[i].encoder().GetEncoderInfo(); if (i == 0) { // Encoder name indicates names of all sub-encoders. 
@@ -693,12 +863,11 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { if (encoder_impl_info.apply_alignment_to_all_simulcast_layers) { encoder_info.apply_alignment_to_all_simulcast_layers = true; } - if (num_active_streams == 1 && codec_.simulcastStream[i].active) { - encoder_info.scaling_settings = encoder_impl_info.scaling_settings; - } } encoder_info.implementation_name += ")"; + OverrideFromFieldTrial(&encoder_info); + return encoder_info; } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h index 5b2c02757..2cb29edfd 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.h @@ -12,6 +12,7 @@ #ifndef MEDIA_ENGINE_SIMULCAST_ENCODER_ADAPTER_H_ #define MEDIA_ENGINE_SIMULCAST_ENCODER_ADAPTER_H_ +#include #include #include #include @@ -20,19 +21,19 @@ #include "absl/types/optional.h" #include "api/fec_controller_override.h" +#include "api/sequence_checker.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/utility/framerate_controller.h" #include "rtc_base/atomic_ops.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/experiments/encoder_info_settings.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { -class SimulcastRateAllocator; -class VideoEncoderFactory; - // SimulcastEncoderAdapter implements simulcast support by creating multiple // webrtc::VideoEncoder instances with the given VideoEncoderFactory. 
// The object is created and destroyed on the worker thread, but all public @@ -64,75 +65,119 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { void OnRttUpdate(int64_t rtt_ms) override; void OnLossNotification(const LossNotification& loss_notification) override; - // Eventual handler for the contained encoders' EncodedImageCallbacks, but - // called from an internal helper that also knows the correct stream - // index. + EncoderInfo GetEncoderInfo() const override; + + private: + class EncoderContext { + public: + EncoderContext(std::unique_ptr encoder, + bool prefer_temporal_support); + EncoderContext& operator=(EncoderContext&&) = delete; + + VideoEncoder& encoder() { return *encoder_; } + bool prefer_temporal_support() { return prefer_temporal_support_; } + void Release(); + + private: + std::unique_ptr encoder_; + bool prefer_temporal_support_; + }; + + class StreamContext : public EncodedImageCallback { + public: + StreamContext(SimulcastEncoderAdapter* parent, + std::unique_ptr encoder_context, + std::unique_ptr framerate_controller, + int stream_idx, + uint16_t width, + uint16_t height, + bool send_stream); + StreamContext(StreamContext&& rhs); + StreamContext& operator=(StreamContext&&) = delete; + ~StreamContext() override; + + Result OnEncodedImage( + const EncodedImage& encoded_image, + const CodecSpecificInfo* codec_specific_info) override; + void OnDroppedFrame(DropReason reason) override; + + VideoEncoder& encoder() { return encoder_context_->encoder(); } + const VideoEncoder& encoder() const { return encoder_context_->encoder(); } + int stream_idx() const { return stream_idx_; } + uint16_t width() const { return width_; } + uint16_t height() const { return height_; } + bool is_keyframe_needed() const { + return !is_paused_ && is_keyframe_needed_; + } + void set_is_keyframe_needed() { is_keyframe_needed_ = true; } + bool is_paused() const { return is_paused_; } + void set_is_paused(bool is_paused) { is_paused_ = is_paused; } + 
absl::optional target_fps() const { + return framerate_controller_ == nullptr + ? absl::nullopt + : absl::optional( + framerate_controller_->GetTargetRate()); + } + + std::unique_ptr ReleaseEncoderContext() &&; + void OnKeyframe(Timestamp timestamp); + bool ShouldDropFrame(Timestamp timestamp); + + private: + SimulcastEncoderAdapter* const parent_; + std::unique_ptr encoder_context_; + std::unique_ptr framerate_controller_; + const int stream_idx_; + const uint16_t width_; + const uint16_t height_; + bool is_keyframe_needed_; + bool is_paused_; + }; + + bool Initialized() const; + + void DestroyStoredEncoders(); + + std::unique_ptr FetchOrCreateEncoderContext( + bool is_lowest_quality_stream); + + webrtc::VideoCodec MakeStreamCodec(const webrtc::VideoCodec& codec, + int stream_idx, + uint32_t start_bitrate_kbps, + bool is_lowest_quality_stream, + bool is_highest_quality_stream); + EncodedImageCallback::Result OnEncodedImage( size_t stream_idx, const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info); - EncoderInfo GetEncoderInfo() const override; + void OnDroppedFrame(size_t stream_idx); - private: - struct StreamInfo { - StreamInfo(std::unique_ptr encoder, - std::unique_ptr callback, - std::unique_ptr framerate_controller, - uint16_t width, - uint16_t height, - bool send_stream) - : encoder(std::move(encoder)), - callback(std::move(callback)), - framerate_controller(std::move(framerate_controller)), - width(width), - height(height), - key_frame_request(false), - send_stream(send_stream) {} - std::unique_ptr encoder; - std::unique_ptr callback; - std::unique_ptr framerate_controller; - uint16_t width; - uint16_t height; - bool key_frame_request; - bool send_stream; - }; - - enum class StreamResolution { - OTHER, - HIGHEST, - LOWEST, - }; - - // Populate the codec settings for each simulcast stream. 
- void PopulateStreamCodec(const webrtc::VideoCodec& inst, - int stream_index, - uint32_t start_bitrate_kbps, - StreamResolution stream_resolution, - webrtc::VideoCodec* stream_codec); - - bool Initialized() const; - - void DestroyStoredEncoders(); + void OverrideFromFieldTrial(VideoEncoder::EncoderInfo* info) const; volatile int inited_; // Accessed atomically. VideoEncoderFactory* const primary_encoder_factory_; VideoEncoderFactory* const fallback_encoder_factory_; const SdpVideoFormat video_format_; VideoCodec codec_; - std::vector streaminfos_; + int total_streams_count_; + bool bypass_mode_; + std::vector stream_contexts_; EncodedImageCallback* encoded_complete_callback_; // Used for checking the single-threaded access of the encoder interface. - SequenceChecker encoder_queue_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker encoder_queue_; // Store encoders in between calls to Release and InitEncode, so they don't // have to be recreated. Remaining encoders are destroyed by the destructor. 
- std::stack> stored_encoders_; + std::list> cached_encoder_contexts_; const absl::optional experimental_boosted_screenshare_qp_; const bool boost_base_layer_quality_; const bool prefer_temporal_support_on_base_layer_; + + const SimulcastEncoderAdapterEncoderInfoSettings encoder_info_override_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc index 17176512c..7ac666ec9 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.cc @@ -39,7 +39,6 @@ std::unique_ptr CreateMediaEngine( std::move(dependencies.audio_decoder_factory), std::move(dependencies.audio_mixer), std::move(dependencies.audio_processing), - std::move(dependencies.onUnknownAudioSsrc), dependencies.audio_frame_processor, trials); #ifdef HAVE_WEBRTC_VIDEO auto video_engine = std::make_unique( diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h index 2bfd6b29b..34ec4cdc9 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_media_engine.h @@ -22,6 +22,7 @@ #include "api/rtp_parameters.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" +#include "api/transport/field_trial_based_config.h" #include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" @@ -52,8 +53,6 @@ struct MediaEngineDependencies { std::unique_ptr video_encoder_factory; std::unique_ptr video_decoder_factory; - std::function onUnknownAudioSsrc = nullptr; - const webrtc::WebRtcKeyValueConfig* trials = nullptr; }; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc 
b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc index 8a916c4c7..0bf4f2063 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc @@ -39,7 +39,6 @@ #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -67,6 +66,11 @@ bool IsEnabled(const webrtc::WebRtcKeyValueConfig& trials, return absl::StartsWith(trials.Lookup(name), "Enabled"); } +bool IsDisabled(const webrtc::WebRtcKeyValueConfig& trials, + absl::string_view name) { + return absl::StartsWith(trials.Lookup(name), "Disabled"); +} + bool PowerOfTwo(int value) { return (value > 0) && ((value & (value - 1)) == 0); } @@ -105,62 +109,11 @@ void AddDefaultFeedbackParams(VideoCodec* codec, // the input codecs, and also add ULPFEC, RED, FlexFEC, and associated RTX // codecs for recognized codecs (VP8, VP9, H264, and RED). It will also add // default feedback params to the codecs. -std::vector AssignPayloadTypesAndDefaultCodecs( - std::vector input_formats, - const webrtc::WebRtcKeyValueConfig& trials) { - if (input_formats.empty()) - return std::vector(); - static const int kFirstDynamicPayloadType = 96; - static const int kLastDynamicPayloadType = 127; - int payload_type = kFirstDynamicPayloadType; - - input_formats.push_back(webrtc::SdpVideoFormat(kRedCodecName)); - input_formats.push_back(webrtc::SdpVideoFormat(kUlpfecCodecName)); - - if (IsEnabled(trials, "WebRTC-FlexFEC-03-Advertised")) { - webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName); - // This value is currently arbitrarily set to 10 seconds. (The unit - // is microseconds.) This parameter MUST be present in the SDP, but - // we never use the actual value anywhere in our code however. - // TODO(brandtr): Consider honouring this value in the sender and receiver. 
- flexfec_format.parameters = {{kFlexfecFmtpRepairWindow, "10000000"}}; - input_formats.push_back(flexfec_format); - } - - std::vector output_codecs; - for (const webrtc::SdpVideoFormat& format : input_formats) { - VideoCodec codec(format); - codec.id = payload_type; - AddDefaultFeedbackParams(&codec, trials); - output_codecs.push_back(codec); - - // Increment payload type. - ++payload_type; - if (payload_type > kLastDynamicPayloadType) { - RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest."; - break; - } - - // Add associated RTX codec for non-FEC codecs. - if (!absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName) && - !absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName)) { - output_codecs.push_back( - VideoCodec::CreateRtxCodec(payload_type, codec.id)); - - // Increment payload type. - ++payload_type; - if (payload_type > kLastDynamicPayloadType) { - RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest."; - break; - } - } - } - return output_codecs; -} - // is_decoder_factory is needed to keep track of the implict assumption that any // H264 decoder also supports constrained base line profile. -// TODO(kron): Perhaps it better to move the implcit knowledge to the place +// Also, is_decoder_factory is used to decide whether FlexFEC video format +// should be advertised as supported. +// TODO(kron): Perhaps it is better to move the implicit knowledge to the place // where codecs are negotiated. template std::vector GetPayloadTypesAndDefaultCodecs( @@ -177,8 +130,95 @@ std::vector GetPayloadTypesAndDefaultCodecs( AddH264ConstrainedBaselineProfileToSupportedFormats(&supported_formats); } - return AssignPayloadTypesAndDefaultCodecs(std::move(supported_formats), - trials); + if (supported_formats.empty()) + return std::vector(); + + // Due to interoperability issues with old Chrome/WebRTC versions only use + // the lower range for new codecs. 
+ static const int kFirstDynamicPayloadTypeLowerRange = 35; + static const int kLastDynamicPayloadTypeLowerRange = 65; + + static const int kFirstDynamicPayloadTypeUpperRange = 96; + static const int kLastDynamicPayloadTypeUpperRange = 127; + int payload_type_upper = kFirstDynamicPayloadTypeUpperRange; + int payload_type_lower = kFirstDynamicPayloadTypeLowerRange; + + supported_formats.push_back(webrtc::SdpVideoFormat(kRedCodecName)); + supported_formats.push_back(webrtc::SdpVideoFormat(kUlpfecCodecName)); + + // flexfec-03 is supported as + // - receive codec unless WebRTC-FlexFEC-03-Advertised is disabled + // - send codec if WebRTC-FlexFEC-03-Advertised is enabled + if ((is_decoder_factory && + !IsDisabled(trials, "WebRTC-FlexFEC-03-Advertised")) || + (!is_decoder_factory && + IsEnabled(trials, "WebRTC-FlexFEC-03-Advertised"))) { + webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName); + // This value is currently arbitrarily set to 10 seconds. (The unit + // is microseconds.) This parameter MUST be present in the SDP, but + // we never use the actual value anywhere in our code however. + // TODO(brandtr): Consider honouring this value in the sender and receiver. 
+ flexfec_format.parameters = {{kFlexfecFmtpRepairWindow, "10000000"}}; + supported_formats.push_back(flexfec_format); + } + + std::vector output_codecs; + for (const webrtc::SdpVideoFormat& format : supported_formats) { + VideoCodec codec(format); + bool isCodecValidForLowerRange = + absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName) || + absl::EqualsIgnoreCase(codec.name, kAv1CodecName); + if (!isCodecValidForLowerRange) { + codec.id = payload_type_upper++; + } else { + codec.id = payload_type_lower++; + } + AddDefaultFeedbackParams(&codec, trials); + output_codecs.push_back(codec); + + if (payload_type_upper > kLastDynamicPayloadTypeUpperRange) { + RTC_LOG(LS_ERROR) + << "Out of dynamic payload types [96,127], skipping the rest."; + // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12194): + // continue in lower range. + break; + } + if (payload_type_lower > kLastDynamicPayloadTypeLowerRange) { + // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12248): + // return an error. + RTC_LOG(LS_ERROR) + << "Out of dynamic payload types [35,65], skipping the rest."; + break; + } + + // Add associated RTX codec for non-FEC codecs. + if (!absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName) && + !absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName)) { + if (!isCodecValidForLowerRange) { + output_codecs.push_back( + VideoCodec::CreateRtxCodec(payload_type_upper++, codec.id)); + } else { + output_codecs.push_back( + VideoCodec::CreateRtxCodec(payload_type_lower++, codec.id)); + } + + if (payload_type_upper > kLastDynamicPayloadTypeUpperRange) { + RTC_LOG(LS_ERROR) + << "Out of dynamic payload types [96,127], skipping rtx."; + // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12194): + // continue in lower range. + break; + } + if (payload_type_lower > kLastDynamicPayloadTypeLowerRange) { + // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12248): + // return an error. 
+ RTC_LOG(LS_ERROR) + << "Out of dynamic payload types [35,65], skipping rtx."; + break; + } + } + } + return output_codecs; } bool IsTemporalLayersSupported(const std::string& codec_name) { @@ -461,7 +501,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( webrtc::VideoCodecH264 h264_settings = webrtc::VideoEncoder::GetDefaultH264Settings(); h264_settings.frameDroppingOn = frame_dropping; - return new rtc::RefCountedObject< + return rtc::make_ref_counted< webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings); } if (absl::EqualsIgnoreCase(codec.name, kVp8CodecName)) { @@ -471,7 +511,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( // VP8 denoising is enabled by default. vp8_settings.denoisingOn = codec_default_denoising ? true : denoising; vp8_settings.frameDroppingOn = frame_dropping; - return new rtc::RefCountedObject< + return rtc::make_ref_counted< webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); } if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) { @@ -521,7 +561,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( vp9_settings.flexibleMode = vp9_settings.numberOfSpatialLayers > 1; vp9_settings.interLayerPred = webrtc::InterLayerPredMode::kOn; } - return new rtc::RefCountedObject< + return rtc::make_ref_counted< webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); } return nullptr; @@ -585,11 +625,11 @@ WebRtcVideoEngine::WebRtcVideoEngine( : decoder_factory_(std::move(video_decoder_factory)), encoder_factory_(std::move(video_encoder_factory)), trials_(trials) { - RTC_LOG(LS_INFO) << "WebRtcVideoEngine::WebRtcVideoEngine()"; + RTC_DLOG(LS_INFO) << "WebRtcVideoEngine::WebRtcVideoEngine()"; } WebRtcVideoEngine::~WebRtcVideoEngine() { - RTC_LOG(LS_INFO) << "WebRtcVideoEngine::~WebRtcVideoEngine"; + RTC_DLOG(LS_INFO) << "WebRtcVideoEngine::~WebRtcVideoEngine"; } VideoMediaChannel* WebRtcVideoEngine::CreateMediaChannel( 
@@ -645,6 +685,12 @@ WebRtcVideoEngine::GetRtpHeaderExtensions() const { ? webrtc::RtpTransceiverDirection::kSendRecv : webrtc::RtpTransceiverDirection::kStopped); + result.emplace_back( + webrtc::RtpExtension::kVideoFrameTrackingIdUri, id++, + IsEnabled(trials_, "WebRTC-VideoFrameTrackingIdAdvertised") + ? webrtc::RtpTransceiverDirection::kSendRecv + : webrtc::RtpTransceiverDirection::kStopped); + return result; } @@ -656,8 +702,8 @@ WebRtcVideoChannel::WebRtcVideoChannel( webrtc::VideoEncoderFactory* encoder_factory, webrtc::VideoDecoderFactory* decoder_factory, webrtc::VideoBitrateAllocatorFactory* bitrate_allocator_factory) - : VideoMediaChannel(config), - worker_thread_(rtc::Thread::Current()), + : VideoMediaChannel(config, call->network_thread()), + worker_thread_(call->worker_thread()), call_(call), unsignalled_ssrc_handler_(&default_unsignalled_ssrc_handler_), video_config_(config.video), @@ -675,7 +721,8 @@ WebRtcVideoChannel::WebRtcVideoChannel( "WebRTC-Video-BufferPacketsWithUnknownSsrc") ? new UnhandledPacketsBuffer() : nullptr) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&thread_checker_); + network_thread_checker_.Detach(); rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc; sending_ = false; @@ -711,8 +758,8 @@ WebRtcVideoChannel::SelectSendVideoCodecs( // following the spec in https://tools.ietf.org/html/rfc6184#section-8.2.2 // since we should limit the encode level to the lower of local and remote // level when level asymmetry is not allowed. - if (IsSameCodec(format_it->name, format_it->parameters, - remote_codec.codec.name, remote_codec.codec.params)) { + if (format_it->IsSameCodec( + {remote_codec.codec.name, remote_codec.codec.params})) { encoders.push_back(remote_codec); // To allow the VideoEncoderFactory to keep information about which @@ -775,7 +822,6 @@ bool WebRtcVideoChannel::GetChangedSendParameters( // Never enable sending FlexFEC, unless we are in the experiment. 
if (!IsEnabled(call_->trials(), "WebRTC-FlexFEC-03")) { - RTC_LOG(LS_INFO) << "WebRTC-FlexFEC-03 field trial is not enabled."; for (VideoCodecSettings& codec : negotiated_codecs) codec.flexfec_payload_type = -1; } @@ -907,8 +953,8 @@ void WebRtcVideoChannel::RequestEncoderSwitch( RTC_DCHECK_RUN_ON(&thread_checker_); for (const VideoCodecSettings& codec_setting : negotiated_codecs_) { - if (IsSameCodec(format.name, format.parameters, codec_setting.codec.name, - codec_setting.codec.params)) { + if (format.IsSameCodec( + {codec_setting.codec.name, codec_setting.codec.params})) { VideoCodecSettings new_codec_setting = codec_setting; for (const auto& kv : format.parameters) { new_codec_setting.codec.params[kv.first] = kv.second; @@ -991,7 +1037,7 @@ bool WebRtcVideoChannel::ApplyChangedParams( if (changed_params.send_codec || changed_params.rtcp_mode) { // Update receive feedback parameters from new codec or RTCP mode. RTC_LOG(LS_INFO) - << "SetFeedbackOptions on all the receive streams because the send " + << "SetFeedbackParameters on all the receive streams because the send " "codec or RTCP mode has changed."; for (auto& kv : receive_streams_) { RTC_DCHECK(kv.second != nullptr); @@ -999,7 +1045,8 @@ bool WebRtcVideoChannel::ApplyChangedParams( HasLntf(send_codec_->codec), HasNack(send_codec_->codec), HasTransportCc(send_codec_->codec), send_params_.rtcp.reduced_size ? 
webrtc::RtcpMode::kReducedSize - : webrtc::RtcpMode::kCompound); + : webrtc::RtcpMode::kCompound, + send_codec_->rtx_time); } } return true; @@ -1127,7 +1174,8 @@ bool WebRtcVideoChannel::GetChangedRecvParameters( const std::vector mapped_codecs = MapCodecs(params.codecs); if (mapped_codecs.empty()) { - RTC_LOG(LS_ERROR) << "SetRecvParameters called without any video codecs."; + RTC_LOG(LS_ERROR) + << "GetChangedRecvParameters called without any video codecs."; return false; } @@ -1140,7 +1188,7 @@ bool WebRtcVideoChannel::GetChangedRecvParameters( for (const VideoCodecSettings& mapped_codec : mapped_codecs) { if (!FindMatchingCodec(local_supported_codecs, mapped_codec.codec)) { RTC_LOG(LS_ERROR) - << "SetRecvParameters called with unsupported video codec: " + << "GetChangedRecvParameters called with unsupported video codec: " << mapped_codec.codec.ToString(); return false; } @@ -1172,25 +1220,25 @@ bool WebRtcVideoChannel::GetChangedRecvParameters( bool WebRtcVideoChannel::SetRecvParameters(const VideoRecvParameters& params) { RTC_DCHECK_RUN_ON(&thread_checker_); TRACE_EVENT0("webrtc", "WebRtcVideoChannel::SetRecvParameters"); - RTC_LOG(LS_INFO) << "SetRecvParameters: " << params.ToString(); + RTC_DLOG(LS_INFO) << "SetRecvParameters: " << params.ToString(); ChangedRecvParameters changed_params; if (!GetChangedRecvParameters(params, &changed_params)) { return false; } if (changed_params.flexfec_payload_type) { - RTC_LOG(LS_INFO) << "Changing FlexFEC payload type (recv) from " - << recv_flexfec_payload_type_ << " to " - << *changed_params.flexfec_payload_type; + RTC_DLOG(LS_INFO) << "Changing FlexFEC payload type (recv) from " + << recv_flexfec_payload_type_ << " to " + << *changed_params.flexfec_payload_type; recv_flexfec_payload_type_ = *changed_params.flexfec_payload_type; } if (changed_params.rtp_header_extensions) { recv_rtp_extensions_ = *changed_params.rtp_header_extensions; } if (changed_params.codec_settings) { - RTC_LOG(LS_INFO) << "Changing recv codecs 
from " - << CodecSettingsVectorToString(recv_codecs_) << " to " - << CodecSettingsVectorToString( - *changed_params.codec_settings); + RTC_DLOG(LS_INFO) << "Changing recv codecs from " + << CodecSettingsVectorToString(recv_codecs_) << " to " + << CodecSettingsVectorToString( + *changed_params.codec_settings); recv_codecs_ = *changed_params.codec_settings; } @@ -1309,21 +1357,6 @@ bool WebRtcVideoChannel::AddSendStream(const StreamParams& sp) { video_config_.periodic_alr_bandwidth_probing; config.encoder_settings.experiment_cpu_load_estimator = video_config_.experiment_cpu_load_estimator; - using TargetBitrateType = - webrtc::VideoStreamEncoderSettings::BitrateAllocationCallbackType; - if (send_rtp_extensions_ && - webrtc::RtpExtension::FindHeaderExtensionByUri( - *send_rtp_extensions_, - webrtc::RtpExtension::kVideoLayersAllocationUri)) { - config.encoder_settings.allocation_cb_type = - TargetBitrateType::kVideoLayersAllocation; - } else if (IsEnabled(call_->trials(), "WebRTC-Target-Bitrate-Rtcp")) { - config.encoder_settings.allocation_cb_type = - TargetBitrateType::kVideoBitrateAllocation; - } else { - config.encoder_settings.allocation_cb_type = - TargetBitrateType::kVideoBitrateAllocationWhenScreenSharing; - } config.encoder_settings.encoder_factory = encoder_factory_; config.encoder_settings.bitrate_allocator_factory = bitrate_allocator_factory_; @@ -1490,6 +1523,12 @@ void WebRtcVideoChannel::ConfigureReceiverRtp( ? webrtc::RtcpMode::kReducedSize : webrtc::RtcpMode::kCompound; + // rtx-time (RFC 4588) is a declarative attribute similar to rtcp-rsize and + // determined by the sender / send codec. + if (send_codec_ && send_codec_->rtx_time != -1) { + config->rtp.nack.rtp_history_ms = send_codec_->rtx_time; + } + config->rtp.transport_cc = send_codec_ ? HasTransportCc(send_codec_->codec) : false; @@ -1499,7 +1538,7 @@ void WebRtcVideoChannel::ConfigureReceiverRtp( // TODO(brandtr): Generalize when we add support for multistream protection. 
flexfec_config->payload_type = recv_flexfec_payload_type_; - if (IsEnabled(call_->trials(), "WebRTC-FlexFEC-03-Advertised") && + if (!IsDisabled(call_->trials(), "WebRTC-FlexFEC-03-Advertised") && sp.GetFecFrSsrc(ssrc, &flexfec_config->remote_ssrc)) { flexfec_config->protected_media_ssrcs = {ssrc}; flexfec_config->local_ssrc = config->rtp.local_ssrc; @@ -1547,6 +1586,19 @@ void WebRtcVideoChannel::ResetUnsignaledRecvStream() { } } +void WebRtcVideoChannel::OnDemuxerCriteriaUpdatePending() { + RTC_DCHECK_RUN_ON(&thread_checker_); + ++demuxer_criteria_id_; +} + +void WebRtcVideoChannel::OnDemuxerCriteriaUpdateComplete() { + RTC_DCHECK_RUN_ON(&network_thread_checker_); + worker_thread_->PostTask(ToQueuedTask(task_safety_, [this] { + RTC_DCHECK_RUN_ON(&thread_checker_); + ++demuxer_criteria_completed_id_; + })); +} + bool WebRtcVideoChannel::SetSink( uint32_t ssrc, rtc::VideoSinkInterface* sink) { @@ -1657,67 +1709,95 @@ void WebRtcVideoChannel::FillSendAndReceiveCodecStats( void WebRtcVideoChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) { - RTC_DCHECK_RUN_ON(&thread_checker_); - const webrtc::PacketReceiver::DeliveryStatus delivery_result = - call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO, packet, - packet_time_us); - switch (delivery_result) { - case webrtc::PacketReceiver::DELIVERY_OK: - return; - case webrtc::PacketReceiver::DELIVERY_PACKET_ERROR: - return; - case webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC: - break; - } + RTC_DCHECK_RUN_ON(&network_thread_checker_); + // TODO(bugs.webrtc.org/11993): This code is very similar to what + // WebRtcVoiceMediaChannel::OnPacketReceived does. For maintainability and + // consistency it would be good to move the interaction with call_->Receiver() + // to a common implementation and provide a callback on the worker thread + // for the exception case (DELIVERY_UNKNOWN_SSRC) and how retry is attempted. 
+ worker_thread_->PostTask( + ToQueuedTask(task_safety_, [this, packet, packet_time_us] { + RTC_DCHECK_RUN_ON(&thread_checker_); + const webrtc::PacketReceiver::DeliveryStatus delivery_result = + call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO, packet, + packet_time_us); + switch (delivery_result) { + case webrtc::PacketReceiver::DELIVERY_OK: + return; + case webrtc::PacketReceiver::DELIVERY_PACKET_ERROR: + return; + case webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC: + break; + } - uint32_t ssrc = 0; - if (!GetRtpSsrc(packet.cdata(), packet.size(), &ssrc)) { - return; - } + uint32_t ssrc = 0; + if (!GetRtpSsrc(packet.cdata(), packet.size(), &ssrc)) { + return; + } - if (unknown_ssrc_packet_buffer_) { - unknown_ssrc_packet_buffer_->AddPacket(ssrc, packet_time_us, packet); - return; - } + if (unknown_ssrc_packet_buffer_) { + unknown_ssrc_packet_buffer_->AddPacket(ssrc, packet_time_us, packet); + return; + } - if (discard_unknown_ssrc_packets_) { - return; - } + if (discard_unknown_ssrc_packets_) { + return; + } - int payload_type = 0; - if (!GetRtpPayloadType(packet.cdata(), packet.size(), &payload_type)) { - return; - } + int payload_type = 0; + if (!GetRtpPayloadType(packet.cdata(), packet.size(), &payload_type)) { + return; + } - // See if this payload_type is registered as one that usually gets its own - // SSRC (RTX) or at least is safe to drop either way (FEC). If it is, and - // it wasn't handled above by DeliverPacket, that means we don't know what - // stream it associates with, and we shouldn't ever create an implicit channel - // for these. 
- for (auto& codec : recv_codecs_) { - if (payload_type == codec.rtx_payload_type || - payload_type == codec.ulpfec.red_rtx_payload_type || - payload_type == codec.ulpfec.ulpfec_payload_type) { - return; - } - } - if (payload_type == recv_flexfec_payload_type_) { - return; - } + // See if this payload_type is registered as one that usually gets its + // own SSRC (RTX) or at least is safe to drop either way (FEC). If it + // is, and it wasn't handled above by DeliverPacket, that means we don't + // know what stream it associates with, and we shouldn't ever create an + // implicit channel for these. + for (auto& codec : recv_codecs_) { + if (payload_type == codec.rtx_payload_type || + payload_type == codec.ulpfec.red_rtx_payload_type || + payload_type == codec.ulpfec.ulpfec_payload_type) { + return; + } + } + if (payload_type == recv_flexfec_payload_type_) { + return; + } - switch (unsignalled_ssrc_handler_->OnUnsignalledSsrc(this, ssrc)) { - case UnsignalledSsrcHandler::kDropPacket: - return; - case UnsignalledSsrcHandler::kDeliverPacket: - break; - } + // Ignore unknown ssrcs if there is a demuxer criteria update pending. + // During a demuxer update we may receive ssrcs that were recently + // removed or we may receve ssrcs that were recently configured for a + // different video channel. 
+ if (demuxer_criteria_id_ != demuxer_criteria_completed_id_) { + return; + } - if (call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO, packet, - packet_time_us) != - webrtc::PacketReceiver::DELIVERY_OK) { - RTC_LOG(LS_WARNING) << "Failed to deliver RTP packet on re-delivery."; - return; - } + switch (unsignalled_ssrc_handler_->OnUnsignalledSsrc(this, ssrc)) { + case UnsignalledSsrcHandler::kDropPacket: + return; + case UnsignalledSsrcHandler::kDeliverPacket: + break; + } + + if (call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO, packet, + packet_time_us) != + webrtc::PacketReceiver::DELIVERY_OK) { + RTC_LOG(LS_WARNING) << "Failed to deliver RTP packet on re-delivery."; + } + })); +} + +void WebRtcVideoChannel::OnPacketSent(const rtc::SentPacket& sent_packet) { + RTC_DCHECK_RUN_ON(&network_thread_checker_); + // TODO(tommi): We shouldn't need to go through call_ to deliver this + // notification. We should already have direct access to + // video_send_delay_stats_ and transport_send_ptr_ via `stream_`. + // So we should be able to remove OnSentPacket from Call and handle this per + // channel instead. At the moment Call::OnSentPacket calls OnSentPacket for + // the video stats, for all sent packets, including audio, which causes + // unnecessary lookups. + call_->OnSentPacket(sent_packet); } void WebRtcVideoChannel::BackfillBufferedPackets( @@ -1767,7 +1847,7 @@ void WebRtcVideoChannel::BackfillBufferedPackets( } void WebRtcVideoChannel::OnReadyToSend(bool ready) { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(&network_thread_checker_); RTC_LOG(LS_VERBOSE) << "OnReadyToSend: " << (ready ? "Ready." 
: "Not ready."); call_->SignalChannelNetworkState( webrtc::MediaType::VIDEO, @@ -1777,15 +1857,19 @@ void WebRtcVideoChannel::OnReadyToSend(bool ready) { void WebRtcVideoChannel::OnNetworkRouteChanged( const std::string& transport_name, const rtc::NetworkRoute& network_route) { - RTC_DCHECK_RUN_ON(&thread_checker_); - call_->GetTransportControllerSend()->OnNetworkRouteChanged(transport_name, - network_route); - call_->GetTransportControllerSend()->OnTransportOverheadChanged( - network_route.packet_overhead); + RTC_DCHECK_RUN_ON(&network_thread_checker_); + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [this, name = transport_name, route = network_route] { + RTC_DCHECK_RUN_ON(&thread_checker_); + webrtc::RtpTransportControllerSendInterface* transport = + call_->GetTransportControllerSend(); + transport->OnNetworkRouteChanged(name, route); + transport->OnTransportOverheadChanged(route.packet_overhead); + })); } void WebRtcVideoChannel::SetInterface(NetworkInterface* iface) { - RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_RUN_ON(&network_thread_checker_); MediaChannel::SetInterface(iface); // Set the RTP recv/send buffer to a bigger size. 
@@ -1934,27 +2018,13 @@ std::vector WebRtcVideoChannel::GetSources( bool WebRtcVideoChannel::SendRtp(const uint8_t* data, size_t len, const webrtc::PacketOptions& options) { - rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen); - rtc::PacketOptions rtc_options; - rtc_options.packet_id = options.packet_id; - if (DscpEnabled()) { - rtc_options.dscp = PreferredDscp(); - } - rtc_options.info_signaled_after_sent.included_in_feedback = - options.included_in_feedback; - rtc_options.info_signaled_after_sent.included_in_allocation = - options.included_in_allocation; - return MediaChannel::SendPacket(&packet, rtc_options); + MediaChannel::SendRtp(data, len, options); + return true; } bool WebRtcVideoChannel::SendRtcp(const uint8_t* data, size_t len) { - rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen); - rtc::PacketOptions rtc_options; - if (DscpEnabled()) { - rtc_options.dscp = PreferredDscp(); - } - - return MediaChannel::SendRtcp(&packet, rtc_options); + MediaChannel::SendRtcp(data, len); + return true; } WebRtcVideoChannel::WebRtcVideoSendStream::VideoSendStreamParameters:: @@ -1981,7 +2051,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::WebRtcVideoSendStream( // TODO(deadbeef): Don't duplicate information between send_params, // rtp_extensions, options, etc. const VideoSendParameters& send_params) - : worker_thread_(rtc::Thread::Current()), + : worker_thread_(call->worker_thread()), ssrcs_(sp.ssrcs), ssrc_groups_(sp.ssrc_groups), call_(call), @@ -2270,6 +2340,9 @@ webrtc::RTCError WebRtcVideoChannel::WebRtcVideoSendStream::SetRtpParameters( // TODO(bugs.webrtc.org/8807): The active field as well should not require // a full encoder reconfiguration, but it needs to update both the bitrate // allocator and the video bitrate allocator. + // + // Note that the simulcast encoder adapter relies on the fact that layers + // de/activation triggers encoder reinitialization. 
bool new_send_state = false; for (size_t i = 0; i < rtp_parameters_.encodings.size(); ++i) { bool new_active = IsLayerActive(new_parameters.encodings[i]); @@ -2450,11 +2523,17 @@ WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( encoder_config.legacy_conference_mode = parameters_.conference_mode; + encoder_config.is_quality_scaling_allowed = + !disable_automatic_resize_ && !is_screencast && + (parameters_.config.rtp.ssrcs.size() == 1 || + NumActiveStreams(rtp_parameters_) == 1); + int max_qp = kDefaultQpMax; codec.GetParam(kCodecParamMaxQuantization, &max_qp); encoder_config.video_stream_factory = - new rtc::RefCountedObject( + rtc::make_ref_counted( codec.name, max_qp, is_screencast, parameters_.conference_mode); + return encoder_config; } @@ -2532,6 +2611,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( stats.quality_limitation_resolution_changes; common_info.encoder_implementation_name = stats.encoder_implementation_name; common_info.ssrc_groups = ssrc_groups_; + common_info.frames = stats.frames; common_info.framerate_input = stats.input_frame_rate; common_info.avg_encode_ms = stats.avg_encode_time_ms; common_info.encode_usage_percent = stats.encode_usage_percent; @@ -2581,15 +2661,18 @@ WebRtcVideoChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( stream_stats.rtp_stats.retransmitted.payload_bytes; info.retransmitted_packets_sent = stream_stats.rtp_stats.retransmitted.packets; - info.packets_lost = stream_stats.rtcp_stats.packets_lost; info.firs_rcvd = stream_stats.rtcp_packet_type_counts.fir_packets; info.nacks_rcvd = stream_stats.rtcp_packet_type_counts.nack_packets; info.plis_rcvd = stream_stats.rtcp_packet_type_counts.pli_packets; if (stream_stats.report_block_data.has_value()) { - info.report_block_datas.push_back(stream_stats.report_block_data.value()); + info.packets_lost = + stream_stats.report_block_data->report_block().packets_lost; + info.fraction_lost = + static_cast( + 
stream_stats.report_block_data->report_block().fraction_lost) / + (1 << 8); + info.report_block_datas.push_back(*stream_stats.report_block_data); } - info.fraction_lost = - static_cast(stream_stats.rtcp_stats.fraction_lost) / (1 << 8); info.qp_sum = stream_stats.qp_sum; info.total_encode_time_ms = stream_stats.total_encode_time_ms; info.total_encoded_bytes_target = stream_stats.total_encoded_bytes_target; @@ -2740,17 +2823,14 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream( estimated_remote_start_ntp_time_ms_(0) { config_.renderer = this; ConfigureCodecs(recv_codecs); - ConfigureFlexfecCodec(flexfec_config.payload_type); - MaybeRecreateWebRtcFlexfecStream(); + flexfec_config_.payload_type = flexfec_config.payload_type; RecreateWebRtcVideoStream(); } WebRtcVideoChannel::WebRtcVideoReceiveStream::~WebRtcVideoReceiveStream() { - if (flexfec_stream_) { - MaybeDissociateFlexfecFromVideo(); - call_->DestroyFlexfecReceiveStream(flexfec_stream_); - } call_->DestroyVideoReceiveStream(stream_); + if (flexfec_stream_) + call_->DestroyFlexfecReceiveStream(flexfec_stream_); } const std::vector& @@ -2812,6 +2892,11 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureCodecs( config_.rtp.lntf.enabled = HasLntf(codec.codec); config_.rtp.nack.rtp_history_ms = HasNack(codec.codec) ? kNackHistoryMs : 0; + // The rtx-time parameter can be used to override the hardcoded default for + // the NACK buffer length. 
+ if (codec.rtx_time != -1 && config_.rtp.nack.rtp_history_ms != 0) { + config_.rtp.nack.rtp_history_ms = codec.rtx_time; + } config_.rtp.rtcp_xr.receiver_reference_time_report = HasRrtr(codec.codec); if (codec.ulpfec.red_rtx_payload_type != -1) { config_.rtp @@ -2820,11 +2905,6 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureCodecs( } } -void WebRtcVideoChannel::WebRtcVideoReceiveStream::ConfigureFlexfecCodec( - int flexfec_payload_type) { - flexfec_config_.payload_type = flexfec_payload_type; -} - void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetLocalSsrc( uint32_t local_ssrc) { // TODO(pbos): Consider turning this sanity check into a RTC_DCHECK. You @@ -2841,9 +2921,8 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetLocalSsrc( config_.rtp.local_ssrc = local_ssrc; flexfec_config_.local_ssrc = local_ssrc; RTC_LOG(LS_INFO) - << "RecreateWebRtcStream (recv) because of SetLocalSsrc; local_ssrc=" + << "RecreateWebRtcVideoStream (recv) because of SetLocalSsrc; local_ssrc=" << local_ssrc; - MaybeRecreateWebRtcFlexfecStream(); RecreateWebRtcVideoStream(); } @@ -2851,8 +2930,10 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFeedbackParameters( bool lntf_enabled, bool nack_enabled, bool transport_cc_enabled, - webrtc::RtcpMode rtcp_mode) { - int nack_history_ms = nack_enabled ? kNackHistoryMs : 0; + webrtc::RtcpMode rtcp_mode, + int rtx_time) { + int nack_history_ms = + nack_enabled ? rtx_time != -1 ? 
rtx_time : kNackHistoryMs : 0; if (config_.rtp.lntf.enabled == lntf_enabled && config_.rtp.nack.rtp_history_ms == nack_history_ms && config_.rtp.transport_cc == transport_cc_enabled && @@ -2861,7 +2942,8 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFeedbackParameters( << "Ignoring call to SetFeedbackParameters because parameters are " "unchanged; lntf=" << lntf_enabled << ", nack=" << nack_enabled - << ", transport_cc=" << transport_cc_enabled; + << ", transport_cc=" << transport_cc_enabled + << ", rtx_time=" << rtx_time; return; } config_.rtp.lntf.enabled = lntf_enabled; @@ -2872,17 +2954,15 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetFeedbackParameters( // based on the rtcp-fb for the FlexFEC codec, not the media codec. flexfec_config_.transport_cc = config_.rtp.transport_cc; flexfec_config_.rtcp_mode = config_.rtp.rtcp_mode; - RTC_LOG(LS_INFO) - << "RecreateWebRtcStream (recv) because of SetFeedbackParameters; nack=" - << nack_enabled << ", transport_cc=" << transport_cc_enabled; - MaybeRecreateWebRtcFlexfecStream(); + RTC_LOG(LS_INFO) << "RecreateWebRtcVideoStream (recv) because of " + "SetFeedbackParameters; nack=" + << nack_enabled << ", transport_cc=" << transport_cc_enabled; RecreateWebRtcVideoStream(); } void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters( const ChangedRecvParameters& params) { bool video_needs_recreation = false; - bool flexfec_needs_recreation = false; if (params.codec_settings) { ConfigureCodecs(*params.codec_settings); video_needs_recreation = true; @@ -2891,20 +2971,16 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters( config_.rtp.extensions = *params.rtp_header_extensions; flexfec_config_.rtp_header_extensions = *params.rtp_header_extensions; video_needs_recreation = true; - flexfec_needs_recreation = true; } if (params.flexfec_payload_type) { - ConfigureFlexfecCodec(*params.flexfec_payload_type); - flexfec_needs_recreation = true; - } - if (flexfec_needs_recreation) { - 
RTC_LOG(LS_INFO) << "MaybeRecreateWebRtcFlexfecStream (recv) because of " - "SetRecvParameters"; - MaybeRecreateWebRtcFlexfecStream(); + flexfec_config_.payload_type = *params.flexfec_payload_type; + // TODO(tommi): See if it is better to always have a flexfec stream object + // configured and instead of recreating the video stream, reconfigure the + // flexfec object from within the rtp callback (soon to be on the network + // thread). + video_needs_recreation = true; } if (video_needs_recreation) { - RTC_LOG(LS_INFO) - << "RecreateWebRtcVideoStream (recv) because of SetRecvParameters"; RecreateWebRtcVideoStream(); } } @@ -2917,13 +2993,22 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() { recording_state = stream_->SetAndGetRecordingState( webrtc::VideoReceiveStream::RecordingState(), /*generate_key_frame=*/false); - MaybeDissociateFlexfecFromVideo(); call_->DestroyVideoReceiveStream(stream_); stream_ = nullptr; } + + if (flexfec_stream_) { + call_->DestroyFlexfecReceiveStream(flexfec_stream_); + flexfec_stream_ = nullptr; + } + + if (flexfec_config_.IsCompleteAndEnabled()) { + flexfec_stream_ = call_->CreateFlexfecReceiveStream(flexfec_config_); + } + webrtc::VideoReceiveStream::Config config = config_.Copy(); config.rtp.protected_by_flexfec = (flexfec_stream_ != nullptr); - config.stream_id = stream_params_.id; + config.rtp.packet_sink_ = flexfec_stream_; stream_ = call_->CreateVideoReceiveStream(std::move(config)); if (base_minimum_playout_delay_ms) { stream_->SetBaseMinimumPlayoutDelayMs( @@ -2933,7 +3018,7 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() { stream_->SetAndGetRecordingState(std::move(*recording_state), /*generate_key_frame=*/false); } - MaybeAssociateFlexfecWithVideo(); + stream_->Start(); if (IsEnabled(call_->trials(), "WebRTC-Video-BufferPacketsWithUnknownSsrc")) { @@ -2941,33 +3026,6 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateWebRtcVideoStream() { } } -void 
WebRtcVideoChannel::WebRtcVideoReceiveStream:: - MaybeRecreateWebRtcFlexfecStream() { - if (flexfec_stream_) { - MaybeDissociateFlexfecFromVideo(); - call_->DestroyFlexfecReceiveStream(flexfec_stream_); - flexfec_stream_ = nullptr; - } - if (flexfec_config_.IsCompleteAndEnabled()) { - flexfec_stream_ = call_->CreateFlexfecReceiveStream(flexfec_config_); - MaybeAssociateFlexfecWithVideo(); - } -} - -void WebRtcVideoChannel::WebRtcVideoReceiveStream:: - MaybeAssociateFlexfecWithVideo() { - if (stream_ && flexfec_stream_) { - stream_->AddSecondarySink(flexfec_stream_); - } -} - -void WebRtcVideoChannel::WebRtcVideoReceiveStream:: - MaybeDissociateFlexfecFromVideo() { - if (stream_ && flexfec_stream_) { - stream_->RemoveSecondarySink(flexfec_stream_); - } -} - void WebRtcVideoChannel::WebRtcVideoReceiveStream::OnFrame( const webrtc::VideoFrame& frame) { webrtc::MutexLock lock(&sink_lock_); @@ -3047,6 +3105,7 @@ WebRtcVideoChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( stats.rtp_stats.packet_counter.padding_bytes; info.packets_rcvd = stats.rtp_stats.packet_counter.packets; info.packets_lost = stats.rtp_stats.packets_lost; + info.jitter_ms = stats.rtp_stats.jitter; info.framerate_rcvd = stats.network_frame_rate; info.framerate_decoded = stats.decode_frame_rate; @@ -3153,20 +3212,21 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream:: } WebRtcVideoChannel::VideoCodecSettings::VideoCodecSettings() - : flexfec_payload_type(-1), rtx_payload_type(-1) {} + : flexfec_payload_type(-1), rtx_payload_type(-1), rtx_time(-1) {} bool WebRtcVideoChannel::VideoCodecSettings::operator==( const WebRtcVideoChannel::VideoCodecSettings& other) const { return codec == other.codec && ulpfec == other.ulpfec && flexfec_payload_type == other.flexfec_payload_type && - rtx_payload_type == other.rtx_payload_type; + rtx_payload_type == other.rtx_payload_type && + rtx_time == other.rtx_time; } bool WebRtcVideoChannel::VideoCodecSettings::EqualsDisregardingFlexfec( const 
WebRtcVideoChannel::VideoCodecSettings& a, const WebRtcVideoChannel::VideoCodecSettings& b) { return a.codec == b.codec && a.ulpfec == b.ulpfec && - a.rtx_payload_type == b.rtx_payload_type; + a.rtx_payload_type == b.rtx_payload_type && a.rtx_time == b.rtx_time; } bool WebRtcVideoChannel::VideoCodecSettings::operator!=( @@ -3184,6 +3244,7 @@ WebRtcVideoChannel::MapCodecs(const std::vector& codecs) { std::map payload_codec_type; // |rtx_mapping| maps video payload type to rtx payload type. std::map rtx_mapping; + std::map rtx_time_mapping; webrtc::UlpfecConfig ulpfec_config; absl::optional flexfec_payload_type; @@ -3245,6 +3306,10 @@ WebRtcVideoChannel::MapCodecs(const std::vector& codecs) { << in_codec.ToString(); return {}; } + int rtx_time; + if (in_codec.GetParam(kCodecParamRtxTime, &rtx_time) && rtx_time > 0) { + rtx_time_mapping[associated_payload_type] = rtx_time; + } rtx_mapping[associated_payload_type] = payload_type; break; } @@ -3294,6 +3359,16 @@ WebRtcVideoChannel::MapCodecs(const std::vector& codecs) { if (it != rtx_mapping.end()) { const int rtx_payload_type = it->second; codec_settings.rtx_payload_type = rtx_payload_type; + + auto rtx_time_it = rtx_time_mapping.find(payload_type); + if (rtx_time_it != rtx_time_mapping.end()) { + const int rtx_time = rtx_time_it->second; + if (rtx_time < kNackHistoryMs) { + codec_settings.rtx_time = rtx_time; + } else { + codec_settings.rtx_time = kNackHistoryMs; + } + } } } @@ -3498,7 +3573,7 @@ EncoderStreamFactory::CreateDefaultVideoStreams( *encoder_config.simulcast_layers[0].num_temporal_layers; } } - + layer.scalability_mode = encoder_config.simulcast_layers[0].scalability_mode; layers.push_back(layer); return layers; } @@ -3625,6 +3700,32 @@ EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams( BoostMaxSimulcastLayer( webrtc::DataRate::BitsPerSec(encoder_config.max_bitrate_bps), &layers); } + + // Sort the layers by max_bitrate_bps, they might not always be from + // smallest to biggest + 
std::vector index(layers.size()); + std::iota(index.begin(), index.end(), 0); + std::stable_sort(index.begin(), index.end(), [&layers](size_t a, size_t b) { + return layers[a].max_bitrate_bps < layers[b].max_bitrate_bps; + }); + + if (!layers[index[0]].active) { + // Adjust min bitrate of the first active layer to allow it to go as low as + // the lowest (now inactive) layer could. + // Otherwise, if e.g. a single HD stream is active, it would have 600kbps + // min bitrate, which would always be allocated to the stream. + // This would lead to congested network, dropped frames and overall bad + // experience. + + const int min_configured_bitrate = layers[index[0]].min_bitrate_bps; + for (size_t i = 0; i < layers.size(); ++i) { + if (layers[index[i]].active) { + layers[index[i]].min_bitrate_bps = min_configured_bitrate; + break; + } + } + } + return layers; } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h index 321a5a8c2..e8125e12a 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h @@ -19,6 +19,7 @@ #include "absl/types/optional.h" #include "api/call/transport.h" +#include "api/sequence_checker.h" #include "api/transport/field_trial_based_config.h" #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_frame.h" @@ -30,23 +31,17 @@ #include "call/video_receive_stream.h" #include "call/video_send_stream.h" #include "media/base/media_engine.h" -#include "media/engine/constants.h" #include "media/engine/unhandled_packets_buffer.h" #include "rtc_base/network_route.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" namespace webrtc { class VideoDecoderFactory; class VideoEncoderFactory; -struct MediaConfig; } // namespace 
webrtc -namespace rtc { -class Thread; -} // namespace rtc - namespace cricket { class WebRtcVideoChannel; @@ -159,6 +154,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, bool AddRecvStream(const StreamParams& sp, bool default_stream); bool RemoveRecvStream(uint32_t ssrc) override; void ResetUnsignaledRecvStream() override; + void OnDemuxerCriteriaUpdatePending() override; + void OnDemuxerCriteriaUpdateComplete() override; bool SetSink(uint32_t ssrc, rtc::VideoSinkInterface* sink) override; void SetDefaultSink( @@ -168,6 +165,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, void OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) override; + void OnPacketSent(const rtc::SentPacket& sent_packet) override; void OnReadyToSend(bool ready) override; void OnNetworkRouteChanged(const std::string& transport_name, const rtc::NetworkRoute& network_route) override; @@ -273,6 +271,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, webrtc::UlpfecConfig ulpfec; int flexfec_payload_type; // -1 if absent. int rtx_payload_type; // -1 if absent. + int rtx_time; // -1 if absent. 
}; struct ChangedSendParameters { @@ -397,8 +396,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, webrtc::DegradationPreference GetDegradationPreference() const RTC_EXCLUSIVE_LOCKS_REQUIRED(&thread_checker_); - rtc::ThreadChecker thread_checker_; - rtc::Thread* worker_thread_; + webrtc::SequenceChecker thread_checker_; + webrtc::TaskQueueBase* const worker_thread_; const std::vector ssrcs_ RTC_GUARDED_BY(&thread_checker_); const std::vector ssrc_groups_ RTC_GUARDED_BY(&thread_checker_); webrtc::Call* const call_; @@ -455,7 +454,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, void SetFeedbackParameters(bool lntf_enabled, bool nack_enabled, bool transport_cc_enabled, - webrtc::RtcpMode rtcp_mode); + webrtc::RtcpMode rtcp_mode, + int rtx_time); void SetRecvParameters(const ChangedRecvParameters& recv_params); void OnFrame(const webrtc::VideoFrame& frame) override; @@ -483,13 +483,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, private: void RecreateWebRtcVideoStream(); - void MaybeRecreateWebRtcFlexfecStream(); - - void MaybeAssociateFlexfecWithVideo(); - void MaybeDissociateFlexfecFromVideo(); void ConfigureCodecs(const std::vector& recv_codecs); - void ConfigureFlexfecCodec(int flexfec_payload_type); std::string GetCodecNameFromPayloadType(int payload_type); @@ -553,12 +548,14 @@ class WebRtcVideoChannel : public VideoMediaChannel, void FillSendAndReceiveCodecStats(VideoMediaInfo* video_media_info) RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); - rtc::Thread* const worker_thread_; - rtc::ThreadChecker thread_checker_; + webrtc::TaskQueueBase* const worker_thread_; + webrtc::ScopedTaskSafety task_safety_; + webrtc::SequenceChecker network_thread_checker_; + webrtc::SequenceChecker thread_checker_; uint32_t rtcp_receiver_report_ssrc_ RTC_GUARDED_BY(thread_checker_); bool sending_ RTC_GUARDED_BY(thread_checker_); - webrtc::Call* const call_ RTC_GUARDED_BY(thread_checker_); + webrtc::Call* const call_; DefaultUnsignalledSsrcHandler 
default_unsignalled_ssrc_handler_ RTC_GUARDED_BY(thread_checker_); @@ -575,6 +572,22 @@ class WebRtcVideoChannel : public VideoMediaChannel, RTC_GUARDED_BY(thread_checker_); std::map receive_streams_ RTC_GUARDED_BY(thread_checker_); + // When the channel and demuxer get reconfigured, there is a window of time + // where we have to be prepared for packets arriving based on the old demuxer + // criteria because the streams live on the worker thread and the demuxer + // lives on the network thread. Because packets are posted from the network + // thread to the worker thread, they can still be in-flight when streams are + // reconfgured. This can happen when |demuxer_criteria_id_| and + // |demuxer_criteria_completed_id_| don't match. During this time, we do not + // want to create unsignalled receive streams and should instead drop the + // packets. E.g: + // * If RemoveRecvStream(old_ssrc) was recently called, there may be packets + // in-flight for that ssrc. This happens when a receiver becomes inactive. + // * If we go from one to many m= sections, the demuxer may change from + // forwarding all packets to only forwarding the configured ssrcs, so there + // is a risk of receiving ssrcs for other, recently added m= sections. 
+ uint32_t demuxer_criteria_id_ RTC_GUARDED_BY(thread_checker_) = 0; + uint32_t demuxer_criteria_completed_id_ RTC_GUARDED_BY(thread_checker_) = 0; std::set send_ssrcs_ RTC_GUARDED_BY(thread_checker_); std::set receive_ssrcs_ RTC_GUARDED_BY(thread_checker_); diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc index fc526f12a..a23d9ac24 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc @@ -11,6 +11,7 @@ #include "media/engine/webrtc_voice_engine.h" #include +#include #include #include #include @@ -46,6 +47,8 @@ #include "rtc_base/strings/audio_format_to_string.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/strings/string_format.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/third_party/base64/base64.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/metrics.h" @@ -207,7 +210,9 @@ bool IsEnabled(const webrtc::WebRtcKeyValueConfig& config, struct AdaptivePtimeConfig { bool enabled = false; webrtc::DataRate min_payload_bitrate = webrtc::DataRate::KilobitsPerSec(16); - webrtc::DataRate min_encoder_bitrate = webrtc::DataRate::KilobitsPerSec(12); + // Value is chosen to ensure FEC can be encoded, see LBRR_WB_MIN_RATE_BPS in + // libopus. 
+ webrtc::DataRate min_encoder_bitrate = webrtc::DataRate::KilobitsPerSec(16); bool use_slow_adaptation = true; absl::optional audio_network_adaptor_config; @@ -244,7 +249,6 @@ WebRtcVoiceEngine::WebRtcVoiceEngine( const rtc::scoped_refptr& decoder_factory, rtc::scoped_refptr audio_mixer, rtc::scoped_refptr audio_processing, - std::function onUnknownAudioSsrc, webrtc::AudioFrameProcessor* audio_frame_processor, const webrtc::WebRtcKeyValueConfig& trials) : task_queue_factory_(task_queue_factory), @@ -253,7 +257,6 @@ WebRtcVoiceEngine::WebRtcVoiceEngine( decoder_factory_(decoder_factory), audio_mixer_(audio_mixer), apm_(audio_processing), - onUnknownAudioSsrc_(onUnknownAudioSsrc), audio_frame_processor_(audio_frame_processor), audio_red_for_opus_trial_enabled_( IsEnabled(trials, "WebRTC-Audio-Red-For-Opus")), @@ -269,7 +272,7 @@ WebRtcVoiceEngine::WebRtcVoiceEngine( } WebRtcVoiceEngine::~WebRtcVoiceEngine() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::~WebRtcVoiceEngine"; if (initialized_) { StopAecDump(); @@ -283,7 +286,7 @@ WebRtcVoiceEngine::~WebRtcVoiceEngine() { } void WebRtcVoiceEngine::Init() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::Init"; // TaskQueue expects to be created/destroyed on the same thread. @@ -326,7 +329,7 @@ void WebRtcVoiceEngine::Init() { config.audio_device_module = adm_; if (audio_frame_processor_) config.async_audio_processing_factory = - new rtc::RefCountedObject( + rtc::make_ref_counted( *audio_frame_processor_, *task_queue_factory_); audio_state_ = webrtc::AudioState::Create(config); } @@ -339,27 +342,32 @@ void WebRtcVoiceEngine::Init() { AudioOptions options; options.echo_cancellation = true; options.auto_gain_control = true; +#if defined(WEBRTC_IOS) + // On iOS, VPIO provides built-in NS. 
+ options.noise_suppression = false; + options.typing_detection = false; +#else options.noise_suppression = true; + options.typing_detection = true; +#endif + options.experimental_ns = false; options.highpass_filter = true; options.stereo_swapping = false; options.audio_jitter_buffer_max_packets = 200; options.audio_jitter_buffer_fast_accelerate = false; options.audio_jitter_buffer_min_delay_ms = 0; options.audio_jitter_buffer_enable_rtx_handling = false; - options.typing_detection = true; options.experimental_agc = false; - options.experimental_ns = false; options.residual_echo_detector = true; bool error = ApplyOptions(options); RTC_DCHECK(error); } - initialized_ = true; } rtc::scoped_refptr WebRtcVoiceEngine::GetAudioState() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return audio_state_; } @@ -368,13 +376,13 @@ VoiceMediaChannel* WebRtcVoiceEngine::CreateMediaChannel( const MediaConfig& config, const AudioOptions& options, const webrtc::CryptoOptions& crypto_options) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(call->worker_thread()); return new WebRtcVoiceMediaChannel(this, config, options, crypto_options, call); } bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::ApplyOptions: " << options_in.ToString(); AudioOptions options = options_in; // The options are modified below. @@ -400,14 +408,8 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { use_mobile_software_aec = true; #endif -// Set and adjust noise suppressor options. -#if defined(WEBRTC_IOS) - // On iOS, VPIO provides built-in NS. - options.noise_suppression = false; - options.typing_detection = false; - options.experimental_ns = false; - RTC_LOG(LS_INFO) << "Always disable NS on iOS. 
Use built-in instead."; -#elif defined(WEBRTC_ANDROID) +// Override noise suppression options for Android. +#if defined(WEBRTC_ANDROID) options.typing_detection = false; options.experimental_ns = false; #endif @@ -623,22 +625,9 @@ WebRtcVoiceEngine::GetRtpHeaderExtensions() const { return result; } -void WebRtcVoiceEngine::RegisterChannel(WebRtcVoiceMediaChannel* channel) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); - RTC_DCHECK(channel); - channels_.push_back(channel); -} - -void WebRtcVoiceEngine::UnregisterChannel(WebRtcVoiceMediaChannel* channel) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); - auto it = absl::c_find(channels_, channel); - RTC_DCHECK(it != channels_.end()); - channels_.erase(it); -} - bool WebRtcVoiceEngine::StartAecDump(webrtc::FileWrapper file, int64_t max_size_bytes) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); webrtc::AudioProcessing* ap = apm(); if (!ap) { @@ -653,7 +642,7 @@ bool WebRtcVoiceEngine::StartAecDump(webrtc::FileWrapper file, } void WebRtcVoiceEngine::StopAecDump() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); webrtc::AudioProcessing* ap = apm(); if (ap) { ap->DetachAecDump(); @@ -664,18 +653,18 @@ void WebRtcVoiceEngine::StopAecDump() { } webrtc::AudioDeviceModule* WebRtcVoiceEngine::adm() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(adm_); return adm_.get(); } webrtc::AudioProcessing* WebRtcVoiceEngine::apm() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return apm_.get(); } webrtc::AudioState* WebRtcVoiceEngine::audio_state() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(audio_state_); return audio_state_.get(); } @@ -817,7 +806,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream WebRtcAudioSendStream& operator=(const 
WebRtcAudioSendStream&) = delete; ~WebRtcAudioSendStream() override { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); ClearSource(); call_->DestroyAudioSendStream(stream_); } @@ -829,7 +818,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream } void SetRtpExtensions(const std::vector& extensions) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.rtp.extensions = extensions; rtp_parameters_.header_extensions = extensions; ReconfigureAudioSendStream(); @@ -841,7 +830,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream } void SetMid(const std::string& mid) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (config_.rtp.mid == mid) { return; } @@ -851,14 +840,14 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream void SetFrameEncryptor( rtc::scoped_refptr frame_encryptor) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_encryptor = frame_encryptor; ReconfigureAudioSendStream(); } void SetAudioNetworkAdaptorConfig( const absl::optional& audio_network_adaptor_config) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (audio_network_adaptor_config_from_options_ == audio_network_adaptor_config) { return; @@ -870,7 +859,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream } bool SetMaxSendBitrate(int bps) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(config_.send_codec_spec); RTC_DCHECK(audio_codec_spec_); auto send_rate = ComputeSendBitrate( @@ -893,32 +882,32 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream int payload_freq, int event, int duration_ms) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); return stream_->SendTelephoneEvent(payload_type, 
payload_freq, event, duration_ms); } void SetSend(bool send) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); send_ = send; UpdateSendState(); } void SetMuted(bool muted) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); stream_->SetMuted(muted); muted_ = muted; } bool muted() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); return muted_; } webrtc::AudioSendStream::Stats GetStats(bool has_remote_tracks) const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); return stream_->GetStats(has_remote_tracks); } @@ -928,7 +917,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // This method is called on the libjingle worker thread. // TODO(xians): Make sure Start() is called only once. void SetSource(AudioSource* source) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(source); if (source_) { RTC_DCHECK(source_ == source); @@ -943,7 +932,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // callback will be received after this method. // This method is called on the libjingle worker thread. void ClearSource() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (source_) { source_->SetSink(nullptr); source_ = nullptr; @@ -979,7 +968,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // Callback from the |source_| when it is going away. In case Start() has // never been called, this callback won't be triggered. void OnClose() override { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Set |source_| to nullptr to make sure no more callback will get into // the source. 
source_ = nullptr; @@ -1046,14 +1035,14 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream void SetEncoderToPacketizerFrameTransformer( rtc::scoped_refptr frame_transformer) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_transformer = std::move(frame_transformer); ReconfigureAudioSendStream(); } private: void UpdateSendState() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); RTC_DCHECK_EQ(1UL, rtp_parameters_.encodings.size()); if (send_ && source_ != nullptr && rtp_parameters_.encodings[0].active) { @@ -1064,7 +1053,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream } void UpdateAllowedBitrateRange() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); // The order of precedence, from lowest to highest is: // - a reasonable default of 32kbps min/max // - fixed target bitrate from codec spec @@ -1096,7 +1085,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream void UpdateSendCodecSpec( const webrtc::AudioSendStream::Config::SendCodecSpec& send_codec_spec) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.send_codec_spec = send_codec_spec; auto info = config_.encoder_factory->QueryAudioEncoder(send_codec_spec.format); @@ -1117,6 +1106,14 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream *audio_codec_spec_); UpdateAllowedBitrateRange(); + + // Encoder will only use two channels if the stereo parameter is set. 
+ const auto& it = send_codec_spec.format.parameters.find("stereo"); + if (it != send_codec_spec.format.parameters.end() && it->second == "1") { + num_encoded_channels_ = 2; + } else { + num_encoded_channels_ = 1; + } } void UpdateAudioNetworkAdaptorConfig() { @@ -1131,13 +1128,15 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream } void ReconfigureAudioSendStream() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); stream_->Reconfigure(config_); } + int NumPreferredChannels() const override { return num_encoded_channels_; } + const AdaptivePtimeConfig adaptive_ptime_config_; - rtc::ThreadChecker worker_thread_checker_; + webrtc::SequenceChecker worker_thread_checker_; rtc::RaceChecker audio_capture_race_checker_; webrtc::Call* call_ = nullptr; webrtc::AudioSendStream::Config config_; @@ -1157,6 +1156,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream // TODO(webrtc:11717): Remove this once audio_network_adaptor in AudioOptions // has been removed. 
absl::optional audio_network_adaptor_config_from_options_; + std::atomic num_encoded_channels_{-1}; }; class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { @@ -1210,19 +1210,19 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { WebRtcAudioReceiveStream& operator=(const WebRtcAudioReceiveStream&) = delete; ~WebRtcAudioReceiveStream() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); call_->DestroyAudioReceiveStream(stream_); } void SetFrameDecryptor( rtc::scoped_refptr frame_decryptor) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_decryptor = frame_decryptor; RecreateAudioReceiveStream(); } void SetLocalSsrc(uint32_t local_ssrc) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (local_ssrc != config_.rtp.local_ssrc) { config_.rtp.local_ssrc = local_ssrc; RecreateAudioReceiveStream(); @@ -1231,7 +1231,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { void SetUseTransportCcAndRecreateStream(bool use_transport_cc, bool use_nack) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.rtp.transport_cc = use_transport_cc; config_.rtp.nack.rtp_history_ms = use_nack ? kNackRtpHistoryMs : 0; ReconfigureAudioReceiveStream(); @@ -1239,21 +1239,21 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { void SetRtpExtensionsAndRecreateStream( const std::vector& extensions) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.rtp.extensions = extensions; RecreateAudioReceiveStream(); } // Set a new payload type -> decoder map. 
void SetDecoderMap(const std::map& decoder_map) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.decoder_map = decoder_map; ReconfigureAudioReceiveStream(); } void MaybeRecreateAudioReceiveStream( const std::vector& stream_ids) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); std::string sync_group; if (!stream_ids.empty()) { sync_group = stream_ids[0]; @@ -1269,13 +1269,13 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { webrtc::AudioReceiveStream::Stats GetStats( bool get_and_clear_legacy_stats) const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); return stream_->GetStats(get_and_clear_legacy_stats); } void SetRawAudioSink(std::unique_ptr sink) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Need to update the stream's sink first; once raw_audio_sink_ is // reassigned, whatever was in there before is destroyed. 
stream_->SetSink(sink.get()); @@ -1283,24 +1283,23 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { } void SetOutputVolume(double volume) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); output_volume_ = volume; stream_->SetGain(volume); } void SetPlayout(bool playout) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); if (playout) { stream_->Start(); } else { stream_->Stop(); } - playout_ = playout; } bool SetBaseMinimumPlayoutDelayMs(int delay_ms) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); if (stream_->SetBaseMinimumPlayoutDelayMs(delay_ms)) { // Memorize only valid delay because during stream recreation it will be @@ -1317,13 +1316,13 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { } int GetBaseMinimumPlayoutDelayMs() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); return stream_->GetBaseMinimumPlayoutDelayMs(); } std::vector GetSources() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); return stream_->GetSources(); } @@ -1339,37 +1338,39 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream { void SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_transformer = std::move(frame_transformer); ReconfigureAudioReceiveStream(); } private: void RecreateAudioReceiveStream() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + bool was_running = false; if (stream_) { + was_running = stream_->IsRunning(); call_->DestroyAudioReceiveStream(stream_); } stream_ = call_->CreateAudioReceiveStream(config_); RTC_CHECK(stream_); 
stream_->SetGain(output_volume_); - SetPlayout(playout_); + if (was_running) + SetPlayout(was_running); stream_->SetSink(raw_audio_sink_.get()); } void ReconfigureAudioReceiveStream() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); stream_->Reconfigure(config_); } - rtc::ThreadChecker worker_thread_checker_; + webrtc::SequenceChecker worker_thread_checker_; webrtc::Call* call_ = nullptr; webrtc::AudioReceiveStream::Config config_; // The stream is owned by WebRtcAudioReceiveStream and may be reallocated if // configuration changes. webrtc::AudioReceiveStream* stream_ = nullptr; - bool playout_ = false; float output_volume_ = 1.0; std::unique_ptr raw_audio_sink_; }; @@ -1380,21 +1381,22 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel( const AudioOptions& options, const webrtc::CryptoOptions& crypto_options, webrtc::Call* call) - : VoiceMediaChannel(config), + : VoiceMediaChannel(config, call->network_thread()), + worker_thread_(call->worker_thread()), engine_(engine), call_(call), audio_config_(config.audio), crypto_options_(crypto_options), audio_red_for_opus_trial_enabled_( IsEnabled(call->trials(), "WebRTC-Audio-Red-For-Opus")) { + network_thread_checker_.Detach(); RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel"; RTC_DCHECK(call); - engine->RegisterChannel(this); SetOptions(options); } WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel"; // TODO(solenberg): Should be able to delete the streams directly, without // going through RemoveNnStream(), once stream objects handle @@ -1405,13 +1407,12 @@ WebRtcVoiceMediaChannel::~WebRtcVoiceMediaChannel() { while (!recv_streams_.empty()) { RemoveRecvStream(recv_streams_.begin()->first); } - engine()->UnregisterChannel(this); } bool 
WebRtcVoiceMediaChannel::SetSendParameters( const AudioSendParameters& params) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::SetSendParameters"); - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetSendParameters: " << params.ToString(); // TODO(pthatcher): Refactor this to be more clean now that we have @@ -1457,7 +1458,7 @@ bool WebRtcVoiceMediaChannel::SetSendParameters( bool WebRtcVoiceMediaChannel::SetRecvParameters( const AudioRecvParameters& params) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::SetRecvParameters"); - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetRecvParameters: " << params.ToString(); // TODO(pthatcher): Refactor this to be more clean now that we have @@ -1484,7 +1485,7 @@ bool WebRtcVoiceMediaChannel::SetRecvParameters( webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpSendParameters( uint32_t ssrc) const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); auto it = send_streams_.find(ssrc); if (it == send_streams_.end()) { RTC_LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream " @@ -1505,7 +1506,7 @@ webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpSendParameters( webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters( uint32_t ssrc, const webrtc::RtpParameters& parameters) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); auto it = send_streams_.find(ssrc); if (it == send_streams_.end()) { RTC_LOG(LS_WARNING) << "Attempting to set RTP send parameters for stream " @@ -1560,7 +1561,7 @@ webrtc::RTCError WebRtcVoiceMediaChannel::SetRtpSendParameters( webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpReceiveParameters( uint32_t ssrc) const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); webrtc::RtpParameters 
rtp_params; auto it = recv_streams_.find(ssrc); if (it == recv_streams_.end()) { @@ -1580,7 +1581,7 @@ webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpReceiveParameters( webrtc::RtpParameters WebRtcVoiceMediaChannel::GetDefaultRtpReceiveParameters() const { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); webrtc::RtpParameters rtp_params; if (!default_sink_) { RTC_LOG(LS_WARNING) << "Attempting to get RTP parameters for the default, " @@ -1597,7 +1598,7 @@ webrtc::RtpParameters WebRtcVoiceMediaChannel::GetDefaultRtpReceiveParameters() } bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "Setting voice channel options: " << options.ToString(); // We retain all of the existing options, and apply the given ones @@ -1623,7 +1624,7 @@ bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) { bool WebRtcVoiceMediaChannel::SetRecvCodecs( const std::vector& codecs) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); // Set the payload types to be used for incoming media. RTC_LOG(LS_INFO) << "Setting receive voice codecs."; @@ -1681,21 +1682,22 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs( return true; } - if (playout_) { - // Receive codecs can not be changed while playing. So we temporarily - // pause playout. - ChangePlayout(false); - } + bool playout_enabled = playout_; + // Receive codecs can not be changed while playing. So we temporarily + // pause playout. 
+ SetPlayout(false); + RTC_DCHECK(!playout_); decoder_map_ = std::move(decoder_map); for (auto& kv : recv_streams_) { kv.second->SetDecoderMap(decoder_map_); } + recv_codecs_ = codecs; - if (desired_playout_ && !playout_) { - ChangePlayout(desired_playout_); - } + SetPlayout(playout_enabled); + RTC_DCHECK_EQ(playout_, playout_enabled); + return true; } @@ -1704,7 +1706,7 @@ bool WebRtcVoiceMediaChannel::SetRecvCodecs( // and receive streams may be reconfigured based on the new settings. bool WebRtcVoiceMediaChannel::SetSendCodecs( const std::vector& codecs) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); dtmf_payload_type_ = absl::nullopt; dtmf_payload_freq_ = -1; @@ -1850,13 +1852,8 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs( } void WebRtcVoiceMediaChannel::SetPlayout(bool playout) { - desired_playout_ = playout; - return ChangePlayout(desired_playout_); -} - -void WebRtcVoiceMediaChannel::ChangePlayout(bool playout) { - TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::ChangePlayout"); - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::SetPlayout"); + RTC_DCHECK_RUN_ON(worker_thread_); if (playout_ == playout) { return; } @@ -1899,7 +1896,7 @@ bool WebRtcVoiceMediaChannel::SetAudioSend(uint32_t ssrc, bool enable, const AudioOptions* options, AudioSource* source) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); // TODO(solenberg): The state change should be fully rolled back if any one of // these calls fail. 
if (!SetLocalSource(ssrc, source)) { @@ -1916,7 +1913,7 @@ bool WebRtcVoiceMediaChannel::SetAudioSend(uint32_t ssrc, bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::AddSendStream"); - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "AddSendStream: " << sp.ToString(); uint32_t ssrc = sp.first_ssrc(); @@ -1955,7 +1952,7 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) { bool WebRtcVoiceMediaChannel::RemoveSendStream(uint32_t ssrc) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::RemoveSendStream"); - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "RemoveSendStream: " << ssrc; auto it = send_streams_.find(ssrc); @@ -1981,7 +1978,7 @@ bool WebRtcVoiceMediaChannel::RemoveSendStream(uint32_t ssrc) { bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::AddRecvStream"); - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "AddRecvStream: " << sp.ToString(); if (!sp.has_ssrcs()) { @@ -2027,7 +2024,7 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) { bool WebRtcVoiceMediaChannel::RemoveRecvStream(uint32_t ssrc) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::RemoveRecvStream"); - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "RemoveRecvStream: " << ssrc; const auto it = recv_streams_.find(ssrc); @@ -2046,7 +2043,7 @@ bool WebRtcVoiceMediaChannel::RemoveRecvStream(uint32_t ssrc) { } void WebRtcVoiceMediaChannel::ResetUnsignaledRecvStream() { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "ResetUnsignaledRecvStream."; unsignaled_stream_params_ = StreamParams(); // Create a copy since RemoveRecvStream will modify 
|unsignaled_recv_ssrcs_|. @@ -2056,6 +2053,13 @@ void WebRtcVoiceMediaChannel::ResetUnsignaledRecvStream() { } } +// Not implemented. +// TODO(https://crbug.com/webrtc/12676): Implement a fix for the unsignalled +// SSRC race that can happen when an m= section goes from receiving to not +// receiving. +void WebRtcVoiceMediaChannel::OnDemuxerCriteriaUpdatePending() {} +void WebRtcVoiceMediaChannel::OnDemuxerCriteriaUpdateComplete() {} + bool WebRtcVoiceMediaChannel::SetLocalSource(uint32_t ssrc, AudioSource* source) { auto it = send_streams_.find(ssrc); @@ -2080,7 +2084,7 @@ bool WebRtcVoiceMediaChannel::SetLocalSource(uint32_t ssrc, } bool WebRtcVoiceMediaChannel::SetOutputVolume(uint32_t ssrc, double volume) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << rtc::StringFormat("WRVMC::%s({ssrc=%u}, {volume=%.2f})", __func__, ssrc, volume); const auto it = recv_streams_.find(ssrc); @@ -2098,7 +2102,7 @@ bool WebRtcVoiceMediaChannel::SetOutputVolume(uint32_t ssrc, double volume) { } bool WebRtcVoiceMediaChannel::SetDefaultOutputVolume(double volume) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); default_recv_volume_ = volume; for (uint32_t ssrc : unsignaled_recv_ssrcs_) { const auto it = recv_streams_.find(ssrc); @@ -2115,7 +2119,7 @@ bool WebRtcVoiceMediaChannel::SetDefaultOutputVolume(double volume) { bool WebRtcVoiceMediaChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); std::vector ssrcs(1, ssrc); // SSRC of 0 represents the default receive stream. 
if (ssrc == 0) { @@ -2158,7 +2162,7 @@ bool WebRtcVoiceMediaChannel::CanInsertDtmf() { void WebRtcVoiceMediaChannel::SetFrameDecryptor( uint32_t ssrc, rtc::scoped_refptr frame_decryptor) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); auto matching_stream = recv_streams_.find(ssrc); if (matching_stream != recv_streams_.end()) { matching_stream->second->SetFrameDecryptor(frame_decryptor); @@ -2172,7 +2176,7 @@ void WebRtcVoiceMediaChannel::SetFrameDecryptor( void WebRtcVoiceMediaChannel::SetFrameEncryptor( uint32_t ssrc, rtc::scoped_refptr frame_encryptor) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); auto matching_stream = send_streams_.find(ssrc); if (matching_stream != send_streams_.end()) { matching_stream->second->SetFrameEncryptor(frame_encryptor); @@ -2182,7 +2186,7 @@ void WebRtcVoiceMediaChannel::SetFrameEncryptor( bool WebRtcVoiceMediaChannel::InsertDtmf(uint32_t ssrc, int event, int duration) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::InsertDtmf"; if (!CanInsertDtmf()) { return false; @@ -2205,82 +2209,104 @@ bool WebRtcVoiceMediaChannel::InsertDtmf(uint32_t ssrc, void WebRtcVoiceMediaChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&network_thread_checker_); + // TODO(bugs.webrtc.org/11993): This code is very similar to what + // WebRtcVideoChannel::OnPacketReceived does. For maintainability and + // consistency it would be good to move the interaction with call_->Receiver() + // to a common implementation and provide a callback on the worker thread + // for the exception case (DELIVERY_UNKNOWN_SSRC) and how retry is attempted. 
+ worker_thread_->PostTask(ToQueuedTask(task_safety_, [this, packet, + packet_time_us] { + RTC_DCHECK_RUN_ON(worker_thread_); - webrtc::PacketReceiver::DeliveryStatus delivery_result = - call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, packet, - packet_time_us); + webrtc::PacketReceiver::DeliveryStatus delivery_result = + call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, packet, + packet_time_us); - if (delivery_result != webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC) { - return; - } - - // Create an unsignaled receive stream for this previously not received ssrc. - // If there already is N unsignaled receive streams, delete the oldest. - // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5208 - uint32_t ssrc = 0; - if (!GetRtpSsrc(packet.cdata(), packet.size(), &ssrc)) { - return; - } - RTC_DCHECK(!absl::c_linear_search(unsignaled_recv_ssrcs_, ssrc)); - - if (engine()->onUnknownAudioSsrc_) { - engine()->onUnknownAudioSsrc_(ssrc); - } - - // Add new stream. - StreamParams sp = unsignaled_stream_params_; - sp.ssrcs.push_back(ssrc); - RTC_LOG(LS_INFO) << "Creating unsignaled receive stream for SSRC=" << ssrc; - if (!AddRecvStream(sp)) { - RTC_LOG(LS_WARNING) << "Could not create unsignaled receive stream."; - return; - } - unsignaled_recv_ssrcs_.push_back(ssrc); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.NumOfUnsignaledStreams", - unsignaled_recv_ssrcs_.size(), 1, 100, 101); - - // Remove oldest unsignaled stream, if we have too many. 
- if (unsignaled_recv_ssrcs_.size() > kMaxUnsignaledRecvStreams) { - uint32_t remove_ssrc = unsignaled_recv_ssrcs_.front(); - RTC_DLOG(LS_INFO) << "Removing unsignaled receive stream with SSRC=" - << remove_ssrc; - RemoveRecvStream(remove_ssrc); - } - RTC_DCHECK_GE(kMaxUnsignaledRecvStreams, unsignaled_recv_ssrcs_.size()); - - SetOutputVolume(ssrc, default_recv_volume_); - SetBaseMinimumPlayoutDelayMs(ssrc, default_recv_base_minimum_delay_ms_); - - // The default sink can only be attached to one stream at a time, so we hook - // it up to the *latest* unsignaled stream we've seen, in order to support the - // case where the SSRC of one unsignaled stream changes. - if (default_sink_) { - for (uint32_t drop_ssrc : unsignaled_recv_ssrcs_) { - auto it = recv_streams_.find(drop_ssrc); - it->second->SetRawAudioSink(nullptr); + if (delivery_result != webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC) { + return; } - std::unique_ptr proxy_sink( - new ProxySink(default_sink_.get())); - SetRawAudioSink(ssrc, std::move(proxy_sink)); - } - delivery_result = call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, - packet, packet_time_us); - RTC_DCHECK_NE(webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC, delivery_result); + // Create an unsignaled receive stream for this previously not received + // ssrc. If there already is N unsignaled receive streams, delete the + // oldest. See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5208 + uint32_t ssrc = 0; + if (!GetRtpSsrc(packet.cdata(), packet.size(), &ssrc)) { + return; + } + RTC_DCHECK(!absl::c_linear_search(unsignaled_recv_ssrcs_, ssrc)); + + // Add new stream. 
+ StreamParams sp = unsignaled_stream_params_; + sp.ssrcs.push_back(ssrc); + RTC_LOG(LS_INFO) << "Creating unsignaled receive stream for SSRC=" << ssrc; + if (!AddRecvStream(sp)) { + RTC_LOG(LS_WARNING) << "Could not create unsignaled receive stream."; + return; + } + unsignaled_recv_ssrcs_.push_back(ssrc); + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.NumOfUnsignaledStreams", + unsignaled_recv_ssrcs_.size(), 1, 100, 101); + + // Remove oldest unsignaled stream, if we have too many. + if (unsignaled_recv_ssrcs_.size() > kMaxUnsignaledRecvStreams) { + uint32_t remove_ssrc = unsignaled_recv_ssrcs_.front(); + RTC_DLOG(LS_INFO) << "Removing unsignaled receive stream with SSRC=" + << remove_ssrc; + RemoveRecvStream(remove_ssrc); + } + RTC_DCHECK_GE(kMaxUnsignaledRecvStreams, unsignaled_recv_ssrcs_.size()); + + SetOutputVolume(ssrc, default_recv_volume_); + SetBaseMinimumPlayoutDelayMs(ssrc, default_recv_base_minimum_delay_ms_); + + // The default sink can only be attached to one stream at a time, so we hook + // it up to the *latest* unsignaled stream we've seen, in order to support + // the case where the SSRC of one unsignaled stream changes. + if (default_sink_) { + for (uint32_t drop_ssrc : unsignaled_recv_ssrcs_) { + auto it = recv_streams_.find(drop_ssrc); + it->second->SetRawAudioSink(nullptr); + } + std::unique_ptr proxy_sink( + new ProxySink(default_sink_.get())); + SetRawAudioSink(ssrc, std::move(proxy_sink)); + } + + delivery_result = call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO, + packet, packet_time_us); + RTC_DCHECK_NE(webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC, + delivery_result); + })); +} + +void WebRtcVoiceMediaChannel::OnPacketSent(const rtc::SentPacket& sent_packet) { + RTC_DCHECK_RUN_ON(&network_thread_checker_); + // TODO(tommi): We shouldn't need to go through call_ to deliver this + // notification. We should already have direct access to + // video_send_delay_stats_ and transport_send_ptr_ via `stream_`. 
+ // So we should be able to remove OnSentPacket from Call and handle this per + // channel instead. At the moment Call::OnSentPacket calls OnSentPacket for + // the video stats, which we should be able to skip. + call_->OnSentPacket(sent_packet); } void WebRtcVoiceMediaChannel::OnNetworkRouteChanged( const std::string& transport_name, const rtc::NetworkRoute& network_route) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); - call_->GetTransportControllerSend()->OnNetworkRouteChanged(transport_name, - network_route); + RTC_DCHECK_RUN_ON(&network_thread_checker_); + call_->OnAudioTransportOverheadChanged(network_route.packet_overhead); + + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [this, name = transport_name, route = network_route] { + RTC_DCHECK_RUN_ON(worker_thread_); + call_->GetTransportControllerSend()->OnNetworkRouteChanged(name, route); + })); } bool WebRtcVoiceMediaChannel::MuteStream(uint32_t ssrc, bool muted) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); const auto it = send_streams_.find(ssrc); if (it == send_streams_.end()) { RTC_LOG(LS_WARNING) << "The specified ssrc " << ssrc << " is not in use."; @@ -2318,7 +2344,7 @@ bool WebRtcVoiceMediaChannel::SetMaxSendBitrate(int bps) { } void WebRtcVoiceMediaChannel::OnReadyToSend(bool ready) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&network_thread_checker_); RTC_LOG(LS_VERBOSE) << "OnReadyToSend: " << (ready ? "Ready." : "Not ready."); call_->SignalChannelNetworkState( webrtc::MediaType::AUDIO, @@ -2328,7 +2354,7 @@ void WebRtcVoiceMediaChannel::OnReadyToSend(bool ready) { bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, bool get_and_clear_legacy_stats) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::GetStats"); - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(info); // Get SSRC and stats for each sender. 
@@ -2436,6 +2462,13 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, stats.relative_packet_arrival_delay_seconds; rinfo.interruption_count = stats.interruption_count; rinfo.total_interruption_duration_ms = stats.total_interruption_duration_ms; + rinfo.last_sender_report_timestamp_ms = + stats.last_sender_report_timestamp_ms; + rinfo.last_sender_report_remote_timestamp_ms = + stats.last_sender_report_remote_timestamp_ms; + rinfo.sender_reports_packets_sent = stats.sender_reports_packets_sent; + rinfo.sender_reports_bytes_sent = stats.sender_reports_bytes_sent; + rinfo.sender_reports_reports_count = stats.sender_reports_reports_count; info->receivers.push_back(rinfo); } @@ -2459,7 +2492,7 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info, void WebRtcVoiceMediaChannel::SetRawAudioSink( uint32_t ssrc, std::unique_ptr sink) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::SetRawAudioSink: ssrc:" << ssrc << " " << (sink ? 
"(ptr)" : "NULL"); const auto it = recv_streams_.find(ssrc); @@ -2472,7 +2505,7 @@ void WebRtcVoiceMediaChannel::SetRawAudioSink( void WebRtcVoiceMediaChannel::SetDefaultRawAudioSink( std::unique_ptr sink) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::SetDefaultRawAudioSink:"; if (!unsignaled_recv_ssrcs_.empty()) { std::unique_ptr proxy_sink( @@ -2496,7 +2529,7 @@ std::vector WebRtcVoiceMediaChannel::GetSources( void WebRtcVoiceMediaChannel::SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); auto matching_stream = send_streams_.find(ssrc); if (matching_stream == send_streams_.end()) { RTC_LOG(LS_INFO) << "Attempting to set frame transformer for SSRC:" << ssrc @@ -2510,7 +2543,7 @@ void WebRtcVoiceMediaChannel::SetEncoderToPacketizerFrameTransformer( void WebRtcVoiceMediaChannel::SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); auto matching_stream = recv_streams_.find(ssrc); if (matching_stream == recv_streams_.end()) { RTC_LOG(LS_INFO) << "Attempting to set frame transformer for SSRC:" << ssrc @@ -2521,9 +2554,21 @@ void WebRtcVoiceMediaChannel::SetDepacketizerToDecoderFrameTransformer( std::move(frame_transformer)); } +bool WebRtcVoiceMediaChannel::SendRtp(const uint8_t* data, + size_t len, + const webrtc::PacketOptions& options) { + MediaChannel::SendRtp(data, len, options); + return true; +} + +bool WebRtcVoiceMediaChannel::SendRtcp(const uint8_t* data, size_t len) { + MediaChannel::SendRtcp(data, len); + return true; +} + bool WebRtcVoiceMediaChannel::MaybeDeregisterUnsignaledRecvStream( uint32_t ssrc) { - RTC_DCHECK(worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread_); auto it = 
absl::c_find(unsignaled_recv_ssrcs_, ssrc); if (it != unsignaled_recv_ssrcs_.end()) { unsignaled_recv_ssrcs_.erase(it); diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h index b212b3a3f..147688b0e 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_encoder_factory.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/rtp/rtp_source.h" #include "api/transport/webrtc_key_value_config.h" @@ -29,7 +30,7 @@ #include "rtc_base/buffer.h" #include "rtc_base/network_route.h" #include "rtc_base/task_queue.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace webrtc { class AudioFrameProcessor; @@ -37,8 +38,6 @@ class AudioFrameProcessor; namespace cricket { -class AudioDeviceModule; -class AudioMixer; class AudioSource; class WebRtcVoiceMediaChannel; @@ -55,7 +54,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { const rtc::scoped_refptr& decoder_factory, rtc::scoped_refptr audio_mixer, rtc::scoped_refptr audio_processing, - std::function onUnknownAudioSsrc, webrtc::AudioFrameProcessor* audio_frame_processor, const webrtc::WebRtcKeyValueConfig& trials); @@ -80,12 +78,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { std::vector GetRtpHeaderExtensions() const override; - // For tracking WebRtc channels. Needed because we have to pause them - // all when switching devices. - // May only be called by WebRtcVoiceMediaChannel. - void RegisterChannel(WebRtcVoiceMediaChannel* channel); - void UnregisterChannel(WebRtcVoiceMediaChannel* channel); - // Starts AEC dump using an existing file. A maximum file size in bytes can be // specified. 
When the maximum file size is reached, logging is stopped and // the file is closed. If max_size_bytes is set to <= 0, no limit will be @@ -113,8 +105,8 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { std::vector CollectCodecs( const std::vector& specs) const; - rtc::ThreadChecker signal_thread_checker_; - rtc::ThreadChecker worker_thread_checker_; + webrtc::SequenceChecker signal_thread_checker_; + webrtc::SequenceChecker worker_thread_checker_; // The audio device module. rtc::scoped_refptr adm_; @@ -129,7 +121,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { rtc::scoped_refptr audio_state_; std::vector send_codecs_; std::vector recv_codecs_; - std::vector channels_; bool is_dumping_aec_ = false; bool initialized_ = false; @@ -142,8 +133,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { int audio_jitter_buffer_min_delay_ms_ = 0; bool audio_jitter_buffer_enable_rtx_handling_ = false; - std::function onUnknownAudioSsrc_ = nullptr; - // If this field trial is enabled, we will negotiate and use RFC 2198 // redundancy for opus audio. 
const bool audio_red_for_opus_trial_enabled_; @@ -189,6 +178,8 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, bool AddRecvStream(const StreamParams& sp) override; bool RemoveRecvStream(uint32_t ssrc) override; void ResetUnsignaledRecvStream() override; + void OnDemuxerCriteriaUpdatePending() override; + void OnDemuxerCriteriaUpdateComplete() override; // E2EE Frame API // Set a frame decryptor to a particular ssrc that will intercept all @@ -217,6 +208,7 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, void OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) override; + void OnPacketSent(const rtc::SentPacket& sent_packet) override; void OnNetworkRouteChanged(const std::string& transport_name, const rtc::NetworkRoute& network_route) override; void OnReadyToSend(bool ready) override; @@ -247,29 +239,9 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, // implements Transport interface bool SendRtp(const uint8_t* data, size_t len, - const webrtc::PacketOptions& options) override { - rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen); - rtc::PacketOptions rtc_options; - rtc_options.packet_id = options.packet_id; - if (DscpEnabled()) { - rtc_options.dscp = PreferredDscp(); - } - rtc_options.info_signaled_after_sent.included_in_feedback = - options.included_in_feedback; - rtc_options.info_signaled_after_sent.included_in_allocation = - options.included_in_allocation; - return VoiceMediaChannel::SendPacket(&packet, rtc_options); - } + const webrtc::PacketOptions& options) override; - bool SendRtcp(const uint8_t* data, size_t len) override { - rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen); - rtc::PacketOptions rtc_options; - if (DscpEnabled()) { - rtc_options.dscp = PreferredDscp(); - } - - return VoiceMediaChannel::SendRtcp(&packet, rtc_options); - } + bool SendRtcp(const uint8_t* data, size_t len) override; private: bool SetOptions(const AudioOptions& options); @@ -279,7 +251,6 
@@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, bool MuteStream(uint32_t ssrc, bool mute); WebRtcVoiceEngine* engine() { return engine_; } - void ChangePlayout(bool playout); int CreateVoEChannel(); bool DeleteVoEChannel(int channel); bool SetMaxSendBitrate(int bps); @@ -288,7 +259,9 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, // unsignaled anymore (i.e. it is now removed, or signaled), and return true. bool MaybeDeregisterUnsignaledRecvStream(uint32_t ssrc); - rtc::ThreadChecker worker_thread_checker_; + webrtc::TaskQueueBase* const worker_thread_; + webrtc::ScopedTaskSafety task_safety_; + webrtc::SequenceChecker network_thread_checker_; WebRtcVoiceEngine* const engine_ = nullptr; std::vector send_codecs_; @@ -304,7 +277,6 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, int dtmf_payload_freq_ = -1; bool recv_transport_cc_enabled_ = false; bool recv_nack_enabled_ = false; - bool desired_playout_ = false; bool playout_ = false; bool send_ = false; webrtc::Call* const call_ = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/OWNERS b/TMessagesProj/jni/voip/webrtc/media/sctp/OWNERS index a32f041ac..da2f0178a 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/OWNERS @@ -1 +1,3 @@ +boivie@webrtc.org deadbeef@webrtc.org +orphis@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc new file mode 100644 index 000000000..0a6cb3346 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc @@ -0,0 +1,531 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "media/sctp/dcsctp_transport.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "media/base/media_channel.h" +#include "net/dcsctp/public/packet_observer.h" +#include "net/dcsctp/public/types.h" +#include "net/dcsctp/socket/dcsctp_socket.h" +#include "p2p/base/packet_transport_internal.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/thread.h" +#include "rtc_base/trace_event.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +namespace { + +enum class WebrtcPPID : dcsctp::PPID::UnderlyingType { + // https://www.rfc-editor.org/rfc/rfc8832.html#section-8.1 + kDCEP = 50, + // https://www.rfc-editor.org/rfc/rfc8831.html#section-8 + kString = 51, + kBinaryPartial = 52, // Deprecated + kBinary = 53, + kStringPartial = 54, // Deprecated + kStringEmpty = 56, + kBinaryEmpty = 57, +}; + +WebrtcPPID ToPPID(DataMessageType message_type, size_t size) { + switch (message_type) { + case webrtc::DataMessageType::kControl: + return WebrtcPPID::kDCEP; + case webrtc::DataMessageType::kText: + return size > 0 ? WebrtcPPID::kString : WebrtcPPID::kStringEmpty; + case webrtc::DataMessageType::kBinary: + return size > 0 ? 
WebrtcPPID::kBinary : WebrtcPPID::kBinaryEmpty; + } +} + +absl::optional ToDataMessageType(dcsctp::PPID ppid) { + switch (static_cast(ppid.value())) { + case WebrtcPPID::kDCEP: + return webrtc::DataMessageType::kControl; + case WebrtcPPID::kString: + case WebrtcPPID::kStringPartial: + case WebrtcPPID::kStringEmpty: + return webrtc::DataMessageType::kText; + case WebrtcPPID::kBinary: + case WebrtcPPID::kBinaryPartial: + case WebrtcPPID::kBinaryEmpty: + return webrtc::DataMessageType::kBinary; + } + return absl::nullopt; +} + +bool IsEmptyPPID(dcsctp::PPID ppid) { + WebrtcPPID webrtc_ppid = static_cast(ppid.value()); + return webrtc_ppid == WebrtcPPID::kStringEmpty || + webrtc_ppid == WebrtcPPID::kBinaryEmpty; +} + +// Print outs all sent and received packets to the logs, at LS_VERBOSE severity. +class TextPcapPacketObserver : public dcsctp::PacketObserver { + public: + explicit TextPcapPacketObserver(absl::string_view name) : name_(name) {} + + void OnSentPacket(dcsctp::TimeMs now, rtc::ArrayView payload) { + PrintPacket("O ", now, payload); + } + + void OnReceivedPacket(dcsctp::TimeMs now, + rtc::ArrayView payload) { + PrintPacket("I ", now, payload); + } + + private: + void PrintPacket(absl::string_view prefix, + dcsctp::TimeMs now, + rtc::ArrayView payload) { + rtc::StringBuilder s; + s << prefix; + int64_t remaining = *now % (24 * 60 * 60 * 1000); + int hours = remaining / (60 * 60 * 1000); + remaining = remaining % (60 * 60 * 1000); + int minutes = remaining / (60 * 1000); + remaining = remaining % (60 * 1000); + int seconds = remaining / 1000; + int ms = remaining % 1000; + s.AppendFormat("%02d:%02d:%02d.%03d", hours, minutes, seconds, ms); + s << " 0000"; + for (uint8_t byte : payload) { + s.AppendFormat(" %02x", byte); + } + s << " # SCTP_PACKET " << name_; + RTC_LOG(LS_VERBOSE) << s.str(); + } + + const std::string name_; +}; + +} // namespace + +DcSctpTransport::DcSctpTransport(rtc::Thread* network_thread, + rtc::PacketTransportInternal* transport, + 
Clock* clock) + : network_thread_(network_thread), + transport_(transport), + clock_(clock), + random_(clock_->TimeInMicroseconds()), + task_queue_timeout_factory_( + *network_thread, + [this]() { return TimeMillis(); }, + [this](dcsctp::TimeoutID timeout_id) { + socket_->HandleTimeout(timeout_id); + }) { + RTC_DCHECK_RUN_ON(network_thread_); + static int instance_count = 0; + rtc::StringBuilder sb; + sb << debug_name_ << instance_count++; + debug_name_ = sb.Release(); + ConnectTransportSignals(); +} + +DcSctpTransport::~DcSctpTransport() { + if (socket_) { + socket_->Close(); + } +} + +void DcSctpTransport::SetDtlsTransport( + rtc::PacketTransportInternal* transport) { + RTC_DCHECK_RUN_ON(network_thread_); + DisconnectTransportSignals(); + transport_ = transport; + ConnectTransportSignals(); + MaybeConnectSocket(); +} + +bool DcSctpTransport::Start(int local_sctp_port, + int remote_sctp_port, + int max_message_size) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(max_message_size > 0); + + RTC_LOG(LS_INFO) << debug_name_ << "->Start(local=" << local_sctp_port + << ", remote=" << remote_sctp_port + << ", max_message_size=" << max_message_size << ")"; + + if (!socket_) { + dcsctp::DcSctpOptions options; + options.local_port = local_sctp_port; + options.remote_port = remote_sctp_port; + options.max_message_size = max_message_size; + + std::unique_ptr packet_observer; + if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE)) { + packet_observer = std::make_unique(debug_name_); + } + + socket_ = std::make_unique( + debug_name_, *this, std::move(packet_observer), options); + } else { + if (local_sctp_port != socket_->options().local_port || + remote_sctp_port != socket_->options().remote_port) { + RTC_LOG(LS_ERROR) + << debug_name_ << "->Start(local=" << local_sctp_port + << ", remote=" << remote_sctp_port + << "): Can't change ports on already started transport."; + return false; + } + socket_->SetMaxMessageSize(max_message_size); + } + + MaybeConnectSocket(); + + return true; +} + 
+bool DcSctpTransport::OpenStream(int sid) { + RTC_LOG(LS_INFO) << debug_name_ << "->OpenStream(" << sid << ")."; + if (!socket_) { + RTC_LOG(LS_ERROR) << debug_name_ << "->OpenStream(sid=" << sid + << "): Transport is not started."; + return false; + } + return true; +} + +bool DcSctpTransport::ResetStream(int sid) { + RTC_LOG(LS_INFO) << debug_name_ << "->ResetStream(" << sid << ")."; + if (!socket_) { + RTC_LOG(LS_ERROR) << debug_name_ << "->OpenStream(sid=" << sid + << "): Transport is not started."; + return false; + } + dcsctp::StreamID streams[1] = {dcsctp::StreamID(static_cast(sid))}; + socket_->ResetStreams(streams); + return true; +} + +bool DcSctpTransport::SendData(int sid, + const SendDataParams& params, + const rtc::CopyOnWriteBuffer& payload, + cricket::SendDataResult* result) { + RTC_DCHECK_RUN_ON(network_thread_); + + RTC_LOG(LS_VERBOSE) << debug_name_ << "->SendData(sid=" << sid + << ", type=" << static_cast(params.type) + << ", length=" << payload.size() << ")."; + + if (!socket_) { + RTC_LOG(LS_ERROR) << debug_name_ + << "->SendData(...): Transport is not started."; + *result = cricket::SDR_ERROR; + return false; + } + + auto max_message_size = socket_->options().max_message_size; + if (max_message_size > 0 && payload.size() > max_message_size) { + RTC_LOG(LS_WARNING) << debug_name_ + << "->SendData(...): " + "Trying to send packet bigger " + "than the max message size: " + << payload.size() << " vs max of " << max_message_size; + *result = cricket::SDR_ERROR; + return false; + } + + std::vector message_payload(payload.cdata(), + payload.cdata() + payload.size()); + if (message_payload.empty()) { + // https://www.rfc-editor.org/rfc/rfc8831.html#section-6.6 + // SCTP does not support the sending of empty user messages. Therefore, if + // an empty message has to be sent, the appropriate PPID (WebRTC String + // Empty or WebRTC Binary Empty) is used, and the SCTP user message of one + // zero byte is sent. 
+ message_payload.push_back('\0'); + } + + dcsctp::DcSctpMessage message( + dcsctp::StreamID(static_cast(sid)), + dcsctp::PPID(static_cast(ToPPID(params.type, payload.size()))), + std::move(message_payload)); + + dcsctp::SendOptions send_options; + send_options.unordered = dcsctp::IsUnordered(!params.ordered); + if (params.max_rtx_ms.has_value()) { + RTC_DCHECK(*params.max_rtx_ms >= 0 && + *params.max_rtx_ms <= std::numeric_limits::max()); + send_options.lifetime = dcsctp::DurationMs(*params.max_rtx_ms); + } + if (params.max_rtx_count.has_value()) { + RTC_DCHECK(*params.max_rtx_count >= 0 && + *params.max_rtx_count <= std::numeric_limits::max()); + send_options.max_retransmissions = *params.max_rtx_count; + } + + auto error = socket_->Send(std::move(message), send_options); + switch (error) { + case dcsctp::SendStatus::kSuccess: + *result = cricket::SDR_SUCCESS; + break; + case dcsctp::SendStatus::kErrorResourceExhaustion: + *result = cricket::SDR_BLOCK; + ready_to_send_data_ = false; + break; + default: + RTC_LOG(LS_ERROR) << debug_name_ + << "->SendData(...): send() failed with error " + << dcsctp::ToString(error) << "."; + *result = cricket::SDR_ERROR; + } + + return *result == cricket::SDR_SUCCESS; +} + +bool DcSctpTransport::ReadyToSendData() { + return ready_to_send_data_; +} + +int DcSctpTransport::max_message_size() const { + if (!socket_) { + RTC_LOG(LS_ERROR) << debug_name_ + << "->max_message_size(...): Transport is not started."; + return 0; + } + return socket_->options().max_message_size; +} + +absl::optional DcSctpTransport::max_outbound_streams() const { + if (!socket_) + return absl::nullopt; + return socket_->options().announced_maximum_outgoing_streams; +} + +absl::optional DcSctpTransport::max_inbound_streams() const { + if (!socket_) + return absl::nullopt; + return socket_->options().announced_maximum_incoming_streams; +} + +void DcSctpTransport::set_debug_name_for_testing(const char* debug_name) { + debug_name_ = debug_name; +} + +void 
DcSctpTransport::SendPacket(rtc::ArrayView data) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(socket_); + + if (data.size() > (socket_->options().mtu)) { + RTC_LOG(LS_ERROR) << debug_name_ + << "->SendPacket(...): " + "SCTP seems to have made a packet that is bigger " + "than its official MTU: " + << data.size() << " vs max of " << socket_->options().mtu; + return; + } + TRACE_EVENT0("webrtc", "DcSctpTransport::SendPacket"); + + if (!transport_ || !transport_->writable()) + return; + + RTC_LOG(LS_VERBOSE) << debug_name_ << "->SendPacket(length=" << data.size() + << ")"; + + auto result = + transport_->SendPacket(reinterpret_cast(data.data()), + data.size(), rtc::PacketOptions(), 0); + + if (result < 0) { + RTC_LOG(LS_WARNING) << debug_name_ << "->SendPacket(length=" << data.size() + << ") failed with error: " << transport_->GetError() + << "."; + } +} + +std::unique_ptr DcSctpTransport::CreateTimeout() { + return task_queue_timeout_factory_.CreateTimeout(); +} + +dcsctp::TimeMs DcSctpTransport::TimeMillis() { + return dcsctp::TimeMs(clock_->TimeInMilliseconds()); +} + +uint32_t DcSctpTransport::GetRandomInt(uint32_t low, uint32_t high) { + return random_.Rand(low, high); +} + +void DcSctpTransport::NotifyOutgoingMessageBufferEmpty() { + if (!ready_to_send_data_) { + ready_to_send_data_ = true; + SignalReadyToSendData(); + } +} + +void DcSctpTransport::OnMessageReceived(dcsctp::DcSctpMessage message) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_LOG(LS_VERBOSE) << debug_name_ << "->OnMessageReceived(sid=" + << message.stream_id().value() + << ", ppid=" << message.ppid().value() + << ", length=" << message.payload().size() << ")."; + cricket::ReceiveDataParams receive_data_params; + receive_data_params.sid = message.stream_id().value(); + auto type = ToDataMessageType(message.ppid()); + if (!type.has_value()) { + RTC_LOG(LS_VERBOSE) << debug_name_ + << "->OnMessageReceived(): Received an unknown PPID " + << message.ppid().value() + << " on an SCTP packet. 
Dropping."; + } + receive_data_params.type = *type; + // No seq_num available from dcSCTP + receive_data_params.seq_num = 0; + receive_buffer_.Clear(); + if (!IsEmptyPPID(message.ppid())) + receive_buffer_.AppendData(message.payload().data(), + message.payload().size()); + + SignalDataReceived(receive_data_params, receive_buffer_); +} + +void DcSctpTransport::OnError(dcsctp::ErrorKind error, + absl::string_view message) { + RTC_LOG(LS_ERROR) << debug_name_ + << "->OnError(error=" << dcsctp::ToString(error) + << ", message=" << message << ")."; +} + +void DcSctpTransport::OnAborted(dcsctp::ErrorKind error, + absl::string_view message) { + RTC_LOG(LS_ERROR) << debug_name_ + << "->OnAborted(error=" << dcsctp::ToString(error) + << ", message=" << message << ")."; + ready_to_send_data_ = false; +} + +void DcSctpTransport::OnConnected() { + RTC_LOG(LS_INFO) << debug_name_ << "->OnConnected()."; + ready_to_send_data_ = true; + SignalReadyToSendData(); + SignalAssociationChangeCommunicationUp(); +} + +void DcSctpTransport::OnClosed() { + RTC_LOG(LS_INFO) << debug_name_ << "->OnClosed()."; + ready_to_send_data_ = false; +} + +void DcSctpTransport::OnConnectionRestarted() { + RTC_LOG(LS_INFO) << debug_name_ << "->OnConnectionRestarted()."; +} + +void DcSctpTransport::OnStreamsResetFailed( + rtc::ArrayView outgoing_streams, + absl::string_view reason) { + // TODO(orphis): Need a test to check for correct behavior + for (auto& stream_id : outgoing_streams) { + RTC_LOG(LS_WARNING) + << debug_name_ + << "->OnStreamsResetFailed(...): Outgoing stream reset failed" + << ", sid=" << stream_id.value() << ", reason: " << reason << "."; + } +} + +void DcSctpTransport::OnStreamsResetPerformed( + rtc::ArrayView outgoing_streams) { + for (auto& stream_id : outgoing_streams) { + RTC_LOG(LS_INFO) << debug_name_ + << "->OnStreamsResetPerformed(...): Outgoing stream reset" + << ", sid=" << stream_id.value(); + SignalClosingProcedureComplete(stream_id.value()); + } +} + +void 
DcSctpTransport::OnIncomingStreamsReset( + rtc::ArrayView incoming_streams) { + for (auto& stream_id : incoming_streams) { + RTC_LOG(LS_INFO) << debug_name_ + << "->OnIncomingStreamsReset(...): Incoming stream reset" + << ", sid=" << stream_id.value(); + SignalClosingProcedureStartedRemotely(stream_id.value()); + SignalClosingProcedureComplete(stream_id.value()); + } +} + +void DcSctpTransport::ConnectTransportSignals() { + RTC_DCHECK_RUN_ON(network_thread_); + if (!transport_) { + return; + } + transport_->SignalWritableState.connect( + this, &DcSctpTransport::OnTransportWritableState); + transport_->SignalReadPacket.connect(this, + &DcSctpTransport::OnTransportReadPacket); + transport_->SignalClosed.connect(this, &DcSctpTransport::OnTransportClosed); +} + +void DcSctpTransport::DisconnectTransportSignals() { + RTC_DCHECK_RUN_ON(network_thread_); + if (!transport_) { + return; + } + transport_->SignalWritableState.disconnect(this); + transport_->SignalReadPacket.disconnect(this); + transport_->SignalClosed.disconnect(this); +} + +void DcSctpTransport::OnTransportWritableState( + rtc::PacketTransportInternal* transport) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK_EQ(transport_, transport); + + RTC_LOG(LS_VERBOSE) << debug_name_ + << "->OnTransportWritableState(), writable=" + << transport->writable(); + + MaybeConnectSocket(); +} + +void DcSctpTransport::OnTransportReadPacket( + rtc::PacketTransportInternal* transport, + const char* data, + size_t length, + const int64_t& /* packet_time_us */, + int flags) { + if (flags) { + // We are only interested in SCTP packets. 
+ return; + } + + RTC_LOG(LS_VERBOSE) << debug_name_ + << "->OnTransportReadPacket(), length=" << length; + if (socket_) { + socket_->ReceivePacket(rtc::ArrayView( + reinterpret_cast(data), length)); + } +} + +void DcSctpTransport::OnTransportClosed( + rtc::PacketTransportInternal* transport) { + RTC_LOG(LS_VERBOSE) << debug_name_ << "->OnTransportClosed()."; + SignalClosedAbruptly(); +} + +void DcSctpTransport::MaybeConnectSocket() { + if (transport_ && transport_->writable() && socket_ && + socket_->state() == dcsctp::SocketState::kClosed) { + socket_->Connect(); + } +} +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h new file mode 100644 index 000000000..f154c4492 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MEDIA_SCTP_DCSCTP_TRANSPORT_H_ +#define MEDIA_SCTP_DCSCTP_TRANSPORT_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "media/sctp/sctp_transport_internal.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/types.h" +#include "net/dcsctp/timer/task_queue_timeout.h" +#include "p2p/base/packet_transport_internal.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/random.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +class DcSctpTransport : public cricket::SctpTransportInternal, + public dcsctp::DcSctpSocketCallbacks, + public sigslot::has_slots<> { + public: + DcSctpTransport(rtc::Thread* network_thread, + rtc::PacketTransportInternal* transport, + Clock* clock); + ~DcSctpTransport() override; + + // cricket::SctpTransportInternal + void SetDtlsTransport(rtc::PacketTransportInternal* transport) override; + bool Start(int local_sctp_port, + int remote_sctp_port, + int max_message_size) override; + bool OpenStream(int sid) override; + bool ResetStream(int sid) override; + bool SendData(int sid, + const SendDataParams& params, + const rtc::CopyOnWriteBuffer& payload, + cricket::SendDataResult* result = nullptr) override; + bool ReadyToSendData() override; + int max_message_size() const override; + absl::optional max_outbound_streams() const override; + absl::optional max_inbound_streams() const override; + void set_debug_name_for_testing(const char* debug_name) override; + + private: + // dcsctp::DcSctpSocketCallbacks + void SendPacket(rtc::ArrayView data) override; + std::unique_ptr CreateTimeout() override; + dcsctp::TimeMs TimeMillis() override; + uint32_t GetRandomInt(uint32_t low, uint32_t high) override; + void NotifyOutgoingMessageBufferEmpty() override; + void 
OnMessageReceived(dcsctp::DcSctpMessage message) override; + void OnError(dcsctp::ErrorKind error, absl::string_view message) override; + void OnAborted(dcsctp::ErrorKind error, absl::string_view message) override; + void OnConnected() override; + void OnClosed() override; + void OnConnectionRestarted() override; + void OnStreamsResetFailed( + rtc::ArrayView outgoing_streams, + absl::string_view reason) override; + void OnStreamsResetPerformed( + rtc::ArrayView outgoing_streams) override; + void OnIncomingStreamsReset( + rtc::ArrayView incoming_streams) override; + + // Transport callbacks + void ConnectTransportSignals(); + void DisconnectTransportSignals(); + void OnTransportWritableState(rtc::PacketTransportInternal* transport); + void OnTransportReadPacket(rtc::PacketTransportInternal* transport, + const char* data, + size_t length, + const int64_t& /* packet_time_us */, + int flags); + void OnTransportClosed(rtc::PacketTransportInternal* transport); + + void MaybeConnectSocket(); + + rtc::Thread* network_thread_; + rtc::PacketTransportInternal* transport_; + Clock* clock_; + Random random_; + + dcsctp::TaskQueueTimeoutFactory task_queue_timeout_factory_; + std::unique_ptr socket_; + std::string debug_name_ = "DcSctpTransport"; + rtc::CopyOnWriteBuffer receive_buffer_; + + bool ready_to_send_data_ = false; +}; + +} // namespace webrtc + +#endif // MEDIA_SCTP_DCSCTP_TRANSPORT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/noop.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/noop.cc deleted file mode 100644 index a3523b18b..000000000 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/noop.cc +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This file is only needed to make ninja happy on some platforms. -// On some platforms it is not possible to link an rtc_static_library -// without any source file listed in the GN target. diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc new file mode 100644 index 000000000..769a1a2b1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc @@ -0,0 +1,52 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "media/sctp/sctp_transport_factory.h" + +#include "rtc_base/system/unused.h" + +#ifdef WEBRTC_HAVE_DCSCTP +#include "media/sctp/dcsctp_transport.h" // nogncheck +#include "system_wrappers/include/clock.h" // nogncheck +#include "system_wrappers/include/field_trial.h" // nogncheck +#endif + +#ifdef WEBRTC_HAVE_USRSCTP +#include "media/sctp/usrsctp_transport.h" // nogncheck +#endif + +namespace cricket { + +SctpTransportFactory::SctpTransportFactory(rtc::Thread* network_thread) + : network_thread_(network_thread), use_dcsctp_("Enabled", false) { + RTC_UNUSED(network_thread_); + webrtc::ParseFieldTrial({&use_dcsctp_}, "WebRTC-DataChannel-Dcsctp"); +} + +std::unique_ptr +SctpTransportFactory::CreateSctpTransport( + rtc::PacketTransportInternal* transport) { + std::unique_ptr result; +#ifdef WEBRTC_HAVE_DCSCTP + if (use_dcsctp_.Get()) { + result = std::unique_ptr(new webrtc::DcSctpTransport( + network_thread_, transport, webrtc::Clock::GetRealTimeClock())); + } +#endif +#ifdef WEBRTC_HAVE_USRSCTP + if (!result) { + result = std::unique_ptr( + new UsrsctpTransport(network_thread_, transport)); + } +#endif + return result; +} + +} // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h new file mode 100644 index 000000000..ed7c2163d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MEDIA_SCTP_SCTP_TRANSPORT_FACTORY_H_ +#define MEDIA_SCTP_SCTP_TRANSPORT_FACTORY_H_ + +#include + +#include "api/transport/sctp_transport_factory_interface.h" +#include "media/sctp/sctp_transport_internal.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/thread.h" + +namespace cricket { + +class SctpTransportFactory : public webrtc::SctpTransportFactoryInterface { + public: + explicit SctpTransportFactory(rtc::Thread* network_thread); + + std::unique_ptr CreateSctpTransport( + rtc::PacketTransportInternal* transport) override; + + private: + rtc::Thread* network_thread_; + webrtc::FieldTrialFlag use_dcsctp_; +}; + +} // namespace cricket + +#endif // MEDIA_SCTP_SCTP_TRANSPORT_FACTORY_H__ diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h index dc8ac4558..96c35ffb9 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_internal.h @@ -18,6 +18,7 @@ #include #include +#include "api/transport/data_channel_transport_interface.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/thread.h" // For SendDataParams/ReceiveDataParams. @@ -101,7 +102,8 @@ class SctpTransportInternal { // usrsctp that will then post the network interface). // Returns true iff successful data somewhere on the send-queue/network. // Uses |params.ssrc| as the SCTP sid. 
- virtual bool SendData(const SendDataParams& params, + virtual bool SendData(int sid, + const webrtc::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, SendDataResult* result = nullptr) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.cc similarity index 76% rename from TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.cc rename to TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.cc index 7c2eee3b1..d43c01720 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.cc @@ -20,6 +20,7 @@ enum PreservedErrno { // Successful return value from usrsctp callbacks. Is not actually used by // usrsctp, but all example programs for usrsctp use 1 as their return value. constexpr int kSctpSuccessReturn = 1; +constexpr int kSctpErrorReturn = 0; } // namespace @@ -29,15 +30,17 @@ constexpr int kSctpSuccessReturn = 1; #include #include +#include #include "absl/algorithm/container.h" #include "absl/base/attributes.h" #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "media/base/codec.h" #include "media/base/media_channel.h" #include "media/base/media_constants.h" #include "media/base/stream_params.h" -#include "media/sctp/sctp_transport.h" +#include "media/sctp/usrsctp_transport.h" #include "p2p/base/dtls_transport_internal.h" // For PF_NORMAL #include "rtc_base/arraysize.h" #include "rtc_base/copy_on_write_buffer.h" @@ -46,92 +49,69 @@ constexpr int kSctpSuccessReturn = 1; #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/string_utils.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #include "rtc_base/trace_event.h" namespace { // The biggest SCTP packet. 
Starting from a 'safe' wire MTU value of 1280, -// take off 80 bytes for DTLS/TURN/TCP/IP overhead. -static constexpr size_t kSctpMtu = 1200; +// take off 85 bytes for DTLS/TURN/TCP/IP and ciphertext overhead. +// +// Additionally, it's possible that TURN adds an additional 4 bytes of overhead +// after a channel has been established, so we subtract an additional 4 bytes. +// +// 1280 IPV6 MTU +// -40 IPV6 header +// -8 UDP +// -24 GCM Cipher +// -13 DTLS record header +// -4 TURN ChannelData +// = 1191 bytes. +static constexpr size_t kSctpMtu = 1191; // Set the initial value of the static SCTP Data Engines reference count. ABSL_CONST_INIT int g_usrsctp_usage_count = 0; ABSL_CONST_INIT bool g_usrsctp_initialized_ = false; ABSL_CONST_INIT webrtc::GlobalMutex g_usrsctp_lock_(absl::kConstInit); +ABSL_CONST_INIT char kZero[] = {'\0'}; // DataMessageType is used for the SCTP "Payload Protocol Identifier", as // defined in http://tools.ietf.org/html/rfc4960#section-14.4 // // For the list of IANA approved values see: +// https://tools.ietf.org/html/rfc8831 Sec. 8 // http://www.iana.org/assignments/sctp-parameters/sctp-parameters.xml // The value is not used by SCTP itself. It indicates the protocol running // on top of SCTP. enum { PPID_NONE = 0, // No protocol is specified. - // Matches the PPIDs in mozilla source and - // https://datatracker.ietf.org/doc/draft-ietf-rtcweb-data-protocol Sec. 9 - // They're not yet assigned by IANA. PPID_CONTROL = 50, - PPID_BINARY_PARTIAL = 52, + PPID_TEXT_LAST = 51, + PPID_BINARY_PARTIAL = 52, // Deprecated PPID_BINARY_LAST = 53, - PPID_TEXT_PARTIAL = 54, - PPID_TEXT_LAST = 51 -}; - -// Maps SCTP transport ID to SctpTransport object, necessary in send threshold -// callback and outgoing packet callback. -// TODO(crbug.com/1076703): Remove once the underlying problem is fixed or -// workaround is provided in usrsctp. 
-class SctpTransportMap { - public: - SctpTransportMap() = default; - - // Assigns a new unused ID to the following transport. - uintptr_t Register(cricket::SctpTransport* transport) { - webrtc::MutexLock lock(&lock_); - // usrsctp_connect fails with a value of 0... - if (next_id_ == 0) { - ++next_id_; - } - // In case we've wrapped around and need to find an empty spot from a - // removed transport. Assumes we'll never be full. - while (map_.find(next_id_) != map_.end()) { - ++next_id_; - if (next_id_ == 0) { - ++next_id_; - } - }; - map_[next_id_] = transport; - return next_id_++; - } - - // Returns true if found. - bool Deregister(uintptr_t id) { - webrtc::MutexLock lock(&lock_); - return map_.erase(id) > 0; - } - - cricket::SctpTransport* Retrieve(uintptr_t id) const { - webrtc::MutexLock lock(&lock_); - auto it = map_.find(id); - if (it == map_.end()) { - return nullptr; - } - return it->second; - } - - private: - mutable webrtc::Mutex lock_; - - uintptr_t next_id_ RTC_GUARDED_BY(lock_) = 0; - std::unordered_map map_ - RTC_GUARDED_BY(lock_); + PPID_TEXT_PARTIAL = 54, // Deprecated + PPID_TEXT_EMPTY = 56, + PPID_BINARY_EMPTY = 57, }; // Should only be modified by UsrSctpWrapper. -ABSL_CONST_INIT SctpTransportMap* g_transport_map_ = nullptr; +ABSL_CONST_INIT cricket::UsrsctpTransportMap* g_transport_map_ = nullptr; + +// Helper that will call C's free automatically. +// TODO(b/181900299): Figure out why unique_ptr with a custom deleter is causing +// issues in a certain build environment. +class AutoFreedPointer { + public: + explicit AutoFreedPointer(void* ptr) : ptr_(ptr) {} + AutoFreedPointer(AutoFreedPointer&& o) : ptr_(o.ptr_) { o.ptr_ = nullptr; } + ~AutoFreedPointer() { free(ptr_); } + + void* get() const { return ptr_; } + + private: + void* ptr_; +}; // Helper for logging SCTP messages. #if defined(__GNUC__) @@ -149,44 +129,41 @@ void DebugSctpPrintf(const char* format, ...) { } // Get the PPID to use for the terminating fragment of this type. 
-uint32_t GetPpid(cricket::DataMessageType type) { +uint32_t GetPpid(webrtc::DataMessageType type, size_t size) { switch (type) { - default: - case cricket::DMT_NONE: - return PPID_NONE; - case cricket::DMT_CONTROL: + case webrtc::DataMessageType::kControl: return PPID_CONTROL; - case cricket::DMT_BINARY: - return PPID_BINARY_LAST; - case cricket::DMT_TEXT: - return PPID_TEXT_LAST; + case webrtc::DataMessageType::kBinary: + return size > 0 ? PPID_BINARY_LAST : PPID_BINARY_EMPTY; + case webrtc::DataMessageType::kText: + return size > 0 ? PPID_TEXT_LAST : PPID_TEXT_EMPTY; } } -bool GetDataMediaType(uint32_t ppid, cricket::DataMessageType* dest) { +bool GetDataMediaType(uint32_t ppid, webrtc::DataMessageType* dest) { RTC_DCHECK(dest != NULL); switch (ppid) { case PPID_BINARY_PARTIAL: case PPID_BINARY_LAST: - *dest = cricket::DMT_BINARY; + case PPID_BINARY_EMPTY: + *dest = webrtc::DataMessageType::kBinary; return true; case PPID_TEXT_PARTIAL: case PPID_TEXT_LAST: - *dest = cricket::DMT_TEXT; + case PPID_TEXT_EMPTY: + *dest = webrtc::DataMessageType::kText; return true; case PPID_CONTROL: - *dest = cricket::DMT_CONTROL; + *dest = webrtc::DataMessageType::kControl; return true; - - case PPID_NONE: - *dest = cricket::DMT_NONE; - return true; - - default: - return false; } + return false; +} + +bool IsEmptyPPID(uint32_t ppid) { + return ppid == PPID_BINARY_EMPTY || ppid == PPID_TEXT_EMPTY; } // Log the packet in text2pcap format, if log level is at LS_VERBOSE. @@ -226,11 +203,13 @@ void VerboseLogPacket(const void* data, size_t length, int direction) { // Creates the sctp_sendv_spa struct used for setting flags in the // sctp_sendv() call. 
-sctp_sendv_spa CreateSctpSendParams(const cricket::SendDataParams& params) { +sctp_sendv_spa CreateSctpSendParams(int sid, + const webrtc::SendDataParams& params, + size_t size) { struct sctp_sendv_spa spa = {0}; spa.sendv_flags |= SCTP_SEND_SNDINFO_VALID; - spa.sendv_sndinfo.snd_sid = params.sid; - spa.sendv_sndinfo.snd_ppid = rtc::HostToNetwork32(GetPpid(params.type)); + spa.sendv_sndinfo.snd_sid = sid; + spa.sendv_sndinfo.snd_ppid = rtc::HostToNetwork32(GetPpid(params.type, size)); // Explicitly marking the EOR flag turns the usrsctp_sendv call below into a // non atomic operation. This means that the sctp lib might only accept the // message partially. This is done in order to improve throughput, so that we @@ -238,18 +217,22 @@ sctp_sendv_spa CreateSctpSendParams(const cricket::SendDataParams& params) { // example. spa.sendv_sndinfo.snd_flags |= SCTP_EOR; - // Ordered implies reliable. if (!params.ordered) { spa.sendv_sndinfo.snd_flags |= SCTP_UNORDERED; - if (params.max_rtx_count >= 0 || params.max_rtx_ms == 0) { - spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; - spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_RTX; - spa.sendv_prinfo.pr_value = params.max_rtx_count; - } else { - spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; - spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_TTL; - spa.sendv_prinfo.pr_value = params.max_rtx_ms; - } + } + if (params.max_rtx_count.has_value()) { + RTC_DCHECK(*params.max_rtx_count >= 0 && + *params.max_rtx_count <= std::numeric_limits::max()); + spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; + spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_RTX; + spa.sendv_prinfo.pr_value = *params.max_rtx_count; + } + if (params.max_rtx_ms.has_value()) { + RTC_DCHECK(*params.max_rtx_ms >= 0 && + *params.max_rtx_ms <= std::numeric_limits::max()); + spa.sendv_flags |= SCTP_SEND_PRINFO_VALID; + spa.sendv_prinfo.pr_policy = SCTP_PR_SCTP_TTL; + spa.sendv_prinfo.pr_value = *params.max_rtx_ms; } return spa; } @@ -257,9 +240,75 @@ sctp_sendv_spa CreateSctpSendParams(const 
cricket::SendDataParams& params) { namespace cricket { +// Maps SCTP transport ID to UsrsctpTransport object, necessary in send +// threshold callback and outgoing packet callback. It also provides a facility +// to safely post a task to an UsrsctpTransport's network thread from another +// thread. +class UsrsctpTransportMap { + public: + UsrsctpTransportMap() = default; + + // Assigns a new unused ID to the following transport. + uintptr_t Register(cricket::UsrsctpTransport* transport) { + webrtc::MutexLock lock(&lock_); + // usrsctp_connect fails with a value of 0... + if (next_id_ == 0) { + ++next_id_; + } + // In case we've wrapped around and need to find an empty spot from a + // removed transport. Assumes we'll never be full. + while (map_.find(next_id_) != map_.end()) { + ++next_id_; + if (next_id_ == 0) { + ++next_id_; + } + } + map_[next_id_] = transport; + return next_id_++; + } + + // Returns true if found. + bool Deregister(uintptr_t id) { + webrtc::MutexLock lock(&lock_); + return map_.erase(id) > 0; + } + + // Posts |action| to the network thread of the transport identified by |id| + // and returns true if found, all while holding a lock to protect against the + // transport being simultaneously deleted/deregistered, or returns false if + // not found. 
+ template + bool PostToTransportThread(uintptr_t id, F action) const { + webrtc::MutexLock lock(&lock_); + UsrsctpTransport* transport = RetrieveWhileHoldingLock(id); + if (!transport) { + return false; + } + transport->network_thread_->PostTask(ToQueuedTask( + transport->task_safety_, + [transport, action{std::move(action)}]() { action(transport); })); + return true; + } + + private: + UsrsctpTransport* RetrieveWhileHoldingLock(uintptr_t id) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_) { + auto it = map_.find(id); + if (it == map_.end()) { + return nullptr; + } + return it->second; + } + + mutable webrtc::Mutex lock_; + + uintptr_t next_id_ RTC_GUARDED_BY(lock_) = 0; + std::unordered_map map_ RTC_GUARDED_BY(lock_); +}; + // Handles global init/deinit, and mapping from usrsctp callbacks to -// SctpTransport calls. -class SctpTransport::UsrSctpWrapper { +// UsrsctpTransport calls. +class UsrsctpTransport::UsrSctpWrapper { public: static void InitializeUsrSctp() { RTC_LOG(LS_INFO) << __FUNCTION__; @@ -318,7 +367,7 @@ class SctpTransport::UsrSctpWrapper { // send in the SCTP INIT message. usrsctp_sysctl_set_sctp_nr_outgoing_streams_default(kMaxSctpStreams); - g_transport_map_ = new SctpTransportMap(); + g_transport_map_ = new UsrsctpTransportMap(); } static void UninitializeUsrSctp() { @@ -369,14 +418,6 @@ class SctpTransport::UsrSctpWrapper { << "OnSctpOutboundPacket called after usrsctp uninitialized?"; return EINVAL; } - SctpTransport* transport = - g_transport_map_->Retrieve(reinterpret_cast(addr)); - if (!transport) { - RTC_LOG(LS_ERROR) - << "OnSctpOutboundPacket: Failed to get transport for socket ID " - << addr; - return EINVAL; - } RTC_LOG(LS_VERBOSE) << "global OnSctpOutboundPacket():" "addr: " << addr << "; length: " << length @@ -384,13 +425,24 @@ class SctpTransport::UsrSctpWrapper { << "; set_df: " << rtc::ToHex(set_df); VerboseLogPacket(data, length, SCTP_DUMP_OUTBOUND); + // Note: We have to copy the data; the caller will delete it. 
rtc::CopyOnWriteBuffer buf(reinterpret_cast(data), length); - // TODO(deadbeef): Why do we need an AsyncInvoke here? We're already on the - // right thread and don't need to unwind the stack. - transport->invoker_.AsyncInvoke( - RTC_FROM_HERE, transport->network_thread_, - rtc::Bind(&SctpTransport::OnPacketFromSctpToNetwork, transport, buf)); + + // PostsToTransportThread protects against the transport being + // simultaneously deregistered/deleted, since this callback may come from + // the SCTP timer thread and thus race with the network thread. + bool found = g_transport_map_->PostToTransportThread( + reinterpret_cast(addr), [buf](UsrsctpTransport* transport) { + transport->OnPacketFromSctpToNetwork(buf); + }); + if (!found) { + RTC_LOG(LS_ERROR) + << "OnSctpOutboundPacket: Failed to get transport for socket ID " + << addr << "; possibly was already destroyed."; + return EINVAL; + } + return 0; } @@ -405,89 +457,123 @@ class SctpTransport::UsrSctpWrapper { struct sctp_rcvinfo rcv, int flags, void* ulp_info) { - SctpTransport* transport = GetTransportFromSocket(sock); - if (!transport) { + AutoFreedPointer owned_data(data); + + absl::optional id = GetTransportIdFromSocket(sock); + if (!id) { RTC_LOG(LS_ERROR) - << "OnSctpInboundPacket: Failed to get transport for socket " << sock - << "; possibly was already destroyed."; - free(data); - return 0; + << "OnSctpInboundPacket: Failed to get transport ID from socket " + << sock; + return kSctpErrorReturn; } - // Sanity check that both methods of getting the SctpTransport pointer - // yield the same result. 
- RTC_CHECK_EQ(transport, static_cast(ulp_info)); - int result = - transport->OnDataOrNotificationFromSctp(data, length, rcv, flags); - free(data); - return result; + + if (!g_transport_map_) { + RTC_LOG(LS_ERROR) + << "OnSctpInboundPacket called after usrsctp uninitialized?"; + return kSctpErrorReturn; + } + // PostsToTransportThread protects against the transport being + // simultaneously deregistered/deleted, since this callback may come from + // the SCTP timer thread and thus race with the network thread. + bool found = g_transport_map_->PostToTransportThread( + *id, [owned_data{std::move(owned_data)}, length, rcv, + flags](UsrsctpTransport* transport) { + transport->OnDataOrNotificationFromSctp(owned_data.get(), length, rcv, + flags); + }); + if (!found) { + RTC_LOG(LS_ERROR) + << "OnSctpInboundPacket: Failed to get transport for socket ID " + << *id << "; possibly was already destroyed."; + return kSctpErrorReturn; + } + return kSctpSuccessReturn; } - static SctpTransport* GetTransportFromSocket(struct socket* sock) { + static absl::optional GetTransportIdFromSocket( + struct socket* sock) { + absl::optional ret; struct sockaddr* addrs = nullptr; int naddrs = usrsctp_getladdrs(sock, 0, &addrs); if (naddrs <= 0 || addrs[0].sa_family != AF_CONN) { - return nullptr; + return ret; } // usrsctp_getladdrs() returns the addresses bound to this socket, which - // contains the SctpTransport id as sconn_addr. Read the id, + // contains the UsrsctpTransport id as sconn_addr. Read the id, // then free the list of addresses once we have the pointer. We only open // AF_CONN sockets, and they should all have the sconn_addr set to the // id of the transport that created them, so [0] is as good as any other. 
struct sockaddr_conn* sconn = reinterpret_cast(&addrs[0]); - if (!g_transport_map_) { - RTC_LOG(LS_ERROR) - << "GetTransportFromSocket called after usrsctp uninitialized?"; - usrsctp_freeladdrs(addrs); - return nullptr; - } - SctpTransport* transport = g_transport_map_->Retrieve( - reinterpret_cast(sconn->sconn_addr)); + ret = reinterpret_cast(sconn->sconn_addr); usrsctp_freeladdrs(addrs); - return transport; + return ret; } // TODO(crbug.com/webrtc/11899): This is a legacy callback signature, remove // when usrsctp is updated. static int SendThresholdCallback(struct socket* sock, uint32_t sb_free) { - // Fired on our I/O thread. SctpTransport::OnPacketReceived() gets + // Fired on our I/O thread. UsrsctpTransport::OnPacketReceived() gets // a packet containing acknowledgments, which goes into usrsctp_conninput, // and then back here. - SctpTransport* transport = GetTransportFromSocket(sock); - if (!transport) { + absl::optional id = GetTransportIdFromSocket(sock); + if (!id) { RTC_LOG(LS_ERROR) - << "SendThresholdCallback: Failed to get transport for socket " - << sock << "; possibly was already destroyed."; + << "SendThresholdCallback: Failed to get transport ID from socket " + << sock; return 0; } - transport->OnSendThresholdCallback(); + if (!g_transport_map_) { + RTC_LOG(LS_ERROR) + << "SendThresholdCallback called after usrsctp uninitialized?"; + return 0; + } + bool found = g_transport_map_->PostToTransportThread( + *id, [](UsrsctpTransport* transport) { + transport->OnSendThresholdCallback(); + }); + if (!found) { + RTC_LOG(LS_ERROR) + << "SendThresholdCallback: Failed to get transport for socket ID " + << *id << "; possibly was already destroyed."; + } return 0; } static int SendThresholdCallback(struct socket* sock, uint32_t sb_free, void* ulp_info) { - // Fired on our I/O thread. SctpTransport::OnPacketReceived() gets + // Fired on our I/O thread. 
UsrsctpTransport::OnPacketReceived() gets // a packet containing acknowledgments, which goes into usrsctp_conninput, // and then back here. - SctpTransport* transport = GetTransportFromSocket(sock); - if (!transport) { + absl::optional id = GetTransportIdFromSocket(sock); + if (!id) { RTC_LOG(LS_ERROR) - << "SendThresholdCallback: Failed to get transport for socket " - << sock << "; possibly was already destroyed."; + << "SendThresholdCallback: Failed to get transport ID from socket " + << sock; return 0; } - // Sanity check that both methods of getting the SctpTransport pointer - // yield the same result. - RTC_CHECK_EQ(transport, static_cast(ulp_info)); - transport->OnSendThresholdCallback(); + if (!g_transport_map_) { + RTC_LOG(LS_ERROR) + << "SendThresholdCallback called after usrsctp uninitialized?"; + return 0; + } + bool found = g_transport_map_->PostToTransportThread( + *id, [](UsrsctpTransport* transport) { + transport->OnSendThresholdCallback(); + }); + if (!found) { + RTC_LOG(LS_ERROR) + << "SendThresholdCallback: Failed to get transport for socket ID " + << *id << "; possibly was already destroyed."; + } return 0; } }; -SctpTransport::SctpTransport(rtc::Thread* network_thread, - rtc::PacketTransportInternal* transport) +UsrsctpTransport::UsrsctpTransport(rtc::Thread* network_thread, + rtc::PacketTransportInternal* transport) : network_thread_(network_thread), transport_(transport), was_ever_writable_(transport ? transport->writable() : false) { @@ -496,16 +582,17 @@ SctpTransport::SctpTransport(rtc::Thread* network_thread, ConnectTransportSignals(); } -SctpTransport::~SctpTransport() { +UsrsctpTransport::~UsrsctpTransport() { + RTC_DCHECK_RUN_ON(network_thread_); // Close abruptly; no reset procedure. CloseSctpSocket(); // It's not strictly necessary to reset these fields to nullptr, // but having these fields set to nullptr is a clear indication that // object was destructed. 
There was a bug in usrsctp when it - // invoked OnSctpOutboundPacket callback for destructed SctpTransport, + // invoked OnSctpOutboundPacket callback for destructed UsrsctpTransport, // which caused obscure SIGSEGV on access to these fields, // having this fields set to nullptr will make it easier to understand - // that SctpTransport was destructed and "use-after-free" bug happen. + // that UsrsctpTransport was destructed and "use-after-free" bug happen. // SIGSEGV error triggered on dereference these pointers will also // be easier to understand due to 0x0 address. All of this assumes // that ASAN is not enabled to detect "use-after-free", which is @@ -514,7 +601,8 @@ SctpTransport::~SctpTransport() { transport_ = nullptr; } -void SctpTransport::SetDtlsTransport(rtc::PacketTransportInternal* transport) { +void UsrsctpTransport::SetDtlsTransport( + rtc::PacketTransportInternal* transport) { RTC_DCHECK_RUN_ON(network_thread_); DisconnectTransportSignals(); transport_ = transport; @@ -530,9 +618,9 @@ void SctpTransport::SetDtlsTransport(rtc::PacketTransportInternal* transport) { } } -bool SctpTransport::Start(int local_sctp_port, - int remote_sctp_port, - int max_message_size) { +bool UsrsctpTransport::Start(int local_sctp_port, + int remote_sctp_port, + int max_message_size) { RTC_DCHECK_RUN_ON(network_thread_); if (local_sctp_port == -1) { local_sctp_port = kSctpDefaultPort; @@ -574,7 +662,7 @@ bool SctpTransport::Start(int local_sctp_port, return true; } -bool SctpTransport::OpenStream(int sid) { +bool UsrsctpTransport::OpenStream(int sid) { RTC_DCHECK_RUN_ON(network_thread_); if (sid > kMaxSctpSid) { RTC_LOG(LS_WARNING) << debug_name_ @@ -606,7 +694,7 @@ bool SctpTransport::OpenStream(int sid) { } } -bool SctpTransport::ResetStream(int sid) { +bool UsrsctpTransport::ResetStream(int sid) { RTC_DCHECK_RUN_ON(network_thread_); auto it = stream_status_by_sid_.find(sid); @@ -628,9 +716,10 @@ bool SctpTransport::ResetStream(int sid) { return true; } -bool 
SctpTransport::SendData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result) { +bool UsrsctpTransport::SendData(int sid, + const webrtc::SendDataParams& params, + const rtc::CopyOnWriteBuffer& payload, + SendDataResult* result) { RTC_DCHECK_RUN_ON(network_thread_); if (partial_outgoing_message_.has_value()) { @@ -641,8 +730,23 @@ bool SctpTransport::SendData(const SendDataParams& params, ready_to_send_data_ = false; return false; } + + // Do not queue data to send on a closing stream. + auto it = stream_status_by_sid_.find(sid); + if (it == stream_status_by_sid_.end() || !it->second.is_open()) { + RTC_LOG(LS_WARNING) + << debug_name_ + << "->SendData(...): " + "Not sending data because sid is unknown or closing: " + << sid; + if (result) { + *result = SDR_ERROR; + } + return false; + } + size_t payload_size = payload.size(); - OutgoingMessage message(payload, params); + OutgoingMessage message(payload, sid, params); SendDataResult send_message_result = SendMessageInternal(&message); if (result) { *result = send_message_result; @@ -665,24 +769,23 @@ bool SctpTransport::SendData(const SendDataParams& params, return true; } -SendDataResult SctpTransport::SendMessageInternal(OutgoingMessage* message) { +SendDataResult UsrsctpTransport::SendMessageInternal(OutgoingMessage* message) { RTC_DCHECK_RUN_ON(network_thread_); if (!sock_) { RTC_LOG(LS_WARNING) << debug_name_ << "->SendMessageInternal(...): " "Not sending packet with sid=" - << message->send_params().sid - << " len=" << message->size() << " before Start()."; + << message->sid() << " len=" << message->size() + << " before Start()."; return SDR_ERROR; } - if (message->send_params().type != DMT_CONTROL) { - auto it = stream_status_by_sid_.find(message->send_params().sid); - if (it == stream_status_by_sid_.end() || !it->second.is_open()) { - RTC_LOG(LS_WARNING) - << debug_name_ - << "->SendMessageInternal(...): " - "Not sending data because sid is unknown or closing: " - 
<< message->send_params().sid; + if (message->send_params().type != webrtc::DataMessageType::kControl) { + auto it = stream_status_by_sid_.find(message->sid()); + if (it == stream_status_by_sid_.end()) { + RTC_LOG(LS_WARNING) << debug_name_ + << "->SendMessageInternal(...): " + "Not sending data because sid is unknown: " + << message->sid(); return SDR_ERROR; } } @@ -694,13 +797,23 @@ SendDataResult SctpTransport::SendMessageInternal(OutgoingMessage* message) { } // Send data using SCTP. - sctp_sendv_spa spa = CreateSctpSendParams(message->send_params()); + sctp_sendv_spa spa = CreateSctpSendParams( + message->sid(), message->send_params(), message->size()); + const void* data = message->data(); + size_t data_length = message->size(); + if (message->size() == 0) { + // Empty messages are replaced by a single NUL byte on the wire as SCTP + // doesn't support empty messages. + // The PPID carries the information that the payload needs to be ignored. + data = kZero; + data_length = 1; + } // Note: this send call is not atomic because the EOR bit is set. This means // that usrsctp can partially accept this message and it is our duty to buffer // the rest. - ssize_t send_res = usrsctp_sendv( - sock_, message->data(), message->size(), NULL, 0, &spa, - rtc::checked_cast(sizeof(spa)), SCTP_SENDV_SPA, 0); + ssize_t send_res = usrsctp_sendv(sock_, data, data_length, NULL, 0, &spa, + rtc::checked_cast(sizeof(spa)), + SCTP_SENDV_SPA, 0); if (send_res < 0) { if (errno == SCTP_EWOULDBLOCK) { ready_to_send_data_ = false; @@ -716,29 +829,30 @@ SendDataResult SctpTransport::SendMessageInternal(OutgoingMessage* message) { } size_t amount_sent = static_cast(send_res); - RTC_DCHECK_LE(amount_sent, message->size()); - message->Advance(amount_sent); + RTC_DCHECK_LE(amount_sent, data_length); + if (message->size() != 0) + message->Advance(amount_sent); // Only way out now is success. 
return SDR_SUCCESS; } -bool SctpTransport::ReadyToSendData() { +bool UsrsctpTransport::ReadyToSendData() { RTC_DCHECK_RUN_ON(network_thread_); return ready_to_send_data_; } -void SctpTransport::ConnectTransportSignals() { +void UsrsctpTransport::ConnectTransportSignals() { RTC_DCHECK_RUN_ON(network_thread_); if (!transport_) { return; } transport_->SignalWritableState.connect(this, - &SctpTransport::OnWritableState); - transport_->SignalReadPacket.connect(this, &SctpTransport::OnPacketRead); - transport_->SignalClosed.connect(this, &SctpTransport::OnClosed); + &UsrsctpTransport::OnWritableState); + transport_->SignalReadPacket.connect(this, &UsrsctpTransport::OnPacketRead); + transport_->SignalClosed.connect(this, &UsrsctpTransport::OnClosed); } -void SctpTransport::DisconnectTransportSignals() { +void UsrsctpTransport::DisconnectTransportSignals() { RTC_DCHECK_RUN_ON(network_thread_); if (!transport_) { return; @@ -748,7 +862,7 @@ void SctpTransport::DisconnectTransportSignals() { transport_->SignalClosed.disconnect(this); } -bool SctpTransport::Connect() { +bool UsrsctpTransport::Connect() { RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_VERBOSE) << debug_name_ << "->Connect()."; @@ -811,7 +925,7 @@ bool SctpTransport::Connect() { return true; } -bool SctpTransport::OpenSctpSocket() { +bool UsrsctpTransport::OpenSctpSocket() { RTC_DCHECK_RUN_ON(network_thread_); if (sock_) { RTC_LOG(LS_WARNING) << debug_name_ @@ -854,7 +968,7 @@ bool SctpTransport::OpenSctpSocket() { return true; } -bool SctpTransport::ConfigureSctpSocket() { +bool UsrsctpTransport::ConfigureSctpSocket() { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(sock_); // Make the socket non-blocking. 
Connect, close, shutdown etc will not block @@ -935,7 +1049,7 @@ bool SctpTransport::ConfigureSctpSocket() { return true; } -void SctpTransport::CloseSctpSocket() { +void UsrsctpTransport::CloseSctpSocket() { RTC_DCHECK_RUN_ON(network_thread_); if (sock_) { // We assume that SO_LINGER option is set to close the association when @@ -950,16 +1064,22 @@ void SctpTransport::CloseSctpSocket() { } } -bool SctpTransport::SendQueuedStreamResets() { +bool UsrsctpTransport::SendQueuedStreamResets() { RTC_DCHECK_RUN_ON(network_thread_); + auto needs_reset = + [this](const std::map::value_type& stream) { + // Ignore streams with partial outgoing messages as they are required to + // be fully sent by the WebRTC spec + // https://w3c.github.io/webrtc-pc/#closing-procedure + return stream.second.need_outgoing_reset() && + (!partial_outgoing_message_.has_value() || + partial_outgoing_message_.value().sid() != + static_cast(stream.first)); + }; // Figure out how many streams need to be reset. We need to do this so we can // allocate the right amount of memory for the sctp_reset_streams structure. - size_t num_streams = absl::c_count_if( - stream_status_by_sid_, - [](const std::map::value_type& stream) { - return stream.second.need_outgoing_reset(); - }); + size_t num_streams = absl::c_count_if(stream_status_by_sid_, needs_reset); if (num_streams == 0) { // Nothing to reset. 
return true; @@ -978,12 +1098,10 @@ bool SctpTransport::SendQueuedStreamResets() { resetp->srs_number_streams = rtc::checked_cast(num_streams); int result_idx = 0; - for (const std::map::value_type& stream : - stream_status_by_sid_) { - if (!stream.second.need_outgoing_reset()) { - continue; + for (const auto& stream : stream_status_by_sid_) { + if (needs_reset(stream)) { + resetp->srs_stream_list[result_idx++] = stream.first; } - resetp->srs_stream_list[result_idx++] = stream.first; } int ret = @@ -1012,7 +1130,7 @@ bool SctpTransport::SendQueuedStreamResets() { return true; } -void SctpTransport::SetReadyToSendData() { +void UsrsctpTransport::SetReadyToSendData() { RTC_DCHECK_RUN_ON(network_thread_); if (!ready_to_send_data_) { ready_to_send_data_ = true; @@ -1020,7 +1138,7 @@ void SctpTransport::SetReadyToSendData() { } } -bool SctpTransport::SendBufferedMessage() { +bool UsrsctpTransport::SendBufferedMessage() { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(partial_outgoing_message_.has_value()); RTC_DLOG(LS_VERBOSE) << "Sending partially buffered message of size " @@ -1032,11 +1150,21 @@ bool SctpTransport::SendBufferedMessage() { return false; } RTC_DCHECK_EQ(0u, partial_outgoing_message_->size()); + + int sid = partial_outgoing_message_->sid(); partial_outgoing_message_.reset(); + + // Send the queued stream reset if it was pending for this stream. + auto it = stream_status_by_sid_.find(sid); + if (it->second.need_outgoing_reset()) { + SendQueuedStreamResets(); + } + return true; } -void SctpTransport::OnWritableState(rtc::PacketTransportInternal* transport) { +void UsrsctpTransport::OnWritableState( + rtc::PacketTransportInternal* transport) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK_EQ(transport_, transport); if (!was_ever_writable_ && transport->writable()) { @@ -1048,14 +1176,14 @@ void SctpTransport::OnWritableState(rtc::PacketTransportInternal* transport) { } // Called by network interface when a packet has been received. 
-void SctpTransport::OnPacketRead(rtc::PacketTransportInternal* transport, - const char* data, - size_t len, - const int64_t& /* packet_time_us */, - int flags) { +void UsrsctpTransport::OnPacketRead(rtc::PacketTransportInternal* transport, + const char* data, + size_t len, + const int64_t& /* packet_time_us */, + int flags) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK_EQ(transport_, transport); - TRACE_EVENT0("webrtc", "SctpTransport::OnPacketRead"); + TRACE_EVENT0("webrtc", "UsrsctpTransport::OnPacketRead"); if (flags & PF_SRTP_BYPASS) { // We are only interested in SCTP packets. @@ -1082,11 +1210,11 @@ void SctpTransport::OnPacketRead(rtc::PacketTransportInternal* transport, } } -void SctpTransport::OnClosed(rtc::PacketTransportInternal* transport) { +void UsrsctpTransport::OnClosed(rtc::PacketTransportInternal* transport) { SignalClosedAbruptly(); } -void SctpTransport::OnSendThresholdCallback() { +void UsrsctpTransport::OnSendThresholdCallback() { RTC_DCHECK_RUN_ON(network_thread_); if (partial_outgoing_message_.has_value()) { if (!SendBufferedMessage()) { @@ -1097,7 +1225,7 @@ void SctpTransport::OnSendThresholdCallback() { SetReadyToSendData(); } -sockaddr_conn SctpTransport::GetSctpSockAddr(int port) { +sockaddr_conn UsrsctpTransport::GetSctpSockAddr(int port) { sockaddr_conn sconn = {0}; sconn.sconn_family = AF_CONN; #ifdef HAVE_SCONN_LEN @@ -1109,7 +1237,7 @@ sockaddr_conn SctpTransport::GetSctpSockAddr(int port) { return sconn; } -void SctpTransport::OnPacketFromSctpToNetwork( +void UsrsctpTransport::OnPacketFromSctpToNetwork( const rtc::CopyOnWriteBuffer& buffer) { RTC_DCHECK_RUN_ON(network_thread_); if (buffer.size() > (kSctpMtu)) { @@ -1119,7 +1247,7 @@ void SctpTransport::OnPacketFromSctpToNetwork( "than its official MTU: " << buffer.size() << " vs max of " << kSctpMtu; } - TRACE_EVENT0("webrtc", "SctpTransport::OnPacketFromSctpToNetwork"); + TRACE_EVENT0("webrtc", "UsrsctpTransport::OnPacketFromSctpToNetwork"); // Don't create noise by trying to 
send a packet when the DTLS transport isn't // even writable. @@ -1132,24 +1260,25 @@ void SctpTransport::OnPacketFromSctpToNetwork( rtc::PacketOptions(), PF_NORMAL); } -int SctpTransport::InjectDataOrNotificationFromSctpForTesting( - void* data, +void UsrsctpTransport::InjectDataOrNotificationFromSctpForTesting( + const void* data, size_t length, struct sctp_rcvinfo rcv, int flags) { - return OnDataOrNotificationFromSctp(data, length, rcv, flags); + OnDataOrNotificationFromSctp(data, length, rcv, flags); } -int SctpTransport::OnDataOrNotificationFromSctp(void* data, - size_t length, - struct sctp_rcvinfo rcv, - int flags) { +void UsrsctpTransport::OnDataOrNotificationFromSctp(const void* data, + size_t length, + struct sctp_rcvinfo rcv, + int flags) { + RTC_DCHECK_RUN_ON(network_thread_); // If data is NULL, the SCTP association has been closed. if (!data) { RTC_LOG(LS_INFO) << debug_name_ << "->OnDataOrNotificationFromSctp(...): " "No data; association closed."; - return kSctpSuccessReturn; + return; } // Handle notifications early. @@ -1162,13 +1291,10 @@ int SctpTransport::OnDataOrNotificationFromSctp(void* data, << "->OnDataOrNotificationFromSctp(...): SCTP notification" << " length=" << length; - // Copy and dispatch asynchronously - rtc::CopyOnWriteBuffer notification(reinterpret_cast(data), + rtc::CopyOnWriteBuffer notification(reinterpret_cast(data), length); - invoker_.AsyncInvoke( - RTC_FROM_HERE, network_thread_, - rtc::Bind(&SctpTransport::OnNotificationFromSctp, this, notification)); - return kSctpSuccessReturn; + OnNotificationFromSctp(notification); + return; } // Log data chunk @@ -1181,12 +1307,12 @@ int SctpTransport::OnDataOrNotificationFromSctp(void* data, << ", eor=" << ((flags & MSG_EOR) ? 
"y" : "n"); // Validate payload protocol identifier - DataMessageType type = DMT_NONE; + webrtc::DataMessageType type; if (!GetDataMediaType(ppid, &type)) { // Unexpected PPID, dropping RTC_LOG(LS_ERROR) << "Received an unknown PPID " << ppid << " on an SCTP packet. Dropping."; - return kSctpSuccessReturn; + return; } // Expect only continuation messages belonging to the same SID. The SCTP @@ -1208,12 +1334,13 @@ int SctpTransport::OnDataOrNotificationFromSctp(void* data, // Furthermore, it is increased per stream and not on the whole // association. params.seq_num = rcv.rcv_ssn; - // There is no timestamp field in the SCTP API - params.timestamp = 0; - // Append the chunk's data to the message buffer - partial_incoming_message_.AppendData(reinterpret_cast(data), - length); + // Append the chunk's data to the message buffer unless we have a chunk with a + // PPID marking an empty message. + // See: https://tools.ietf.org/html/rfc8831#section-6.6 + if (!IsEmptyPPID(ppid)) + partial_incoming_message_.AppendData(reinterpret_cast(data), + length); partial_params_ = params; partial_flags_ = flags; @@ -1222,7 +1349,7 @@ int SctpTransport::OnDataOrNotificationFromSctp(void* data, if (partial_incoming_message_.size() < kSctpSendBufferSize) { // We still have space in the buffer. Continue buffering chunks until // the message is complete before handing it out. - return kSctpSuccessReturn; + return; } else { // The sender is exceeding the maximum message size that we announced. // Spit out a warning but still hand out the partial message. Note that @@ -1236,20 +1363,12 @@ int SctpTransport::OnDataOrNotificationFromSctp(void* data, } } - // Dispatch the complete message. - // The ownership of the packet transfers to |invoker_|. Using - // CopyOnWriteBuffer is the most convenient way to do this. 
- invoker_.AsyncInvoke( - RTC_FROM_HERE, network_thread_, - rtc::Bind(&SctpTransport::OnDataFromSctpToTransport, this, params, - partial_incoming_message_)); - - // Reset the message buffer + // Dispatch the complete message and reset the message buffer. + OnDataFromSctpToTransport(params, partial_incoming_message_); partial_incoming_message_.Clear(); - return kSctpSuccessReturn; } -void SctpTransport::OnDataFromSctpToTransport( +void UsrsctpTransport::OnDataFromSctpToTransport( const ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) { RTC_DCHECK_RUN_ON(network_thread_); @@ -1262,7 +1381,7 @@ void SctpTransport::OnDataFromSctpToTransport( SignalDataReceived(params, buffer); } -void SctpTransport::OnNotificationFromSctp( +void UsrsctpTransport::OnNotificationFromSctp( const rtc::CopyOnWriteBuffer& buffer) { RTC_DCHECK_RUN_ON(network_thread_); if (buffer.size() < sizeof(sctp_notification::sn_header)) { @@ -1363,7 +1482,8 @@ void SctpTransport::OnNotificationFromSctp( } } -void SctpTransport::OnNotificationAssocChange(const sctp_assoc_change& change) { +void UsrsctpTransport::OnNotificationAssocChange( + const sctp_assoc_change& change) { RTC_DCHECK_RUN_ON(network_thread_); switch (change.sac_state) { case SCTP_COMM_UP: @@ -1395,7 +1515,7 @@ void SctpTransport::OnNotificationAssocChange(const sctp_assoc_change& change) { } } -void SctpTransport::OnStreamResetEvent( +void UsrsctpTransport::OnStreamResetEvent( const struct sctp_stream_reset_event* evt) { RTC_DCHECK_RUN_ON(network_thread_); diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.h b/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h similarity index 85% rename from TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.h rename to TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h index 54542af6b..5dcf57b24 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h @@ -8,8 +8,8 
@@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MEDIA_SCTP_SCTP_TRANSPORT_H_ -#define MEDIA_SCTP_SCTP_TRANSPORT_H_ +#ifndef MEDIA_SCTP_USRSCTP_TRANSPORT_H_ +#define MEDIA_SCTP_USRSCTP_TRANSPORT_H_ #include @@ -21,11 +21,10 @@ #include #include "absl/types/optional.h" -#include "api/transport/sctp_transport_factory_interface.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" // For SendDataParams/ReceiveDataParams. @@ -66,23 +65,24 @@ struct SctpInboundPacket; // 12. SctpTransport::SignalDataReceived(data) // [from the same thread, methods registered/connected to // SctpTransport are called with the recieved data] -class SctpTransport : public SctpTransportInternal, - public sigslot::has_slots<> { +class UsrsctpTransport : public SctpTransportInternal, + public sigslot::has_slots<> { public: // |network_thread| is where packets will be processed and callbacks from // this transport will be posted, and is the only thread on which public // methods can be called. // |transport| is not required (can be null). - SctpTransport(rtc::Thread* network_thread, - rtc::PacketTransportInternal* transport); - ~SctpTransport() override; + UsrsctpTransport(rtc::Thread* network_thread, + rtc::PacketTransportInternal* transport); + ~UsrsctpTransport() override; // SctpTransportInternal overrides (see sctptransportinternal.h for comments). 
void SetDtlsTransport(rtc::PacketTransportInternal* transport) override; bool Start(int local_port, int remote_port, int max_message_size) override; bool OpenStream(int sid) override; bool ResetStream(int sid) override; - bool SendData(const SendDataParams& params, + bool SendData(int sid, + const webrtc::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, SendDataResult* result = nullptr) override; bool ReadyToSendData() override; @@ -96,10 +96,10 @@ class SctpTransport : public SctpTransportInternal, void set_debug_name_for_testing(const char* debug_name) override { debug_name_ = debug_name; } - int InjectDataOrNotificationFromSctpForTesting(void* data, - size_t length, - struct sctp_rcvinfo rcv, - int flags); + void InjectDataOrNotificationFromSctpForTesting(const void* data, + size_t length, + struct sctp_rcvinfo rcv, + int flags); // Exposed to allow Post call from c-callbacks. // TODO(deadbeef): Remove this or at least make it return a const pointer. @@ -114,8 +114,9 @@ class SctpTransport : public SctpTransportInternal, class OutgoingMessage { public: OutgoingMessage(const rtc::CopyOnWriteBuffer& buffer, - const SendDataParams& send_params) - : buffer_(buffer), send_params_(send_params) {} + int sid, + const webrtc::SendDataParams& send_params) + : buffer_(buffer), sid_(sid), send_params_(send_params) {} // Advances the buffer by the incremented amount. Must not advance further // than the current data size. 
@@ -128,11 +129,13 @@ class SctpTransport : public SctpTransportInternal, const void* data() const { return buffer_.data() + offset_; } - SendDataParams send_params() const { return send_params_; } + int sid() const { return sid_; } + webrtc::SendDataParams send_params() const { return send_params_; } private: const rtc::CopyOnWriteBuffer buffer_; - const SendDataParams send_params_; + int sid_; + const webrtc::SendDataParams send_params_; size_t offset_ = 0; }; @@ -180,12 +183,12 @@ class SctpTransport : public SctpTransportInternal, // Called using |invoker_| to send packet on the network. void OnPacketFromSctpToNetwork(const rtc::CopyOnWriteBuffer& buffer); - // Called on the SCTP thread. + // Called on the network thread. // Flags are standard socket API flags (RFC 6458). - int OnDataOrNotificationFromSctp(void* data, - size_t length, - struct sctp_rcvinfo rcv, - int flags); + void OnDataOrNotificationFromSctp(const void* data, + size_t length, + struct sctp_rcvinfo rcv, + int flags); // Called using |invoker_| to decide what to do with the data. void OnDataFromSctpToTransport(const ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer); @@ -199,7 +202,7 @@ class SctpTransport : public SctpTransportInternal, // outgoing data to the network interface. rtc::Thread* network_thread_; // Helps pass inbound/outbound packets asynchronously to the network thread. - rtc::AsyncInvoker invoker_; + webrtc::ScopedTaskSafety task_safety_; // Underlying DTLS transport. rtc::PacketTransportInternal* transport_ = nullptr; @@ -270,7 +273,7 @@ class SctpTransport : public SctpTransportInternal, std::map stream_status_by_sid_; // A static human-readable name for debugging messages. - const char* debug_name_ = "SctpTransport"; + const char* debug_name_ = "UsrsctpTransport"; // Hides usrsctp interactions from this header file. class UsrSctpWrapper; // Number of channels negotiated. Not set before negotiation completes. 
@@ -281,24 +284,13 @@ class SctpTransport : public SctpTransportInternal, // various callbacks. uintptr_t id_ = 0; - RTC_DISALLOW_COPY_AND_ASSIGN(SctpTransport); + friend class UsrsctpTransportMap; + + RTC_DISALLOW_COPY_AND_ASSIGN(UsrsctpTransport); }; -class SctpTransportFactory : public webrtc::SctpTransportFactoryInterface { - public: - explicit SctpTransportFactory(rtc::Thread* network_thread) - : network_thread_(network_thread) {} - - std::unique_ptr CreateSctpTransport( - rtc::PacketTransportInternal* transport) override { - return std::unique_ptr( - new SctpTransport(network_thread_, transport)); - } - - private: - rtc::Thread* network_thread_; -}; +class UsrsctpTransportMap; } // namespace cricket -#endif // MEDIA_SCTP_SCTP_TRANSPORT_H_ +#endif // MEDIA_SCTP_USRSCTP_TRANSPORT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/OWNERS index 3d4b4fd20..f7a0e4797 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/OWNERS @@ -1,4 +1,3 @@ henrik.lundin@webrtc.org -kwiberg@webrtc.org minyue@webrtc.org ivoc@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc index 600cb0c06..7546ac178 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc @@ -171,9 +171,8 @@ AudioEncoder::EncodedInfo AudioEncoderCng::EncodeImpl( last_frame_active_ = true; break; } - case Vad::kError: { - FATAL(); // Fails only if fed invalid data. 
- break; + default: { + RTC_CHECK_NOTREACHED(); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.c index 308902f57..77da78ba7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.c @@ -16,9 +16,11 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/abs_quant.h" + #include "modules/audio_coding/codecs/ilbc/abs_quant_loop.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.h index 331921cc6..c72e29cf2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.c index 2d8a9980e..cf9266299 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.c @@ -16,8 +16,10 @@ 
******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/abs_quant_loop.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/sort_sq.h" void WebRtcIlbcfix_AbsQuantLoop(int16_t *syntOutIN, int16_t *in_weightedIN, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.h index a193a07cd..841d73b9f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * predictive noise shaping encoding of scaled start state diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h index d73ef0da5..c2d62ed2d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h @@ -13,6 +13,7 @@ #include #include + #include #include "api/audio_codecs/audio_decoder.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h index fe3e32980..05a900e3c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h @@ -11,6 +11,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ +#include +#include + #include #include "absl/types/optional.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c index 77b0f7f73..c915a2f9f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c @@ -16,10 +16,11 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" #include "modules/audio_coding/codecs/ilbc/augmented_cb_corr.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" + void WebRtcIlbcfix_AugmentedCbCorr( int16_t *target, /* (i) Target vector */ int16_t *buffer, /* (i) Memory buffer */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h index 646e5649b..2e9612e51 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * Calculate correlation between target and Augmented codebooks diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.c index 566af7dd3..1a9b882ad 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/bw_expand.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.h index d25325c7f..ff9b0b302 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * lpc bandwidth expansion diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.h index f4df38728..8f7c66316 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.h @@ -20,16 +20,18 @@ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_CONSTRUCT_H_ #include +#include #include +#include "absl/base/attributes.h" #include "modules/audio_coding/codecs/ilbc/defines.h" -#include "rtc_base/system/unused.h" /*----------------------------------------------------------------* * Construct 
decoded vector from codebook and gains. *---------------------------------------------------------------*/ // Returns true on success, false on failure. +ABSL_MUST_USE_RESULT bool WebRtcIlbcfix_CbConstruct( int16_t* decvector, /* (o) Decoded vector */ const int16_t* index, /* (i) Codebook indices */ @@ -37,6 +39,6 @@ bool WebRtcIlbcfix_CbConstruct( int16_t* mem, /* (i) Buffer for codevector construction */ size_t lMem, /* (i) Length of buffer */ size_t veclen /* (i) Length of vector */ - ) RTC_WARN_UNUSED_RESULT; +); #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.c index 9304a9168..21e419760 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.c @@ -16,9 +16,11 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/cb_mem_energy.h" + #include "modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * Function WebRtcIlbcfix_CbMemEnergy computes the energy of all diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.h index 894f5d051..17ec337dc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_ #define 
MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_ +#include +#include + void WebRtcIlbcfix_CbMemEnergy( size_t range, int16_t* CB, /* (i) The CB memory (1:st section) */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c index df9ff45be..0619bbe42 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" void WebRtcIlbcfix_CbMemEnergyAugmentation( int16_t *interpSamples, /* (i) The interpolated samples */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h index b7b972f91..d7b7a0d97 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_ +#include +#include + void WebRtcIlbcfix_CbMemEnergyAugmentation( int16_t* interpSamples, /* (i) The interpolated samples */ int16_t* CBmem, /* (i) The CB memory */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c index 35d3ce4bc..58c0c5fe6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /* Compute the energy of the rest of the cb memory diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h index 5511ef1f3..1d1e8d62b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_ +#include +#include + void WebRtcIlbcfix_CbMemEnergyCalc( int32_t energy, /* (i) input start energy */ size_t range, /* (i) number of iterations */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search.c index 88b2f01f9..24b529235 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search.c @@ -16,18 +16,20 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/gain_quant.h" -#include "modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" +#include 
"modules/audio_coding/codecs/ilbc/cb_search.h" + +#include "modules/audio_coding/codecs/ilbc/augmented_cb_corr.h" #include "modules/audio_coding/codecs/ilbc/cb_mem_energy.h" -#include "modules/audio_coding/codecs/ilbc/interpolate_samples.h" #include "modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h" #include "modules/audio_coding/codecs/ilbc/cb_search_core.h" -#include "modules/audio_coding/codecs/ilbc/energy_inverse.h" -#include "modules/audio_coding/codecs/ilbc/augmented_cb_corr.h" #include "modules/audio_coding/codecs/ilbc/cb_update_best_index.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" #include "modules/audio_coding/codecs/ilbc/create_augmented_vec.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/energy_inverse.h" +#include "modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h" +#include "modules/audio_coding/codecs/ilbc/gain_quant.h" +#include "modules/audio_coding/codecs/ilbc/interpolate_samples.h" /*----------------------------------------------------------------* * Search routine for codebook encoding and gain quantization. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search.h index 393a2de12..84a52c786 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search.h @@ -19,6 +19,11 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_ +#include +#include + +#include "modules/audio_coding/codecs/ilbc/defines.h" + void WebRtcIlbcfix_CbSearch( IlbcEncoder* iLBCenc_inst, /* (i) the encoder state structure */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.c index 09d26d35d..a75e5b0ab 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/cb_search_core.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" void WebRtcIlbcfix_CbSearchCore( int32_t *cDot, /* (i) Cross Correlation */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.h index af5a1db5e..5da70e098 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_CORE_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_CORE_H_ 
-#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include void WebRtcIlbcfix_CbSearchCore( int32_t* cDot, /* (i) Cross Correlation */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.c index ed20c468e..d6fa4d93d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.c @@ -16,9 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/cb_update_best_index.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" void WebRtcIlbcfix_CbUpdateBestIndex( int32_t CritNew, /* (i) New Potentially best Criteria */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.h index 3f57d4845..1a95d531e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_UPDATE_BEST_INDEX_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_UPDATE_BEST_INDEX_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include void WebRtcIlbcfix_CbUpdateBestIndex( int32_t CritNew, /* (i) New Potentially best Criteria */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.c index 38a3069d9..b4eee6621 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.c 
+++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/chebyshev.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*------------------------------------------------------------------* * Calculate the Chevyshev polynomial series diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.h index 64b2f4907..7e7742c5c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CHEBYSHEV_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CHEBYSHEV_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*------------------------------------------------------------------* * Calculate the Chevyshev polynomial series diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.c index b43f2fc10..452bc78e3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/comp_corr.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.h 
index 1e6b296ea..010c6a1ce 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_COMP_CORR_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_COMP_CORR_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * Compute cross correlation and pitch gain for pitch prediction diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/constants.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/constants.c index 8efa6aea9..22f2acb33 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/constants.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/constants.c @@ -16,9 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" + /* HP Filters {b[0] b[1] b[2] -a[1] -a[2]} */ const int16_t WebRtcIlbcfix_kHpInCoefs[5] = {3798, -7596, 3798, 7807, -3733}; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/constants.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/constants.h index 07369a39e..a8645c00d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/constants.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/constants.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CONSTANTS_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CONSTANTS_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /* high pass filters */ diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.c index 6a4d058eb..8033c959b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.c @@ -16,10 +16,12 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "rtc_base/sanitizer.h" +#include "modules/audio_coding/codecs/ilbc/create_augmented_vec.h" + #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "rtc_base/sanitizer.h" /*----------------------------------------------------------------* * Recreate a specific codebook vector from the augmented part. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.h index 28c940006..d7e5be1c2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CREATE_AUGMENTED_VEC_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CREATE_AUGMENTED_VEC_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * Recreate a specific codebook vector from the augmented part. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode.c index 3848bc7d7..d7621d5b6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode.c @@ -16,22 +16,23 @@ ******************************************************************/ -// Defines WEBRTC_ARCH_BIG_ENDIAN, used below. +#include "modules/audio_coding/codecs/ilbc/decode.h" + +#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/decode_residual.h" +#include "modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/do_plc.h" +#include "modules/audio_coding/codecs/ilbc/enhancer_interface.h" +#include "modules/audio_coding/codecs/ilbc/hp_output.h" +#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" +#include "modules/audio_coding/codecs/ilbc/init_decode.h" +#include "modules/audio_coding/codecs/ilbc/lsf_check.h" +#include "modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h" +#include "modules/audio_coding/codecs/ilbc/unpack_bits.h" +#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" #include "rtc_base/system/arch.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h" -#include "modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h" -#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" -#include "modules/audio_coding/codecs/ilbc/do_plc.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/enhancer_interface.h" -#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" -#include "modules/audio_coding/codecs/ilbc/lsf_check.h" -#include "modules/audio_coding/codecs/ilbc/decode_residual.h" -#include 
"modules/audio_coding/codecs/ilbc/unpack_bits.h" -#include "modules/audio_coding/codecs/ilbc/hp_output.h" -#include "modules/audio_coding/codecs/ilbc/init_decode.h" #ifndef WEBRTC_ARCH_BIG_ENDIAN #include "modules/audio_coding/codecs/ilbc/swap_bytes.h" #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode.h index d73f79880..a7d291011 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode.h @@ -21,21 +21,22 @@ #include +#include "absl/base/attributes.h" #include "modules/audio_coding/codecs/ilbc/defines.h" -#include "rtc_base/system/unused.h" /*----------------------------------------------------------------* * main decoder function *---------------------------------------------------------------*/ // Returns 0 on success, -1 on error. +ABSL_MUST_USE_RESULT int WebRtcIlbcfix_DecodeImpl( int16_t* decblock, /* (o) decoded signal block */ const uint16_t* bytes, /* (i) encoded signal bits */ IlbcDecoder* iLBCdec_inst, /* (i/o) the decoder state structure */ - int16_t mode /* (i) 0: bad packet, PLC, - 1: normal */ - ) RTC_WARN_UNUSED_RESULT; + int16_t mode /* (i) 0: bad packet, PLC, + 1: normal */ +); #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.c index 3c113aeb2..a9668e288 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.c @@ -20,15 +20,15 @@ #include -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/state_construct.h" #include "modules/audio_coding/codecs/ilbc/cb_construct.h" -#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" -#include 
"modules/audio_coding/codecs/ilbc/do_plc.h" #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/do_plc.h" #include "modules/audio_coding/codecs/ilbc/enhancer_interface.h" -#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" +#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" #include "modules/audio_coding/codecs/ilbc/lsf_check.h" +#include "modules/audio_coding/codecs/ilbc/state_construct.h" +#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" /*----------------------------------------------------------------* * frame residual decoder function (subrutine to iLBC_decode) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.h index d54aada29..d07957766 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.h @@ -20,10 +20,11 @@ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_RESIDUAL_H_ #include +#include #include +#include "absl/base/attributes.h" #include "modules/audio_coding/codecs/ilbc/defines.h" -#include "rtc_base/system/unused.h" /*----------------------------------------------------------------* * frame residual decoder function (subrutine to iLBC_decode) @@ -31,6 +32,7 @@ // Returns true on success, false on failure. In case of failure, the decoder // state may be corrupted and needs resetting. 
+ABSL_MUST_USE_RESULT bool WebRtcIlbcfix_DecodeResidual( IlbcDecoder* iLBCdec_inst, /* (i/o) the decoder state structure */ iLBC_bits* iLBC_encbits, /* (i/o) Encoded bits, which are used @@ -38,6 +40,6 @@ bool WebRtcIlbcfix_DecodeResidual( int16_t* decresidual, /* (o) decoded residual frame */ int16_t* syntdenum /* (i) the decoded synthesis filter coefficients */ - ) RTC_WARN_UNUSED_RESULT; +); #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c index 8413a7376..d96bb9b2e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c @@ -16,10 +16,12 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h" +#include "modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h" + #include "modules/audio_coding/codecs/ilbc/bw_expand.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h" /*----------------------------------------------------------------* * obtain synthesis and weighting filters form lsf coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h index 48d43ec2f..8b0811446 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h @@ -19,6 +19,9 @@ #ifndef 
MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODER_INTERPOLATE_LSF_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODER_INTERPOLATE_LSF_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/defines.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/defines.h index 43948a20c..64135c488 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/defines.h @@ -18,6 +18,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DEFINES_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DEFINES_H_ +#include #include #include "common_audio/signal_processing/include/signal_processing_library.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/do_plc.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/do_plc.c index 26ec03f80..9ca6ca48e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/do_plc.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/do_plc.c @@ -16,10 +16,12 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/comp_corr.h" +#include "modules/audio_coding/codecs/ilbc/do_plc.h" + #include "modules/audio_coding/codecs/ilbc/bw_expand.h" +#include "modules/audio_coding/codecs/ilbc/comp_corr.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * Packet loss concealment routine. 
Conceals a residual signal diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/do_plc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/do_plc.h index 2fbae1de8..c19c4eca3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/do_plc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/do_plc.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DO_PLC_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DO_PLC_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/encode.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/encode.c index 912e23ca8..8e536221c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/encode.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/encode.c @@ -16,28 +16,29 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/encode.h" + #include -// Defines WEBRTC_ARCH_BIG_ENDIAN, used below. 
-#include "rtc_base/system/arch.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/lpc_encode.h" -#include "modules/audio_coding/codecs/ilbc/frame_classify.h" -#include "modules/audio_coding/codecs/ilbc/state_search.h" -#include "modules/audio_coding/codecs/ilbc/state_construct.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/cb_search.h" #include "modules/audio_coding/codecs/ilbc/cb_construct.h" -#include "modules/audio_coding/codecs/ilbc/index_conv_enc.h" -#include "modules/audio_coding/codecs/ilbc/pack_bits.h" +#include "modules/audio_coding/codecs/ilbc/cb_search.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/frame_classify.h" #include "modules/audio_coding/codecs/ilbc/hp_input.h" +#include "modules/audio_coding/codecs/ilbc/index_conv_enc.h" +#include "modules/audio_coding/codecs/ilbc/lpc_encode.h" +#include "modules/audio_coding/codecs/ilbc/pack_bits.h" +#include "modules/audio_coding/codecs/ilbc/state_construct.h" +#include "modules/audio_coding/codecs/ilbc/state_search.h" #include "rtc_base/checks.h" +#include "rtc_base/system/arch.h" #ifdef SPLIT_10MS #include "modules/audio_coding/codecs/ilbc/unpack_bits.h" #include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" #endif + #ifndef WEBRTC_ARCH_BIG_ENDIAN #include "modules/audio_coding/codecs/ilbc/swap_bytes.h" #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/encode.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/encode.h index db00e2c46..bc3e187d9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/encode.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/encode.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENCODE_H_ #define 
MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENCODE_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.h index 359a9e201..15391cf23 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENERGY_INVERSE_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENERGY_INVERSE_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /* Inverses the in vector in into Q29 domain */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.c index 0a5f044db..cd3d0a4db 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/enh_upsample.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * upsample finite array assuming zeros outside bounds diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer.c index d5cd977ea..bd4e60015 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer.c +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/enhancer.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/get_sync_seq.h" #include "modules/audio_coding/codecs/ilbc/smooth.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer.h index 1a6131b91..386949347 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * perform enhancement on idata+centerStartPos through diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.c index f85df6d7c..ca23e19ae 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.c @@ -16,13 +16,16 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/enhancer_interface.h" + +#include #include -#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" 
#include "modules/audio_coding/codecs/ilbc/enhancer.h" #include "modules/audio_coding/codecs/ilbc/hp_output.h" +#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" @@ -201,11 +204,13 @@ size_t // (o) Estimated lag in end of in[] regressor=in+tlag-1; /* scaling */ - max16 = WebRtcSpl_MaxAbsValueW16(regressor, plc_blockl + 3 - 1); - if (max16>5000) - shifts=2; - else - shifts=0; + // Note that this is not abs-max, so we will take the absolute value below. + max16 = WebRtcSpl_MaxAbsElementW16(regressor, plc_blockl + 3 - 1); + const int16_t max_target = + WebRtcSpl_MaxAbsElementW16(target, plc_blockl + 3 - 1); + const int64_t max_val = plc_blockl * abs(max16 * max_target); + const int32_t factor = max_val >> 31; + shifts = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor); /* compute cross correlation */ WebRtcSpl_CrossCorrelation(corr32, target, regressor, plc_blockl, 3, shifts, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.h index de4571514..5022a47c3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_INTERFACE_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_INTERFACE_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c index 462421191..6b4f30c96 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c @@ -16,8 
+16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * Construct an additional codebook vector by filtering the diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h index c51ac39a4..661262e42 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FILTERED_CB_VECS_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FILTERED_CB_VECS_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * Construct an additional codebook vector by filtering the diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.c index 6edf92165..c1084b164 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/frame_classify.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * Classification of subframes to localize 
start state diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.h index 43c6e57a5..7615106d7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.h @@ -19,6 +19,11 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_ +#include +#include + +#include "modules/audio_coding/codecs/ilbc/defines.h" + size_t WebRtcIlbcfix_FrameClassify( /* (o) Index to the max-energy sub frame */ IlbcEncoder* iLBCenc_inst, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.c index cb405aebb..1357dece3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/gain_dequant.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * decoder for quantized gains in the gain-shape coding of diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.h index 86cc78707..2b97550b6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.h @@ -19,7 +19,7 @@ #ifndef 
MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_DEQUANT_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_DEQUANT_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * decoder for quantized gains in the gain-shape coding of diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.c index 2472239ad..9a6d49d51 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/gain_quant.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * quantizer for the gain in the gain-shape coding of residual diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.h index 51c0bc9a5..761f7d2f7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_QUANT_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_QUANT_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * quantizer for the gain in the gain-shape coding of residual diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.c 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.c index d3479ec0b..145cb964d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.c @@ -18,9 +18,9 @@ #include "modules/audio_coding/codecs/ilbc/get_cd_vec.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/constants.h" #include "modules/audio_coding/codecs/ilbc/create_augmented_vec.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * Construct codebook vector for given index. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.h index 647b0634a..99537dd0f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.h @@ -23,17 +23,18 @@ #include #include +#include "absl/base/attributes.h" #include "modules/audio_coding/codecs/ilbc/defines.h" -#include "rtc_base/system/unused.h" // Returns true on success, false on failure. In case of failure, the decoder // state may be corrupted and needs resetting. 
+ABSL_MUST_USE_RESULT bool WebRtcIlbcfix_GetCbVec( int16_t* cbvec, /* (o) Constructed codebook vector */ int16_t* mem, /* (i) Codebook buffer */ size_t index, /* (i) Codebook index */ size_t lMem, /* (i) Length of codebook buffer */ size_t cbveclen /* (i) Codebook vector length */ - ) RTC_WARN_UNUSED_RESULT; +); #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c index ecf57701a..e0fb21caf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/get_lsp_poly.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.h index d469409af..70c9c4d4b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_LSP_POLY_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_LSP_POLY_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * Construct the polynomials F1(z) and F2(z) from the LSP diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.c index c5b11f14d..68a569a40 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.c +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.c @@ -16,10 +16,12 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/get_sync_seq.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/refiner.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/nearest_neighbor.h" +#include "modules/audio_coding/codecs/ilbc/refiner.h" /*----------------------------------------------------------------* * get the pitch-synchronous sample sequence diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.h index 2281b0672..90962fa06 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_SYNC_SEQ_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_SYNC_SEQ_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * get the pitch-synchronous sample sequence diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_input.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_input.c index dd6e20b8a..be582f2e2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_input.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_input.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/hp_input.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" 
/*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_input.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_input.h index 682441ac2..9143d8efe 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_input.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_input.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_INPUT_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_INPUT_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include // clang-format off // Bad job here. https://bugs.llvm.org/show_bug.cgi?id=34274 diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c index 0628e5874..cc5f6dcd3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/hp_output.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_output.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_output.h index 54d976309..6d1bd3cd8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_output.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/hp_output.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_OUTPUT_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_OUTPUT_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include // clang-format off // Bad job here. 
https://bugs.llvm.org/show_bug.cgi?id=34274 diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c index 8ebe51308..ba6c3e46c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c @@ -16,14 +16,15 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/ilbc.h" + #include -#include "modules/audio_coding/codecs/ilbc/ilbc.h" +#include "modules/audio_coding/codecs/ilbc/decode.h" #include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/init_encode.h" #include "modules/audio_coding/codecs/ilbc/encode.h" #include "modules/audio_coding/codecs/ilbc/init_decode.h" -#include "modules/audio_coding/codecs/ilbc/decode.h" +#include "modules/audio_coding/codecs/ilbc/init_encode.h" #include "rtc_base/checks.h" int16_t WebRtcIlbcfix_EncoderAssign(IlbcEncoderInstance** iLBC_encinst, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.c index ad12cee39..d78f81a89 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" void WebRtcIlbcfix_IndexConvDec( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.c index 6cf164e7e..83144150b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.c +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.c @@ -16,7 +16,10 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/index_conv_enc.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" + /*----------------------------------------------------------------* * Convert the codebook indexes to make the search easier *---------------------------------------------------------------*/ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.h index f89949949..4fbf98084 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * Convert the codebook indexes to make the search easier diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_decode.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_decode.c index c63cc7ce8..3eb41e33b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_decode.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_decode.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/init_decode.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * Initiation of decoder instance. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_decode.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_decode.h index fdcf9f0b1..a2b7b9128 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_decode.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_decode.h @@ -19,6 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_ +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_encode.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_encode.c index b21f77efa..aa858e94b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_encode.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_encode.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/init_encode.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * Initiation of encoder instance. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_encode.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_encode.h index f91a9b0ad..4ada6a30c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_encode.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/init_encode.h @@ -19,6 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_ +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate.c index 3ce480eaa..17ed244bd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/interpolate.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * interpolation between vectors diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate.h index 9f0323649..892082b75 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_H_ -#include 
"modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * interpolation between vectors diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.c index 9ca38a4a3..6dddd6fb8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/interpolate_samples.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" void WebRtcIlbcfix_InterpolateSamples( int16_t *interpSamples, /* (o) The interpolated samples */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.h index 264a101aa..bc665d785 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_SAMPLES_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_SAMPLES_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * Construct the interpolated samples for the Augmented CB diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.c index 9b2a0c0d4..89f6d2972 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.c @@ -16,12 +16,14 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h" -#include "modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h" -#include "modules/audio_coding/codecs/ilbc/simple_lsf_quant.h" -#include "modules/audio_coding/codecs/ilbc/lsf_check.h" +#include "modules/audio_coding/codecs/ilbc/lpc_encode.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/lsf_check.h" +#include "modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h" +#include "modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h" +#include "modules/audio_coding/codecs/ilbc/simple_lsf_quant.h" /*----------------------------------------------------------------* * lpc encoder diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.h index 256fa492b..a67b77acb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LPC_ENCODE_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LPC_ENCODE_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.c index 684b2ce8c..9f0e19a2d 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/lsf_check.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * check for stability of lsf coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.h index d367c1d09..9ba90a31e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_CHECK_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_CHECK_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * check for stability of lsf coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c index e501f3cb6..04de5e7e6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c @@ -16,9 +16,11 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h" + +#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/interpolate.h" #include 
"modules/audio_coding/codecs/ilbc/lsf_to_poly.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * interpolation of lsf coefficients for the decoder diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h index 016897a14..6cc9d9746 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_DEC_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_DEC_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * interpolation of lsf coefficients for the decoder diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c index a660c4f2f..618821216 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/interpolate.h" #include "modules/audio_coding/codecs/ilbc/lsf_to_poly.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h 
index 9cb0dd9f4..b278a10f4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_ENC_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_ENC_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * lsf interpolator and conversion from lsf to a coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c index 8767e2dec..ee8292f39 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/lsf_to_lsp.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * conversion from lsf to lsp coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h index 921101ac9..6bc6c44db 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_LSP_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_LSP_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include 
/*----------------------------------------------------------------* * conversion from lsf to lsp coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.c index 4dbf96dc0..8ca91d82f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.c @@ -16,10 +16,12 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/lsf_to_lsp.h" -#include "modules/audio_coding/codecs/ilbc/get_lsp_poly.h" +#include "modules/audio_coding/codecs/ilbc/lsf_to_poly.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/get_lsp_poly.h" +#include "modules/audio_coding/codecs/ilbc/lsf_to_lsp.h" void WebRtcIlbcfix_Lsf2Poly( int16_t *a, /* (o) predictor coefficients (order = 10) in Q12 */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.h index e551836f5..f26d3a8d2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_POLY_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_POLY_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * Convert from LSF coefficients to A coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c 
index db11cfed2..227f4d45b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/lsp_to_lsf.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * conversion from LSP coefficients to LSF coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h index 358786e75..c2f4b7692 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSP_TO_LSF_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSP_TO_LSF_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * conversion from LSP coefficients to LSF coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/my_corr.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/my_corr.c index 2f2a05887..9b870e0ef 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/my_corr.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/my_corr.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/my_corr.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/my_corr.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/my_corr.h index 21deea523..c0c2fa4a4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/my_corr.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/my_corr.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_MY_CORR_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_MY_CORR_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * compute cross correlation between sequences diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.c index 9d78528ba..1ecdd96d5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.c @@ -16,7 +16,7 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/nearest_neighbor.h" void WebRtcIlbcfix_NearestNeighbor(size_t* index, const size_t* array, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.h index 68b5c59b4..704cf2a37 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_NEAREST_NEIGHBOR_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_NEAREST_NEIGHBOR_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include 
/*----------------------------------------------------------------* * Find index in array such that the array element with said diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.c index 507e25e3e..dd44eb8fb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/pack_bits.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.c index 92aa16538..7192eaab4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.c @@ -16,10 +16,11 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/poly_to_lsf.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/poly_to_lsp.h" #include "modules/audio_coding/codecs/ilbc/lsp_to_lsf.h" +#include "modules/audio_coding/codecs/ilbc/poly_to_lsp.h" void WebRtcIlbcfix_Poly2Lsf( int16_t *lsf, /* (o) lsf coefficients (Q13) */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.h index 8a68d07d6..363e392bb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSF_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSF_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * conversion from lpc coefficients to lsf coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.c index 88df6396e..ad0ecd70a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.c @@ -16,9 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/poly_to_lsp.h" + #include "modules/audio_coding/codecs/ilbc/chebyshev.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" /*----------------------------------------------------------------* * conversion from lpc coefficients to lsp coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.h index 76378f293..928ee4efd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSP_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSP_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * conversion from lpc coefficients to lsp coefficients 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/refiner.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/refiner.c index d8a9bfb57..5bdab7a4b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/refiner.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/refiner.c @@ -16,7 +16,8 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/refiner.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" #include "modules/audio_coding/codecs/ilbc/enh_upsample.h" #include "modules/audio_coding/codecs/ilbc/my_corr.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/refiner.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/refiner.h index 87d0de781..564c9d96e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/refiner.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/refiner.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_REFINER_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_REFINER_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * find segment starting near idata+estSegPos that has highest diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c index df41b281b..7343530a5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c @@ -16,10 +16,12 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include 
"modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h" +#include "modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h" + #include "modules/audio_coding/codecs/ilbc/bw_expand.h" #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h" /*----------------------------------------------------------------* * lsf interpolator (subrutine to LPCencode) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h index 317f613d6..ee53e4bd0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h @@ -19,6 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_INTERPOLATE_LSF_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_INTERPOLATE_LSF_H_ +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c index 09e64ac6d..fdc4553d9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c @@ -16,11 +16,13 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/window32_w32.h" +#include "modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h" + #include "modules/audio_coding/codecs/ilbc/bw_expand.h" -#include 
"modules/audio_coding/codecs/ilbc/poly_to_lsf.h" #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/poly_to_lsf.h" +#include "modules/audio_coding/codecs/ilbc/window32_w32.h" /*----------------------------------------------------------------* * lpc analysis (subrutine to LPCencode) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h index 3b0548d05..b5c839ba2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h @@ -19,6 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LPC_ANALYSIS_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LPC_ANALYSIS_H_ +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c index 90673a2a8..e7494ceb5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * obtain dequantized lsf coefficients from quantization index diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h index ee1848644..6d97d3df3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_DEQUANT_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_DEQUANT_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * obtain dequantized lsf coefficients from quantization index diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c index 45373a943..1291d1442 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c @@ -16,9 +16,11 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/simple_lsf_quant.h" + +#include "modules/audio_coding/codecs/ilbc/constants.h" #include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/split_vq.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" /*----------------------------------------------------------------* * lsf quantizer (subrutine to LPCencode) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h index 74fb0be59..66b553213 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_QUANT_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_QUANT_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * lsf quantizer (subrutine to LPCencode) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth.c index edafb0c93..631b2f432 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/smooth.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/smooth_out_data.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth.h index 52e7ff97b..c8752be64 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * find the smoothed output data diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.c 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.c index 72b3a4779..9f952bfb9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/smooth_out_data.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" #include "rtc_base/sanitizer.h" // An s32 + s32 -> s32 addition that's allowed to overflow. (It's still diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.h index df946e3a7..318e7b04a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_OUT_DATA_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_OUT_DATA_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * help function to WebRtcIlbcfix_Smooth() diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.c index dd3ca8057..c3a24750f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/sort_sq.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" 
/*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.h index 1fe7fbf23..02028dae9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SORT_SQ_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SORT_SQ_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * scalar quantization diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/split_vq.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/split_vq.c index 2f218ed59..c1f04d228 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/split_vq.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/split_vq.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/split_vq.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" #include "modules/audio_coding/codecs/ilbc/vq3.h" #include "modules/audio_coding/codecs/ilbc/vq4.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/split_vq.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/split_vq.h index 6bc2db67e..e4b02a2bc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/split_vq.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/split_vq.h @@ -19,7 +19,7 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SPLIT_VQ_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SPLIT_VQ_H_ 
-#include "modules/audio_coding/codecs/ilbc/defines.h" +#include /*----------------------------------------------------------------* * split vector quantization diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_construct.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_construct.c index 753415b9c..c58086c03 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_construct.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_construct.c @@ -16,8 +16,10 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/state_construct.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * decoding of the start state diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_construct.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_construct.h index 0dadf4886..4c3011937 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_construct.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_construct.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_CONSTRUCT_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_CONSTRUCT_H_ +#include +#include + /*----------------------------------------------------------------* * Generate the start state from the quantized indexes *---------------------------------------------------------------*/ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_search.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_search.c index 5e8a2f55a..7227ac9d4 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_search.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_search.c @@ -16,9 +16,11 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/state_search.h" + #include "modules/audio_coding/codecs/ilbc/abs_quant.h" +#include "modules/audio_coding/codecs/ilbc/constants.h" +#include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* * encoding of start state diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_search.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_search.h index 1ad27cec0..6469138a0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_search.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/state_search.h @@ -19,6 +19,9 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_SEARCH_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_SEARCH_H_ +#include +#include + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.c index 806cc2a39..bbafc1a2e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.c @@ -16,7 +16,7 @@ ******************************************************************/ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include "modules/audio_coding/codecs/ilbc/swap_bytes.h" /*----------------------------------------------------------------* * Swap 
bytes (to simplify operations on Little Endian machines) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.h index 381b73ae4..c59bf3068 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SWAP_BYTES_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SWAP_BYTES_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * Swap bytes (to simplify operations on Little Endian machines) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.c index ad6a7ee1d..a9a0147b9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/unpack_bits.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.h index 4fd0a80b4..1a63280e6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.h @@ -19,6 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_UNPACK_BITS_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_UNPACK_BITS_H_ +#include + #include 
"modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/vq3.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/vq3.c index b63a7a847..d9375fb99 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/vq3.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/vq3.c @@ -17,6 +17,7 @@ ******************************************************************/ #include "modules/audio_coding/codecs/ilbc/vq3.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/vq4.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/vq4.c index 2522ac28e..c9a65aec2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/vq4.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/vq4.c @@ -17,6 +17,7 @@ ******************************************************************/ #include "modules/audio_coding/codecs/ilbc/vq4.h" + #include "modules/audio_coding/codecs/ilbc/constants.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.c index fe19de685..e82d16722 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/window32_w32.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.h index 0cef0848f..15d72c5ba 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_WINDOW32_W32_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_WINDOW32_W32_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * window multiplication diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.c b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.c index f6375dfbf..9dc880b37 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.c +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.c @@ -16,6 +16,8 @@ ******************************************************************/ +#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" + #include "modules/audio_coding/codecs/ilbc/defines.h" /*----------------------------------------------------------------* diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.h index e6c3d3f60..3be5a296b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.h @@ -19,7 +19,8 @@ #ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_XCORR_COEF_H_ #define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_XCORR_COEF_H_ -#include "modules/audio_coding/codecs/ilbc/defines.h" +#include +#include /*----------------------------------------------------------------* * cross correlation 
which finds the optimal lag for the diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h index 0bde3f797..fa8451520 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h @@ -140,6 +140,11 @@ AudioEncoder::EncodedInfo AudioEncoderIsacT::EncodeImpl( kSufficientEncodeBufferSizeBytes, [&](rtc::ArrayView encoded) { int r = T::Encode(isac_state_, audio.data(), encoded.data()); + if (T::GetErrorCode(isac_state_) == 6450) { + // Isac is not able to effectively compress all types of signals. This + // is a limitation of the codec that cannot be easily fixed. + r = 0; + } RTC_CHECK_GE(r, 0) << "Encode failed (error code " << T::GetErrorCode(isac_state_) << ")"; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc index 203cb5aeb..7c62e98c5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc @@ -704,6 +704,11 @@ bool AudioEncoderOpusImpl::RecreateEncoderInstance( } void AudioEncoderOpusImpl::SetFrameLength(int frame_length_ms) { + if (next_frame_length_ms_ != frame_length_ms) { + RTC_LOG(LS_VERBOSE) << "Update Opus frame length " + << "from " << next_frame_length_ms_ << " ms " + << "to " << frame_length_ms << " ms."; + } next_frame_length_ms_ = frame_length_ms; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_inst.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_inst.h index 148baa280..2c25e43f2 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_inst.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_inst.h @@ -25,6 +25,9 @@ struct WebRtcOpusEncInst { OpusMSEncoder* multistream_encoder; size_t channels; int in_dtx_mode; + bool avoid_noise_pumping_during_dtx; + int sample_rate_hz; + float smooth_energy_non_active_frames; }; struct WebRtcOpusDecInst { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc index ca39ed823..f684452ad 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.cc @@ -12,6 +12,9 @@ #include +#include + +#include "api/array_view.h" #include "rtc_base/checks.h" #include "system_wrappers/include/field_trial.h" @@ -36,6 +39,9 @@ enum { constexpr char kPlcUsePrevDecodedSamplesFieldTrial[] = "WebRTC-Audio-OpusPlcUsePrevDecodedSamples"; +constexpr char kAvoidNoisePumpingDuringDtxFieldTrial[] = + "WebRTC-Audio-OpusAvoidNoisePumpingDuringDtx"; + static int FrameSizePerChannel(int frame_size_ms, int sample_rate_hz) { RTC_DCHECK_GT(frame_size_ms, 0); RTC_DCHECK_EQ(frame_size_ms % 10, 0); @@ -54,6 +60,46 @@ static int DefaultFrameSizePerChannel(int sample_rate_hz) { return FrameSizePerChannel(20, sample_rate_hz); } +// Returns true if the `encoded` payload corresponds to a refresh DTX packet +// whose energy is larger than the expected for non activity packets. 
+static bool WebRtcOpus_IsHighEnergyRefreshDtxPacket( + OpusEncInst* inst, + rtc::ArrayView frame, + rtc::ArrayView encoded) { + if (encoded.size() <= 2) { + return false; + } + int number_frames = + frame.size() / DefaultFrameSizePerChannel(inst->sample_rate_hz); + if (number_frames > 0 && + WebRtcOpus_PacketHasVoiceActivity(encoded.data(), encoded.size()) == 0) { + const float average_frame_energy = + std::accumulate(frame.begin(), frame.end(), 0.0f, + [](float a, int32_t b) { return a + b * b; }) / + number_frames; + if (WebRtcOpus_GetInDtx(inst) == 1 && + average_frame_energy >= inst->smooth_energy_non_active_frames * 0.5f) { + // This is a refresh DTX packet as the encoder is in DTX and has + // produced a payload > 2 bytes. This refresh packet has a higher energy + // than the smooth energy of non activity frames (with a 3 dB negative + // margin) and, therefore, it is flagged as a high energy refresh DTX + // packet. + return true; + } + // The average energy is tracked in a similar way as the modeling of the + // comfort noise in the Silk decoder in Opus + // (third_party/opus/src/silk/CNG.c). 
+ if (average_frame_energy < inst->smooth_energy_non_active_frames * 0.5f) { + inst->smooth_energy_non_active_frames = average_frame_energy; + } else { + inst->smooth_energy_non_active_frames += + (average_frame_energy - inst->smooth_energy_non_active_frames) * + 0.25f; + } + } + return false; +} + int16_t WebRtcOpus_EncoderCreate(OpusEncInst** inst, size_t channels, int32_t application, @@ -88,6 +134,10 @@ int16_t WebRtcOpus_EncoderCreate(OpusEncInst** inst, state->in_dtx_mode = 0; state->channels = channels; + state->sample_rate_hz = sample_rate_hz; + state->smooth_energy_non_active_frames = 0.0f; + state->avoid_noise_pumping_during_dtx = + webrtc::field_trial::IsEnabled(kAvoidNoisePumpingDuringDtxFieldTrial); *inst = state; return 0; @@ -120,9 +170,10 @@ int16_t WebRtcOpus_MultistreamEncoderCreate( RTC_DCHECK(state); int error; - state->multistream_encoder = - opus_multistream_encoder_create(48000, channels, streams, coupled_streams, - channel_mapping, opus_app, &error); + const int sample_rate_hz = 48000; + state->multistream_encoder = opus_multistream_encoder_create( + sample_rate_hz, channels, streams, coupled_streams, channel_mapping, + opus_app, &error); if (error != OPUS_OK || (!state->encoder && !state->multistream_encoder)) { WebRtcOpus_EncoderFree(state); @@ -131,6 +182,9 @@ int16_t WebRtcOpus_MultistreamEncoderCreate( state->in_dtx_mode = 0; state->channels = channels; + state->sample_rate_hz = sample_rate_hz; + state->smooth_energy_non_active_frames = 0.0f; + state->avoid_noise_pumping_during_dtx = false; *inst = state; return 0; @@ -188,6 +242,21 @@ int WebRtcOpus_Encode(OpusEncInst* inst, } } + if (inst->avoid_noise_pumping_during_dtx && WebRtcOpus_GetUseDtx(inst) == 1 && + WebRtcOpus_IsHighEnergyRefreshDtxPacket( + inst, rtc::MakeArrayView(audio_in, samples), + rtc::MakeArrayView(encoded, res))) { + // This packet is a high energy refresh DTX packet. 
For avoiding an increase + // of the energy in the DTX region at the decoder, this packet is + // substituted by a TOC byte with one empty frame. + // The number of frames described in the TOC byte + // (https://tools.ietf.org/html/rfc6716#section-3.1) are overwritten to + // always indicate one frame (last two bits equal to 0). + encoded[0] = encoded[0] & 0b11111100; + inst->in_dtx_mode = 1; + // The payload is just the TOC byte and has 1 byte as length. + return 1; + } inst->in_dtx_mode = 0; return res; } @@ -316,6 +385,16 @@ int16_t WebRtcOpus_DisableDtx(OpusEncInst* inst) { } } +int16_t WebRtcOpus_GetUseDtx(OpusEncInst* inst) { + if (inst) { + opus_int32 use_dtx; + if (ENCODER_CTL(inst, OPUS_GET_DTX(&use_dtx)) == 0) { + return use_dtx; + } + } + return -1; +} + int16_t WebRtcOpus_EnableCbr(OpusEncInst* inst) { if (inst) { return ENCODER_CTL(inst, OPUS_SET_VBR(0)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.h index 2a3ceaa7d..89159ce1c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_interface.h @@ -231,6 +231,20 @@ int16_t WebRtcOpus_EnableDtx(OpusEncInst* inst); */ int16_t WebRtcOpus_DisableDtx(OpusEncInst* inst); +/**************************************************************************** + * WebRtcOpus_GetUseDtx() + * + * This function gets the DTX configuration used for encoding. + * + * Input: + * - inst : Encoder context + * + * Return value : 0 - Encoder does not use DTX. + * 1 - Encoder uses DTX. + * -1 - Error. 
+ */ +int16_t WebRtcOpus_GetUseDtx(OpusEncInst* inst); + /**************************************************************************** * WebRtcOpus_EnableCbr() * diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc index 1432e3182..c72768e93 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc @@ -17,22 +17,51 @@ #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/field_trial.h" namespace webrtc { -// RED packets must be less than 1024 bytes to fit the 10 bit block length. -static constexpr const int kRedMaxPacketSize = 1 << 10; -// The typical MTU is 1200 bytes. -static constexpr const size_t kAudioMaxRtpPacketLen = 1200; +static constexpr const int kRedMaxPacketSize = + 1 << 10; // RED packets must be less than 1024 bytes to fit the 10 bit + // block length. +static constexpr const size_t kAudioMaxRtpPacketLen = + 1200; // The typical MTU is 1200 bytes. + +static constexpr size_t kRedHeaderLength = 4; // 4 bytes RED header. +static constexpr size_t kRedLastHeaderLength = + 1; // reduced size for last RED header. + +static constexpr size_t kRedNumberOfRedundantEncodings = + 2; // The level of redundancy we support. 
AudioEncoderCopyRed::Config::Config() = default; AudioEncoderCopyRed::Config::Config(Config&&) = default; AudioEncoderCopyRed::Config::~Config() = default; +size_t GetMaxRedundancyFromFieldTrial() { + const std::string red_trial = + webrtc::field_trial::FindFullName("WebRTC-Audio-Red-For-Opus"); + size_t redundancy = 0; + if (sscanf(red_trial.c_str(), "Enabled-%zu", &redundancy) != 1 || + redundancy < 1 || redundancy > 9) { + return kRedNumberOfRedundantEncodings; + } + return redundancy; +} + AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config) : speech_encoder_(std::move(config.speech_encoder)), + primary_encoded_(0, kAudioMaxRtpPacketLen), max_packet_length_(kAudioMaxRtpPacketLen), red_payload_type_(config.payload_type) { RTC_CHECK(speech_encoder_) << "Speech encoder not provided."; + + auto number_of_redundant_encodings = GetMaxRedundancyFromFieldTrial(); + for (size_t i = 0; i < number_of_redundant_encodings; i++) { + std::pair redundant; + redundant.second.EnsureCapacity(kAudioMaxRtpPacketLen); + redundant_encodings_.push_front(std::move(redundant)); + } } AudioEncoderCopyRed::~AudioEncoderCopyRed() = default; @@ -61,104 +90,86 @@ int AudioEncoderCopyRed::GetTargetBitrate() const { return speech_encoder_->GetTargetBitrate(); } -size_t AudioEncoderCopyRed::CalculateHeaderLength(size_t encoded_bytes) const { - size_t header_size = 1; - size_t bytes_available = max_packet_length_ - encoded_bytes; - if (secondary_info_.encoded_bytes > 0 && - secondary_info_.encoded_bytes < bytes_available) { - header_size += 4; - bytes_available -= secondary_info_.encoded_bytes; - } - if (tertiary_info_.encoded_bytes > 0 && - tertiary_info_.encoded_bytes < bytes_available) { - header_size += 4; - } - return header_size > 1 ? 
header_size : 0; -} - AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl( uint32_t rtp_timestamp, rtc::ArrayView audio, rtc::Buffer* encoded) { - rtc::Buffer primary_encoded; + primary_encoded_.Clear(); EncodedInfo info = - speech_encoder_->Encode(rtp_timestamp, audio, &primary_encoded); + speech_encoder_->Encode(rtp_timestamp, audio, &primary_encoded_); RTC_CHECK(info.redundant.empty()) << "Cannot use nested redundant encoders."; - RTC_DCHECK_EQ(primary_encoded.size(), info.encoded_bytes); + RTC_DCHECK_EQ(primary_encoded_.size(), info.encoded_bytes); if (info.encoded_bytes == 0 || info.encoded_bytes > kRedMaxPacketSize) { return info; } RTC_DCHECK_GT(max_packet_length_, info.encoded_bytes); + size_t header_length_bytes = kRedLastHeaderLength; + size_t bytes_available = max_packet_length_ - info.encoded_bytes; + auto it = redundant_encodings_.begin(); + + // Determine how much redundancy we can fit into our packet by + // iterating forward. + for (; it != redundant_encodings_.end(); it++) { + if (bytes_available < kRedHeaderLength + it->first.encoded_bytes) { + break; + } + if (it->first.encoded_bytes == 0) { + break; + } + bytes_available -= kRedHeaderLength + it->first.encoded_bytes; + header_length_bytes += kRedHeaderLength; + } + // Allocate room for RFC 2198 header if there is redundant data. // Otherwise this will send the primary payload type without // wrapping in RED. - const size_t header_length_bytes = CalculateHeaderLength(info.encoded_bytes); + if (header_length_bytes == kRedLastHeaderLength) { + header_length_bytes = 0; + } encoded->SetSize(header_length_bytes); + // Iterate backwards and append the data. 
size_t header_offset = 0; - size_t bytes_available = max_packet_length_ - info.encoded_bytes; - if (tertiary_info_.encoded_bytes > 0 && - tertiary_info_.encoded_bytes + secondary_info_.encoded_bytes < - bytes_available) { - encoded->AppendData(tertiary_encoded_); + while (it-- != redundant_encodings_.begin()) { + encoded->AppendData(it->second); const uint32_t timestamp_delta = - info.encoded_timestamp - tertiary_info_.encoded_timestamp; - - encoded->data()[header_offset] = tertiary_info_.payload_type | 0x80; + info.encoded_timestamp - it->first.encoded_timestamp; + encoded->data()[header_offset] = it->first.payload_type | 0x80; rtc::SetBE16(static_cast(encoded->data()) + header_offset + 1, - (timestamp_delta << 2) | (tertiary_info_.encoded_bytes >> 8)); - encoded->data()[header_offset + 3] = tertiary_info_.encoded_bytes & 0xff; - header_offset += 4; - bytes_available -= tertiary_info_.encoded_bytes; - } - - if (secondary_info_.encoded_bytes > 0 && - secondary_info_.encoded_bytes < bytes_available) { - encoded->AppendData(secondary_encoded_); - - const uint32_t timestamp_delta = - info.encoded_timestamp - secondary_info_.encoded_timestamp; - - encoded->data()[header_offset] = secondary_info_.payload_type | 0x80; - rtc::SetBE16(static_cast(encoded->data()) + header_offset + 1, - (timestamp_delta << 2) | (secondary_info_.encoded_bytes >> 8)); - encoded->data()[header_offset + 3] = secondary_info_.encoded_bytes & 0xff; - header_offset += 4; - bytes_available -= secondary_info_.encoded_bytes; - } - - encoded->AppendData(primary_encoded); - if (header_length_bytes > 0) { - RTC_DCHECK_EQ(header_offset, header_length_bytes - 1); - encoded->data()[header_offset] = info.payload_type; + (timestamp_delta << 2) | (it->first.encoded_bytes >> 8)); + encoded->data()[header_offset + 3] = it->first.encoded_bytes & 0xff; + header_offset += kRedHeaderLength; + info.redundant.push_back(it->first); } // |info| will be implicitly cast to an EncodedInfoLeaf struct, effectively // 
discarding the (empty) vector of redundant information. This is // intentional. - info.redundant.push_back(info); - RTC_DCHECK_EQ(info.redundant.size(), 1); - RTC_DCHECK_EQ(info.speech, info.redundant[0].speech); - if (secondary_info_.encoded_bytes > 0) { - info.redundant.push_back(secondary_info_); - RTC_DCHECK_EQ(info.redundant.size(), 2); - } - if (tertiary_info_.encoded_bytes > 0) { - info.redundant.push_back(tertiary_info_); - RTC_DCHECK_EQ(info.redundant.size(), - 2 + (secondary_info_.encoded_bytes > 0 ? 1 : 0)); + if (header_length_bytes > 0) { + info.redundant.push_back(info); + RTC_DCHECK_EQ(info.speech, + info.redundant[info.redundant.size() - 1].speech); } - // Save secondary to tertiary. - tertiary_encoded_.SetData(secondary_encoded_); - tertiary_info_ = secondary_info_; + encoded->AppendData(primary_encoded_); + if (header_length_bytes > 0) { + RTC_DCHECK_EQ(header_offset, header_length_bytes - 1); + encoded->data()[header_offset] = info.payload_type; + } - // Save primary to secondary. - secondary_encoded_.SetData(primary_encoded); - secondary_info_ = info; + // Shift the redundant encodings. + it = redundant_encodings_.begin(); + for (auto next = std::next(it); next != redundant_encodings_.end(); + it++, next = std::next(it)) { + next->first = it->first; + next->second.SetData(it->second); + } + it = redundant_encodings_.begin(); + it->first = info; + it->second.SetData(primary_encoded_); // Update main EncodedInfo. 
if (header_length_bytes > 0) { @@ -170,8 +181,13 @@ AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl( void AudioEncoderCopyRed::Reset() { speech_encoder_->Reset(); - secondary_encoded_.Clear(); - secondary_info_.encoded_bytes = 0; + auto number_of_redundant_encodings = redundant_encodings_.size(); + redundant_encodings_.clear(); + for (size_t i = 0; i < number_of_redundant_encodings; i++) { + std::pair redundant; + redundant.second.EnsureCapacity(kAudioMaxRtpPacketLen); + redundant_encodings_.push_front(std::move(redundant)); + } } bool AudioEncoderCopyRed::SetFec(bool enable) { @@ -182,6 +198,10 @@ bool AudioEncoderCopyRed::SetDtx(bool enable) { return speech_encoder_->SetDtx(enable); } +bool AudioEncoderCopyRed::GetDtx() const { + return speech_encoder_->GetDtx(); +} + bool AudioEncoderCopyRed::SetApplication(Application application) { return speech_encoder_->SetApplication(application); } @@ -190,9 +210,14 @@ void AudioEncoderCopyRed::SetMaxPlaybackRate(int frequency_hz) { speech_encoder_->SetMaxPlaybackRate(frequency_hz); } -rtc::ArrayView> -AudioEncoderCopyRed::ReclaimContainedEncoders() { - return rtc::ArrayView>(&speech_encoder_, 1); +bool AudioEncoderCopyRed::EnableAudioNetworkAdaptor( + const std::string& config_string, + RtcEventLog* event_log) { + return speech_encoder_->EnableAudioNetworkAdaptor(config_string, event_log); +} + +void AudioEncoderCopyRed::DisableAudioNetworkAdaptor() { + speech_encoder_->DisableAudioNetworkAdaptor(); } void AudioEncoderCopyRed::OnReceivedUplinkPacketLossFraction( @@ -208,14 +233,38 @@ void AudioEncoderCopyRed::OnReceivedUplinkBandwidth( bwe_period_ms); } +void AudioEncoderCopyRed::OnReceivedUplinkAllocation( + BitrateAllocationUpdate update) { + speech_encoder_->OnReceivedUplinkAllocation(update); +} + absl::optional> AudioEncoderCopyRed::GetFrameLengthRange() const { return speech_encoder_->GetFrameLengthRange(); } +void AudioEncoderCopyRed::OnReceivedRtt(int rtt_ms) { + speech_encoder_->OnReceivedRtt(rtt_ms); 
+} + void AudioEncoderCopyRed::OnReceivedOverhead(size_t overhead_bytes_per_packet) { max_packet_length_ = kAudioMaxRtpPacketLen - overhead_bytes_per_packet; return speech_encoder_->OnReceivedOverhead(overhead_bytes_per_packet); } +void AudioEncoderCopyRed::SetReceiverFrameLengthRange(int min_frame_length_ms, + int max_frame_length_ms) { + return speech_encoder_->SetReceiverFrameLengthRange(min_frame_length_ms, + max_frame_length_ms); +} + +ANAStats AudioEncoderCopyRed::GetANAStats() const { + return speech_encoder_->GetANAStats(); +} + +rtc::ArrayView> +AudioEncoderCopyRed::ReclaimContainedEncoders() { + return rtc::ArrayView>(&speech_encoder_, 1); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h index 9806772ba..d5b1bf686 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h @@ -14,6 +14,7 @@ #include #include +#include #include #include @@ -26,10 +27,12 @@ namespace webrtc { -// This class implements redundant audio coding. The class object will have an -// underlying AudioEncoder object that performs the actual encodings. The -// current class will gather the two latest encodings from the underlying codec -// into one packet. +// This class implements redundant audio coding as described in +// https://tools.ietf.org/html/rfc2198 +// The class object will have an underlying AudioEncoder object that performs +// the actual encodings. The current class will gather the N latest encodings +// from the underlying codec into one packet. Currently N is hard-coded to 2. 
+ class AudioEncoderCopyRed final : public AudioEncoder { public: struct Config { @@ -50,21 +53,33 @@ class AudioEncoderCopyRed final : public AudioEncoder { size_t Num10MsFramesInNextPacket() const override; size_t Max10MsFramesInAPacket() const override; int GetTargetBitrate() const override; + void Reset() override; bool SetFec(bool enable) override; + bool SetDtx(bool enable) override; + bool GetDtx() const override; + bool SetApplication(Application application) override; void SetMaxPlaybackRate(int frequency_hz) override; - rtc::ArrayView> ReclaimContainedEncoders() - override; + bool EnableAudioNetworkAdaptor(const std::string& config_string, + RtcEventLog* event_log) override; + void DisableAudioNetworkAdaptor() override; void OnReceivedUplinkPacketLossFraction( float uplink_packet_loss_fraction) override; void OnReceivedUplinkBandwidth( int target_audio_bitrate_bps, absl::optional bwe_period_ms) override; + void OnReceivedUplinkAllocation(BitrateAllocationUpdate update) override; + void OnReceivedRtt(int rtt_ms) override; void OnReceivedOverhead(size_t overhead_bytes_per_packet) override; + void SetReceiverFrameLengthRange(int min_frame_length_ms, + int max_frame_length_ms) override; + ANAStats GetANAStats() const override; absl::optional> GetFrameLengthRange() const override; + rtc::ArrayView> ReclaimContainedEncoders() + override; protected: EncodedInfo EncodeImpl(uint32_t rtp_timestamp, @@ -72,15 +87,11 @@ class AudioEncoderCopyRed final : public AudioEncoder { rtc::Buffer* encoded) override; private: - size_t CalculateHeaderLength(size_t encoded_bytes) const; - std::unique_ptr speech_encoder_; + rtc::Buffer primary_encoded_; size_t max_packet_length_; int red_payload_type_; - rtc::Buffer secondary_encoded_; - EncodedInfoLeaf secondary_info_; - rtc::Buffer tertiary_encoded_; - EncodedInfoLeaf tertiary_info_; + std::list> redundant_encodings_; RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCopyRed); }; diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h index 07aa8c956..a7210dadc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h @@ -13,8 +13,6 @@ #include -#include "rtc_base/deprecation.h" - namespace webrtc { /////////////////////////////////////////////////////////////////////////// diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc index 7ad006545..8901c01f7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc @@ -35,14 +35,17 @@ void BufferLevelFilter::Update(size_t buffer_size_samples, // |level_factor_| and |filtered_current_level_| are in Q8. // |buffer_size_samples| is in Q0. const int64_t filtered_current_level = - ((level_factor_ * int64_t{filtered_current_level_}) >> 8) + - ((256 - level_factor_) * rtc::dchecked_cast(buffer_size_samples)); + (level_factor_ * int64_t{filtered_current_level_} >> 8) + + (256 - level_factor_) * rtc::dchecked_cast(buffer_size_samples); // Account for time-scale operations (accelerate and pre-emptive expand) and // make sure that the filtered value remains non-negative. 
filtered_current_level_ = rtc::saturated_cast(std::max( - 0, - filtered_current_level - (int64_t{time_stretched_samples} * (1 << 8)))); + 0, filtered_current_level - int64_t{time_stretched_samples} * (1 << 8))); +} + +void BufferLevelFilter::SetFilteredBufferLevel(int buffer_size_samples) { + filtered_current_level_ = buffer_size_samples * 256; } void BufferLevelFilter::SetTargetBufferLevel(int target_buffer_level_ms) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h index bb3185667..218a14264 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h @@ -12,6 +12,7 @@ #define MODULES_AUDIO_CODING_NETEQ_BUFFER_LEVEL_FILTER_H_ #include +#include #include "rtc_base/constructor_magic.h" @@ -28,13 +29,18 @@ class BufferLevelFilter { // bypassing the filter operation). virtual void Update(size_t buffer_size_samples, int time_stretched_samples); + // Set the filtered buffer level to a particular value directly. This should + // only be used in case of large changes in buffer size, such as buffer + // flushes. + virtual void SetFilteredBufferLevel(int buffer_size_samples); + // The target level is used to select the appropriate filter coefficient. virtual void SetTargetBufferLevel(int target_buffer_level_ms); // Returns filtered current level in number of samples. virtual int filtered_current_level() const { // Round to nearest whole sample. 
- return (filtered_current_level_ + (1 << 7)) >> 8; + return (int64_t{filtered_current_level_} + (1 << 7)) >> 8; } private: diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/cross_correlation.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/cross_correlation.cc index 2a03d4af2..37ed9374f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/cross_correlation.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/cross_correlation.cc @@ -25,8 +25,10 @@ int CrossCorrelationWithAutoShift(const int16_t* sequence_1, size_t cross_correlation_length, int cross_correlation_step, int32_t* cross_correlation) { - // Find the maximum absolute value of sequence_1 and 2. - const int16_t max_1 = WebRtcSpl_MaxAbsValueW16(sequence_1, sequence_1_length); + // Find the element that has the maximum absolute value of sequence_1 and 2. + // Note that these values may be negative. + const int16_t max_1 = + WebRtcSpl_MaxAbsElementW16(sequence_1, sequence_1_length); const int sequence_2_shift = cross_correlation_step * (static_cast(cross_correlation_length) - 1); const int16_t* sequence_2_start = @@ -34,23 +36,13 @@ int CrossCorrelationWithAutoShift(const int16_t* sequence_1, const size_t sequence_2_length = sequence_1_length + std::abs(sequence_2_shift); const int16_t max_2 = - WebRtcSpl_MaxAbsValueW16(sequence_2_start, sequence_2_length); + WebRtcSpl_MaxAbsElementW16(sequence_2_start, sequence_2_length); // In order to avoid overflow when computing the sum we should scale the // samples so that (in_vector_length * max_1 * max_2) will not overflow. - // Expected scaling fulfills - // 1) sufficient: - // sequence_1_length * (max_1 * max_2 >> scaling) <= 0x7fffffff; - // 2) necessary: - // if (scaling > 0) - // sequence_1_length * (max_1 * max_2 >> (scaling - 1)) > 0x7fffffff; - // The following calculation fulfills 1) and almost fulfills 2). 
- // There are some corner cases that 2) is not satisfied, e.g., - // max_1 = 17, max_2 = 30848, sequence_1_length = 4095, in such case, - // optimal scaling is 0, while the following calculation results in 1. - const int32_t factor = - (max_1 * max_2) / (std::numeric_limits::max() / - static_cast(sequence_1_length)); + const int64_t max_value = + abs(max_1 * max_2) * static_cast(sequence_1_length); + const int32_t factor = max_value >> 31; const int scaling = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor); WebRtcSpl_CrossCorrelation(cross_correlation, sequence_1, sequence_2, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc index 9c0ee9682..cb6daf062 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.cc @@ -50,8 +50,8 @@ DecisionLogic::DecisionLogic( disallow_time_stretching_(!config.allow_time_stretching), timescale_countdown_( tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1)), - estimate_dtx_delay_("estimate_dtx_delay", false), - time_stretch_cn_("time_stretch_cn", false), + estimate_dtx_delay_("estimate_dtx_delay", true), + time_stretch_cn_("time_stretch_cn", true), target_level_window_ms_("target_level_window", kDefaultTargetLevelWindowMs, 0, @@ -211,6 +211,7 @@ absl::optional DecisionLogic::PacketArrived( int fs_hz, bool should_update_stats, const PacketArrivedInfo& info) { + buffer_flush_ = buffer_flush_ || info.buffer_flush; if (info.is_cng_or_dtmf) { last_pack_cng_or_dtmf_ = true; return absl::nullopt; @@ -238,7 +239,12 @@ void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples) { timescale_countdown_ = tick_timer_->GetNewCountdown(kMinTimescaleInterval); } - buffer_level_filter_->Update(buffer_size_samples, time_stretched_samples); + if (buffer_flush_) { + 
buffer_level_filter_->SetFilteredBufferLevel(buffer_size_samples); + buffer_flush_ = false; + } else { + buffer_level_filter_->Update(buffer_size_samples, time_stretched_samples); + } prev_time_scale_ = false; time_stretched_cn_samples_ = 0; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h index 08feba64d..8be451141 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h @@ -188,6 +188,7 @@ class DecisionLogic : public NetEqController { int num_consecutive_expands_ = 0; int time_stretched_cn_samples_ = 0; bool last_pack_cng_or_dtmf_ = true; + bool buffer_flush_ = false; FieldTrialParameter estimate_dtx_delay_; FieldTrialParameter time_stretch_cn_; FieldTrialConstrained target_level_window_ms_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc index 33eeb96f1..aec80cfa4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.cc @@ -158,7 +158,8 @@ absl::optional DelayManager::Update(uint32_t timestamp, } const int expected_iat_ms = - 1000 * static_cast(timestamp - last_timestamp_) / sample_rate_hz; + 1000ll * static_cast(timestamp - last_timestamp_) / + sample_rate_hz; const int iat_ms = packet_iat_stopwatch_->ElapsedMs(); const int iat_delay_ms = iat_ms - expected_iat_ms; int relative_delay; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.cc index f1f2cc97e..5bf239bfc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.cc @@ -50,6 +50,9 
@@ size_t Merge::Process(int16_t* input, assert(fs_hz_ == 8000 || fs_hz_ == 16000 || fs_hz_ == 32000 || fs_hz_ == 48000); assert(fs_hz_ <= kMaxSampleRate); // Should not be possible. + if (input_length == 0) { + return 0; + } size_t old_length; size_t expand_period; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h index fdfdbb4d1..6d88e0921 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_neteq_controller.h @@ -48,6 +48,7 @@ class MockNetEqController : public NetEqController { bool should_update_stats, const PacketArrivedInfo& info), (override)); + MOCK_METHOD(void, NotifyMutedState, (), (override)); MOCK_METHOD(bool, PeakFound, (), (const, override)); MOCK_METHOD(int, GetFilteredBufferLevel, (), (const, override)); MOCK_METHOD(void, set_sample_memory, (int32_t value), (override)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h index e466ea6c8..48357ea46 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h @@ -22,11 +22,23 @@ class MockPacketBuffer : public PacketBuffer { : PacketBuffer(max_number_of_packets, tick_timer) {} ~MockPacketBuffer() override { Die(); } MOCK_METHOD(void, Die, ()); - MOCK_METHOD(void, Flush, (), (override)); + MOCK_METHOD(void, Flush, (StatisticsCalculator * stats), (override)); + MOCK_METHOD(void, + PartialFlush, + (int target_level_ms, + size_t sample_rate, + size_t last_decoded_length, + StatisticsCalculator* stats), + (override)); MOCK_METHOD(bool, Empty, (), (const, override)); MOCK_METHOD(int, InsertPacket, - (Packet 
&& packet, StatisticsCalculator* stats), + (Packet && packet, + StatisticsCalculator* stats, + size_t last_decoded_length, + size_t sample_rate, + int target_level_ms, + const DecoderDatabase& decoder_database), (override)); MOCK_METHOD(int, InsertPacketList, @@ -34,7 +46,10 @@ class MockPacketBuffer : public PacketBuffer { const DecoderDatabase& decoder_database, absl::optional* current_rtp_payload_type, absl::optional* current_cng_rtp_payload_type, - StatisticsCalculator* stats), + StatisticsCalculator* stats, + size_t last_decoded_length, + size_t sample_rate, + int target_level_ms), (override)); MOCK_METHOD(int, NextTimestamp, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc index f8d5d9dc1..6ac157fc1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.cc @@ -499,7 +499,7 @@ absl::optional NetEqImpl::GetDecoderFormat( void NetEqImpl::FlushBuffers() { MutexLock lock(&mutex_); RTC_LOG(LS_VERBOSE) << "FlushBuffers"; - packet_buffer_->Flush(); + packet_buffer_->Flush(stats_.get()); assert(sync_buffer_.get()); assert(expand_.get()); sync_buffer_->Flush(); @@ -565,19 +565,19 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, return kInvalidPointer; } - int64_t receive_time_ms = clock_->TimeInMilliseconds(); + Timestamp receive_time = clock_->CurrentTime(); stats_->ReceivedPacket(); PacketList packet_list; // Insert packet in a packet list. - packet_list.push_back([&rtp_header, &payload, &receive_time_ms] { + packet_list.push_back([&rtp_header, &payload, &receive_time] { // Convert to Packet. 
Packet packet; packet.payload_type = rtp_header.payloadType; packet.sequence_number = rtp_header.sequenceNumber; packet.timestamp = rtp_header.timestamp; packet.payload.SetData(payload.data(), payload.size()); - packet.packet_info = RtpPacketInfo(rtp_header, receive_time_ms); + packet.packet_info = RtpPacketInfo(rtp_header, receive_time); // Waiting time will be set upon inserting the packet in the buffer. RTC_DCHECK(!packet.waiting_time); return packet; @@ -607,7 +607,7 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, // the packet has been successfully inserted into the packet buffer. // Flush the packet buffer and DTMF buffer. - packet_buffer_->Flush(); + packet_buffer_->Flush(stats_.get()); dtmf_buffer_->Flush(); // Update audio buffer timestamp. @@ -746,13 +746,23 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, } // Insert packets in buffer. + const int target_level_ms = controller_->TargetLevelMs(); const int ret = packet_buffer_->InsertPacketList( &parsed_packet_list, *decoder_database_, ¤t_rtp_payload_type_, - ¤t_cng_rtp_payload_type_, stats_.get()); + ¤t_cng_rtp_payload_type_, stats_.get(), decoder_frame_length_, + last_output_sample_rate_hz_, target_level_ms); + bool buffer_flush_occured = false; if (ret == PacketBuffer::kFlushed) { // Reset DSP timestamp etc. if packet buffer flushed. 
new_codec_ = true; update_sample_rate_and_channels = true; + buffer_flush_occured = true; + } else if (ret == PacketBuffer::kPartialFlush) { + // Forward sync buffer timestamp + timestamp_ = packet_buffer_->PeekNextPacket()->timestamp; + sync_buffer_->IncreaseEndTimestamp(timestamp_ - + sync_buffer_->end_timestamp()); + buffer_flush_occured = true; } else if (ret != PacketBuffer::kOK) { return kOtherError; } @@ -810,6 +820,7 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, info.main_timestamp = main_timestamp; info.main_sequence_number = main_sequence_number; info.is_dtx = is_dtx; + info.buffer_flush = buffer_flush_occured; // Only update statistics if incoming packet is not older than last played // out packet or RTX handling is enabled, and if new codec flag is not // set. @@ -1203,6 +1214,11 @@ int NetEqImpl::GetDecision(Operation* operation, } controller_->ExpandDecision(*operation); + if ((last_mode_ == Mode::kCodecPlc) && (*operation != Operation::kExpand)) { + // Getting out of the PLC expand mode, reporting interruptions. + // NetEq PLC reports this metrics in expand.cc + stats_->EndExpandEvent(fs_hz_); + } // Check conditions for reset. 
if (new_codec_ || *operation == Operation::kUndefined) { @@ -2148,7 +2164,7 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) { expand_->overlap_length()); normal_.reset(new Normal(fs_hz, decoder_database_.get(), *background_noise_, - expand_.get())); + expand_.get(), stats_.get())); accelerate_.reset( accelerate_factory_->Create(fs_hz, channels, *background_noise_)); preemptive_expand_.reset(preemptive_expand_factory_->Create( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc index 967deea77..3ed0e26a7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc @@ -14,7 +14,6 @@ #include // min -#include "api/audio_codecs/audio_decoder.h" #include "common_audio/signal_processing/include/signal_processing_library.h" #include "modules/audio_coding/neteq/audio_multi_vector.h" #include "modules/audio_coding/neteq/background_noise.h" @@ -50,6 +49,13 @@ int Normal::Process(const int16_t* input, // TODO(hlundin): Investigate this further. const int fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); + // If last call resulted in a CodedPlc we don't need to do cross-fading but we + // need to report the end of the interruption once we are back to normal + // operation. + if (last_mode == NetEq::Mode::kCodecPlc) { + statistics_->EndExpandEvent(fs_hz_); + } + // Check if last RecOut call resulted in an Expand. If so, we have to take // care of some cross-fading and unmuting. 
if (last_mode == NetEq::Mode::kExpand) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h index d8c13e619..d6dc84a2d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h @@ -15,6 +15,7 @@ #include // Access to size_t. #include "api/neteq/neteq.h" +#include "modules/audio_coding/neteq/statistics_calculator.h" #include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/numerics/safe_conversions.h" @@ -35,14 +36,16 @@ class Normal { Normal(int fs_hz, DecoderDatabase* decoder_database, const BackgroundNoise& background_noise, - Expand* expand) + Expand* expand, + StatisticsCalculator* statistics) : fs_hz_(fs_hz), decoder_database_(decoder_database), background_noise_(background_noise), expand_(expand), samples_per_ms_(rtc::CheckedDivExact(fs_hz_, 1000)), default_win_slope_Q14_( - rtc::dchecked_cast((1 << 14) / samples_per_ms_)) {} + rtc::dchecked_cast((1 << 14) / samples_per_ms_)), + statistics_(statistics) {} virtual ~Normal() {} @@ -64,6 +67,7 @@ class Normal { Expand* expand_; const size_t samples_per_ms_; const int16_t default_win_slope_Q14_; + StatisticsCalculator* const statistics_; RTC_DISALLOW_COPY_AND_ASSIGN(Normal); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.cc index 059308f7f..86ae8475c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.cc @@ -25,8 +25,10 @@ #include "modules/audio_coding/neteq/decoder_database.h" #include "modules/audio_coding/neteq/statistics_calculator.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" #include 
"rtc_base/numerics/safe_conversions.h" +#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -61,27 +63,80 @@ void LogPacketDiscarded(int codec_level, StatisticsCalculator* stats) { } } +absl::optional GetSmartflushingConfig() { + absl::optional result; + std::string field_trial_string = + field_trial::FindFullName("WebRTC-Audio-NetEqSmartFlushing"); + result = SmartFlushingConfig(); + bool enabled = false; + auto parser = StructParametersParser::Create( + "enabled", &enabled, "target_level_threshold_ms", + &result->target_level_threshold_ms, "target_level_multiplier", + &result->target_level_multiplier); + parser->Parse(field_trial_string); + if (!enabled) { + return absl::nullopt; + } + RTC_LOG(LS_INFO) << "Using smart flushing, target_level_threshold_ms: " + << result->target_level_threshold_ms + << ", target_level_multiplier: " + << result->target_level_multiplier; + return result; +} + } // namespace PacketBuffer::PacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer) - : max_number_of_packets_(max_number_of_packets), tick_timer_(tick_timer) {} + : smart_flushing_config_(GetSmartflushingConfig()), + max_number_of_packets_(max_number_of_packets), + tick_timer_(tick_timer) {} // Destructor. All packets in the buffer will be destroyed. PacketBuffer::~PacketBuffer() { - Flush(); + buffer_.clear(); } // Flush the buffer. All packets in the buffer will be destroyed. -void PacketBuffer::Flush() { +void PacketBuffer::Flush(StatisticsCalculator* stats) { + for (auto& p : buffer_) { + LogPacketDiscarded(p.priority.codec_level, stats); + } buffer_.clear(); + stats->FlushedPacketBuffer(); +} + +void PacketBuffer::PartialFlush(int target_level_ms, + size_t sample_rate, + size_t last_decoded_length, + StatisticsCalculator* stats) { + // Make sure that at least half the packet buffer capacity will be available + // after the flush. This is done to avoid getting stuck if the target level is + // very high. 
+ int target_level_samples = + std::min(target_level_ms * sample_rate / 1000, + max_number_of_packets_ * last_decoded_length / 2); + // We should avoid flushing to very low levels. + target_level_samples = std::max( + target_level_samples, smart_flushing_config_->target_level_threshold_ms); + while (GetSpanSamples(last_decoded_length, sample_rate, true) > + static_cast(target_level_samples) || + buffer_.size() > max_number_of_packets_ / 2) { + LogPacketDiscarded(PeekNextPacket()->priority.codec_level, stats); + buffer_.pop_front(); + } } bool PacketBuffer::Empty() const { return buffer_.empty(); } -int PacketBuffer::InsertPacket(Packet&& packet, StatisticsCalculator* stats) { +int PacketBuffer::InsertPacket(Packet&& packet, + StatisticsCalculator* stats, + size_t last_decoded_length, + size_t sample_rate, + int target_level_ms, + const DecoderDatabase& decoder_database) { if (packet.empty()) { RTC_LOG(LS_WARNING) << "InsertPacket invalid packet"; return kInvalidPacket; @@ -94,12 +149,32 @@ int PacketBuffer::InsertPacket(Packet&& packet, StatisticsCalculator* stats) { packet.waiting_time = tick_timer_->GetNewStopwatch(); - if (buffer_.size() >= max_number_of_packets_) { - // Buffer is full. Flush it. - Flush(); - stats->FlushedPacketBuffer(); - RTC_LOG(LS_WARNING) << "Packet buffer flushed"; - return_val = kFlushed; + // Perform a smart flush if the buffer size exceeds a multiple of the target + // level. + const size_t span_threshold = + smart_flushing_config_ + ? smart_flushing_config_->target_level_multiplier * + std::max(smart_flushing_config_->target_level_threshold_ms, + target_level_ms) * + sample_rate / 1000 + : 0; + const bool smart_flush = + smart_flushing_config_.has_value() && + GetSpanSamples(last_decoded_length, sample_rate, true) >= span_threshold; + if (buffer_.size() >= max_number_of_packets_ || smart_flush) { + size_t buffer_size_before_flush = buffer_.size(); + if (smart_flushing_config_.has_value()) { + // Flush down to the target level. 
+ PartialFlush(target_level_ms, sample_rate, last_decoded_length, stats); + return_val = kPartialFlush; + } else { + // Buffer is full. + Flush(stats); + return_val = kFlushed; + } + RTC_LOG(LS_WARNING) << "Packet buffer flushed, " + << (buffer_size_before_flush - buffer_.size()) + << " packets discarded."; } // Get an iterator pointing to the place in the buffer where the new packet @@ -134,7 +209,10 @@ int PacketBuffer::InsertPacketList( const DecoderDatabase& decoder_database, absl::optional* current_rtp_payload_type, absl::optional* current_cng_rtp_payload_type, - StatisticsCalculator* stats) { + StatisticsCalculator* stats, + size_t last_decoded_length, + size_t sample_rate, + int target_level_ms) { RTC_DCHECK(stats); bool flushed = false; for (auto& packet : *packet_list) { @@ -143,7 +221,7 @@ int PacketBuffer::InsertPacketList( **current_cng_rtp_payload_type != packet.payload_type) { // New CNG payload type implies new codec type. *current_rtp_payload_type = absl::nullopt; - Flush(); + Flush(stats); flushed = true; } *current_cng_rtp_payload_type = packet.payload_type; @@ -156,12 +234,14 @@ int PacketBuffer::InsertPacketList( **current_cng_rtp_payload_type, decoder_database))) { *current_cng_rtp_payload_type = absl::nullopt; - Flush(); + Flush(stats); flushed = true; } *current_rtp_payload_type = packet.payload_type; } - int return_val = InsertPacket(std::move(packet), stats); + int return_val = + InsertPacket(std::move(packet), stats, last_decoded_length, sample_rate, + target_level_ms, decoder_database); if (return_val == kFlushed) { // The buffer flushed, but this is not an error. We can still continue. 
flushed = true; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h index c00db294c..cd2adf711 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h @@ -22,6 +22,14 @@ namespace webrtc { class DecoderDatabase; class StatisticsCalculator; class TickTimer; +struct SmartFlushingConfig { + // When calculating the flushing threshold, the maximum between the target + // level and this value is used. + int target_level_threshold_ms = 500; + // A smart flush is triggered when the packet buffer contains a multiple of + // the target level. + int target_level_multiplier = 3; +}; // This is the actual buffer holding the packets before decoding. class PacketBuffer { @@ -29,6 +37,7 @@ class PacketBuffer { enum BufferReturnCodes { kOK = 0, kFlushed, + kPartialFlush, kNotFound, kBufferEmpty, kInvalidPacket, @@ -43,7 +52,13 @@ class PacketBuffer { virtual ~PacketBuffer(); // Flushes the buffer and deletes all packets in it. - virtual void Flush(); + virtual void Flush(StatisticsCalculator* stats); + + // Partial flush. Flush packets but leave some packets behind. + virtual void PartialFlush(int target_level_ms, + size_t sample_rate, + size_t last_decoded_length, + StatisticsCalculator* stats); // Returns true for an empty buffer. virtual bool Empty() const; @@ -52,7 +67,12 @@ class PacketBuffer { // the packet object. // Returns PacketBuffer::kOK on success, PacketBuffer::kFlushed if the buffer // was flushed due to overfilling. - virtual int InsertPacket(Packet&& packet, StatisticsCalculator* stats); + virtual int InsertPacket(Packet&& packet, + StatisticsCalculator* stats, + size_t last_decoded_length, + size_t sample_rate, + int target_level_ms, + const DecoderDatabase& decoder_database); // Inserts a list of packets into the buffer. 
The buffer will take over // ownership of the packet objects. @@ -67,7 +87,10 @@ class PacketBuffer { const DecoderDatabase& decoder_database, absl::optional* current_rtp_payload_type, absl::optional* current_cng_rtp_payload_type, - StatisticsCalculator* stats); + StatisticsCalculator* stats, + size_t last_decoded_length, + size_t sample_rate, + int target_level_ms); // Gets the timestamp for the first packet in the buffer and writes it to the // output variable |next_timestamp|. @@ -146,6 +169,7 @@ class PacketBuffer { } private: + absl::optional smart_flushing_config_; size_t max_number_of_packets_; PacketList buffer_; const TickTimer* tick_timer_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc index 5681464f4..f5cd9c29e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.cc @@ -139,7 +139,7 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { /*rtp_timestamp=*/new_packet.timestamp, /*audio_level=*/absl::nullopt, /*absolute_capture_time=*/absl::nullopt, - /*receive_time_ms=*/red_packet.packet_info.receive_time_ms()); + /*receive_time=*/red_packet.packet_info.receive_time()); new_packets.push_front(std::move(new_packet)); payload_ptr += payload_length; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.h index 820d279d6..9e9182aed 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_player.h @@ -15,12 +15,12 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/android/aaudio_wrapper.h" #include "modules/audio_device/include/audio_device_defines.h" #include 
"rtc_base/message_handler.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -95,12 +95,12 @@ class AAudioPlayer final : public AAudioObserverInterface, // Ensures that methods are called from the same thread as this object is // created on. - rtc::ThreadChecker main_thread_checker_; + SequenceChecker main_thread_checker_; // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a // real-time thread owned by AAudio. Detached during construction of this // object. - rtc::ThreadChecker thread_checker_aaudio_; + SequenceChecker thread_checker_aaudio_; // The thread on which this object is created on. rtc::Thread* main_thread_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.h index d9427e2ae..bbf2cacf9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_recorder.h @@ -15,11 +15,11 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/android/aaudio_wrapper.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/message_handler.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -88,12 +88,12 @@ class AAudioRecorder : public AAudioObserverInterface, // Ensures that methods are called from the same thread as this object is // created on. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a // real-time thread owned by AAudio. Detached during construction of this // object. - rtc::ThreadChecker thread_checker_aaudio_; + SequenceChecker thread_checker_aaudio_; // The thread on which this object is created on. 
rtc::Thread* main_thread_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_wrapper.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_wrapper.h index 491509214..1f925b96d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/aaudio_wrapper.h @@ -13,8 +13,8 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -113,8 +113,8 @@ class AAudioWrapper { bool VerifyStreamConfiguration(); bool OptimizeBuffers(); - rtc::ThreadChecker thread_checker_; - rtc::ThreadChecker aaudio_thread_checker_; + SequenceChecker thread_checker_; + SequenceChecker aaudio_thread_checker_; AudioParameters audio_parameters_; const aaudio_direction_t direction_; AAudioObserverInterface* observer_ = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_device_template.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_device_template.h index fb5bf6fa5..3ea248f79 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_device_template.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_device_template.h @@ -11,11 +11,11 @@ #ifndef MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_TEMPLATE_H_ #define MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_TEMPLATE_H_ +#include "api/sequence_checker.h" #include "modules/audio_device/android/audio_manager.h" #include "modules/audio_device/audio_device_generic.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -39,7 +39,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { output_(audio_manager_), input_(audio_manager_), initialized_(false) { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_CHECK(audio_manager); 
audio_manager_->SetActiveAudioLayer(audio_layer); } @@ -48,13 +48,13 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { int32_t ActiveAudioLayer( AudioDeviceModule::AudioLayer& audioLayer) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; audioLayer = audio_layer_; return 0; } InitStatus Init() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_DCHECK(thread_checker_.IsCurrent()); RTC_DCHECK(!initialized_); if (!audio_manager_->Init()) { @@ -74,7 +74,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { } int32_t Terminate() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_DCHECK(thread_checker_.IsCurrent()); int32_t err = input_.Terminate(); err |= output_.Terminate(); @@ -85,18 +85,18 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { } bool Initialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_DCHECK(thread_checker_.IsCurrent()); return initialized_; } int16_t PlayoutDevices() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return 1; } int16_t RecordingDevices() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return 1; } @@ -115,7 +115,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { int32_t SetPlayoutDevice(uint16_t index) override { // OK to use but it has no effect currently since device selection is // done using Andoid APIs instead. - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return 0; } @@ -127,7 +127,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { int32_t SetRecordingDevice(uint16_t index) override { // OK to use but it has no effect currently since device selection is // done using Andoid APIs instead. 
- RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return 0; } @@ -137,39 +137,39 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { } int32_t PlayoutIsAvailable(bool& available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; available = true; return 0; } int32_t InitPlayout() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_.InitPlayout(); } bool PlayoutIsInitialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_.PlayoutIsInitialized(); } int32_t RecordingIsAvailable(bool& available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; available = true; return 0; } int32_t InitRecording() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return input_.InitRecording(); } bool RecordingIsInitialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return input_.RecordingIsInitialized(); } int32_t StartPlayout() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!audio_manager_->IsCommunicationModeEnabled()) { RTC_LOG(WARNING) << "The application should use MODE_IN_COMMUNICATION audio mode!"; @@ -181,7 +181,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { // Avoid using audio manger (JNI/Java cost) if playout was inactive. 
if (!Playing()) return 0; - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; int32_t err = output_.StopPlayout(); return err; } @@ -192,7 +192,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { } int32_t StartRecording() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!audio_manager_->IsCommunicationModeEnabled()) { RTC_LOG(WARNING) << "The application should use MODE_IN_COMMUNICATION audio mode!"; @@ -202,7 +202,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { int32_t StopRecording() override { // Avoid using audio manger (JNI/Java cost) if recording was inactive. - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!Recording()) return 0; int32_t err = input_.StopRecording(); @@ -212,47 +212,47 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { bool Recording() const override { return input_.Recording(); } int32_t InitSpeaker() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return 0; } bool SpeakerIsInitialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return true; } int32_t InitMicrophone() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return 0; } bool MicrophoneIsInitialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return true; } int32_t SpeakerVolumeIsAvailable(bool& available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_.SpeakerVolumeIsAvailable(available); } int32_t SetSpeakerVolume(uint32_t volume) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_.SetSpeakerVolume(volume); } int32_t SpeakerVolume(uint32_t& volume) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_.SpeakerVolume(volume); } int32_t MaxSpeakerVolume(uint32_t& maxVolume) const override { - RTC_LOG(INFO) << 
__FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_.MaxSpeakerVolume(maxVolume); } int32_t MinSpeakerVolume(uint32_t& minVolume) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_.MinSpeakerVolume(minVolume); } @@ -299,13 +299,13 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { // Returns true if the audio manager has been configured to support stereo // and false otherwised. Default is mono. int32_t StereoPlayoutIsAvailable(bool& available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; available = audio_manager_->IsStereoPlayoutSupported(); return 0; } int32_t SetStereoPlayout(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; bool available = audio_manager_->IsStereoPlayoutSupported(); // Android does not support changes between mono and stero on the fly. // Instead, the native audio layer is configured via the audio manager @@ -320,13 +320,13 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { } int32_t StereoRecordingIsAvailable(bool& available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; available = audio_manager_->IsStereoRecordSupported(); return 0; } int32_t SetStereoRecording(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; bool available = audio_manager_->IsStereoRecordSupported(); // Android does not support changes between mono and stero on the fly. 
// Instead, the native audio layer is configured via the audio manager @@ -336,7 +336,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { } int32_t StereoRecording(bool& enabled) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; enabled = audio_manager_->IsStereoRecordSupported(); return 0; } @@ -349,7 +349,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { } void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; output_.AttachAudioBuffer(audioBuffer); input_.AttachAudioBuffer(audioBuffer); } @@ -367,13 +367,13 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { // a "Not Implemented" log will be filed. This non-perfect state will remain // until I have added full support for audio effects based on OpenSL ES APIs. bool BuiltInAECIsAvailable() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return audio_manager_->IsAcousticEchoCancelerSupported(); } // TODO(henrika): add implementation for OpenSL ES based audio as well. int32_t EnableBuiltInAEC(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; RTC_CHECK(BuiltInAECIsAvailable()) << "HW AEC is not available"; return input_.EnableBuiltInAEC(enable); } @@ -383,13 +383,13 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { // TODO(henrika): add implementation for OpenSL ES based audio as well. // In addition, see comments for BuiltInAECIsAvailable(). bool BuiltInAGCIsAvailable() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return audio_manager_->IsAutomaticGainControlSupported(); } // TODO(henrika): add implementation for OpenSL ES based audio as well. 
int32_t EnableBuiltInAGC(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; RTC_CHECK(BuiltInAGCIsAvailable()) << "HW AGC is not available"; return input_.EnableBuiltInAGC(enable); } @@ -399,19 +399,19 @@ class AudioDeviceTemplate : public AudioDeviceGeneric { // TODO(henrika): add implementation for OpenSL ES based audio as well. // In addition, see comments for BuiltInAECIsAvailable(). bool BuiltInNSIsAvailable() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return audio_manager_->IsNoiseSuppressorSupported(); } // TODO(henrika): add implementation for OpenSL ES based audio as well. int32_t EnableBuiltInNS(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; RTC_CHECK(BuiltInNSIsAvailable()) << "HW NS is not available"; return input_.EnableBuiltInNS(enable); } private: - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Local copy of the audio layer set during construction of the // AudioDeviceModuleImpl instance. Read only value. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_manager.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_manager.h index d1debdb41..900fc78a6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_manager.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_manager.h @@ -16,6 +16,7 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/android/audio_common.h" #include "modules/audio_device/android/opensles_common.h" #include "modules/audio_device/audio_device_config.h" @@ -23,7 +24,6 @@ #include "modules/audio_device/include/audio_device_defines.h" #include "modules/utility/include/helpers_android.h" #include "modules/utility/include/jvm_android.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -158,9 +158,9 @@ class AudioManager { jint input_buffer_size); // Stores thread ID in the constructor. - // We can then use ThreadChecker::IsCurrent() to ensure that + // We can then use RTC_DCHECK_RUN_ON(&thread_checker_) to ensure that // other methods are called from the same thread. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Calls JavaVM::AttachCurrentThread() if this thread is not attached at // construction. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.h index 102f29ab1..c445360d6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_record_jni.h @@ -15,12 +15,12 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/android/audio_manager.h" #include "modules/audio_device/audio_device_generic.h" #include "modules/audio_device/include/audio_device_defines.h" #include "modules/utility/include/helpers_android.h" #include "modules/utility/include/jvm_android.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -110,11 +110,11 @@ class AudioRecordJni { void OnDataIsRecorded(int length); // Stores thread ID in constructor. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to OnDataIsRecorded() from high-priority // thread in Java. Detached during construction of this object. - rtc::ThreadChecker thread_checker_java_; + SequenceChecker thread_checker_java_; // Calls JavaVM::AttachCurrentThread() if this thread is not attached at // construction. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_track_jni.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_track_jni.h index 529a9013e..62bcba42b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_track_jni.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/audio_track_jni.h @@ -15,13 +15,13 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/android/audio_common.h" #include "modules/audio_device/android/audio_manager.h" #include "modules/audio_device/audio_device_generic.h" #include "modules/audio_device/include/audio_device_defines.h" #include "modules/utility/include/helpers_android.h" #include "modules/utility/include/jvm_android.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -109,11 +109,11 @@ class AudioTrackJni { void OnGetPlayoutData(size_t length); // Stores thread ID in constructor. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to OnGetPlayoutData() from high-priority // thread in Java. Detached during construction of this object. - rtc::ThreadChecker thread_checker_java_; + SequenceChecker thread_checker_java_; // Calls JavaVM::AttachCurrentThread() if this thread is not attached at // construction. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_player.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_player.h index 20107585a..78af29b6b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_player.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_player.h @@ -15,13 +15,13 @@ #include #include +#include "api/sequence_checker.h" #include "modules/audio_device/android/audio_common.h" #include "modules/audio_device/android/audio_manager.h" #include "modules/audio_device/android/opensles_common.h" #include "modules/audio_device/audio_device_generic.h" #include "modules/audio_device/include/audio_device_defines.h" #include "modules/utility/include/helpers_android.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -113,12 +113,12 @@ class OpenSLESPlayer { // Ensures that methods are called from the same thread as this object is // created on. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to SimpleBufferQueueCallback() from internal // non-application thread which is not attached to the Dalvik JVM. // Detached during construction of this object. - rtc::ThreadChecker thread_checker_opensles_; + SequenceChecker thread_checker_opensles_; // Raw pointer to the audio manager injected at construction. 
Used to cache // audio parameters and to access the global SL engine object needed by the diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_recorder.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_recorder.h index ee1ede51d..5f975d724 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_recorder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/android/opensles_recorder.h @@ -17,13 +17,13 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/android/audio_common.h" #include "modules/audio_device/android/audio_manager.h" #include "modules/audio_device/android/opensles_common.h" #include "modules/audio_device/audio_device_generic.h" #include "modules/audio_device/include/audio_device_defines.h" #include "modules/utility/include/helpers_android.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -123,12 +123,12 @@ class OpenSLESRecorder { // Ensures that methods are called from the same thread as this object is // created on. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to SimpleBufferQueueCallback() from internal // non-application thread which is not attached to the Dalvik JVM. // Detached during construction of this object. - rtc::ThreadChecker thread_checker_opensles_; + SequenceChecker thread_checker_opensles_; // Raw pointer to the audio manager injected at construction. 
Used to cache // audio parameters and to access the global SL engine object needed by the diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc index 8d3637308..977045419 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc @@ -17,7 +17,6 @@ #include #include "common_audio/signal_processing/include/signal_processing_library.h" -#include "rtc_base/bind.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" @@ -79,7 +78,7 @@ AudioDeviceBuffer::~AudioDeviceBuffer() { int32_t AudioDeviceBuffer::RegisterAudioCallback( AudioTransport* audio_callback) { RTC_DCHECK_RUN_ON(&main_thread_checker_); - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (playing_ || recording_) { RTC_LOG(LS_ERROR) << "Failed to set audio transport since media was active"; return -1; @@ -96,7 +95,7 @@ void AudioDeviceBuffer::StartPlayout() { if (playing_) { return; } - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; // Clear members tracking playout stats and do it on the task queue. task_queue_.PostTask([this] { ResetPlayStats(); }); // Start a periodic timer based on task queue if not already done by the @@ -115,7 +114,7 @@ void AudioDeviceBuffer::StartRecording() { if (recording_) { return; } - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; // Clear members tracking recording stats and do it on the task queue. task_queue_.PostTask([this] { ResetRecStats(); }); // Start a periodic timer based on task queue if not already done by the @@ -137,7 +136,7 @@ void AudioDeviceBuffer::StopPlayout() { if (!playing_) { return; } - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; playing_ = false; // Stop periodic logging if no more media is active. 
if (!recording_) { @@ -151,7 +150,7 @@ void AudioDeviceBuffer::StopRecording() { if (!recording_) { return; } - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; recording_ = false; // Stop periodic logging if no more media is active. if (!playing_) { @@ -349,13 +348,11 @@ int32_t AudioDeviceBuffer::GetPlayoutData(void* audio_buffer) { } void AudioDeviceBuffer::StartPeriodicLogging() { - task_queue_.PostTask(rtc::Bind(&AudioDeviceBuffer::LogStats, this, - AudioDeviceBuffer::LOG_START)); + task_queue_.PostTask([this] { LogStats(AudioDeviceBuffer::LOG_START); }); } void AudioDeviceBuffer::StopPeriodicLogging() { - task_queue_.PostTask(rtc::Bind(&AudioDeviceBuffer::LogStats, this, - AudioDeviceBuffer::LOG_STOP)); + task_queue_.PostTask([this] { LogStats(AudioDeviceBuffer::LOG_STOP); }); } void AudioDeviceBuffer::LogStats(LogState state) { @@ -460,9 +457,9 @@ void AudioDeviceBuffer::LogStats(LogState state) { RTC_DCHECK_GT(time_to_wait_ms, 0) << "Invalid timer interval"; // Keep posting new (delayed) tasks until state is changed to kLogStop. 
- task_queue_.PostDelayedTask(rtc::Bind(&AudioDeviceBuffer::LogStats, this, - AudioDeviceBuffer::LOG_ACTIVE), - time_to_wait_ms); + task_queue_.PostDelayedTask( + [this] { AudioDeviceBuffer::LogStats(AudioDeviceBuffer::LOG_ACTIVE); }, + time_to_wait_ms); } void AudioDeviceBuffer::ResetRecStats() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h index 37b8a2ec5..a0b795319 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h @@ -16,13 +16,13 @@ #include +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/buffer.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -140,7 +140,7 @@ class AudioDeviceBuffer { // TODO(henrika): see if it is possible to refactor and annotate all members. // Main thread on which this object is created. 
- rtc::ThreadChecker main_thread_checker_; + SequenceChecker main_thread_checker_; Mutex lock_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc index 89265a288..be78fd16d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc @@ -301,9 +301,8 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { rtc::scoped_refptr CreateAudioDeviceWithDataObserver( rtc::scoped_refptr impl, std::unique_ptr observer) { - rtc::scoped_refptr audio_device( - new rtc::RefCountedObject(impl, observer.get(), - std::move(observer))); + auto audio_device = rtc::make_ref_counted(impl, observer.get(), + std::move(observer)); if (!audio_device->IsValid()) { return nullptr; @@ -315,8 +314,8 @@ rtc::scoped_refptr CreateAudioDeviceWithDataObserver( rtc::scoped_refptr CreateAudioDeviceWithDataObserver( rtc::scoped_refptr impl, AudioDeviceDataObserver* legacy_observer) { - rtc::scoped_refptr audio_device( - new rtc::RefCountedObject(impl, legacy_observer, nullptr)); + auto audio_device = + rtc::make_ref_counted(impl, legacy_observer, nullptr); if (!audio_device->IsValid()) { return nullptr; @@ -329,10 +328,8 @@ rtc::scoped_refptr CreateAudioDeviceWithDataObserver( AudioDeviceModule::AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, std::unique_ptr observer) { - rtc::scoped_refptr audio_device( - new rtc::RefCountedObject(audio_layer, task_queue_factory, - observer.get(), - std::move(observer))); + auto audio_device = rtc::make_ref_counted( + audio_layer, task_queue_factory, observer.get(), std::move(observer)); if (!audio_device->IsValid()) { return nullptr; @@ -345,9 +342,8 @@ rtc::scoped_refptr CreateAudioDeviceWithDataObserver( AudioDeviceModule::AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, 
AudioDeviceDataObserver* legacy_observer) { - rtc::scoped_refptr audio_device( - new rtc::RefCountedObject(audio_layer, task_queue_factory, - legacy_observer, nullptr)); + auto audio_device = rtc::make_ref_counted( + audio_layer, task_queue_factory, legacy_observer, nullptr); if (!audio_device->IsValid()) { return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_impl.cc index 73031b959..84460ff83 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_impl.cc @@ -73,7 +73,7 @@ namespace webrtc { rtc::scoped_refptr AudioDeviceModule::Create( AudioLayer audio_layer, TaskQueueFactory* task_queue_factory) { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory); } @@ -81,7 +81,7 @@ rtc::scoped_refptr AudioDeviceModule::Create( rtc::scoped_refptr AudioDeviceModule::CreateForTest( AudioLayer audio_layer, TaskQueueFactory* task_queue_factory) { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; // The "AudioDeviceModule::kWindowsCoreAudio2" audio layer has its own // dedicated factory method which should be used instead. @@ -92,38 +92,37 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( } // Create the generic reference counted (platform independent) implementation. - rtc::scoped_refptr audioDevice( - new rtc::RefCountedObject(audio_layer, - task_queue_factory)); + auto audio_device = rtc::make_ref_counted( + audio_layer, task_queue_factory); // Ensure that the current platform is supported. - if (audioDevice->CheckPlatform() == -1) { + if (audio_device->CheckPlatform() == -1) { return nullptr; } // Create the platform-dependent implementation. 
- if (audioDevice->CreatePlatformSpecificObjects() == -1) { + if (audio_device->CreatePlatformSpecificObjects() == -1) { return nullptr; } // Ensure that the generic audio buffer can communicate with the platform // specific parts. - if (audioDevice->AttachAudioBuffer() == -1) { + if (audio_device->AttachAudioBuffer() == -1) { return nullptr; } - return audioDevice; + return audio_device; } AudioDeviceModuleImpl::AudioDeviceModuleImpl( AudioLayer audio_layer, TaskQueueFactory* task_queue_factory) : audio_layer_(audio_layer), audio_device_buffer_(task_queue_factory) { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; } int32_t AudioDeviceModuleImpl::CheckPlatform() { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; // Ensure that the current platform is supported PlatformType platform(kPlatformNotSupported); #if defined(_WIN32) @@ -280,7 +279,8 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() { // iOS ADM implementation. #if defined(WEBRTC_IOS) if (audio_layer == kPlatformDefaultAudio) { - audio_device_.reset(new ios_adm::AudioDeviceIOS()); + audio_device_.reset( + new ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/false)); RTC_LOG(INFO) << "iPhone Audio APIs will be utilized."; } // END #if defined(WEBRTC_IOS) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.cc index c68e7bba1..e345a16c4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.cc @@ -216,10 +216,13 @@ int32_t FileAudioDevice::StartPlayout() { } } - _ptrThreadPlay.reset(new rtc::PlatformThread( - PlayThreadFunc, this, "webrtc_audio_module_play_thread", - rtc::kRealtimePriority)); - _ptrThreadPlay->Start(); + _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable( + [this] { + while (PlayThreadProcess()) { + } + }, + 
"webrtc_audio_module_play_thread", + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); RTC_LOG(LS_INFO) << "Started playout capture to output file: " << _outputFilename; @@ -233,10 +236,8 @@ int32_t FileAudioDevice::StopPlayout() { } // stop playout thread first - if (_ptrThreadPlay) { - _ptrThreadPlay->Stop(); - _ptrThreadPlay.reset(); - } + if (!_ptrThreadPlay.empty()) + _ptrThreadPlay.Finalize(); MutexLock lock(&mutex_); @@ -276,11 +277,13 @@ int32_t FileAudioDevice::StartRecording() { } } - _ptrThreadRec.reset(new rtc::PlatformThread( - RecThreadFunc, this, "webrtc_audio_module_capture_thread", - rtc::kRealtimePriority)); - - _ptrThreadRec->Start(); + _ptrThreadRec = rtc::PlatformThread::SpawnJoinable( + [this] { + while (RecThreadProcess()) { + } + }, + "webrtc_audio_module_capture_thread", + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); RTC_LOG(LS_INFO) << "Started recording from input file: " << _inputFilename; @@ -293,10 +296,8 @@ int32_t FileAudioDevice::StopRecording() { _recording = false; } - if (_ptrThreadRec) { - _ptrThreadRec->Stop(); - _ptrThreadRec.reset(); - } + if (!_ptrThreadRec.empty()) + _ptrThreadRec.Finalize(); MutexLock lock(&mutex_); _recordingFramesLeft = 0; @@ -439,18 +440,6 @@ void FileAudioDevice::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { _ptrAudioBuffer->SetPlayoutChannels(0); } -void FileAudioDevice::PlayThreadFunc(void* pThis) { - FileAudioDevice* device = static_cast(pThis); - while (device->PlayThreadProcess()) { - } -} - -void FileAudioDevice::RecThreadFunc(void* pThis) { - FileAudioDevice* device = static_cast(pThis); - while (device->RecThreadProcess()) { - } -} - bool FileAudioDevice::PlayThreadProcess() { if (!_playing) { return false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.h index ecb3f2f53..f4a6b7658 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/dummy/file_audio_device.h @@ -17,14 +17,11 @@ #include #include "modules/audio_device/audio_device_generic.h" +#include "rtc_base/platform_thread.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/time_utils.h" -namespace rtc { -class PlatformThread; -} // namespace rtc - namespace webrtc { // This is a fake audio device which plays audio from a file as its microphone @@ -145,9 +142,8 @@ class FileAudioDevice : public AudioDeviceGeneric { size_t _recordingFramesIn10MS; size_t _playoutFramesIn10MS; - // TODO(pbos): Make plain members instead of pointers and stop resetting them. - std::unique_ptr _ptrThreadRec; - std::unique_ptr _ptrThreadPlay; + rtc::PlatformThread _ptrThreadRec; + rtc::PlatformThread _ptrThreadPlay; bool _playing; bool _recording; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h index e1c2035d6..b59cafcb5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h @@ -14,6 +14,7 @@ #include #include +#include "absl/base/attributes.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" #include "modules/audio_device/include/audio_device.h" @@ -48,7 +49,7 @@ rtc::scoped_refptr CreateAudioDeviceWithDataObserver( // Creates an ADMWrapper around an ADM instance that registers // the provided AudioDeviceDataObserver. 
-RTC_DEPRECATED +ABSL_DEPRECATED("") rtc::scoped_refptr CreateAudioDeviceWithDataObserver( rtc::scoped_refptr impl, AudioDeviceDataObserver* observer); @@ -60,7 +61,7 @@ rtc::scoped_refptr CreateAudioDeviceWithDataObserver( std::unique_ptr observer); // Creates an ADM instance with AudioDeviceDataObserver registered. -RTC_DEPRECATED +ABSL_DEPRECATED("") rtc::scoped_refptr CreateAudioDeviceWithDataObserver( const AudioDeviceModule::AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h index d5d4d7372..01129a47a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h @@ -16,7 +16,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/deprecation.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_device.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_device.h index 0ca19de15..8483aa3da 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_device.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_device.h @@ -23,11 +23,10 @@ namespace test { class MockAudioDeviceModule : public AudioDeviceModule { public: static rtc::scoped_refptr CreateNice() { - return new rtc::RefCountedObject< - ::testing::NiceMock>(); + return rtc::make_ref_counted<::testing::NiceMock>(); } static rtc::scoped_refptr CreateStrict() { - return new rtc::RefCountedObject< + return rtc::make_ref_counted< ::testing::StrictMock>(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.cc index 
46bf21654..8351e8a40 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/test_audio_device.cc @@ -447,7 +447,7 @@ rtc::scoped_refptr TestAudioDeviceModule::Create( std::unique_ptr capturer, std::unique_ptr renderer, float speed) { - return new rtc::RefCountedObject( + return rtc::make_ref_counted( task_queue_factory, std::move(capturer), std::move(renderer), speed); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc index 84d05e0f6..9e6bd168f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc @@ -98,7 +98,7 @@ AudioDeviceLinuxALSA::AudioDeviceLinuxALSA() _recordingDelay(0), _playoutDelay(0) { memset(_oldKeyState, 0, sizeof(_oldKeyState)); - RTC_LOG(LS_INFO) << __FUNCTION__ << " created"; + RTC_DLOG(LS_INFO) << __FUNCTION__ << " created"; } // ---------------------------------------------------------------------------- @@ -106,7 +106,7 @@ AudioDeviceLinuxALSA::AudioDeviceLinuxALSA() // ---------------------------------------------------------------------------- AudioDeviceLinuxALSA::~AudioDeviceLinuxALSA() { - RTC_LOG(LS_INFO) << __FUNCTION__ << " destroyed"; + RTC_DLOG(LS_INFO) << __FUNCTION__ << " destroyed"; Terminate(); @@ -178,26 +178,13 @@ int32_t AudioDeviceLinuxALSA::Terminate() { _mixerManager.Close(); // RECORDING - if (_ptrThreadRec) { - rtc::PlatformThread* tmpThread = _ptrThreadRec.release(); - mutex_.Unlock(); - - tmpThread->Stop(); - delete tmpThread; - - mutex_.Lock(); - } + mutex_.Unlock(); + _ptrThreadRec.Finalize(); // PLAYOUT - if (_ptrThreadPlay) { - rtc::PlatformThread* tmpThread = _ptrThreadPlay.release(); - mutex_.Unlock(); + _ptrThreadPlay.Finalize(); + mutex_.Lock(); - 
tmpThread->Stop(); - delete tmpThread; - - mutex_.Lock(); - } #if defined(WEBRTC_USE_X11) if (_XDisplay) { XCloseDisplay(_XDisplay); @@ -1040,11 +1027,13 @@ int32_t AudioDeviceLinuxALSA::StartRecording() { return -1; } // RECORDING - _ptrThreadRec.reset(new rtc::PlatformThread( - RecThreadFunc, this, "webrtc_audio_module_capture_thread", - rtc::kRealtimePriority)); - - _ptrThreadRec->Start(); + _ptrThreadRec = rtc::PlatformThread::SpawnJoinable( + [this] { + while (RecThreadProcess()) { + } + }, + "webrtc_audio_module_capture_thread", + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); errVal = LATE(snd_pcm_prepare)(_handleRecord); if (errVal < 0) { @@ -1088,10 +1077,7 @@ int32_t AudioDeviceLinuxALSA::StopRecordingLocked() { _recIsInitialized = false; _recording = false; - if (_ptrThreadRec) { - _ptrThreadRec->Stop(); - _ptrThreadRec.reset(); - } + _ptrThreadRec.Finalize(); _recordingFramesLeft = 0; if (_recordingBuffer) { @@ -1158,10 +1144,13 @@ int32_t AudioDeviceLinuxALSA::StartPlayout() { } // PLAYOUT - _ptrThreadPlay.reset(new rtc::PlatformThread( - PlayThreadFunc, this, "webrtc_audio_module_play_thread", - rtc::kRealtimePriority)); - _ptrThreadPlay->Start(); + _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable( + [this] { + while (PlayThreadProcess()) { + } + }, + "webrtc_audio_module_play_thread", + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); int errVal = LATE(snd_pcm_prepare)(_handlePlayout); if (errVal < 0) { @@ -1191,10 +1180,7 @@ int32_t AudioDeviceLinuxALSA::StopPlayoutLocked() { _playing = false; // stop playout thread first - if (_ptrThreadPlay) { - _ptrThreadPlay->Stop(); - _ptrThreadPlay.reset(); - } + _ptrThreadPlay.Finalize(); _playoutFramesLeft = 0; delete[] _playoutBuffer; @@ -1469,18 +1455,6 @@ int32_t AudioDeviceLinuxALSA::ErrorRecovery(int32_t error, // Thread Methods // ============================================================================ -void AudioDeviceLinuxALSA::PlayThreadFunc(void* 
pThis) { - AudioDeviceLinuxALSA* device = static_cast(pThis); - while (device->PlayThreadProcess()) { - } -} - -void AudioDeviceLinuxALSA::RecThreadFunc(void* pThis) { - AudioDeviceLinuxALSA* device = static_cast(pThis); - while (device->RecThreadProcess()) { - } -} - bool AudioDeviceLinuxALSA::PlayThreadProcess() { if (!_playing) return false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h index 410afcf42..1f4a23164 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h @@ -155,10 +155,8 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric { Mutex mutex_; - // TODO(pbos): Make plain members and start/stop instead of resetting these - // pointers. A thread can be reused. - std::unique_ptr _ptrThreadRec; - std::unique_ptr _ptrThreadPlay; + rtc::PlatformThread _ptrThreadRec; + rtc::PlatformThread _ptrThreadPlay; AudioMixerManagerLinuxALSA _mixerManager; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc index 9a7d1d0ca..7742420fc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc @@ -15,6 +15,7 @@ #include "modules/audio_device/linux/latebindingsymboltable_linux.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/platform_thread.h" WebRTCPulseSymbolTable* GetPulseSymbolTable() { static WebRTCPulseSymbolTable* pulse_symbol_table = @@ -78,7 +79,7 @@ AudioDeviceLinuxPulse::AudioDeviceLinuxPulse() _playStream(NULL), _recStreamFlags(0), _playStreamFlags(0) { - RTC_LOG(LS_INFO) << __FUNCTION__ << " created"; + 
RTC_DLOG(LS_INFO) << __FUNCTION__ << " created"; memset(_paServerVersion, 0, sizeof(_paServerVersion)); memset(&_playBufferAttr, 0, sizeof(_playBufferAttr)); @@ -87,7 +88,7 @@ AudioDeviceLinuxPulse::AudioDeviceLinuxPulse() } AudioDeviceLinuxPulse::~AudioDeviceLinuxPulse() { - RTC_LOG(LS_INFO) << __FUNCTION__ << " destroyed"; + RTC_DLOG(LS_INFO) << __FUNCTION__ << " destroyed"; RTC_DCHECK(thread_checker_.IsCurrent()); Terminate(); @@ -158,18 +159,22 @@ AudioDeviceGeneric::InitStatus AudioDeviceLinuxPulse::Init() { #endif // RECORDING - _ptrThreadRec.reset(new rtc::PlatformThread(RecThreadFunc, this, - "webrtc_audio_module_rec_thread", - rtc::kRealtimePriority)); - - _ptrThreadRec->Start(); + const auto attributes = + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime); + _ptrThreadRec = rtc::PlatformThread::SpawnJoinable( + [this] { + while (RecThreadProcess()) { + } + }, + "webrtc_audio_module_rec_thread", attributes); // PLAYOUT - _ptrThreadPlay.reset(new rtc::PlatformThread( - PlayThreadFunc, this, "webrtc_audio_module_play_thread", - rtc::kRealtimePriority)); - _ptrThreadPlay->Start(); - + _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable( + [this] { + while (PlayThreadProcess()) { + } + }, + "webrtc_audio_module_play_thread", attributes); _initialized = true; return InitStatus::OK; @@ -187,22 +192,12 @@ int32_t AudioDeviceLinuxPulse::Terminate() { _mixerManager.Close(); // RECORDING - if (_ptrThreadRec) { - rtc::PlatformThread* tmpThread = _ptrThreadRec.release(); - - _timeEventRec.Set(); - tmpThread->Stop(); - delete tmpThread; - } + _timeEventRec.Set(); + _ptrThreadRec.Finalize(); // PLAYOUT - if (_ptrThreadPlay) { - rtc::PlatformThread* tmpThread = _ptrThreadPlay.release(); - - _timeEventPlay.Set(); - tmpThread->Stop(); - delete tmpThread; - } + _timeEventPlay.Set(); + _ptrThreadPlay.Finalize(); // Terminate PulseAudio if (TerminatePulseAudio() < 0) { @@ -1981,18 +1976,6 @@ int32_t AudioDeviceLinuxPulse::ProcessRecordedData(int8_t* 
bufferData, return 0; } -void AudioDeviceLinuxPulse::PlayThreadFunc(void* pThis) { - AudioDeviceLinuxPulse* device = static_cast(pThis); - while (device->PlayThreadProcess()) { - } -} - -void AudioDeviceLinuxPulse::RecThreadFunc(void* pThis) { - AudioDeviceLinuxPulse* device = static_cast(pThis); - while (device->RecThreadProcess()) { - } -} - bool AudioDeviceLinuxPulse::PlayThreadProcess() { if (!_timeEventPlay.Wait(1000)) { return true; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h index 03aa16bb8..0cf89ef01 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h @@ -13,6 +13,7 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_generic.h" #include "modules/audio_device/include/audio_device.h" @@ -23,7 +24,6 @@ #include "rtc_base/platform_thread.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #if defined(WEBRTC_USE_X11) #include @@ -268,9 +268,8 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric { rtc::Event _recStartEvent; rtc::Event _playStartEvent; - // TODO(pbos): Remove unique_ptr and use directly without resetting. - std::unique_ptr _ptrThreadPlay; - std::unique_ptr _ptrThreadRec; + rtc::PlatformThread _ptrThreadPlay; + rtc::PlatformThread _ptrThreadRec; AudioMixerManagerLinuxPulse _mixerManager; @@ -284,10 +283,10 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric { uint8_t _playChannels; // Stores thread ID in constructor. 
- // We can then use ThreadChecker::IsCurrent() to ensure that + // We can then use RTC_DCHECK_RUN_ON(&worker_thread_checker_) to ensure that // other methods are called from the same thread. // Currently only does RTC_DCHECK(thread_checker_.IsCurrent()). - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; bool _initialized; bool _recording; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc index fb9d874ef..e7e703317 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc @@ -27,14 +27,14 @@ AudioMixerManagerLinuxALSA::AudioMixerManagerLinuxALSA() _inputMixerHandle(NULL), _outputMixerElement(NULL), _inputMixerElement(NULL) { - RTC_LOG(LS_INFO) << __FUNCTION__ << " created"; + RTC_DLOG(LS_INFO) << __FUNCTION__ << " created"; memset(_outputMixerStr, 0, kAdmMaxDeviceNameSize); memset(_inputMixerStr, 0, kAdmMaxDeviceNameSize); } AudioMixerManagerLinuxALSA::~AudioMixerManagerLinuxALSA() { - RTC_LOG(LS_INFO) << __FUNCTION__ << " destroyed"; + RTC_DLOG(LS_INFO) << __FUNCTION__ << " destroyed"; Close(); } @@ -43,7 +43,7 @@ AudioMixerManagerLinuxALSA::~AudioMixerManagerLinuxALSA() { // ============================================================================ int32_t AudioMixerManagerLinuxALSA::Close() { - RTC_LOG(LS_VERBOSE) << __FUNCTION__; + RTC_DLOG(LS_VERBOSE) << __FUNCTION__; MutexLock lock(&mutex_); @@ -59,7 +59,7 @@ int32_t AudioMixerManagerLinuxALSA::CloseSpeaker() { } int32_t AudioMixerManagerLinuxALSA::CloseSpeakerLocked() { - RTC_LOG(LS_VERBOSE) << __FUNCTION__; + RTC_DLOG(LS_VERBOSE) << __FUNCTION__; int errVal = 0; @@ -94,7 +94,7 @@ int32_t AudioMixerManagerLinuxALSA::CloseMicrophone() { } int32_t AudioMixerManagerLinuxALSA::CloseMicrophoneLocked() { 
- RTC_LOG(LS_VERBOSE) << __FUNCTION__; + RTC_DLOG(LS_VERBOSE) << __FUNCTION__; int errVal = 0; @@ -289,13 +289,13 @@ int32_t AudioMixerManagerLinuxALSA::OpenMicrophone(char* deviceName) { } bool AudioMixerManagerLinuxALSA::SpeakerIsInitialized() const { - RTC_LOG(LS_INFO) << __FUNCTION__; + RTC_DLOG(LS_INFO) << __FUNCTION__; return (_outputMixerHandle != NULL); } bool AudioMixerManagerLinuxALSA::MicrophoneIsInitialized() const { - RTC_LOG(LS_INFO) << __FUNCTION__; + RTC_DLOG(LS_INFO) << __FUNCTION__; return (_inputMixerHandle != NULL); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc index c507e623b..91beee3c8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc @@ -54,12 +54,12 @@ AudioMixerManagerLinuxPulse::AudioMixerManagerLinuxPulse() _paSpeakerVolume(PA_VOLUME_NORM), _paChannels(0), _paObjectsSet(false) { - RTC_LOG(LS_INFO) << __FUNCTION__ << " created"; + RTC_DLOG(LS_INFO) << __FUNCTION__ << " created"; } AudioMixerManagerLinuxPulse::~AudioMixerManagerLinuxPulse() { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_LOG(LS_INFO) << __FUNCTION__ << " destroyed"; + RTC_DLOG(LS_INFO) << __FUNCTION__ << " destroyed"; Close(); } @@ -72,7 +72,7 @@ int32_t AudioMixerManagerLinuxPulse::SetPulseAudioObjects( pa_threaded_mainloop* mainloop, pa_context* context) { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_LOG(LS_VERBOSE) << __FUNCTION__; + RTC_DLOG(LS_VERBOSE) << __FUNCTION__; if (!mainloop || !context) { RTC_LOG(LS_ERROR) << "could not set PulseAudio objects for mixer"; @@ -90,7 +90,7 @@ int32_t AudioMixerManagerLinuxPulse::SetPulseAudioObjects( int32_t AudioMixerManagerLinuxPulse::Close() { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_LOG(LS_VERBOSE) << __FUNCTION__; 
+ RTC_DLOG(LS_VERBOSE) << __FUNCTION__; CloseSpeaker(); CloseMicrophone(); @@ -104,7 +104,7 @@ int32_t AudioMixerManagerLinuxPulse::Close() { int32_t AudioMixerManagerLinuxPulse::CloseSpeaker() { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_LOG(LS_VERBOSE) << __FUNCTION__; + RTC_DLOG(LS_VERBOSE) << __FUNCTION__; // Reset the index to -1 _paOutputDeviceIndex = -1; @@ -115,7 +115,7 @@ int32_t AudioMixerManagerLinuxPulse::CloseSpeaker() { int32_t AudioMixerManagerLinuxPulse::CloseMicrophone() { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_LOG(LS_VERBOSE) << __FUNCTION__; + RTC_DLOG(LS_VERBOSE) << __FUNCTION__; // Reset the index to -1 _paInputDeviceIndex = -1; @@ -186,14 +186,14 @@ int32_t AudioMixerManagerLinuxPulse::OpenMicrophone(uint16_t deviceIndex) { bool AudioMixerManagerLinuxPulse::SpeakerIsInitialized() const { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_LOG(LS_INFO) << __FUNCTION__; + RTC_DLOG(LS_INFO) << __FUNCTION__; return (_paOutputDeviceIndex != -1); } bool AudioMixerManagerLinuxPulse::MicrophoneIsInitialized() const { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_LOG(LS_INFO) << __FUNCTION__; + RTC_DLOG(LS_INFO) << __FUNCTION__; return (_paInputDeviceIndex != -1); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.h index f2f3e48c7..546440c4a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.h @@ -14,7 +14,7 @@ #include #include -#include "rtc_base/thread_checker.h" +#include "api/sequence_checker.h" #ifndef UINT32_MAX #define UINT32_MAX ((uint32_t)-1) @@ -103,10 +103,10 @@ class AudioMixerManagerLinuxPulse { bool _paObjectsSet; // Stores thread ID in constructor. 
- // We can then use ThreadChecker::IsCurrent() to ensure that + // We can then use RTC_DCHECK_RUN_ON(&worker_thread_checker_) to ensure that // other methods are called from the same thread. // Currently only does RTC_DCHECK(thread_checker_.IsCurrent()). - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/OWNERS index b33d59969..5edc304ab 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/OWNERS @@ -1,2 +1,2 @@ -aleloi@webrtc.org +alessiob@webrtc.org henrik.lundin@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc index 04a8bcf72..8cebc3877 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.cc @@ -126,30 +126,33 @@ struct AudioMixerImpl::HelperContainers { AudioMixerImpl::AudioMixerImpl( std::unique_ptr output_rate_calculator, - bool use_limiter) - : output_rate_calculator_(std::move(output_rate_calculator)), + bool use_limiter, + int max_sources_to_mix) + : max_sources_to_mix_(max_sources_to_mix), + output_rate_calculator_(std::move(output_rate_calculator)), audio_source_list_(), helper_containers_(std::make_unique()), frame_combiner_(use_limiter) { - const int kTypicalMaxNumberOfMixedStreams = 3; - audio_source_list_.reserve(kTypicalMaxNumberOfMixedStreams); - helper_containers_->resize(kTypicalMaxNumberOfMixedStreams); + RTC_CHECK_GE(max_sources_to_mix, 1) << "At least one source must be mixed"; + audio_source_list_.reserve(max_sources_to_mix); + helper_containers_->resize(max_sources_to_mix); } AudioMixerImpl::~AudioMixerImpl() {} -rtc::scoped_refptr AudioMixerImpl::Create() { +rtc::scoped_refptr 
AudioMixerImpl::Create( + int max_sources_to_mix) { return Create(std::unique_ptr( new DefaultOutputRateCalculator()), - true); + /*use_limiter=*/true, max_sources_to_mix); } rtc::scoped_refptr AudioMixerImpl::Create( std::unique_ptr output_rate_calculator, - bool use_limiter) { - return rtc::scoped_refptr( - new rtc::RefCountedObject( - std::move(output_rate_calculator), use_limiter)); + bool use_limiter, + int max_sources_to_mix) { + return rtc::make_ref_counted( + std::move(output_rate_calculator), use_limiter, max_sources_to_mix); } void AudioMixerImpl::Mix(size_t number_of_channels, @@ -219,7 +222,7 @@ rtc::ArrayView AudioMixerImpl::GetAudioFromSources( std::sort(audio_source_mixing_data_view.begin(), audio_source_mixing_data_view.end(), ShouldMixBefore); - int max_audio_frame_counter = kMaximumAmountOfMixedAudioSources; + int max_audio_frame_counter = max_sources_to_mix_; int ramp_list_lengh = 0; int audio_to_mix_count = 0; // Go through list in order and put unmuted frames in result list. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h index 0a1308272..737fcbdc4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h @@ -35,13 +35,16 @@ class AudioMixerImpl : public AudioMixer { // AudioProcessing only accepts 10 ms frames. 
static const int kFrameDurationInMs = 10; - enum : int { kMaximumAmountOfMixedAudioSources = 3 }; - static rtc::scoped_refptr Create(); + static const int kDefaultNumberOfMixedAudioSources = 3; + + static rtc::scoped_refptr Create( + int max_sources_to_mix = kDefaultNumberOfMixedAudioSources); static rtc::scoped_refptr Create( std::unique_ptr output_rate_calculator, - bool use_limiter); + bool use_limiter, + int max_sources_to_mix = kDefaultNumberOfMixedAudioSources); ~AudioMixerImpl() override; @@ -60,7 +63,8 @@ class AudioMixerImpl : public AudioMixer { protected: AudioMixerImpl(std::unique_ptr output_rate_calculator, - bool use_limiter); + bool use_limiter, + int max_sources_to_mix); private: struct HelperContainers; @@ -76,6 +80,8 @@ class AudioMixerImpl : public AudioMixer { // checks that mixing is done sequentially. mutable Mutex mutex_; + const int max_sources_to_mix_; + std::unique_ptr output_rate_calculator_; // List of all audio sources. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.cc index e184506b4..e31eea595 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/frame_combiner.cc @@ -16,8 +16,12 @@ #include #include #include +#include +#include #include "api/array_view.h" +#include "api/rtp_packet_info.h" +#include "api/rtp_packet_infos.h" #include "common_audio/include/audio_util.h" #include "modules/audio_mixer/audio_frame_manipulator.h" #include "modules/audio_mixer/audio_mixer_impl.h" @@ -26,6 +30,7 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -53,11 +58,23 @@ void SetAudioFrameFields(rtc::ArrayView mix_list, if (mix_list.empty()) { audio_frame_for_mixing->elapsed_time_ms_ 
= -1; - } else if (mix_list.size() == 1) { + } else { audio_frame_for_mixing->timestamp_ = mix_list[0]->timestamp_; audio_frame_for_mixing->elapsed_time_ms_ = mix_list[0]->elapsed_time_ms_; audio_frame_for_mixing->ntp_time_ms_ = mix_list[0]->ntp_time_ms_; - audio_frame_for_mixing->packet_infos_ = mix_list[0]->packet_infos_; + std::vector packet_infos; + for (const auto& frame : mix_list) { + audio_frame_for_mixing->timestamp_ = + std::min(audio_frame_for_mixing->timestamp_, frame->timestamp_); + audio_frame_for_mixing->ntp_time_ms_ = + std::min(audio_frame_for_mixing->ntp_time_ms_, frame->ntp_time_ms_); + audio_frame_for_mixing->elapsed_time_ms_ = std::max( + audio_frame_for_mixing->elapsed_time_ms_, frame->elapsed_time_ms_); + packet_infos.insert(packet_infos.end(), frame->packet_infos_.begin(), + frame->packet_infos_.end()); + } + audio_frame_for_mixing->packet_infos_ = + RtpPacketInfos(std::move(packet_infos)); } } @@ -88,13 +105,14 @@ void MixToFloatFrame(rtc::ArrayView mix_list, // Convert to FloatS16 and mix. for (size_t i = 0; i < mix_list.size(); ++i) { const AudioFrame* const frame = mix_list[i]; + const int16_t* const frame_data = frame->data(); for (size_t j = 0; j < std::min(number_of_channels, FrameCombiner::kMaximumNumberOfChannels); ++j) { for (size_t k = 0; k < std::min(samples_per_channel, FrameCombiner::kMaximumChannelSize); ++k) { - (*mixing_buffer)[j][k] += frame->data()[number_of_channels * k + j]; + (*mixing_buffer)[j][k] += frame_data[number_of_channels * k + j]; } } } @@ -113,10 +131,11 @@ void InterleaveToAudioFrame(AudioFrameView mixing_buffer_view, AudioFrame* audio_frame_for_mixing) { const size_t number_of_channels = mixing_buffer_view.num_channels(); const size_t samples_per_channel = mixing_buffer_view.samples_per_channel(); + int16_t* const mixing_data = audio_frame_for_mixing->mutable_data(); // Put data in the result frame. 
for (size_t i = 0; i < number_of_channels; ++i) { for (size_t j = 0; j < samples_per_channel; ++j) { - audio_frame_for_mixing->mutable_data()[number_of_channels * j + i] = + mixing_data[number_of_channels * j + i] = FloatS16ToS16(mixing_buffer_view.channel(i)[j]); } } @@ -205,10 +224,10 @@ void FrameCombiner::LogMixingStats( uma_logging_counter_ = 0; RTC_HISTOGRAM_COUNTS_100("WebRTC.Audio.AudioMixer.NumIncomingStreams", static_cast(number_of_streams)); - RTC_HISTOGRAM_ENUMERATION( - "WebRTC.Audio.AudioMixer.NumIncomingActiveStreams", - static_cast(mix_list.size()), - AudioMixerImpl::kMaximumAmountOfMixedAudioSources); + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.Audio.AudioMixer.NumIncomingActiveStreams2", + rtc::dchecked_cast(mix_list.size()), /*min=*/1, /*max=*/16, + /*bucket_count=*/16); using NativeRate = AudioProcessing::NativeRate; static constexpr NativeRate native_rates[] = { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.cc index 7bd8d6267..ed6bc4675 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.cc @@ -20,9 +20,7 @@ namespace webrtc { Aec3Optimization DetectOptimization() { #if defined(WEBRTC_ARCH_X86_FAMILY) - if (GetCPUInfo(kAVX2) != 0) { - return Aec3Optimization::kAvx2; - } else if (GetCPUInfo(kSSE2) != 0) { + if (GetCPUInfo(kSSE2) != 0) { return Aec3Optimization::kSse2; } #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc index c7361093f..21cad2186 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.cc @@ -113,14 +113,6 @@ void AecState::GetResidualEchoScaling( residual_scaling); } -absl::optional 
AecState::ErleUncertainty() const { - if (SaturatedEcho()) { - return 1.f; - } - - return absl::nullopt; -} - AecState::AecState(const EchoCanceller3Config& config, size_t num_capture_channels) : data_dumper_( @@ -302,7 +294,9 @@ void AecState::Update( data_dumper_->DumpRaw("aec3_active_render", active_render); data_dumper_->DumpRaw("aec3_erl", Erl()); data_dumper_->DumpRaw("aec3_erl_time_domain", ErlTimeDomain()); - data_dumper_->DumpRaw("aec3_erle", Erle()[0]); + data_dumper_->DumpRaw("aec3_erle", Erle(/*onset_compensated=*/false)[0]); + data_dumper_->DumpRaw("aec3_erle_onset_compensated", + Erle(/*onset_compensated=*/true)[0]); data_dumper_->DumpRaw("aec3_usable_linear_estimate", UsableLinearEstimate()); data_dumper_->DumpRaw("aec3_transparent_mode", TransparentModeActive()); data_dumper_->DumpRaw("aec3_filter_delay", @@ -322,6 +316,11 @@ void AecState::Update( external_delay ? 1 : 0); data_dumper_->DumpRaw("aec3_filter_tail_freq_resp_est", GetReverbFrequencyResponse()); + data_dumper_->DumpRaw("aec3_subtractor_y2", subtractor_output[0].y2); + data_dumper_->DumpRaw("aec3_subtractor_e2_coarse", + subtractor_output[0].e2_coarse); + data_dumper_->DumpRaw("aec3_subtractor_e2_refined", + subtractor_output[0].e2_refined); } AecState::InitialState::InitialState(const EchoCanceller3Config& config) diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h index 5b40e9513..125ae83a2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec_state.h @@ -70,16 +70,11 @@ class AecState { } // Returns the ERLE. - rtc::ArrayView> Erle() const { - return erle_estimator_.Erle(); + rtc::ArrayView> Erle( + bool onset_compensated) const { + return erle_estimator_.Erle(onset_compensated); } - // Returns an offset to apply to the estimation of the residual echo - // computation. 
Returning nullopt means that no offset should be used, while - // any other value will be applied as a multiplier to the estimated residual - // echo. - absl::optional ErleUncertainty() const; - // Returns the fullband ERLE estimate in log2 units. float FullBandErleLog2() const { return erle_estimator_.FullbandErleLog2(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.cc index f2f326148..2ee32b82d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.cc @@ -63,6 +63,7 @@ class BlockProcessorImpl final : public BlockProcessor { void GetMetrics(EchoControl::Metrics* metrics) const override; void SetAudioBufferDelay(int delay_ms) override; + void SetCaptureOutputUsage(bool capture_output_used) override; private: static int instance_count_; @@ -237,6 +238,10 @@ void BlockProcessorImpl::SetAudioBufferDelay(int delay_ms) { render_buffer_->SetAudioBufferDelay(delay_ms); } +void BlockProcessorImpl::SetCaptureOutputUsage(bool capture_output_used) { + echo_remover_->SetCaptureOutputUsage(capture_output_used); +} + } // namespace BlockProcessor* BlockProcessor::Create(const EchoCanceller3Config& config, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.h index 9bb0cf19f..41ce016dc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor.h @@ -69,6 +69,12 @@ class BlockProcessor { // Reports whether echo leakage has been detected in the echo canceller // output. virtual void UpdateEchoLeakageStatus(bool leakage_detected) = 0; + + // Specifies whether the capture output will be used. 
The purpose of this is + // to allow the block processor to deactivate some of the processing when the + // resulting output is anyway not used, for instance when the endpoint is + // muted. + virtual void SetCaptureOutputUsage(bool capture_output_used) = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc index 98da232bb..181b649f6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc @@ -49,7 +49,11 @@ void RetrieveFieldTrialValue(const char* trial_name, ParseFieldTrial({&field_trial_param}, field_trial_str); float field_trial_value = static_cast(field_trial_param.Get()); - if (field_trial_value >= min && field_trial_value <= max) { + if (field_trial_value >= min && field_trial_value <= max && + field_trial_value != *value_to_update) { + RTC_LOG(LS_INFO) << "Key " << trial_name + << " changing AEC3 parameter value from " + << *value_to_update << " to " << field_trial_value; *value_to_update = field_trial_value; } } @@ -65,7 +69,11 @@ void RetrieveFieldTrialValue(const char* trial_name, ParseFieldTrial({&field_trial_param}, field_trial_str); float field_trial_value = field_trial_param.Get(); - if (field_trial_value >= min && field_trial_value <= max) { + if (field_trial_value >= min && field_trial_value <= max && + field_trial_value != *value_to_update) { + RTC_LOG(LS_INFO) << "Key " << trial_name + << " changing AEC3 parameter value from " + << *value_to_update << " to " << field_trial_value; *value_to_update = field_trial_value; } } @@ -251,6 +259,10 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { adjusted_cfg.filter.initial_state_seconds = 2.0f; } + if (field_trial::IsEnabled("WebRTC-Aec3HighPassFilterEchoReference")) { + 
adjusted_cfg.filter.high_pass_filter_echo_reference = true; + } + if (field_trial::IsEnabled("WebRTC-Aec3EchoSaturationDetectionKillSwitch")) { adjusted_cfg.ep_strength.echo_can_saturate = false; } @@ -568,12 +580,19 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { RetrieveFieldTrialValue("WebRTC-Aec3SuppressorEpStrengthDefaultLenOverride", -1.f, 1.f, &adjusted_cfg.ep_strength.default_len); + // Field trial-based overrides of individual delay estimator parameters. + RetrieveFieldTrialValue("WebRTC-Aec3DelayEstimateSmoothingOverride", 0.f, 1.f, + &adjusted_cfg.delay.delay_estimate_smoothing); + RetrieveFieldTrialValue( + "WebRTC-Aec3DelayEstimateSmoothingDelayFoundOverride", 0.f, 1.f, + &adjusted_cfg.delay.delay_estimate_smoothing_delay_found); return adjusted_cfg; } class EchoCanceller3::RenderWriter { public: RenderWriter(ApmDataDumper* data_dumper, + const EchoCanceller3Config& config, SwapQueue>>, Aec3RenderQueueItemVerifier>* render_transfer_queue, size_t num_bands, @@ -590,7 +609,7 @@ class EchoCanceller3::RenderWriter { ApmDataDumper* data_dumper_; const size_t num_bands_; const size_t num_channels_; - HighPassFilter high_pass_filter_; + std::unique_ptr high_pass_filter_; std::vector>> render_queue_input_frame_; SwapQueue>>, Aec3RenderQueueItemVerifier>* render_transfer_queue_; @@ -598,6 +617,7 @@ class EchoCanceller3::RenderWriter { EchoCanceller3::RenderWriter::RenderWriter( ApmDataDumper* data_dumper, + const EchoCanceller3Config& config, SwapQueue>>, Aec3RenderQueueItemVerifier>* render_transfer_queue, size_t num_bands, @@ -605,7 +625,6 @@ EchoCanceller3::RenderWriter::RenderWriter( : data_dumper_(data_dumper), num_bands_(num_bands), num_channels_(num_channels), - high_pass_filter_(16000, num_channels), render_queue_input_frame_( num_bands_, std::vector>( @@ -613,6 +632,9 @@ EchoCanceller3::RenderWriter::RenderWriter( std::vector(AudioBuffer::kSplitBandSize, 0.f))), render_transfer_queue_(render_transfer_queue) { 
RTC_DCHECK(data_dumper); + if (config.filter.high_pass_filter_echo_reference) { + high_pass_filter_ = std::make_unique(16000, num_channels); + } } EchoCanceller3::RenderWriter::~RenderWriter() = default; @@ -631,7 +653,9 @@ void EchoCanceller3::RenderWriter::Insert(const AudioBuffer& input) { CopyBufferIntoFrame(input, num_bands_, num_channels_, &render_queue_input_frame_); - high_pass_filter_.Process(&render_queue_input_frame_[0]); + if (high_pass_filter_) { + high_pass_filter_->Process(&render_queue_input_frame_[0]); + } static_cast(render_transfer_queue_->Insert(&render_queue_input_frame_)); } @@ -704,7 +728,7 @@ EchoCanceller3::EchoCanceller3(const EchoCanceller3Config& config, config_.delay.fixed_capture_delay_samples)); } - render_writer_.reset(new RenderWriter(data_dumper_.get(), + render_writer_.reset(new RenderWriter(data_dumper_.get(), config_, &render_transfer_queue_, num_bands_, num_render_channels_)); @@ -721,6 +745,10 @@ EchoCanceller3::EchoCanceller3(const EchoCanceller3Config& config, std::vector>>( 1, std::vector>(num_capture_channels_)); } + + RTC_LOG(LS_INFO) << "AEC3 created with sample rate: " << sample_rate_hz_ + << " Hz, num render channels: " << num_render_channels_ + << ", num capture channels: " << num_capture_channels_; } EchoCanceller3::~EchoCanceller3() = default; @@ -823,6 +851,11 @@ void EchoCanceller3::SetAudioBufferDelay(int delay_ms) { block_processor_->SetAudioBufferDelay(delay_ms); } +void EchoCanceller3::SetCaptureOutputUsage(bool capture_output_used) { + RTC_DCHECK_RUNS_SERIALIZED(&capture_race_checker_); + block_processor_->SetCaptureOutputUsage(capture_output_used); +} + bool EchoCanceller3::ActiveProcessing() const { return true; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.h index bacd5dfc4..a4aab4987 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.h @@ -118,6 +118,12 @@ class EchoCanceller3 : public EchoControl { // Provides an optional external estimate of the audio buffer delay. void SetAudioBufferDelay(int delay_ms) override; + // Specifies whether the capture output will be used. The purpose of this is + // to allow the echo controller to deactivate some of the processing when the + // resulting output is anyway not used, for instance when the endpoint is + // muted. + void SetCaptureOutputUsage(bool capture_output_used) override; + bool ActiveProcessing() const override; // Signals whether an external detector has detected echo leakage from the diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.cc index 2c987f934..8a7883414 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.cc @@ -42,6 +42,7 @@ EchoPathDelayEstimator::EchoPathDelayEstimator( ? 
config.render_levels.poor_excitation_render_limit_ds8 : config.render_levels.poor_excitation_render_limit, config.delay.delay_estimate_smoothing, + config.delay.delay_estimate_smoothing_delay_found, config.delay.delay_candidate_detection_threshold), matched_filter_lag_aggregator_(data_dumper_, matched_filter_.GetMaxFilterLag(), @@ -71,7 +72,8 @@ absl::optional EchoPathDelayEstimator::EstimateDelay( data_dumper_->DumpWav("aec3_capture_decimator_output", downsampled_capture.size(), downsampled_capture.data(), 16000 / down_sampling_factor_, 1); - matched_filter_.Update(render_buffer, downsampled_capture); + matched_filter_.Update(render_buffer, downsampled_capture, + matched_filter_lag_aggregator_.ReliableDelayFound()); absl::optional aggregated_matched_filter_lag = matched_filter_lag_aggregator_.Aggregate( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc index df539bfad..6c177c9a1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.cc @@ -132,6 +132,10 @@ class EchoRemoverImpl final : public EchoRemover { echo_leakage_detected_ = leakage_detected; } + void SetCaptureOutputUsage(bool capture_output_used) override { + capture_output_used_ = capture_output_used; + } + private: // Selects which of the coarse and refined linear filter outputs that is most // appropriate to pass to the suppressor and forms the linear filter output by @@ -155,6 +159,7 @@ class EchoRemoverImpl final : public EchoRemover { RenderSignalAnalyzer render_signal_analyzer_; ResidualEchoEstimator residual_echo_estimator_; bool echo_leakage_detected_ = false; + bool capture_output_used_ = true; AecState aec_state_; EchoRemoverMetrics metrics_; std::vector> e_old_; @@ -391,42 +396,50 @@ void EchoRemoverImpl::ProcessCapture( 1); data_dumper_->DumpWav("aec3_output_linear2", 
kBlockSize, &e[0][0], 16000, 1); - // Estimate the residual echo power. - residual_echo_estimator_.Estimate(aec_state_, *render_buffer, S2_linear, Y2, - R2); - // Estimate the comfort noise. cng_.Compute(aec_state_.SaturatedCapture(), Y2, comfort_noise, high_band_comfort_noise); - // Suppressor nearend estimate. - if (aec_state_.UsableLinearEstimate()) { - // E2 is bound by Y2. - for (size_t ch = 0; ch < num_capture_channels_; ++ch) { - std::transform(E2[ch].begin(), E2[ch].end(), Y2[ch].begin(), - E2[ch].begin(), - [](float a, float b) { return std::min(a, b); }); - } - } - const auto& nearend_spectrum = aec_state_.UsableLinearEstimate() ? E2 : Y2; - - // Suppressor echo estimate. - const auto& echo_spectrum = - aec_state_.UsableLinearEstimate() ? S2_linear : R2; - - // Determine if the suppressor should assume clock drift. - const bool clock_drift = config_.echo_removal_control.has_clock_drift || - echo_path_variability.clock_drift; - - // Compute preferred gains. - float high_bands_gain; + // Only do the below processing if the output of the audio processing module + // is used. std::array G; - suppression_gain_.GetGain(nearend_spectrum, echo_spectrum, R2, - cng_.NoiseSpectrum(), render_signal_analyzer_, - aec_state_, x, clock_drift, &high_bands_gain, &G); + if (capture_output_used_) { + // Estimate the residual echo power. + residual_echo_estimator_.Estimate(aec_state_, *render_buffer, S2_linear, Y2, + suppression_gain_.IsDominantNearend(), + R2); - suppression_filter_.ApplyGain(comfort_noise, high_band_comfort_noise, G, - high_bands_gain, Y_fft, y); + // Suppressor nearend estimate. + if (aec_state_.UsableLinearEstimate()) { + // E2 is bound by Y2. + for (size_t ch = 0; ch < num_capture_channels_; ++ch) { + std::transform(E2[ch].begin(), E2[ch].end(), Y2[ch].begin(), + E2[ch].begin(), + [](float a, float b) { return std::min(a, b); }); + } + } + const auto& nearend_spectrum = aec_state_.UsableLinearEstimate() ? E2 : Y2; + + // Suppressor echo estimate. 
+ const auto& echo_spectrum = + aec_state_.UsableLinearEstimate() ? S2_linear : R2; + + // Determine if the suppressor should assume clock drift. + const bool clock_drift = config_.echo_removal_control.has_clock_drift || + echo_path_variability.clock_drift; + + // Compute preferred gains. + float high_bands_gain; + suppression_gain_.GetGain(nearend_spectrum, echo_spectrum, R2, + cng_.NoiseSpectrum(), render_signal_analyzer_, + aec_state_, x, clock_drift, &high_bands_gain, &G); + + suppression_filter_.ApplyGain(comfort_noise, high_band_comfort_noise, G, + high_bands_gain, Y_fft, y); + + } else { + G.fill(0.f); + } // Update the metrics. metrics_.Update(aec_state_, cng_.NoiseSpectrum()[0], G); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.h index ef4164688..486a9a72f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover.h @@ -48,6 +48,12 @@ class EchoRemover { // Updates the status on whether echo leakage is detected in the output of the // echo remover. virtual void UpdateEchoLeakageStatus(bool leakage_detected) = 0; + + // Specifies whether the capture output will be used. The purpose of this is + // to allow the echo remover to deactivate some of the processing when the + // resulting output is anyway not used, for instance when the endpoint is + // muted. 
+ virtual void SetCaptureOutputUsage(bool capture_output_used) = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erle_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erle_estimator.cc index 4d843457d..0e3d715c5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erle_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erle_estimator.cc @@ -52,8 +52,9 @@ void ErleEstimator::Update( rtc::ArrayView> subtractor_spectra, const std::vector& converged_filters) { - RTC_DCHECK_EQ(subband_erle_estimator_.Erle().size(), capture_spectra.size()); - RTC_DCHECK_EQ(subband_erle_estimator_.Erle().size(), + RTC_DCHECK_EQ(subband_erle_estimator_.Erle(/*onset_compensated=*/true).size(), + capture_spectra.size()); + RTC_DCHECK_EQ(subband_erle_estimator_.Erle(/*onset_compensated=*/true).size(), subtractor_spectra.size()); const auto& X2_reverb = avg_render_spectrum_with_reverb; const auto& Y2 = capture_spectra; @@ -68,7 +69,9 @@ void ErleEstimator::Update( if (signal_dependent_erle_estimator_) { signal_dependent_erle_estimator_->Update( render_buffer, filter_frequency_responses, X2_reverb, Y2, E2, - subband_erle_estimator_.Erle(), converged_filters); + subband_erle_estimator_.Erle(/*onset_compensated=*/false), + subband_erle_estimator_.Erle(/*onset_compensated=*/true), + converged_filters); } fullband_erle_estimator_.Update(X2_reverb, Y2, E2, converged_filters); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erle_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erle_estimator.h index d741cff3d..cae896e82 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erle_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erle_estimator.h @@ -55,17 +55,18 @@ class ErleEstimator { const std::vector& converged_filters); // Returns the most recent subband ERLE estimates. 
- rtc::ArrayView> Erle() const { + rtc::ArrayView> Erle( + bool onset_compensated) const { return signal_dependent_erle_estimator_ - ? signal_dependent_erle_estimator_->Erle() - : subband_erle_estimator_.Erle(); + ? signal_dependent_erle_estimator_->Erle(onset_compensated) + : subband_erle_estimator_.Erle(onset_compensated); } // Returns the subband ERLE that are estimated during onsets (only used for // testing). - rtc::ArrayView> ErleOnsets() + rtc::ArrayView> ErleDuringOnsets() const { - return subband_erle_estimator_.ErleOnsets(); + return subband_erle_estimator_.ErleDuringOnsets(); } // Returns the fullband ERLE estimate. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.cc index e42121482..e56674e4c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.cc @@ -34,8 +34,8 @@ FullBandErleEstimator::FullBandErleEstimator( const EchoCanceller3Config::Erle& config, size_t num_capture_channels) : min_erle_log2_(FastApproxLog2f(config.min + kEpsilon)), - max_erle_lf_log2(FastApproxLog2f(config.max_l + kEpsilon)), - hold_counters_time_domain_(num_capture_channels, 0), + max_erle_lf_log2_(FastApproxLog2f(config.max_l + kEpsilon)), + hold_counters_instantaneous_erle_(num_capture_channels, 0), erle_time_domain_log2_(num_capture_channels, min_erle_log2_), instantaneous_erle_(num_capture_channels, ErleInstantaneous(config)), linear_filters_qualities_(num_capture_channels) { @@ -52,8 +52,8 @@ void FullBandErleEstimator::Reset() { UpdateQualityEstimates(); std::fill(erle_time_domain_log2_.begin(), erle_time_domain_log2_.end(), min_erle_log2_); - std::fill(hold_counters_time_domain_.begin(), - hold_counters_time_domain_.end(), 0); + std::fill(hold_counters_instantaneous_erle_.begin(), + 
hold_counters_instantaneous_erle_.end(), 0); } void FullBandErleEstimator::Update( @@ -71,21 +71,17 @@ void FullBandErleEstimator::Update( const float E2_sum = std::accumulate(E2[ch].begin(), E2[ch].end(), 0.0f); if (instantaneous_erle_[ch].Update(Y2_sum, E2_sum)) { - hold_counters_time_domain_[ch] = kBlocksToHoldErle; + hold_counters_instantaneous_erle_[ch] = kBlocksToHoldErle; erle_time_domain_log2_[ch] += - 0.1f * ((instantaneous_erle_[ch].GetInstErleLog2().value()) - - erle_time_domain_log2_[ch]); - erle_time_domain_log2_[ch] = rtc::SafeClamp( - erle_time_domain_log2_[ch], min_erle_log2_, max_erle_lf_log2); + 0.05f * ((instantaneous_erle_[ch].GetInstErleLog2().value()) - + erle_time_domain_log2_[ch]); + erle_time_domain_log2_[ch] = + std::max(erle_time_domain_log2_[ch], min_erle_log2_); } } } - --hold_counters_time_domain_[ch]; - if (hold_counters_time_domain_[ch] <= 0) { - erle_time_domain_log2_[ch] = - std::max(min_erle_log2_, erle_time_domain_log2_[ch] - 0.044f); - } - if (hold_counters_time_domain_[ch] == 0) { + --hold_counters_instantaneous_erle_[ch]; + if (hold_counters_instantaneous_erle_[ch] == 0) { instantaneous_erle_[ch].ResetAccumulators(); } } @@ -166,17 +162,12 @@ void FullBandErleEstimator::ErleInstantaneous::Dump( void FullBandErleEstimator::ErleInstantaneous::UpdateMaxMin() { RTC_DCHECK(erle_log2_); - if (erle_log2_.value() > max_erle_log2_) { - max_erle_log2_ = erle_log2_.value(); - } else { - max_erle_log2_ -= 0.0004; // Forget factor, approx 1dB every 3 sec. - } - - if (erle_log2_.value() < min_erle_log2_) { - min_erle_log2_ = erle_log2_.value(); - } else { - min_erle_log2_ += 0.0004; // Forget factor, approx 1dB every 3 sec. - } + // Adding the forgetting factors for the maximum and minimum and capping the + // result to the incoming value. + max_erle_log2_ -= 0.0004f; // Forget factor, approx 1dB every 3 sec. 
+ max_erle_log2_ = std::max(max_erle_log2_, erle_log2_.value()); + min_erle_log2_ += 0.0004f; // Forget factor, approx 1dB every 3 sec. + min_erle_log2_ = std::min(min_erle_log2_, erle_log2_.value()); } void FullBandErleEstimator::ErleInstantaneous::UpdateQualityEstimate() { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h index 1580f1a8a..2b720a4de 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h @@ -106,8 +106,8 @@ class FullBandErleEstimator { }; const float min_erle_log2_; - const float max_erle_lf_log2; - std::vector hold_counters_time_domain_; + const float max_erle_lf_log2_; + std::vector hold_counters_instantaneous_erle_; std::vector erle_time_domain_log2_; std::vector instantaneous_erle_; std::vector> linear_filters_qualities_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc index 2a489923b..b2113f696 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc @@ -307,7 +307,8 @@ MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper, int num_matched_filters, size_t alignment_shift_sub_blocks, float excitation_limit, - float smoothing, + float smoothing_fast, + float smoothing_slow, float matching_filter_threshold) : data_dumper_(data_dumper), optimization_(optimization), @@ -319,7 +320,8 @@ MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper, lag_estimates_(num_matched_filters), filters_offsets_(num_matched_filters, 0), excitation_limit_(excitation_limit), - smoothing_(smoothing), + smoothing_fast_(smoothing_fast), + 
smoothing_slow_(smoothing_slow), matching_filter_threshold_(matching_filter_threshold) { RTC_DCHECK(data_dumper); RTC_DCHECK_LT(0, window_size_sub_blocks); @@ -340,10 +342,14 @@ void MatchedFilter::Reset() { } void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, - rtc::ArrayView capture) { + rtc::ArrayView capture, + bool use_slow_smoothing) { RTC_DCHECK_EQ(sub_block_size_, capture.size()); auto& y = capture; + const float smoothing = + use_slow_smoothing ? smoothing_slow_ : smoothing_fast_; + const float x2_sum_threshold = filters_[0].size() * excitation_limit_ * excitation_limit_; @@ -360,20 +366,20 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, switch (optimization_) { #if defined(WEBRTC_ARCH_X86_FAMILY) case Aec3Optimization::kSse2: - aec3::MatchedFilterCore_SSE2(x_start_index, x2_sum_threshold, - smoothing_, render_buffer.buffer, y, - filters_[n], &filters_updated, &error_sum); + aec3::MatchedFilterCore_SSE2(x_start_index, x2_sum_threshold, smoothing, + render_buffer.buffer, y, filters_[n], + &filters_updated, &error_sum); break; #endif #if defined(WEBRTC_HAS_NEON) case Aec3Optimization::kNeon: - aec3::MatchedFilterCore_NEON(x_start_index, x2_sum_threshold, - smoothing_, render_buffer.buffer, y, - filters_[n], &filters_updated, &error_sum); + aec3::MatchedFilterCore_NEON(x_start_index, x2_sum_threshold, smoothing, + render_buffer.buffer, y, filters_[n], + &filters_updated, &error_sum); break; #endif default: - aec3::MatchedFilterCore(x_start_index, x2_sum_threshold, smoothing_, + aec3::MatchedFilterCore(x_start_index, x2_sum_threshold, smoothing, render_buffer.buffer, y, filters_[n], &filters_updated, &error_sum); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h index fa44eb27f..c6410ab4e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h @@ -100,7 +100,8 @@ class MatchedFilter { int num_matched_filters, size_t alignment_shift_sub_blocks, float excitation_limit, - float smoothing, + float smoothing_fast, + float smoothing_slow, float matching_filter_threshold); MatchedFilter() = delete; @@ -111,7 +112,8 @@ class MatchedFilter { // Updates the correlation with the values in the capture buffer. void Update(const DownsampledRenderBuffer& render_buffer, - rtc::ArrayView capture); + rtc::ArrayView capture, + bool use_slow_smoothing); // Resets the matched filter. void Reset(); @@ -140,7 +142,8 @@ class MatchedFilter { std::vector lag_estimates_; std::vector filters_offsets_; const float excitation_limit_; - const float smoothing_; + const float smoothing_fast_; + const float smoothing_slow_; const float matching_filter_threshold_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h index d48011e47..612bd5d94 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_lag_aggregator.h @@ -45,6 +45,9 @@ class MatchedFilterLagAggregator { absl::optional Aggregate( rtc::ArrayView lag_estimates); + // Returns whether a reliable delay estimate has been found. 
+ bool ReliableDelayFound() const { return significant_candidate_found_; } + private: ApmDataDumper* const data_dumper_; std::vector histogram_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h index e1eb26702..aa612257e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_block_processor.h @@ -44,6 +44,10 @@ class MockBlockProcessor : public BlockProcessor { (EchoControl::Metrics * metrics), (const, override)); MOCK_METHOD(void, SetAudioBufferDelay, (int delay_ms), (override)); + MOCK_METHOD(void, + SetCaptureOutputUsage, + (bool capture_output_used), + (override)); }; } // namespace test diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h index 8a3044bcf..60c5bf433 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/mock/mock_echo_remover.h @@ -44,6 +44,10 @@ class MockEchoRemover : public EchoRemover { GetMetrics, (EchoControl::Metrics * metrics), (const, override)); + MOCK_METHOD(void, + SetCaptureOutputUsage, + (bool capture_output_used), + (override)); }; } // namespace test diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.cc index 138329ad3..db5203d54 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.cc @@ -73,6 +73,7 @@ void RefinedFilterUpdateGain::Compute( rtc::ArrayView 
erl, size_t size_partitions, bool saturated_capture_signal, + bool disallow_leakage_diverged, FftData* gain_fft) { RTC_DCHECK(gain_fft); // Introducing shorter notation to improve readability. @@ -125,7 +126,7 @@ void RefinedFilterUpdateGain::Compute( // H_error = H_error + factor * erl. for (size_t k = 0; k < kFftLengthBy2Plus1; ++k) { - if (E2_coarse[k] >= E2_refined[k]) { + if (E2_refined[k] <= E2_coarse[k] || disallow_leakage_diverged) { H_error_[k] += current_config_.leakage_converged * erl[k]; } else { H_error_[k] += current_config_.leakage_diverged * erl[k]; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.h index 573097956..ae4fe84df 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/refined_filter_update_gain.h @@ -51,6 +51,7 @@ class RefinedFilterUpdateGain { rtc::ArrayView erl, size_t size_partitions, bool saturated_capture_signal, + bool disallow_leakage_diverged, FftData* gain_fft); // Sets a new config. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.cc index e352cf555..0688429d4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.cc @@ -45,6 +45,13 @@ float GetLateReflectionsDefaultModeGain( return config.default_gain; } +bool UseErleOnsetCompensationInDominantNearend( + const EchoCanceller3Config::EpStrength& config) { + return config.erle_onset_compensation_in_dominant_nearend || + field_trial::IsEnabled( + "WebRTC-Aec3UseErleOnsetCompensationInDominantNearend"); +} + // Computes the indexes that will be used for computing spectral power over // the blocks surrounding the delay. void GetRenderIndexesToAnalyze( @@ -84,22 +91,6 @@ void LinearEstimate( } } -// Estimates the residual echo power based on an uncertainty estimate of the -// echo return loss enhancement (ERLE) and the linear power estimate. -void LinearEstimate( - rtc::ArrayView> S2_linear, - float erle_uncertainty, - rtc::ArrayView> R2) { - RTC_DCHECK_EQ(S2_linear.size(), R2.size()); - - const size_t num_capture_channels = R2.size(); - for (size_t ch = 0; ch < num_capture_channels; ++ch) { - for (size_t k = 0; k < kFftLengthBy2Plus1; ++k) { - R2[ch][k] = S2_linear[ch][k] * erle_uncertainty; - } - } -} - // Estimates the residual echo power based on the estimate of the echo path // gain. 
void NonLinearEstimate( @@ -172,7 +163,9 @@ ResidualEchoEstimator::ResidualEchoEstimator(const EchoCanceller3Config& config, early_reflections_general_gain_( GetEarlyReflectionsDefaultModeGain(config_.ep_strength)), late_reflections_general_gain_( - GetLateReflectionsDefaultModeGain(config_.ep_strength)) { + GetLateReflectionsDefaultModeGain(config_.ep_strength)), + erle_onset_compensation_in_dominant_nearend_( + UseErleOnsetCompensationInDominantNearend(config_.ep_strength)) { Reset(); } @@ -183,6 +176,7 @@ void ResidualEchoEstimator::Estimate( const RenderBuffer& render_buffer, rtc::ArrayView> S2_linear, rtc::ArrayView> Y2, + bool dominant_nearend, rtc::ArrayView> R2) { RTC_DCHECK_EQ(R2.size(), Y2.size()); RTC_DCHECK_EQ(R2.size(), S2_linear.size()); @@ -201,12 +195,9 @@ void ResidualEchoEstimator::Estimate( std::copy(Y2[ch].begin(), Y2[ch].end(), R2[ch].begin()); } } else { - absl::optional erle_uncertainty = aec_state.ErleUncertainty(); - if (erle_uncertainty) { - LinearEstimate(S2_linear, *erle_uncertainty, R2); - } else { - LinearEstimate(S2_linear, aec_state.Erle(), R2); - } + const bool onset_compensated = + erle_onset_compensation_in_dominant_nearend_ || !dominant_nearend; + LinearEstimate(S2_linear, aec_state.Erle(onset_compensated), R2); } AddReverb(ReverbType::kLinear, aec_state, render_buffer, R2); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.h index 8fe7a84f0..9e977766c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/residual_echo_estimator.h @@ -39,6 +39,7 @@ class ResidualEchoEstimator { const RenderBuffer& render_buffer, rtc::ArrayView> S2_linear, rtc::ArrayView> Y2, + bool dominant_nearend, rtc::ArrayView> R2); private: @@ -68,6 +69,7 @@ class ResidualEchoEstimator { const float 
late_reflections_transparent_mode_gain_; const float early_reflections_general_gain_; const float late_reflections_general_gain_; + const bool erle_onset_compensation_in_dominant_nearend_; std::array X2_noise_floor_; std::array X2_noise_floor_counter_; ReverbModel echo_reverb_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc index 5a3ba6c84..a5e77092a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc @@ -131,7 +131,9 @@ SignalDependentErleEstimator::SignalDependentErleEstimator( section_boundaries_blocks_(SetSectionsBoundaries(delay_headroom_blocks_, num_blocks_, num_sections_)), + use_onset_detection_(config.erle.onset_detection), erle_(num_capture_channels), + erle_onset_compensated_(num_capture_channels), S2_section_accum_( num_capture_channels, std::vector>(num_sections_)), @@ -154,6 +156,7 @@ SignalDependentErleEstimator::~SignalDependentErleEstimator() = default; void SignalDependentErleEstimator::Reset() { for (size_t ch = 0; ch < erle_.size(); ++ch) { erle_[ch].fill(min_erle_); + erle_onset_compensated_[ch].fill(min_erle_); for (auto& erle_estimator : erle_estimators_[ch]) { erle_estimator.fill(min_erle_); } @@ -180,6 +183,8 @@ void SignalDependentErleEstimator::Update( rtc::ArrayView> Y2, rtc::ArrayView> E2, rtc::ArrayView> average_erle, + rtc::ArrayView> + average_erle_onset_compensated, const std::vector& converged_filters) { RTC_DCHECK_GT(num_sections_, 1); @@ -202,6 +207,11 @@ void SignalDependentErleEstimator::Update( [band_to_subband_[k]]; erle_[ch][k] = rtc::SafeClamp(average_erle[ch][k] * correction_factor, min_erle_, max_erle_[band_to_subband_[k]]); + if (use_onset_detection_) { + erle_onset_compensated_[ch][k] = rtc::SafeClamp( + 
average_erle_onset_compensated[ch][k] * correction_factor, + min_erle_, max_erle_[band_to_subband_[k]]); + } } } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.h index 498e922f1..6847c1ab1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.h @@ -37,8 +37,10 @@ class SignalDependentErleEstimator { void Reset(); // Returns the Erle per frequency subband. - rtc::ArrayView> Erle() const { - return erle_; + rtc::ArrayView> Erle( + bool onset_compensated) const { + return onset_compensated && use_onset_detection_ ? erle_onset_compensated_ + : erle_; } // Updates the Erle estimate. The Erle that is passed as an input is required @@ -51,6 +53,8 @@ class SignalDependentErleEstimator { rtc::ArrayView> Y2, rtc::ArrayView> E2, rtc::ArrayView> average_erle, + rtc::ArrayView> + average_erle_onset_compensated, const std::vector& converged_filters); void Dump(const std::unique_ptr& data_dumper) const; @@ -83,7 +87,9 @@ class SignalDependentErleEstimator { const std::array band_to_subband_; const std::array max_erle_; const std::vector section_boundaries_blocks_; + const bool use_onset_detection_; std::vector> erle_; + std::vector> erle_onset_compensated_; std::vector>> S2_section_accum_; std::vector>> erle_estimators_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subband_erle_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subband_erle_estimator.cc index 6c0009126..1e957f23a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subband_erle_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subband_erle_estimator.cc @@ -48,7 +48,8 @@ 
SubbandErleEstimator::SubbandErleEstimator(const EchoCanceller3Config& config, use_min_erle_during_onsets_(EnableMinErleDuringOnsets()), accum_spectra_(num_capture_channels), erle_(num_capture_channels), - erle_onsets_(num_capture_channels), + erle_onset_compensated_(num_capture_channels), + erle_during_onsets_(num_capture_channels), coming_onset_(num_capture_channels), hold_counters_(num_capture_channels) { Reset(); @@ -57,11 +58,11 @@ SubbandErleEstimator::SubbandErleEstimator(const EchoCanceller3Config& config, SubbandErleEstimator::~SubbandErleEstimator() = default; void SubbandErleEstimator::Reset() { - for (auto& erle : erle_) { - erle.fill(min_erle_); - } - for (size_t ch = 0; ch < erle_onsets_.size(); ++ch) { - erle_onsets_[ch].fill(min_erle_); + const size_t num_capture_channels = erle_.size(); + for (size_t ch = 0; ch < num_capture_channels; ++ch) { + erle_[ch].fill(min_erle_); + erle_onset_compensated_[ch].fill(min_erle_); + erle_during_onsets_[ch].fill(min_erle_); coming_onset_[ch].fill(true); hold_counters_[ch].fill(0); } @@ -80,15 +81,21 @@ void SubbandErleEstimator::Update( DecreaseErlePerBandForLowRenderSignals(); } - for (auto& erle : erle_) { + const size_t num_capture_channels = erle_.size(); + for (size_t ch = 0; ch < num_capture_channels; ++ch) { + auto& erle = erle_[ch]; erle[0] = erle[1]; erle[kFftLengthBy2] = erle[kFftLengthBy2 - 1]; + + auto& erle_oc = erle_onset_compensated_[ch]; + erle_oc[0] = erle_oc[1]; + erle_oc[kFftLengthBy2] = erle_oc[kFftLengthBy2 - 1]; } } void SubbandErleEstimator::Dump( const std::unique_ptr& data_dumper) const { - data_dumper->DumpRaw("aec3_erle_onset", ErleOnsets()[0]); + data_dumper->DumpRaw("aec3_erle_onset", ErleDuringOnsets()[0]); } void SubbandErleEstimator::UpdateBands( @@ -102,13 +109,16 @@ void SubbandErleEstimator::UpdateBands( continue; } + if (accum_spectra_.num_points[ch] != kPointsToAccumulate) { + continue; + } + std::array new_erle; std::array is_erle_updated; is_erle_updated.fill(false); for 
(size_t k = 1; k < kFftLengthBy2; ++k) { - if (accum_spectra_.num_points[ch] == kPointsToAccumulate && - accum_spectra_.E2[ch][k] > 0.f) { + if (accum_spectra_.E2[ch][k] > 0.f) { new_erle[k] = accum_spectra_.Y2[ch][k] / accum_spectra_.E2[ch][k]; is_erle_updated[k] = true; } @@ -120,10 +130,11 @@ void SubbandErleEstimator::UpdateBands( if (coming_onset_[ch][k]) { coming_onset_[ch][k] = false; if (!use_min_erle_during_onsets_) { - float alpha = new_erle[k] < erle_onsets_[ch][k] ? 0.3f : 0.15f; - erle_onsets_[ch][k] = rtc::SafeClamp( - erle_onsets_[ch][k] + - alpha * (new_erle[k] - erle_onsets_[ch][k]), + float alpha = + new_erle[k] < erle_during_onsets_[ch][k] ? 0.3f : 0.15f; + erle_during_onsets_[ch][k] = rtc::SafeClamp( + erle_during_onsets_[ch][k] + + alpha * (new_erle[k] - erle_during_onsets_[ch][k]), min_erle_, max_erle_[k]); } } @@ -132,15 +143,26 @@ void SubbandErleEstimator::UpdateBands( } } + auto update_erle_band = [](float& erle, float new_erle, + bool low_render_energy, float min_erle, + float max_erle) { + float alpha = 0.05f; + if (new_erle < erle) { + alpha = low_render_energy ? 0.f : 0.1f; + } + erle = + rtc::SafeClamp(erle + alpha * (new_erle - erle), min_erle, max_erle); + }; + for (size_t k = 1; k < kFftLengthBy2; ++k) { if (is_erle_updated[k]) { - float alpha = 0.05f; - if (new_erle[k] < erle_[ch][k]) { - alpha = accum_spectra_.low_render_energy[ch][k] ? 
0.f : 0.1f; + const bool low_render_energy = accum_spectra_.low_render_energy[ch][k]; + update_erle_band(erle_[ch][k], new_erle[k], low_render_energy, + min_erle_, max_erle_[k]); + if (use_onset_detection_) { + update_erle_band(erle_onset_compensated_[ch][k], new_erle[k], + low_render_energy, min_erle_, max_erle_[k]); } - erle_[ch][k] = - rtc::SafeClamp(erle_[ch][k] + alpha * (new_erle[k] - erle_[ch][k]), - min_erle_, max_erle_[k]); } } } @@ -153,9 +175,11 @@ void SubbandErleEstimator::DecreaseErlePerBandForLowRenderSignals() { --hold_counters_[ch][k]; if (hold_counters_[ch][k] <= (kBlocksForOnsetDetection - kBlocksToHoldErle)) { - if (erle_[ch][k] > erle_onsets_[ch][k]) { - erle_[ch][k] = std::max(erle_onsets_[ch][k], 0.97f * erle_[ch][k]); - RTC_DCHECK_LE(min_erle_, erle_[ch][k]); + if (erle_onset_compensated_[ch][k] > erle_during_onsets_[ch][k]) { + erle_onset_compensated_[ch][k] = + std::max(erle_during_onsets_[ch][k], + 0.97f * erle_onset_compensated_[ch][k]); + RTC_DCHECK_LE(min_erle_, erle_onset_compensated_[ch][k]); } if (hold_counters_[ch][k] <= 0) { coming_onset_[ch][k] = true; @@ -167,7 +191,7 @@ void SubbandErleEstimator::DecreaseErlePerBandForLowRenderSignals() { } void SubbandErleEstimator::ResetAccumulatedSpectra() { - for (size_t ch = 0; ch < erle_onsets_.size(); ++ch) { + for (size_t ch = 0; ch < erle_during_onsets_.size(); ++ch) { accum_spectra_.Y2[ch].fill(0.f); accum_spectra_.E2[ch].fill(0.f); accum_spectra_.num_points[ch] = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subband_erle_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subband_erle_estimator.h index 90363e081..ffed6a57a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subband_erle_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subband_erle_estimator.h @@ -41,14 +41,16 @@ class SubbandErleEstimator { const std::vector& converged_filters); // Returns the ERLE estimate. 
- rtc::ArrayView> Erle() const { - return erle_; + rtc::ArrayView> Erle( + bool onset_compensated) const { + return onset_compensated && use_onset_detection_ ? erle_onset_compensated_ + : erle_; } // Returns the ERLE estimate at onsets (only used for testing). - rtc::ArrayView> ErleOnsets() + rtc::ArrayView> ErleDuringOnsets() const { - return erle_onsets_; + return erle_during_onsets_; } void Dump(const std::unique_ptr& data_dumper) const; @@ -82,8 +84,12 @@ class SubbandErleEstimator { const std::array max_erle_; const bool use_min_erle_during_onsets_; AccumulatedSpectra accum_spectra_; + // ERLE without special handling of render onsets. std::vector> erle_; - std::vector> erle_onsets_; + // ERLE lowered during render onsets. + std::vector> erle_onset_compensated_; + // Estimation of ERLE during render onsets. + std::vector> erle_during_onsets_; std::vector> coming_onset_; std::vector> hold_counters_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.cc index d15229934..2eae68675 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.cc @@ -19,11 +19,17 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" +#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { +bool UseCoarseFilterResetHangover() { + return !field_trial::IsEnabled( + "WebRTC-Aec3CoarseFilterResetHangoverKillSwitch"); +} + void PredictionError(const Aec3Fft& fft, const FftData& S, rtc::ArrayView y, @@ -66,12 +72,14 @@ Subtractor::Subtractor(const EchoCanceller3Config& config, optimization_(optimization), config_(config), num_capture_channels_(num_capture_channels), + use_coarse_filter_reset_hangover_(UseCoarseFilterResetHangover()), refined_filters_(num_capture_channels_), 
coarse_filter_(num_capture_channels_), refined_gains_(num_capture_channels_), coarse_gains_(num_capture_channels_), filter_misadjustment_estimators_(num_capture_channels_), poor_coarse_filter_counters_(num_capture_channels_, 0), + coarse_filter_reset_hangover_(num_capture_channels_, 0), refined_frequency_responses_( num_capture_channels_, std::vector>( @@ -83,7 +91,20 @@ Subtractor::Subtractor(const EchoCanceller3Config& config, std::vector(GetTimeDomainLength(std::max( config_.filter.refined_initial.length_blocks, config_.filter.refined.length_blocks)), - 0.f)) { + 0.f)), + coarse_impulse_responses_(0) { + // Set up the storing of coarse impulse responses if data dumping is + // available. + if (ApmDataDumper::IsAvailable()) { + coarse_impulse_responses_.resize(num_capture_channels_); + const size_t filter_size = GetTimeDomainLength( + std::max(config_.filter.coarse_initial.length_blocks, + config_.filter.coarse.length_blocks)); + for (std::vector& impulse_response : coarse_impulse_responses_) { + impulse_response.resize(filter_size, 0.f); + } + } + for (size_t ch = 0; ch < num_capture_channels_; ++ch) { refined_filters_[ch] = std::make_unique( config_.filter.refined.length_blocks, @@ -228,11 +249,19 @@ void Subtractor::Process(const RenderBuffer& render_buffer, // Update the refined filter. if (!refined_filters_adjusted) { + // Do not allow the performance of the coarse filter to affect the + // adaptation speed of the refined filter just after the coarse filter has + // been reset. 
+ const bool disallow_leakage_diverged = + coarse_filter_reset_hangover_[ch] > 0 && + use_coarse_filter_reset_hangover_; + std::array erl; ComputeErl(optimization_, refined_frequency_responses_[ch], erl); refined_gains_[ch]->Compute(X2_refined, render_signal_analyzer, output, erl, refined_filters_[ch]->SizePartitions(), - aec_state.SaturatedCapture(), &G); + aec_state.SaturatedCapture(), + disallow_leakage_diverged, &G); } else { G.re.fill(0.f); G.im.fill(0.f); @@ -256,6 +285,8 @@ void Subtractor::Process(const RenderBuffer& render_buffer, coarse_gains_[ch]->Compute(X2_coarse, render_signal_analyzer, E_coarse, coarse_filter_[ch]->SizePartitions(), aec_state.SaturatedCapture(), &G); + coarse_filter_reset_hangover_[ch] = + std::max(coarse_filter_reset_hangover_[ch] - 1, 0); } else { poor_coarse_filter_counters_[ch] = 0; coarse_filter_[ch]->SetFilter(refined_filters_[ch]->SizePartitions(), @@ -263,9 +294,18 @@ void Subtractor::Process(const RenderBuffer& render_buffer, coarse_gains_[ch]->Compute(X2_coarse, render_signal_analyzer, E_refined, coarse_filter_[ch]->SizePartitions(), aec_state.SaturatedCapture(), &G); + coarse_filter_reset_hangover_[ch] = + config_.filter.coarse_reset_hangover_blocks; + } + + if (ApmDataDumper::IsAvailable()) { + RTC_DCHECK_LT(ch, coarse_impulse_responses_.size()); + coarse_filter_[ch]->Adapt(render_buffer, G, + &coarse_impulse_responses_[ch]); + } else { + coarse_filter_[ch]->Adapt(render_buffer, G); } - coarse_filter_[ch]->Adapt(render_buffer, G); if (ch == 0) { data_dumper_->DumpRaw("aec3_subtractor_G_coarse", G.re); data_dumper_->DumpRaw("aec3_subtractor_G_coarse", G.im); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.h index 42ca3729c..767e4aad4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/subtractor.h @@ -78,6 +78,15 @@ 
class Subtractor { refined_impulse_responses_[0].data(), GetTimeDomainLength( refined_filters_[0]->max_filter_size_partitions()))); + if (ApmDataDumper::IsAvailable()) { + RTC_DCHECK_GT(coarse_impulse_responses_.size(), 0); + data_dumper_->DumpRaw( + "aec3_subtractor_h_coarse", + rtc::ArrayView( + coarse_impulse_responses_[0].data(), + GetTimeDomainLength( + coarse_filter_[0]->max_filter_size_partitions()))); + } refined_filters_[0]->DumpFilter("aec3_subtractor_H_refined"); coarse_filter_[0]->DumpFilter("aec3_subtractor_H_coarse"); @@ -120,6 +129,7 @@ class Subtractor { const Aec3Optimization optimization_; const EchoCanceller3Config config_; const size_t num_capture_channels_; + const bool use_coarse_filter_reset_hangover_; std::vector> refined_filters_; std::vector> coarse_filter_; @@ -127,9 +137,11 @@ class Subtractor { std::vector> coarse_gains_; std::vector filter_misadjustment_estimators_; std::vector poor_coarse_filter_counters_; + std::vector coarse_filter_reset_hangover_; std::vector>> refined_frequency_responses_; std::vector> refined_impulse_responses_; + std::vector> coarse_impulse_responses_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h index e7175c36d..d049baeaa 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h @@ -51,6 +51,10 @@ class SuppressionGain { float* high_bands_gain, std::array* low_band_gain); + bool IsDominantNearend() { + return dominant_nearend_detector_->IsNearendState(); + } + // Toggles the usage of the initial state. 
void SetInitialState(bool state); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc index 3ed0980bf..489f53f4f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/transparent_mode.cc @@ -11,6 +11,7 @@ #include "modules/audio_processing/aec3/transparent_mode.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -23,8 +24,8 @@ bool DeactivateTransparentMode() { return field_trial::IsEnabled("WebRTC-Aec3TransparentModeKillSwitch"); } -bool DeactivateTransparentModeHmm() { - return field_trial::IsEnabled("WebRTC-Aec3TransparentModeHmmKillSwitch"); +bool ActivateTransparentModeHmm() { + return field_trial::IsEnabled("WebRTC-Aec3TransparentModeHmm"); } } // namespace @@ -228,12 +229,15 @@ class LegacyTransparentModeImpl : public TransparentMode { std::unique_ptr TransparentMode::Create( const EchoCanceller3Config& config) { if (config.ep_strength.bounded_erl || DeactivateTransparentMode()) { + RTC_LOG(LS_INFO) << "AEC3 Transparent Mode: Disabled"; return nullptr; } - if (DeactivateTransparentModeHmm()) { - return std::make_unique(config); + if (ActivateTransparentModeHmm()) { + RTC_LOG(LS_INFO) << "AEC3 Transparent Mode: HMM"; + return std::make_unique(); } - return std::make_unique(); + RTC_LOG(LS_INFO) << "AEC3 Transparent Mode: Legacy"; + return std::make_unique(config); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math.h index 8ef813341..29ede6be2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/vector_math.h @@ -137,9 +137,6 
@@ class VectorMath { z[j] = x[j] * y[j]; } } break; - case Aec3Optimization::kAvx2: - MultiplyAVX2(x, y, z); - break; #endif #if defined(WEBRTC_HAS_NEON) case Aec3Optimization::kNeon: { @@ -187,9 +184,6 @@ class VectorMath { z[j] += x[j]; } } break; - case Aec3Optimization::kAvx2: - AccumulateAVX2(x, z); - break; #endif #if defined(WEBRTC_HAS_NEON) case Aec3Optimization::kNeon: { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc index 18f85721b..db61b36c2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec_dump/aec_dump_impl.cc @@ -186,6 +186,12 @@ void AecDumpImpl::WriteRuntimeSetting( setting->set_capture_pre_gain(x); break; } + case AudioProcessing::RuntimeSetting::Type::kCapturePostGain: { + float x; + runtime_setting.GetFloat(&x); + setting->set_capture_post_gain(x); + break; + } case AudioProcessing::RuntimeSetting::Type:: kCustomRenderProcessingRuntimeSetting: { float x; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc index 1428d2a0e..ebd978b0d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc @@ -16,7 +16,6 @@ #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc/gain_control.h" #include "modules/audio_processing/agc/gain_map_internal.h" -#include "modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h" #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -28,33 +27,26 @@ namespace webrtc { namespace { -// Amount the microphone level is lowered with every clipping event. 
-const int kClippedLevelStep = 15; -// Proportion of clipped samples required to declare a clipping event. -const float kClippedRatioThreshold = 0.1f; -// Time in frames to wait after a clipping event before checking again. -const int kClippedWaitFrames = 300; - // Amount of error we tolerate in the microphone level (presumably due to OS // quantization) before we assume the user has manually adjusted the microphone. -const int kLevelQuantizationSlack = 25; +constexpr int kLevelQuantizationSlack = 25; -const int kDefaultCompressionGain = 7; -const int kMaxCompressionGain = 12; -const int kMinCompressionGain = 2; +constexpr int kDefaultCompressionGain = 7; +constexpr int kMaxCompressionGain = 12; +constexpr int kMinCompressionGain = 2; // Controls the rate of compression changes towards the target. -const float kCompressionGainStep = 0.05f; +constexpr float kCompressionGainStep = 0.05f; -const int kMaxMicLevel = 255; +constexpr int kMaxMicLevel = 255; static_assert(kGainMapSize > kMaxMicLevel, "gain map too small"); -const int kMinMicLevel = 12; +constexpr int kMinMicLevel = 12; // Prevent very large microphone level changes. -const int kMaxResidualGainChange = 15; +constexpr int kMaxResidualGainChange = 15; // Maximum additional gain allowed to compensate for microphone level // restrictions from clipping events. -const int kSurplusCompressionGain = 6; +constexpr int kSurplusCompressionGain = 6; // Returns whether a fall-back solution to choose the maximum level should be // chosen. 
@@ -138,24 +130,18 @@ float ComputeClippedRatio(const float* const* audio, MonoAgc::MonoAgc(ApmDataDumper* data_dumper, int startup_min_level, int clipped_level_min, - bool use_agc2_level_estimation, bool disable_digital_adaptive, int min_mic_level) : min_mic_level_(min_mic_level), disable_digital_adaptive_(disable_digital_adaptive), + agc_(std::make_unique()), max_level_(kMaxMicLevel), max_compression_gain_(kMaxCompressionGain), target_compression_(kDefaultCompressionGain), compression_(target_compression_), compression_accumulator_(compression_), startup_min_level_(ClampLevel(startup_min_level, min_mic_level_)), - clipped_level_min_(clipped_level_min) { - if (use_agc2_level_estimation) { - agc_ = std::make_unique(data_dumper); - } else { - agc_ = std::make_unique(); - } -} + clipped_level_min_(clipped_level_min) {} MonoAgc::~MonoAgc() = default; @@ -165,7 +151,7 @@ void MonoAgc::Initialize() { target_compression_ = disable_digital_adaptive_ ? 0 : kDefaultCompressionGain; compression_ = disable_digital_adaptive_ ? 0 : target_compression_; compression_accumulator_ = compression_; - capture_muted_ = false; + capture_output_used_ = true; check_volume_on_next_process_ = true; } @@ -189,19 +175,19 @@ void MonoAgc::Process(const int16_t* audio, } } -void MonoAgc::HandleClipping() { +void MonoAgc::HandleClipping(int clipped_level_step) { // Always decrease the maximum level, even if the current level is below // threshold. - SetMaxLevel(std::max(clipped_level_min_, max_level_ - kClippedLevelStep)); + SetMaxLevel(std::max(clipped_level_min_, max_level_ - clipped_level_step)); if (log_to_histograms_) { RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.AgcClippingAdjustmentAllowed", - level_ - kClippedLevelStep >= clipped_level_min_); + level_ - clipped_level_step >= clipped_level_min_); } if (level_ > clipped_level_min_) { // Don't try to adjust the level if we're already below the limit. 
As // a consequence, if the user has brought the level above the limit, we // will still not react until the postproc updates the level. - SetLevel(std::max(clipped_level_min_, level_ - kClippedLevelStep)); + SetLevel(std::max(clipped_level_min_, level_ - clipped_level_step)); // Reset the AGCs for all channels since the level has changed. agc_->Reset(); } @@ -263,14 +249,14 @@ void MonoAgc::SetMaxLevel(int level) { << ", max_compression_gain_=" << max_compression_gain_; } -void MonoAgc::SetCaptureMuted(bool muted) { - if (capture_muted_ == muted) { +void MonoAgc::HandleCaptureOutputUsedChange(bool capture_output_used) { + if (capture_output_used_ == capture_output_used) { return; } - capture_muted_ = muted; + capture_output_used_ = capture_output_used; - if (!muted) { - // When we unmute, we should reset things to be safe. + if (capture_output_used) { + // When we start using the output, we should reset things to be safe. check_volume_on_next_process_ = true; } } @@ -411,13 +397,18 @@ int AgcManagerDirect::instance_counter_ = 0; AgcManagerDirect::AgcManagerDirect(Agc* agc, int startup_min_level, int clipped_level_min, - int sample_rate_hz) + int sample_rate_hz, + int clipped_level_step, + float clipped_ratio_threshold, + int clipped_wait_frames) : AgcManagerDirect(/*num_capture_channels*/ 1, startup_min_level, clipped_level_min, - /*use_agc2_level_estimation*/ false, /*disable_digital_adaptive*/ false, - sample_rate_hz) { + sample_rate_hz, + clipped_level_step, + clipped_ratio_threshold, + clipped_wait_frames) { RTC_DCHECK(channel_agcs_[0]); RTC_DCHECK(agc); channel_agcs_[0]->set_agc(agc); @@ -426,17 +417,22 @@ AgcManagerDirect::AgcManagerDirect(Agc* agc, AgcManagerDirect::AgcManagerDirect(int num_capture_channels, int startup_min_level, int clipped_level_min, - bool use_agc2_level_estimation, bool disable_digital_adaptive, - int sample_rate_hz) + int sample_rate_hz, + int clipped_level_step, + float clipped_ratio_threshold, + int clipped_wait_frames) : 
data_dumper_( new ApmDataDumper(rtc::AtomicOps::Increment(&instance_counter_))), use_min_channel_level_(!UseMaxAnalogChannelLevel()), sample_rate_hz_(sample_rate_hz), num_capture_channels_(num_capture_channels), disable_digital_adaptive_(disable_digital_adaptive), - frames_since_clipped_(kClippedWaitFrames), - capture_muted_(false), + frames_since_clipped_(clipped_wait_frames), + capture_output_used_(true), + clipped_level_step_(clipped_level_step), + clipped_ratio_threshold_(clipped_ratio_threshold), + clipped_wait_frames_(clipped_wait_frames), channel_agcs_(num_capture_channels), new_compressions_to_set_(num_capture_channels) { const int min_mic_level = GetMinMicLevel(); @@ -445,9 +441,15 @@ AgcManagerDirect::AgcManagerDirect(int num_capture_channels, channel_agcs_[ch] = std::make_unique( data_dumper_ch, startup_min_level, clipped_level_min, - use_agc2_level_estimation, disable_digital_adaptive_, min_mic_level); + disable_digital_adaptive_, min_mic_level); } - RTC_DCHECK_LT(0, channel_agcs_.size()); + RTC_DCHECK(!channel_agcs_.empty()); + RTC_DCHECK_GT(clipped_level_step, 0); + RTC_DCHECK_LE(clipped_level_step, 255); + RTC_DCHECK_GT(clipped_ratio_threshold, 0.f); + RTC_DCHECK_LT(clipped_ratio_threshold, 1.f); + RTC_DCHECK_GT(clipped_wait_frames, 0); + channel_agcs_[0]->ActivateLogging(); } @@ -459,7 +461,7 @@ void AgcManagerDirect::Initialize() { for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) { channel_agcs_[ch]->Initialize(); } - capture_muted_ = false; + capture_output_used_ = true; AggregateChannelLevels(); } @@ -494,11 +496,11 @@ void AgcManagerDirect::AnalyzePreProcess(const float* const* audio, size_t samples_per_channel) { RTC_DCHECK(audio); AggregateChannelLevels(); - if (capture_muted_) { + if (!capture_output_used_) { return; } - if (frames_since_clipped_ < kClippedWaitFrames) { + if (frames_since_clipped_ < clipped_wait_frames_) { ++frames_since_clipped_; return; } @@ -515,11 +517,11 @@ void AgcManagerDirect::AnalyzePreProcess(const float* const* 
audio, float clipped_ratio = ComputeClippedRatio(audio, num_capture_channels_, samples_per_channel); - if (clipped_ratio > kClippedRatioThreshold) { + if (clipped_ratio > clipped_ratio_threshold_) { RTC_DLOG(LS_INFO) << "[agc] Clipping detected. clipped_ratio=" << clipped_ratio; for (auto& state_ch : channel_agcs_) { - state_ch->HandleClipping(); + state_ch->HandleClipping(clipped_level_step_); } frames_since_clipped_ = 0; } @@ -529,7 +531,7 @@ void AgcManagerDirect::AnalyzePreProcess(const float* const* audio, void AgcManagerDirect::Process(const AudioBuffer* audio) { AggregateChannelLevels(); - if (capture_muted_) { + if (!capture_output_used_) { return; } @@ -558,11 +560,11 @@ absl::optional AgcManagerDirect::GetDigitalComressionGain() { return new_compressions_to_set_[channel_controlling_gain_]; } -void AgcManagerDirect::SetCaptureMuted(bool muted) { +void AgcManagerDirect::HandleCaptureOutputUsedChange(bool capture_output_used) { for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) { - channel_agcs_[ch]->SetCaptureMuted(muted); + channel_agcs_[ch]->HandleCaptureOutputUsedChange(capture_output_used); } - capture_muted_ = muted; + capture_output_used_ = capture_output_used; } float AgcManagerDirect::voice_probability() const { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h index d3663be69..e0be1a0dd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h @@ -34,13 +34,20 @@ class AgcManagerDirect final { // AgcManagerDirect will configure GainControl internally. The user is // responsible for processing the audio using it after the call to Process. // The operating range of startup_min_level is [12, 255] and any input value - // outside that range will be clamped. + // outside that range will be clamped. 
`clipped_level_step` is the amount + // the microphone level is lowered with every clipping event, limited to + // (0, 255]. `clipped_ratio_threshold` is the proportion of clipped + // samples required to declare a clipping event, limited to (0.f, 1.f). + // `clipped_wait_frames` is the time in frames to wait after a clipping event + // before checking again, limited to values higher than 0. AgcManagerDirect(int num_capture_channels, int startup_min_level, int clipped_level_min, - bool use_agc2_level_estimation, bool disable_digital_adaptive, - int sample_rate_hz); + int sample_rate_hz, + int clipped_level_step, + float clipped_ratio_threshold, + int clipped_wait_frames); ~AgcManagerDirect(); AgcManagerDirect(const AgcManagerDirect&) = delete; @@ -52,10 +59,9 @@ class AgcManagerDirect final { void AnalyzePreProcess(const AudioBuffer* audio); void Process(const AudioBuffer* audio); - // Call when the capture stream has been muted/unmuted. This causes the - // manager to disregard all incoming audio; chances are good it's background - // noise to which we'd like to avoid adapting. - void SetCaptureMuted(bool muted); + // Call when the capture stream output has been flagged to be used/not-used. + // If unused, the manager disregards all incoming audio. 
+ void HandleCaptureOutputUsedChange(bool capture_output_used); float voice_probability() const; int stream_analog_level() const { return stream_analog_level_; } @@ -73,13 +79,28 @@ class AgcManagerDirect final { DisableDigitalDisablesDigital); FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest, AgcMinMicLevelExperiment); + FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest, + AgcMinMicLevelExperimentDisabled); + FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest, + AgcMinMicLevelExperimentOutOfRangeAbove); + FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest, + AgcMinMicLevelExperimentOutOfRangeBelow); + FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest, + AgcMinMicLevelExperimentEnabled50); + FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest, + AgcMinMicLevelExperimentEnabledAboveStartupLevel); + FRIEND_TEST_ALL_PREFIXES(AgcManagerDirectStandaloneTest, + ClippingParametersVerified); // Dependency injection for testing. Don't delete |agc| as the memory is owned // by the manager. 
AgcManagerDirect(Agc* agc, int startup_min_level, int clipped_level_min, - int sample_rate_hz); + int sample_rate_hz, + int clipped_level_step, + float clipped_ratio_threshold, + int clipped_wait_frames); void AnalyzePreProcess(const float* const* audio, size_t samples_per_channel); @@ -94,9 +115,13 @@ class AgcManagerDirect final { int frames_since_clipped_; int stream_analog_level_ = 0; - bool capture_muted_; + bool capture_output_used_; int channel_controlling_gain_ = 0; + const int clipped_level_step_; + const float clipped_ratio_threshold_; + const int clipped_wait_frames_; + std::vector> channel_agcs_; std::vector> new_compressions_to_set_; }; @@ -106,7 +131,6 @@ class MonoAgc { MonoAgc(ApmDataDumper* data_dumper, int startup_min_level, int clipped_level_min, - bool use_agc2_level_estimation, bool disable_digital_adaptive, int min_mic_level); ~MonoAgc(); @@ -114,9 +138,9 @@ class MonoAgc { MonoAgc& operator=(const MonoAgc&) = delete; void Initialize(); - void SetCaptureMuted(bool muted); + void HandleCaptureOutputUsedChange(bool capture_output_used); - void HandleClipping(); + void HandleClipping(int clipped_level_step); void Process(const int16_t* audio, size_t samples_per_channel, @@ -158,7 +182,7 @@ class MonoAgc { int target_compression_; int compression_; float compression_accumulator_; - bool capture_muted_ = false; + bool capture_output_used_ = true; bool check_volume_on_next_process_ = true; bool startup_ = true; int startup_min_level_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc index 0372ccf38..3fc9008db 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc @@ -11,6 +11,7 @@ #include "modules/audio_processing/agc2/adaptive_agc.h" #include "common_audio/include/audio_util.h" +#include 
"modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/agc2/vad_with_level.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" @@ -19,72 +20,107 @@ namespace webrtc { namespace { -void DumpDebugData(const AdaptiveDigitalGainApplier::FrameInfo& info, - ApmDataDumper& dumper) { - dumper.DumpRaw("agc2_vad_probability", info.vad_result.speech_probability); - dumper.DumpRaw("agc2_vad_rms_dbfs", info.vad_result.rms_dbfs); - dumper.DumpRaw("agc2_vad_peak_dbfs", info.vad_result.peak_dbfs); - dumper.DumpRaw("agc2_noise_estimate_dbfs", info.input_noise_level_dbfs); - dumper.DumpRaw("agc2_last_limiter_audio_level", info.limiter_envelope_dbfs); +using AdaptiveDigitalConfig = + AudioProcessing::Config::GainController2::AdaptiveDigital; +using NoiseEstimatorType = + AudioProcessing::Config::GainController2::NoiseEstimator; + +// Detects the available CPU features and applies any kill-switches. +AvailableCpuFeatures GetAllowedCpuFeatures( + const AdaptiveDigitalConfig& config) { + AvailableCpuFeatures features = GetAvailableCpuFeatures(); + if (!config.sse2_allowed) { + features.sse2 = false; + } + if (!config.avx2_allowed) { + features.avx2 = false; + } + if (!config.neon_allowed) { + features.neon = false; + } + return features; } -constexpr int kGainApplierAdjacentSpeechFramesThreshold = 1; -constexpr float kMaxGainChangePerSecondDb = 3.f; -constexpr float kMaxOutputNoiseLevelDbfs = -50.f; +std::unique_ptr CreateNoiseLevelEstimator( + NoiseEstimatorType estimator_type, + ApmDataDumper* apm_data_dumper) { + switch (estimator_type) { + case NoiseEstimatorType::kStationaryNoise: + return CreateStationaryNoiseEstimator(apm_data_dumper); + case NoiseEstimatorType::kNoiseFloor: + return CreateNoiseFloorEstimator(apm_data_dumper); + } +} } // namespace -AdaptiveAgc::AdaptiveAgc(ApmDataDumper* apm_data_dumper) - : speech_level_estimator_(apm_data_dumper), - gain_applier_(apm_data_dumper, - 
kGainApplierAdjacentSpeechFramesThreshold, - kMaxGainChangePerSecondDb, - kMaxOutputNoiseLevelDbfs), - apm_data_dumper_(apm_data_dumper), - noise_level_estimator_(apm_data_dumper) { - RTC_DCHECK(apm_data_dumper); -} - AdaptiveAgc::AdaptiveAgc(ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2& config) - : speech_level_estimator_( - apm_data_dumper, - config.adaptive_digital.level_estimator, - config.adaptive_digital - .level_estimator_adjacent_speech_frames_threshold, - config.adaptive_digital.initial_saturation_margin_db, - config.adaptive_digital.extra_saturation_margin_db), - vad_(config.adaptive_digital.vad_probability_attack), - gain_applier_( - apm_data_dumper, - config.adaptive_digital.gain_applier_adjacent_speech_frames_threshold, - config.adaptive_digital.max_gain_change_db_per_second, - config.adaptive_digital.max_output_noise_level_dbfs), + const AdaptiveDigitalConfig& config) + : speech_level_estimator_(apm_data_dumper, + config.adjacent_speech_frames_threshold), + vad_(config.vad_reset_period_ms, GetAllowedCpuFeatures(config)), + gain_controller_(apm_data_dumper, + config.adjacent_speech_frames_threshold, + config.max_gain_change_db_per_second, + config.max_output_noise_level_dbfs, + config.dry_run), apm_data_dumper_(apm_data_dumper), - noise_level_estimator_(apm_data_dumper) { + noise_level_estimator_( + CreateNoiseLevelEstimator(config.noise_estimator, apm_data_dumper)), + saturation_protector_( + CreateSaturationProtector(kSaturationProtectorInitialHeadroomDb, + kSaturationProtectorExtraHeadroomDb, + config.adjacent_speech_frames_threshold, + apm_data_dumper)) { RTC_DCHECK(apm_data_dumper); - if (!config.adaptive_digital.use_saturation_protector) { + RTC_DCHECK(noise_level_estimator_); + RTC_DCHECK(saturation_protector_); + if (!config.use_saturation_protector) { RTC_LOG(LS_WARNING) << "The saturation protector cannot be disabled."; } } AdaptiveAgc::~AdaptiveAgc() = default; -void AdaptiveAgc::Process(AudioFrameView 
frame, float limiter_envelope) { - AdaptiveDigitalGainApplier::FrameInfo info; - info.vad_result = vad_.AnalyzeFrame(frame); - speech_level_estimator_.Update(info.vad_result); - info.input_level_dbfs = speech_level_estimator_.level_dbfs(); - info.input_noise_level_dbfs = noise_level_estimator_.Analyze(frame); - info.limiter_envelope_dbfs = - limiter_envelope > 0 ? FloatS16ToDbfs(limiter_envelope) : -90.f; - info.estimate_is_confident = speech_level_estimator_.IsConfident(); - DumpDebugData(info, *apm_data_dumper_); - gain_applier_.Process(info, frame); +void AdaptiveAgc::Initialize(int sample_rate_hz, int num_channels) { + gain_controller_.Initialize(sample_rate_hz, num_channels); } -void AdaptiveAgc::Reset() { +void AdaptiveAgc::Process(AudioFrameView frame, float limiter_envelope) { + AdaptiveDigitalGainApplier::FrameInfo info; + + VadLevelAnalyzer::Result vad_result = vad_.AnalyzeFrame(frame); + info.speech_probability = vad_result.speech_probability; + apm_data_dumper_->DumpRaw("agc2_speech_probability", + vad_result.speech_probability); + apm_data_dumper_->DumpRaw("agc2_input_rms_dbfs", vad_result.rms_dbfs); + apm_data_dumper_->DumpRaw("agc2_input_peak_dbfs", vad_result.peak_dbfs); + + speech_level_estimator_.Update(vad_result); + info.speech_level_dbfs = speech_level_estimator_.level_dbfs(); + info.speech_level_reliable = speech_level_estimator_.IsConfident(); + apm_data_dumper_->DumpRaw("agc2_speech_level_dbfs", info.speech_level_dbfs); + apm_data_dumper_->DumpRaw("agc2_speech_level_reliable", + info.speech_level_reliable); + + info.noise_rms_dbfs = noise_level_estimator_->Analyze(frame); + apm_data_dumper_->DumpRaw("agc2_noise_rms_dbfs", info.noise_rms_dbfs); + + saturation_protector_->Analyze(info.speech_probability, vad_result.peak_dbfs, + info.speech_level_dbfs); + info.headroom_db = saturation_protector_->HeadroomDb(); + apm_data_dumper_->DumpRaw("agc2_headroom_db", info.headroom_db); + + info.limiter_envelope_dbfs = FloatS16ToDbfs(limiter_envelope); + 
apm_data_dumper_->DumpRaw("agc2_limiter_envelope_dbfs", + info.limiter_envelope_dbfs); + + gain_controller_.Process(info, frame); +} + +void AdaptiveAgc::HandleInputGainChange() { speech_level_estimator_.Reset(); + saturation_protector_->Reset(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.h index f3c7854e1..43c7787e3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_agc.h @@ -11,9 +11,12 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_AGC_H_ #define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_AGC_H_ +#include + #include "modules/audio_processing/agc2/adaptive_digital_gain_applier.h" #include "modules/audio_processing/agc2/adaptive_mode_level_estimator.h" #include "modules/audio_processing/agc2/noise_level_estimator.h" +#include "modules/audio_processing/agc2/saturation_protector.h" #include "modules/audio_processing/agc2/vad_with_level.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/include/audio_processing.h" @@ -22,27 +25,33 @@ namespace webrtc { class ApmDataDumper; // Adaptive digital gain controller. -// TODO(crbug.com/webrtc/7494): Unify with `AdaptiveDigitalGainApplier`. +// TODO(crbug.com/webrtc/7494): Rename to `AdaptiveDigitalGainController`. class AdaptiveAgc { public: - explicit AdaptiveAgc(ApmDataDumper* apm_data_dumper); - // TODO(crbug.com/webrtc/7494): Remove ctor above. - AdaptiveAgc(ApmDataDumper* apm_data_dumper, - const AudioProcessing::Config::GainController2& config); + AdaptiveAgc( + ApmDataDumper* apm_data_dumper, + const AudioProcessing::Config::GainController2::AdaptiveDigital& config); ~AdaptiveAgc(); + void Initialize(int sample_rate_hz, int num_channels); + + // TODO(crbug.com/webrtc/7494): Add `SetLimiterEnvelope()`. 
+ // Analyzes `frame` and applies a digital adaptive gain to it. Takes into // account the envelope measured by the limiter. - // TODO(crbug.com/webrtc/7494): Make the class depend on the limiter. + // TODO(crbug.com/webrtc/7494): Remove `limiter_envelope`. void Process(AudioFrameView frame, float limiter_envelope); - void Reset(); + + // Handles a gain change applied to the input signal (e.g., analog gain). + void HandleInputGainChange(); private: AdaptiveModeLevelEstimator speech_level_estimator_; VadLevelAnalyzer vad_; - AdaptiveDigitalGainApplier gain_applier_; + AdaptiveDigitalGainApplier gain_controller_; ApmDataDumper* const apm_data_dumper_; - NoiseLevelEstimator noise_level_estimator_; + std::unique_ptr noise_level_estimator_; + std::unique_ptr saturation_protector_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc index 36ef9be56..e59b110ef 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc @@ -23,6 +23,9 @@ namespace webrtc { namespace { +constexpr int kHeadroomHistogramMin = 0; +constexpr int kHeadroomHistogramMax = 50; + // This function maps input level to desired applied gain. We want to // boost the signal so that peaks are at -kHeadroomDbfs. We can't // apply more than kMaxGainDb gain. @@ -31,17 +34,13 @@ float ComputeGainDb(float input_level_dbfs) { if (input_level_dbfs < -(kHeadroomDbfs + kMaxGainDb)) { return kMaxGainDb; } - // We expect to end up here most of the time: the level is below // -headroom, but we can boost it to -headroom. if (input_level_dbfs < -kHeadroomDbfs) { return -kHeadroomDbfs - input_level_dbfs; } - - // Otherwise, the level is too high and we can't boost. 
The - // LevelEstimator is responsible for not reporting bogus gain - // values. - RTC_DCHECK_LE(input_level_dbfs, 0.f); + // Otherwise, the level is too high and we can't boost. + RTC_DCHECK_GE(input_level_dbfs, -kHeadroomDbfs); return 0.f; } @@ -52,10 +51,11 @@ float LimitGainByNoise(float target_gain, float input_noise_level_dbfs, float max_output_noise_level_dbfs, ApmDataDumper& apm_data_dumper) { - const float noise_headroom_db = + const float max_allowed_gain_db = max_output_noise_level_dbfs - input_noise_level_dbfs; - apm_data_dumper.DumpRaw("agc2_noise_headroom_db", noise_headroom_db); - return std::min(target_gain, std::max(noise_headroom_db, 0.f)); + apm_data_dumper.DumpRaw("agc2_adaptive_gain_applier_max_allowed_gain_db", + max_allowed_gain_db); + return std::min(target_gain, std::max(max_allowed_gain_db, 0.f)); } float LimitGainByLowConfidence(float target_gain, @@ -68,8 +68,8 @@ float LimitGainByLowConfidence(float target_gain, } const float limiter_level_before_gain = limiter_audio_level_dbfs - last_gain; - // Compute a new gain so that limiter_level_before_gain + new_gain <= - // kLimiterThreshold. + // Compute a new gain so that `limiter_level_before_gain` + `new_target_gain` + // is not great than `kLimiterThresholdForAgcGainDbfs`. 
const float new_target_gain = std::max( kLimiterThresholdForAgcGainDbfs - limiter_level_before_gain, 0.f); return std::min(new_target_gain, target_gain); @@ -80,13 +80,30 @@ float LimitGainByLowConfidence(float target_gain, float ComputeGainChangeThisFrameDb(float target_gain_db, float last_gain_db, bool gain_increase_allowed, - float max_gain_change_db) { + float max_gain_decrease_db, + float max_gain_increase_db) { + RTC_DCHECK_GT(max_gain_decrease_db, 0); + RTC_DCHECK_GT(max_gain_increase_db, 0); float target_gain_difference_db = target_gain_db - last_gain_db; if (!gain_increase_allowed) { target_gain_difference_db = std::min(target_gain_difference_db, 0.f); } - return rtc::SafeClamp(target_gain_difference_db, -max_gain_change_db, - max_gain_change_db); + return rtc::SafeClamp(target_gain_difference_db, -max_gain_decrease_db, + max_gain_increase_db); +} + +// Copies the (multichannel) audio samples from `src` into `dst`. +void CopyAudio(AudioFrameView src, + std::vector>& dst) { + RTC_DCHECK_GT(src.num_channels(), 0); + RTC_DCHECK_GT(src.samples_per_channel(), 0); + RTC_DCHECK_EQ(dst.size(), src.num_channels()); + for (size_t c = 0; c < src.num_channels(); ++c) { + rtc::ArrayView channel_view = src.channel(c); + RTC_DCHECK_EQ(channel_view.size(), src.samples_per_channel()); + RTC_DCHECK_EQ(dst[c].size(), src.samples_per_channel()); + std::copy(channel_view.begin(), channel_view.end(), dst[c].begin()); + } } } // namespace @@ -95,7 +112,8 @@ AdaptiveDigitalGainApplier::AdaptiveDigitalGainApplier( ApmDataDumper* apm_data_dumper, int adjacent_speech_frames_threshold, float max_gain_change_db_per_second, - float max_output_noise_level_dbfs) + float max_output_noise_level_dbfs, + bool dry_run) : apm_data_dumper_(apm_data_dumper), gain_applier_( /*hard_clip_samples=*/false, @@ -104,18 +122,44 @@ AdaptiveDigitalGainApplier::AdaptiveDigitalGainApplier( max_gain_change_db_per_10ms_(max_gain_change_db_per_second * kFrameDurationMs / 1000.f), 
max_output_noise_level_dbfs_(max_output_noise_level_dbfs), + dry_run_(dry_run), calls_since_last_gain_log_(0), frames_to_gain_increase_allowed_(adjacent_speech_frames_threshold_), last_gain_db_(kInitialAdaptiveDigitalGainDb) { - RTC_DCHECK_GT(max_gain_change_db_per_second, 0.f); + RTC_DCHECK_GT(max_gain_change_db_per_second, 0.0f); RTC_DCHECK_GE(frames_to_gain_increase_allowed_, 1); - RTC_DCHECK_GE(max_output_noise_level_dbfs_, -90.f); - RTC_DCHECK_LE(max_output_noise_level_dbfs_, 0.f); + RTC_DCHECK_GE(max_output_noise_level_dbfs_, -90.0f); + RTC_DCHECK_LE(max_output_noise_level_dbfs_, 0.0f); + Initialize(/*sample_rate_hz=*/48000, /*num_channels=*/1); +} + +void AdaptiveDigitalGainApplier::Initialize(int sample_rate_hz, + int num_channels) { + if (!dry_run_) { + return; + } + RTC_DCHECK_GT(sample_rate_hz, 0); + RTC_DCHECK_GT(num_channels, 0); + int frame_size = rtc::CheckedDivExact(sample_rate_hz, 100); + bool sample_rate_changed = + dry_run_frame_.empty() || // Handle initialization. + dry_run_frame_[0].size() != static_cast(frame_size); + bool num_channels_changed = + dry_run_channels_.size() != static_cast(num_channels); + if (sample_rate_changed || num_channels_changed) { + // Resize the multichannel audio vector and update the channel pointers. 
+ dry_run_frame_.resize(num_channels); + dry_run_channels_.resize(num_channels); + for (int c = 0; c < num_channels; ++c) { + dry_run_frame_[c].resize(frame_size); + dry_run_channels_[c] = dry_run_frame_[c].data(); + } + } } void AdaptiveDigitalGainApplier::Process(const FrameInfo& info, AudioFrameView frame) { - RTC_DCHECK_GE(info.input_level_dbfs, -150.f); + RTC_DCHECK_GE(info.speech_level_dbfs, -150.f); RTC_DCHECK_GE(frame.num_channels(), 1); RTC_DCHECK( frame.samples_per_channel() == 80 || frame.samples_per_channel() == 160 || @@ -123,28 +167,46 @@ void AdaptiveDigitalGainApplier::Process(const FrameInfo& info, << "`frame` does not look like a 10 ms frame for an APM supported sample " "rate"; + // Compute the input level used to select the desired gain. + RTC_DCHECK_GT(info.headroom_db, 0.0f); + const float input_level_dbfs = info.speech_level_dbfs + info.headroom_db; + const float target_gain_db = LimitGainByLowConfidence( - LimitGainByNoise(ComputeGainDb(std::min(info.input_level_dbfs, 0.f)), - info.input_noise_level_dbfs, + LimitGainByNoise(ComputeGainDb(input_level_dbfs), info.noise_rms_dbfs, max_output_noise_level_dbfs_, *apm_data_dumper_), - last_gain_db_, info.limiter_envelope_dbfs, info.estimate_is_confident); + last_gain_db_, info.limiter_envelope_dbfs, info.speech_level_reliable); // Forbid increasing the gain until enough adjacent speech frames are // observed. 
- if (info.vad_result.speech_probability < kVadConfidenceThreshold) { + bool first_confident_speech_frame = false; + if (info.speech_probability < kVadConfidenceThreshold) { frames_to_gain_increase_allowed_ = adjacent_speech_frames_threshold_; } else if (frames_to_gain_increase_allowed_ > 0) { frames_to_gain_increase_allowed_--; + first_confident_speech_frame = frames_to_gain_increase_allowed_ == 0; + } + apm_data_dumper_->DumpRaw( + "agc2_adaptive_gain_applier_frames_to_gain_increase_allowed", + frames_to_gain_increase_allowed_); + + const bool gain_increase_allowed = frames_to_gain_increase_allowed_ == 0; + + float max_gain_increase_db = max_gain_change_db_per_10ms_; + if (first_confident_speech_frame) { + // No gain increase happened while waiting for a long enough speech + // sequence. Therefore, temporarily allow a faster gain increase. + RTC_DCHECK(gain_increase_allowed); + max_gain_increase_db *= adjacent_speech_frames_threshold_; } const float gain_change_this_frame_db = ComputeGainChangeThisFrameDb( - target_gain_db, last_gain_db_, - /*gain_increase_allowed=*/frames_to_gain_increase_allowed_ == 0, - max_gain_change_db_per_10ms_); + target_gain_db, last_gain_db_, gain_increase_allowed, + /*max_gain_decrease_db=*/max_gain_change_db_per_10ms_, + max_gain_increase_db); - apm_data_dumper_->DumpRaw("agc2_want_to_change_by_db", + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_want_to_change_by_db", target_gain_db - last_gain_db_); - apm_data_dumper_->DumpRaw("agc2_will_change_by_db", + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_will_change_by_db", gain_change_this_frame_db); // Optimization: avoid calling math functions if gain does not @@ -153,27 +215,45 @@ void AdaptiveDigitalGainApplier::Process(const FrameInfo& info, gain_applier_.SetGainFactor( DbToRatio(last_gain_db_ + gain_change_this_frame_db)); } - gain_applier_.ApplyGain(frame); + + // Modify `frame` only if not running in "dry run" mode. 
+ if (!dry_run_) { + gain_applier_.ApplyGain(frame); + } else { + // Copy `frame` so that `ApplyGain()` is called (on a copy). + CopyAudio(frame, dry_run_frame_); + RTC_DCHECK(!dry_run_channels_.empty()); + AudioFrameView frame_copy(&dry_run_channels_[0], + frame.num_channels(), + frame.samples_per_channel()); + gain_applier_.ApplyGain(frame_copy); + } // Remember that the gain has changed for the next iteration. last_gain_db_ = last_gain_db_ + gain_change_this_frame_db; - apm_data_dumper_->DumpRaw("agc2_applied_gain_db", last_gain_db_); + apm_data_dumper_->DumpRaw("agc2_adaptive_gain_applier_applied_gain_db", + last_gain_db_); // Log every 10 seconds. calls_since_last_gain_log_++; if (calls_since_last_gain_log_ == 1000) { calls_since_last_gain_log_ = 0; + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedSpeechLevel", + -info.speech_level_dbfs, 0, 100, 101); + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedNoiseLevel", + -info.noise_rms_dbfs, 0, 100, 101); + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.Audio.Agc2.Headroom", info.headroom_db, kHeadroomHistogramMin, + kHeadroomHistogramMax, + kHeadroomHistogramMax - kHeadroomHistogramMin + 1); RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.DigitalGainApplied", last_gain_db_, 0, kMaxGainDb, kMaxGainDb + 1); - RTC_HISTOGRAM_COUNTS_LINEAR( - "WebRTC.Audio.Agc2.EstimatedSpeechPlusNoiseLevel", - -info.input_level_dbfs, 0, 100, 101); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.Agc2.EstimatedNoiseLevel", - -info.input_noise_level_dbfs, 0, 100, 101); RTC_LOG(LS_INFO) << "AGC2 adaptive digital" - << " | speech_plus_noise_dbfs: " << info.input_level_dbfs - << " | noise_dbfs: " << info.input_noise_level_dbfs + << " | speech_dbfs: " << info.speech_level_dbfs + << " | noise_dbfs: " << info.noise_rms_dbfs + << " | headroom_db: " << info.headroom_db << " | gain_db: " << last_gain_db_; } } + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h index a65379f5b..8b58ea00b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.h @@ -11,42 +11,46 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ #define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_DIGITAL_GAIN_APPLIER_H_ +#include + #include "modules/audio_processing/agc2/gain_applier.h" -#include "modules/audio_processing/agc2/vad_with_level.h" #include "modules/audio_processing/include/audio_frame_view.h" namespace webrtc { class ApmDataDumper; -// Part of the adaptive digital controller that applies a digital adaptive gain. -// The gain is updated towards a target. The logic decides when gain updates are -// allowed, it controls the adaptation speed and caps the target based on the -// estimated noise level and the speech level estimate confidence. +// TODO(bugs.webrtc.org): Split into `GainAdaptor` and `GainApplier`. +// Selects the target digital gain, decides when and how quickly to adapt to the +// target and applies the current gain to 10 ms frames. class AdaptiveDigitalGainApplier { public: // Information about a frame to process. struct FrameInfo { - float input_level_dbfs; // Estimated speech plus noise level. - float input_noise_level_dbfs; // Estimated noise level. - VadLevelAnalyzer::Result vad_result; - float limiter_envelope_dbfs; // Envelope level from the limiter. - bool estimate_is_confident; + float speech_probability; // Probability of speech in the [0, 1] range. + float speech_level_dbfs; // Estimated speech level (dBFS). + bool speech_level_reliable; // True with reliable speech level estimation. + float noise_rms_dbfs; // Estimated noise RMS level (dBFS). + float headroom_db; // Headroom (dB). + float limiter_envelope_dbfs; // Envelope level from the limiter (dBFS). }; - // Ctor. 
- // `adjacent_speech_frames_threshold` indicates how many speech frames are - // required before a gain increase is allowed. `max_gain_change_db_per_second` - // limits the adaptation speed (uniformly operated across frames). - // `max_output_noise_level_dbfs` limits the output noise level. + // Ctor. `adjacent_speech_frames_threshold` indicates how many adjacent speech + // frames must be observed in order to consider the sequence as speech. + // `max_gain_change_db_per_second` limits the adaptation speed (uniformly + // operated across frames). `max_output_noise_level_dbfs` limits the output + // noise level. If `dry_run` is true, `Process()` will not modify the audio. AdaptiveDigitalGainApplier(ApmDataDumper* apm_data_dumper, int adjacent_speech_frames_threshold, float max_gain_change_db_per_second, - float max_output_noise_level_dbfs); + float max_output_noise_level_dbfs, + bool dry_run); AdaptiveDigitalGainApplier(const AdaptiveDigitalGainApplier&) = delete; AdaptiveDigitalGainApplier& operator=(const AdaptiveDigitalGainApplier&) = delete; + void Initialize(int sample_rate_hz, int num_channels); + // Analyzes `info`, updates the digital gain and applies it to a 10 ms // `frame`. Supports any sample rate supported by APM. 
void Process(const FrameInfo& info, AudioFrameView frame); @@ -58,10 +62,14 @@ class AdaptiveDigitalGainApplier { const int adjacent_speech_frames_threshold_; const float max_gain_change_db_per_10ms_; const float max_output_noise_level_dbfs_; + const bool dry_run_; int calls_since_last_gain_log_; int frames_to_gain_increase_allowed_; float last_gain_db_; + + std::vector> dry_run_frame_; + std::vector dry_run_channels_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc index 739997f5e..507aa12cb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc @@ -22,37 +22,17 @@ namespace { using LevelEstimatorType = AudioProcessing::Config::GainController2::LevelEstimator; -// Combines a level estimation with the saturation protector margins. -float ComputeLevelEstimateDbfs(float level_estimate_dbfs, - float saturation_margin_db, - float extra_saturation_margin_db) { - return rtc::SafeClamp( - level_estimate_dbfs + saturation_margin_db + extra_saturation_margin_db, - -90.f, 30.f); -} - -// Returns the level of given type from `vad_level`. 
-float GetLevel(const VadLevelAnalyzer::Result& vad_level, - LevelEstimatorType type) { - switch (type) { - case LevelEstimatorType::kRms: - return vad_level.rms_dbfs; - break; - case LevelEstimatorType::kPeak: - return vad_level.peak_dbfs; - break; - } - RTC_CHECK_NOTREACHED(); +float ClampLevelEstimateDbfs(float level_estimate_dbfs) { + return rtc::SafeClamp(level_estimate_dbfs, -90.f, 30.f); } } // namespace bool AdaptiveModeLevelEstimator::LevelEstimatorState::operator==( const AdaptiveModeLevelEstimator::LevelEstimatorState& b) const { - return time_to_full_buffer_ms == b.time_to_full_buffer_ms && + return time_to_confidence_ms == b.time_to_confidence_ms && level_dbfs.numerator == b.level_dbfs.numerator && - level_dbfs.denominator == b.level_dbfs.denominator && - saturation_protector == b.saturation_protector; + level_dbfs.denominator == b.level_dbfs.denominator; } float AdaptiveModeLevelEstimator::LevelEstimatorState::Ratio::GetRatio() const { @@ -64,25 +44,14 @@ AdaptiveModeLevelEstimator::AdaptiveModeLevelEstimator( ApmDataDumper* apm_data_dumper) : AdaptiveModeLevelEstimator( apm_data_dumper, - AudioProcessing::Config::GainController2::LevelEstimator::kRms, - kDefaultLevelEstimatorAdjacentSpeechFramesThreshold, - kDefaultInitialSaturationMarginDb, - kDefaultExtraSaturationMarginDb) {} + kDefaultLevelEstimatorAdjacentSpeechFramesThreshold) {} AdaptiveModeLevelEstimator::AdaptiveModeLevelEstimator( ApmDataDumper* apm_data_dumper, - AudioProcessing::Config::GainController2::LevelEstimator level_estimator, - int adjacent_speech_frames_threshold, - float initial_saturation_margin_db, - float extra_saturation_margin_db) + int adjacent_speech_frames_threshold) : apm_data_dumper_(apm_data_dumper), - level_estimator_type_(level_estimator), adjacent_speech_frames_threshold_(adjacent_speech_frames_threshold), - initial_saturation_margin_db_(initial_saturation_margin_db), - extra_saturation_margin_db_(extra_saturation_margin_db), - 
level_dbfs_(ComputeLevelEstimateDbfs(kInitialSpeechLevelEstimateDbfs, - initial_saturation_margin_db_, - extra_saturation_margin_db_)) { + level_dbfs_(ClampLevelEstimateDbfs(kInitialSpeechLevelEstimateDbfs)) { RTC_DCHECK(apm_data_dumper_); RTC_DCHECK_GE(adjacent_speech_frames_threshold_, 1); Reset(); @@ -96,8 +65,6 @@ void AdaptiveModeLevelEstimator::Update( RTC_DCHECK_LT(vad_level.peak_dbfs, 50.f); RTC_DCHECK_GE(vad_level.speech_probability, 0.f); RTC_DCHECK_LE(vad_level.speech_probability, 1.f); - DumpDebugData(); - if (vad_level.speech_probability < kVadConfidenceThreshold) { // Not a speech frame. if (adjacent_speech_frames_threshold_ > 1) { @@ -115,85 +82,82 @@ void AdaptiveModeLevelEstimator::Update( } } num_adjacent_speech_frames_ = 0; - return; - } - - // Speech frame observed. - num_adjacent_speech_frames_++; - - // Update preliminary level estimate. - RTC_DCHECK_GE(preliminary_state_.time_to_full_buffer_ms, 0); - const bool buffer_is_full = preliminary_state_.time_to_full_buffer_ms == 0; - if (!buffer_is_full) { - preliminary_state_.time_to_full_buffer_ms -= kFrameDurationMs; - } - // Weighted average of levels with speech probability as weight. - RTC_DCHECK_GT(vad_level.speech_probability, 0.f); - const float leak_factor = buffer_is_full ? kFullBufferLeakFactor : 1.f; - preliminary_state_.level_dbfs.numerator = - preliminary_state_.level_dbfs.numerator * leak_factor + - GetLevel(vad_level, level_estimator_type_) * vad_level.speech_probability; - preliminary_state_.level_dbfs.denominator = - preliminary_state_.level_dbfs.denominator * leak_factor + - vad_level.speech_probability; - - const float level_dbfs = preliminary_state_.level_dbfs.GetRatio(); - - UpdateSaturationProtectorState(vad_level.peak_dbfs, level_dbfs, - preliminary_state_.saturation_protector); - - if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { - // `preliminary_state_` is now reliable. Update the last level estimation. 
- level_dbfs_ = ComputeLevelEstimateDbfs( - level_dbfs, preliminary_state_.saturation_protector.margin_db, - extra_saturation_margin_db_); + } else { + // Speech frame observed. + num_adjacent_speech_frames_++; + + // Update preliminary level estimate. + RTC_DCHECK_GE(preliminary_state_.time_to_confidence_ms, 0); + const bool buffer_is_full = preliminary_state_.time_to_confidence_ms == 0; + if (!buffer_is_full) { + preliminary_state_.time_to_confidence_ms -= kFrameDurationMs; + } + // Weighted average of levels with speech probability as weight. + RTC_DCHECK_GT(vad_level.speech_probability, 0.f); + const float leak_factor = buffer_is_full ? kLevelEstimatorLeakFactor : 1.f; + preliminary_state_.level_dbfs.numerator = + preliminary_state_.level_dbfs.numerator * leak_factor + + vad_level.rms_dbfs * vad_level.speech_probability; + preliminary_state_.level_dbfs.denominator = + preliminary_state_.level_dbfs.denominator * leak_factor + + vad_level.speech_probability; + + const float level_dbfs = preliminary_state_.level_dbfs.GetRatio(); + + if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { + // `preliminary_state_` is now reliable. Update the last level estimation. + level_dbfs_ = ClampLevelEstimateDbfs(level_dbfs); + } } + DumpDebugData(); } bool AdaptiveModeLevelEstimator::IsConfident() const { if (adjacent_speech_frames_threshold_ == 1) { // Ignore `reliable_state_` when a single frame is enough to update the // level estimate (because it is not used). - return preliminary_state_.time_to_full_buffer_ms == 0; + return preliminary_state_.time_to_confidence_ms == 0; } // Once confident, it remains confident. - RTC_DCHECK(reliable_state_.time_to_full_buffer_ms != 0 || - preliminary_state_.time_to_full_buffer_ms == 0); + RTC_DCHECK(reliable_state_.time_to_confidence_ms != 0 || + preliminary_state_.time_to_confidence_ms == 0); // During the first long enough speech sequence, `reliable_state_` must be // ignored since `preliminary_state_` is used. 
- return reliable_state_.time_to_full_buffer_ms == 0 || + return reliable_state_.time_to_confidence_ms == 0 || (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_ && - preliminary_state_.time_to_full_buffer_ms == 0); + preliminary_state_.time_to_confidence_ms == 0); } void AdaptiveModeLevelEstimator::Reset() { ResetLevelEstimatorState(preliminary_state_); ResetLevelEstimatorState(reliable_state_); - level_dbfs_ = ComputeLevelEstimateDbfs(kInitialSpeechLevelEstimateDbfs, - initial_saturation_margin_db_, - extra_saturation_margin_db_); + level_dbfs_ = ClampLevelEstimateDbfs(kInitialSpeechLevelEstimateDbfs); num_adjacent_speech_frames_ = 0; } void AdaptiveModeLevelEstimator::ResetLevelEstimatorState( LevelEstimatorState& state) const { - state.time_to_full_buffer_ms = kFullBufferSizeMs; - state.level_dbfs.numerator = 0.f; - state.level_dbfs.denominator = 0.f; - ResetSaturationProtectorState(initial_saturation_margin_db_, - state.saturation_protector); + state.time_to_confidence_ms = kLevelEstimatorTimeToConfidenceMs; + state.level_dbfs.numerator = kInitialSpeechLevelEstimateDbfs; + state.level_dbfs.denominator = 1.0f; } void AdaptiveModeLevelEstimator::DumpDebugData() const { - apm_data_dumper_->DumpRaw("agc2_adaptive_level_estimate_dbfs", level_dbfs_); - apm_data_dumper_->DumpRaw("agc2_adaptive_num_adjacent_speech_frames_", - num_adjacent_speech_frames_); - apm_data_dumper_->DumpRaw("agc2_adaptive_preliminary_level_estimate_num", - preliminary_state_.level_dbfs.numerator); - apm_data_dumper_->DumpRaw("agc2_adaptive_preliminary_level_estimate_den", - preliminary_state_.level_dbfs.denominator); - apm_data_dumper_->DumpRaw("agc2_adaptive_preliminary_saturation_margin_db", - preliminary_state_.saturation_protector.margin_db); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_num_adjacent_speech_frames", + num_adjacent_speech_frames_); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_preliminary_level_estimate_num", + 
preliminary_state_.level_dbfs.numerator); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_preliminary_level_estimate_den", + preliminary_state_.level_dbfs.denominator); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_preliminary_time_to_confidence_ms", + preliminary_state_.time_to_confidence_ms); + apm_data_dumper_->DumpRaw( + "agc2_adaptive_level_estimator_reliable_time_to_confidence_ms", + reliable_state_.time_to_confidence_ms); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h index 213fc0f0c..6d4493858 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.h @@ -15,7 +15,6 @@ #include #include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/agc2/saturation_protector.h" #include "modules/audio_processing/agc2/vad_with_level.h" #include "modules/audio_processing/include/audio_processing.h" @@ -29,12 +28,8 @@ class AdaptiveModeLevelEstimator { AdaptiveModeLevelEstimator(const AdaptiveModeLevelEstimator&) = delete; AdaptiveModeLevelEstimator& operator=(const AdaptiveModeLevelEstimator&) = delete; - AdaptiveModeLevelEstimator( - ApmDataDumper* apm_data_dumper, - AudioProcessing::Config::GainController2::LevelEstimator level_estimator, - int adjacent_speech_frames_threshold, - float initial_saturation_margin_db, - float extra_saturation_margin_db); + AdaptiveModeLevelEstimator(ApmDataDumper* apm_data_dumper, + int adjacent_speech_frames_threshold); // Updates the level estimation. 
void Update(const VadLevelAnalyzer::Result& vad_data); @@ -57,10 +52,9 @@ class AdaptiveModeLevelEstimator { float denominator; float GetRatio() const; }; - // TODO(crbug.com/webrtc/7494): Remove time_to_full_buffer_ms if redundant. - int time_to_full_buffer_ms; + // TODO(crbug.com/webrtc/7494): Remove time_to_confidence_ms if redundant. + int time_to_confidence_ms; Ratio level_dbfs; - SaturationProtectorState saturation_protector; }; static_assert(std::is_trivially_copyable::value, ""); @@ -70,11 +64,7 @@ class AdaptiveModeLevelEstimator { ApmDataDumper* const apm_data_dumper_; - const AudioProcessing::Config::GainController2::LevelEstimator - level_estimator_type_; const int adjacent_speech_frames_threshold_; - const float initial_saturation_margin_db_; - const float extra_saturation_margin_db_; LevelEstimatorState preliminary_state_; LevelEstimatorState reliable_state_; float level_dbfs_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc deleted file mode 100644 index 5ceeb7df7..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h" - -#include -#include - -#include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/include/audio_frame_view.h" - -namespace webrtc { - -AdaptiveModeLevelEstimatorAgc::AdaptiveModeLevelEstimatorAgc( - ApmDataDumper* apm_data_dumper) - : level_estimator_(apm_data_dumper) { - set_target_level_dbfs(kDefaultAgc2LevelHeadroomDbfs); -} - -// |audio| must be mono; in a multi-channel stream, provide the first (usually -// left) channel. -void AdaptiveModeLevelEstimatorAgc::Process(const int16_t* audio, - size_t length, - int sample_rate_hz) { - std::vector float_audio_frame(audio, audio + length); - const float* const first_channel = &float_audio_frame[0]; - AudioFrameView frame_view(&first_channel, 1 /* num channels */, - length); - const auto vad_prob = agc2_vad_.AnalyzeFrame(frame_view); - latest_voice_probability_ = vad_prob.speech_probability; - if (latest_voice_probability_ > kVadConfidenceThreshold) { - time_in_ms_since_last_estimate_ += kFrameDurationMs; - } - level_estimator_.Update(vad_prob); -} - -// Retrieves the difference between the target RMS level and the current -// signal RMS level in dB. Returns true if an update is available and false -// otherwise, in which case |error| should be ignored and no action taken. 
-bool AdaptiveModeLevelEstimatorAgc::GetRmsErrorDb(int* error) { - if (time_in_ms_since_last_estimate_ <= kTimeUntilConfidentMs) { - return false; - } - *error = - std::floor(target_level_dbfs() - level_estimator_.level_dbfs() + 0.5f); - time_in_ms_since_last_estimate_ = 0; - return true; -} - -void AdaptiveModeLevelEstimatorAgc::Reset() { - level_estimator_.Reset(); -} - -float AdaptiveModeLevelEstimatorAgc::voice_probability() const { - return latest_voice_probability_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h deleted file mode 100644 index bc6fa843b..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_AGC_H_ -#define MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_AGC_H_ - -#include -#include - -#include "modules/audio_processing/agc/agc.h" -#include "modules/audio_processing/agc2/adaptive_mode_level_estimator.h" -#include "modules/audio_processing/agc2/saturation_protector.h" -#include "modules/audio_processing/agc2/vad_with_level.h" - -namespace webrtc { -class AdaptiveModeLevelEstimatorAgc : public Agc { - public: - explicit AdaptiveModeLevelEstimatorAgc(ApmDataDumper* apm_data_dumper); - - // |audio| must be mono; in a multi-channel stream, provide the first (usually - // left) channel. 
- void Process(const int16_t* audio, - size_t length, - int sample_rate_hz) override; - - // Retrieves the difference between the target RMS level and the current - // signal RMS level in dB. Returns true if an update is available and false - // otherwise, in which case |error| should be ignored and no action taken. - bool GetRmsErrorDb(int* error) override; - void Reset() override; - - float voice_probability() const override; - - private: - static constexpr int kTimeUntilConfidentMs = 700; - static constexpr int kDefaultAgc2LevelHeadroomDbfs = -1; - int32_t time_in_ms_since_last_estimate_ = 0; - AdaptiveModeLevelEstimator level_estimator_; - VadLevelAnalyzer agc2_vad_; - float latest_voice_probability_ = 0.f; -}; -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_AGC2_ADAPTIVE_MODE_LEVEL_ESTIMATOR_AGC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.h index 5d01100eb..adb161492 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_common.h @@ -11,74 +11,59 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_AGC2_COMMON_H_ #define MODULES_AUDIO_PROCESSING_AGC2_AGC2_COMMON_H_ -#include - namespace webrtc { -constexpr float kMinFloatS16Value = -32768.f; -constexpr float kMaxFloatS16Value = 32767.f; +constexpr float kMinFloatS16Value = -32768.0f; +constexpr float kMaxFloatS16Value = 32767.0f; constexpr float kMaxAbsFloatS16Value = 32768.0f; -constexpr size_t kFrameDurationMs = 10; -constexpr size_t kSubFramesInFrame = 20; -constexpr size_t kMaximalNumberOfSamplesPerChannel = 480; +// Minimum audio level in dBFS scale for S16 samples. 
+constexpr float kMinLevelDbfs = -90.31f; -constexpr float kAttackFilterConstant = 0.f; +constexpr int kFrameDurationMs = 10; +constexpr int kSubFramesInFrame = 20; +constexpr int kMaximalNumberOfSamplesPerChannel = 480; // Adaptive digital gain applier settings below. -constexpr float kHeadroomDbfs = 1.f; -constexpr float kMaxGainDb = 30.f; -constexpr float kInitialAdaptiveDigitalGainDb = 8.f; +constexpr float kHeadroomDbfs = 1.0f; +constexpr float kMaxGainDb = 30.0f; +constexpr float kInitialAdaptiveDigitalGainDb = 8.0f; // At what limiter levels should we start decreasing the adaptive digital gain. constexpr float kLimiterThresholdForAgcGainDbfs = -kHeadroomDbfs; // This is the threshold for speech. Speech frames are used for updating the // speech level, measuring the amount of speech, and decide when to allow target // gain reduction. -constexpr float kVadConfidenceThreshold = 0.9f; +constexpr float kVadConfidenceThreshold = 0.95f; -// The amount of 'memory' of the Level Estimator. Decides leak factors. -constexpr size_t kFullBufferSizeMs = 1200; -constexpr float kFullBufferLeakFactor = 1.f - 1.f / kFullBufferSizeMs; - -constexpr float kInitialSpeechLevelEstimateDbfs = -30.f; +// Adaptive digital level estimator parameters. +// Number of milliseconds of speech frames to observe to make the estimator +// confident. +constexpr float kLevelEstimatorTimeToConfidenceMs = 400; +constexpr float kLevelEstimatorLeakFactor = + 1.0f - 1.0f / kLevelEstimatorTimeToConfidenceMs; // Robust VAD probability and speech decisions. -constexpr float kDefaultSmoothedVadProbabilityAttack = 1.f; -constexpr int kDefaultLevelEstimatorAdjacentSpeechFramesThreshold = 1; +constexpr int kDefaultLevelEstimatorAdjacentSpeechFramesThreshold = 12; // Saturation Protector settings. 
-constexpr float kDefaultInitialSaturationMarginDb = 20.f; -constexpr float kDefaultExtraSaturationMarginDb = 2.f; +constexpr float kSaturationProtectorInitialHeadroomDb = 20.0f; +constexpr float kSaturationProtectorExtraHeadroomDb = 5.0f; +constexpr int kSaturationProtectorBufferSize = 4; -constexpr size_t kPeakEnveloperSuperFrameLengthMs = 400; -static_assert(kFullBufferSizeMs % kPeakEnveloperSuperFrameLengthMs == 0, - "Full buffer size should be a multiple of super frame length for " - "optimal Saturation Protector performance."); - -constexpr size_t kPeakEnveloperBufferSize = - kFullBufferSizeMs / kPeakEnveloperSuperFrameLengthMs + 1; - -// This value is 10 ** (-1/20 * frame_size_ms / satproc_attack_ms), -// where satproc_attack_ms is 5000. -constexpr float kSaturationProtectorAttackConstant = 0.9988493699365052f; - -// This value is 10 ** (-1/20 * frame_size_ms / satproc_decay_ms), -// where satproc_decay_ms is 1000. -constexpr float kSaturationProtectorDecayConstant = 0.9997697679981565f; - -// This is computed from kDecayMs by -// 10 ** (-1/20 * subframe_duration / kDecayMs). -// |subframe_duration| is |kFrameDurationMs / kSubFramesInFrame|. -// kDecayMs is defined in agc2_testing_common.h -constexpr float kDecayFilterConstant = 0.9998848773724686f; +// Set the initial speech level estimate so that `kInitialAdaptiveDigitalGainDb` +// is applied at the beginning of the call. +constexpr float kInitialSpeechLevelEstimateDbfs = + -kSaturationProtectorExtraHeadroomDb - + kSaturationProtectorInitialHeadroomDb - kInitialAdaptiveDigitalGainDb - + kHeadroomDbfs; // Number of interpolation points for each region of the limiter. // These values have been tuned to limit the interpolated gain curve error given // the limiter parameters and allowing a maximum error of +/- 32768^-1. 
-constexpr size_t kInterpolatedGainCurveKneePoints = 22; -constexpr size_t kInterpolatedGainCurveBeyondKneePoints = 10; -constexpr size_t kInterpolatedGainCurveTotalPoints = +constexpr int kInterpolatedGainCurveKneePoints = 22; +constexpr int kInterpolatedGainCurveBeyondKneePoints = 10; +constexpr int kInterpolatedGainCurveTotalPoints = kInterpolatedGainCurveKneePoints + kInterpolatedGainCurveBeyondKneePoints; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.cc index 6c22492e8..125e551b7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.cc @@ -10,24 +10,84 @@ #include "modules/audio_processing/agc2/agc2_testing_common.h" +#include + #include "rtc_base/checks.h" namespace webrtc { - namespace test { -std::vector LinSpace(const double l, - const double r, - size_t num_points) { - RTC_CHECK(num_points >= 2); +std::vector LinSpace(double l, double r, int num_points) { + RTC_CHECK_GE(num_points, 2); std::vector points(num_points); const double step = (r - l) / (num_points - 1.0); points[0] = l; - for (size_t i = 1; i < num_points - 1; i++) { + for (int i = 1; i < num_points - 1; i++) { points[i] = static_cast(l) + i * step; } points[num_points - 1] = r; return points; } + +WhiteNoiseGenerator::WhiteNoiseGenerator(int min_amplitude, int max_amplitude) + : rand_gen_(42), + min_amplitude_(min_amplitude), + max_amplitude_(max_amplitude) { + RTC_DCHECK_LT(min_amplitude_, max_amplitude_); + RTC_DCHECK_LE(kMinS16, min_amplitude_); + RTC_DCHECK_LE(min_amplitude_, kMaxS16); + RTC_DCHECK_LE(kMinS16, max_amplitude_); + RTC_DCHECK_LE(max_amplitude_, kMaxS16); +} + +float WhiteNoiseGenerator::operator()() { + return static_cast(rand_gen_.Rand(min_amplitude_, max_amplitude_)); +} + 
+SineGenerator::SineGenerator(float amplitude, + float frequency_hz, + int sample_rate_hz) + : amplitude_(amplitude), + frequency_hz_(frequency_hz), + sample_rate_hz_(sample_rate_hz), + x_radians_(0.0f) { + RTC_DCHECK_GT(amplitude_, 0); + RTC_DCHECK_LE(amplitude_, kMaxS16); +} + +float SineGenerator::operator()() { + constexpr float kPi = 3.1415926536f; + x_radians_ += frequency_hz_ / sample_rate_hz_ * 2 * kPi; + if (x_radians_ >= 2 * kPi) { + x_radians_ -= 2 * kPi; + } + return amplitude_ * std::sinf(x_radians_); +} + +PulseGenerator::PulseGenerator(float pulse_amplitude, + float no_pulse_amplitude, + float frequency_hz, + int sample_rate_hz) + : pulse_amplitude_(pulse_amplitude), + no_pulse_amplitude_(no_pulse_amplitude), + samples_period_( + static_cast(static_cast(sample_rate_hz) / frequency_hz)), + sample_counter_(0) { + RTC_DCHECK_GE(pulse_amplitude_, kMinS16); + RTC_DCHECK_LE(pulse_amplitude_, kMaxS16); + RTC_DCHECK_GT(no_pulse_amplitude_, kMinS16); + RTC_DCHECK_LE(no_pulse_amplitude_, kMaxS16); + RTC_DCHECK_GT(sample_rate_hz, frequency_hz); +} + +float PulseGenerator::operator()() { + sample_counter_++; + if (sample_counter_ >= samples_period_) { + sample_counter_ -= samples_period_; + } + return static_cast(sample_counter_ == 0 ? 
pulse_amplitude_ + : no_pulse_amplitude_); +} + } // namespace test } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.h index 7bfadbb3f..4572d9cff 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.h @@ -11,17 +11,19 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_AGC2_TESTING_COMMON_H_ #define MODULES_AUDIO_PROCESSING_AGC2_AGC2_TESTING_COMMON_H_ -#include - #include #include -#include "rtc_base/checks.h" +#include "rtc_base/random.h" namespace webrtc { - namespace test { +constexpr float kMinS16 = + static_cast(std::numeric_limits::min()); +constexpr float kMaxS16 = + static_cast(std::numeric_limits::max()); + // Level Estimator test parameters. constexpr float kDecayMs = 500.f; @@ -29,47 +31,49 @@ constexpr float kDecayMs = 500.f; constexpr float kLimiterMaxInputLevelDbFs = 1.f; constexpr float kLimiterKneeSmoothnessDb = 1.f; constexpr float kLimiterCompressionRatio = 5.f; -constexpr float kPi = 3.1415926536f; -std::vector LinSpace(const double l, const double r, size_t num_points); +// Returns evenly spaced `num_points` numbers over a specified interval [l, r]. +std::vector LinSpace(double l, double r, int num_points); -class SineGenerator { +// Generates white noise. 
+class WhiteNoiseGenerator { public: - SineGenerator(float frequency, int rate) - : frequency_(frequency), rate_(rate) {} - float operator()() { - x_radians_ += frequency_ / rate_ * 2 * kPi; - if (x_radians_ > 2 * kPi) { - x_radians_ -= 2 * kPi; - } - return 1000.f * sinf(x_radians_); - } + WhiteNoiseGenerator(int min_amplitude, int max_amplitude); + float operator()(); private: - float frequency_; - int rate_; - float x_radians_ = 0.f; + Random rand_gen_; + const int min_amplitude_; + const int max_amplitude_; }; -class PulseGenerator { +// Generates a sine function. +class SineGenerator { public: - PulseGenerator(float frequency, int rate) - : samples_period_( - static_cast(static_cast(rate) / frequency)) { - RTC_DCHECK_GT(rate, frequency); - } - float operator()() { - sample_counter_++; - if (sample_counter_ >= samples_period_) { - sample_counter_ -= samples_period_; - } - return static_cast( - sample_counter_ == 0 ? std::numeric_limits::max() : 10.f); - } + SineGenerator(float amplitude, float frequency_hz, int sample_rate_hz); + float operator()(); private: - int samples_period_; - int sample_counter_ = 0; + const float amplitude_; + const float frequency_hz_; + const int sample_rate_hz_; + float x_radians_; +}; + +// Generates periodic pulses. +class PulseGenerator { + public: + PulseGenerator(float pulse_amplitude, + float no_pulse_amplitude, + float frequency_hz, + int sample_rate_hz); + float operator()(); + + private: + const float pulse_amplitude_; + const float no_pulse_amplitude_; + const int samples_period_; + int sample_counter_; }; } // namespace test diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/cpu_features.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/cpu_features.cc new file mode 100644 index 000000000..cced7614b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/cpu_features.cc @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/agc2/cpu_features.h" + +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/arch.h" +#include "system_wrappers/include/cpu_features_wrapper.h" + +namespace webrtc { + +std::string AvailableCpuFeatures::ToString() const { + char buf[64]; + rtc::SimpleStringBuilder builder(buf); + bool first = true; + if (sse2) { + builder << (first ? "SSE2" : "_SSE2"); + first = false; + } + if (avx2) { + builder << (first ? "AVX2" : "_AVX2"); + first = false; + } + if (neon) { + builder << (first ? "NEON" : "_NEON"); + first = false; + } + if (first) { + return "none"; + } + return builder.str(); +} + +// Detects available CPU features. +AvailableCpuFeatures GetAvailableCpuFeatures() { +#if defined(WEBRTC_ARCH_X86_FAMILY) + return {/*sse2=*/GetCPUInfo(kSSE2) != 0, + /*avx2=*/GetCPUInfo(kAVX2) != 0, + /*neon=*/false}; +#elif defined(WEBRTC_HAS_NEON) + return {/*sse2=*/false, + /*avx2=*/false, + /*neon=*/true}; +#else + return {/*sse2=*/false, + /*avx2=*/false, + /*neon=*/false}; +#endif +} + +AvailableCpuFeatures NoAvailableCpuFeatures() { + return {/*sse2=*/false, /*avx2=*/false, /*neon=*/false}; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/cpu_features.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/cpu_features.h new file mode 100644 index 000000000..54ddfb305 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/cpu_features.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_CPU_FEATURES_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_CPU_FEATURES_H_ + +#include + +namespace webrtc { + +// Collection of flags indicating which CPU features are available on the +// current platform. True means available. +struct AvailableCpuFeatures { + AvailableCpuFeatures(bool sse2, bool avx2, bool neon) + : sse2(sse2), avx2(avx2), neon(neon) {} + // Intel. + bool sse2; + bool avx2; + // ARM. + bool neon; + std::string ToString() const; +}; + +// Detects what CPU features are available. +AvailableCpuFeatures GetAvailableCpuFeatures(); + +// Returns the CPU feature flags all set to false. +AvailableCpuFeatures NoAvailableCpuFeatures(); + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_CPU_FEATURES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.cc index 654ed4be3..fd1a2c3a4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.cc @@ -72,7 +72,7 @@ void DownSampler::Initialize(int sample_rate_hz) { void DownSampler::DownSample(rtc::ArrayView in, rtc::ArrayView out) { - data_dumper_->DumpWav("lc_down_sampler_input", in, sample_rate_hz_, 1); + data_dumper_->DumpWav("agc2_down_sampler_input", in, sample_rate_hz_, 1); RTC_DCHECK_EQ(sample_rate_hz_ * kChunkSizeMs / 1000, in.size()); RTC_DCHECK_EQ(kSampleRate8kHz * kChunkSizeMs / 1000, out.size()); const size_t kMaxNumFrames = kSampleRate48kHz * kChunkSizeMs / 1000; @@ -93,7 +93,7 @@ void 
DownSampler::DownSample(rtc::ArrayView in, std::copy(in.data(), in.data() + in.size(), out.data()); } - data_dumper_->DumpWav("lc_down_sampler_output", out, kSampleRate8kHz, 1); + data_dumper_->DumpWav("agc2_down_sampler_output", out, kSampleRate8kHz, 1); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.h index be7cbb3da..a44f96fa2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/down_sampler.h @@ -31,7 +31,7 @@ class DownSampler { void DownSample(rtc::ArrayView in, rtc::ArrayView out); private: - ApmDataDumper* data_dumper_; + ApmDataDumper* const data_dumper_; int sample_rate_hz_; int down_sampling_factor_; BiQuadFilter low_pass_filter_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.cc index 971f4f62b..3e9bb2efb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.cc @@ -22,10 +22,18 @@ namespace { constexpr float kInitialFilterStateLevel = 0.f; +// Instant attack. +constexpr float kAttackFilterConstant = 0.f; +// This is computed from kDecayMs by +// 10 ** (-1/20 * subframe_duration / kDecayMs). +// |subframe_duration| is |kFrameDurationMs / kSubFramesInFrame|. 
+// kDecayMs is defined in agc2_testing_common.h +constexpr float kDecayFilterConstant = 0.9998848773724686f; + } // namespace FixedDigitalLevelEstimator::FixedDigitalLevelEstimator( - size_t sample_rate_hz, + int sample_rate_hz, ApmDataDumper* apm_data_dumper) : apm_data_dumper_(apm_data_dumper), filter_state_level_(kInitialFilterStateLevel) { @@ -52,8 +60,8 @@ std::array FixedDigitalLevelEstimator::ComputeLevel( for (size_t channel_idx = 0; channel_idx < float_frame.num_channels(); ++channel_idx) { const auto channel = float_frame.channel(channel_idx); - for (size_t sub_frame = 0; sub_frame < kSubFramesInFrame; ++sub_frame) { - for (size_t sample_in_sub_frame = 0; + for (int sub_frame = 0; sub_frame < kSubFramesInFrame; ++sub_frame) { + for (int sample_in_sub_frame = 0; sample_in_sub_frame < samples_in_sub_frame_; ++sample_in_sub_frame) { envelope[sub_frame] = std::max(envelope[sub_frame], @@ -66,14 +74,14 @@ std::array FixedDigitalLevelEstimator::ComputeLevel( // Make sure envelope increases happen one step earlier so that the // corresponding *gain decrease* doesn't miss a sudden signal // increase due to interpolation. - for (size_t sub_frame = 0; sub_frame < kSubFramesInFrame - 1; ++sub_frame) { + for (int sub_frame = 0; sub_frame < kSubFramesInFrame - 1; ++sub_frame) { if (envelope[sub_frame] < envelope[sub_frame + 1]) { envelope[sub_frame] = envelope[sub_frame + 1]; } } // Add attack / decay smoothing. 
- for (size_t sub_frame = 0; sub_frame < kSubFramesInFrame; ++sub_frame) { + for (int sub_frame = 0; sub_frame < kSubFramesInFrame; ++sub_frame) { const float envelope_value = envelope[sub_frame]; if (envelope_value > filter_state_level_) { envelope[sub_frame] = envelope_value * (1 - kAttackFilterConstant) + @@ -97,9 +105,9 @@ std::array FixedDigitalLevelEstimator::ComputeLevel( return envelope; } -void FixedDigitalLevelEstimator::SetSampleRate(size_t sample_rate_hz) { - samples_in_frame_ = rtc::CheckedDivExact(sample_rate_hz * kFrameDurationMs, - static_cast(1000)); +void FixedDigitalLevelEstimator::SetSampleRate(int sample_rate_hz) { + samples_in_frame_ = + rtc::CheckedDivExact(sample_rate_hz * kFrameDurationMs, 1000); samples_in_sub_frame_ = rtc::CheckedDivExact(samples_in_frame_, kSubFramesInFrame); CheckParameterCombination(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h index aa84a2e0f..d96aedaf9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h @@ -31,7 +31,7 @@ class FixedDigitalLevelEstimator { // kSubFramesInSample. For kFrameDurationMs=10 and // kSubFramesInSample=20, this means that sample_rate_hz has to be // divisible by 2000. - FixedDigitalLevelEstimator(size_t sample_rate_hz, + FixedDigitalLevelEstimator(int sample_rate_hz, ApmDataDumper* apm_data_dumper); // The input is assumed to be in FloatS16 format. Scaled input will @@ -43,7 +43,7 @@ class FixedDigitalLevelEstimator { // Rate may be changed at any time (but not concurrently) from the // value passed to the constructor. The class is not thread safe. - void SetSampleRate(size_t sample_rate_hz); + void SetSampleRate(int sample_rate_hz); // Resets the level estimator internal state. 
void Reset(); @@ -55,8 +55,8 @@ class FixedDigitalLevelEstimator { ApmDataDumper* const apm_data_dumper_ = nullptr; float filter_state_level_; - size_t samples_in_frame_; - size_t samples_in_sub_frame_; + int samples_in_frame_; + int samples_in_sub_frame_; RTC_DISALLOW_COPY_AND_ASSIGN(FixedDigitalLevelEstimator); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_gain_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_gain_controller.cc deleted file mode 100644 index ef908dc35..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_gain_controller.cc +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/agc2/fixed_gain_controller.h" - -#include "api/array_view.h" -#include "common_audio/include/audio_util.h" -#include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/logging/apm_data_dumper.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" - -namespace webrtc { -namespace { - -// Returns true when the gain factor is so close to 1 that it would -// not affect int16 samples. 
-bool CloseToOne(float gain_factor) { - return 1.f - 1.f / kMaxFloatS16Value <= gain_factor && - gain_factor <= 1.f + 1.f / kMaxFloatS16Value; -} -} // namespace - -FixedGainController::FixedGainController(ApmDataDumper* apm_data_dumper) - : FixedGainController(apm_data_dumper, "Agc2") {} - -FixedGainController::FixedGainController(ApmDataDumper* apm_data_dumper, - std::string histogram_name_prefix) - : apm_data_dumper_(apm_data_dumper), - limiter_(48000, apm_data_dumper_, histogram_name_prefix) { - // Do update histograms.xml when adding name prefixes. - RTC_DCHECK(histogram_name_prefix == "" || histogram_name_prefix == "Test" || - histogram_name_prefix == "AudioMixer" || - histogram_name_prefix == "Agc2"); -} - -void FixedGainController::SetGain(float gain_to_apply_db) { - // Changes in gain_to_apply_ cause discontinuities. We assume - // gain_to_apply_ is set in the beginning of the call. If it is - // frequently changed, we should add interpolation between the - // values. - // The gain - RTC_DCHECK_LE(-50.f, gain_to_apply_db); - RTC_DCHECK_LE(gain_to_apply_db, 50.f); - const float previous_applied_gained = gain_to_apply_; - gain_to_apply_ = DbToRatio(gain_to_apply_db); - RTC_DCHECK_LT(0.f, gain_to_apply_); - RTC_DLOG(LS_INFO) << "Gain to apply: " << gain_to_apply_db << " db."; - // Reset the gain curve applier to quickly react on abrupt level changes - // caused by large changes of the applied gain. - if (previous_applied_gained != gain_to_apply_) { - limiter_.Reset(); - } -} - -void FixedGainController::SetSampleRate(size_t sample_rate_hz) { - limiter_.SetSampleRate(sample_rate_hz); -} - -void FixedGainController::Process(AudioFrameView signal) { - // Apply fixed digital gain. One of the - // planned usages of the FGC is to only use the limiter. In that - // case, the gain would be 1.0. Not doing the multiplications speeds - // it up considerably. Hence the check. 
- if (!CloseToOne(gain_to_apply_)) { - for (size_t k = 0; k < signal.num_channels(); ++k) { - rtc::ArrayView channel_view = signal.channel(k); - for (auto& sample : channel_view) { - sample *= gain_to_apply_; - } - } - } - - // Use the limiter. - limiter_.Process(signal); - - // Dump data for debug. - const auto channel_view = signal.channel(0); - apm_data_dumper_->DumpRaw("agc2_fixed_digital_gain_curve_applier", - channel_view.size(), channel_view.data()); - // Hard-clipping. - for (size_t k = 0; k < signal.num_channels(); ++k) { - rtc::ArrayView channel_view = signal.channel(k); - for (auto& sample : channel_view) { - sample = rtc::SafeClamp(sample, kMinFloatS16Value, kMaxFloatS16Value); - } - } -} - -float FixedGainController::LastAudioLevel() const { - return limiter_.LastAudioLevel(); -} -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.cc index 502e7024b..3dd501096 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.cc @@ -28,8 +28,9 @@ constexpr std::array constexpr std::array InterpolatedGainCurve::approximation_params_q_; -InterpolatedGainCurve::InterpolatedGainCurve(ApmDataDumper* apm_data_dumper, - std::string histogram_name_prefix) +InterpolatedGainCurve::InterpolatedGainCurve( + ApmDataDumper* apm_data_dumper, + const std::string& histogram_name_prefix) : region_logger_("WebRTC.Audio." + histogram_name_prefix + ".FixedDigitalGainCurveRegion.Identity", "WebRTC.Audio." 
+ histogram_name_prefix + @@ -56,10 +57,10 @@ InterpolatedGainCurve::~InterpolatedGainCurve() { } InterpolatedGainCurve::RegionLogger::RegionLogger( - std::string identity_histogram_name, - std::string knee_histogram_name, - std::string limiter_histogram_name, - std::string saturation_histogram_name) + const std::string& identity_histogram_name, + const std::string& knee_histogram_name, + const std::string& limiter_histogram_name, + const std::string& saturation_histogram_name) : identity_histogram( metrics::HistogramFactoryGetCounts(identity_histogram_name, 1, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h index ef1c027cf..af993204c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h @@ -61,7 +61,7 @@ class InterpolatedGainCurve { }; InterpolatedGainCurve(ApmDataDumper* apm_data_dumper, - std::string histogram_name_prefix); + const std::string& histogram_name_prefix); ~InterpolatedGainCurve(); Stats get_stats() const { return stats_; } @@ -75,7 +75,7 @@ class InterpolatedGainCurve { private: // For comparing 'approximation_params_*_' with ones computed by // ComputeInterpolatedGainCurve. 
- FRIEND_TEST_ALL_PREFIXES(AutomaticGainController2InterpolatedGainCurve, + FRIEND_TEST_ALL_PREFIXES(GainController2InterpolatedGainCurve, CheckApproximationParams); struct RegionLogger { @@ -84,10 +84,10 @@ class InterpolatedGainCurve { metrics::Histogram* limiter_histogram; metrics::Histogram* saturation_histogram; - RegionLogger(std::string identity_histogram_name, - std::string knee_histogram_name, - std::string limiter_histogram_name, - std::string saturation_histogram_name); + RegionLogger(const std::string& identity_histogram_name, + const std::string& knee_histogram_name, + const std::string& limiter_histogram_name, + const std::string& saturation_histogram_name); ~RegionLogger(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.cc index 1589f0740..ed7d3ee5f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.cc @@ -94,7 +94,7 @@ void CheckLimiterSampleRate(size_t sample_rate_hz) { Limiter::Limiter(size_t sample_rate_hz, ApmDataDumper* apm_data_dumper, - std::string histogram_name) + const std::string& histogram_name) : interp_gain_curve_(apm_data_dumper, histogram_name), level_estimator_(sample_rate_hz, apm_data_dumper), apm_data_dumper_(apm_data_dumper) { @@ -125,9 +125,11 @@ void Limiter::Process(AudioFrameView signal) { last_scaling_factor_ = scaling_factors_.back(); // Dump data for debug. 
- apm_data_dumper_->DumpRaw("agc2_gain_curve_applier_scaling_factors", - samples_per_channel, - per_sample_scaling_factors_.data()); + apm_data_dumper_->DumpRaw("agc2_limiter_last_scaling_factor", + last_scaling_factor_); + apm_data_dumper_->DumpRaw( + "agc2_limiter_region", + static_cast(interp_gain_curve_.get_stats().region)); } InterpolatedGainCurve::Stats Limiter::GetGainCurveStats() const { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h index 599fd0f4b..df7b540b7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h @@ -26,7 +26,7 @@ class Limiter { public: Limiter(size_t sample_rate_hz, ApmDataDumper* apm_data_dumper, - std::string histogram_name_prefix); + const std::string& histogram_name_prefix); Limiter(const Limiter& limiter) = delete; Limiter& operator=(const Limiter& limiter) = delete; ~Limiter(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_level_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_level_estimator.cc index 2ca503433..10e8437d3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_level_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_level_estimator.cc @@ -18,19 +18,19 @@ #include "api/array_view.h" #include "common_audio/include/audio_util.h" +#include "modules/audio_processing/agc2/signal_classifier.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" namespace webrtc { - namespace { constexpr int kFramesPerSecond = 100; float FrameEnergy(const AudioFrameView& audio) { - float energy = 0.f; + float energy = 0.0f; for (size_t k = 0; k < audio.num_channels(); ++k) { float channel_energy = - std::accumulate(audio.channel(k).begin(), audio.channel(k).end(), 0.f, + 
std::accumulate(audio.channel(k).begin(), audio.channel(k).end(), 0.0f, [](float a, float b) -> float { return a + b * b; }); energy = std::max(channel_energy, energy); } @@ -41,74 +41,220 @@ float EnergyToDbfs(float signal_energy, size_t num_samples) { const float rms = std::sqrt(signal_energy / num_samples); return FloatS16ToDbfs(rms); } -} // namespace -NoiseLevelEstimator::NoiseLevelEstimator(ApmDataDumper* data_dumper) - : signal_classifier_(data_dumper) { - Initialize(48000); -} - -NoiseLevelEstimator::~NoiseLevelEstimator() {} - -void NoiseLevelEstimator::Initialize(int sample_rate_hz) { - sample_rate_hz_ = sample_rate_hz; - noise_energy_ = 1.f; - first_update_ = true; - min_noise_energy_ = sample_rate_hz * 2.f * 2.f / kFramesPerSecond; - noise_energy_hold_counter_ = 0; - signal_classifier_.Initialize(sample_rate_hz); -} - -float NoiseLevelEstimator::Analyze(const AudioFrameView& frame) { - const int rate = - static_cast(frame.samples_per_channel() * kFramesPerSecond); - if (rate != sample_rate_hz_) { - Initialize(rate); +class NoiseLevelEstimatorImpl : public NoiseLevelEstimator { + public: + NoiseLevelEstimatorImpl(ApmDataDumper* data_dumper) + : data_dumper_(data_dumper), signal_classifier_(data_dumper) { + // Initially assume that 48 kHz will be used. `Analyze()` will detect the + // used sample rate and call `Initialize()` again if needed. 
+ Initialize(/*sample_rate_hz=*/48000); } - const float frame_energy = FrameEnergy(frame); - if (frame_energy <= 0.f) { - RTC_DCHECK_GE(frame_energy, 0.f); + NoiseLevelEstimatorImpl(const NoiseLevelEstimatorImpl&) = delete; + NoiseLevelEstimatorImpl& operator=(const NoiseLevelEstimatorImpl&) = delete; + ~NoiseLevelEstimatorImpl() = default; + + float Analyze(const AudioFrameView& frame) override { + data_dumper_->DumpRaw("agc2_noise_level_estimator_hold_counter", + noise_energy_hold_counter_); + const int sample_rate_hz = + static_cast(frame.samples_per_channel() * kFramesPerSecond); + if (sample_rate_hz != sample_rate_hz_) { + Initialize(sample_rate_hz); + } + const float frame_energy = FrameEnergy(frame); + if (frame_energy <= 0.f) { + RTC_DCHECK_GE(frame_energy, 0.f); + data_dumper_->DumpRaw("agc2_noise_level_estimator_signal_type", -1); + return EnergyToDbfs(noise_energy_, frame.samples_per_channel()); + } + + if (first_update_) { + // Initialize the noise energy to the frame energy. + first_update_ = false; + data_dumper_->DumpRaw("agc2_noise_level_estimator_signal_type", -1); + noise_energy_ = std::max(frame_energy, min_noise_energy_); + return EnergyToDbfs(noise_energy_, frame.samples_per_channel()); + } + + const SignalClassifier::SignalType signal_type = + signal_classifier_.Analyze(frame.channel(0)); + data_dumper_->DumpRaw("agc2_noise_level_estimator_signal_type", + static_cast(signal_type)); + + // Update the noise estimate in a minimum statistics-type manner. + if (signal_type == SignalClassifier::SignalType::kStationary) { + if (frame_energy > noise_energy_) { + // Leak the estimate upwards towards the frame energy if no recent + // downward update. 
+ noise_energy_hold_counter_ = + std::max(noise_energy_hold_counter_ - 1, 0); + + if (noise_energy_hold_counter_ == 0) { + constexpr float kMaxNoiseEnergyFactor = 1.01f; + noise_energy_ = + std::min(noise_energy_ * kMaxNoiseEnergyFactor, frame_energy); + } + } else { + // Update smoothly downwards with a limited maximum update magnitude. + constexpr float kMinNoiseEnergyFactor = 0.9f; + constexpr float kNoiseEnergyDeltaFactor = 0.05f; + noise_energy_ = + std::max(noise_energy_ * kMinNoiseEnergyFactor, + noise_energy_ - kNoiseEnergyDeltaFactor * + (noise_energy_ - frame_energy)); + // Prevent an energy increase for the next 10 seconds. + constexpr int kNumFramesToEnergyIncreaseAllowed = 1000; + noise_energy_hold_counter_ = kNumFramesToEnergyIncreaseAllowed; + } + } else { + // TODO(bugs.webrtc.org/7494): Remove to not forget the estimated level. + // For a non-stationary signal, leak the estimate downwards in order to + // avoid estimate locking due to incorrect signal classification. + noise_energy_ = noise_energy_ * 0.99f; + } + + // Ensure a minimum of the estimate. + noise_energy_ = std::max(noise_energy_, min_noise_energy_); return EnergyToDbfs(noise_energy_, frame.samples_per_channel()); } - if (first_update_) { - // Initialize the noise energy to the frame energy. - first_update_ = false; - return EnergyToDbfs( - noise_energy_ = std::max(frame_energy, min_noise_energy_), - frame.samples_per_channel()); + private: + void Initialize(int sample_rate_hz) { + sample_rate_hz_ = sample_rate_hz; + noise_energy_ = 1.0f; + first_update_ = true; + // Initialize the minimum noise energy to -84 dBFS. 
+ min_noise_energy_ = sample_rate_hz * 2.0f * 2.0f / kFramesPerSecond; + noise_energy_hold_counter_ = 0; + signal_classifier_.Initialize(sample_rate_hz); } - const SignalClassifier::SignalType signal_type = - signal_classifier_.Analyze(frame.channel(0)); + ApmDataDumper* const data_dumper_; + int sample_rate_hz_; + float min_noise_energy_; + bool first_update_; + float noise_energy_; + int noise_energy_hold_counter_; + SignalClassifier signal_classifier_; +}; - // Update the noise estimate in a minimum statistics-type manner. - if (signal_type == SignalClassifier::SignalType::kStationary) { - if (frame_energy > noise_energy_) { - // Leak the estimate upwards towards the frame energy if no recent - // downward update. - noise_energy_hold_counter_ = std::max(noise_energy_hold_counter_ - 1, 0); +// Updates the noise floor with instant decay and slow attack. This tuning is +// specific for AGC2, so that (i) it can promptly increase the gain if the noise +// floor drops (instant decay) and (ii) in case of music or fast speech, due to +// which the noise floor can be overestimated, the gain reduction is slowed +// down. +float SmoothNoiseFloorEstimate(float current_estimate, float new_estimate) { + constexpr float kAttack = 0.5f; + if (current_estimate < new_estimate) { + // Attack phase. + return kAttack * new_estimate + (1.0f - kAttack) * current_estimate; + } + // Instant attack. + return new_estimate; +} - if (noise_energy_hold_counter_ == 0) { - noise_energy_ = std::min(noise_energy_ * 1.01f, frame_energy); - } - } else { - // Update smoothly downwards with a limited maximum update magnitude. - noise_energy_ = - std::max(noise_energy_ * 0.9f, - noise_energy_ + 0.05f * (frame_energy - noise_energy_)); - noise_energy_hold_counter_ = 1000; +class NoiseFloorEstimator : public NoiseLevelEstimator { + public: + // Update the noise floor every 5 seconds. 
+ static constexpr int kUpdatePeriodNumFrames = 500; + static_assert(kUpdatePeriodNumFrames >= 200, + "A too small value may cause noise level overestimation."); + static_assert(kUpdatePeriodNumFrames <= 1500, + "A too large value may make AGC2 slow at reacting to increased " + "noise levels."); + + NoiseFloorEstimator(ApmDataDumper* data_dumper) : data_dumper_(data_dumper) { + // Initially assume that 48 kHz will be used. `Analyze()` will detect the + // used sample rate and call `Initialize()` again if needed. + Initialize(/*sample_rate_hz=*/48000); + } + NoiseFloorEstimator(const NoiseFloorEstimator&) = delete; + NoiseFloorEstimator& operator=(const NoiseFloorEstimator&) = delete; + ~NoiseFloorEstimator() = default; + + float Analyze(const AudioFrameView& frame) override { + // Detect sample rate changes. + const int sample_rate_hz = + static_cast(frame.samples_per_channel() * kFramesPerSecond); + if (sample_rate_hz != sample_rate_hz_) { + Initialize(sample_rate_hz); } - } else { - // For a non-stationary signal, leak the estimate downwards in order to - // avoid estimate locking due to incorrect signal classification. - noise_energy_ = noise_energy_ * 0.99f; + + const float frame_energy = FrameEnergy(frame); + if (frame_energy <= min_noise_energy_) { + // Ignore frames when muted or below the minimum measurable energy. + data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", + noise_energy_); + return EnergyToDbfs(noise_energy_, frame.samples_per_channel()); + } + + if (preliminary_noise_energy_set_) { + preliminary_noise_energy_ = + std::min(preliminary_noise_energy_, frame_energy); + } else { + preliminary_noise_energy_ = frame_energy; + preliminary_noise_energy_set_ = true; + } + data_dumper_->DumpRaw("agc2_noise_floor_estimator_preliminary_level", + preliminary_noise_energy_); + + if (counter_ == 0) { + // Full period observed. + first_period_ = false; + // Update the estimated noise floor energy with the preliminary + // estimation. 
+ noise_energy_ = SmoothNoiseFloorEstimate( + /*current_estimate=*/noise_energy_, + /*new_estimate=*/preliminary_noise_energy_); + // Reset for a new observation period. + counter_ = kUpdatePeriodNumFrames; + preliminary_noise_energy_set_ = false; + } else if (first_period_) { + // While analyzing the signal during the initial period, continuously + // update the estimated noise energy, which is monotonic. + noise_energy_ = preliminary_noise_energy_; + counter_--; + } else { + // During the observation period it's only allowed to lower the energy. + noise_energy_ = std::min(noise_energy_, preliminary_noise_energy_); + counter_--; + } + return EnergyToDbfs(noise_energy_, frame.samples_per_channel()); } - // Ensure a minimum of the estimate. - return EnergyToDbfs( - noise_energy_ = std::max(noise_energy_, min_noise_energy_), - frame.samples_per_channel()); + private: + void Initialize(int sample_rate_hz) { + sample_rate_hz_ = sample_rate_hz; + first_period_ = true; + preliminary_noise_energy_set_ = false; + // Initialize the minimum noise energy to -84 dBFS. 
+ min_noise_energy_ = sample_rate_hz * 2.0f * 2.0f / kFramesPerSecond; + preliminary_noise_energy_ = min_noise_energy_; + noise_energy_ = min_noise_energy_; + counter_ = kUpdatePeriodNumFrames; + } + + ApmDataDumper* const data_dumper_; + int sample_rate_hz_; + float min_noise_energy_; + bool first_period_; + bool preliminary_noise_energy_set_; + float preliminary_noise_energy_; + float noise_energy_; + int counter_; +}; + +} // namespace + +std::unique_ptr CreateStationaryNoiseEstimator( + ApmDataDumper* data_dumper) { + return std::make_unique(data_dumper); +} + +std::unique_ptr CreateNoiseFloorEstimator( + ApmDataDumper* data_dumper) { + return std::make_unique(data_dumper); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_level_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_level_estimator.h index ca2f9f2e2..94aecda7f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_level_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_level_estimator.h @@ -11,33 +11,30 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_NOISE_LEVEL_ESTIMATOR_H_ #define MODULES_AUDIO_PROCESSING_AGC2_NOISE_LEVEL_ESTIMATOR_H_ -#include "modules/audio_processing/agc2/signal_classifier.h" +#include + #include "modules/audio_processing/include/audio_frame_view.h" -#include "rtc_base/constructor_magic.h" namespace webrtc { class ApmDataDumper; +// Noise level estimator interface. class NoiseLevelEstimator { public: - NoiseLevelEstimator(ApmDataDumper* data_dumper); - ~NoiseLevelEstimator(); - // Returns the estimated noise level in dBFS. 
- float Analyze(const AudioFrameView& frame); - - private: - void Initialize(int sample_rate_hz); - - int sample_rate_hz_; - float min_noise_energy_; - bool first_update_; - float noise_energy_; - int noise_energy_hold_counter_; - SignalClassifier signal_classifier_; - - RTC_DISALLOW_COPY_AND_ASSIGN(NoiseLevelEstimator); + virtual ~NoiseLevelEstimator() = default; + // Analyzes a 10 ms `frame`, updates the noise level estimation and returns + // the value for the latter in dBFS. + virtual float Analyze(const AudioFrameView& frame) = 0; }; +// Creates a noise level estimator based on stationarity detection. +std::unique_ptr CreateStationaryNoiseEstimator( + ApmDataDumper* data_dumper); + +// Creates a noise level estimator based on noise floor detection. +std::unique_ptr CreateNoiseFloorEstimator( + ApmDataDumper* data_dumper); + } // namespace webrtc #endif // MODULES_AUDIO_PROCESSING_AGC2_NOISE_LEVEL_ESTIMATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_spectrum_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_spectrum_estimator.cc index 31438b1f4..f283f4e27 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_spectrum_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/noise_spectrum_estimator.cc @@ -63,8 +63,8 @@ void NoiseSpectrumEstimator::Update(rtc::ArrayView spectrum, v = std::max(v, kMinNoisePower); } - data_dumper_->DumpRaw("lc_noise_spectrum", 65, noise_spectrum_); - data_dumper_->DumpRaw("lc_signal_spectrum", spectrum); + data_dumper_->DumpRaw("agc2_noise_spectrum", 65, noise_spectrum_); + data_dumper_->DumpRaw("agc2_signal_spectrum", spectrum); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc deleted file mode 100644 index 5d76b52e5..000000000 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/agc2/rnn_vad/common.h" - -#include "rtc_base/system/arch.h" -#include "system_wrappers/include/cpu_features_wrapper.h" - -namespace webrtc { -namespace rnn_vad { - -Optimization DetectOptimization() { -#if defined(WEBRTC_ARCH_X86_FAMILY) - if (GetCPUInfo(kSSE2) != 0) { - return Optimization::kSse2; - } -#endif - -#if defined(WEBRTC_HAS_NEON) - return Optimization::kNeon; -#endif - - return Optimization::kNone; -} - -} // namespace rnn_vad -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.h index 36b366ad1..be5a2d58c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.h @@ -71,11 +71,6 @@ static_assert(kCepstralCoeffsHistorySize > 2, constexpr int kFeatureVectorSize = 42; -enum class Optimization { kNone, kSse2, kNeon }; - -// Detects what kind of optimizations to use for the code. 
-Optimization DetectOptimization(); - } // namespace rnn_vad } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc index cdbbbc311..f86eba764 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc @@ -26,13 +26,13 @@ const BiQuadFilter::BiQuadCoefficients kHpfConfig24k = { } // namespace -FeaturesExtractor::FeaturesExtractor() +FeaturesExtractor::FeaturesExtractor(const AvailableCpuFeatures& cpu_features) : use_high_pass_filter_(false), pitch_buf_24kHz_(), pitch_buf_24kHz_view_(pitch_buf_24kHz_.GetBufferView()), lp_residual_(kBufSize24kHz), lp_residual_view_(lp_residual_.data(), kBufSize24kHz), - pitch_estimator_(), + pitch_estimator_(cpu_features), reference_frame_view_(pitch_buf_24kHz_.GetMostRecentValuesView()) { RTC_DCHECK_EQ(kBufSize24kHz, lp_residual_.size()); hpf_.Initialize(kHpfConfig24k); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.h index e2c77d2cf..f4cea7a83 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.h @@ -26,7 +26,7 @@ namespace rnn_vad { // Feature extractor to feed the VAD RNN. 
class FeaturesExtractor { public: - FeaturesExtractor(); + explicit FeaturesExtractor(const AvailableCpuFeatures& cpu_features); FeaturesExtractor(const FeaturesExtractor&) = delete; FeaturesExtractor& operator=(const FeaturesExtractor&) = delete; ~FeaturesExtractor(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.h index 2e54dd93d..380d9f608 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.h @@ -18,7 +18,7 @@ namespace webrtc { namespace rnn_vad { -// LPC inverse filter length. +// Linear predictive coding (LPC) inverse filter length. constexpr int kNumLpcCoefficients = 5; // Given a frame |x|, computes a post-processed version of LPC coefficients diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc index c6c3e1b2b..77a118853 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc @@ -18,8 +18,9 @@ namespace webrtc { namespace rnn_vad { -PitchEstimator::PitchEstimator() - : y_energy_24kHz_(kRefineNumLags24kHz, 0.f), +PitchEstimator::PitchEstimator(const AvailableCpuFeatures& cpu_features) + : cpu_features_(cpu_features), + y_energy_24kHz_(kRefineNumLags24kHz, 0.f), pitch_buffer_12kHz_(kBufSize12kHz), auto_correlation_12kHz_(kNumLags12kHz) {} @@ -35,12 +36,13 @@ int PitchEstimator::Estimate( RTC_DCHECK_EQ(auto_correlation_12kHz_.size(), auto_correlation_12kHz_view.size()); + // TODO(bugs.chromium.org/10480): Use `cpu_features_` to estimate pitch. // Perform the initial pitch search at 12 kHz. 
Decimate2x(pitch_buffer, pitch_buffer_12kHz_view); auto_corr_calculator_.ComputeOnPitchBuffer(pitch_buffer_12kHz_view, auto_correlation_12kHz_view); CandidatePitchPeriods pitch_periods = ComputePitchPeriod12kHz( - pitch_buffer_12kHz_view, auto_correlation_12kHz_view); + pitch_buffer_12kHz_view, auto_correlation_12kHz_view, cpu_features_); // The refinement is done using the pitch buffer that contains 24 kHz samples. // Therefore, adapt the inverted lags in |pitch_candidates_inv_lags| from 12 // to 24 kHz. @@ -52,14 +54,15 @@ int PitchEstimator::Estimate( rtc::ArrayView y_energy_24kHz_view( y_energy_24kHz_.data(), kRefineNumLags24kHz); RTC_DCHECK_EQ(y_energy_24kHz_.size(), y_energy_24kHz_view.size()); - ComputeSlidingFrameSquareEnergies24kHz(pitch_buffer, y_energy_24kHz_view); + ComputeSlidingFrameSquareEnergies24kHz(pitch_buffer, y_energy_24kHz_view, + cpu_features_); // Estimation at 48 kHz. - const int pitch_lag_48kHz = - ComputePitchPeriod48kHz(pitch_buffer, y_energy_24kHz_view, pitch_periods); + const int pitch_lag_48kHz = ComputePitchPeriod48kHz( + pitch_buffer, y_energy_24kHz_view, pitch_periods, cpu_features_); last_pitch_48kHz_ = ComputeExtendedPitchPeriod48kHz( pitch_buffer, y_energy_24kHz_view, /*initial_pitch_period_48kHz=*/kMaxPitch48kHz - pitch_lag_48kHz, - last_pitch_48kHz_); + last_pitch_48kHz_, cpu_features_); return last_pitch_48kHz_.period; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.h index e96a2dcaf..42c448eb5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.h @@ -15,6 +15,7 @@ #include #include "api/array_view.h" +#include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/agc2/rnn_vad/auto_correlation.h" #include 
"modules/audio_processing/agc2/rnn_vad/common.h" #include "modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h" @@ -26,7 +27,7 @@ namespace rnn_vad { // Pitch estimator. class PitchEstimator { public: - PitchEstimator(); + explicit PitchEstimator(const AvailableCpuFeatures& cpu_features); PitchEstimator(const PitchEstimator&) = delete; PitchEstimator& operator=(const PitchEstimator&) = delete; ~PitchEstimator(); @@ -39,6 +40,7 @@ class PitchEstimator { return last_pitch_48kHz_.strength; } + const AvailableCpuFeatures cpu_features_; PitchInfo last_pitch_48kHz_{}; AutoCorrelationCalculator auto_corr_calculator_; std::vector y_energy_24kHz_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc index 262c38645..0b8a77e48 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc @@ -18,9 +18,11 @@ #include #include "modules/audio_processing/agc2/rnn_vad/common.h" +#include "modules/audio_processing/agc2/rnn_vad/vector_math.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_compare.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/system/arch.h" namespace webrtc { namespace rnn_vad { @@ -28,14 +30,14 @@ namespace { float ComputeAutoCorrelation( int inverted_lag, - rtc::ArrayView pitch_buffer) { + rtc::ArrayView pitch_buffer, + const VectorMath& vector_math) { RTC_DCHECK_LT(inverted_lag, kBufSize24kHz); RTC_DCHECK_LT(inverted_lag, kRefineNumLags24kHz); static_assert(kMaxPitch24kHz < kBufSize24kHz, ""); - // TODO(bugs.webrtc.org/9076): Maybe optimize using vectorization. 
- return std::inner_product(pitch_buffer.begin() + kMaxPitch24kHz, - pitch_buffer.end(), - pitch_buffer.begin() + inverted_lag, 0.f); + return vector_math.DotProduct( + pitch_buffer.subview(/*offset=*/kMaxPitch24kHz), + pitch_buffer.subview(inverted_lag, kFrameSize20ms24kHz)); } // Given an auto-correlation coefficient `curr_auto_correlation` and its @@ -66,15 +68,16 @@ int GetPitchPseudoInterpolationOffset(float prev_auto_correlation, // output sample rate is twice as that of |lag|. int PitchPseudoInterpolationLagPitchBuf( int lag, - rtc::ArrayView pitch_buffer) { + rtc::ArrayView pitch_buffer, + const VectorMath& vector_math) { int offset = 0; // Cannot apply pseudo-interpolation at the boundaries. if (lag > 0 && lag < kMaxPitch24kHz) { const int inverted_lag = kMaxPitch24kHz - lag; offset = GetPitchPseudoInterpolationOffset( - ComputeAutoCorrelation(inverted_lag + 1, pitch_buffer), - ComputeAutoCorrelation(inverted_lag, pitch_buffer), - ComputeAutoCorrelation(inverted_lag - 1, pitch_buffer)); + ComputeAutoCorrelation(inverted_lag + 1, pitch_buffer, vector_math), + ComputeAutoCorrelation(inverted_lag, pitch_buffer, vector_math), + ComputeAutoCorrelation(inverted_lag - 1, pitch_buffer, vector_math)); } return 2 * lag + offset; } @@ -153,7 +156,8 @@ void ComputeAutoCorrelation( Range inverted_lags, rtc::ArrayView pitch_buffer, rtc::ArrayView auto_correlation, - InvertedLagsIndex& inverted_lags_index) { + InvertedLagsIndex& inverted_lags_index, + const VectorMath& vector_math) { // Check valid range. RTC_DCHECK_LE(inverted_lags.min, inverted_lags.max); // Trick to avoid zero initialization of `auto_correlation`. 
@@ -170,7 +174,7 @@ void ComputeAutoCorrelation( for (int inverted_lag = inverted_lags.min; inverted_lag <= inverted_lags.max; ++inverted_lag) { auto_correlation[inverted_lag] = - ComputeAutoCorrelation(inverted_lag, pitch_buffer); + ComputeAutoCorrelation(inverted_lag, pitch_buffer, vector_math); inverted_lags_index.Append(inverted_lag); } } @@ -181,7 +185,8 @@ int ComputePitchPeriod48kHz( rtc::ArrayView pitch_buffer, rtc::ArrayView inverted_lags, rtc::ArrayView auto_correlation, - rtc::ArrayView y_energy) { + rtc::ArrayView y_energy, + const VectorMath& vector_math) { static_assert(kMaxPitch24kHz > kInitialNumLags24kHz, ""); static_assert(kMaxPitch24kHz < kBufSize24kHz, ""); int best_inverted_lag = 0; // Pitch period. @@ -289,10 +294,12 @@ void Decimate2x(rtc::ArrayView src, void ComputeSlidingFrameSquareEnergies24kHz( rtc::ArrayView pitch_buffer, - rtc::ArrayView y_energy) { - float yy = std::inner_product(pitch_buffer.begin(), - pitch_buffer.begin() + kFrameSize20ms24kHz, - pitch_buffer.begin(), 0.f); + rtc::ArrayView y_energy, + AvailableCpuFeatures cpu_features) { + VectorMath vector_math(cpu_features); + static_assert(kFrameSize20ms24kHz < kBufSize24kHz, ""); + const auto frame_20ms_view = pitch_buffer.subview(0, kFrameSize20ms24kHz); + float yy = vector_math.DotProduct(frame_20ms_view, frame_20ms_view); y_energy[0] = yy; static_assert(kMaxPitch24kHz - 1 + kFrameSize20ms24kHz < kBufSize24kHz, ""); static_assert(kMaxPitch24kHz < kRefineNumLags24kHz, ""); @@ -307,7 +314,8 @@ void ComputeSlidingFrameSquareEnergies24kHz( CandidatePitchPeriods ComputePitchPeriod12kHz( rtc::ArrayView pitch_buffer, - rtc::ArrayView auto_correlation) { + rtc::ArrayView auto_correlation, + AvailableCpuFeatures cpu_features) { static_assert(kMaxPitch12kHz > kNumLags12kHz, ""); static_assert(kMaxPitch12kHz < kBufSize12kHz, ""); @@ -326,10 +334,10 @@ CandidatePitchPeriods ComputePitchPeriod12kHz( } }; - // TODO(bugs.webrtc.org/9076): Maybe optimize using vectorization. 
- float denominator = std::inner_product( - pitch_buffer.begin(), pitch_buffer.begin() + kFrameSize20ms12kHz + 1, - pitch_buffer.begin(), 1.f); + VectorMath vector_math(cpu_features); + static_assert(kFrameSize20ms12kHz + 1 < kBufSize12kHz, ""); + const auto frame_view = pitch_buffer.subview(0, kFrameSize20ms12kHz + 1); + float denominator = 1.f + vector_math.DotProduct(frame_view, frame_view); // Search best and second best pitches by looking at the scaled // auto-correlation. PitchCandidate best; @@ -364,7 +372,8 @@ CandidatePitchPeriods ComputePitchPeriod12kHz( int ComputePitchPeriod48kHz( rtc::ArrayView pitch_buffer, rtc::ArrayView y_energy, - CandidatePitchPeriods pitch_candidates) { + CandidatePitchPeriods pitch_candidates, + AvailableCpuFeatures cpu_features) { // Compute the auto-correlation terms only for neighbors of the two pitch // candidates (best and second best). std::array auto_correlation; @@ -382,26 +391,28 @@ int ComputePitchPeriod48kHz( // Check `r1` precedes `r2`. RTC_DCHECK_LE(r1.min, r2.min); RTC_DCHECK_LE(r1.max, r2.max); + VectorMath vector_math(cpu_features); if (r1.max + 1 >= r2.min) { // Overlapping or adjacent ranges. ComputeAutoCorrelation({r1.min, r2.max}, pitch_buffer, auto_correlation, - inverted_lags_index); + inverted_lags_index, vector_math); } else { // Disjoint ranges. 
ComputeAutoCorrelation(r1, pitch_buffer, auto_correlation, - inverted_lags_index); + inverted_lags_index, vector_math); ComputeAutoCorrelation(r2, pitch_buffer, auto_correlation, - inverted_lags_index); + inverted_lags_index, vector_math); } return ComputePitchPeriod48kHz(pitch_buffer, inverted_lags_index, - auto_correlation, y_energy); + auto_correlation, y_energy, vector_math); } PitchInfo ComputeExtendedPitchPeriod48kHz( rtc::ArrayView pitch_buffer, rtc::ArrayView y_energy, int initial_pitch_period_48kHz, - PitchInfo last_pitch_48kHz) { + PitchInfo last_pitch_48kHz, + AvailableCpuFeatures cpu_features) { RTC_DCHECK_LE(kMinPitch48kHz, initial_pitch_period_48kHz); RTC_DCHECK_LE(initial_pitch_period_48kHz, kMaxPitch48kHz); @@ -419,13 +430,14 @@ PitchInfo ComputeExtendedPitchPeriod48kHz( RTC_DCHECK_GE(x_energy * y_energy, 0.f); return xy / std::sqrt(1.f + x_energy * y_energy); }; + VectorMath vector_math(cpu_features); // Initialize the best pitch candidate with `initial_pitch_period_48kHz`. RefinedPitchCandidate best_pitch; best_pitch.period = std::min(initial_pitch_period_48kHz / 2, kMaxPitch24kHz - 1); - best_pitch.xy = - ComputeAutoCorrelation(kMaxPitch24kHz - best_pitch.period, pitch_buffer); + best_pitch.xy = ComputeAutoCorrelation(kMaxPitch24kHz - best_pitch.period, + pitch_buffer, vector_math); best_pitch.y_energy = y_energy[kMaxPitch24kHz - best_pitch.period]; best_pitch.strength = pitch_strength(best_pitch.xy, best_pitch.y_energy); // Keep a copy of the initial pitch candidate. @@ -463,9 +475,11 @@ PitchInfo ComputeExtendedPitchPeriod48kHz( // |alternative_pitch.period| by also looking at its possible sub-harmonic // |dual_alternative_period|. const float xy_primary_period = ComputeAutoCorrelation( - kMaxPitch24kHz - alternative_pitch.period, pitch_buffer); + kMaxPitch24kHz - alternative_pitch.period, pitch_buffer, vector_math); + // TODO(webrtc:10480): Copy `xy_primary_period` if the secondary period is + // equal to the primary one. 
const float xy_secondary_period = ComputeAutoCorrelation( - kMaxPitch24kHz - dual_alternative_period, pitch_buffer); + kMaxPitch24kHz - dual_alternative_period, pitch_buffer, vector_math); const float xy = 0.5f * (xy_primary_period + xy_secondary_period); const float yy = 0.5f * (y_energy[kMaxPitch24kHz - alternative_pitch.period] + @@ -489,8 +503,8 @@ PitchInfo ComputeExtendedPitchPeriod48kHz( : best_pitch.xy / (best_pitch.y_energy + 1.f); final_pitch_strength = std::min(best_pitch.strength, final_pitch_strength); int final_pitch_period_48kHz = std::max( - kMinPitch48kHz, - PitchPseudoInterpolationLagPitchBuf(best_pitch.period, pitch_buffer)); + kMinPitch48kHz, PitchPseudoInterpolationLagPitchBuf( + best_pitch.period, pitch_buffer, vector_math)); return {final_pitch_period_48kHz, final_pitch_strength}; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h index 0af55f8e6..aa2dd1374 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h @@ -17,6 +17,7 @@ #include #include "api/array_view.h" +#include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" namespace webrtc { @@ -65,7 +66,8 @@ void Decimate2x(rtc::ArrayView src, // buffer. The indexes of `y_energy` are inverted lags. void ComputeSlidingFrameSquareEnergies24kHz( rtc::ArrayView pitch_buffer, - rtc::ArrayView y_energy); + rtc::ArrayView y_energy, + AvailableCpuFeatures cpu_features); // Top-2 pitch period candidates. Unit: number of samples - i.e., inverted lags. struct CandidatePitchPeriods { @@ -78,7 +80,8 @@ struct CandidatePitchPeriods { // indexes). 
CandidatePitchPeriods ComputePitchPeriod12kHz( rtc::ArrayView pitch_buffer, - rtc::ArrayView auto_correlation); + rtc::ArrayView auto_correlation, + AvailableCpuFeatures cpu_features); // Computes the pitch period at 48 kHz given a view on the 24 kHz pitch buffer, // the energies for the sliding frames `y` at 24 kHz and the pitch period @@ -86,7 +89,8 @@ CandidatePitchPeriods ComputePitchPeriod12kHz( int ComputePitchPeriod48kHz( rtc::ArrayView pitch_buffer, rtc::ArrayView y_energy, - CandidatePitchPeriods pitch_candidates_24kHz); + CandidatePitchPeriods pitch_candidates_24kHz, + AvailableCpuFeatures cpu_features); struct PitchInfo { int period; @@ -101,7 +105,8 @@ PitchInfo ComputeExtendedPitchPeriod48kHz( rtc::ArrayView pitch_buffer, rtc::ArrayView y_energy, int initial_pitch_period_48kHz, - PitchInfo last_pitch_48kHz); + PitchInfo last_pitch_48kHz, + AvailableCpuFeatures cpu_features); } // namespace rnn_vad } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc index 2072a6854..475bef977 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc @@ -10,417 +10,81 @@ #include "modules/audio_processing/agc2/rnn_vad/rnn.h" -// Defines WEBRTC_ARCH_X86_FAMILY, used below. 
-#include "rtc_base/system/arch.h" - -#if defined(WEBRTC_HAS_NEON) -#include -#endif -#if defined(WEBRTC_ARCH_X86_FAMILY) -#include -#endif -#include -#include -#include -#include - #include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "third_party/rnnoise/src/rnn_activations.h" #include "third_party/rnnoise/src/rnn_vad_weights.h" namespace webrtc { namespace rnn_vad { namespace { -using rnnoise::kWeightsScale; - -using rnnoise::kInputLayerInputSize; +using ::rnnoise::kInputLayerInputSize; static_assert(kFeatureVectorSize == kInputLayerInputSize, ""); -using rnnoise::kInputDenseBias; -using rnnoise::kInputDenseWeights; -using rnnoise::kInputLayerOutputSize; -static_assert(kInputLayerOutputSize <= kFullyConnectedLayersMaxUnits, - "Increase kFullyConnectedLayersMaxUnits."); +using ::rnnoise::kInputDenseBias; +using ::rnnoise::kInputDenseWeights; +using ::rnnoise::kInputLayerOutputSize; +static_assert(kInputLayerOutputSize <= kFullyConnectedLayerMaxUnits, ""); -using rnnoise::kHiddenGruBias; -using rnnoise::kHiddenGruRecurrentWeights; -using rnnoise::kHiddenGruWeights; -using rnnoise::kHiddenLayerOutputSize; -static_assert(kHiddenLayerOutputSize <= kRecurrentLayersMaxUnits, - "Increase kRecurrentLayersMaxUnits."); +using ::rnnoise::kHiddenGruBias; +using ::rnnoise::kHiddenGruRecurrentWeights; +using ::rnnoise::kHiddenGruWeights; +using ::rnnoise::kHiddenLayerOutputSize; +static_assert(kHiddenLayerOutputSize <= kGruLayerMaxUnits, ""); -using rnnoise::kOutputDenseBias; -using rnnoise::kOutputDenseWeights; -using rnnoise::kOutputLayerOutputSize; -static_assert(kOutputLayerOutputSize <= kFullyConnectedLayersMaxUnits, - "Increase kFullyConnectedLayersMaxUnits."); - -using rnnoise::SigmoidApproximated; -using rnnoise::TansigApproximated; - -inline float RectifiedLinearUnit(float x) { - return x < 0.f ? 
0.f : x; -} - -std::vector GetScaledParams(rtc::ArrayView params) { - std::vector scaled_params(params.size()); - std::transform(params.begin(), params.end(), scaled_params.begin(), - [](int8_t x) -> float { - return rnnoise::kWeightsScale * static_cast(x); - }); - return scaled_params; -} - -// TODO(bugs.chromium.org/10480): Hard-code optimized layout and remove this -// function to improve setup time. -// Casts and scales |weights| and re-arranges the layout. -std::vector GetPreprocessedFcWeights( - rtc::ArrayView weights, - int output_size) { - if (output_size == 1) { - return GetScaledParams(weights); - } - // Transpose, scale and cast. - const int input_size = rtc::CheckedDivExact( - rtc::dchecked_cast(weights.size()), output_size); - std::vector w(weights.size()); - for (int o = 0; o < output_size; ++o) { - for (int i = 0; i < input_size; ++i) { - w[o * input_size + i] = rnnoise::kWeightsScale * - static_cast(weights[i * output_size + o]); - } - } - return w; -} - -constexpr int kNumGruGates = 3; // Update, reset, output. - -// TODO(bugs.chromium.org/10480): Hard-coded optimized layout and remove this -// function to improve setup time. -// Casts and scales |tensor_src| for a GRU layer and re-arranges the layout. -// It works both for weights, recurrent weights and bias. -std::vector GetPreprocessedGruTensor( - rtc::ArrayView tensor_src, - int output_size) { - // Transpose, cast and scale. - // |n| is the size of the first dimension of the 3-dim tensor |weights|. 
- const int n = rtc::CheckedDivExact(rtc::dchecked_cast(tensor_src.size()), - output_size * kNumGruGates); - const int stride_src = kNumGruGates * output_size; - const int stride_dst = n * output_size; - std::vector tensor_dst(tensor_src.size()); - for (int g = 0; g < kNumGruGates; ++g) { - for (int o = 0; o < output_size; ++o) { - for (int i = 0; i < n; ++i) { - tensor_dst[g * stride_dst + o * n + i] = - rnnoise::kWeightsScale * - static_cast( - tensor_src[i * stride_src + g * output_size + o]); - } - } - } - return tensor_dst; -} - -void ComputeGruUpdateResetGates(int input_size, - int output_size, - rtc::ArrayView weights, - rtc::ArrayView recurrent_weights, - rtc::ArrayView bias, - rtc::ArrayView input, - rtc::ArrayView state, - rtc::ArrayView gate) { - for (int o = 0; o < output_size; ++o) { - gate[o] = bias[o]; - for (int i = 0; i < input_size; ++i) { - gate[o] += input[i] * weights[o * input_size + i]; - } - for (int s = 0; s < output_size; ++s) { - gate[o] += state[s] * recurrent_weights[o * output_size + s]; - } - gate[o] = SigmoidApproximated(gate[o]); - } -} - -void ComputeGruOutputGate(int input_size, - int output_size, - rtc::ArrayView weights, - rtc::ArrayView recurrent_weights, - rtc::ArrayView bias, - rtc::ArrayView input, - rtc::ArrayView state, - rtc::ArrayView reset, - rtc::ArrayView gate) { - for (int o = 0; o < output_size; ++o) { - gate[o] = bias[o]; - for (int i = 0; i < input_size; ++i) { - gate[o] += input[i] * weights[o * input_size + i]; - } - for (int s = 0; s < output_size; ++s) { - gate[o] += state[s] * recurrent_weights[o * output_size + s] * reset[s]; - } - gate[o] = RectifiedLinearUnit(gate[o]); - } -} - -// Gated recurrent unit (GRU) layer un-optimized implementation. 
-void ComputeGruLayerOutput(int input_size, - int output_size, - rtc::ArrayView input, - rtc::ArrayView weights, - rtc::ArrayView recurrent_weights, - rtc::ArrayView bias, - rtc::ArrayView state) { - RTC_DCHECK_EQ(input_size, input.size()); - // Stride and offset used to read parameter arrays. - const int stride_in = input_size * output_size; - const int stride_out = output_size * output_size; - - // Update gate. - std::array update; - ComputeGruUpdateResetGates( - input_size, output_size, weights.subview(0, stride_in), - recurrent_weights.subview(0, stride_out), bias.subview(0, output_size), - input, state, update); - - // Reset gate. - std::array reset; - ComputeGruUpdateResetGates( - input_size, output_size, weights.subview(stride_in, stride_in), - recurrent_weights.subview(stride_out, stride_out), - bias.subview(output_size, output_size), input, state, reset); - - // Output gate. - std::array output; - ComputeGruOutputGate( - input_size, output_size, weights.subview(2 * stride_in, stride_in), - recurrent_weights.subview(2 * stride_out, stride_out), - bias.subview(2 * output_size, output_size), input, state, reset, output); - - // Update output through the update gates and update the state. - for (int o = 0; o < output_size; ++o) { - output[o] = update[o] * state[o] + (1.f - update[o]) * output[o]; - state[o] = output[o]; - } -} - -// Fully connected layer un-optimized implementation. -void ComputeFullyConnectedLayerOutput( - int input_size, - int output_size, - rtc::ArrayView input, - rtc::ArrayView bias, - rtc::ArrayView weights, - rtc::FunctionView activation_function, - rtc::ArrayView output) { - RTC_DCHECK_EQ(input.size(), input_size); - RTC_DCHECK_EQ(bias.size(), output_size); - RTC_DCHECK_EQ(weights.size(), input_size * output_size); - for (int o = 0; o < output_size; ++o) { - output[o] = bias[o]; - // TODO(bugs.chromium.org/9076): Benchmark how different layouts for - // |weights_| change the performance across different platforms. 
- for (int i = 0; i < input_size; ++i) { - output[o] += input[i] * weights[o * input_size + i]; - } - output[o] = activation_function(output[o]); - } -} - -#if defined(WEBRTC_ARCH_X86_FAMILY) -// Fully connected layer SSE2 implementation. -void ComputeFullyConnectedLayerOutputSse2( - int input_size, - int output_size, - rtc::ArrayView input, - rtc::ArrayView bias, - rtc::ArrayView weights, - rtc::FunctionView activation_function, - rtc::ArrayView output) { - RTC_DCHECK_EQ(input.size(), input_size); - RTC_DCHECK_EQ(bias.size(), output_size); - RTC_DCHECK_EQ(weights.size(), input_size * output_size); - const int input_size_by_4 = input_size >> 2; - const int offset = input_size & ~3; - __m128 sum_wx_128; - const float* v = reinterpret_cast(&sum_wx_128); - for (int o = 0; o < output_size; ++o) { - // Perform 128 bit vector operations. - sum_wx_128 = _mm_set1_ps(0); - const float* x_p = input.data(); - const float* w_p = weights.data() + o * input_size; - for (int i = 0; i < input_size_by_4; ++i, x_p += 4, w_p += 4) { - sum_wx_128 = _mm_add_ps(sum_wx_128, - _mm_mul_ps(_mm_loadu_ps(x_p), _mm_loadu_ps(w_p))); - } - // Perform non-vector operations for any remaining items, sum up bias term - // and results from the vectorized code, and apply the activation function. 
- output[o] = activation_function( - std::inner_product(input.begin() + offset, input.end(), - weights.begin() + o * input_size + offset, - bias[o] + v[0] + v[1] + v[2] + v[3])); - } -} -#endif +using ::rnnoise::kOutputDenseBias; +using ::rnnoise::kOutputDenseWeights; +using ::rnnoise::kOutputLayerOutputSize; +static_assert(kOutputLayerOutputSize <= kFullyConnectedLayerMaxUnits, ""); } // namespace -FullyConnectedLayer::FullyConnectedLayer( - const int input_size, - const int output_size, - const rtc::ArrayView bias, - const rtc::ArrayView weights, - rtc::FunctionView activation_function, - Optimization optimization) - : input_size_(input_size), - output_size_(output_size), - bias_(GetScaledParams(bias)), - weights_(GetPreprocessedFcWeights(weights, output_size)), - activation_function_(activation_function), - optimization_(optimization) { - RTC_DCHECK_LE(output_size_, kFullyConnectedLayersMaxUnits) - << "Static over-allocation of fully-connected layers output vectors is " - "not sufficient."; - RTC_DCHECK_EQ(output_size_, bias_.size()) - << "Mismatching output size and bias terms array size."; - RTC_DCHECK_EQ(input_size_ * output_size_, weights_.size()) - << "Mismatching input-output size and weight coefficients array size."; -} - -FullyConnectedLayer::~FullyConnectedLayer() = default; - -rtc::ArrayView FullyConnectedLayer::GetOutput() const { - return rtc::ArrayView(output_.data(), output_size_); -} - -void FullyConnectedLayer::ComputeOutput(rtc::ArrayView input) { - switch (optimization_) { -#if defined(WEBRTC_ARCH_X86_FAMILY) - case Optimization::kSse2: - ComputeFullyConnectedLayerOutputSse2(input_size_, output_size_, input, - bias_, weights_, - activation_function_, output_); - break; -#endif -#if defined(WEBRTC_HAS_NEON) - case Optimization::kNeon: - // TODO(bugs.chromium.org/10480): Handle Optimization::kNeon. 
- ComputeFullyConnectedLayerOutput(input_size_, output_size_, input, bias_, - weights_, activation_function_, output_); - break; -#endif - default: - ComputeFullyConnectedLayerOutput(input_size_, output_size_, input, bias_, - weights_, activation_function_, output_); - } -} - -GatedRecurrentLayer::GatedRecurrentLayer( - const int input_size, - const int output_size, - const rtc::ArrayView bias, - const rtc::ArrayView weights, - const rtc::ArrayView recurrent_weights, - Optimization optimization) - : input_size_(input_size), - output_size_(output_size), - bias_(GetPreprocessedGruTensor(bias, output_size)), - weights_(GetPreprocessedGruTensor(weights, output_size)), - recurrent_weights_( - GetPreprocessedGruTensor(recurrent_weights, output_size)), - optimization_(optimization) { - RTC_DCHECK_LE(output_size_, kRecurrentLayersMaxUnits) - << "Static over-allocation of recurrent layers state vectors is not " - "sufficient."; - RTC_DCHECK_EQ(kNumGruGates * output_size_, bias_.size()) - << "Mismatching output size and bias terms array size."; - RTC_DCHECK_EQ(kNumGruGates * input_size_ * output_size_, weights_.size()) - << "Mismatching input-output size and weight coefficients array size."; - RTC_DCHECK_EQ(kNumGruGates * output_size_ * output_size_, - recurrent_weights_.size()) - << "Mismatching input-output size and recurrent weight coefficients array" - " size."; - Reset(); -} - -GatedRecurrentLayer::~GatedRecurrentLayer() = default; - -rtc::ArrayView GatedRecurrentLayer::GetOutput() const { - return rtc::ArrayView(state_.data(), output_size_); -} - -void GatedRecurrentLayer::Reset() { - state_.fill(0.f); -} - -void GatedRecurrentLayer::ComputeOutput(rtc::ArrayView input) { - switch (optimization_) { -#if defined(WEBRTC_ARCH_X86_FAMILY) - case Optimization::kSse2: - // TODO(bugs.chromium.org/10480): Handle Optimization::kSse2. 
- ComputeGruLayerOutput(input_size_, output_size_, input, weights_, - recurrent_weights_, bias_, state_); - break; -#endif -#if defined(WEBRTC_HAS_NEON) - case Optimization::kNeon: - // TODO(bugs.chromium.org/10480): Handle Optimization::kNeon. - ComputeGruLayerOutput(input_size_, output_size_, input, weights_, - recurrent_weights_, bias_, state_); - break; -#endif - default: - ComputeGruLayerOutput(input_size_, output_size_, input, weights_, - recurrent_weights_, bias_, state_); - } -} - -RnnBasedVad::RnnBasedVad() - : input_layer_(kInputLayerInputSize, - kInputLayerOutputSize, - kInputDenseBias, - kInputDenseWeights, - TansigApproximated, - DetectOptimization()), - hidden_layer_(kInputLayerOutputSize, - kHiddenLayerOutputSize, - kHiddenGruBias, - kHiddenGruWeights, - kHiddenGruRecurrentWeights, - DetectOptimization()), - output_layer_(kHiddenLayerOutputSize, - kOutputLayerOutputSize, - kOutputDenseBias, - kOutputDenseWeights, - SigmoidApproximated, - DetectOptimization()) { +RnnVad::RnnVad(const AvailableCpuFeatures& cpu_features) + : input_(kInputLayerInputSize, + kInputLayerOutputSize, + kInputDenseBias, + kInputDenseWeights, + ActivationFunction::kTansigApproximated, + cpu_features, + /*layer_name=*/"FC1"), + hidden_(kInputLayerOutputSize, + kHiddenLayerOutputSize, + kHiddenGruBias, + kHiddenGruWeights, + kHiddenGruRecurrentWeights, + cpu_features, + /*layer_name=*/"GRU1"), + output_(kHiddenLayerOutputSize, + kOutputLayerOutputSize, + kOutputDenseBias, + kOutputDenseWeights, + ActivationFunction::kSigmoidApproximated, + // The output layer is just 24x1. The unoptimized code is faster. + NoAvailableCpuFeatures(), + /*layer_name=*/"FC2") { // Input-output chaining size checks. 
- RTC_DCHECK_EQ(input_layer_.output_size(), hidden_layer_.input_size()) + RTC_DCHECK_EQ(input_.size(), hidden_.input_size()) << "The input and the hidden layers sizes do not match."; - RTC_DCHECK_EQ(hidden_layer_.output_size(), output_layer_.input_size()) + RTC_DCHECK_EQ(hidden_.size(), output_.input_size()) << "The hidden and the output layers sizes do not match."; } -RnnBasedVad::~RnnBasedVad() = default; +RnnVad::~RnnVad() = default; -void RnnBasedVad::Reset() { - hidden_layer_.Reset(); +void RnnVad::Reset() { + hidden_.Reset(); } -float RnnBasedVad::ComputeVadProbability( +float RnnVad::ComputeVadProbability( rtc::ArrayView feature_vector, bool is_silence) { if (is_silence) { Reset(); return 0.f; } - input_layer_.ComputeOutput(feature_vector); - hidden_layer_.ComputeOutput(input_layer_.GetOutput()); - output_layer_.ComputeOutput(hidden_layer_.GetOutput()); - const auto vad_output = output_layer_.GetOutput(); - return vad_output[0]; + input_.ComputeOutput(feature_vector); + hidden_.ComputeOutput(input_); + output_.ComputeOutput(hidden_); + RTC_DCHECK_EQ(output_.size(), 1); + return output_.data()[0]; } } // namespace rnn_vad diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.h index 5b44f5304..3148f1b3f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.h @@ -18,106 +18,33 @@ #include #include "api/array_view.h" -#include "api/function_view.h" +#include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" -#include "rtc_base/system/arch.h" +#include "modules/audio_processing/agc2/rnn_vad/rnn_fc.h" +#include "modules/audio_processing/agc2/rnn_vad/rnn_gru.h" namespace webrtc { namespace rnn_vad { -// Maximum number of units for a fully-connected layer. 
This value is used to -// over-allocate space for fully-connected layers output vectors (implemented as -// std::array). The value should equal the number of units of the largest -// fully-connected layer. -constexpr int kFullyConnectedLayersMaxUnits = 24; - -// Maximum number of units for a recurrent layer. This value is used to -// over-allocate space for recurrent layers state vectors (implemented as -// std::array). The value should equal the number of units of the largest -// recurrent layer. -constexpr int kRecurrentLayersMaxUnits = 24; - -// Fully-connected layer. -class FullyConnectedLayer { +// Recurrent network with hard-coded architecture and weights for voice activity +// detection. +class RnnVad { public: - FullyConnectedLayer(int input_size, - int output_size, - rtc::ArrayView bias, - rtc::ArrayView weights, - rtc::FunctionView activation_function, - Optimization optimization); - FullyConnectedLayer(const FullyConnectedLayer&) = delete; - FullyConnectedLayer& operator=(const FullyConnectedLayer&) = delete; - ~FullyConnectedLayer(); - int input_size() const { return input_size_; } - int output_size() const { return output_size_; } - Optimization optimization() const { return optimization_; } - rtc::ArrayView GetOutput() const; - // Computes the fully-connected layer output. - void ComputeOutput(rtc::ArrayView input); - - private: - const int input_size_; - const int output_size_; - const std::vector bias_; - const std::vector weights_; - rtc::FunctionView activation_function_; - // The output vector of a recurrent layer has length equal to |output_size_|. - // However, for efficiency, over-allocation is used. - std::array output_; - const Optimization optimization_; -}; - -// Recurrent layer with gated recurrent units (GRUs) with sigmoid and ReLU as -// activation functions for the update/reset and output gates respectively. 
-class GatedRecurrentLayer { - public: - GatedRecurrentLayer(int input_size, - int output_size, - rtc::ArrayView bias, - rtc::ArrayView weights, - rtc::ArrayView recurrent_weights, - Optimization optimization); - GatedRecurrentLayer(const GatedRecurrentLayer&) = delete; - GatedRecurrentLayer& operator=(const GatedRecurrentLayer&) = delete; - ~GatedRecurrentLayer(); - int input_size() const { return input_size_; } - int output_size() const { return output_size_; } - Optimization optimization() const { return optimization_; } - rtc::ArrayView GetOutput() const; + explicit RnnVad(const AvailableCpuFeatures& cpu_features); + RnnVad(const RnnVad&) = delete; + RnnVad& operator=(const RnnVad&) = delete; + ~RnnVad(); void Reset(); - // Computes the recurrent layer output and updates the status. - void ComputeOutput(rtc::ArrayView input); - - private: - const int input_size_; - const int output_size_; - const std::vector bias_; - const std::vector weights_; - const std::vector recurrent_weights_; - // The state vector of a recurrent layer has length equal to |output_size_|. - // However, to avoid dynamic allocation, over-allocation is used. - std::array state_; - const Optimization optimization_; -}; - -// Recurrent network based VAD. -class RnnBasedVad { - public: - RnnBasedVad(); - RnnBasedVad(const RnnBasedVad&) = delete; - RnnBasedVad& operator=(const RnnBasedVad&) = delete; - ~RnnBasedVad(); - void Reset(); - // Compute and returns the probability of voice (range: [0.0, 1.0]). + // Observes `feature_vector` and `is_silence`, updates the RNN and returns the + // current voice probability. 
float ComputeVadProbability( rtc::ArrayView feature_vector, bool is_silence); private: - FullyConnectedLayer input_layer_; - GatedRecurrentLayer hidden_layer_; - FullyConnectedLayer output_layer_; + FullyConnectedLayer input_; + GatedRecurrentLayer hidden_; + FullyConnectedLayer output_; }; } // namespace rnn_vad diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc new file mode 100644 index 000000000..b04807f19 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "modules/audio_processing/agc2/rnn_vad/rnn_fc.h" +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "third_party/rnnoise/src/rnn_activations.h" +#include "third_party/rnnoise/src/rnn_vad_weights.h" + +namespace webrtc { +namespace rnn_vad { +namespace { + +std::vector GetScaledParams(rtc::ArrayView params) { + std::vector scaled_params(params.size()); + std::transform(params.begin(), params.end(), scaled_params.begin(), + [](int8_t x) -> float { + return ::rnnoise::kWeightsScale * static_cast(x); + }); + return scaled_params; +} + +// TODO(bugs.chromium.org/10480): Hard-code optimized layout and remove this +// function to improve setup time. +// Casts and scales |weights| and re-arranges the layout. 
+std::vector PreprocessWeights(rtc::ArrayView weights, + int output_size) { + if (output_size == 1) { + return GetScaledParams(weights); + } + // Transpose, scale and cast. + const int input_size = rtc::CheckedDivExact( + rtc::dchecked_cast(weights.size()), output_size); + std::vector w(weights.size()); + for (int o = 0; o < output_size; ++o) { + for (int i = 0; i < input_size; ++i) { + w[o * input_size + i] = rnnoise::kWeightsScale * + static_cast(weights[i * output_size + o]); + } + } + return w; +} + +rtc::FunctionView GetActivationFunction( + ActivationFunction activation_function) { + switch (activation_function) { + case ActivationFunction::kTansigApproximated: + return ::rnnoise::TansigApproximated; + break; + case ActivationFunction::kSigmoidApproximated: + return ::rnnoise::SigmoidApproximated; + break; + } +} + +} // namespace + +FullyConnectedLayer::FullyConnectedLayer( + const int input_size, + const int output_size, + const rtc::ArrayView bias, + const rtc::ArrayView weights, + ActivationFunction activation_function, + const AvailableCpuFeatures& cpu_features, + absl::string_view layer_name) + : input_size_(input_size), + output_size_(output_size), + bias_(GetScaledParams(bias)), + weights_(PreprocessWeights(weights, output_size)), + vector_math_(cpu_features), + activation_function_(GetActivationFunction(activation_function)) { + RTC_DCHECK_LE(output_size_, kFullyConnectedLayerMaxUnits) + << "Insufficient FC layer over-allocation (" << layer_name << ")."; + RTC_DCHECK_EQ(output_size_, bias_.size()) + << "Mismatching output size and bias terms array size (" << layer_name + << ")."; + RTC_DCHECK_EQ(input_size_ * output_size_, weights_.size()) + << "Mismatching input-output size and weight coefficients array size (" + << layer_name << ")."; +} + +FullyConnectedLayer::~FullyConnectedLayer() = default; + +void FullyConnectedLayer::ComputeOutput(rtc::ArrayView input) { + RTC_DCHECK_EQ(input.size(), input_size_); + rtc::ArrayView weights(weights_); + for 
(int o = 0; o < output_size_; ++o) { + output_[o] = activation_function_( + bias_[o] + vector_math_.DotProduct( + input, weights.subview(o * input_size_, input_size_))); + } +} + +} // namespace rnn_vad +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.h new file mode 100644 index 000000000..d23957a6f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.h @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_RNN_FC_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_RNN_FC_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/function_view.h" +#include "modules/audio_processing/agc2/cpu_features.h" +#include "modules/audio_processing/agc2/rnn_vad/vector_math.h" + +namespace webrtc { +namespace rnn_vad { + +// Activation function for a neural network cell. +enum class ActivationFunction { kTansigApproximated, kSigmoidApproximated }; + +// Maximum number of units for an FC layer. +constexpr int kFullyConnectedLayerMaxUnits = 24; + +// Fully-connected layer with a custom activation function which owns the output +// buffer. +class FullyConnectedLayer { + public: + // Ctor. `output_size` cannot be greater than `kFullyConnectedLayerMaxUnits`. 
+ FullyConnectedLayer(int input_size, + int output_size, + rtc::ArrayView bias, + rtc::ArrayView weights, + ActivationFunction activation_function, + const AvailableCpuFeatures& cpu_features, + absl::string_view layer_name); + FullyConnectedLayer(const FullyConnectedLayer&) = delete; + FullyConnectedLayer& operator=(const FullyConnectedLayer&) = delete; + ~FullyConnectedLayer(); + + // Returns the size of the input vector. + int input_size() const { return input_size_; } + // Returns the pointer to the first element of the output buffer. + const float* data() const { return output_.data(); } + // Returns the size of the output buffer. + int size() const { return output_size_; } + + // Computes the fully-connected layer output. + void ComputeOutput(rtc::ArrayView input); + + private: + const int input_size_; + const int output_size_; + const std::vector bias_; + const std::vector weights_; + const VectorMath vector_math_; + rtc::FunctionView activation_function_; + // Over-allocated array with size equal to `output_size_`. + std::array output_; +}; + +} // namespace rnn_vad +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_RNN_FC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc new file mode 100644 index 000000000..482016e8d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/agc2/rnn_vad/rnn_gru.h" + +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "third_party/rnnoise/src/rnn_activations.h" +#include "third_party/rnnoise/src/rnn_vad_weights.h" + +namespace webrtc { +namespace rnn_vad { +namespace { + +constexpr int kNumGruGates = 3; // Update, reset, output. + +std::vector PreprocessGruTensor(rtc::ArrayView tensor_src, + int output_size) { + // Transpose, cast and scale. + // |n| is the size of the first dimension of the 3-dim tensor |weights|. + const int n = rtc::CheckedDivExact(rtc::dchecked_cast(tensor_src.size()), + output_size * kNumGruGates); + const int stride_src = kNumGruGates * output_size; + const int stride_dst = n * output_size; + std::vector tensor_dst(tensor_src.size()); + for (int g = 0; g < kNumGruGates; ++g) { + for (int o = 0; o < output_size; ++o) { + for (int i = 0; i < n; ++i) { + tensor_dst[g * stride_dst + o * n + i] = + ::rnnoise::kWeightsScale * + static_cast( + tensor_src[i * stride_src + g * output_size + o]); + } + } + } + return tensor_dst; +} + +// Computes the output for the update or the reset gate. +// Operation: `g = sigmoid(W^T∙i + R^T∙s + b)` where +// - `g`: output gate vector +// - `W`: weights matrix +// - `i`: input vector +// - `R`: recurrent weights matrix +// - `s`: state gate vector +// - `b`: bias vector +void ComputeUpdateResetGate(int input_size, + int output_size, + const VectorMath& vector_math, + rtc::ArrayView input, + rtc::ArrayView state, + rtc::ArrayView bias, + rtc::ArrayView weights, + rtc::ArrayView recurrent_weights, + rtc::ArrayView gate) { + RTC_DCHECK_EQ(input.size(), input_size); + RTC_DCHECK_EQ(state.size(), output_size); + RTC_DCHECK_EQ(bias.size(), output_size); + RTC_DCHECK_EQ(weights.size(), input_size * output_size); + RTC_DCHECK_EQ(recurrent_weights.size(), output_size * output_size); + RTC_DCHECK_GE(gate.size(), output_size); // `gate` is over-allocated. 
+ for (int o = 0; o < output_size; ++o) { + float x = bias[o]; + x += vector_math.DotProduct(input, + weights.subview(o * input_size, input_size)); + x += vector_math.DotProduct( + state, recurrent_weights.subview(o * output_size, output_size)); + gate[o] = ::rnnoise::SigmoidApproximated(x); + } +} + +// Computes the output for the state gate. +// Operation: `s' = u .* s + (1 - u) .* ReLU(W^T∙i + R^T∙(s .* r) + b)` where +// - `s'`: output state gate vector +// - `s`: previous state gate vector +// - `u`: update gate vector +// - `W`: weights matrix +// - `i`: input vector +// - `R`: recurrent weights matrix +// - `r`: reset gate vector +// - `b`: bias vector +// - `.*` element-wise product +void ComputeStateGate(int input_size, + int output_size, + const VectorMath& vector_math, + rtc::ArrayView input, + rtc::ArrayView update, + rtc::ArrayView reset, + rtc::ArrayView bias, + rtc::ArrayView weights, + rtc::ArrayView recurrent_weights, + rtc::ArrayView state) { + RTC_DCHECK_EQ(input.size(), input_size); + RTC_DCHECK_GE(update.size(), output_size); // `update` is over-allocated. + RTC_DCHECK_GE(reset.size(), output_size); // `reset` is over-allocated. 
+ RTC_DCHECK_EQ(bias.size(), output_size); + RTC_DCHECK_EQ(weights.size(), input_size * output_size); + RTC_DCHECK_EQ(recurrent_weights.size(), output_size * output_size); + RTC_DCHECK_EQ(state.size(), output_size); + std::array reset_x_state; + for (int o = 0; o < output_size; ++o) { + reset_x_state[o] = state[o] * reset[o]; + } + for (int o = 0; o < output_size; ++o) { + float x = bias[o]; + x += vector_math.DotProduct(input, + weights.subview(o * input_size, input_size)); + x += vector_math.DotProduct( + {reset_x_state.data(), static_cast(output_size)}, + recurrent_weights.subview(o * output_size, output_size)); + state[o] = update[o] * state[o] + (1.f - update[o]) * std::max(0.f, x); + } +} + +} // namespace + +GatedRecurrentLayer::GatedRecurrentLayer( + const int input_size, + const int output_size, + const rtc::ArrayView bias, + const rtc::ArrayView weights, + const rtc::ArrayView recurrent_weights, + const AvailableCpuFeatures& cpu_features, + absl::string_view layer_name) + : input_size_(input_size), + output_size_(output_size), + bias_(PreprocessGruTensor(bias, output_size)), + weights_(PreprocessGruTensor(weights, output_size)), + recurrent_weights_(PreprocessGruTensor(recurrent_weights, output_size)), + vector_math_(cpu_features) { + RTC_DCHECK_LE(output_size_, kGruLayerMaxUnits) + << "Insufficient GRU layer over-allocation (" << layer_name << ")."; + RTC_DCHECK_EQ(kNumGruGates * output_size_, bias_.size()) + << "Mismatching output size and bias terms array size (" << layer_name + << ")."; + RTC_DCHECK_EQ(kNumGruGates * input_size_ * output_size_, weights_.size()) + << "Mismatching input-output size and weight coefficients array size (" + << layer_name << ")."; + RTC_DCHECK_EQ(kNumGruGates * output_size_ * output_size_, + recurrent_weights_.size()) + << "Mismatching input-output size and recurrent weight coefficients array" + " size (" + << layer_name << ")."; + Reset(); +} + +GatedRecurrentLayer::~GatedRecurrentLayer() = default; + +void 
GatedRecurrentLayer::Reset() { + state_.fill(0.f); +} + +void GatedRecurrentLayer::ComputeOutput(rtc::ArrayView input) { + RTC_DCHECK_EQ(input.size(), input_size_); + + // The tensors below are organized as a sequence of flattened tensors for the + // `update`, `reset` and `state` gates. + rtc::ArrayView bias(bias_); + rtc::ArrayView weights(weights_); + rtc::ArrayView recurrent_weights(recurrent_weights_); + // Strides to access to the flattened tensors for a specific gate. + const int stride_weights = input_size_ * output_size_; + const int stride_recurrent_weights = output_size_ * output_size_; + + rtc::ArrayView state(state_.data(), output_size_); + + // Update gate. + std::array update; + ComputeUpdateResetGate( + input_size_, output_size_, vector_math_, input, state, + bias.subview(0, output_size_), weights.subview(0, stride_weights), + recurrent_weights.subview(0, stride_recurrent_weights), update); + // Reset gate. + std::array reset; + ComputeUpdateResetGate(input_size_, output_size_, vector_math_, input, state, + bias.subview(output_size_, output_size_), + weights.subview(stride_weights, stride_weights), + recurrent_weights.subview(stride_recurrent_weights, + stride_recurrent_weights), + reset); + // State gate. + ComputeStateGate(input_size_, output_size_, vector_math_, input, update, + reset, bias.subview(2 * output_size_, output_size_), + weights.subview(2 * stride_weights, stride_weights), + recurrent_weights.subview(2 * stride_recurrent_weights, + stride_recurrent_weights), + state); +} + +} // namespace rnn_vad +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.h new file mode 100644 index 000000000..3407dfcdf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_RNN_GRU_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_RNN_GRU_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "modules/audio_processing/agc2/cpu_features.h" +#include "modules/audio_processing/agc2/rnn_vad/vector_math.h" + +namespace webrtc { +namespace rnn_vad { + +// Maximum number of units for a GRU layer. +constexpr int kGruLayerMaxUnits = 24; + +// Recurrent layer with gated recurrent units (GRUs) with sigmoid and ReLU as +// activation functions for the update/reset and output gates respectively. +class GatedRecurrentLayer { + public: + // Ctor. `output_size` cannot be greater than `kGruLayerMaxUnits`. + GatedRecurrentLayer(int input_size, + int output_size, + rtc::ArrayView bias, + rtc::ArrayView weights, + rtc::ArrayView recurrent_weights, + const AvailableCpuFeatures& cpu_features, + absl::string_view layer_name); + GatedRecurrentLayer(const GatedRecurrentLayer&) = delete; + GatedRecurrentLayer& operator=(const GatedRecurrentLayer&) = delete; + ~GatedRecurrentLayer(); + + // Returns the size of the input vector. + int input_size() const { return input_size_; } + // Returns the pointer to the first element of the output buffer. + const float* data() const { return state_.data(); } + // Returns the size of the output buffer. + int size() const { return output_size_; } + + // Resets the GRU state. + void Reset(); + // Computes the recurrent layer output and updates the status. 
+ void ComputeOutput(rtc::ArrayView input); + + private: + const int input_size_; + const int output_size_; + const std::vector bias_; + const std::vector weights_; + const std::vector recurrent_weights_; + const VectorMath vector_math_; + // Over-allocated array with size equal to `output_size_`. + std::array state_; +}; + +} // namespace rnn_vad +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_RNN_GRU_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc index 8b12b60c5..a0e1242eb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc @@ -16,6 +16,7 @@ #include "absl/flags/parse.h" #include "common_audio/resampler/push_sinc_resampler.h" #include "common_audio/wav_file.h" +#include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" #include "modules/audio_processing/agc2/rnn_vad/features_extraction.h" #include "modules/audio_processing/agc2/rnn_vad/rnn.h" @@ -63,9 +64,10 @@ int main(int argc, char* argv[]) { samples_10ms.resize(frame_size_10ms); std::array samples_10ms_24kHz; PushSincResampler resampler(frame_size_10ms, kFrameSize10ms24kHz); - FeaturesExtractor features_extractor; + const AvailableCpuFeatures cpu_features = GetAvailableCpuFeatures(); + FeaturesExtractor features_extractor(cpu_features); std::array feature_vector; - RnnBasedVad rnn_vad; + RnnVad rnn_vad(cpu_features); // Compute VAD probabilities. 
while (true) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc index 24bbf13e3..b8ca9c366 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.cc @@ -11,22 +11,58 @@ #include "modules/audio_processing/agc2/rnn_vad/test_utils.h" #include +#include #include +#include +#include #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_compare.h" -#include "rtc_base/system/arch.h" -#include "system_wrappers/include/cpu_features_wrapper.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" namespace webrtc { namespace rnn_vad { -namespace test { namespace { -using ReaderPairType = - std::pair>, const int>; +// File reader for binary files that contain a sequence of values with +// arithmetic type `T`. The values of type `T` that are read are cast to float. 
+template +class FloatFileReader : public FileReader { + public: + static_assert(std::is_arithmetic::value, ""); + FloatFileReader(const std::string& filename) + : is_(filename, std::ios::binary | std::ios::ate), + size_(is_.tellg() / sizeof(T)) { + RTC_CHECK(is_); + SeekBeginning(); + } + FloatFileReader(const FloatFileReader&) = delete; + FloatFileReader& operator=(const FloatFileReader&) = delete; + ~FloatFileReader() = default; + + int size() const override { return size_; } + bool ReadChunk(rtc::ArrayView dst) override { + const std::streamsize bytes_to_read = dst.size() * sizeof(T); + if (std::is_same::value) { + is_.read(reinterpret_cast(dst.data()), bytes_to_read); + } else { + buffer_.resize(dst.size()); + is_.read(reinterpret_cast(buffer_.data()), bytes_to_read); + std::transform(buffer_.begin(), buffer_.end(), dst.begin(), + [](const T& v) -> float { return static_cast(v); }); + } + return is_.gcount() == bytes_to_read; + } + bool ReadValue(float& dst) override { return ReadChunk({&dst, 1}); } + void SeekForward(int hop) override { is_.seekg(hop * sizeof(T), is_.cur); } + void SeekBeginning() override { is_.seekg(0, is_.beg); } + + private: + std::ifstream is_; + const int size_; + std::vector buffer_; +}; } // namespace @@ -51,85 +87,55 @@ void ExpectNearAbsolute(rtc::ArrayView expected, } } -std::pair>, const int> -CreatePcmSamplesReader(const int frame_length) { - auto ptr = std::make_unique>( - test::ResourcePath("audio_processing/agc2/rnn_vad/samples", "pcm"), - frame_length); - // The last incomplete frame is ignored. 
- return {std::move(ptr), ptr->data_length() / frame_length}; +std::unique_ptr CreatePcmSamplesReader() { + return std::make_unique>( + /*filename=*/test::ResourcePath("audio_processing/agc2/rnn_vad/samples", + "pcm")); } -ReaderPairType CreatePitchBuffer24kHzReader() { - constexpr int cols = 864; - auto ptr = std::make_unique>( - ResourcePath("audio_processing/agc2/rnn_vad/pitch_buf_24k", "dat"), cols); - return {std::move(ptr), rtc::CheckedDivExact(ptr->data_length(), cols)}; +ChunksFileReader CreatePitchBuffer24kHzReader() { + auto reader = std::make_unique>( + /*filename=*/test::ResourcePath( + "audio_processing/agc2/rnn_vad/pitch_buf_24k", "dat")); + const int num_chunks = rtc::CheckedDivExact(reader->size(), kBufSize24kHz); + return {/*chunk_size=*/kBufSize24kHz, num_chunks, std::move(reader)}; } -ReaderPairType CreateLpResidualAndPitchPeriodGainReader() { - constexpr int num_lp_residual_coeffs = 864; - auto ptr = std::make_unique>( - ResourcePath("audio_processing/agc2/rnn_vad/pitch_lp_res", "dat"), - num_lp_residual_coeffs); - return {std::move(ptr), - rtc::CheckedDivExact(ptr->data_length(), 2 + num_lp_residual_coeffs)}; +ChunksFileReader CreateLpResidualAndPitchInfoReader() { + constexpr int kPitchInfoSize = 2; // Pitch period and strength. 
+ constexpr int kChunkSize = kBufSize24kHz + kPitchInfoSize; + auto reader = std::make_unique>( + /*filename=*/test::ResourcePath( + "audio_processing/agc2/rnn_vad/pitch_lp_res", "dat")); + const int num_chunks = rtc::CheckedDivExact(reader->size(), kChunkSize); + return {kChunkSize, num_chunks, std::move(reader)}; } -ReaderPairType CreateVadProbsReader() { - auto ptr = std::make_unique>( - test::ResourcePath("audio_processing/agc2/rnn_vad/vad_prob", "dat")); - return {std::move(ptr), ptr->data_length()}; +std::unique_ptr CreateGruInputReader() { + return std::make_unique>( + /*filename=*/test::ResourcePath("audio_processing/agc2/rnn_vad/gru_in", + "dat")); +} + +std::unique_ptr CreateVadProbsReader() { + return std::make_unique>( + /*filename=*/test::ResourcePath("audio_processing/agc2/rnn_vad/vad_prob", + "dat")); } PitchTestData::PitchTestData() { - BinaryFileReader test_data_reader( - ResourcePath("audio_processing/agc2/rnn_vad/pitch_search_int", "dat"), - 1396); - test_data_reader.ReadChunk(test_data_); + FloatFileReader reader( + /*filename=*/ResourcePath( + "audio_processing/agc2/rnn_vad/pitch_search_int", "dat")); + reader.ReadChunk(pitch_buffer_24k_); + reader.ReadChunk(square_energies_24k_); + reader.ReadChunk(auto_correlation_12k_); // Reverse the order of the squared energy values. // Required after the WebRTC CL 191703 which switched to forward computation. 
- std::reverse(test_data_.begin() + kBufSize24kHz, - test_data_.begin() + kBufSize24kHz + kNumPitchBufSquareEnergies); + std::reverse(square_energies_24k_.begin(), square_energies_24k_.end()); } PitchTestData::~PitchTestData() = default; -rtc::ArrayView PitchTestData::GetPitchBufView() - const { - return {test_data_.data(), kBufSize24kHz}; -} - -rtc::ArrayView -PitchTestData::GetPitchBufSquareEnergiesView() const { - return {test_data_.data() + kBufSize24kHz, kNumPitchBufSquareEnergies}; -} - -rtc::ArrayView -PitchTestData::GetPitchBufAutoCorrCoeffsView() const { - return {test_data_.data() + kBufSize24kHz + kNumPitchBufSquareEnergies, - kNumPitchBufAutoCorrCoeffs}; -} - -bool IsOptimizationAvailable(Optimization optimization) { - switch (optimization) { - case Optimization::kSse2: -#if defined(WEBRTC_ARCH_X86_FAMILY) - return GetCPUInfo(kSSE2) != 0; -#else - return false; -#endif - case Optimization::kNeon: -#if defined(WEBRTC_HAS_NEON) - return true; -#else - return false; -#endif - case Optimization::kNone: - return true; - } -} - -} // namespace test } // namespace rnn_vad } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h index 23e642be8..e366e1837 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/test_utils.h @@ -11,15 +11,10 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_TEST_UTILS_H_ #define MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_TEST_UTILS_H_ -#include #include #include -#include #include #include -#include -#include -#include #include "api/array_view.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" @@ -28,7 +23,6 @@ namespace webrtc { namespace rnn_vad { -namespace test { constexpr float kFloatMin = std::numeric_limits::min(); @@ -43,98 +37,51 @@ void 
ExpectNearAbsolute(rtc::ArrayView expected, rtc::ArrayView computed, float tolerance); -// Reader for binary files consisting of an arbitrary long sequence of elements -// having type T. It is possible to read and cast to another type D at once. -template -class BinaryFileReader { +// File reader interface. +class FileReader { public: - BinaryFileReader(const std::string& file_path, int chunk_size = 0) - : is_(file_path, std::ios::binary | std::ios::ate), - data_length_(is_.tellg() / sizeof(T)), - chunk_size_(chunk_size) { - RTC_CHECK(is_); - SeekBeginning(); - buf_.resize(chunk_size_); - } - BinaryFileReader(const BinaryFileReader&) = delete; - BinaryFileReader& operator=(const BinaryFileReader&) = delete; - ~BinaryFileReader() = default; - int data_length() const { return data_length_; } - bool ReadValue(D* dst) { - if (std::is_same::value) { - is_.read(reinterpret_cast(dst), sizeof(T)); - } else { - T v; - is_.read(reinterpret_cast(&v), sizeof(T)); - *dst = static_cast(v); - } - return is_.gcount() == sizeof(T); - } - // If |chunk_size| was specified in the ctor, it will check that the size of - // |dst| equals |chunk_size|. - bool ReadChunk(rtc::ArrayView dst) { - RTC_DCHECK((chunk_size_ == 0) || rtc::SafeEq(chunk_size_, dst.size())); - const std::streamsize bytes_to_read = dst.size() * sizeof(T); - if (std::is_same::value) { - is_.read(reinterpret_cast(dst.data()), bytes_to_read); - } else { - is_.read(reinterpret_cast(buf_.data()), bytes_to_read); - std::transform(buf_.begin(), buf_.end(), dst.begin(), - [](const T& v) -> D { return static_cast(v); }); - } - return is_.gcount() == bytes_to_read; - } - void SeekForward(int items) { is_.seekg(items * sizeof(T), is_.cur); } - void SeekBeginning() { is_.seekg(0, is_.beg); } - - private: - std::ifstream is_; - const int data_length_; - const int chunk_size_; - std::vector buf_; + virtual ~FileReader() = default; + // Number of values in the file. 
+ virtual int size() const = 0; + // Reads `dst.size()` float values into `dst`, advances the internal file + // position according to the number of read bytes and returns true if the + // values are correctly read. If the number of remaining bytes in the file is + // not sufficient to read `dst.size()` float values, `dst` is partially + // modified and false is returned. + virtual bool ReadChunk(rtc::ArrayView dst) = 0; + // Reads a single float value, advances the internal file position according + // to the number of read bytes and returns true if the value is correctly + // read. If the number of remaining bytes in the file is not sufficient to + // read one float, `dst` is not modified and false is returned. + virtual bool ReadValue(float& dst) = 0; + // Advances the internal file position by `hop` float values. + virtual void SeekForward(int hop) = 0; + // Resets the internal file position to BOF. + virtual void SeekBeginning() = 0; }; -// Writer for binary files. -template -class BinaryFileWriter { - public: - explicit BinaryFileWriter(const std::string& file_path) - : os_(file_path, std::ios::binary) {} - BinaryFileWriter(const BinaryFileWriter&) = delete; - BinaryFileWriter& operator=(const BinaryFileWriter&) = delete; - ~BinaryFileWriter() = default; - static_assert(std::is_arithmetic::value, ""); - void WriteChunk(rtc::ArrayView value) { - const std::streamsize bytes_to_write = value.size() * sizeof(T); - os_.write(reinterpret_cast(value.data()), bytes_to_write); - } - - private: - std::ofstream os_; +// File reader for files that contain `num_chunks` chunks with size equal to +// `chunk_size`. +struct ChunksFileReader { + const int chunk_size; + const int num_chunks; + std::unique_ptr reader; }; -// Factories for resource file readers. -// The functions below return a pair where the first item is a reader unique -// pointer and the second the number of chunks that can be read from the file. 
-// Creates a reader for the PCM samples that casts from S16 to float and reads -// chunks with length |frame_length|. -std::pair>, const int> -CreatePcmSamplesReader(const int frame_length); -// Creates a reader for the pitch buffer content at 24 kHz. -std::pair>, const int> -CreatePitchBuffer24kHzReader(); -// Creates a reader for the the LP residual coefficients and the pitch period -// and gain values. -std::pair>, const int> -CreateLpResidualAndPitchPeriodGainReader(); -// Creates a reader for the VAD probabilities. -std::pair>, const int> -CreateVadProbsReader(); +// Creates a reader for the PCM S16 samples file. +std::unique_ptr CreatePcmSamplesReader(); -constexpr int kNumPitchBufAutoCorrCoeffs = 147; -constexpr int kNumPitchBufSquareEnergies = 385; -constexpr int kPitchTestDataSize = - kBufSize24kHz + kNumPitchBufSquareEnergies + kNumPitchBufAutoCorrCoeffs; +// Creates a reader for the 24 kHz pitch buffer test data. +ChunksFileReader CreatePitchBuffer24kHzReader(); + +// Creates a reader for the LP residual and pitch information test data. +ChunksFileReader CreateLpResidualAndPitchInfoReader(); + +// Creates a reader for the sequence of GRU input vectors. +std::unique_ptr CreateGruInputReader(); + +// Creates a reader for the VAD probabilities test data. +std::unique_ptr CreateVadProbsReader(); // Class to retrieve a test pitch buffer content and the expected output for the // analysis steps. 
@@ -142,20 +89,40 @@ class PitchTestData { public: PitchTestData(); ~PitchTestData(); - rtc::ArrayView GetPitchBufView() const; - rtc::ArrayView - GetPitchBufSquareEnergiesView() const; - rtc::ArrayView - GetPitchBufAutoCorrCoeffsView() const; + rtc::ArrayView PitchBuffer24kHzView() const { + return pitch_buffer_24k_; + } + rtc::ArrayView SquareEnergies24kHzView() + const { + return square_energies_24k_; + } + rtc::ArrayView AutoCorrelation12kHzView() const { + return auto_correlation_12k_; + } private: - std::array test_data_; + std::array pitch_buffer_24k_; + std::array square_energies_24k_; + std::array auto_correlation_12k_; }; -// Returns true if the given optimization is available. -bool IsOptimizationAvailable(Optimization optimization); +// Writer for binary files. +class FileWriter { + public: + explicit FileWriter(const std::string& file_path) + : os_(file_path, std::ios::binary) {} + FileWriter(const FileWriter&) = delete; + FileWriter& operator=(const FileWriter&) = delete; + ~FileWriter() = default; + void WriteChunk(rtc::ArrayView value) { + const std::streamsize bytes_to_write = value.size() * sizeof(float); + os_.write(reinterpret_cast(value.data()), bytes_to_write); + } + + private: + std::ofstream os_; +}; -} // namespace test } // namespace rnn_vad } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/vector_math.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/vector_math.h new file mode 100644 index 000000000..ead93a193 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/vector_math.h @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_VECTOR_MATH_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_VECTOR_MATH_H_ + +// Defines WEBRTC_ARCH_X86_FAMILY, used below. +#include "rtc_base/system/arch.h" + +#if defined(WEBRTC_HAS_NEON) +#include +#endif +#if defined(WEBRTC_ARCH_X86_FAMILY) +#include +#endif + +#include + +#include "api/array_view.h" +#include "modules/audio_processing/agc2/cpu_features.h" +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/system/arch.h" + +namespace webrtc { +namespace rnn_vad { + +// Provides optimizations for mathematical operations having vectors as +// operand(s). +class VectorMath { + public: + explicit VectorMath(AvailableCpuFeatures cpu_features) + : cpu_features_(cpu_features) {} + + // Computes the dot product between two equally sized vectors. + float DotProduct(rtc::ArrayView x, + rtc::ArrayView y) const { + RTC_DCHECK_EQ(x.size(), y.size()); +#if defined(WEBRTC_ARCH_X86_FAMILY) + if (cpu_features_.sse2) { + __m128 accumulator = _mm_setzero_ps(); + constexpr int kBlockSizeLog2 = 2; + constexpr int kBlockSize = 1 << kBlockSizeLog2; + const int incomplete_block_index = (x.size() >> kBlockSizeLog2) + << kBlockSizeLog2; + for (int i = 0; i < incomplete_block_index; i += kBlockSize) { + RTC_DCHECK_LE(i + kBlockSize, x.size()); + const __m128 x_i = _mm_loadu_ps(&x[i]); + const __m128 y_i = _mm_loadu_ps(&y[i]); + // Multiply-add. + const __m128 z_j = _mm_mul_ps(x_i, y_i); + accumulator = _mm_add_ps(accumulator, z_j); + } + // Reduce `accumulator` by addition. + __m128 high = _mm_movehl_ps(accumulator, accumulator); + accumulator = _mm_add_ps(accumulator, high); + high = _mm_shuffle_ps(accumulator, accumulator, 1); + accumulator = _mm_add_ps(accumulator, high); + float dot_product = _mm_cvtss_f32(accumulator); + // Add the result for the last block if incomplete. 
+ for (int i = incomplete_block_index; + i < rtc::dchecked_cast(x.size()); ++i) { + dot_product += x[i] * y[i]; + } + return dot_product; + } +#elif defined(WEBRTC_HAS_NEON) && defined(WEBRTC_ARCH_ARM64) + if (cpu_features_.neon) { + float32x4_t accumulator = vdupq_n_f32(0.f); + constexpr int kBlockSizeLog2 = 2; + constexpr int kBlockSize = 1 << kBlockSizeLog2; + const int incomplete_block_index = (x.size() >> kBlockSizeLog2) + << kBlockSizeLog2; + for (int i = 0; i < incomplete_block_index; i += kBlockSize) { + RTC_DCHECK_LE(i + kBlockSize, x.size()); + const float32x4_t x_i = vld1q_f32(&x[i]); + const float32x4_t y_i = vld1q_f32(&y[i]); + accumulator = vfmaq_f32(accumulator, x_i, y_i); + } + // Reduce `accumulator` by addition. + const float32x2_t tmp = + vpadd_f32(vget_low_f32(accumulator), vget_high_f32(accumulator)); + float dot_product = vget_lane_f32(vpadd_f32(tmp, vrev64_f32(tmp)), 0); + // Add the result for the last block if incomplete. + for (int i = incomplete_block_index; + i < rtc::dchecked_cast(x.size()); ++i) { + dot_product += x[i] * y[i]; + } + return dot_product; + } +#endif + return std::inner_product(x.begin(), x.end(), y.begin(), 0.f); + } + + private: + float DotProductAvx2(rtc::ArrayView x, + rtc::ArrayView y) const; + + const AvailableCpuFeatures cpu_features_; +}; + +} // namespace rnn_vad +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_RNN_VAD_VECTOR_MATH_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2.cc new file mode 100644 index 000000000..e4d246d9a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2.cc @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/agc2/rnn_vad/vector_math.h" + +#include + +#include "api/array_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" + +namespace webrtc { +namespace rnn_vad { + +float VectorMath::DotProductAvx2(rtc::ArrayView x, + rtc::ArrayView y) const { + RTC_DCHECK(cpu_features_.avx2); + RTC_DCHECK_EQ(x.size(), y.size()); + __m256 accumulator = _mm256_setzero_ps(); + constexpr int kBlockSizeLog2 = 3; + constexpr int kBlockSize = 1 << kBlockSizeLog2; + const int incomplete_block_index = (x.size() >> kBlockSizeLog2) + << kBlockSizeLog2; + for (int i = 0; i < incomplete_block_index; i += kBlockSize) { + RTC_DCHECK_LE(i + kBlockSize, x.size()); + const __m256 x_i = _mm256_loadu_ps(&x[i]); + const __m256 y_i = _mm256_loadu_ps(&y[i]); + accumulator = _mm256_fmadd_ps(x_i, y_i, accumulator); + } + // Reduce `accumulator` by addition. + __m128 high = _mm256_extractf128_ps(accumulator, 1); + __m128 low = _mm256_extractf128_ps(accumulator, 0); + low = _mm_add_ps(high, low); + high = _mm_movehl_ps(high, low); + low = _mm_add_ps(high, low); + high = _mm_shuffle_ps(low, low, 1); + low = _mm_add_ss(high, low); + float dot_product = _mm_cvtss_f32(low); + // Add the result for the last block if incomplete. 
+ for (int i = incomplete_block_index; i < rtc::dchecked_cast(x.size()); + ++i) { + dot_product += x[i] * y[i]; + } + return dot_product; +} + +} // namespace rnn_vad +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.cc index b64fcdb71..d6f21ef89 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.cc @@ -10,84 +10,59 @@ #include "modules/audio_processing/agc2/saturation_protector.h" +#include + +#include "modules/audio_processing/agc2/agc2_common.h" +#include "modules/audio_processing/agc2/saturation_protector_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" namespace webrtc { namespace { -constexpr float kMinLevelDbfs = -90.f; +constexpr int kPeakEnveloperSuperFrameLengthMs = 400; +constexpr float kMinMarginDb = 12.0f; +constexpr float kMaxMarginDb = 25.0f; +constexpr float kAttack = 0.9988493699365052f; +constexpr float kDecay = 0.9997697679981565f; -// Min/max margins are based on speech crest-factor. -constexpr float kMinMarginDb = 12.f; -constexpr float kMaxMarginDb = 25.f; - -using saturation_protector_impl::RingBuffer; - -} // namespace - -bool RingBuffer::operator==(const RingBuffer& b) const { - RTC_DCHECK_LE(size_, buffer_.size()); - RTC_DCHECK_LE(b.size_, b.buffer_.size()); - if (size_ != b.size_) { - return false; +// Saturation protector state. Defined outside of `SaturationProtectorImpl` to +// implement check-point and restore ops. 
+struct SaturationProtectorState { + bool operator==(const SaturationProtectorState& s) const { + return headroom_db == s.headroom_db && + peak_delay_buffer == s.peak_delay_buffer && + max_peaks_dbfs == s.max_peaks_dbfs && + time_since_push_ms == s.time_since_push_ms; } - for (int i = 0, i0 = FrontIndex(), i1 = b.FrontIndex(); i < size_; - ++i, ++i0, ++i1) { - if (buffer_[i0 % buffer_.size()] != b.buffer_[i1 % b.buffer_.size()]) { - return false; - } + inline bool operator!=(const SaturationProtectorState& s) const { + return !(*this == s); } - return true; -} -void RingBuffer::Reset() { - next_ = 0; - size_ = 0; -} + float headroom_db; + SaturationProtectorBuffer peak_delay_buffer; + float max_peaks_dbfs; + int time_since_push_ms; // Time since the last ring buffer push operation. +}; -void RingBuffer::PushBack(float v) { - RTC_DCHECK_GE(next_, 0); - RTC_DCHECK_GE(size_, 0); - RTC_DCHECK_LT(next_, buffer_.size()); - RTC_DCHECK_LE(size_, buffer_.size()); - buffer_[next_++] = v; - if (rtc::SafeEq(next_, buffer_.size())) { - next_ = 0; - } - if (rtc::SafeLt(size_, buffer_.size())) { - size_++; - } -} - -absl::optional RingBuffer::Front() const { - if (size_ == 0) { - return absl::nullopt; - } - RTC_DCHECK_LT(FrontIndex(), buffer_.size()); - return buffer_[FrontIndex()]; -} - -bool SaturationProtectorState::operator==( - const SaturationProtectorState& b) const { - return margin_db == b.margin_db && peak_delay_buffer == b.peak_delay_buffer && - max_peaks_dbfs == b.max_peaks_dbfs && - time_since_push_ms == b.time_since_push_ms; -} - -void ResetSaturationProtectorState(float initial_margin_db, +// Resets the saturation protector state. 
+void ResetSaturationProtectorState(float initial_headroom_db, SaturationProtectorState& state) { - state.margin_db = initial_margin_db; + state.headroom_db = initial_headroom_db; state.peak_delay_buffer.Reset(); state.max_peaks_dbfs = kMinLevelDbfs; state.time_since_push_ms = 0; } -void UpdateSaturationProtectorState(float speech_peak_dbfs, +// Updates `state` by analyzing the estimated speech level `speech_level_dbfs` +// and the peak level `peak_dbfs` for an observed frame. `state` must not be +// modified without calling this function. +void UpdateSaturationProtectorState(float peak_dbfs, float speech_level_dbfs, SaturationProtectorState& state) { // Get the max peak over `kPeakEnveloperSuperFrameLengthMs` ms. - state.max_peaks_dbfs = std::max(state.max_peaks_dbfs, speech_peak_dbfs); + state.max_peaks_dbfs = std::max(state.max_peaks_dbfs, peak_dbfs); state.time_since_push_ms += kFrameDurationMs; if (rtc::SafeGt(state.time_since_push_ms, kPeakEnveloperSuperFrameLengthMs)) { // Push `max_peaks_dbfs` back into the ring buffer. @@ -97,25 +72,117 @@ void UpdateSaturationProtectorState(float speech_peak_dbfs, state.time_since_push_ms = 0; } - // Update margin by comparing the estimated speech level and the delayed max - // speech peak power. - // TODO(alessiob): Check with aleloi@ why we use a delay and how to tune it. + // Update the headroom by comparing the estimated speech level and the delayed + // max speech peak. const float delayed_peak_dbfs = state.peak_delay_buffer.Front().value_or(state.max_peaks_dbfs); const float difference_db = delayed_peak_dbfs - speech_level_dbfs; - if (difference_db > state.margin_db) { + if (difference_db > state.headroom_db) { // Attack. - state.margin_db = - state.margin_db * kSaturationProtectorAttackConstant + - difference_db * (1.f - kSaturationProtectorAttackConstant); + state.headroom_db = + state.headroom_db * kAttack + difference_db * (1.0f - kAttack); } else { // Decay. 
- state.margin_db = state.margin_db * kSaturationProtectorDecayConstant + - difference_db * (1.f - kSaturationProtectorDecayConstant); + state.headroom_db = + state.headroom_db * kDecay + difference_db * (1.0f - kDecay); } - state.margin_db = - rtc::SafeClamp(state.margin_db, kMinMarginDb, kMaxMarginDb); + state.headroom_db = + rtc::SafeClamp(state.headroom_db, kMinMarginDb, kMaxMarginDb); +} + +// Saturation protector which recommends a headroom based on the recent peaks. +class SaturationProtectorImpl : public SaturationProtector { + public: + explicit SaturationProtectorImpl(float initial_headroom_db, + float extra_headroom_db, + int adjacent_speech_frames_threshold, + ApmDataDumper* apm_data_dumper) + : apm_data_dumper_(apm_data_dumper), + initial_headroom_db_(initial_headroom_db), + extra_headroom_db_(extra_headroom_db), + adjacent_speech_frames_threshold_(adjacent_speech_frames_threshold) { + Reset(); + } + SaturationProtectorImpl(const SaturationProtectorImpl&) = delete; + SaturationProtectorImpl& operator=(const SaturationProtectorImpl&) = delete; + ~SaturationProtectorImpl() = default; + + float HeadroomDb() override { return headroom_db_; } + + void Analyze(float speech_probability, + float peak_dbfs, + float speech_level_dbfs) override { + if (speech_probability < kVadConfidenceThreshold) { + // Not a speech frame. + if (adjacent_speech_frames_threshold_ > 1) { + // When two or more adjacent speech frames are required in order to + // update the state, we need to decide whether to discard or confirm the + // updates based on the speech sequence length. + if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { + // First non-speech frame after a long enough sequence of speech + // frames. Update the reliable state. + reliable_state_ = preliminary_state_; + } else if (num_adjacent_speech_frames_ > 0) { + // First non-speech frame after a too short sequence of speech frames. + // Reset to the last reliable state. 
+ preliminary_state_ = reliable_state_; + } + } + num_adjacent_speech_frames_ = 0; + } else { + // Speech frame observed. + num_adjacent_speech_frames_++; + + // Update preliminary level estimate. + UpdateSaturationProtectorState(peak_dbfs, speech_level_dbfs, + preliminary_state_); + + if (num_adjacent_speech_frames_ >= adjacent_speech_frames_threshold_) { + // `preliminary_state_` is now reliable. Update the headroom. + headroom_db_ = preliminary_state_.headroom_db + extra_headroom_db_; + } + } + DumpDebugData(); + } + + void Reset() override { + num_adjacent_speech_frames_ = 0; + headroom_db_ = initial_headroom_db_ + extra_headroom_db_; + ResetSaturationProtectorState(initial_headroom_db_, preliminary_state_); + ResetSaturationProtectorState(initial_headroom_db_, reliable_state_); + } + + private: + void DumpDebugData() { + apm_data_dumper_->DumpRaw( + "agc2_saturation_protector_preliminary_max_peak_dbfs", + preliminary_state_.max_peaks_dbfs); + apm_data_dumper_->DumpRaw( + "agc2_saturation_protector_reliable_max_peak_dbfs", + reliable_state_.max_peaks_dbfs); + } + + ApmDataDumper* const apm_data_dumper_; + const float initial_headroom_db_; + const float extra_headroom_db_; + const int adjacent_speech_frames_threshold_; + int num_adjacent_speech_frames_; + float headroom_db_; + SaturationProtectorState preliminary_state_; + SaturationProtectorState reliable_state_; +}; + +} // namespace + +std::unique_ptr CreateSaturationProtector( + float initial_headroom_db, + float extra_headroom_db, + int adjacent_speech_frames_threshold, + ApmDataDumper* apm_data_dumper) { + return std::make_unique( + initial_headroom_db, extra_headroom_db, adjacent_speech_frames_threshold, + apm_data_dumper); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.h index 88be91a79..0c384f1fa 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector.h @@ -11,71 +11,36 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_SATURATION_PROTECTOR_H_ #define MODULES_AUDIO_PROCESSING_AGC2_SATURATION_PROTECTOR_H_ -#include - -#include "absl/types/optional.h" -#include "modules/audio_processing/agc2/agc2_common.h" -#include "rtc_base/numerics/safe_compare.h" +#include namespace webrtc { -namespace saturation_protector_impl { +class ApmDataDumper; -// Ring buffer which only supports (i) push back and (ii) read oldest item. -class RingBuffer { +// Saturation protector. Analyzes peak levels and recommends a headroom to +// reduce the chances of clipping. +class SaturationProtector { public: - bool operator==(const RingBuffer& b) const; - inline bool operator!=(const RingBuffer& b) const { return !(*this == b); } + virtual ~SaturationProtector() = default; - // Maximum number of values that the buffer can contain. - int Capacity() const { return buffer_.size(); } - // Number of values in the buffer. - int Size() const { return size_; } + // Returns the recommended headroom in dB. + virtual float HeadroomDb() = 0; - void Reset(); - // Pushes back `v`. If the buffer is full, the oldest value is replaced. - void PushBack(float v); - // Returns the oldest item in the buffer. Returns an empty value if the - // buffer is empty. - absl::optional Front() const; + // Analyzes the peak level of a 10 ms frame along with its speech probability + // and the current speech level estimate to update the recommended headroom. + virtual void Analyze(float speech_probability, + float peak_dbfs, + float speech_level_dbfs) = 0; - private: - inline int FrontIndex() const { - return rtc::SafeEq(size_, buffer_.size()) ? next_ : 0; - } - // `buffer_` has `size_` elements (up to the size of `buffer_`) and `next_` is - // the position where the next new value is written in `buffer_`. 
- std::array buffer_; - int next_ = 0; - int size_ = 0; + // Resets the internal state. + virtual void Reset() = 0; }; -} // namespace saturation_protector_impl - -// Saturation protector state. Exposed publicly for check-pointing and restore -// ops. -struct SaturationProtectorState { - bool operator==(const SaturationProtectorState& s) const; - inline bool operator!=(const SaturationProtectorState& s) const { - return !(*this == s); - } - - float margin_db; // Recommended margin. - saturation_protector_impl::RingBuffer peak_delay_buffer; - float max_peaks_dbfs; - int time_since_push_ms; // Time since the last ring buffer push operation. -}; - -// Resets the saturation protector state. -void ResetSaturationProtectorState(float initial_margin_db, - SaturationProtectorState& state); - -// Updates `state` by analyzing the estimated speech level `speech_level_dbfs` -// and the peak power `speech_peak_dbfs` for an observed frame which is -// reliably classified as "speech". `state` must not be modified without calling -// this function. -void UpdateSaturationProtectorState(float speech_peak_dbfs, - float speech_level_dbfs, - SaturationProtectorState& state); +// Creates a saturation protector that starts at `initial_headroom_db`. +std::unique_ptr CreateSaturationProtector( + float initial_headroom_db, + float extra_headroom_db, + int adjacent_speech_frames_threshold, + ApmDataDumper* apm_data_dumper); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector_buffer.cc new file mode 100644 index 000000000..41efdad2c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector_buffer.cc @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/agc2/saturation_protector_buffer.h" + +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" + +namespace webrtc { + +SaturationProtectorBuffer::SaturationProtectorBuffer() = default; + +SaturationProtectorBuffer::~SaturationProtectorBuffer() = default; + +bool SaturationProtectorBuffer::operator==( + const SaturationProtectorBuffer& b) const { + RTC_DCHECK_LE(size_, buffer_.size()); + RTC_DCHECK_LE(b.size_, b.buffer_.size()); + if (size_ != b.size_) { + return false; + } + for (int i = 0, i0 = FrontIndex(), i1 = b.FrontIndex(); i < size_; + ++i, ++i0, ++i1) { + if (buffer_[i0 % buffer_.size()] != b.buffer_[i1 % b.buffer_.size()]) { + return false; + } + } + return true; +} + +int SaturationProtectorBuffer::Capacity() const { + return buffer_.size(); +} + +int SaturationProtectorBuffer::Size() const { + return size_; +} + +void SaturationProtectorBuffer::Reset() { + next_ = 0; + size_ = 0; +} + +void SaturationProtectorBuffer::PushBack(float v) { + RTC_DCHECK_GE(next_, 0); + RTC_DCHECK_GE(size_, 0); + RTC_DCHECK_LT(next_, buffer_.size()); + RTC_DCHECK_LE(size_, buffer_.size()); + buffer_[next_++] = v; + if (rtc::SafeEq(next_, buffer_.size())) { + next_ = 0; + } + if (rtc::SafeLt(size_, buffer_.size())) { + size_++; + } +} + +absl::optional SaturationProtectorBuffer::Front() const { + if (size_ == 0) { + return absl::nullopt; + } + RTC_DCHECK_LT(FrontIndex(), buffer_.size()); + return buffer_[FrontIndex()]; +} + +int SaturationProtectorBuffer::FrontIndex() const { + return rtc::SafeEq(size_, buffer_.size()) ? 
next_ : 0; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector_buffer.h new file mode 100644 index 000000000..e17d0998c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/saturation_protector_buffer.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC2_SATURATION_PROTECTOR_BUFFER_H_ +#define MODULES_AUDIO_PROCESSING_AGC2_SATURATION_PROTECTOR_BUFFER_H_ + +#include + +#include "absl/types/optional.h" +#include "modules/audio_processing/agc2/agc2_common.h" + +namespace webrtc { + +// Ring buffer for the saturation protector which only supports (i) push back +// and (ii) read oldest item. +class SaturationProtectorBuffer { + public: + SaturationProtectorBuffer(); + ~SaturationProtectorBuffer(); + + bool operator==(const SaturationProtectorBuffer& b) const; + inline bool operator!=(const SaturationProtectorBuffer& b) const { + return !(*this == b); + } + + // Maximum number of values that the buffer can contain. + int Capacity() const; + + // Number of values in the buffer. + int Size() const; + + void Reset(); + + // Pushes back `v`. If the buffer is full, the oldest value is replaced. + void PushBack(float v); + + // Returns the oldest item in the buffer. Returns an empty value if the + // buffer is empty. 
+ absl::optional Front() const; + + private: + int FrontIndex() const; + // `buffer_` has `size_` elements (up to the size of `buffer_`) and `next_` is + // the position where the next new value is written in `buffer_`. + std::array buffer_; + int next_ = 0; + int size_ = 0; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC2_SATURATION_PROTECTOR_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc index a06413d16..3ef8dd775 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc @@ -84,8 +84,8 @@ webrtc::SignalClassifier::SignalType ClassifySignal( } } - data_dumper->DumpRaw("lc_num_stationary_bands", 1, &num_stationary_bands); - data_dumper->DumpRaw("lc_num_highly_nonstationary_bands", 1, + data_dumper->DumpRaw("agc2_num_stationary_bands", 1, &num_stationary_bands); + data_dumper->DumpRaw("agc2_num_highly_nonstationary_bands", 1, &num_highly_nonstationary_bands); // Use the detected number of bands to classify the overall signal diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc index 3dbb55732..9747ca237 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc @@ -32,11 +32,14 @@ using VoiceActivityDetector = VadLevelAnalyzer::VoiceActivityDetector; // Computes the speech probability on the first channel. 
class Vad : public VoiceActivityDetector { public: - Vad() = default; + explicit Vad(const AvailableCpuFeatures& cpu_features) + : features_extractor_(cpu_features), rnn_vad_(cpu_features) {} Vad(const Vad&) = delete; Vad& operator=(const Vad&) = delete; ~Vad() = default; + void Reset() override { rnn_vad_.Reset(); } + float ComputeProbability(AudioFrameView frame) override { // The source number of channels is 1, because we always use the 1st // channel. @@ -59,54 +62,44 @@ class Vad : public VoiceActivityDetector { private: PushResampler resampler_; rnn_vad::FeaturesExtractor features_extractor_; - rnn_vad::RnnBasedVad rnn_vad_; + rnn_vad::RnnVad rnn_vad_; }; -// Returns an updated version of `p_old` by using instant decay and the given -// `attack` on a new VAD probability value `p_new`. -float SmoothedVadProbability(float p_old, float p_new, float attack) { - RTC_DCHECK_GT(attack, 0.f); - RTC_DCHECK_LE(attack, 1.f); - if (p_new < p_old || attack == 1.f) { - // Instant decay (or no smoothing). - return p_new; - } else { - // Attack phase. 
- return attack * p_new + (1.f - attack) * p_old; - } -} - } // namespace -VadLevelAnalyzer::VadLevelAnalyzer() - : VadLevelAnalyzer(kDefaultSmoothedVadProbabilityAttack, - std::make_unique()) {} +VadLevelAnalyzer::VadLevelAnalyzer(int vad_reset_period_ms, + const AvailableCpuFeatures& cpu_features) + : VadLevelAnalyzer(vad_reset_period_ms, + std::make_unique(cpu_features)) {} -VadLevelAnalyzer::VadLevelAnalyzer(float vad_probability_attack) - : VadLevelAnalyzer(vad_probability_attack, std::make_unique()) {} - -VadLevelAnalyzer::VadLevelAnalyzer(float vad_probability_attack, +VadLevelAnalyzer::VadLevelAnalyzer(int vad_reset_period_ms, std::unique_ptr vad) - : vad_(std::move(vad)), vad_probability_attack_(vad_probability_attack) { + : vad_(std::move(vad)), + vad_reset_period_frames_( + rtc::CheckedDivExact(vad_reset_period_ms, kFrameDurationMs)), + time_to_vad_reset_(vad_reset_period_frames_) { RTC_DCHECK(vad_); + RTC_DCHECK_GT(vad_reset_period_frames_, 1); } VadLevelAnalyzer::~VadLevelAnalyzer() = default; VadLevelAnalyzer::Result VadLevelAnalyzer::AnalyzeFrame( AudioFrameView frame) { + // Periodically reset the VAD. + time_to_vad_reset_--; + if (time_to_vad_reset_ <= 0) { + vad_->Reset(); + time_to_vad_reset_ = vad_reset_period_frames_; + } // Compute levels. - float peak = 0.f; - float rms = 0.f; + float peak = 0.0f; + float rms = 0.0f; for (const auto& x : frame.channel(0)) { peak = std::max(std::fabs(x), peak); rms += x * x; } - // Compute smoothed speech probability. 
- vad_probability_ = SmoothedVadProbability( - /*p_old=*/vad_probability_, /*p_new=*/vad_->ComputeProbability(frame), - vad_probability_attack_); - return {vad_probability_, + return {vad_->ComputeProbability(frame), FloatS16ToDbfs(std::sqrt(rms / frame.samples_per_channel())), FloatS16ToDbfs(peak)}; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.h index ce72cdc75..8d2ae4576 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/vad_with_level.h @@ -13,6 +13,7 @@ #include +#include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/include/audio_frame_view.h" namespace webrtc { @@ -30,16 +31,21 @@ class VadLevelAnalyzer { class VoiceActivityDetector { public: virtual ~VoiceActivityDetector() = default; + // Resets the internal state. + virtual void Reset() = 0; // Analyzes an audio frame and returns the speech probability. virtual float ComputeProbability(AudioFrameView frame) = 0; }; - // Ctor. Uses the default VAD. - VadLevelAnalyzer(); - explicit VadLevelAnalyzer(float vad_probability_attack); + // Ctor. `vad_reset_period_ms` indicates the period in milliseconds to call + // `VadLevelAnalyzer::Reset()`; it must be equal to or greater than the + // duration of two frames. Uses `cpu_features` to instantiate the default VAD. + VadLevelAnalyzer(int vad_reset_period_ms, + const AvailableCpuFeatures& cpu_features); // Ctor. Uses a custom `vad`. 
- VadLevelAnalyzer(float vad_probability_attack, + VadLevelAnalyzer(int vad_reset_period_ms, std::unique_ptr vad); + VadLevelAnalyzer(const VadLevelAnalyzer&) = delete; VadLevelAnalyzer& operator=(const VadLevelAnalyzer&) = delete; ~VadLevelAnalyzer(); @@ -49,8 +55,8 @@ class VadLevelAnalyzer { private: std::unique_ptr vad_; - const float vad_probability_attack_; - float vad_probability_ = 0.f; + const int vad_reset_period_frames_; + int time_to_vad_reset_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc index 37112f088..3c5d9fb71 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc @@ -23,7 +23,6 @@ #include "common_audio/audio_converter.h" #include "common_audio/include/audio_util.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" -#include "modules/audio_processing/agc2/gain_applier.h" #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/common.h" #include "modules/audio_processing/include/audio_frame_view.h" @@ -49,8 +48,6 @@ namespace webrtc { -constexpr int kRuntimeSettingQueueSize = 100; - namespace { static bool LayoutHasKeyboard(AudioProcessing::ChannelLayout layout) { @@ -117,6 +114,10 @@ GainControl::Mode Agc1ConfigModeToInterfaceMode( RTC_CHECK_NOTREACHED(); } +bool MinimizeProcessingForUnusedOutput() { + return !field_trial::IsEnabled("WebRTC-MutedStateKillSwitch"); +} + // Maximum lengths that frame of samples being passed from the render side to // the capture side can have (does not apply to AEC3). 
static const size_t kMaxAllowedValuesOfSamplesPerBand = 160; @@ -147,7 +148,7 @@ bool AudioProcessingImpl::SubmoduleStates::Update( bool noise_suppressor_enabled, bool adaptive_gain_controller_enabled, bool gain_controller2_enabled, - bool pre_amplifier_enabled, + bool gain_adjustment_enabled, bool echo_controller_enabled, bool voice_detector_enabled, bool transient_suppressor_enabled) { @@ -161,7 +162,7 @@ bool AudioProcessingImpl::SubmoduleStates::Update( changed |= (adaptive_gain_controller_enabled != adaptive_gain_controller_enabled_); changed |= (gain_controller2_enabled != gain_controller2_enabled_); - changed |= (pre_amplifier_enabled_ != pre_amplifier_enabled); + changed |= (gain_adjustment_enabled != gain_adjustment_enabled_); changed |= (echo_controller_enabled != echo_controller_enabled_); changed |= (voice_detector_enabled != voice_detector_enabled_); changed |= (transient_suppressor_enabled != transient_suppressor_enabled_); @@ -172,7 +173,7 @@ bool AudioProcessingImpl::SubmoduleStates::Update( noise_suppressor_enabled_ = noise_suppressor_enabled; adaptive_gain_controller_enabled_ = adaptive_gain_controller_enabled; gain_controller2_enabled_ = gain_controller2_enabled; - pre_amplifier_enabled_ = pre_amplifier_enabled; + gain_adjustment_enabled_ = gain_adjustment_enabled; echo_controller_enabled_ = echo_controller_enabled; voice_detector_enabled_ = voice_detector_enabled; transient_suppressor_enabled_ = transient_suppressor_enabled; @@ -204,7 +205,7 @@ bool AudioProcessingImpl::SubmoduleStates::CaptureMultiBandProcessingActive( bool AudioProcessingImpl::SubmoduleStates::CaptureFullBandProcessingActive() const { return gain_controller2_enabled_ || capture_post_processor_enabled_ || - pre_amplifier_enabled_; + gain_adjustment_enabled_; } bool AudioProcessingImpl::SubmoduleStates::CaptureAnalyzerActive() const { @@ -253,8 +254,8 @@ AudioProcessingImpl::AudioProcessingImpl( new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), 
use_setup_specific_default_aec3_config_( UseSetupSpecificDefaultAec3Congfig()), - capture_runtime_settings_(kRuntimeSettingQueueSize), - render_runtime_settings_(kRuntimeSettingQueueSize), + capture_runtime_settings_(RuntimeSettingQueueSize()), + render_runtime_settings_(RuntimeSettingQueueSize()), capture_runtime_settings_enqueuer_(&capture_runtime_settings_), render_runtime_settings_enqueuer_(&render_runtime_settings_), echo_control_factory_(std::move(echo_control_factory)), @@ -269,7 +270,10 @@ AudioProcessingImpl::AudioProcessingImpl( "WebRTC-ApmExperimentalMultiChannelRenderKillSwitch"), !field_trial::IsEnabled( "WebRTC-ApmExperimentalMultiChannelCaptureKillSwitch"), - EnforceSplitBandHpf()), + EnforceSplitBandHpf(), + MinimizeProcessingForUnusedOutput(), + field_trial::IsEnabled("WebRTC-TransientSuppressorForcedOff")), + capture_(), capture_nonlocked_() { RTC_LOG(LS_INFO) << "Injected APM submodules:" "\nEcho control factory: " @@ -287,8 +291,7 @@ AudioProcessingImpl::AudioProcessingImpl( // If no echo detector is injected, use the ResidualEchoDetector. 
if (!submodules_.echo_detector) { - submodules_.echo_detector = - new rtc::RefCountedObject(); + submodules_.echo_detector = rtc::make_ref_counted(); } #if !(defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)) @@ -304,8 +307,6 @@ AudioProcessingImpl::AudioProcessingImpl( config.Get().startup_min_volume; config_.gain_controller1.analog_gain_controller.clipped_level_min = config.Get().clipped_level_min; - config_.gain_controller1.analog_gain_controller.enable_agc2_level_estimator = - config.Get().enabled_agc2_level_estimator; config_.gain_controller1.analog_gain_controller.enable_digital_adaptive = !config.Get().digital_adaptive_disabled; #endif @@ -426,6 +427,7 @@ void AudioProcessingImpl::InitializeLocked() { InitializeAnalyzer(); InitializePostProcessor(); InitializePreProcessor(); + InitializeCaptureLevelsAdjuster(); if (aec_dump_) { aec_dump_->WriteInitMessage(formats_.api_format, rtc::TimeUTCMillis()); @@ -567,6 +569,9 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { config_.pre_amplifier.fixed_gain_factor != config.pre_amplifier.fixed_gain_factor; + const bool gain_adjustment_config_changed = + config_.capture_level_adjustment != config.capture_level_adjustment; + config_ = config; if (aec_config_changed) { @@ -598,8 +603,8 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { InitializeGainController2(); } - if (pre_amplifier_config_changed) { - InitializePreAmplifier(); + if (pre_amplifier_config_changed || gain_adjustment_config_changed) { + InitializeCaptureLevelsAdjuster(); } if (config_.level_estimation.enabled && !submodules_.output_level_estimator) { @@ -666,35 +671,60 @@ size_t AudioProcessingImpl::num_output_channels() const { void AudioProcessingImpl::set_output_will_be_muted(bool muted) { MutexLock lock(&mutex_capture_); - capture_.output_will_be_muted = muted; + HandleCaptureOutputUsedSetting(!muted); +} + +void AudioProcessingImpl::HandleCaptureOutputUsedSetting( + bool capture_output_used) 
{ + capture_.capture_output_used = + capture_output_used || !constants_.minimize_processing_for_unused_output; + if (submodules_.agc_manager.get()) { - submodules_.agc_manager->SetCaptureMuted(capture_.output_will_be_muted); + submodules_.agc_manager->HandleCaptureOutputUsedChange( + capture_.capture_output_used); + } + if (submodules_.echo_controller) { + submodules_.echo_controller->SetCaptureOutputUsage( + capture_.capture_output_used); + } + if (submodules_.noise_suppressor) { + submodules_.noise_suppressor->SetCaptureOutputUsage( + capture_.capture_output_used); } } void AudioProcessingImpl::SetRuntimeSetting(RuntimeSetting setting) { + PostRuntimeSetting(setting); +} + +bool AudioProcessingImpl::PostRuntimeSetting(RuntimeSetting setting) { switch (setting.type()) { case RuntimeSetting::Type::kCustomRenderProcessingRuntimeSetting: case RuntimeSetting::Type::kPlayoutAudioDeviceChange: - render_runtime_settings_enqueuer_.Enqueue(setting); - return; + return render_runtime_settings_enqueuer_.Enqueue(setting); case RuntimeSetting::Type::kCapturePreGain: + case RuntimeSetting::Type::kCapturePostGain: case RuntimeSetting::Type::kCaptureCompressionGain: case RuntimeSetting::Type::kCaptureFixedPostGain: case RuntimeSetting::Type::kCaptureOutputUsed: - capture_runtime_settings_enqueuer_.Enqueue(setting); - return; - case RuntimeSetting::Type::kPlayoutVolumeChange: - capture_runtime_settings_enqueuer_.Enqueue(setting); - render_runtime_settings_enqueuer_.Enqueue(setting); - return; + return capture_runtime_settings_enqueuer_.Enqueue(setting); + case RuntimeSetting::Type::kPlayoutVolumeChange: { + bool enqueueing_successful; + enqueueing_successful = + capture_runtime_settings_enqueuer_.Enqueue(setting); + enqueueing_successful = + render_runtime_settings_enqueuer_.Enqueue(setting) && + enqueueing_successful; + return enqueueing_successful; + } case RuntimeSetting::Type::kNotSpecified: RTC_NOTREACHED(); - return; + return true; } // The language allows the enum to have a 
non-enumerator // value. Check that this doesn't happen. RTC_NOTREACHED(); + return true; } AudioProcessingImpl::RuntimeSettingEnqueuer::RuntimeSettingEnqueuer( @@ -706,20 +736,15 @@ AudioProcessingImpl::RuntimeSettingEnqueuer::RuntimeSettingEnqueuer( AudioProcessingImpl::RuntimeSettingEnqueuer::~RuntimeSettingEnqueuer() = default; -void AudioProcessingImpl::RuntimeSettingEnqueuer::Enqueue( +bool AudioProcessingImpl::RuntimeSettingEnqueuer::Enqueue( RuntimeSetting setting) { - int remaining_attempts = 10; - while (!runtime_settings_.Insert(&setting) && remaining_attempts-- > 0) { - RuntimeSetting setting_to_discard; - if (runtime_settings_.Remove(&setting_to_discard)) { - RTC_LOG(LS_ERROR) - << "The runtime settings queue is full. Oldest setting discarded."; - } - } - if (remaining_attempts == 0) { + const bool successful_insert = runtime_settings_.Insert(&setting); + + if (!successful_insert) { RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.ApmRuntimeSettingCannotEnqueue", 1); RTC_LOG(LS_ERROR) << "Cannot enqueue a new runtime setting."; } + return successful_insert; } int AudioProcessingImpl::MaybeInitializeCapture( @@ -793,17 +818,48 @@ int AudioProcessingImpl::ProcessStream(const float* const* src, void AudioProcessingImpl::HandleCaptureRuntimeSettings() { RuntimeSetting setting; + int num_settings_processed = 0; while (capture_runtime_settings_.Remove(&setting)) { if (aec_dump_) { aec_dump_->WriteRuntimeSetting(setting); } switch (setting.type()) { case RuntimeSetting::Type::kCapturePreGain: - if (config_.pre_amplifier.enabled) { + if (config_.pre_amplifier.enabled || + config_.capture_level_adjustment.enabled) { float value; setting.GetFloat(&value); - config_.pre_amplifier.fixed_gain_factor = value; - submodules_.pre_amplifier->SetGainFactor(value); + // If the pre-amplifier is used, apply the new gain to the + // pre-amplifier regardless if the capture level adjustment is + // activated. 
This approach allows both functionalities to coexist + // until they have been properly merged. + if (config_.pre_amplifier.enabled) { + config_.pre_amplifier.fixed_gain_factor = value; + } else { + config_.capture_level_adjustment.pre_gain_factor = value; + } + + // Use both the pre-amplifier and the capture level adjustment gains + // as pre-gains. + float gain = 1.f; + if (config_.pre_amplifier.enabled) { + gain *= config_.pre_amplifier.fixed_gain_factor; + } + if (config_.capture_level_adjustment.enabled) { + gain *= config_.capture_level_adjustment.pre_gain_factor; + } + + submodules_.capture_levels_adjuster->SetPreGain(gain); + } + // TODO(bugs.chromium.org/9138): Log setting handling by Aec Dump. + break; + case RuntimeSetting::Type::kCapturePostGain: + if (config_.capture_level_adjustment.enabled) { + float value; + setting.GetFloat(&value); + config_.capture_level_adjustment.post_gain_factor = value; + submodules_.capture_levels_adjuster->SetPostGain( + config_.capture_level_adjustment.post_gain_factor); } // TODO(bugs.chromium.org/9138): Log setting handling by Aec Dump. break; @@ -846,11 +902,25 @@ void AudioProcessingImpl::HandleCaptureRuntimeSettings() { RTC_NOTREACHED(); break; case RuntimeSetting::Type::kCaptureOutputUsed: - // TODO(b/154437967): Add support for reducing complexity when it is - // known that the capture output will not be used. + bool value; + setting.GetBool(&value); + HandleCaptureOutputUsedSetting(value); break; } + ++num_settings_processed; } + + if (num_settings_processed >= RuntimeSettingQueueSize()) { + // Handle overrun of the runtime settings queue, which likely will has + // caused settings to be discarded. + HandleOverrunInCaptureRuntimeSettingsQueue(); + } +} + +void AudioProcessingImpl::HandleOverrunInCaptureRuntimeSettingsQueue() { + // Fall back to a safe state for the case when a setting for capture output + // usage setting has been missed. 
+ HandleCaptureOutputUsedSetting(/*capture_output_used=*/true); } void AudioProcessingImpl::HandleRenderRuntimeSettings() { @@ -868,6 +938,7 @@ void AudioProcessingImpl::HandleRenderRuntimeSettings() { } break; case RuntimeSetting::Type::kCapturePreGain: // fall-through + case RuntimeSetting::Type::kCapturePostGain: // fall-through case RuntimeSetting::Type::kCaptureCompressionGain: // fall-through case RuntimeSetting::Type::kCaptureFixedPostGain: // fall-through case RuntimeSetting::Type::kCaptureOutputUsed: // fall-through @@ -1055,10 +1126,21 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { /*use_split_band_data=*/false); } - if (submodules_.pre_amplifier) { - submodules_.pre_amplifier->ApplyGain(AudioFrameView( - capture_buffer->channels(), capture_buffer->num_channels(), - capture_buffer->num_frames())); + if (submodules_.capture_levels_adjuster) { + // If the analog mic gain emulation is active, get the emulated analog mic + // gain and pass it to the analog gain control functionality. + if (config_.capture_level_adjustment.analog_mic_gain_emulation.enabled) { + int level = submodules_.capture_levels_adjuster->GetAnalogMicGainLevel(); + if (submodules_.agc_manager) { + submodules_.agc_manager->set_stream_analog_level(level); + } else if (submodules_.gain_control) { + int error = submodules_.gain_control->set_stream_analog_level(level); + RTC_DCHECK_EQ(kNoError, error); + } + } + + submodules_.capture_levels_adjuster->ApplyPreLevelAdjustment( + *capture_buffer); } capture_input_rms_.Analyze(rtc::ArrayView( @@ -1082,14 +1164,15 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { capture_.prev_analog_mic_level != -1; capture_.prev_analog_mic_level = analog_mic_level; - // Detect and flag any change in the pre-amplifier gain. - if (submodules_.pre_amplifier) { - float pre_amp_gain = submodules_.pre_amplifier->GetGainFactor(); + // Detect and flag any change in the capture level adjustment pre-gain. 
+ if (submodules_.capture_levels_adjuster) { + float pre_adjustment_gain = + submodules_.capture_levels_adjuster->GetPreAdjustmentGain(); capture_.echo_path_gain_change = capture_.echo_path_gain_change || - (capture_.prev_pre_amp_gain != pre_amp_gain && - capture_.prev_pre_amp_gain >= 0.f); - capture_.prev_pre_amp_gain = pre_amp_gain; + (capture_.prev_pre_adjustment_gain != pre_adjustment_gain && + capture_.prev_pre_adjustment_gain >= 0.f); + capture_.prev_pre_adjustment_gain = pre_adjustment_gain; } // Detect volume change. @@ -1204,81 +1287,95 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { capture_buffer->MergeFrequencyBands(); } - if (capture_.capture_fullband_audio) { - const auto& ec = submodules_.echo_controller; - bool ec_active = ec ? ec->ActiveProcessing() : false; - // Only update the fullband buffer if the multiband processing has changed - // the signal. Keep the original signal otherwise. - if (submodule_states_.CaptureMultiBandProcessingActive(ec_active)) { - capture_buffer->CopyTo(capture_.capture_fullband_audio.get()); + capture_.stats.output_rms_dbfs = absl::nullopt; + if (capture_.capture_output_used) { + if (capture_.capture_fullband_audio) { + const auto& ec = submodules_.echo_controller; + bool ec_active = ec ? ec->ActiveProcessing() : false; + // Only update the fullband buffer if the multiband processing has changed + // the signal. Keep the original signal otherwise. + if (submodule_states_.CaptureMultiBandProcessingActive(ec_active)) { + capture_buffer->CopyTo(capture_.capture_fullband_audio.get()); + } + capture_buffer = capture_.capture_fullband_audio.get(); + } + + if (config_.residual_echo_detector.enabled) { + RTC_DCHECK(submodules_.echo_detector); + submodules_.echo_detector->AnalyzeCaptureAudio( + rtc::ArrayView(capture_buffer->channels()[0], + capture_buffer->num_frames())); + } + + // TODO(aluebs): Investigate if the transient suppression placement should + // be before or after the AGC. 
+ if (submodules_.transient_suppressor) { + float voice_probability = + submodules_.agc_manager.get() + ? submodules_.agc_manager->voice_probability() + : 1.f; + + submodules_.transient_suppressor->Suppress( + capture_buffer->channels()[0], capture_buffer->num_frames(), + capture_buffer->num_channels(), + capture_buffer->split_bands_const(0)[kBand0To8kHz], + capture_buffer->num_frames_per_band(), + capture_.keyboard_info.keyboard_data, + capture_.keyboard_info.num_keyboard_frames, voice_probability, + capture_.key_pressed); + } + + // Experimental APM sub-module that analyzes |capture_buffer|. + if (submodules_.capture_analyzer) { + submodules_.capture_analyzer->Analyze(capture_buffer); + } + + if (submodules_.gain_controller2) { + submodules_.gain_controller2->NotifyAnalogLevel( + recommended_stream_analog_level_locked()); + submodules_.gain_controller2->Process(capture_buffer); + } + + if (submodules_.capture_post_processor) { + submodules_.capture_post_processor->Process(capture_buffer); + } + + // The level estimator operates on the recombined data. + if (config_.level_estimation.enabled) { + submodules_.output_level_estimator->ProcessStream(*capture_buffer); + capture_.stats.output_rms_dbfs = + submodules_.output_level_estimator->RMS(); + } + + capture_output_rms_.Analyze(rtc::ArrayView( + capture_buffer->channels_const()[0], + capture_nonlocked_.capture_processing_format.num_frames())); + if (log_rms) { + RmsLevel::Levels levels = capture_output_rms_.AverageAndPeak(); + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.Audio.ApmCaptureOutputLevelAverageRms", levels.average, 1, + RmsLevel::kMinLevelDb, 64); + RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.ApmCaptureOutputLevelPeakRms", + levels.peak, 1, RmsLevel::kMinLevelDb, 64); + } + + if (submodules_.agc_manager) { + int level = recommended_stream_analog_level_locked(); + data_dumper_->DumpRaw("experimental_gain_control_stream_analog_level", 1, + &level); + } + + // Compute echo-detector stats. 
+ if (config_.residual_echo_detector.enabled) { + RTC_DCHECK(submodules_.echo_detector); + auto ed_metrics = submodules_.echo_detector->GetMetrics(); + capture_.stats.residual_echo_likelihood = ed_metrics.echo_likelihood; + capture_.stats.residual_echo_likelihood_recent_max = + ed_metrics.echo_likelihood_recent_max; } - capture_buffer = capture_.capture_fullband_audio.get(); } - if (config_.residual_echo_detector.enabled) { - RTC_DCHECK(submodules_.echo_detector); - submodules_.echo_detector->AnalyzeCaptureAudio(rtc::ArrayView( - capture_buffer->channels()[0], capture_buffer->num_frames())); - } - - // TODO(aluebs): Investigate if the transient suppression placement should be - // before or after the AGC. - if (submodules_.transient_suppressor) { - float voice_probability = submodules_.agc_manager.get() - ? submodules_.agc_manager->voice_probability() - : 1.f; - - submodules_.transient_suppressor->Suppress( - capture_buffer->channels()[0], capture_buffer->num_frames(), - capture_buffer->num_channels(), - capture_buffer->split_bands_const(0)[kBand0To8kHz], - capture_buffer->num_frames_per_band(), - capture_.keyboard_info.keyboard_data, - capture_.keyboard_info.num_keyboard_frames, voice_probability, - capture_.key_pressed); - } - - // Experimental APM sub-module that analyzes |capture_buffer|. - if (submodules_.capture_analyzer) { - submodules_.capture_analyzer->Analyze(capture_buffer); - } - - if (submodules_.gain_controller2) { - submodules_.gain_controller2->NotifyAnalogLevel( - recommended_stream_analog_level_locked()); - submodules_.gain_controller2->Process(capture_buffer); - } - - if (submodules_.capture_post_processor) { - submodules_.capture_post_processor->Process(capture_buffer); - } - - // The level estimator operates on the recombined data. 
- if (config_.level_estimation.enabled) { - submodules_.output_level_estimator->ProcessStream(*capture_buffer); - capture_.stats.output_rms_dbfs = submodules_.output_level_estimator->RMS(); - } else { - capture_.stats.output_rms_dbfs = absl::nullopt; - } - - capture_output_rms_.Analyze(rtc::ArrayView( - capture_buffer->channels_const()[0], - capture_nonlocked_.capture_processing_format.num_frames())); - if (log_rms) { - RmsLevel::Levels levels = capture_output_rms_.AverageAndPeak(); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.ApmCaptureOutputLevelAverageRms", - levels.average, 1, RmsLevel::kMinLevelDb, 64); - RTC_HISTOGRAM_COUNTS_LINEAR("WebRTC.Audio.ApmCaptureOutputLevelPeakRms", - levels.peak, 1, RmsLevel::kMinLevelDb, 64); - } - - if (submodules_.agc_manager) { - int level = recommended_stream_analog_level_locked(); - data_dumper_->DumpRaw("experimental_gain_control_stream_analog_level", 1, - &level); - } - - // Compute echo-related stats. + // Compute echo-controller stats. if (submodules_.echo_controller) { auto ec_metrics = submodules_.echo_controller->GetMetrics(); capture_.stats.echo_return_loss = ec_metrics.echo_return_loss; @@ -1286,17 +1383,41 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { ec_metrics.echo_return_loss_enhancement; capture_.stats.delay_ms = ec_metrics.delay_ms; } - if (config_.residual_echo_detector.enabled) { - RTC_DCHECK(submodules_.echo_detector); - auto ed_metrics = submodules_.echo_detector->GetMetrics(); - capture_.stats.residual_echo_likelihood = ed_metrics.echo_likelihood; - capture_.stats.residual_echo_likelihood_recent_max = - ed_metrics.echo_likelihood_recent_max; - } // Pass stats for reporting. stats_reporter_.UpdateStatistics(capture_.stats); + if (submodules_.capture_levels_adjuster) { + submodules_.capture_levels_adjuster->ApplyPostLevelAdjustment( + *capture_buffer); + + // If the analog mic gain emulation is active, retrieve the level from the + // analog gain control and set it to mic gain emulator. 
+ if (config_.capture_level_adjustment.analog_mic_gain_emulation.enabled) { + if (submodules_.agc_manager) { + submodules_.capture_levels_adjuster->SetAnalogMicGainLevel( + submodules_.agc_manager->stream_analog_level()); + } else if (submodules_.gain_control) { + submodules_.capture_levels_adjuster->SetAnalogMicGainLevel( + submodules_.gain_control->stream_analog_level()); + } + } + } + + // Temporarily set the output to zero after the stream has been unmuted + // (capture output is again used). The purpose of this is to avoid clicks and + // artefacts in the audio that results when the processing again is + // reactivated after unmuting. + if (!capture_.capture_output_used_last_frame && + capture_.capture_output_used) { + for (size_t ch = 0; ch < capture_buffer->num_channels(); ++ch) { + rtc::ArrayView channel_view(capture_buffer->channels()[ch], + capture_buffer->num_frames()); + std::fill(channel_view.begin(), channel_view.end(), 0.f); + } + } + capture_.capture_output_used_last_frame = capture_.capture_output_used; + capture_.was_stream_delay_set = false; return kNoError; } @@ -1499,16 +1620,29 @@ void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) { void AudioProcessingImpl::set_stream_analog_level(int level) { MutexLock lock_capture(&mutex_capture_); + if (config_.capture_level_adjustment.analog_mic_gain_emulation.enabled) { + // If the analog mic gain is emulated internally, simply cache the level for + // later reporting back as the recommended stream analog level to use. 
+ capture_.cached_stream_analog_level_ = level; + return; + } + if (submodules_.agc_manager) { submodules_.agc_manager->set_stream_analog_level(level); data_dumper_->DumpRaw("experimental_gain_control_set_stream_analog_level", 1, &level); - } else if (submodules_.gain_control) { + return; + } + + if (submodules_.gain_control) { int error = submodules_.gain_control->set_stream_analog_level(level); RTC_DCHECK_EQ(kNoError, error); - } else { - capture_.cached_stream_analog_level_ = level; + return; } + + // If no analog mic gain control functionality is in place, cache the level + // for later reporting back as the recommended stream analog level to use. + capture_.cached_stream_analog_level_ = level; } int AudioProcessingImpl::recommended_stream_analog_level() const { @@ -1517,13 +1651,19 @@ int AudioProcessingImpl::recommended_stream_analog_level() const { } int AudioProcessingImpl::recommended_stream_analog_level_locked() const { - if (submodules_.agc_manager) { - return submodules_.agc_manager->stream_analog_level(); - } else if (submodules_.gain_control) { - return submodules_.gain_control->stream_analog_level(); - } else { + if (config_.capture_level_adjustment.analog_mic_gain_emulation.enabled) { return capture_.cached_stream_analog_level_; } + + if (submodules_.agc_manager) { + return submodules_.agc_manager->stream_analog_level(); + } + + if (submodules_.gain_control) { + return submodules_.gain_control->stream_analog_level(); + } + + return capture_.cached_stream_analog_level_; } bool AudioProcessingImpl::CreateAndAttachAecDump(const std::string& file_name, @@ -1576,14 +1716,6 @@ void AudioProcessingImpl::DetachAecDump() { } } -void AudioProcessingImpl::MutateConfig( - rtc::FunctionView mutator) { - MutexLock lock_render(&mutex_render_); - MutexLock lock_capture(&mutex_capture_); - mutator(&config_); - ApplyConfig(config_); -} - AudioProcessing::Config AudioProcessingImpl::GetConfig() const { MutexLock lock_render(&mutex_render_); MutexLock 
lock_capture(&mutex_capture_); @@ -1595,12 +1727,14 @@ bool AudioProcessingImpl::UpdateActiveSubmoduleStates() { config_.high_pass_filter.enabled, !!submodules_.echo_control_mobile, config_.residual_echo_detector.enabled, !!submodules_.noise_suppressor, !!submodules_.gain_control, !!submodules_.gain_controller2, - config_.pre_amplifier.enabled, capture_nonlocked_.echo_controller_enabled, + config_.pre_amplifier.enabled || config_.capture_level_adjustment.enabled, + capture_nonlocked_.echo_controller_enabled, config_.voice_detection.enabled, !!submodules_.transient_suppressor); } void AudioProcessingImpl::InitializeTransientSuppressor() { - if (config_.transient_suppression.enabled) { + if (config_.transient_suppression.enabled && + !constants_.transient_suppressor_forced_off) { // Attempt to create a transient suppressor, if one is not already created. if (!submodules_.transient_suppressor) { submodules_.transient_suppressor = @@ -1782,11 +1916,12 @@ void AudioProcessingImpl::InitializeGainController1() { num_proc_channels(), config_.gain_controller1.analog_gain_controller.startup_min_volume, config_.gain_controller1.analog_gain_controller.clipped_level_min, - config_.gain_controller1.analog_gain_controller - .enable_agc2_level_estimator, !config_.gain_controller1.analog_gain_controller .enable_digital_adaptive, - capture_nonlocked_.split_rate)); + capture_nonlocked_.split_rate, + config_.gain_controller1.analog_gain_controller.clipped_level_step, + config_.gain_controller1.analog_gain_controller.clipped_ratio_threshold, + config_.gain_controller1.analog_gain_controller.clipped_wait_frames)); if (re_creation) { submodules_.agc_manager->set_stream_analog_level(stream_analog_level); } @@ -1794,7 +1929,8 @@ void AudioProcessingImpl::InitializeGainController1() { submodules_.agc_manager->Initialize(); submodules_.agc_manager->SetupDigitalGainControl( submodules_.gain_control.get()); - submodules_.agc_manager->SetCaptureMuted(capture_.output_will_be_muted); + 
submodules_.agc_manager->HandleCaptureOutputUsedChange( + capture_.capture_output_used); } void AudioProcessingImpl::InitializeGainController2() { @@ -1805,7 +1941,8 @@ void AudioProcessingImpl::InitializeGainController2() { submodules_.gain_controller2.reset(new GainController2()); } - submodules_.gain_controller2->Initialize(proc_fullband_sample_rate_hz()); + submodules_.gain_controller2->Initialize(proc_fullband_sample_rate_hz(), + num_input_channels()); submodules_.gain_controller2->ApplyConfig(config_.gain_controller2); } else { submodules_.gain_controller2.reset(); @@ -1840,12 +1977,27 @@ void AudioProcessingImpl::InitializeNoiseSuppressor() { } } -void AudioProcessingImpl::InitializePreAmplifier() { - if (config_.pre_amplifier.enabled) { - submodules_.pre_amplifier.reset( - new GainApplier(true, config_.pre_amplifier.fixed_gain_factor)); +void AudioProcessingImpl::InitializeCaptureLevelsAdjuster() { + if (config_.pre_amplifier.enabled || + config_.capture_level_adjustment.enabled) { + // Use both the pre-amplifier and the capture level adjustment gains as + // pre-gains. 
+ float pre_gain = 1.f; + if (config_.pre_amplifier.enabled) { + pre_gain *= config_.pre_amplifier.fixed_gain_factor; + } + if (config_.capture_level_adjustment.enabled) { + pre_gain *= config_.capture_level_adjustment.pre_gain_factor; + } + + submodules_.capture_levels_adjuster = + std::make_unique( + config_.capture_level_adjustment.analog_mic_gain_emulation.enabled, + config_.capture_level_adjustment.analog_mic_gain_emulation + .initial_level, + pre_gain, config_.capture_level_adjustment.post_gain_factor); } else { - submodules_.pre_amplifier.reset(); + submodules_.capture_levels_adjuster.reset(); } } @@ -2005,13 +2157,14 @@ void AudioProcessingImpl::RecordAudioProcessingState() { AudioProcessingImpl::ApmCaptureState::ApmCaptureState() : was_stream_delay_set(false), - output_will_be_muted(false), + capture_output_used(true), + capture_output_used_last_frame(true), key_pressed(false), capture_processing_format(kSampleRate16kHz), split_rate(kSampleRate16kHz), echo_path_gain_change(false), prev_analog_mic_level(-1), - prev_pre_amp_gain(-1.f), + prev_pre_adjustment_gain(-1.f), playout_volume(-1), prev_playout_volume(-1) {} diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h index d0eec0eec..c88cfcde9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h @@ -23,6 +23,7 @@ #include "modules/audio_processing/agc/agc_manager_direct.h" #include "modules/audio_processing/agc/gain_control.h" #include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h" #include "modules/audio_processing/echo_control_mobile_impl.h" #include "modules/audio_processing/gain_control_impl.h" #include "modules/audio_processing/gain_controller2.h" @@ -82,6 +83,7 @@ class 
AudioProcessingImpl : public AudioProcessing { void AttachAecDump(std::unique_ptr aec_dump) override; void DetachAecDump() override; void SetRuntimeSetting(RuntimeSetting setting) override; + bool PostRuntimeSetting(RuntimeSetting setting) override; // Capture-side exclusive methods possibly running APM in a // multi-threaded manner. Acquire the capture lock. @@ -96,6 +98,8 @@ class AudioProcessingImpl : public AudioProcessing { bool GetLinearAecOutput( rtc::ArrayView> linear_output) const override; void set_output_will_be_muted(bool muted) override; + void HandleCaptureOutputUsedSetting(bool capture_output_used) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); int set_stream_delay_ms(int delay) override; void set_stream_key_pressed(bool key_pressed) override; void set_stream_analog_level(int level) override; @@ -133,8 +137,6 @@ class AudioProcessingImpl : public AudioProcessing { return stats_reporter_.GetStatistics(); } - // TODO(peah): Remove MutateConfig once the new API allows that. - void MutateConfig(rtc::FunctionView mutator); AudioProcessing::Config GetConfig() const override; protected: @@ -168,7 +170,9 @@ class AudioProcessingImpl : public AudioProcessing { explicit RuntimeSettingEnqueuer( SwapQueue* runtime_settings); ~RuntimeSettingEnqueuer(); - void Enqueue(RuntimeSetting setting); + + // Enqueue setting and return whether the setting was successfully enqueued. 
+ bool Enqueue(RuntimeSetting setting); private: SwapQueue& runtime_settings_; @@ -199,7 +203,7 @@ class AudioProcessingImpl : public AudioProcessing { bool noise_suppressor_enabled, bool adaptive_gain_controller_enabled, bool gain_controller2_enabled, - bool pre_amplifier_enabled, + bool gain_adjustment_enabled, bool echo_controller_enabled, bool voice_detector_enabled, bool transient_suppressor_enabled); @@ -223,7 +227,7 @@ class AudioProcessingImpl : public AudioProcessing { bool noise_suppressor_enabled_ = false; bool adaptive_gain_controller_enabled_ = false; bool gain_controller2_enabled_ = false; - bool pre_amplifier_enabled_ = false; + bool gain_adjustment_enabled_ = false; bool echo_controller_enabled_ = false; bool voice_detector_enabled_ = false; bool transient_suppressor_enabled_ = false; @@ -267,7 +271,8 @@ class AudioProcessingImpl : public AudioProcessing { RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeGainController2() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeNoiseSuppressor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); - void InitializePreAmplifier() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); + void InitializeCaptureLevelsAdjuster() + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializePostProcessor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeAnalyzer() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); @@ -339,6 +344,12 @@ class AudioProcessingImpl : public AudioProcessing { void RecordAudioProcessingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); + // Ensures that overruns in the capture runtime settings queue is properly + // handled by the code, providing safe-fallbacks to mitigate the implications + // of any settings being missed. + void HandleOverrunInCaptureRuntimeSettingsQueue() + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); + // AecDump instance used for optionally logging APM config, input // and output to file in the AEC-dump format defined in debug.proto. 
std::unique_ptr aec_dump_; @@ -383,10 +394,10 @@ class AudioProcessingImpl : public AudioProcessing { std::unique_ptr transient_suppressor; std::unique_ptr capture_post_processor; std::unique_ptr render_pre_processor; - std::unique_ptr pre_amplifier; std::unique_ptr capture_analyzer; std::unique_ptr output_level_estimator; std::unique_ptr voice_detector; + std::unique_ptr capture_levels_adjuster; } submodules_; // State that is written to while holding both the render and capture locks @@ -410,20 +421,28 @@ class AudioProcessingImpl : public AudioProcessing { const struct ApmConstants { ApmConstants(bool multi_channel_render_support, bool multi_channel_capture_support, - bool enforce_split_band_hpf) + bool enforce_split_band_hpf, + bool minimize_processing_for_unused_output, + bool transient_suppressor_forced_off) : multi_channel_render_support(multi_channel_render_support), multi_channel_capture_support(multi_channel_capture_support), - enforce_split_band_hpf(enforce_split_band_hpf) {} + enforce_split_band_hpf(enforce_split_band_hpf), + minimize_processing_for_unused_output( + minimize_processing_for_unused_output), + transient_suppressor_forced_off(transient_suppressor_forced_off) {} bool multi_channel_render_support; bool multi_channel_capture_support; bool enforce_split_band_hpf; + bool minimize_processing_for_unused_output; + bool transient_suppressor_forced_off; } constants_; struct ApmCaptureState { ApmCaptureState(); ~ApmCaptureState(); bool was_stream_delay_set; - bool output_will_be_muted; + bool capture_output_used; + bool capture_output_used_last_frame; bool key_pressed; std::unique_ptr capture_audio; std::unique_ptr capture_fullband_audio; @@ -435,7 +454,7 @@ class AudioProcessingImpl : public AudioProcessing { int split_rate; bool echo_path_gain_change; int prev_analog_mic_level; - float prev_pre_amp_gain; + float prev_pre_adjustment_gain; int playout_volume; int prev_playout_volume; AudioProcessingStats stats; diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc new file mode 100644 index 000000000..cb2336b87 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.h" + +#include + +#include "api/array_view.h" +#include "modules/audio_processing/audio_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { + +AudioSamplesScaler::AudioSamplesScaler(float initial_gain) + : previous_gain_(initial_gain), target_gain_(initial_gain) {} + +void AudioSamplesScaler::Process(AudioBuffer& audio_buffer) { + if (static_cast(audio_buffer.num_frames()) != samples_per_channel_) { + // Update the members depending on audio-buffer length if needed. + RTC_DCHECK_GT(audio_buffer.num_frames(), 0); + samples_per_channel_ = static_cast(audio_buffer.num_frames()); + one_by_samples_per_channel_ = 1.f / samples_per_channel_; + } + + if (target_gain_ == 1.f && previous_gain_ == target_gain_) { + // If only a gain of 1 is to be applied, do an early return without applying + // any gain. + return; + } + + float gain = previous_gain_; + if (previous_gain_ == target_gain_) { + // Apply a non-changing gain. 
+ for (size_t channel = 0; channel < audio_buffer.num_channels(); ++channel) { + rtc::ArrayView channel_view(audio_buffer.channels()[channel], + samples_per_channel_); + for (float& sample : channel_view) { + sample *= gain; + } + } + } else { + const float increment = + (target_gain_ - previous_gain_) * one_by_samples_per_channel_; + + if (increment > 0.f) { + // Apply an increasing gain. + for (size_t channel = 0; channel < audio_buffer.num_channels(); + ++channel) { + gain = previous_gain_; + rtc::ArrayView channel_view(audio_buffer.channels()[channel], + samples_per_channel_); + for (float& sample : channel_view) { + gain = std::min(gain + increment, target_gain_); + sample *= gain; + } + } + } else { + // Apply a decreasing gain. + for (size_t channel = 0; channel < audio_buffer.num_channels(); + ++channel) { + gain = previous_gain_; + rtc::ArrayView channel_view(audio_buffer.channels()[channel], + samples_per_channel_); + for (float& sample : channel_view) { + gain = std::max(gain + increment, target_gain_); + sample *= gain; + } + } + } + } + previous_gain_ = target_gain_; + + // Saturate the samples to be in the S16 range. 
+ for (size_t channel = 0; channel < audio_buffer.num_channels(); ++channel) { + rtc::ArrayView channel_view(audio_buffer.channels()[channel], + samples_per_channel_); + for (float& sample : channel_view) { + constexpr float kMinFloatS16Value = -32768.f; + constexpr float kMaxFloatS16Value = 32767.f; + sample = rtc::SafeClamp(sample, kMinFloatS16Value, kMaxFloatS16Value); + } + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.h new file mode 100644 index 000000000..2ae853394 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_CAPTURE_LEVELS_ADJUSTER_AUDIO_SAMPLES_SCALER_H_ +#define MODULES_AUDIO_PROCESSING_CAPTURE_LEVELS_ADJUSTER_AUDIO_SAMPLES_SCALER_H_ + +#include + +#include "modules/audio_processing/audio_buffer.h" + +namespace webrtc { + +// Handles and applies a gain to the samples in an audio buffer. +// The gain is applied for each sample and any changes in the gain take effect +// gradually (in a linear manner) over one frame. +class AudioSamplesScaler { + public: + // C-tor. The supplied `initial_gain` is used immediately at the first call to + // Process(), i.e., in contrast to the gain supplied by SetGain(...) there is + // no gradual change to the `initial_gain`. 
+ explicit AudioSamplesScaler(float initial_gain); + AudioSamplesScaler(const AudioSamplesScaler&) = delete; + AudioSamplesScaler& operator=(const AudioSamplesScaler&) = delete; + + // Applies the specified gain to the audio in `audio_buffer`. + void Process(AudioBuffer& audio_buffer); + + // Sets the gain to apply to each sample. + void SetGain(float gain) { target_gain_ = gain; } + + private: + float previous_gain_ = 1.f; + float target_gain_ = 1.f; + int samples_per_channel_ = -1; + float one_by_samples_per_channel_ = -1.f; +}; +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_CAPTURE_LEVELS_ADJUSTER_AUDIO_SAMPLES_SCALER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc new file mode 100644 index 000000000..dfda58291 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h" + +#include "modules/audio_processing/audio_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { + +namespace { + +constexpr int kMinAnalogMicGainLevel = 0; +constexpr int kMaxAnalogMicGainLevel = 255; + +float ComputeLevelBasedGain(int emulated_analog_mic_gain_level) { + static_assert( + kMinAnalogMicGainLevel == 0, + "The minimum gain level must be 0 for the maths below to work."); + static_assert(kMaxAnalogMicGainLevel > 0, + "The minimum gain level must be larger than 0 for the maths " + "below to work."); + constexpr float kGainToLevelMultiplier = 1.f / kMaxAnalogMicGainLevel; + + RTC_DCHECK_GE(emulated_analog_mic_gain_level, kMinAnalogMicGainLevel); + RTC_DCHECK_LE(emulated_analog_mic_gain_level, kMaxAnalogMicGainLevel); + return kGainToLevelMultiplier * emulated_analog_mic_gain_level; +} + +float ComputePreGain(float pre_gain, + int emulated_analog_mic_gain_level, + bool emulated_analog_mic_gain_enabled) { + return emulated_analog_mic_gain_enabled + ? 
pre_gain * ComputeLevelBasedGain(emulated_analog_mic_gain_level) + : pre_gain; +} + +} // namespace + +CaptureLevelsAdjuster::CaptureLevelsAdjuster( + bool emulated_analog_mic_gain_enabled, + int emulated_analog_mic_gain_level, + float pre_gain, + float post_gain) + : emulated_analog_mic_gain_enabled_(emulated_analog_mic_gain_enabled), + emulated_analog_mic_gain_level_(emulated_analog_mic_gain_level), + pre_gain_(pre_gain), + pre_adjustment_gain_(ComputePreGain(pre_gain_, + emulated_analog_mic_gain_level_, + emulated_analog_mic_gain_enabled_)), + pre_scaler_(pre_adjustment_gain_), + post_scaler_(post_gain) {} + +void CaptureLevelsAdjuster::ApplyPreLevelAdjustment(AudioBuffer& audio_buffer) { + pre_scaler_.Process(audio_buffer); +} + +void CaptureLevelsAdjuster::ApplyPostLevelAdjustment( + AudioBuffer& audio_buffer) { + post_scaler_.Process(audio_buffer); +} + +void CaptureLevelsAdjuster::SetPreGain(float pre_gain) { + pre_gain_ = pre_gain; + UpdatePreAdjustmentGain(); +} + +void CaptureLevelsAdjuster::SetPostGain(float post_gain) { + post_scaler_.SetGain(post_gain); +} + +void CaptureLevelsAdjuster::SetAnalogMicGainLevel(int level) { + RTC_DCHECK_GE(level, kMinAnalogMicGainLevel); + RTC_DCHECK_LE(level, kMaxAnalogMicGainLevel); + int clamped_level = + rtc::SafeClamp(level, kMinAnalogMicGainLevel, kMaxAnalogMicGainLevel); + + emulated_analog_mic_gain_level_ = clamped_level; + UpdatePreAdjustmentGain(); +} + +void CaptureLevelsAdjuster::UpdatePreAdjustmentGain() { + pre_adjustment_gain_ = + ComputePreGain(pre_gain_, emulated_analog_mic_gain_level_, + emulated_analog_mic_gain_enabled_); + pre_scaler_.SetGain(pre_adjustment_gain_); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h new file mode 100644 index 000000000..38b68ad06 --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.h @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_AUDIO_PROCESSING_CAPTURE_LEVELS_ADJUSTER_CAPTURE_LEVELS_ADJUSTER_H_ +#define MODULES_AUDIO_PROCESSING_CAPTURE_LEVELS_ADJUSTER_CAPTURE_LEVELS_ADJUSTER_H_ + +#include + +#include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.h" + +namespace webrtc { + +// Adjusts the level of the capture signal before and after all capture-side +// processing is done using a combination of explicitly specified gains +// and an emulated analog gain functionality where a specified analog level +// results in an additional gain. The pre-adjustment is achieved by combining +// the gain value `pre_gain` and the level `emulated_analog_mic_gain_level` to +// form a combined gain of `pre_gain`*`emulated_analog_mic_gain_level`/255 which +// is multiplied to each sample. The intention of the +// `emulated_analog_mic_gain_level` is to be controlled by the analog AGC +// functionality and to produce an emulated analog mic gain equal to +// `emulated_analog_mic_gain_level`/255. The post level adjustment is achieved +// by multiplying each sample with the value of `post_gain`. Any changes in the +// gains take are done smoothly over one frame and the scaled samples are +// clamped to fit into the allowed S16 sample range. +class CaptureLevelsAdjuster { + public: + // C-tor. The values for the level and the gains must fulfill + // 0 <= emulated_analog_mic_gain_level <= 255. 
+ // 0.f <= pre_gain. + // 0.f <= post_gain. + CaptureLevelsAdjuster(bool emulated_analog_mic_gain_enabled, + int emulated_analog_mic_gain_level, + float pre_gain, + float post_gain); + CaptureLevelsAdjuster(const CaptureLevelsAdjuster&) = delete; + CaptureLevelsAdjuster& operator=(const CaptureLevelsAdjuster&) = delete; + + // Adjusts the level of the signal. This should be called before any of the + // other processing is performed. + void ApplyPreLevelAdjustment(AudioBuffer& audio_buffer); + + // Adjusts the level of the signal. This should be called after all of the + // other processing have been performed. + void ApplyPostLevelAdjustment(AudioBuffer& audio_buffer); + + // Sets the gain to apply to each sample before any of the other processing is + // performed. + void SetPreGain(float pre_gain); + + // Returns the total pre-adjustment gain applied, comprising both the pre_gain + // as well as the gain from the emulated analog mic, to each sample before any + // of the other processing is performed. + float GetPreAdjustmentGain() const { return pre_adjustment_gain_; } + + // Sets the gain to apply to each sample after all of the other processing + // have been performed. + void SetPostGain(float post_gain); + + // Sets the analog gain level to use for the emulated analog gain. + // `level` must be in the range [0...255]. + void SetAnalogMicGainLevel(int level); + + // Returns the current analog gain level used for the emulated analog gain. + int GetAnalogMicGainLevel() const { return emulated_analog_mic_gain_level_; } + + private: + // Updates the value of `pre_adjustment_gain_` based on the supplied values + // for `pre_gain` and `emulated_analog_mic_gain_level_`. 
+ void UpdatePreAdjustmentGain(); + + const bool emulated_analog_mic_gain_enabled_; + int emulated_analog_mic_gain_level_; + float pre_gain_; + float pre_adjustment_gain_; + AudioSamplesScaler pre_scaler_; + AudioSamplesScaler post_scaler_; +}; +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_CAPTURE_LEVELS_ADJUSTER_CAPTURE_LEVELS_ADJUSTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h index d8532c574..2c88c4e46 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h @@ -16,6 +16,10 @@ namespace webrtc { +constexpr int RuntimeSettingQueueSize() { + return 100; +} + static inline size_t ChannelsFromLayout(AudioProcessing::ChannelLayout layout) { switch (layout) { case AudioProcessing::kMono: diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc index 6561bebc6..74b63c943 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.cc @@ -16,6 +16,7 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -23,29 +24,35 @@ namespace webrtc { int GainController2::instance_count_ = 0; GainController2::GainController2() - : data_dumper_( - new ApmDataDumper(rtc::AtomicOps::Increment(&instance_count_))), + : data_dumper_(rtc::AtomicOps::Increment(&instance_count_)), gain_applier_(/*hard_clip_samples=*/false, - /*initial_gain_factor=*/0.f), - limiter_(static_cast(48000), data_dumper_.get(), "Agc2") { + /*initial_gain_factor=*/0.0f), + limiter_(static_cast(48000), &data_dumper_, "Agc2"), + 
calls_since_last_limiter_log_(0) { if (config_.adaptive_digital.enabled) { - adaptive_agc_.reset(new AdaptiveAgc(data_dumper_.get())); + adaptive_agc_ = + std::make_unique(&data_dumper_, config_.adaptive_digital); } } GainController2::~GainController2() = default; -void GainController2::Initialize(int sample_rate_hz) { +void GainController2::Initialize(int sample_rate_hz, int num_channels) { RTC_DCHECK(sample_rate_hz == AudioProcessing::kSampleRate8kHz || sample_rate_hz == AudioProcessing::kSampleRate16kHz || sample_rate_hz == AudioProcessing::kSampleRate32kHz || sample_rate_hz == AudioProcessing::kSampleRate48kHz); limiter_.SetSampleRate(sample_rate_hz); - data_dumper_->InitiateNewSetOfRecordings(); - data_dumper_->DumpRaw("sample_rate_hz", sample_rate_hz); + if (adaptive_agc_) { + adaptive_agc_->Initialize(sample_rate_hz, num_channels); + } + data_dumper_.InitiateNewSetOfRecordings(); + data_dumper_.DumpRaw("sample_rate_hz", sample_rate_hz); + calls_since_last_limiter_log_ = 0; } void GainController2::Process(AudioBuffer* audio) { + data_dumper_.DumpRaw("agc2_notified_analog_level", analog_level_); AudioFrameView float_frame(audio->channels(), audio->num_channels(), audio->num_frames()); // Apply fixed gain first, then the adaptive one. @@ -54,11 +61,23 @@ void GainController2::Process(AudioBuffer* audio) { adaptive_agc_->Process(float_frame, limiter_.LastAudioLevel()); } limiter_.Process(float_frame); + + // Log limiter stats every 30 seconds. 
+ ++calls_since_last_limiter_log_; + if (calls_since_last_limiter_log_ == 3000) { + calls_since_last_limiter_log_ = 0; + InterpolatedGainCurve::Stats stats = limiter_.GetGainCurveStats(); + RTC_LOG(LS_INFO) << "AGC2 limiter stats" + << " | identity: " << stats.look_ups_identity_region + << " | knee: " << stats.look_ups_knee_region + << " | limiter: " << stats.look_ups_limiter_region + << " | saturation: " << stats.look_ups_saturation_region; + } } void GainController2::NotifyAnalogLevel(int level) { if (analog_level_ != level && adaptive_agc_) { - adaptive_agc_->Reset(); + adaptive_agc_->HandleInputGainChange(); } analog_level_ = level; } @@ -75,7 +94,8 @@ void GainController2::ApplyConfig( } gain_applier_.SetGainFactor(DbToRatio(config_.fixed_digital.gain_db)); if (config_.adaptive_digital.enabled) { - adaptive_agc_.reset(new AdaptiveAgc(data_dumper_.get(), config_)); + adaptive_agc_ = + std::make_unique(&data_dumper_, config_.adaptive_digital); } else { adaptive_agc_.reset(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h index da27fdcc6..ce758c783 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/gain_controller2.h @@ -18,11 +18,11 @@ #include "modules/audio_processing/agc2/gain_applier.h" #include "modules/audio_processing/agc2/limiter.h" #include "modules/audio_processing/include/audio_processing.h" +#include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/constructor_magic.h" namespace webrtc { -class ApmDataDumper; class AudioBuffer; // Gain Controller 2 aims to automatically adjust levels by acting on the @@ -30,9 +30,11 @@ class AudioBuffer; class GainController2 { public: GainController2(); + GainController2(const GainController2&) = delete; + GainController2& operator=(const GainController2&) = delete; 
~GainController2(); - void Initialize(int sample_rate_hz); + void Initialize(int sample_rate_hz, int num_channels); void Process(AudioBuffer* audio); void NotifyAnalogLevel(int level); @@ -41,14 +43,13 @@ class GainController2 { private: static int instance_count_; - std::unique_ptr data_dumper_; + ApmDataDumper data_dumper_; AudioProcessing::Config::GainController2 config_; GainApplier gain_applier_; std::unique_ptr adaptive_agc_; Limiter limiter_; + int calls_since_last_limiter_log_; int analog_level_ = -1; - - RTC_DISALLOW_COPY_AND_ASSIGN(GainController2); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.h index ed5acb094..a7769d997 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/aec_dump.h @@ -15,9 +15,9 @@ #include +#include "absl/base/attributes.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/deprecation.h" namespace webrtc { @@ -76,7 +76,8 @@ class AecDump { // Logs Event::Type INIT message. 
virtual void WriteInitMessage(const ProcessingConfig& api_format, int64_t time_now_ms) = 0; - RTC_DEPRECATED void WriteInitMessage(const ProcessingConfig& api_format) { + ABSL_DEPRECATED("") + void WriteInitMessage(const ProcessingConfig& api_format) { WriteInitMessage(api_format, 0); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc index 04336b611..29dcd6604 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc @@ -46,25 +46,17 @@ std::string GainController1ModeToString(const Agc1Config::Mode& mode) { RTC_CHECK_NOTREACHED(); } -std::string GainController2LevelEstimatorToString( - const Agc2Config::LevelEstimator& level) { - switch (level) { - case Agc2Config::LevelEstimator::kRms: - return "Rms"; - case Agc2Config::LevelEstimator::kPeak: - return "Peak"; +std::string GainController2NoiseEstimatorToString( + const Agc2Config::NoiseEstimator& type) { + switch (type) { + case Agc2Config::NoiseEstimator::kStationaryNoise: + return "StationaryNoise"; + case Agc2Config::NoiseEstimator::kNoiseFloor: + return "NoiseFloor"; } RTC_CHECK_NOTREACHED(); } -int GetDefaultMaxInternalRate() { -#ifdef WEBRTC_ARCH_ARM_FAMILY - return 32000; -#else - return 48000; -#endif -} - } // namespace constexpr int AudioProcessing::kNativeSampleRatesHz[]; @@ -72,9 +64,6 @@ constexpr int AudioProcessing::kNativeSampleRatesHz[]; void CustomProcessing::SetRuntimeSetting( AudioProcessing::RuntimeSetting setting) {} -AudioProcessing::Config::Pipeline::Pipeline() - : maximum_internal_processing_rate(GetDefaultMaxInternalRate()) {} - bool Agc1Config::operator==(const Agc1Config& rhs) const { const auto& analog_lhs = analog_gain_controller; const auto& analog_rhs = rhs.analog_gain_controller; @@ -87,93 +76,121 @@ bool 
Agc1Config::operator==(const Agc1Config& rhs) const { analog_lhs.enabled == analog_rhs.enabled && analog_lhs.startup_min_volume == analog_rhs.startup_min_volume && analog_lhs.clipped_level_min == analog_rhs.clipped_level_min && - analog_lhs.enable_agc2_level_estimator == - analog_rhs.enable_agc2_level_estimator && analog_lhs.enable_digital_adaptive == - analog_rhs.enable_digital_adaptive; + analog_rhs.enable_digital_adaptive && + analog_lhs.clipped_level_step == analog_rhs.clipped_level_step && + analog_lhs.clipped_ratio_threshold == + analog_rhs.clipped_ratio_threshold && + analog_lhs.clipped_wait_frames == analog_rhs.clipped_wait_frames; +} + +bool Agc2Config::AdaptiveDigital::operator==( + const Agc2Config::AdaptiveDigital& rhs) const { + return enabled == rhs.enabled && dry_run == rhs.dry_run && + noise_estimator == rhs.noise_estimator && + vad_reset_period_ms == rhs.vad_reset_period_ms && + adjacent_speech_frames_threshold == + rhs.adjacent_speech_frames_threshold && + max_gain_change_db_per_second == rhs.max_gain_change_db_per_second && + max_output_noise_level_dbfs == rhs.max_output_noise_level_dbfs && + sse2_allowed == rhs.sse2_allowed && avx2_allowed == rhs.avx2_allowed && + neon_allowed == rhs.neon_allowed; } bool Agc2Config::operator==(const Agc2Config& rhs) const { - const auto& adaptive_lhs = adaptive_digital; - const auto& adaptive_rhs = rhs.adaptive_digital; - return enabled == rhs.enabled && fixed_digital.gain_db == rhs.fixed_digital.gain_db && - adaptive_lhs.enabled == adaptive_rhs.enabled && - adaptive_lhs.vad_probability_attack == - adaptive_rhs.vad_probability_attack && - adaptive_lhs.level_estimator == adaptive_rhs.level_estimator && - adaptive_lhs.level_estimator_adjacent_speech_frames_threshold == - adaptive_rhs.level_estimator_adjacent_speech_frames_threshold && - adaptive_lhs.use_saturation_protector == - adaptive_rhs.use_saturation_protector && - adaptive_lhs.initial_saturation_margin_db == - adaptive_rhs.initial_saturation_margin_db && - 
adaptive_lhs.extra_saturation_margin_db == - adaptive_rhs.extra_saturation_margin_db && - adaptive_lhs.gain_applier_adjacent_speech_frames_threshold == - adaptive_rhs.gain_applier_adjacent_speech_frames_threshold && - adaptive_lhs.max_gain_change_db_per_second == - adaptive_rhs.max_gain_change_db_per_second && - adaptive_lhs.max_output_noise_level_dbfs == - adaptive_rhs.max_output_noise_level_dbfs; + adaptive_digital == rhs.adaptive_digital; +} + +bool AudioProcessing::Config::CaptureLevelAdjustment::operator==( + const AudioProcessing::Config::CaptureLevelAdjustment& rhs) const { + return enabled == rhs.enabled && pre_gain_factor == rhs.pre_gain_factor && + post_gain_factor && rhs.post_gain_factor && + analog_mic_gain_emulation == rhs.analog_mic_gain_emulation; +} + +bool AudioProcessing::Config::CaptureLevelAdjustment::AnalogMicGainEmulation:: +operator==(const AudioProcessing::Config::CaptureLevelAdjustment:: + AnalogMicGainEmulation& rhs) const { + return enabled == rhs.enabled && initial_level == rhs.initial_level; } std::string AudioProcessing::Config::ToString() const { char buf[2048]; rtc::SimpleStringBuilder builder(buf); - builder << "AudioProcessing::Config{ " - "pipeline: {" - "maximum_internal_processing_rate: " - << pipeline.maximum_internal_processing_rate - << ", multi_channel_render: " << pipeline.multi_channel_render - << ", multi_channel_capture: " << pipeline.multi_channel_capture - << "}, pre_amplifier: { enabled: " << pre_amplifier.enabled - << ", fixed_gain_factor: " << pre_amplifier.fixed_gain_factor - << " }, high_pass_filter: { enabled: " << high_pass_filter.enabled - << " }, echo_canceller: { enabled: " << echo_canceller.enabled - << ", mobile_mode: " << echo_canceller.mobile_mode - << ", enforce_high_pass_filtering: " - << echo_canceller.enforce_high_pass_filtering - << " }, noise_suppression: { enabled: " << noise_suppression.enabled - << ", level: " - << NoiseSuppressionLevelToString(noise_suppression.level) - << " }, 
transient_suppression: { enabled: " - << transient_suppression.enabled - << " }, voice_detection: { enabled: " << voice_detection.enabled - << " }, gain_controller1: { enabled: " << gain_controller1.enabled - << ", mode: " << GainController1ModeToString(gain_controller1.mode) - << ", target_level_dbfs: " << gain_controller1.target_level_dbfs - << ", compression_gain_db: " << gain_controller1.compression_gain_db - << ", enable_limiter: " << gain_controller1.enable_limiter - << ", analog_level_minimum: " << gain_controller1.analog_level_minimum - << ", analog_level_maximum: " << gain_controller1.analog_level_maximum - << " }, gain_controller2: { enabled: " << gain_controller2.enabled - << ", fixed_digital: { gain_db: " - << gain_controller2.fixed_digital.gain_db - << "}, adaptive_digital: { enabled: " - << gain_controller2.adaptive_digital.enabled - << ", level_estimator: { type: " - << GainController2LevelEstimatorToString( - gain_controller2.adaptive_digital.level_estimator) - << ", adjacent_speech_frames_threshold: " - << gain_controller2.adaptive_digital - .level_estimator_adjacent_speech_frames_threshold - << ", initial_saturation_margin_db: " - << gain_controller2.adaptive_digital.initial_saturation_margin_db - << ", extra_saturation_margin_db: " - << gain_controller2.adaptive_digital.extra_saturation_margin_db - << "}, gain_applier: { adjacent_speech_frames_threshold: " - << gain_controller2.adaptive_digital - .gain_applier_adjacent_speech_frames_threshold - << ", max_gain_change_db_per_second: " - << gain_controller2.adaptive_digital.max_gain_change_db_per_second - << ", max_output_noise_level_dbfs: " - << gain_controller2.adaptive_digital.max_output_noise_level_dbfs - << " } }, residual_echo_detector: { enabled: " - << residual_echo_detector.enabled - << " }, level_estimation: { enabled: " << level_estimation.enabled - << " }}}"; + builder + << "AudioProcessing::Config{ " + "pipeline: { " + "maximum_internal_processing_rate: " + << 
pipeline.maximum_internal_processing_rate + << ", multi_channel_render: " << pipeline.multi_channel_render + << ", multi_channel_capture: " << pipeline.multi_channel_capture + << " }, pre_amplifier: { enabled: " << pre_amplifier.enabled + << ", fixed_gain_factor: " << pre_amplifier.fixed_gain_factor + << " },capture_level_adjustment: { enabled: " + << capture_level_adjustment.enabled + << ", pre_gain_factor: " << capture_level_adjustment.pre_gain_factor + << ", post_gain_factor: " << capture_level_adjustment.post_gain_factor + << ", analog_mic_gain_emulation: { enabled: " + << capture_level_adjustment.analog_mic_gain_emulation.enabled + << ", initial_level: " + << capture_level_adjustment.analog_mic_gain_emulation.initial_level + << " }}, high_pass_filter: { enabled: " << high_pass_filter.enabled + << " }, echo_canceller: { enabled: " << echo_canceller.enabled + << ", mobile_mode: " << echo_canceller.mobile_mode + << ", enforce_high_pass_filtering: " + << echo_canceller.enforce_high_pass_filtering + << " }, noise_suppression: { enabled: " << noise_suppression.enabled + << ", level: " << NoiseSuppressionLevelToString(noise_suppression.level) + << " }, transient_suppression: { enabled: " + << transient_suppression.enabled + << " }, voice_detection: { enabled: " << voice_detection.enabled + << " }, gain_controller1: { enabled: " << gain_controller1.enabled + << ", mode: " << GainController1ModeToString(gain_controller1.mode) + << ", target_level_dbfs: " << gain_controller1.target_level_dbfs + << ", compression_gain_db: " << gain_controller1.compression_gain_db + << ", enable_limiter: " << gain_controller1.enable_limiter + << ", analog_level_minimum: " << gain_controller1.analog_level_minimum + << ", analog_level_maximum: " << gain_controller1.analog_level_maximum + << ", analog_gain_controller { enabled: " + << gain_controller1.analog_gain_controller.enabled + << ", startup_min_volume: " + << gain_controller1.analog_gain_controller.startup_min_volume + << ", 
clipped_level_min: " + << gain_controller1.analog_gain_controller.clipped_level_min + << ", enable_digital_adaptive: " + << gain_controller1.analog_gain_controller.enable_digital_adaptive + << ", clipped_level_step: " + << gain_controller1.analog_gain_controller.clipped_level_step + << ", clipped_ratio_threshold: " + << gain_controller1.analog_gain_controller.clipped_ratio_threshold + << ", clipped_wait_frames: " + << gain_controller1.analog_gain_controller.clipped_wait_frames + << " }}, gain_controller2: { enabled: " << gain_controller2.enabled + << ", fixed_digital: { gain_db: " + << gain_controller2.fixed_digital.gain_db + << " }, adaptive_digital: { enabled: " + << gain_controller2.adaptive_digital.enabled + << ", dry_run: " << gain_controller2.adaptive_digital.dry_run + << ", noise_estimator: " + << GainController2NoiseEstimatorToString( + gain_controller2.adaptive_digital.noise_estimator) + << ", vad_reset_period_ms: " + << gain_controller2.adaptive_digital.vad_reset_period_ms + << ", adjacent_speech_frames_threshold: " + << gain_controller2.adaptive_digital.adjacent_speech_frames_threshold + << ", max_gain_change_db_per_second: " + << gain_controller2.adaptive_digital.max_gain_change_db_per_second + << ", max_output_noise_level_dbfs: " + << gain_controller2.adaptive_digital.max_output_noise_level_dbfs + << ", sse2_allowed: " << gain_controller2.adaptive_digital.sse2_allowed + << ", avx2_allowed: " << gain_controller2.adaptive_digital.avx2_allowed + << ", neon_allowed: " << gain_controller2.adaptive_digital.neon_allowed + << "}}, residual_echo_detector: { enabled: " + << residual_echo_detector.enabled + << " }, level_estimation: { enabled: " << level_estimation.enabled + << " }}"; return builder.str(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h index e85ac0c63..66220979d 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h @@ -32,7 +32,6 @@ #include "modules/audio_processing/include/config.h" #include "rtc_base/arraysize.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/deprecation.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/system/rtc_export.h" @@ -60,9 +59,9 @@ class CustomProcessing; // // Must be provided through AudioProcessingBuilder().Create(config). #if defined(WEBRTC_CHROMIUM_BUILD) -static const int kAgcStartupMinVolume = 85; +static constexpr int kAgcStartupMinVolume = 85; #else -static const int kAgcStartupMinVolume = 0; +static constexpr int kAgcStartupMinVolume = 0; #endif // defined(WEBRTC_CHROMIUM_BUILD) static constexpr int kClippedLevelMin = 70; @@ -72,32 +71,13 @@ static constexpr int kClippedLevelMin = 70; struct ExperimentalAgc { ExperimentalAgc() = default; explicit ExperimentalAgc(bool enabled) : enabled(enabled) {} - ExperimentalAgc(bool enabled, - bool enabled_agc2_level_estimator, - bool digital_adaptive_disabled) - : enabled(enabled), - enabled_agc2_level_estimator(enabled_agc2_level_estimator), - digital_adaptive_disabled(digital_adaptive_disabled) {} - // Deprecated constructor: will be removed. 
- ExperimentalAgc(bool enabled, - bool enabled_agc2_level_estimator, - bool digital_adaptive_disabled, - bool analyze_before_aec) - : enabled(enabled), - enabled_agc2_level_estimator(enabled_agc2_level_estimator), - digital_adaptive_disabled(digital_adaptive_disabled) {} ExperimentalAgc(bool enabled, int startup_min_volume) : enabled(enabled), startup_min_volume(startup_min_volume) {} - ExperimentalAgc(bool enabled, int startup_min_volume, int clipped_level_min) - : enabled(enabled), - startup_min_volume(startup_min_volume), - clipped_level_min(clipped_level_min) {} static const ConfigOptionID identifier = ConfigOptionID::kExperimentalAgc; bool enabled = true; int startup_min_volume = kAgcStartupMinVolume; // Lowest microphone level that will be applied in response to clipping. int clipped_level_min = kClippedLevelMin; - bool enabled_agc2_level_estimator = false; bool digital_adaptive_disabled = false; }; @@ -187,7 +167,7 @@ struct ExperimentalNs { // analog_level = apm->recommended_stream_analog_level(); // has_voice = apm->stream_has_voice(); // -// // Repeate render and capture processing for the duration of the call... +// // Repeat render and capture processing for the duration of the call... // // Start a new call... // apm->Initialize(); // @@ -214,13 +194,9 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Sets the properties of the audio processing pipeline. struct RTC_EXPORT Pipeline { - Pipeline(); - // Maximum allowed processing rate used internally. May only be set to - // 32000 or 48000 and any differing values will be treated as 48000. The - // default rate is currently selected based on the CPU architecture, but - // that logic may change. - int maximum_internal_processing_rate; + // 32000 or 48000 and any differing values will be treated as 48000. + int maximum_internal_processing_rate = 48000; // Allow multi-channel processing of render audio. 
bool multi_channel_render = false; // Allow multi-channel processing of capture audio when AEC3 is active @@ -230,11 +206,37 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Enabled the pre-amplifier. It amplifies the capture signal // before any other processing is done. + // TODO(webrtc:5298): Deprecate and use the pre-gain functionality in + // capture_level_adjustment instead. struct PreAmplifier { bool enabled = false; - float fixed_gain_factor = 1.f; + float fixed_gain_factor = 1.0f; } pre_amplifier; + // Functionality for general level adjustment in the capture pipeline. This + // should not be used together with the legacy PreAmplifier functionality. + struct CaptureLevelAdjustment { + bool operator==(const CaptureLevelAdjustment& rhs) const; + bool operator!=(const CaptureLevelAdjustment& rhs) const { + return !(*this == rhs); + } + bool enabled = false; + // The `pre_gain_factor` scales the signal before any processing is done. + float pre_gain_factor = 1.0f; + // The `post_gain_factor` scales the signal after all processing is done. + float post_gain_factor = 1.0f; + struct AnalogMicGainEmulation { + bool operator==(const AnalogMicGainEmulation& rhs) const; + bool operator!=(const AnalogMicGainEmulation& rhs) const { + return !(*this == rhs); + } + bool enabled = false; + // Initial analog gain level to use for the emulated analog gain. Must + // be in the range [0...255]. + int initial_level = 255; + } analog_mic_gain_emulation; + } capture_level_adjustment; + struct HighPassFilter { bool enabled = false; bool apply_in_full_band = true; @@ -273,7 +275,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // in the analog mode, prescribing an analog gain to be applied at the audio // HAL. // Recommended to be enabled on the client-side. 
- struct GainController1 { + struct RTC_EXPORT GainController1 { bool operator==(const GainController1& rhs) const; bool operator!=(const GainController1& rhs) const { return !(*this == rhs); @@ -331,8 +333,16 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Lowest analog microphone level that will be applied in response to // clipping. int clipped_level_min = kClippedLevelMin; - bool enable_agc2_level_estimator = false; bool enable_digital_adaptive = true; + // Amount the microphone level is lowered with every clipping event. + // Limited to (0, 255]. + int clipped_level_step = 15; + // Proportion of clipped samples required to declare a clipping event. + // Limited to (0.f, 1.f). + float clipped_ratio_threshold = 0.1f; + // Time in frames to wait after a clipping event before checking again. + // Limited to values higher than 0. + int clipped_wait_frames = 300; } analog_gain_controller; } gain_controller1; @@ -342,29 +352,44 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // setting |fixed_gain_db|, the limiter can be turned into a compressor that // first applies a fixed gain. The adaptive digital AGC can be turned off by // setting |adaptive_digital_mode=false|. - struct GainController2 { + struct RTC_EXPORT GainController2 { bool operator==(const GainController2& rhs) const; bool operator!=(const GainController2& rhs) const { return !(*this == rhs); } + // TODO(crbug.com/webrtc/7494): Remove `LevelEstimator`. 
enum LevelEstimator { kRms, kPeak }; + enum NoiseEstimator { kStationaryNoise, kNoiseFloor }; bool enabled = false; - struct { - float gain_db = 0.f; + struct FixedDigital { + float gain_db = 0.0f; } fixed_digital; - struct { + struct RTC_EXPORT AdaptiveDigital { + bool operator==(const AdaptiveDigital& rhs) const; + bool operator!=(const AdaptiveDigital& rhs) const { + return !(*this == rhs); + } + bool enabled = false; - float vad_probability_attack = 1.f; + // Run the adaptive digital controller but the signal is not modified. + bool dry_run = false; + NoiseEstimator noise_estimator = kNoiseFloor; + int vad_reset_period_ms = 1500; + int adjacent_speech_frames_threshold = 12; + float max_gain_change_db_per_second = 3.0f; + float max_output_noise_level_dbfs = -50.0f; + bool sse2_allowed = true; + bool avx2_allowed = true; + bool neon_allowed = true; + // TODO(crbug.com/webrtc/7494): Remove deprecated settings below. + float vad_probability_attack = 1.0f; LevelEstimator level_estimator = kRms; - int level_estimator_adjacent_speech_frames_threshold = 1; - // TODO(crbug.com/webrtc/7494): Remove `use_saturation_protector`. 
+ int level_estimator_adjacent_speech_frames_threshold = 12; bool use_saturation_protector = true; - float initial_saturation_margin_db = 20.f; - float extra_saturation_margin_db = 2.f; - int gain_applier_adjacent_speech_frames_threshold = 1; - float max_gain_change_db_per_second = 3.f; - float max_output_noise_level_dbfs = -50.f; + float initial_saturation_margin_db = 25.0f; + float extra_saturation_margin_db = 5.0f; + int gain_applier_adjacent_speech_frames_threshold = 12; } adaptive_digital; } gain_controller2; @@ -403,6 +428,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { kPlayoutVolumeChange, kCustomRenderProcessingRuntimeSetting, kPlayoutAudioDeviceChange, + kCapturePostGain, kCaptureOutputUsed }; @@ -412,14 +438,17 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { int max_volume; // Maximum play-out volume. }; - RuntimeSetting() : type_(Type::kNotSpecified), value_(0.f) {} + RuntimeSetting() : type_(Type::kNotSpecified), value_(0.0f) {} ~RuntimeSetting() = default; static RuntimeSetting CreateCapturePreGain(float gain) { - RTC_DCHECK_GE(gain, 1.f) << "Attenuation is not allowed."; return {Type::kCapturePreGain, gain}; } + static RuntimeSetting CreateCapturePostGain(float gain) { + return {Type::kCapturePostGain, gain}; + } + // Corresponds to Config::GainController1::compression_gain_db, but for // runtime configuration. static RuntimeSetting CreateCompressionGainDb(int gain_db) { @@ -431,8 +460,8 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Corresponds to Config::GainController2::fixed_digital::gain_db, but for // runtime configuration. 
static RuntimeSetting CreateCaptureFixedPostGain(float gain_db) { - RTC_DCHECK_GE(gain_db, 0.f); - RTC_DCHECK_LE(gain_db, 90.f); + RTC_DCHECK_GE(gain_db, 0.0f); + RTC_DCHECK_LE(gain_db, 90.0f); return {Type::kCaptureFixedPostGain, gain_db}; } @@ -453,8 +482,9 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { return {Type::kCustomRenderProcessingRuntimeSetting, payload}; } - static RuntimeSetting CreateCaptureOutputUsedSetting(bool payload) { - return {Type::kCaptureOutputUsed, payload}; + static RuntimeSetting CreateCaptureOutputUsedSetting( + bool capture_output_used) { + return {Type::kCaptureOutputUsed, capture_output_used}; } Type type() const { return type_; } @@ -546,12 +576,17 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // Set to true when the output of AudioProcessing will be muted or in some // other way not used. Ideally, the captured audio would still be processed, // but some components may change behavior based on this information. - // Default false. + // Default false. This method takes a lock. To achieve this in a lock-less + // manner the PostRuntimeSetting can instead be used. virtual void set_output_will_be_muted(bool muted) = 0; - // Enqueue a runtime setting. + // Enqueues a runtime setting. virtual void SetRuntimeSetting(RuntimeSetting setting) = 0; + // Enqueues a runtime setting. Returns a bool indicating whether the + // enqueueing was successfull. + virtual bool PostRuntimeSetting(RuntimeSetting setting) = 0; + // Accepts and produces a 10 ms frame interleaved 16 bit integer audio as // specified in |input_config| and |output_config|. |src| and |dest| may use // the same memory, if desired. 
@@ -713,7 +748,7 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { static constexpr int kMaxNativeSampleRateHz = kNativeSampleRatesHz[kNumNativeSampleRates - 1]; - static const int kChunkSizeMs = 10; + static constexpr int kChunkSizeMs = 10; }; class RTC_EXPORT AudioProcessingBuilder { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h index 2055f7e51..46c5f0efb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h @@ -67,7 +67,7 @@ class MockEchoControl : public EchoControl { MOCK_METHOD(bool, ActiveProcessing, (), (const, override)); }; -class MockAudioProcessing : public ::testing::NiceMock { +class MockAudioProcessing : public AudioProcessing { public: MockAudioProcessing() {} @@ -96,6 +96,7 @@ class MockAudioProcessing : public ::testing::NiceMock { MOCK_METHOD(size_t, num_reverse_channels, (), (const, override)); MOCK_METHOD(void, set_output_will_be_muted, (bool muted), (override)); MOCK_METHOD(void, SetRuntimeSetting, (RuntimeSetting setting), (override)); + MOCK_METHOD(bool, PostRuntimeSetting, (RuntimeSetting setting), (override)); MOCK_METHOD(int, ProcessStream, (const int16_t* const src, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.cc index 917df60c9..445248b0b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.cc @@ -61,6 +61,7 @@ ApmDataDumper::~ApmDataDumper() = default; #if WEBRTC_APM_DEBUG_DUMP == 1 bool ApmDataDumper::recording_activated_ = false; +absl::optional ApmDataDumper::dump_set_to_use_; char 
ApmDataDumper::output_dir_[] = ""; FILE* ApmDataDumper::GetRawFile(const char* name) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h index 1824fdd2a..9c2ac3be5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/logging/apm_data_dumper.h @@ -21,6 +21,7 @@ #include #endif +#include "absl/types/optional.h" #include "api/array_view.h" #if WEBRTC_APM_DEBUG_DUMP == 1 #include "common_audio/wav_file.h" @@ -64,6 +65,27 @@ class ApmDataDumper { #endif } + // Returns whether dumping functionality is enabled/available. + static bool IsAvailable() { +#if WEBRTC_APM_DEBUG_DUMP == 1 + return true; +#else + return false; +#endif + } + + // Default dump set. + static constexpr size_t kDefaultDumpSet = 0; + + // Specifies what dump set to use. All dump commands with a different dump set + // than the one specified will be discarded. If not specificed, all dump sets + // will be used. + static void SetDumpSetToUse(int dump_set_to_use) { +#if WEBRTC_APM_DEBUG_DUMP == 1 + dump_set_to_use_ = dump_set_to_use; +#endif + } + // Set an optional output directory. static void SetOutputDirectory(const std::string& output_dir) { #if WEBRTC_APM_DEBUG_DUMP == 1 @@ -82,8 +104,11 @@ class ApmDataDumper { // Methods for performing dumping of data of various types into // various formats. 
- void DumpRaw(const char* name, double v) { + void DumpRaw(const char* name, double v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(&v, sizeof(v), 1, file); @@ -91,8 +116,14 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, size_t v_length, const double* v) { + void DumpRaw(const char* name, + size_t v_length, + const double* v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(v, sizeof(v[0]), v_length, file); @@ -100,16 +131,24 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, rtc::ArrayView v) { + void DumpRaw(const char* name, + rtc::ArrayView v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { DumpRaw(name, v.size(), v.data()); } #endif } - void DumpRaw(const char* name, float v) { + void DumpRaw(const char* name, float v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(&v, sizeof(v), 1, file); @@ -117,8 +156,14 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, size_t v_length, const float* v) { + void DumpRaw(const char* name, + size_t v_length, + const float* v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(v, sizeof(v[0]), v_length, file); @@ -126,24 +171,38 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, rtc::ArrayView v) { + void DumpRaw(const char* name, + 
rtc::ArrayView v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { DumpRaw(name, v.size(), v.data()); } #endif } - void DumpRaw(const char* name, bool v) { + void DumpRaw(const char* name, bool v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { DumpRaw(name, static_cast(v)); } #endif } - void DumpRaw(const char* name, size_t v_length, const bool* v) { + void DumpRaw(const char* name, + size_t v_length, + const bool* v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); for (size_t k = 0; k < v_length; ++k) { @@ -154,16 +213,24 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, rtc::ArrayView v) { + void DumpRaw(const char* name, + rtc::ArrayView v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { DumpRaw(name, v.size(), v.data()); } #endif } - void DumpRaw(const char* name, int16_t v) { + void DumpRaw(const char* name, int16_t v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(&v, sizeof(v), 1, file); @@ -171,8 +238,14 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, size_t v_length, const int16_t* v) { + void DumpRaw(const char* name, + size_t v_length, + const int16_t* v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(v, sizeof(v[0]), v_length, 
file); @@ -180,16 +253,24 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, rtc::ArrayView v) { + void DumpRaw(const char* name, + rtc::ArrayView v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { DumpRaw(name, v.size(), v.data()); } #endif } - void DumpRaw(const char* name, int32_t v) { + void DumpRaw(const char* name, int32_t v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(&v, sizeof(v), 1, file); @@ -197,8 +278,14 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, size_t v_length, const int32_t* v) { + void DumpRaw(const char* name, + size_t v_length, + const int32_t* v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(v, sizeof(v[0]), v_length, file); @@ -206,8 +293,11 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, size_t v) { + void DumpRaw(const char* name, size_t v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(&v, sizeof(v), 1, file); @@ -215,8 +305,14 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, size_t v_length, const size_t* v) { + void DumpRaw(const char* name, + size_t v_length, + const size_t* v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { FILE* file = GetRawFile(name); fwrite(v, sizeof(v[0]), v_length, file); @@ -224,16 +320,26 @@ class ApmDataDumper { #endif } - void DumpRaw(const char* name, 
rtc::ArrayView v) { + void DumpRaw(const char* name, + rtc::ArrayView v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { DumpRaw(name, v.size(), v.data()); } #endif } - void DumpRaw(const char* name, rtc::ArrayView v) { + void DumpRaw(const char* name, + rtc::ArrayView v, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + DumpRaw(name, v.size(), v.data()); #endif } @@ -242,8 +348,12 @@ class ApmDataDumper { size_t v_length, const float* v, int sample_rate_hz, - int num_channels) { + int num_channels, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { WavWriter* file = GetWavFile(name, sample_rate_hz, num_channels, WavFile::SampleFormat::kFloat); @@ -255,8 +365,12 @@ class ApmDataDumper { void DumpWav(const char* name, rtc::ArrayView v, int sample_rate_hz, - int num_channels) { + int num_channels, + int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 + if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) + return; + if (recording_activated_) { DumpWav(name, v.size(), v.data(), sample_rate_hz, num_channels); } @@ -266,6 +380,7 @@ class ApmDataDumper { private: #if WEBRTC_APM_DEBUG_DUMP == 1 static bool recording_activated_; + static absl::optional dump_set_to_use_; static constexpr size_t kOutputDirMaxLength = 1024; static char output_dir_[kOutputDirMaxLength]; const int instance_index_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/ns/noise_suppressor.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/ns/noise_suppressor.cc index 89e1fe0d9..d66faa6ed 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/ns/noise_suppressor.cc +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/ns/noise_suppressor.cc @@ -448,6 +448,12 @@ void NoiseSuppressor::Process(AudioBuffer* audio) { } } + // Only do the below processing if the output of the audio processing module + // is used. + if (!capture_output_used_) { + return; + } + // Aggregate the Wiener filters for all channels. std::array filter_data; rtc::ArrayView filter = filter_data; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/ns/noise_suppressor.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/ns/noise_suppressor.h index d9628869b..1e321cf4a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/ns/noise_suppressor.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/ns/noise_suppressor.h @@ -41,12 +41,21 @@ class NoiseSuppressor { // Applies noise suppression. void Process(AudioBuffer* audio); + // Specifies whether the capture output will be used. The purpose of this is + // to allow the noise suppressor to deactivate some of the processing when the + // resulting output is anyway not used, for instance when the endpoint is + // muted. 
+ void SetCaptureOutputUsage(bool capture_output_used) { + capture_output_used_ = capture_output_used; + } + private: const size_t num_bands_; const size_t num_channels_; const SuppressionParams suppression_params_; int32_t num_analyzed_frames_ = -1; NrFft fft_; + bool capture_output_used_ = true; struct ChannelState { ChannelState(const SuppressionParams& suppression_params, size_t num_bands); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc index 1c0230128..185b09d8a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc @@ -20,8 +20,10 @@ #include "absl/strings/match.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/units/time_delta.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" #include "modules/congestion_controller/goog_cc/trendline_estimator.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -30,6 +32,11 @@ namespace webrtc { namespace { constexpr TimeDelta kStreamTimeOut = TimeDelta::Seconds(2); + +// Used with field trial "WebRTC-Bwe-NewInterArrivalDelta/Enabled/ +constexpr TimeDelta kSendTimeGroupLength = TimeDelta::Millis(5); + +// Used unless field trial "WebRTC-Bwe-NewInterArrivalDelta/Enabled/" constexpr int kTimestampGroupLengthMs = 5; constexpr int kAbsSendTimeFraction = 18; constexpr int kAbsSendTimeInterArrivalUpshift = 8; @@ -45,23 +52,8 @@ constexpr double kTimestampToMs = constexpr uint32_t kFixedSsrc = 0; } // namespace -constexpr char BweIgnoreSmallPacketsSettings::kKey[]; constexpr char BweSeparateAudioPacketsSettings::kKey[]; 
-BweIgnoreSmallPacketsSettings::BweIgnoreSmallPacketsSettings( - const WebRtcKeyValueConfig* key_value_config) { - Parser()->Parse( - key_value_config->Lookup(BweIgnoreSmallPacketsSettings::kKey)); -} - -std::unique_ptr -BweIgnoreSmallPacketsSettings::Parser() { - return StructParametersParser::Create("smoothing", &smoothing_factor, // - "fraction_large", &fraction_large, // - "large", &large_threshold, // - "small", &small_threshold); -} - BweSeparateAudioPacketsSettings::BweSeparateAudioPacketsSettings( const WebRtcKeyValueConfig* key_value_config) { Parser()->Parse( @@ -83,28 +75,17 @@ DelayBasedBwe::Result::Result() recovered_from_overuse(false), backoff_in_alr(false) {} -DelayBasedBwe::Result::Result(bool probe, DataRate target_bitrate) - : updated(true), - probe(probe), - target_bitrate(target_bitrate), - recovered_from_overuse(false), - backoff_in_alr(false) {} - DelayBasedBwe::DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, RtcEventLog* event_log, NetworkStatePredictor* network_state_predictor) : event_log_(event_log), key_value_config_(key_value_config), - ignore_small_(key_value_config), - fraction_large_packets_(0.5), separate_audio_(key_value_config), audio_packets_since_last_video_(0), last_video_packet_recv_time_(Timestamp::MinusInfinity()), network_state_predictor_(network_state_predictor), - video_inter_arrival_(), video_delay_detector_( new TrendlineEstimator(key_value_config_, network_state_predictor_)), - audio_inter_arrival_(), audio_delay_detector_( new TrendlineEstimator(key_value_config_, network_state_predictor_)), active_delay_detector_(video_delay_detector_.get()), @@ -114,15 +95,16 @@ DelayBasedBwe::DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, prev_bitrate_(DataRate::Zero()), has_once_detected_overuse_(false), prev_state_(BandwidthUsage::kBwNormal), + use_new_inter_arrival_delta_(absl::StartsWith( + key_value_config->Lookup("WebRTC-Bwe-NewInterArrivalDelta"), + "Enabled")), 
alr_limited_backoff_enabled_(absl::StartsWith( key_value_config->Lookup("WebRTC-Bwe-AlrLimitedBackoff"), "Enabled")) { - RTC_LOG(LS_INFO) << "Initialized DelayBasedBwe with small packet filtering " - << ignore_small_.Parser()->Encode() - << ", separate audio overuse detection" - << separate_audio_.Parser()->Encode() - << " and alr limited backoff " - << (alr_limited_backoff_enabled_ ? "enabled" : "disabled"); + RTC_LOG(LS_INFO) + << "Initialized DelayBasedBwe with separate audio overuse detection" + << separate_audio_.Parser()->Encode() << " and alr limited backoff " + << (alr_limited_backoff_enabled_ ? "enabled" : "disabled"); } DelayBasedBwe::~DelayBasedBwe() {} @@ -180,42 +162,31 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, // Reset if the stream has timed out. if (last_seen_packet_.IsInfinite() || at_time - last_seen_packet_ > kStreamTimeOut) { - video_inter_arrival_.reset( - new InterArrival(kTimestampGroupTicks, kTimestampToMs, true)); + if (use_new_inter_arrival_delta_) { + video_inter_arrival_delta_ = + std::make_unique(kSendTimeGroupLength); + audio_inter_arrival_delta_ = + std::make_unique(kSendTimeGroupLength); + } else { + video_inter_arrival_ = std::make_unique( + kTimestampGroupTicks, kTimestampToMs, true); + audio_inter_arrival_ = std::make_unique( + kTimestampGroupTicks, kTimestampToMs, true); + } video_delay_detector_.reset( new TrendlineEstimator(key_value_config_, network_state_predictor_)); - audio_inter_arrival_.reset( - new InterArrival(kTimestampGroupTicks, kTimestampToMs, true)); audio_delay_detector_.reset( new TrendlineEstimator(key_value_config_, network_state_predictor_)); active_delay_detector_ = video_delay_detector_.get(); } last_seen_packet_ = at_time; - // Ignore "small" packets if many/most packets in the call are "large". The - // packet size may have a significant effect on the propagation delay, - // especially at low bandwidths. 
Variations in packet size will then show up - // as noise in the delay measurement. By default, we include all packets. - DataSize packet_size = packet_feedback.sent_packet.size; - if (!ignore_small_.small_threshold.IsZero()) { - double is_large = - static_cast(packet_size >= ignore_small_.large_threshold); - fraction_large_packets_ += - ignore_small_.smoothing_factor * (is_large - fraction_large_packets_); - if (packet_size <= ignore_small_.small_threshold && - fraction_large_packets_ >= ignore_small_.fraction_large) { - return; - } - } - // As an alternative to ignoring small packets, we can separate audio and // video packets for overuse detection. - InterArrival* inter_arrival_for_packet = video_inter_arrival_.get(); DelayIncreaseDetectorInterface* delay_detector_for_packet = video_delay_detector_.get(); if (separate_audio_.enabled) { if (packet_feedback.sent_packet.audio) { - inter_arrival_for_packet = audio_inter_arrival_.get(); delay_detector_for_packet = audio_delay_detector_.get(); audio_packets_since_last_video_++; if (audio_packets_since_last_video_ > separate_audio_.packet_threshold && @@ -230,29 +201,59 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, active_delay_detector_ = video_delay_detector_.get(); } } + DataSize packet_size = packet_feedback.sent_packet.size; - uint32_t send_time_24bits = - static_cast( - ((static_cast(packet_feedback.sent_packet.send_time.ms()) - << kAbsSendTimeFraction) + - 500) / - 1000) & - 0x00FFFFFF; - // Shift up send time to use the full 32 bits that inter_arrival works with, - // so wrapping works properly. 
- uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift; + if (use_new_inter_arrival_delta_) { + TimeDelta send_delta = TimeDelta::Zero(); + TimeDelta recv_delta = TimeDelta::Zero(); + int size_delta = 0; - uint32_t timestamp_delta = 0; - int64_t recv_delta_ms = 0; - int size_delta = 0; - bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( - timestamp, packet_feedback.receive_time.ms(), at_time.ms(), - packet_size.bytes(), ×tamp_delta, &recv_delta_ms, &size_delta); - double send_delta_ms = (1000.0 * timestamp_delta) / (1 << kInterArrivalShift); - delay_detector_for_packet->Update(recv_delta_ms, send_delta_ms, - packet_feedback.sent_packet.send_time.ms(), - packet_feedback.receive_time.ms(), - packet_size.bytes(), calculated_deltas); + InterArrivalDelta* inter_arrival_for_packet = + (separate_audio_.enabled && packet_feedback.sent_packet.audio) + ? video_inter_arrival_delta_.get() + : audio_inter_arrival_delta_.get(); + bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( + packet_feedback.sent_packet.send_time, packet_feedback.receive_time, + at_time, packet_size.bytes(), &send_delta, &recv_delta, &size_delta); + + delay_detector_for_packet->Update( + recv_delta.ms(), send_delta.ms(), + packet_feedback.sent_packet.send_time.ms(), + packet_feedback.receive_time.ms(), packet_size.bytes(), + calculated_deltas); + } else { + InterArrival* inter_arrival_for_packet = + (separate_audio_.enabled && packet_feedback.sent_packet.audio) + ? video_inter_arrival_.get() + : audio_inter_arrival_.get(); + + uint32_t send_time_24bits = + static_cast( + ((static_cast(packet_feedback.sent_packet.send_time.ms()) + << kAbsSendTimeFraction) + + 500) / + 1000) & + 0x00FFFFFF; + // Shift up send time to use the full 32 bits that inter_arrival works with, + // so wrapping works properly. 
+ uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift; + + uint32_t timestamp_delta = 0; + int64_t recv_delta_ms = 0; + int size_delta = 0; + + bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( + timestamp, packet_feedback.receive_time.ms(), at_time.ms(), + packet_size.bytes(), ×tamp_delta, &recv_delta_ms, &size_delta); + double send_delta_ms = + (1000.0 * timestamp_delta) / (1 << kInterArrivalShift); + + delay_detector_for_packet->Update( + recv_delta_ms, send_delta_ms, + packet_feedback.sent_packet.send_time.ms(), + packet_feedback.receive_time.ms(), packet_size.bytes(), + calculated_deltas); + } } DataRate DelayBasedBwe::TriggerOveruse(Timestamp at_time, diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h index 74650dc82..85ce6eaa8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -22,9 +22,9 @@ #include "api/transport/network_types.h" #include "api/transport/webrtc_key_value_config.h" #include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" +#include "modules/congestion_controller/goog_cc/inter_arrival_delta.h" #include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/inter_arrival.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/race_checker.h" @@ -32,21 +32,6 @@ namespace webrtc { class RtcEventLog; -struct BweIgnoreSmallPacketsSettings { - static constexpr char kKey[] = "WebRTC-BweIgnoreSmallPacketsFix"; - - BweIgnoreSmallPacketsSettings() = default; - explicit BweIgnoreSmallPacketsSettings( - const 
WebRtcKeyValueConfig* key_value_config); - - double smoothing_factor = 0.1; - double fraction_large = 1.0; - DataSize large_threshold = DataSize::Zero(); - DataSize small_threshold = DataSize::Zero(); - - std::unique_ptr Parser(); -}; - struct BweSeparateAudioPacketsSettings { static constexpr char kKey[] = "WebRTC-Bwe-SeparateAudioPackets"; @@ -65,7 +50,6 @@ class DelayBasedBwe { public: struct Result { Result(); - Result(bool probe, DataRate target_bitrate); ~Result() = default; bool updated; bool probe; @@ -113,19 +97,14 @@ class DelayBasedBwe { Timestamp at_time); // Updates the current remote rate estimate and returns true if a valid // estimate exists. - bool UpdateEstimate(Timestamp now, + bool UpdateEstimate(Timestamp at_time, absl::optional acked_bitrate, - DataRate* target_bitrate); + DataRate* target_rate); rtc::RaceChecker network_race_; RtcEventLog* const event_log_; const WebRtcKeyValueConfig* const key_value_config_; - // Filtering out small packets. Intention is to base the detection only - // on video packets even if we have TWCC sequence numbers for audio. - BweIgnoreSmallPacketsSettings ignore_small_; - double fraction_large_packets_; - // Alternatively, run two separate overuse detectors for audio and video, // and fall back to the audio one if we haven't seen a video packet in a // while. 
@@ -135,8 +114,10 @@ class DelayBasedBwe { NetworkStatePredictor* network_state_predictor_; std::unique_ptr video_inter_arrival_; + std::unique_ptr video_inter_arrival_delta_; std::unique_ptr video_delay_detector_; std::unique_ptr audio_inter_arrival_; + std::unique_ptr audio_inter_arrival_delta_; std::unique_ptr audio_delay_detector_; DelayIncreaseDetectorInterface* active_delay_detector_; @@ -146,6 +127,7 @@ class DelayBasedBwe { DataRate prev_bitrate_; bool has_once_detected_overuse_; BandwidthUsage prev_state_; + const bool use_new_inter_arrival_delta_; bool alr_limited_backoff_enabled_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h index 8fe3f669b..eaadb0d12 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h @@ -12,7 +12,7 @@ #include -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "api/network_state_predictor.h" #include "rtc_base/constructor_magic.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc new file mode 100644 index 000000000..791867db6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/inter_arrival_delta.h" + +#include + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +static constexpr TimeDelta kBurstDeltaThreshold = TimeDelta::Millis(5); +static constexpr TimeDelta kMaxBurstDuration = TimeDelta::Millis(100); +constexpr TimeDelta InterArrivalDelta::kArrivalTimeOffsetThreshold; + +InterArrivalDelta::InterArrivalDelta(TimeDelta send_time_group_length) + : send_time_group_length_(send_time_group_length), + current_timestamp_group_(), + prev_timestamp_group_(), + num_consecutive_reordered_packets_(0) {} + +bool InterArrivalDelta::ComputeDeltas(Timestamp send_time, + Timestamp arrival_time, + Timestamp system_time, + size_t packet_size, + TimeDelta* send_time_delta, + TimeDelta* arrival_time_delta, + int* packet_size_delta) { + bool calculated_deltas = false; + if (current_timestamp_group_.IsFirstPacket()) { + // We don't have enough data to update the filter, so we store it until we + // have two frames of data to process. + current_timestamp_group_.send_time = send_time; + current_timestamp_group_.first_send_time = send_time; + current_timestamp_group_.first_arrival = arrival_time; + } else if (current_timestamp_group_.first_send_time > send_time) { + // Reordered packet. + return false; + } else if (NewTimestampGroup(arrival_time, send_time)) { + // First packet of a later send burst, the previous packets sample is ready. 
+ if (prev_timestamp_group_.complete_time.IsFinite()) { + *send_time_delta = + current_timestamp_group_.send_time - prev_timestamp_group_.send_time; + *arrival_time_delta = current_timestamp_group_.complete_time - + prev_timestamp_group_.complete_time; + + TimeDelta system_time_delta = current_timestamp_group_.last_system_time - + prev_timestamp_group_.last_system_time; + + if (*arrival_time_delta - system_time_delta >= + kArrivalTimeOffsetThreshold) { + RTC_LOG(LS_WARNING) + << "The arrival time clock offset has changed (diff = " + << arrival_time_delta->ms() - system_time_delta.ms() + << " ms), resetting."; + Reset(); + return false; + } + if (*arrival_time_delta < TimeDelta::Zero()) { + // The group of packets has been reordered since receiving its local + // arrival timestamp. + ++num_consecutive_reordered_packets_; + if (num_consecutive_reordered_packets_ >= kReorderedResetThreshold) { + RTC_LOG(LS_WARNING) + << "Packets between send burst arrived out of order, resetting." + << " arrival_time_delta" << arrival_time_delta->ms() + << " send time delta " << send_time_delta->ms(); + Reset(); + } + return false; + } else { + num_consecutive_reordered_packets_ = 0; + } + *packet_size_delta = static_cast(current_timestamp_group_.size) - + static_cast(prev_timestamp_group_.size); + calculated_deltas = true; + } + prev_timestamp_group_ = current_timestamp_group_; + // The new timestamp is now the current frame. + current_timestamp_group_.first_send_time = send_time; + current_timestamp_group_.send_time = send_time; + current_timestamp_group_.first_arrival = arrival_time; + current_timestamp_group_.size = 0; + } else { + current_timestamp_group_.send_time = + std::max(current_timestamp_group_.send_time, send_time); + } + // Accumulate the frame size. 
+ current_timestamp_group_.size += packet_size; + current_timestamp_group_.complete_time = arrival_time; + current_timestamp_group_.last_system_time = system_time; + + return calculated_deltas; +} + +// Assumes that |timestamp| is not reordered compared to +// |current_timestamp_group_|. +bool InterArrivalDelta::NewTimestampGroup(Timestamp arrival_time, + Timestamp send_time) const { + if (current_timestamp_group_.IsFirstPacket()) { + return false; + } else if (BelongsToBurst(arrival_time, send_time)) { + return false; + } else { + return send_time - current_timestamp_group_.first_send_time > + send_time_group_length_; + } +} + +bool InterArrivalDelta::BelongsToBurst(Timestamp arrival_time, + Timestamp send_time) const { + RTC_DCHECK(current_timestamp_group_.complete_time.IsFinite()); + TimeDelta arrival_time_delta = + arrival_time - current_timestamp_group_.complete_time; + TimeDelta send_time_delta = send_time - current_timestamp_group_.send_time; + if (send_time_delta.IsZero()) + return true; + TimeDelta propagation_delta = arrival_time_delta - send_time_delta; + if (propagation_delta < TimeDelta::Zero() && + arrival_time_delta <= kBurstDeltaThreshold && + arrival_time - current_timestamp_group_.first_arrival < kMaxBurstDuration) + return true; + return false; +} + +void InterArrivalDelta::Reset() { + num_consecutive_reordered_packets_ = 0; + current_timestamp_group_ = SendTimeGroup(); + prev_timestamp_group_ = SendTimeGroup(); +} +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.h new file mode 100644 index 000000000..28dc80624 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.h @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_INTER_ARRIVAL_DELTA_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_INTER_ARRIVAL_DELTA_H_ + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" + +namespace webrtc { + +// Helper class to compute the inter-arrival time delta and the size delta +// between two send bursts. This code is branched from +// modules/remote_bitrate_estimator/inter_arrival. +class InterArrivalDelta { + public: + // After this many packet groups received out of order InterArrival will + // reset, assuming that clocks have made a jump. + static constexpr int kReorderedResetThreshold = 3; + static constexpr TimeDelta kArrivalTimeOffsetThreshold = + TimeDelta::Seconds(3); + + // A send time group is defined as all packets with a send time which are at + // most send_time_group_length older than the first timestamp in that + // group. + explicit InterArrivalDelta(TimeDelta send_time_group_length); + + InterArrivalDelta() = delete; + InterArrivalDelta(const InterArrivalDelta&) = delete; + InterArrivalDelta& operator=(const InterArrivalDelta&) = delete; + + // This function returns true if a delta was computed, or false if the current + // group is still incomplete or if only one group has been completed. + // |send_time| is the send time. + // |arrival_time| is the time at which the packet arrived. + // |packet_size| is the size of the packet. + // |timestamp_delta| (output) is the computed send time delta. + // |arrival_time_delta_ms| (output) is the computed arrival-time delta. + // |packet_size_delta| (output) is the computed size delta. 
+ bool ComputeDeltas(Timestamp send_time, + Timestamp arrival_time, + Timestamp system_time, + size_t packet_size, + TimeDelta* send_time_delta, + TimeDelta* arrival_time_delta, + int* packet_size_delta); + + private: + struct SendTimeGroup { + SendTimeGroup() + : size(0), + first_send_time(Timestamp::MinusInfinity()), + send_time(Timestamp::MinusInfinity()), + first_arrival(Timestamp::MinusInfinity()), + complete_time(Timestamp::MinusInfinity()), + last_system_time(Timestamp::MinusInfinity()) {} + + bool IsFirstPacket() const { return complete_time.IsInfinite(); } + + size_t size; + Timestamp first_send_time; + Timestamp send_time; + Timestamp first_arrival; + Timestamp complete_time; + Timestamp last_system_time; + }; + + // Returns true if the last packet was the end of the current batch and the + // packet with |send_time| is the first of a new batch. + bool NewTimestampGroup(Timestamp arrival_time, Timestamp send_time) const; + + bool BelongsToBurst(Timestamp arrival_time, Timestamp send_time) const; + + void Reset(); + + const TimeDelta send_time_group_length_; + SendTimeGroup current_timestamp_group_; + SendTimeGroup prev_timestamp_group_; + int num_consecutive_reordered_packets_; +}; +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_INTER_ARRIVAL_DELTA_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc index 1d2aab852..2211d26f0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc @@ -14,14 +14,18 @@ #include #include +#include "absl/strings/match.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { 
const char kBweLossBasedControl[] = "WebRTC-Bwe-LossBasedControl"; +// Expecting RTCP feedback to be sent with roughly 1s intervals, a 5s gap +// indicates a channel outage. +constexpr TimeDelta kMaxRtcpFeedbackInterval = TimeDelta::Millis(5000); + // Increase slower when RTT is high. double GetIncreaseFactor(const LossBasedControlConfig& config, TimeDelta rtt) { // Clamp the RTT @@ -71,10 +75,16 @@ double ExponentialUpdate(TimeDelta window, TimeDelta interval) { return 1.0f - exp(interval / window * -1.0); } +bool IsEnabled(const webrtc::WebRtcKeyValueConfig& key_value_config, + absl::string_view name) { + return absl::StartsWith(key_value_config.Lookup(name), "Enabled"); +} + } // namespace -LossBasedControlConfig::LossBasedControlConfig() - : enabled(field_trial::IsEnabled(kBweLossBasedControl)), +LossBasedControlConfig::LossBasedControlConfig( + const WebRtcKeyValueConfig* key_value_config) + : enabled(IsEnabled(*key_value_config, kBweLossBasedControl)), min_increase_factor("min_incr", 1.02), max_increase_factor("max_incr", 1.08), increase_low_rtt("incr_low_rtt", TimeDelta::Millis(200)), @@ -88,26 +98,28 @@ LossBasedControlConfig::LossBasedControlConfig() DataRate::KilobitsPerSec(0.5)), loss_bandwidth_balance_decrease("balance_decr", DataRate::KilobitsPerSec(4)), + loss_bandwidth_balance_reset("balance_reset", + DataRate::KilobitsPerSec(0.1)), loss_bandwidth_balance_exponent("exponent", 0.5), allow_resets("resets", false), decrease_interval("decr_intvl", TimeDelta::Millis(300)), loss_report_timeout("timeout", TimeDelta::Millis(6000)) { - std::string trial_string = field_trial::FindFullName(kBweLossBasedControl); ParseFieldTrial( {&min_increase_factor, &max_increase_factor, &increase_low_rtt, &increase_high_rtt, &decrease_factor, &loss_window, &loss_max_window, &acknowledged_rate_max_window, &increase_offset, &loss_bandwidth_balance_increase, &loss_bandwidth_balance_decrease, - &loss_bandwidth_balance_exponent, &allow_resets, &decrease_interval, - 
&loss_report_timeout}, - trial_string); + &loss_bandwidth_balance_reset, &loss_bandwidth_balance_exponent, + &allow_resets, &decrease_interval, &loss_report_timeout}, + key_value_config->Lookup(kBweLossBasedControl)); } LossBasedControlConfig::LossBasedControlConfig(const LossBasedControlConfig&) = default; LossBasedControlConfig::~LossBasedControlConfig() = default; -LossBasedBandwidthEstimation::LossBasedBandwidthEstimation() - : config_(LossBasedControlConfig()), +LossBasedBandwidthEstimation::LossBasedBandwidthEstimation( + const WebRtcKeyValueConfig* key_value_config) + : config_(key_value_config), average_loss_(0), average_loss_max_(0), loss_based_bitrate_(DataRate::Zero()), @@ -164,9 +176,14 @@ void LossBasedBandwidthEstimation::UpdateAcknowledgedBitrate( } } -void LossBasedBandwidthEstimation::Update(Timestamp at_time, - DataRate min_bitrate, - TimeDelta last_round_trip_time) { +DataRate LossBasedBandwidthEstimation::Update(Timestamp at_time, + DataRate min_bitrate, + DataRate wanted_bitrate, + TimeDelta last_round_trip_time) { + if (loss_based_bitrate_.IsZero()) { + loss_based_bitrate_ = wanted_bitrate; + } + // Only increase if loss has been low for some time. const double loss_estimate_for_increase = average_loss_max_; // Avoid multiple decreases from averaging over one loss spike. @@ -176,8 +193,15 @@ void LossBasedBandwidthEstimation::Update(Timestamp at_time, !has_decreased_since_last_loss_report_ && (at_time - time_last_decrease_ >= last_round_trip_time + config_.decrease_interval); + // If packet lost reports are too old, dont increase bitrate. 
+ const bool loss_report_valid = + at_time - last_loss_packet_report_ < 1.2 * kMaxRtcpFeedbackInterval; - if (loss_estimate_for_increase < loss_increase_threshold()) { + if (loss_report_valid && config_.allow_resets && + loss_estimate_for_increase < loss_reset_threshold()) { + loss_based_bitrate_ = wanted_bitrate; + } else if (loss_report_valid && + loss_estimate_for_increase < loss_increase_threshold()) { // Increase bitrate by RTT-adaptive ratio. DataRate new_increased_bitrate = min_bitrate * GetIncreaseFactor(config_, last_round_trip_time) + @@ -203,14 +227,21 @@ void LossBasedBandwidthEstimation::Update(Timestamp at_time, loss_based_bitrate_ = new_decreased_bitrate; } } + return loss_based_bitrate_; } -void LossBasedBandwidthEstimation::Reset(DataRate bitrate) { +void LossBasedBandwidthEstimation::Initialize(DataRate bitrate) { loss_based_bitrate_ = bitrate; average_loss_ = 0; average_loss_max_ = 0; } +double LossBasedBandwidthEstimation::loss_reset_threshold() const { + return LossFromBitrate(loss_based_bitrate_, + config_.loss_bandwidth_balance_reset, + config_.loss_bandwidth_balance_exponent); +} + double LossBasedBandwidthEstimation::loss_increase_threshold() const { return LossFromBitrate(loss_based_bitrate_, config_.loss_bandwidth_balance_increase, @@ -226,14 +257,4 @@ double LossBasedBandwidthEstimation::loss_decrease_threshold() const { DataRate LossBasedBandwidthEstimation::decreased_bitrate() const { return config_.decrease_factor * acknowledged_bitrate_max_; } - -void LossBasedBandwidthEstimation::MaybeReset(DataRate bitrate) { - if (config_.allow_resets) - Reset(bitrate); -} - -void LossBasedBandwidthEstimation::SetInitialBitrate(DataRate bitrate) { - Reset(bitrate); -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h index b63363cad..20ff092e6 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h @@ -14,6 +14,7 @@ #include #include "api/transport/network_types.h" +#include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -22,7 +23,7 @@ namespace webrtc { struct LossBasedControlConfig { - LossBasedControlConfig(); + explicit LossBasedControlConfig(const WebRtcKeyValueConfig* key_value_config); LossBasedControlConfig(const LossBasedControlConfig&); LossBasedControlConfig& operator=(const LossBasedControlConfig&) = default; ~LossBasedControlConfig(); @@ -38,23 +39,34 @@ struct LossBasedControlConfig { FieldTrialParameter increase_offset; FieldTrialParameter loss_bandwidth_balance_increase; FieldTrialParameter loss_bandwidth_balance_decrease; + FieldTrialParameter loss_bandwidth_balance_reset; FieldTrialParameter loss_bandwidth_balance_exponent; FieldTrialParameter allow_resets; FieldTrialParameter decrease_interval; FieldTrialParameter loss_report_timeout; }; +// Estimates an upper BWE limit based on loss. +// It requires knowledge about lost packets and acknowledged bitrate. +// Ie, this class require transport feedback. class LossBasedBandwidthEstimation { public: - LossBasedBandwidthEstimation(); - void Update(Timestamp at_time, - DataRate min_bitrate, - TimeDelta last_round_trip_time); + explicit LossBasedBandwidthEstimation( + const WebRtcKeyValueConfig* key_value_config); + // Returns the new estimate. 
+ DataRate Update(Timestamp at_time, + DataRate min_bitrate, + DataRate wanted_bitrate, + TimeDelta last_round_trip_time); void UpdateAcknowledgedBitrate(DataRate acknowledged_bitrate, Timestamp at_time); - void MaybeReset(DataRate bitrate); - void SetInitialBitrate(DataRate bitrate); + void Initialize(DataRate bitrate); bool Enabled() const { return config_.enabled; } + // Returns true if LossBasedBandwidthEstimation is enabled and have + // received loss statistics. Ie, this class require transport feedback. + bool InUse() const { + return Enabled() && last_loss_packet_report_.IsFinite(); + } void UpdateLossStatistics(const std::vector& packet_results, Timestamp at_time); DataRate GetEstimate() const { return loss_based_bitrate_; } @@ -64,9 +76,11 @@ class LossBasedBandwidthEstimation { void Reset(DataRate bitrate); double loss_increase_threshold() const; double loss_decrease_threshold() const; + double loss_reset_threshold() const; + DataRate decreased_bitrate() const; - LossBasedControlConfig config_; + const LossBasedControlConfig config_; double average_loss_; double average_loss_max_; DataRate loss_based_bitrate_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h index 11e92b97a..bcaa29320 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h @@ -16,6 +16,7 @@ #include #include +#include "absl/base/attributes.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/network_control.h" @@ -23,7 +24,6 @@ #include "api/units/data_rate.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/system/unused.h" namespace webrtc { @@ -63,7 +63,7 @@ class ProbeController { RtcEventLog* 
event_log); ~ProbeController(); - RTC_WARN_UNUSED_RESULT std::vector SetBitrates( + ABSL_MUST_USE_RESULT std::vector SetBitrates( int64_t min_bitrate_bps, int64_t start_bitrate_bps, int64_t max_bitrate_bps, @@ -71,14 +71,14 @@ class ProbeController { // The total bitrate, as opposed to the max bitrate, is the sum of the // configured bitrates for all active streams. - RTC_WARN_UNUSED_RESULT std::vector + ABSL_MUST_USE_RESULT std::vector OnMaxTotalAllocatedBitrate(int64_t max_total_allocated_bitrate, int64_t at_time_ms); - RTC_WARN_UNUSED_RESULT std::vector OnNetworkAvailability( + ABSL_MUST_USE_RESULT std::vector OnNetworkAvailability( NetworkAvailability msg); - RTC_WARN_UNUSED_RESULT std::vector SetEstimatedBitrate( + ABSL_MUST_USE_RESULT std::vector SetEstimatedBitrate( int64_t bitrate_bps, int64_t at_time_ms); @@ -87,7 +87,7 @@ class ProbeController { void SetAlrStartTimeMs(absl::optional alr_start_time); void SetAlrEndedTimeMs(int64_t alr_end_time); - RTC_WARN_UNUSED_RESULT std::vector RequestProbe( + ABSL_MUST_USE_RESULT std::vector RequestProbe( int64_t at_time_ms); // Sets a new maximum probing bitrate, without generating a new probe cluster. @@ -97,7 +97,7 @@ class ProbeController { // created EXCEPT for |enable_periodic_alr_probing_|. 
void Reset(int64_t at_time_ms); - RTC_WARN_UNUSED_RESULT std::vector Process( + ABSL_MUST_USE_RESULT std::vector Process( int64_t at_time_ms); private: @@ -110,9 +110,9 @@ class ProbeController { kProbingComplete, }; - RTC_WARN_UNUSED_RESULT std::vector + ABSL_MUST_USE_RESULT std::vector InitiateExponentialProbing(int64_t at_time_ms); - RTC_WARN_UNUSED_RESULT std::vector InitiateProbing( + ABSL_MUST_USE_RESULT std::vector InitiateProbing( int64_t now_ms, std::vector bitrates_to_probe, bool probe_further); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc index 4ca75bf26..a2865d9f5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc @@ -20,6 +20,7 @@ #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/webrtc_key_value_config.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" @@ -224,7 +225,9 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation( last_rtc_event_log_(Timestamp::MinusInfinity()), low_loss_threshold_(kDefaultLowLossThreshold), high_loss_threshold_(kDefaultHighLossThreshold), - bitrate_threshold_(kDefaultBitrateThreshold) { + bitrate_threshold_(kDefaultBitrateThreshold), + loss_based_bandwidth_estimation_(key_value_config), + receiver_limit_caps_only_("Enabled") { RTC_DCHECK(event_log); if (BweLossExperimentIsEnabled()) { uint32_t bitrate_threshold_kbps; @@ -237,6 +240,8 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation( bitrate_threshold_ = DataRate::KilobitsPerSec(bitrate_threshold_kbps); 
} } + ParseFieldTrial({&receiver_limit_caps_only_}, + key_value_config->Lookup("WebRTC-Bwe-ReceiverLimitCapsOnly")); } SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {} @@ -283,9 +288,6 @@ void SendSideBandwidthEstimation::SetSendBitrate(DataRate bitrate, RTC_DCHECK_GT(bitrate, DataRate::Zero()); // Reset to avoid being capped by the estimate. delay_based_limit_ = DataRate::PlusInfinity(); - if (loss_based_bandwidth_estimation_.Enabled()) { - loss_based_bandwidth_estimation_.MaybeReset(bitrate); - } UpdateTargetBitrate(bitrate, at_time); // Clear last sent bitrate history so the new value can be used directly // and not capped. @@ -308,7 +310,10 @@ int SendSideBandwidthEstimation::GetMinBitrate() const { } DataRate SendSideBandwidthEstimation::target_rate() const { - return std::max(min_bitrate_configured_, current_target_); + DataRate target = current_target_; + if (receiver_limit_caps_only_) + target = std::min(target, receiver_limit_); + return std::max(min_bitrate_configured_, target); } DataRate SendSideBandwidthEstimation::GetEstimatedLinkCapacity() const { @@ -350,8 +355,8 @@ void SendSideBandwidthEstimation::IncomingPacketFeedbackVector( } } -void SendSideBandwidthEstimation::UpdatePacketsLost(int packets_lost, - int number_of_packets, +void SendSideBandwidthEstimation::UpdatePacketsLost(int64_t packets_lost, + int64_t number_of_packets, Timestamp at_time) { last_loss_feedback_ = at_time; if (first_report_time_.IsInfinite()) @@ -359,21 +364,23 @@ void SendSideBandwidthEstimation::UpdatePacketsLost(int packets_lost, // Check sequence number diff and weight loss report if (number_of_packets > 0) { - // Accumulate reports. - lost_packets_since_last_loss_update_ += packets_lost; - expected_packets_since_last_loss_update_ += number_of_packets; + int64_t expected = + expected_packets_since_last_loss_update_ + number_of_packets; // Don't generate a loss rate until it can be based on enough packets. 
- if (expected_packets_since_last_loss_update_ < kLimitNumPackets) + if (expected < kLimitNumPackets) { + // Accumulate reports. + expected_packets_since_last_loss_update_ = expected; + lost_packets_since_last_loss_update_ += packets_lost; return; + } has_decreased_since_last_fraction_loss_ = false; - int64_t lost_q8 = lost_packets_since_last_loss_update_ << 8; - int64_t expected = expected_packets_since_last_loss_update_; + int64_t lost_q8 = (lost_packets_since_last_loss_update_ + packets_lost) + << 8; last_fraction_loss_ = std::min(lost_q8 / expected, 255); // Reset accumulators. - lost_packets_since_last_loss_update_ = 0; expected_packets_since_last_loss_update_ = 0; last_loss_packet_report_ = at_time; @@ -453,7 +460,7 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { if (delay_based_limit_.IsFinite()) new_bitrate = std::max(delay_based_limit_, new_bitrate); if (loss_based_bandwidth_estimation_.Enabled()) { - loss_based_bandwidth_estimation_.SetInitialBitrate(new_bitrate); + loss_based_bandwidth_estimation_.Initialize(new_bitrate); } if (new_bitrate != current_target_) { @@ -476,10 +483,10 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { return; } - if (loss_based_bandwidth_estimation_.Enabled()) { - loss_based_bandwidth_estimation_.Update( - at_time, min_bitrate_history_.front().second, last_round_trip_time_); - DataRate new_bitrate = MaybeRampupOrBackoff(current_target_, at_time); + if (loss_based_bandwidth_estimation_.InUse()) { + DataRate new_bitrate = loss_based_bandwidth_estimation_.Update( + at_time, min_bitrate_history_.front().second, delay_based_limit_, + last_round_trip_time_); UpdateTargetBitrate(new_bitrate, at_time); return; } @@ -576,27 +583,11 @@ void SendSideBandwidthEstimation::UpdateMinHistory(Timestamp at_time) { min_bitrate_history_.push_back(std::make_pair(at_time, current_target_)); } -DataRate SendSideBandwidthEstimation::MaybeRampupOrBackoff(DataRate new_bitrate, - Timestamp at_time) { - // 
TODO(crodbro): reuse this code in UpdateEstimate instead of current - // inlining of very similar functionality. - const TimeDelta time_since_loss_packet_report = - at_time - last_loss_packet_report_; - if (time_since_loss_packet_report < 1.2 * kMaxRtcpFeedbackInterval) { - new_bitrate = min_bitrate_history_.front().second * 1.08; - new_bitrate += DataRate::BitsPerSec(1000); - } - return new_bitrate; -} - DataRate SendSideBandwidthEstimation::GetUpperLimit() const { - DataRate upper_limit = std::min(delay_based_limit_, receiver_limit_); + DataRate upper_limit = delay_based_limit_; + if (!receiver_limit_caps_only_) + upper_limit = std::min(upper_limit, receiver_limit_); upper_limit = std::min(upper_limit, max_bitrate_configured_); - if (loss_based_bandwidth_estimation_.Enabled() && - loss_based_bandwidth_estimation_.GetEstimate() > DataRate::Zero()) { - upper_limit = - std::min(upper_limit, loss_based_bandwidth_estimation_.GetEstimate()); - } return upper_limit; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h index a13800b7f..b97b940db 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h @@ -99,8 +99,8 @@ class SendSideBandwidthEstimation { void UpdateDelayBasedEstimate(Timestamp at_time, DataRate bitrate); // Call when we receive a RTCP message with a ReceiveBlock. - void UpdatePacketsLost(int packets_lost, - int number_of_packets, + void UpdatePacketsLost(int64_t packets_lost, + int64_t number_of_packets, Timestamp at_time); // Call when we receive a RTCP message with a ReceiveBlock. @@ -131,8 +131,6 @@ class SendSideBandwidthEstimation { // min bitrate used during last kBweIncreaseIntervalMs. 
void UpdateMinHistory(Timestamp at_time); - DataRate MaybeRampupOrBackoff(DataRate new_bitrate, Timestamp at_time); - // Gets the upper limit for the target bitrate. This is the minimum of the // delay based limit, the receiver limit and the loss based controller limit. DataRate GetUpperLimit() const; @@ -192,6 +190,7 @@ class SendSideBandwidthEstimation { float high_loss_threshold_; DataRate bitrate_threshold_; LossBasedBandwidthEstimation loss_based_bandwidth_estimation_; + FieldTrialFlag receiver_limit_caps_only_; }; } // namespace webrtc #endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_SEND_SIDE_BANDWIDTH_ESTIMATION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc new file mode 100644 index 000000000..52baab06c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc @@ -0,0 +1,199 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/congestion_controller/goog_cc/test/goog_cc_printer.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "modules/congestion_controller/goog_cc/alr_detector.h" +#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "modules/congestion_controller/goog_cc/trendline_estimator.h" +#include "modules/remote_bitrate_estimator/aimd_rate_control.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { +void WriteTypedValue(RtcEventLogOutput* out, int value) { + LogWriteFormat(out, "%i", value); +} +void WriteTypedValue(RtcEventLogOutput* out, double value) { + LogWriteFormat(out, "%.6f", value); +} +void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { + LogWriteFormat(out, "%.0f", value ? value->bytes_per_sec() : NAN); +} +void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { + LogWriteFormat(out, "%.0f", value ? value->bytes() : NAN); +} +void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { + LogWriteFormat(out, "%.3f", value ? value->seconds() : NAN); +} +void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { + LogWriteFormat(out, "%.3f", value ? 
value->seconds() : NAN); +} + +template +class TypedFieldLogger : public FieldLogger { + public: + TypedFieldLogger(std::string name, F&& getter) + : name_(std::move(name)), getter_(std::forward(getter)) {} + const std::string& name() const override { return name_; } + void WriteValue(RtcEventLogOutput* out) override { + WriteTypedValue(out, getter_()); + } + + private: + std::string name_; + F getter_; +}; + +template +FieldLogger* Log(std::string name, F&& getter) { + return new TypedFieldLogger(std::move(name), std::forward(getter)); +} + +} // namespace +GoogCcStatePrinter::GoogCcStatePrinter() { + for (auto* logger : CreateLoggers()) { + loggers_.emplace_back(logger); + } +} + +std::deque GoogCcStatePrinter::CreateLoggers() { + auto stable_estimate = [this] { + return DataRate::KilobitsPerSec( + controller_->delay_based_bwe_->rate_control_.link_capacity_ + .estimate_kbps_.value_or(-INFINITY)); + }; + auto rate_control_state = [this] { + return static_cast( + controller_->delay_based_bwe_->rate_control_.rate_control_state_); + }; + auto trend = [this] { + return reinterpret_cast( + controller_->delay_based_bwe_->active_delay_detector_); + }; + auto acknowledged_rate = [this] { + return controller_->acknowledged_bitrate_estimator_->bitrate(); + }; + auto loss_cont = [&] { + return &controller_->bandwidth_estimation_ + ->loss_based_bandwidth_estimation_; + }; + std::deque loggers({ + Log("time", [=] { return target_.at_time; }), + Log("rtt", [=] { return target_.network_estimate.round_trip_time; }), + Log("target", [=] { return target_.target_rate; }), + Log("stable_target", [=] { return target_.stable_target_rate; }), + Log("pacing", [=] { return pacing_.data_rate(); }), + Log("padding", [=] { return pacing_.pad_rate(); }), + Log("window", [=] { return congestion_window_; }), + Log("rate_control_state", [=] { return rate_control_state(); }), + Log("stable_estimate", [=] { return stable_estimate(); }), + Log("trendline", [=] { return trend()->prev_trend_; }), + 
Log("trendline_modified_offset", + [=] { return trend()->prev_modified_trend_; }), + Log("trendline_offset_threshold", [=] { return trend()->threshold_; }), + Log("acknowledged_rate", [=] { return acknowledged_rate(); }), + Log("est_capacity", [=] { return est_.link_capacity; }), + Log("est_capacity_dev", [=] { return est_.link_capacity_std_dev; }), + Log("est_capacity_min", [=] { return est_.link_capacity_min; }), + Log("est_cross_traffic", [=] { return est_.cross_traffic_ratio; }), + Log("est_cross_delay", [=] { return est_.cross_delay_rate; }), + Log("est_spike_delay", [=] { return est_.spike_delay_rate; }), + Log("est_pre_buffer", [=] { return est_.pre_link_buffer_delay; }), + Log("est_post_buffer", [=] { return est_.post_link_buffer_delay; }), + Log("est_propagation", [=] { return est_.propagation_delay; }), + Log("loss_ratio", [=] { return loss_cont()->last_loss_ratio_; }), + Log("loss_average", [=] { return loss_cont()->average_loss_; }), + Log("loss_average_max", [=] { return loss_cont()->average_loss_max_; }), + Log("loss_thres_inc", + [=] { return loss_cont()->loss_increase_threshold(); }), + Log("loss_thres_dec", + [=] { return loss_cont()->loss_decrease_threshold(); }), + Log("loss_dec_rate", [=] { return loss_cont()->decreased_bitrate(); }), + Log("loss_based_rate", [=] { return loss_cont()->loss_based_bitrate_; }), + Log("loss_ack_rate", + [=] { return loss_cont()->acknowledged_bitrate_max_; }), + Log("data_window", [=] { return controller_->current_data_window_; }), + Log("pushback_target", + [=] { return controller_->last_pushback_target_rate_; }), + }); + return loggers; +} +GoogCcStatePrinter::~GoogCcStatePrinter() = default; + +void GoogCcStatePrinter::PrintHeaders(RtcEventLogOutput* log) { + int ix = 0; + for (const auto& logger : loggers_) { + if (ix++) + log->Write(" "); + log->Write(logger->name()); + } + log->Write("\n"); + log->Flush(); +} + +void GoogCcStatePrinter::PrintState(RtcEventLogOutput* log, + GoogCcNetworkController* controller, 
+ Timestamp at_time) { + controller_ = controller; + auto state_update = controller_->GetNetworkState(at_time); + target_ = state_update.target_rate.value(); + pacing_ = state_update.pacer_config.value(); + if (state_update.congestion_window) + congestion_window_ = *state_update.congestion_window; + if (controller_->network_estimator_) { + est_ = controller_->network_estimator_->GetCurrentEstimate().value_or( + NetworkStateEstimate()); + } + + int ix = 0; + for (const auto& logger : loggers_) { + if (ix++) + log->Write(" "); + logger->WriteValue(log); + } + + log->Write("\n"); + log->Flush(); +} + +GoogCcDebugFactory::GoogCcDebugFactory() + : GoogCcDebugFactory(GoogCcFactoryConfig()) {} + +GoogCcDebugFactory::GoogCcDebugFactory(GoogCcFactoryConfig config) + : GoogCcNetworkControllerFactory(std::move(config)) {} + +std::unique_ptr GoogCcDebugFactory::Create( + NetworkControllerConfig config) { + RTC_CHECK(controller_ == nullptr); + auto controller = GoogCcNetworkControllerFactory::Create(config); + controller_ = static_cast(controller.get()); + return controller; +} + +void GoogCcDebugFactory::PrintState(const Timestamp at_time) { + if (controller_ && log_writer_) { + printer_.PrintState(log_writer_.get(), controller_, at_time); + } +} + +void GoogCcDebugFactory::AttachWriter( + std::unique_ptr log_writer) { + if (log_writer) { + log_writer_ = std::move(log_writer); + printer_.PrintHeaders(log_writer_.get()); + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h new file mode 100644 index 000000000..3eee7814c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h @@ -0,0 +1,75 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ + +#include +#include +#include + +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/goog_cc_factory.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" +#include "test/logging/log_writer.h" + +namespace webrtc { + +class FieldLogger { + public: + virtual ~FieldLogger() = default; + virtual const std::string& name() const = 0; + virtual void WriteValue(RtcEventLogOutput* out) = 0; +}; + +class GoogCcStatePrinter { + public: + GoogCcStatePrinter(); + GoogCcStatePrinter(const GoogCcStatePrinter&) = delete; + GoogCcStatePrinter& operator=(const GoogCcStatePrinter&) = delete; + ~GoogCcStatePrinter(); + + void PrintHeaders(RtcEventLogOutput* log); + void PrintState(RtcEventLogOutput* log, + GoogCcNetworkController* controller, + Timestamp at_time); + + private: + std::deque CreateLoggers(); + std::deque> loggers_; + + GoogCcNetworkController* controller_ = nullptr; + TargetTransferRate target_; + PacerConfig pacing_; + DataSize congestion_window_ = DataSize::PlusInfinity(); + NetworkStateEstimate est_; +}; + +class GoogCcDebugFactory : public GoogCcNetworkControllerFactory { + public: + GoogCcDebugFactory(); + explicit GoogCcDebugFactory(GoogCcFactoryConfig config); + std::unique_ptr Create( + NetworkControllerConfig config) override; + + void PrintState(const Timestamp at_time); + + void AttachWriter(std::unique_ptr log_writer); + + 
private: + GoogCcStatePrinter printer_; + GoogCcNetworkController* controller_ = nullptr; + std::unique_ptr log_writer_; +}; +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc index c04db7351..1008badf6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc @@ -17,7 +17,7 @@ #include "absl/strings/match.h" #include "absl/types/optional.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "api/network_state_predictor.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/struct_parameters_parser.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h index 2db290341..75b971d18 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h @@ -20,7 +20,6 @@ #include "api/network_state_predictor.h" #include "api/transport/webrtc_key_value_config.h" #include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/struct_parameters_parser.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h 
b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h index 034f2e951..84661c05b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/include/receive_side_congestion_controller.h @@ -16,7 +16,10 @@ #include "api/transport/field_trial_based_config.h" #include "api/transport/network_control.h" +#include "api/units/data_rate.h" +#include "modules/congestion_controller/remb_throttler.h" #include "modules/include/module.h" +#include "modules/pacing/packet_router.h" #include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" #include "rtc_base/synchronization/mutex.h" @@ -32,10 +35,10 @@ class RemoteBitrateObserver; class ReceiveSideCongestionController : public CallStatsObserver, public Module { public: - ReceiveSideCongestionController(Clock* clock, PacketRouter* packet_router); ReceiveSideCongestionController( Clock* clock, - PacketRouter* packet_router, + RemoteEstimatorProxy::TransportFeedbackSender feedback_sender, + RembThrottler::RembSender remb_sender, NetworkStateEstimator* network_state_estimator); ~ReceiveSideCongestionController() override {} @@ -56,6 +59,10 @@ class ReceiveSideCongestionController : public CallStatsObserver, // This is send bitrate, used to control the rate of feedback messages. void OnBitrateChanged(int bitrate_bps); + // Ensures the remote party is notified of the receive bitrate no larger than + // |bitrate| using RTCP REMB. + void SetMaxDesiredReceiveBitrate(DataRate bitrate); + // Implements Module. 
int64_t TimeUntilNextProcess() override; void Process() override; @@ -103,6 +110,7 @@ class ReceiveSideCongestionController : public CallStatsObserver, }; const FieldTrialBasedConfig field_trial_config_; + RembThrottler remb_throttler_; WrappingBitrateEstimator remote_bitrate_estimator_; RemoteEstimatorProxy remote_estimator_proxy_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/receive_side_congestion_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/receive_side_congestion_controller.cc index 638cb2d29..61a126fbe 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/receive_side_congestion_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/receive_side_congestion_controller.cc @@ -10,6 +10,7 @@ #include "modules/congestion_controller/include/receive_side_congestion_controller.h" +#include "api/units/data_rate.h" #include "modules/pacing/packet_router.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h" @@ -120,16 +121,13 @@ void ReceiveSideCongestionController::WrappingBitrateEstimator:: ReceiveSideCongestionController::ReceiveSideCongestionController( Clock* clock, - PacketRouter* packet_router) - : ReceiveSideCongestionController(clock, packet_router, nullptr) {} - -ReceiveSideCongestionController::ReceiveSideCongestionController( - Clock* clock, - PacketRouter* packet_router, + RemoteEstimatorProxy::TransportFeedbackSender feedback_sender, + RembThrottler::RembSender remb_sender, NetworkStateEstimator* network_state_estimator) - : remote_bitrate_estimator_(packet_router, clock), + : remb_throttler_(std::move(remb_sender), clock), + remote_bitrate_estimator_(&remb_throttler_, clock), remote_estimator_proxy_(clock, - packet_router, + std::move(feedback_sender), &field_trial_config_, network_state_estimator) {} @@ -186,4 +184,9 @@ void 
ReceiveSideCongestionController::Process() { remote_bitrate_estimator_.Process(); } +void ReceiveSideCongestionController::SetMaxDesiredReceiveBitrate( + DataRate bitrate) { + remb_throttler_.SetMaxDesiredReceiveBitrate(bitrate); +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.cc new file mode 100644 index 000000000..fcc30af9a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.cc @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/remb_throttler.h" + +#include +#include + +namespace webrtc { + +namespace { +constexpr TimeDelta kRembSendInterval = TimeDelta::Millis(200); +} // namespace + +RembThrottler::RembThrottler(RembSender remb_sender, Clock* clock) + : remb_sender_(std::move(remb_sender)), + clock_(clock), + last_remb_time_(Timestamp::MinusInfinity()), + last_send_remb_bitrate_(DataRate::PlusInfinity()), + max_remb_bitrate_(DataRate::PlusInfinity()) {} + +void RembThrottler::OnReceiveBitrateChanged(const std::vector& ssrcs, + uint32_t bitrate_bps) { + DataRate receive_bitrate = DataRate::BitsPerSec(bitrate_bps); + Timestamp now = clock_->CurrentTime(); + { + MutexLock lock(&mutex_); + // % threshold for if we should send a new REMB asap. 
+ const int64_t kSendThresholdPercent = 103; + if (receive_bitrate * kSendThresholdPercent / 100 > + last_send_remb_bitrate_ && + now < last_remb_time_ + kRembSendInterval) { + return; + } + last_remb_time_ = now; + last_send_remb_bitrate_ = receive_bitrate; + receive_bitrate = std::min(last_send_remb_bitrate_, max_remb_bitrate_); + } + remb_sender_(receive_bitrate.bps(), ssrcs); +} + +void RembThrottler::SetMaxDesiredReceiveBitrate(DataRate bitrate) { + Timestamp now = clock_->CurrentTime(); + { + MutexLock lock(&mutex_); + max_remb_bitrate_ = bitrate; + if (now - last_remb_time_ < kRembSendInterval && + !last_send_remb_bitrate_.IsZero() && + last_send_remb_bitrate_ <= max_remb_bitrate_) { + return; + } + } + remb_sender_(bitrate.bps(), /*ssrcs=*/{}); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.h new file mode 100644 index 000000000..67c028074 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/remb_throttler.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_CONGESTION_CONTROLLER_REMB_THROTTLER_H_ +#define MODULES_CONGESTION_CONTROLLER_REMB_THROTTLER_H_ + +#include +#include + +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +// RembThrottler is a helper class used for throttling RTCP REMB messages. 
+// Throttles small changes to the received BWE within 200ms. +class RembThrottler : public RemoteBitrateObserver { + public: + using RembSender = + std::function ssrcs)>; + RembThrottler(RembSender remb_sender, Clock* clock); + + // Ensures the remote party is notified of the receive bitrate no larger than + // |bitrate| using RTCP REMB. + void SetMaxDesiredReceiveBitrate(DataRate bitrate); + + // Implements RemoteBitrateObserver; + // Called every time there is a new bitrate estimate for a receive channel + // group. This call will trigger a new RTCP REMB packet if the bitrate + // estimate has decreased or if no RTCP REMB packet has been sent for + // a certain time interval. + void OnReceiveBitrateChanged(const std::vector& ssrcs, + uint32_t bitrate_bps) override; + + private: + const RembSender remb_sender_; + Clock* const clock_; + mutable Mutex mutex_; + Timestamp last_remb_time_ RTC_GUARDED_BY(mutex_); + DataRate last_send_remb_bitrate_ RTC_GUARDED_BY(mutex_); + DataRate max_remb_bitrate_ RTC_GUARDED_BY(mutex_); +}; + +} // namespace webrtc +#endif // MODULES_CONGESTION_CONTROLLER_REMB_THROTTLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h index 9cce0d72b..1da646321 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h @@ -14,12 +14,13 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/transport/network_types.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "modules/pacing/paced_sender.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { // This is used to observe the network controller state and route calls to @@ 
-46,7 +47,7 @@ class CongestionControlHandler { const bool disable_pacer_emergency_stop_; int64_t pacer_expected_queue_ms_ = 0; - SequenceChecker sequenced_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequenced_checker_; RTC_DISALLOW_COPY_AND_ASSIGN(CongestionControlHandler); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.h index c41a7c67f..deb7925d7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_adapter.h @@ -16,13 +16,13 @@ #include #include +#include "api/sequence_checker.h" #include "api/transport/network_types.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc index a0e76761e..51d3edc30 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc @@ -58,13 +58,13 @@ PacedSender::~PacedSender() { } void PacedSender::CreateProbeCluster(DataRate bitrate, int cluster_id) { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); return pacing_controller_.CreateProbeCluster(bitrate, cluster_id); } void PacedSender::Pause() { { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.Pause(); } @@ -77,7 +77,7 @@ void PacedSender::Pause() { void PacedSender::Resume() { { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.Resume(); } @@ -90,7 
+90,7 @@ void PacedSender::Resume() { void PacedSender::SetCongestionWindow(DataSize congestion_window_size) { { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.SetCongestionWindow(congestion_window_size); } MaybeWakupProcessThread(); @@ -98,7 +98,7 @@ void PacedSender::SetCongestionWindow(DataSize congestion_window_size) { void PacedSender::UpdateOutstandingData(DataSize outstanding_data) { { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.UpdateOutstandingData(outstanding_data); } MaybeWakupProcessThread(); @@ -106,7 +106,7 @@ void PacedSender::UpdateOutstandingData(DataSize outstanding_data) { void PacedSender::SetPacingRates(DataRate pacing_rate, DataRate padding_rate) { { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.SetPacingRates(pacing_rate, padding_rate); } MaybeWakupProcessThread(); @@ -117,13 +117,14 @@ void PacedSender::EnqueuePackets( { TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("webrtc"), "PacedSender::EnqueuePackets"); - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); for (auto& packet : packets) { TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), "PacedSender::EnqueuePackets::Loop", "sequence_number", packet->SequenceNumber(), "rtp_timestamp", packet->Timestamp()); + RTC_DCHECK_GE(packet->capture_time_ms(), 0); pacing_controller_.EnqueuePacket(std::move(packet)); } } @@ -131,42 +132,42 @@ void PacedSender::EnqueuePackets( } void PacedSender::SetAccountForAudioPackets(bool account_for_audio) { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.SetAccountForAudioPackets(account_for_audio); } void PacedSender::SetIncludeOverhead() { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.SetIncludeOverhead(); } void PacedSender::SetTransportOverhead(DataSize overhead_per_packet) { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.SetTransportOverhead(overhead_per_packet); } 
TimeDelta PacedSender::ExpectedQueueTime() const { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); return pacing_controller_.ExpectedQueueTime(); } DataSize PacedSender::QueueSizeData() const { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); return pacing_controller_.QueueSizeData(); } absl::optional PacedSender::FirstSentPacketTime() const { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); return pacing_controller_.FirstSentPacketTime(); } TimeDelta PacedSender::OldestPacketWaitTime() const { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); return pacing_controller_.OldestPacketWaitTime(); } int64_t PacedSender::TimeUntilNextProcess() { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); Timestamp next_send_time = pacing_controller_.NextSendTime(); TimeDelta sleep_time = @@ -178,7 +179,7 @@ int64_t PacedSender::TimeUntilNextProcess() { } void PacedSender::Process() { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.ProcessPackets(); } @@ -198,7 +199,7 @@ void PacedSender::MaybeWakupProcessThread() { void PacedSender::SetQueueTimeLimit(TimeDelta limit) { { - rtc::CritScope cs(&critsect_); + MutexLock lock(&mutex_); pacing_controller_.SetQueueTimeLimit(limit); } MaybeWakupProcessThread(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h index d255efdc3..c819f3fb7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h @@ -32,7 +32,7 @@ #include "modules/rtp_rtcp/include/rtp_packet_sender.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/utility/include/process_thread.h" -#include "rtc_base/deprecated/recursive_critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -157,9 +157,9 @@ class PacedSender : public Module, 
PacedSender* const delegate_; } module_proxy_{this}; - rtc::RecursiveCriticalSection critsect_; + mutable Mutex mutex_; const PacingController::ProcessMode process_mode_; - PacingController pacing_controller_ RTC_GUARDED_BY(critsect_); + PacingController pacing_controller_ RTC_GUARDED_BY(mutex_); Clock* const clock_; ProcessThread* const process_thread_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc index 5ffbc903b..e0ace4e65 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc @@ -112,8 +112,6 @@ PacingController::PacingController(Clock* clock, send_padding_if_silent_( IsEnabled(*field_trials_, "WebRTC-Pacer-PadInSilence")), pace_audio_(IsEnabled(*field_trials_, "WebRTC-Pacer-BlockAudio")), - small_first_probe_packet_( - !IsDisabled(*field_trials_, "WebRTC-Pacer-SmallFirstProbePacket")), ignore_transport_overhead_( IsEnabled(*field_trials_, "WebRTC-Pacer-IgnoreTransportOverhead")), padding_target_duration_(GetDynamicPaddingTarget(*field_trials_)), @@ -297,11 +295,7 @@ void PacingController::EnqueuePacketInternal( int priority) { prober_.OnIncomingPacket(DataSize::Bytes(packet->payload_size())); - // TODO(sprang): Make sure tests respect this, replace with DCHECK. Timestamp now = CurrentTime(); - if (packet->capture_time_ms() < 0) { - packet->set_capture_time_ms(now.ms()); - } if (mode_ == ProcessMode::kDynamic && packet_queue_.Empty() && NextSendTime() <= now) { @@ -519,7 +513,7 @@ void PacingController::ProcessPackets() { // The paused state is checked in the loop since it leaves the critical // section allowing the paused state to be changed from other code. 
while (!paused_) { - if (small_first_probe_packet_ && first_packet_in_probe) { + if (first_packet_in_probe) { // If first packet in probe, insert a small padding packet so we have a // more reliable start window for the rate estimation. auto padding = packet_sender_->GeneratePadding(DataSize::Bytes(1)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.h index 6e0f9bd5b..b0bdfb2e4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.h @@ -182,7 +182,6 @@ class PacingController { const bool drain_large_queues_; const bool send_padding_if_silent_; const bool pace_audio_; - const bool small_first_probe_packet_; const bool ignore_transport_overhead_; // In dynamic mode, indicates the target size when requesting padding, // expressed as a duration in order to adjust for varying padding rate. diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.cc index 5317f510c..3b1278e50 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.cc @@ -27,20 +27,11 @@ #include "rtc_base/trace_event.h" namespace webrtc { -namespace { - -constexpr int kRembSendIntervalMs = 200; - -} // namespace PacketRouter::PacketRouter() : PacketRouter(0) {} PacketRouter::PacketRouter(uint16_t start_transport_seq) : last_send_module_(nullptr), - last_remb_time_ms_(rtc::TimeMillis()), - last_send_bitrate_bps_(0), - bitrate_bps_(0), - max_bitrate_bps_(std::numeric_limits::max()), active_remb_module_(nullptr), transport_seq_(start_transport_seq) {} @@ -235,77 +226,19 @@ uint16_t PacketRouter::CurrentTransportSequenceNumber() const { return transport_seq_ & 0xFFFF; } -void PacketRouter::OnReceiveBitrateChanged(const std::vector& ssrcs, - uint32_t bitrate_bps) 
{ - // % threshold for if we should send a new REMB asap. - const int64_t kSendThresholdPercent = 97; - // TODO(danilchap): Remove receive_bitrate_bps variable and the cast - // when OnReceiveBitrateChanged takes bitrate as int64_t. - int64_t receive_bitrate_bps = static_cast(bitrate_bps); - - int64_t now_ms = rtc::TimeMillis(); - { - MutexLock lock(&remb_mutex_); - - // If we already have an estimate, check if the new total estimate is below - // kSendThresholdPercent of the previous estimate. - if (last_send_bitrate_bps_ > 0) { - int64_t new_remb_bitrate_bps = - last_send_bitrate_bps_ - bitrate_bps_ + receive_bitrate_bps; - - if (new_remb_bitrate_bps < - kSendThresholdPercent * last_send_bitrate_bps_ / 100) { - // The new bitrate estimate is less than kSendThresholdPercent % of the - // last report. Send a REMB asap. - last_remb_time_ms_ = now_ms - kRembSendIntervalMs; - } - } - bitrate_bps_ = receive_bitrate_bps; - - if (now_ms - last_remb_time_ms_ < kRembSendIntervalMs) { - return; - } - // NOTE: Updated if we intend to send the data; we might not have - // a module to actually send it. - last_remb_time_ms_ = now_ms; - last_send_bitrate_bps_ = receive_bitrate_bps; - // Cap the value to send in remb with configured value. - receive_bitrate_bps = std::min(receive_bitrate_bps, max_bitrate_bps_); - } - SendRemb(receive_bitrate_bps, ssrcs); -} - -void PacketRouter::SetMaxDesiredReceiveBitrate(int64_t bitrate_bps) { - RTC_DCHECK_GE(bitrate_bps, 0); - { - MutexLock lock(&remb_mutex_); - max_bitrate_bps_ = bitrate_bps; - if (rtc::TimeMillis() - last_remb_time_ms_ < kRembSendIntervalMs && - last_send_bitrate_bps_ > 0 && - last_send_bitrate_bps_ <= max_bitrate_bps_) { - // Recent measured bitrate is already below the cap. 
- return; - } - } - SendRemb(bitrate_bps, /*ssrcs=*/{}); -} - -bool PacketRouter::SendRemb(int64_t bitrate_bps, - const std::vector& ssrcs) { +void PacketRouter::SendRemb(int64_t bitrate_bps, std::vector ssrcs) { MutexLock lock(&modules_mutex_); if (!active_remb_module_) { - return false; + return; } // The Add* and Remove* methods above ensure that REMB is disabled on all // other modules, because otherwise, they will send REMB with stale info. - active_remb_module_->SetRemb(bitrate_bps, ssrcs); - - return true; + active_remb_module_->SetRemb(bitrate_bps, std::move(ssrcs)); } -bool PacketRouter::SendCombinedRtcpPacket( +void PacketRouter::SendCombinedRtcpPacket( std::vector> packets) { MutexLock lock(&modules_mutex_); @@ -315,15 +248,14 @@ bool PacketRouter::SendCombinedRtcpPacket( continue; } rtp_module->SendCombinedRtcpPacket(std::move(packets)); - return true; + return; } if (rtcp_feedback_senders_.empty()) { - return false; + return; } auto* rtcp_sender = rtcp_feedback_senders_[0]; rtcp_sender->SendCombinedRtcpPacket(std::move(packets)); - return true; } void PacketRouter::AddRembModuleCandidate( diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h index 2fa104b4c..7a6e24d7e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h @@ -39,9 +39,7 @@ class RtpRtcpInterface; // module if possible (sender report), otherwise on receive module // (receiver report). For the latter case, we also keep track of the // receive modules. 
-class PacketRouter : public RemoteBitrateObserver, - public TransportFeedbackSenderInterface, - public PacingController::PacketSender { +class PacketRouter : public PacingController::PacketSender { public: PacketRouter(); explicit PacketRouter(uint16_t start_transport_seq); @@ -62,24 +60,12 @@ class PacketRouter : public RemoteBitrateObserver, uint16_t CurrentTransportSequenceNumber() const; - // Called every time there is a new bitrate estimate for a receive channel - // group. This call will trigger a new RTCP REMB packet if the bitrate - // estimate has decreased or if no RTCP REMB packet has been sent for - // a certain time interval. - // Implements RtpReceiveBitrateUpdate. - void OnReceiveBitrateChanged(const std::vector& ssrcs, - uint32_t bitrate_bps) override; - - // Ensures remote party notified of the receive bitrate limit no larger than - // |bitrate_bps|. - void SetMaxDesiredReceiveBitrate(int64_t bitrate_bps); - // Send REMB feedback. - bool SendRemb(int64_t bitrate_bps, const std::vector& ssrcs); + void SendRemb(int64_t bitrate_bps, std::vector ssrcs); // Sends |packets| in one or more IP packets. - bool SendCombinedRtcpPacket( - std::vector> packets) override; + void SendCombinedRtcpPacket( + std::vector> packets); private: void AddRembModuleCandidate(RtcpFeedbackSenderInterface* candidate_module, @@ -107,16 +93,6 @@ class PacketRouter : public RemoteBitrateObserver, std::vector rtcp_feedback_senders_ RTC_GUARDED_BY(modules_mutex_); - // TODO(eladalon): remb_mutex_ only ever held from one function, and it's not - // clear if that function can actually be called from more than one thread. - Mutex remb_mutex_; - // The last time a REMB was sent. - int64_t last_remb_time_ms_ RTC_GUARDED_BY(remb_mutex_); - int64_t last_send_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_); - // The last bitrate update. 
- int64_t bitrate_bps_ RTC_GUARDED_BY(remb_mutex_); - int64_t max_bitrate_bps_ RTC_GUARDED_BY(remb_mutex_); - // Candidates for the REMB module can be RTP sender/receiver modules, with // the sender modules taking precedence. std::vector sender_remb_candidates_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.h index 9446a8e17..cad555a1a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/round_robin_packet_queue.h @@ -19,6 +19,7 @@ #include #include #include +#include #include "absl/types/optional.h" #include "api/transport/webrtc_key_value_config.h" @@ -163,7 +164,7 @@ class RoundRobinPacketQueue { std::multimap stream_priorities_; // A map of SSRCs to Streams. - std::map streams_; + std::unordered_map streams_; // The enqueue time of every packet currently in the queue. Used to figure out // the age of the oldest packet in the queue. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc index 69ec5457a..709718ff1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc @@ -32,7 +32,7 @@ constexpr TimeDelta kMinTimeBetweenStatsUpdates = TimeDelta::Millis(1); TaskQueuePacedSender::TaskQueuePacedSender( Clock* clock, - PacketRouter* packet_router, + PacingController::PacketSender* packet_sender, RtcEventLog* event_log, const WebRtcKeyValueConfig* field_trials, TaskQueueFactory* task_queue_factory, @@ -40,7 +40,7 @@ TaskQueuePacedSender::TaskQueuePacedSender( : clock_(clock), hold_back_window_(hold_back_window), pacing_controller_(clock, - packet_router, + packet_sender, event_log, field_trials, PacingController::ProcessMode::kDynamic), @@ -62,6 +62,14 @@ TaskQueuePacedSender::~TaskQueuePacedSender() { }); } +void TaskQueuePacedSender::EnsureStarted() { + task_queue_.PostTask([this]() { + RTC_DCHECK_RUN_ON(&task_queue_); + is_started_ = true; + MaybeProcessPackets(Timestamp::MinusInfinity()); + }); +} + void TaskQueuePacedSender::CreateProbeCluster(DataRate bitrate, int cluster_id) { task_queue_.PostTask([this, bitrate, cluster_id]() { @@ -136,6 +144,7 @@ void TaskQueuePacedSender::EnqueuePackets( task_queue_.PostTask([this, packets_ = std::move(packets)]() mutable { RTC_DCHECK_RUN_ON(&task_queue_); for (auto& packet : packets_) { + RTC_DCHECK_GE(packet->capture_time_ms(), 0); pacing_controller_.EnqueuePacket(std::move(packet)); } MaybeProcessPackets(Timestamp::MinusInfinity()); @@ -196,7 +205,7 @@ void TaskQueuePacedSender::MaybeProcessPackets( Timestamp scheduled_process_time) { RTC_DCHECK_RUN_ON(&task_queue_); - if (is_shutdown_) { + if (is_shutdown_ || !is_started_) { return; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h 
b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h index ba4f4667b..0673441e5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h @@ -20,17 +20,16 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/include/module.h" #include "modules/pacing/pacing_controller.h" -#include "modules/pacing/packet_router.h" #include "modules/pacing/rtp_packet_pacer.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" @@ -47,7 +46,7 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { // TODO(bugs.webrtc.org/10809): Remove default value for hold_back_window. TaskQueuePacedSender( Clock* clock, - PacketRouter* packet_router, + PacingController::PacketSender* packet_sender, RtcEventLog* event_log, const WebRtcKeyValueConfig* field_trials, TaskQueueFactory* task_queue_factory, @@ -55,10 +54,13 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { ~TaskQueuePacedSender() override; + // Ensure that necessary delayed tasks are scheduled. + void EnsureStarted(); + // Methods implementing RtpPacketSender. - // Adds the packet to the queue and calls PacketRouter::SendPacket() when - // it's time to send. + // Adds the packet to the queue and calls + // PacingController::PacketSender::SendPacket() when it's time to send. void EnqueuePackets( std::vector> packets) override; @@ -150,6 +152,10 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { // Last time stats were updated. 
Timestamp last_stats_time_ RTC_GUARDED_BY(task_queue_); + // Indicates if this task queue is started. If not, don't allow + // posting delayed tasks yet. + bool is_started_ RTC_GUARDED_BY(task_queue_) = false; + // Indicates if this task queue is shutting down. If so, don't allow // posting any more delayed tasks as that can cause the task queue to // never drain. diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc index da1317664..2ca298b7f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.cc @@ -78,7 +78,7 @@ AimdRateControl::AimdRateControl(const WebRtcKeyValueConfig* key_value_config, current_bitrate_(max_configured_bitrate_), latest_estimated_throughput_(current_bitrate_), link_capacity_(), - rate_control_state_(kRcHold), + rate_control_state_(RateControlState::kRcHold), time_last_bitrate_change_(Timestamp::MinusInfinity()), time_last_bitrate_decrease_(Timestamp::MinusInfinity()), time_first_throughput_estimate_(Timestamp::MinusInfinity()), @@ -280,10 +280,10 @@ void AimdRateControl::ChangeBitrate(const RateControlInput& input, 1.5 * estimated_throughput + DataRate::KilobitsPerSec(10); switch (rate_control_state_) { - case kRcHold: + case RateControlState::kRcHold: break; - case kRcIncrease: + case RateControlState::kRcIncrease: if (estimated_throughput > link_capacity_.UpperBound()) link_capacity_.Reset(); @@ -316,7 +316,7 @@ void AimdRateControl::ChangeBitrate(const RateControlInput& input, time_last_bitrate_change_ = at_time; break; - case kRcDecrease: { + case RateControlState::kRcDecrease: { DataRate decreased_bitrate = DataRate::PlusInfinity(); // Set bit rate to something slightly lower than the measured throughput @@ -356,7 +356,7 @@ void AimdRateControl::ChangeBitrate(const RateControlInput& 
input, bitrate_is_initialized_ = true; link_capacity_.OnOveruseDetected(estimated_throughput); // Stay on hold until the pipes are cleared. - rate_control_state_ = kRcHold; + rate_control_state_ = RateControlState::kRcHold; time_last_bitrate_change_ = at_time; time_last_bitrate_decrease_ = at_time; break; @@ -403,18 +403,18 @@ void AimdRateControl::ChangeState(const RateControlInput& input, Timestamp at_time) { switch (input.bw_state) { case BandwidthUsage::kBwNormal: - if (rate_control_state_ == kRcHold) { + if (rate_control_state_ == RateControlState::kRcHold) { time_last_bitrate_change_ = at_time; - rate_control_state_ = kRcIncrease; + rate_control_state_ = RateControlState::kRcIncrease; } break; case BandwidthUsage::kBwOverusing: - if (rate_control_state_ != kRcDecrease) { - rate_control_state_ = kRcDecrease; + if (rate_control_state_ != RateControlState::kRcDecrease) { + rate_control_state_ = RateControlState::kRcDecrease; } break; case BandwidthUsage::kBwUnderusing: - rate_control_state_ = kRcHold; + rate_control_state_ = RateControlState::kRcHold; break; default: assert(false); diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h index c9e9470c5..3e0d541b6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h @@ -65,6 +65,8 @@ class AimdRateControl { TimeDelta GetExpectedBandwidthPeriod() const; private: + enum class RateControlState { kRcHold, kRcIncrease, kRcDecrease }; + friend class GoogCcStatePrinter; // Update the target bitrate based on, among other things, the current rate // control state, the current target bitrate and the estimated throughput. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h index 40fbfe005..b3ca1846f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h @@ -17,9 +17,6 @@ #include "api/network_state_predictor.h" #include "api/units/data_rate.h" -#define BWE_MAX(a, b) ((a) > (b) ? (a) : (b)) -#define BWE_MIN(a, b) ((a) < (b) ? (a) : (b)) - namespace webrtc { namespace congestion_controller { @@ -39,8 +36,6 @@ enum BweNames { kBweNamesMax = 4 }; -enum RateControlState { kRcHold, kRcIncrease, kRcDecrease }; - struct RateControlInput { RateControlInput(BandwidthUsage bw_state, const absl::optional& estimated_throughput); diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h index c60c030e8..ac937bbfe 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h @@ -38,14 +38,6 @@ class RemoteBitrateObserver { virtual ~RemoteBitrateObserver() {} }; -class TransportFeedbackSenderInterface { - public: - virtual ~TransportFeedbackSenderInterface() = default; - - virtual bool SendCombinedRtcpPacket( - std::vector> packets) = 0; -}; - // TODO(holmer): Remove when all implementations have been updated. 
struct ReceiveBandwidthEstimatorStats {}; diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc index 44cbe5013..710b3b21d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc @@ -16,7 +16,6 @@ #include #include -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h index 1df6cab78..4e72e8e03 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h @@ -12,8 +12,8 @@ #include +#include "api/network_state_predictor.h" #include "api/transport/webrtc_key_value_config.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "rtc_base/constructor_magic.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc index e97e06b0b..74449bec6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.cc @@ -16,7 +16,7 @@ #include -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "api/network_state_predictor.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "rtc_base/logging.h" diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h index d5f675e99..a082d9d06 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h @@ -14,7 +14,7 @@ #include -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "api/network_state_predictor.h" #include "rtc_base/constructor_magic.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc new file mode 100644 index 000000000..72696f6c8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/remote_bitrate_estimator/packet_arrival_map.h" + +#include + +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { + +constexpr size_t PacketArrivalTimeMap::kMaxNumberOfPackets; + +void PacketArrivalTimeMap::AddPacket(int64_t sequence_number, + int64_t arrival_time_ms) { + if (!has_seen_packet_) { + // First packet. 
+ has_seen_packet_ = true; + begin_sequence_number_ = sequence_number; + arrival_times.push_back(arrival_time_ms); + return; + } + + int64_t pos = sequence_number - begin_sequence_number_; + if (pos >= 0 && pos < static_cast(arrival_times.size())) { + // The packet is within the buffer - no need to expand it. + arrival_times[pos] = arrival_time_ms; + return; + } + + if (pos < 0) { + // The packet goes before the current buffer. Expand to add packet, but only + // if it fits within kMaxNumberOfPackets. + size_t missing_packets = -pos; + if (missing_packets + arrival_times.size() > kMaxNumberOfPackets) { + // Don't expand the buffer further, as that would remove newly received + // packets. + return; + } + + arrival_times.insert(arrival_times.begin(), missing_packets, 0); + arrival_times[0] = arrival_time_ms; + begin_sequence_number_ = sequence_number; + return; + } + + // The packet goes after the buffer. + + if (static_cast(pos) >= kMaxNumberOfPackets) { + // The buffer grows too large - old packets have to be removed. + size_t packets_to_remove = pos - kMaxNumberOfPackets + 1; + if (packets_to_remove >= arrival_times.size()) { + arrival_times.clear(); + begin_sequence_number_ = sequence_number; + pos = 0; + } else { + // Also trim the buffer to remove leading non-received packets, to + // ensure that the buffer only spans received packets. + while (packets_to_remove < arrival_times.size() && + arrival_times[packets_to_remove] == 0) { + ++packets_to_remove; + } + + arrival_times.erase(arrival_times.begin(), + arrival_times.begin() + packets_to_remove); + begin_sequence_number_ += packets_to_remove; + pos -= packets_to_remove; + RTC_DCHECK_GE(pos, 0); + } + } + + // Packets can be received out-of-order. If this isn't the next expected + // packet, add enough placeholders to fill the gap. 
+ size_t missing_gap_packets = pos - arrival_times.size(); + if (missing_gap_packets > 0) { + arrival_times.insert(arrival_times.end(), missing_gap_packets, 0); + } + RTC_DCHECK_EQ(arrival_times.size(), pos); + arrival_times.push_back(arrival_time_ms); + RTC_DCHECK_LE(arrival_times.size(), kMaxNumberOfPackets); +} + +void PacketArrivalTimeMap::RemoveOldPackets(int64_t sequence_number, + int64_t arrival_time_limit) { + while (!arrival_times.empty() && begin_sequence_number_ < sequence_number && + arrival_times.front() <= arrival_time_limit) { + arrival_times.pop_front(); + ++begin_sequence_number_; + } +} + +bool PacketArrivalTimeMap::has_received(int64_t sequence_number) const { + int64_t pos = sequence_number - begin_sequence_number_; + if (pos >= 0 && pos < static_cast(arrival_times.size()) && + arrival_times[pos] != 0) { + return true; + } + return false; +} + +void PacketArrivalTimeMap::EraseTo(int64_t sequence_number) { + if (sequence_number > begin_sequence_number_) { + size_t count = + std::min(static_cast(sequence_number - begin_sequence_number_), + arrival_times.size()); + + arrival_times.erase(arrival_times.begin(), arrival_times.begin() + count); + begin_sequence_number_ += count; + } +} + +int64_t PacketArrivalTimeMap::clamp(int64_t sequence_number) const { + return rtc::SafeClamp(sequence_number, begin_sequence_number(), + end_sequence_number()); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.h new file mode 100644 index 000000000..10659e0f6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.h @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_PACKET_ARRIVAL_MAP_H_ +#define MODULES_REMOTE_BITRATE_ESTIMATOR_PACKET_ARRIVAL_MAP_H_ + +#include +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +// PacketArrivalTimeMap is an optimized map of packet sequence number to arrival +// time, limited in size to never exceed `kMaxNumberOfPackets`. It will grow as +// needed, and remove old packets, and will expand to allow earlier packets to +// be added (out-of-order). +// +// Not yet received packets have the arrival time zero. The queue will not span +// larger than necessary and the last packet should always be received. The +// first packet in the queue doesn't have to be received in case of receiving +// packets out-of-order. +class PacketArrivalTimeMap { + public: + // Impossible to request feedback older than what can be represented by 15 + // bits. + static constexpr size_t kMaxNumberOfPackets = (1 << 15); + + // Indicates if the packet with `sequence_number` has already been received. + bool has_received(int64_t sequence_number) const; + + // Returns the sequence number of the first entry in the map, i.e. the + // sequence number that a `begin()` iterator would represent. + int64_t begin_sequence_number() const { return begin_sequence_number_; } + + // Returns the sequence number of the element just after the map, i.e. the + // sequence number that an `end()` iterator would represent. + int64_t end_sequence_number() const { + return begin_sequence_number_ + arrival_times.size(); + } + + // Returns an element by `sequence_number`, which must be valid, i.e. + // between [begin_sequence_number, end_sequence_number). 
+ int64_t get(int64_t sequence_number) { + int64_t pos = sequence_number - begin_sequence_number_; + RTC_DCHECK(pos >= 0 && pos < static_cast(arrival_times.size())); + return arrival_times[pos]; + } + + // Clamps `sequence_number` between [begin_sequence_number, + // end_sequence_number]. + int64_t clamp(int64_t sequence_number) const; + + // Erases all elements from the beginning of the map until `sequence_number`. + void EraseTo(int64_t sequence_number); + + // Records the fact that a packet with `sequence_number` arrived at + // `arrival_time_ms`. + void AddPacket(int64_t sequence_number, int64_t arrival_time_ms); + + // Removes packets from the beginning of the map as long as they are received + // before `sequence_number` and with an age older than `arrival_time_limit` + void RemoveOldPackets(int64_t sequence_number, int64_t arrival_time_limit); + + private: + // Deque representing unwrapped sequence number -> time, where the index + + // `begin_sequence_number_` represents the packet's sequence number. + std::deque arrival_times; + + // The unwrapped sequence number for the first element in + // `arrival_times`. + int64_t begin_sequence_number_ = 0; + + // Indicates if this map has had any packet added to it. The first packet + // decides the initial sequence number. 
+ bool has_seen_packet_ = false; +}; + +} // namespace webrtc + +#endif // MODULES_REMOTE_BITRATE_ESTIMATOR_PACKET_ARRIVAL_MAP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc index e8f835ca6..4196f6dc5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc @@ -15,6 +15,7 @@ #include #include "api/transport/field_trial_based_config.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc index a9cc170a3..7764e60ef 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc @@ -23,9 +23,6 @@ namespace webrtc { -// Impossible to request feedback older than what can be represented by 15 bits. -const int RemoteEstimatorProxy::kMaxNumberOfPackets = (1 << 15); - // The maximum allowed value for a timestamp in milliseconds. This is lower // than the numerical limit since we often convert to microseconds. 
static constexpr int64_t kMaxTimeMs = @@ -33,11 +30,11 @@ static constexpr int64_t kMaxTimeMs = RemoteEstimatorProxy::RemoteEstimatorProxy( Clock* clock, - TransportFeedbackSenderInterface* feedback_sender, + TransportFeedbackSender feedback_sender, const WebRtcKeyValueConfig* key_value_config, NetworkStateEstimator* network_state_estimator) : clock_(clock), - feedback_sender_(feedback_sender), + feedback_sender_(std::move(feedback_sender)), send_config_(key_value_config), last_process_time_ms_(-1), network_state_estimator_(network_state_estimator), @@ -54,6 +51,18 @@ RemoteEstimatorProxy::RemoteEstimatorProxy( RemoteEstimatorProxy::~RemoteEstimatorProxy() {} +void RemoteEstimatorProxy::MaybeCullOldPackets(int64_t sequence_number, + int64_t arrival_time_ms) { + if (periodic_window_start_seq_.has_value()) { + if (*periodic_window_start_seq_ >= + packet_arrival_times_.end_sequence_number()) { + // Start new feedback packet, cull old packets. + packet_arrival_times_.RemoveOldPackets( + sequence_number, arrival_time_ms - send_config_.back_window->ms()); + } + } +} + void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms, size_t payload_size, const RTPHeader& header) { @@ -69,39 +78,26 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms, seq = unwrapper_.Unwrap(header.extension.transportSequenceNumber); if (send_periodic_feedback_) { - if (periodic_window_start_seq_ && - packet_arrival_times_.lower_bound(*periodic_window_start_seq_) == - packet_arrival_times_.end()) { - // Start new feedback packet, cull old packets. 
- for (auto it = packet_arrival_times_.begin(); - it != packet_arrival_times_.end() && it->first < seq && - arrival_time_ms - it->second >= send_config_.back_window->ms();) { - it = packet_arrival_times_.erase(it); - } - } + MaybeCullOldPackets(seq, arrival_time_ms); + if (!periodic_window_start_seq_ || seq < *periodic_window_start_seq_) { periodic_window_start_seq_ = seq; } } // We are only interested in the first time a packet is received. - if (packet_arrival_times_.find(seq) != packet_arrival_times_.end()) + if (packet_arrival_times_.has_received(seq)) { return; + } - packet_arrival_times_[seq] = arrival_time_ms; + packet_arrival_times_.AddPacket(seq, arrival_time_ms); // Limit the range of sequence numbers to send feedback for. - auto first_arrival_time_to_keep = packet_arrival_times_.lower_bound( - packet_arrival_times_.rbegin()->first - kMaxNumberOfPackets); - if (first_arrival_time_to_keep != packet_arrival_times_.begin()) { - packet_arrival_times_.erase(packet_arrival_times_.begin(), - first_arrival_time_to_keep); - if (send_periodic_feedback_) { - // |packet_arrival_times_| cannot be empty since we just added one - // element and the last element is not deleted. - RTC_DCHECK(!packet_arrival_times_.empty()); - periodic_window_start_seq_ = packet_arrival_times_.begin()->first; - } + if (!periodic_window_start_seq_.has_value() || + periodic_window_start_seq_.value() < + packet_arrival_times_.begin_sequence_number()) { + periodic_window_start_seq_ = + packet_arrival_times_.begin_sequence_number(); } if (header.extension.feedback_request) { @@ -113,8 +109,8 @@ void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms, if (network_state_estimator_ && header.extension.hasAbsoluteSendTime) { PacketResult packet_result; packet_result.receive_time = Timestamp::Millis(arrival_time_ms); - // Ignore reordering of packets and assume they have approximately the same - // send time. 
+ // Ignore reordering of packets and assume they have approximately the + // same send time. abs_send_timestamp_ += std::max( header.extension.GetAbsoluteSendTimeDelta(previous_abs_send_time_), TimeDelta::Millis(0)); @@ -183,9 +179,9 @@ void RemoteEstimatorProxy::SetSendPeriodicFeedback( } void RemoteEstimatorProxy::SendPeriodicFeedbacks() { - // |periodic_window_start_seq_| is the first sequence number to include in the - // current feedback packet. Some older may still be in the map, in case a - // reordering happens and we need to retransmit them. + // |periodic_window_start_seq_| is the first sequence number to include in + // the current feedback packet. Some older may still be in the map, in case + // a reordering happens and we need to retransmit them. if (!periodic_window_start_seq_) return; @@ -199,15 +195,17 @@ void RemoteEstimatorProxy::SendPeriodicFeedbacks() { } } - for (auto begin_iterator = - packet_arrival_times_.lower_bound(*periodic_window_start_seq_); - begin_iterator != packet_arrival_times_.cend(); - begin_iterator = - packet_arrival_times_.lower_bound(*periodic_window_start_seq_)) { - auto feedback_packet = std::make_unique(); - periodic_window_start_seq_ = BuildFeedbackPacket( - feedback_packet_count_++, media_ssrc_, *periodic_window_start_seq_, - begin_iterator, packet_arrival_times_.cend(), feedback_packet.get()); + int64_t packet_arrival_times_end_seq = + packet_arrival_times_.end_sequence_number(); + while (periodic_window_start_seq_ < packet_arrival_times_end_seq) { + auto feedback_packet = MaybeBuildFeedbackPacket( + /*include_timestamps=*/true, periodic_window_start_seq_.value(), + packet_arrival_times_end_seq, + /*is_periodic_update=*/true); + + if (feedback_packet == nullptr) { + break; + } RTC_DCHECK(feedback_sender_ != nullptr); @@ -217,10 +215,10 @@ void RemoteEstimatorProxy::SendPeriodicFeedbacks() { } packets.push_back(std::move(feedback_packet)); - feedback_sender_->SendCombinedRtcpPacket(std::move(packets)); - // Note: Don't 
erase items from packet_arrival_times_ after sending, in case - // they need to be re-sent after a reordering. Removal will be handled - // by OnPacketArrival once packets are too old. + feedback_sender_(std::move(packets)); + // Note: Don't erase items from packet_arrival_times_ after sending, in + // case they need to be re-sent after a reordering. Removal will be + // handled by OnPacketArrival once packets are too old. } } @@ -231,61 +229,79 @@ void RemoteEstimatorProxy::SendFeedbackOnRequest( return; } - auto feedback_packet = std::make_unique( - feedback_request.include_timestamps); - int64_t first_sequence_number = sequence_number - feedback_request.sequence_count + 1; - auto begin_iterator = - packet_arrival_times_.lower_bound(first_sequence_number); - auto end_iterator = packet_arrival_times_.upper_bound(sequence_number); - BuildFeedbackPacket(feedback_packet_count_++, media_ssrc_, - first_sequence_number, begin_iterator, end_iterator, - feedback_packet.get()); + auto feedback_packet = MaybeBuildFeedbackPacket( + feedback_request.include_timestamps, first_sequence_number, + sequence_number + 1, /*is_periodic_update=*/false); + + // This is called when a packet has just been added. + RTC_DCHECK(feedback_packet != nullptr); // Clear up to the first packet that is included in this feedback packet. 
- packet_arrival_times_.erase(packet_arrival_times_.begin(), begin_iterator); + packet_arrival_times_.EraseTo(first_sequence_number); RTC_DCHECK(feedback_sender_ != nullptr); std::vector> packets; packets.push_back(std::move(feedback_packet)); - feedback_sender_->SendCombinedRtcpPacket(std::move(packets)); + feedback_sender_(std::move(packets)); } -int64_t RemoteEstimatorProxy::BuildFeedbackPacket( - uint8_t feedback_packet_count, - uint32_t media_ssrc, - int64_t base_sequence_number, - std::map::const_iterator begin_iterator, - std::map::const_iterator end_iterator, - rtcp::TransportFeedback* feedback_packet) { - RTC_DCHECK(begin_iterator != end_iterator); +std::unique_ptr +RemoteEstimatorProxy::MaybeBuildFeedbackPacket( + bool include_timestamps, + int64_t begin_sequence_number_inclusive, + int64_t end_sequence_number_exclusive, + bool is_periodic_update) { + RTC_DCHECK_LT(begin_sequence_number_inclusive, end_sequence_number_exclusive); - // TODO(sprang): Measure receive times in microseconds and remove the - // conversions below. - feedback_packet->SetMediaSsrc(media_ssrc); - // Base sequence number is the expected first sequence number. This is known, - // but we might not have actually received it, so the base time shall be the - // time of the first received packet in the feedback. - feedback_packet->SetBase(static_cast(base_sequence_number & 0xFFFF), - begin_iterator->second * 1000); - feedback_packet->SetFeedbackSequenceNumber(feedback_packet_count); - int64_t next_sequence_number = base_sequence_number; - for (auto it = begin_iterator; it != end_iterator; ++it) { - if (!feedback_packet->AddReceivedPacket( - static_cast(it->first & 0xFFFF), it->second * 1000)) { - // If we can't even add the first seq to the feedback packet, we won't be - // able to build it at all. 
- RTC_CHECK(begin_iterator != it); + int64_t start_seq = + packet_arrival_times_.clamp(begin_sequence_number_inclusive); + int64_t end_seq = packet_arrival_times_.clamp(end_sequence_number_exclusive); + + // Create the packet on demand, as it's not certain that there are packets + // in the range that have been received. + std::unique_ptr feedback_packet = nullptr; + + int64_t next_sequence_number = begin_sequence_number_inclusive; + + for (int64_t seq = start_seq; seq < end_seq; ++seq) { + int64_t arrival_time_ms = packet_arrival_times_.get(seq); + if (arrival_time_ms == 0) { + // Packet not received. + continue; + } + + if (feedback_packet == nullptr) { + feedback_packet = + std::make_unique(include_timestamps); + // TODO(sprang): Measure receive times in microseconds and remove the + // conversions below. + feedback_packet->SetMediaSsrc(media_ssrc_); + // Base sequence number is the expected first sequence number. This is + // known, but we might not have actually received it, so the base time + // shall be the time of the first received packet in the feedback. + feedback_packet->SetBase( + static_cast(begin_sequence_number_inclusive & 0xFFFF), + arrival_time_ms * 1000); + feedback_packet->SetFeedbackSequenceNumber(feedback_packet_count_++); + } + + if (!feedback_packet->AddReceivedPacket(static_cast(seq & 0xFFFF), + arrival_time_ms * 1000)) { // Could not add timestamp, feedback packet might be full. Return and // try again with a fresh packet. 
break; } - next_sequence_number = it->first + 1; + + next_sequence_number = seq + 1; } - return next_sequence_number; + if (is_periodic_update) { + periodic_window_start_seq_ = next_sequence_number; + } + return feedback_packet; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h index a4adefc5e..4f8940999 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h @@ -11,12 +11,15 @@ #ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_ESTIMATOR_PROXY_H_ #define MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_ESTIMATOR_PROXY_H_ -#include +#include +#include +#include #include #include "api/transport/network_control.h" #include "api/transport/webrtc_key_value_config.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "modules/remote_bitrate_estimator/packet_arrival_map.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/synchronization/mutex.h" @@ -24,7 +27,6 @@ namespace webrtc { class Clock; -class PacketRouter; namespace rtcp { class TransportFeedback; } @@ -32,11 +34,14 @@ class TransportFeedback; // Class used when send-side BWE is enabled: This proxy is instantiated on the // receive side. It buffers a number of receive timestamps and then sends // transport feedback messages back too the send side. - class RemoteEstimatorProxy : public RemoteBitrateEstimator { public: + // Used for sending transport feedback messages when send side + // BWE is used. 
+ using TransportFeedbackSender = std::function> packets)>; RemoteEstimatorProxy(Clock* clock, - TransportFeedbackSenderInterface* feedback_sender, + TransportFeedbackSender feedback_sender, const WebRtcKeyValueConfig* key_value_config, NetworkStateEstimator* network_state_estimator); ~RemoteEstimatorProxy() override; @@ -71,24 +76,33 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator { } }; - static const int kMaxNumberOfPackets; - + void MaybeCullOldPackets(int64_t sequence_number, int64_t arrival_time_ms) + RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); void SendPeriodicFeedbacks() RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); void SendFeedbackOnRequest(int64_t sequence_number, const FeedbackRequest& feedback_request) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); - static int64_t BuildFeedbackPacket( - uint8_t feedback_packet_count, - uint32_t media_ssrc, - int64_t base_sequence_number, - std::map::const_iterator - begin_iterator, // |begin_iterator| is inclusive. - std::map::const_iterator - end_iterator, // |end_iterator| is exclusive. - rtcp::TransportFeedback* feedback_packet); + + // Returns a Transport Feedback packet with information about as many packets + // that has been received between [`begin_sequence_number_incl`, + // `end_sequence_number_excl`) that can fit in it. If `is_periodic_update`, + // this represents sending a periodic feedback message, which will make it + // update the `periodic_window_start_seq_` variable with the first packet that + // was not included in the feedback packet, so that the next update can + // continue from that sequence number. + // + // If no incoming packets were added, nullptr is returned. + // + // `include_timestamps` decide if the returned TransportFeedback should + // include timestamps. 
+ std::unique_ptr MaybeBuildFeedbackPacket( + bool include_timestamps, + int64_t begin_sequence_number_inclusive, + int64_t end_sequence_number_exclusive, + bool is_periodic_update) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); Clock* const clock_; - TransportFeedbackSenderInterface* const feedback_sender_; + const TransportFeedbackSender feedback_sender_; const TransportWideFeedbackConfig send_config_; int64_t last_process_time_ms_; @@ -99,9 +113,14 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator { uint32_t media_ssrc_ RTC_GUARDED_BY(&lock_); uint8_t feedback_packet_count_ RTC_GUARDED_BY(&lock_); SeqNumUnwrapper unwrapper_ RTC_GUARDED_BY(&lock_); + + // The next sequence number that should be the start sequence number during + // periodic reporting. Will be absl::nullopt before the first seen packet. absl::optional periodic_window_start_seq_ RTC_GUARDED_BY(&lock_); - // Map unwrapped seq -> time. - std::map packet_arrival_times_ RTC_GUARDED_BY(&lock_); + + // Packet arrival times, by sequence number. 
+ PacketArrivalTimeMap packet_arrival_times_ RTC_GUARDED_BY(&lock_); + int64_t send_interval_ms_ RTC_GUARDED_BY(&lock_); bool send_periodic_feedback_ RTC_GUARDED_BY(&lock_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_receiver.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_receiver.h index 6df984f85..b0caea68f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/flexfec_receiver.h @@ -15,11 +15,12 @@ #include +#include "api/sequence_checker.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/include/ulpfec_receiver.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -69,7 +70,7 @@ class FlexfecReceiver { int64_t last_recovered_packet_ms_ RTC_GUARDED_BY(sequence_checker_); FecPacketCounter packet_counter_ RTC_GUARDED_BY(sequence_checker_); - SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/receive_statistics.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/receive_statistics.h index 4e6441340..ce87b99a4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/receive_statistics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/receive_statistics.h @@ -17,11 +17,9 @@ #include "absl/types/optional.h" #include "call/rtp_packet_sink_interface.h" -#include "modules/include/module.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" -#include "rtc_base/deprecation.h" 
namespace webrtc { @@ -57,7 +55,12 @@ class ReceiveStatistics : public ReceiveStatisticsProvider, public: ~ReceiveStatistics() override = default; + // Returns a thread-safe instance of ReceiveStatistics. + // https://chromium.googlesource.com/chromium/src/+/lkgr/docs/threading_and_tasks.md#threading-lexicon static std::unique_ptr Create(Clock* clock); + // Returns a thread-compatible instance of ReceiveStatistics. + static std::unique_ptr CreateThreadCompatible( + Clock* clock); // Returns a pointer to the statistician of an ssrc. virtual StreamStatistician* GetStatistician(uint32_t ssrc) const = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtcp_statistics.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtcp_statistics.h index e26c475e3..de70c1494 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtcp_statistics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtcp_statistics.h @@ -17,22 +17,6 @@ namespace webrtc { -// Statistics for an RTCP channel -struct RtcpStatistics { - uint8_t fraction_lost = 0; - int32_t packets_lost = 0; // Defined as a 24 bit signed integer in RTCP - uint32_t extended_highest_sequence_number = 0; - uint32_t jitter = 0; -}; - -class RtcpStatisticsCallback { - public: - virtual ~RtcpStatisticsCallback() {} - - virtual void StatisticsUpdated(const RtcpStatistics& statistics, - uint32_t ssrc) = 0; -}; - // Statistics for RTCP packet types. 
struct RtcpPacketTypeCounter { RtcpPacketTypeCounter() diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h index ff2d34d60..72e5541d3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_header_extension_map.h @@ -19,7 +19,6 @@ #include "api/rtp_parameters.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/checks.h" -#include "rtc_base/deprecation.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h index 8663296eb..727fc6e64 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h @@ -12,12 +12,10 @@ #define MODULES_RTP_RTCP_INCLUDE_RTP_RTCP_H_ #include -#include -#include +#include "absl/base/attributes.h" #include "modules/include/module.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "rtc_base/deprecation.h" namespace webrtc { @@ -25,52 +23,14 @@ namespace webrtc { class RtpRtcp : public Module, public RtpRtcpInterface { public: // Instantiates a deprecated version of the RtpRtcp module. - static std::unique_ptr RTC_DEPRECATED - Create(const Configuration& configuration) { + static std::unique_ptr ABSL_DEPRECATED("") + Create(const Configuration& configuration) { return DEPRECATED_Create(configuration); } static std::unique_ptr DEPRECATED_Create( const Configuration& configuration); - // (TMMBR) Temporary Max Media Bit Rate - RTC_DEPRECATED virtual bool TMMBR() const = 0; - - RTC_DEPRECATED virtual void SetTMMBRStatus(bool enable) = 0; - - // Returns -1 on failure else 0. 
- RTC_DEPRECATED virtual int32_t AddMixedCNAME(uint32_t ssrc, - const char* cname) = 0; - - // Returns -1 on failure else 0. - RTC_DEPRECATED virtual int32_t RemoveMixedCNAME(uint32_t ssrc) = 0; - - // Returns remote CName. - // Returns -1 on failure else 0. - RTC_DEPRECATED virtual int32_t RemoteCNAME( - uint32_t remote_ssrc, - char cname[RTCP_CNAME_SIZE]) const = 0; - - // (De)registers RTP header extension type and id. - // Returns -1 on failure else 0. - RTC_DEPRECATED virtual int32_t RegisterSendRtpHeaderExtension( - RTPExtensionType type, - uint8_t id) = 0; - - // (APP) Sets application specific data. - // Returns -1 on failure else 0. - RTC_DEPRECATED virtual int32_t SetRTCPApplicationSpecificData( - uint8_t sub_type, - uint32_t name, - const uint8_t* data, - uint16_t length) = 0; - - // Returns statistics of the amount of data sent. - // Returns -1 on failure else 0. - RTC_DEPRECATED virtual int32_t DataCountersRTP( - size_t* bytes_sent, - uint32_t* packets_sent) const = 0; - // Requests new key frame. // using PLI, https://tools.ietf.org/html/rfc4585#section-6.3.1.1 void SendPictureLossIndication() { SendRTCP(kRtcpPli); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h index cbc2d9211..d25532053 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h @@ -74,6 +74,7 @@ enum RTPExtensionType : int { kRtpExtensionGenericFrameDescriptor = kRtpExtensionGenericFrameDescriptor00, kRtpExtensionGenericFrameDescriptor02, kRtpExtensionColorSpace, + kRtpExtensionVideoFrameTrackingId, kRtpExtensionNumberOfExtensions // Must be the last entity in the enum. 
}; @@ -228,6 +229,7 @@ struct RtpPacketSendInfo { uint16_t transport_sequence_number = 0; uint32_t ssrc = 0; uint16_t rtp_sequence_number = 0; + uint32_t rtp_timestamp = 0; size_t length = 0; absl::optional packet_type; PacedPacketInfo pacing_info; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index 70b073cd7..d523128e3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -141,16 +141,14 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { GetSendStreamDataCounters, (StreamDataCounters*, StreamDataCounters*), (const, override)); - MOCK_METHOD(int32_t, - RemoteRTCPStat, - (std::vector * receive_blocks), - (const, override)); MOCK_METHOD(std::vector, GetLatestReportBlockData, (), (const, override)); - MOCK_METHOD(void, SetRtcpXrRrtrStatus, (bool enable), (override)); - MOCK_METHOD(bool, RtcpXrRrtrStatus, (), (const, override)); + MOCK_METHOD(absl::optional, + GetSenderReportStats, + (), + (const, override)); MOCK_METHOD(void, SetRemb, (int64_t bitrate, std::vector ssrcs), @@ -168,7 +166,6 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { SetStorePacketsStatus, (bool enable, uint16_t number_to_store), (override)); - MOCK_METHOD(bool, StorePackets, (), (const, override)); MOCK_METHOD(void, SendCombinedRtcpPacket, (std::vector> rtcp_packets), diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc new file mode 100644 index 000000000..99fc030ac --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +constexpr Timestamp kInvalidLastReceiveTime = Timestamp::MinusInfinity(); +} // namespace + +constexpr TimeDelta AbsoluteCaptureTimeInterpolator::kInterpolationMaxInterval; + +AbsoluteCaptureTimeInterpolator::AbsoluteCaptureTimeInterpolator(Clock* clock) + : clock_(clock), last_receive_time_(kInvalidLastReceiveTime) {} + +uint32_t AbsoluteCaptureTimeInterpolator::GetSource( + uint32_t ssrc, + rtc::ArrayView csrcs) { + if (csrcs.empty()) { + return ssrc; + } + + return csrcs[0]; +} + +absl::optional +AbsoluteCaptureTimeInterpolator::OnReceivePacket( + uint32_t source, + uint32_t rtp_timestamp, + uint32_t rtp_clock_frequency, + const absl::optional& received_extension) { + const Timestamp receive_time = clock_->CurrentTime(); + + MutexLock lock(&mutex_); + + AbsoluteCaptureTime extension; + if (received_extension == absl::nullopt) { + if (!ShouldInterpolateExtension(receive_time, source, rtp_timestamp, + rtp_clock_frequency)) { + last_receive_time_ = kInvalidLastReceiveTime; + return absl::nullopt; + } + + extension.absolute_capture_timestamp = InterpolateAbsoluteCaptureTimestamp( + rtp_timestamp, rtp_clock_frequency, last_rtp_timestamp_, + last_absolute_capture_timestamp_); + extension.estimated_capture_clock_offset = + last_estimated_capture_clock_offset_; + } else { + last_source_ = source; + last_rtp_timestamp_ = rtp_timestamp; + last_rtp_clock_frequency_ = rtp_clock_frequency; + last_absolute_capture_timestamp_ = + received_extension->absolute_capture_timestamp; + 
last_estimated_capture_clock_offset_ = + received_extension->estimated_capture_clock_offset; + + last_receive_time_ = receive_time; + + extension = *received_extension; + } + + return extension; +} + +uint64_t AbsoluteCaptureTimeInterpolator::InterpolateAbsoluteCaptureTimestamp( + uint32_t rtp_timestamp, + uint32_t rtp_clock_frequency, + uint32_t last_rtp_timestamp, + uint64_t last_absolute_capture_timestamp) { + RTC_DCHECK_GT(rtp_clock_frequency, 0); + + return last_absolute_capture_timestamp + + static_cast( + rtc::dchecked_cast(rtp_timestamp - last_rtp_timestamp) + << 32) / + rtp_clock_frequency; +} + +bool AbsoluteCaptureTimeInterpolator::ShouldInterpolateExtension( + Timestamp receive_time, + uint32_t source, + uint32_t rtp_timestamp, + uint32_t rtp_clock_frequency) const { + // Shouldn't if we don't have a previously received extension stored. + if (last_receive_time_ == kInvalidLastReceiveTime) { + return false; + } + + // Shouldn't if the last received extension is too old. + if ((receive_time - last_receive_time_) > kInterpolationMaxInterval) { + return false; + } + + // Shouldn't if the source has changed. + if (last_source_ != source) { + return false; + } + + // Shouldn't if the RTP clock frequency has changed. + if (last_rtp_clock_frequency_ != rtp_clock_frequency) { + return false; + } + + // Shouldn't if the RTP clock frequency is invalid. + if (rtp_clock_frequency <= 0) { + return false; + } + + return true; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h new file mode 100644 index 000000000..89d7f0850 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_INTERPOLATOR_H_ +#define MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_INTERPOLATOR_H_ + +#include "api/array_view.h" +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +// +// Helper class for interpolating the |AbsoluteCaptureTime| header extension. +// +// Supports the "timestamp interpolation" optimization: +// A receiver SHOULD memorize the capture system (i.e. CSRC/SSRC), capture +// timestamp, and RTP timestamp of the most recently received abs-capture-time +// packet on each received stream. It can then use that information, in +// combination with RTP timestamps of packets without abs-capture-time, to +// extrapolate missing capture timestamps. +// +// See: https://webrtc.org/experiments/rtp-hdrext/abs-capture-time/ +// +class AbsoluteCaptureTimeInterpolator { + public: + static constexpr TimeDelta kInterpolationMaxInterval = + TimeDelta::Millis(5000); + + explicit AbsoluteCaptureTimeInterpolator(Clock* clock); + + // Returns the source (i.e. SSRC or CSRC) of the capture system. + static uint32_t GetSource(uint32_t ssrc, + rtc::ArrayView csrcs); + + // Returns a received header extension, an interpolated header extension, or + // |absl::nullopt| if it's not possible to interpolate a header extension. 
+ absl::optional OnReceivePacket( + uint32_t source, + uint32_t rtp_timestamp, + uint32_t rtp_clock_frequency, + const absl::optional& received_extension); + + private: + friend class AbsoluteCaptureTimeSender; + + static uint64_t InterpolateAbsoluteCaptureTimestamp( + uint32_t rtp_timestamp, + uint32_t rtp_clock_frequency, + uint32_t last_rtp_timestamp, + uint64_t last_absolute_capture_timestamp); + + bool ShouldInterpolateExtension(Timestamp receive_time, + uint32_t source, + uint32_t rtp_timestamp, + uint32_t rtp_clock_frequency) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + Clock* const clock_; + + Mutex mutex_; + + Timestamp last_receive_time_ RTC_GUARDED_BY(mutex_); + + uint32_t last_source_ RTC_GUARDED_BY(mutex_); + uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_); + uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_); + uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_); + absl::optional last_estimated_capture_clock_offset_ + RTC_GUARDED_BY(mutex_); +}; // AbsoluteCaptureTimeInterpolator + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_INTERPOLATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc index 529ed7eef..efb75506d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc @@ -10,38 +10,14 @@ #include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h" -#include - -#include "rtc_base/checks.h" - namespace webrtc { -namespace { - -constexpr Timestamp kInvalidLastReceiveTime = Timestamp::MinusInfinity(); -} // namespace - -constexpr TimeDelta AbsoluteCaptureTimeReceiver::kInterpolationMaxInterval; AbsoluteCaptureTimeReceiver::AbsoluteCaptureTimeReceiver(Clock* clock) - : clock_(clock), - 
remote_to_local_clock_offset_(absl::nullopt), - last_receive_time_(kInvalidLastReceiveTime) {} - -uint32_t AbsoluteCaptureTimeReceiver::GetSource( - uint32_t ssrc, - rtc::ArrayView csrcs) { - if (csrcs.empty()) { - return ssrc; - } - - return csrcs[0]; -} + : AbsoluteCaptureTimeInterpolator(clock) {} void AbsoluteCaptureTimeReceiver::SetRemoteToLocalClockOffset( absl::optional value_q32x32) { - MutexLock lock(&mutex_); - - remote_to_local_clock_offset_ = value_q32x32; + capture_clock_offset_updater_.SetRemoteToLocalClockOffset(value_q32x32); } absl::optional @@ -50,101 +26,16 @@ AbsoluteCaptureTimeReceiver::OnReceivePacket( uint32_t rtp_timestamp, uint32_t rtp_clock_frequency, const absl::optional& received_extension) { - const Timestamp receive_time = clock_->CurrentTime(); + auto extension = AbsoluteCaptureTimeInterpolator::OnReceivePacket( + source, rtp_timestamp, rtp_clock_frequency, received_extension); - MutexLock lock(&mutex_); - - AbsoluteCaptureTime extension; - if (received_extension == absl::nullopt) { - if (!ShouldInterpolateExtension(receive_time, source, rtp_timestamp, - rtp_clock_frequency)) { - last_receive_time_ = kInvalidLastReceiveTime; - return absl::nullopt; - } - - extension.absolute_capture_timestamp = InterpolateAbsoluteCaptureTimestamp( - rtp_timestamp, rtp_clock_frequency, last_rtp_timestamp_, - last_absolute_capture_timestamp_); - extension.estimated_capture_clock_offset = - last_estimated_capture_clock_offset_; - } else { - last_source_ = source; - last_rtp_timestamp_ = rtp_timestamp; - last_rtp_clock_frequency_ = rtp_clock_frequency; - last_absolute_capture_timestamp_ = - received_extension->absolute_capture_timestamp; - last_estimated_capture_clock_offset_ = - received_extension->estimated_capture_clock_offset; - - last_receive_time_ = receive_time; - - extension = *received_extension; + if (extension.has_value()) { + extension->estimated_capture_clock_offset = + capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset( + 
extension->estimated_capture_clock_offset); } - extension.estimated_capture_clock_offset = AdjustEstimatedCaptureClockOffset( - extension.estimated_capture_clock_offset); - return extension; } -uint64_t AbsoluteCaptureTimeReceiver::InterpolateAbsoluteCaptureTimestamp( - uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency, - uint32_t last_rtp_timestamp, - uint64_t last_absolute_capture_timestamp) { - RTC_DCHECK_GT(rtp_clock_frequency, 0); - - return last_absolute_capture_timestamp + - static_cast( - rtc::dchecked_cast(rtp_timestamp - last_rtp_timestamp) - << 32) / - rtp_clock_frequency; -} - -bool AbsoluteCaptureTimeReceiver::ShouldInterpolateExtension( - Timestamp receive_time, - uint32_t source, - uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency) const { - // Shouldn't if we don't have a previously received extension stored. - if (last_receive_time_ == kInvalidLastReceiveTime) { - return false; - } - - // Shouldn't if the last received extension is too old. - if ((receive_time - last_receive_time_) > kInterpolationMaxInterval) { - return false; - } - - // Shouldn't if the source has changed. - if (last_source_ != source) { - return false; - } - - // Shouldn't if the RTP clock frequency has changed. - if (last_rtp_clock_frequency_ != rtp_clock_frequency) { - return false; - } - - // Shouldn't if the RTP clock frequency is invalid. - if (rtp_clock_frequency <= 0) { - return false; - } - - return true; -} - -absl::optional -AbsoluteCaptureTimeReceiver::AdjustEstimatedCaptureClockOffset( - absl::optional received_value) const { - if (received_value == absl::nullopt || - remote_to_local_clock_offset_ == absl::nullopt) { - return absl::nullopt; - } - - // Do calculations as "unsigned" to make overflows deterministic. 
- return static_cast(*received_value) + - static_cast(*remote_to_local_clock_offset_); -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.h index ce3442b38..ad1bd7eb5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.h @@ -11,89 +11,28 @@ #ifndef MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_RECEIVER_H_ #define MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_RECEIVER_H_ -#include "api/array_view.h" -#include "api/rtp_headers.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" +#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" +#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" #include "system_wrappers/include/clock.h" namespace webrtc { -// -// Helper class for receiving the |AbsoluteCaptureTime| header extension. -// -// Supports the "timestamp interpolation" optimization: -// A receiver SHOULD memorize the capture system (i.e. CSRC/SSRC), capture -// timestamp, and RTP timestamp of the most recently received abs-capture-time -// packet on each received stream. It can then use that information, in -// combination with RTP timestamps of packets without abs-capture-time, to -// extrapolate missing capture timestamps. -// -// See: https://webrtc.org/experiments/rtp-hdrext/abs-capture-time/ -// -class AbsoluteCaptureTimeReceiver { +// DEPRECATED. Use `AbsoluteCaptureTimeInterpolator` instead. 
+class AbsoluteCaptureTimeReceiver : public AbsoluteCaptureTimeInterpolator { public: - static constexpr TimeDelta kInterpolationMaxInterval = - TimeDelta::Millis(5000); - explicit AbsoluteCaptureTimeReceiver(Clock* clock); - // Returns the source (i.e. SSRC or CSRC) of the capture system. - static uint32_t GetSource(uint32_t ssrc, - rtc::ArrayView csrcs); - - // Sets the NTP clock offset between the sender system (which may be different - // from the capture system) and the local system. This information is normally - // provided by passing half the value of the Round-Trip Time estimation given - // by RTCP sender reports (see DLSR/DLRR). - // - // Note that the value must be in Q32.32-formatted fixed-point seconds. - void SetRemoteToLocalClockOffset(absl::optional value_q32x32); - - // Returns a received header extension, an interpolated header extension, or - // |absl::nullopt| if it's not possible to interpolate a header extension. absl::optional OnReceivePacket( uint32_t source, uint32_t rtp_timestamp, uint32_t rtp_clock_frequency, const absl::optional& received_extension); + void SetRemoteToLocalClockOffset(absl::optional value_q32x32); + private: - friend class AbsoluteCaptureTimeSender; - - static uint64_t InterpolateAbsoluteCaptureTimestamp( - uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency, - uint32_t last_rtp_timestamp, - uint64_t last_absolute_capture_timestamp); - - bool ShouldInterpolateExtension(Timestamp receive_time, - uint32_t source, - uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - absl::optional AdjustEstimatedCaptureClockOffset( - absl::optional received_value) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - Clock* const clock_; - - Mutex mutex_; - - absl::optional remote_to_local_clock_offset_ RTC_GUARDED_BY(mutex_); - - Timestamp last_receive_time_ RTC_GUARDED_BY(mutex_); - - uint32_t last_source_ RTC_GUARDED_BY(mutex_); - uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_); 
- uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_); - uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_); - absl::optional last_estimated_capture_clock_offset_ - RTC_GUARDED_BY(mutex_); -}; // AbsoluteCaptureTimeReceiver + CaptureClockOffsetUpdater capture_clock_offset_updater_; +}; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/capture_clock_offset_updater.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/capture_clock_offset_updater.cc new file mode 100644 index 000000000..a5b12cb42 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/capture_clock_offset_updater.cc @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" + +namespace webrtc { + +absl::optional +CaptureClockOffsetUpdater::AdjustEstimatedCaptureClockOffset( + absl::optional remote_capture_clock_offset) const { + if (remote_capture_clock_offset == absl::nullopt || + remote_to_local_clock_offset_ == absl::nullopt) { + return absl::nullopt; + } + + // Do calculations as "unsigned" to make overflows deterministic. 
+ return static_cast(*remote_capture_clock_offset) + + static_cast(*remote_to_local_clock_offset_); +} + +void CaptureClockOffsetUpdater::SetRemoteToLocalClockOffset( + absl::optional offset_q32x32) { + remote_to_local_clock_offset_ = offset_q32x32; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/capture_clock_offset_updater.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/capture_clock_offset_updater.h new file mode 100644 index 000000000..71d3eb483 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/capture_clock_offset_updater.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_CAPTURE_CLOCK_OFFSET_UPDATER_H_ +#define MODULES_RTP_RTCP_SOURCE_CAPTURE_CLOCK_OFFSET_UPDATER_H_ + +#include + +#include "absl/types/optional.h" + +namespace webrtc { + +// +// Helper class for calculating the clock offset against the capturer's clock. +// +// This is achieved by adjusting the estimated capture clock offset in received +// Absolute Capture Time RTP header extension (see +// https://webrtc.org/experiments/rtp-hdrext/abs-capture-time/), which +// represents the clock offset between a remote sender and the capturer, by +// adding local-to-remote clock offset. + +class CaptureClockOffsetUpdater { + public: + // Adjusts remote_capture_clock_offset, which originates from Absolute Capture + // Time RTP header extension, to get the local clock offset against the + // capturer's clock. 
+ absl::optional AdjustEstimatedCaptureClockOffset( + absl::optional remote_capture_clock_offset) const; + + // Sets the NTP clock offset between the sender system (which may be different + // from the capture system) and the local system. This information is normally + // provided by passing half the value of the Round-Trip Time estimation given + // by RTCP sender reports (see DLSR/DLRR). + // + // Note that the value must be in Q32.32-formatted fixed-point seconds. + void SetRemoteToLocalClockOffset(absl::optional offset_q32x32); + + private: + absl::optional remote_to_local_clock_offset_; +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_CAPTURE_CLOCK_OFFSET_UPDATER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc index 6cb9d9330..3f7d22c49 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -176,8 +176,7 @@ void DEPRECATED_RtpSenderEgress::SendPacket( AddPacketToTransportFeedback(*packet_id, *packet, pacing_info); } - options.application_data.assign(packet->application_data().begin(), - packet->application_data().end()); + options.additional_data = packet->additional_data(); if (packet->packet_type() != RtpPacketMediaType::kPadding && packet->packet_type() != RtpPacketMediaType::kRetransmission) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/fec_test_helper.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/fec_test_helper.cc index f8579b48f..b9ac25e4a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/fec_test_helper.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/fec_test_helper.cc @@ -57,7 +57,7 @@ ForwardErrorCorrection::PacketList 
MediaPacketGenerator::ConstructMediaPackets( media_packet->data.SetSize( random_->Rand(min_packet_size_, max_packet_size_)); - uint8_t* data = media_packet->data.data(); + uint8_t* data = media_packet->data.MutableData(); // Generate random values for the first 2 bytes data[0] = random_->Rand(); data[1] = random_->Rand(); @@ -88,7 +88,7 @@ ForwardErrorCorrection::PacketList MediaPacketGenerator::ConstructMediaPackets( // Last packet, set marker bit. ForwardErrorCorrection::Packet* media_packet = media_packets.back().get(); RTC_DCHECK(media_packet); - media_packet->data[1] |= 0x80; + media_packet->data.MutableData()[1] |= 0x80; next_seq_num_ = seq_num; @@ -122,7 +122,7 @@ std::unique_ptr AugmentedPacketGenerator::NextPacket( std::unique_ptr packet(new AugmentedPacket()); packet->data.SetSize(length + kRtpHeaderSize); - uint8_t* data = packet->data.data(); + uint8_t* data = packet->data.MutableData(); for (size_t i = 0; i < length; ++i) data[i + kRtpHeaderSize] = offset + i; packet->data.SetSize(length + kRtpHeaderSize); @@ -132,7 +132,7 @@ std::unique_ptr AugmentedPacketGenerator::NextPacket( packet->header.sequenceNumber = seq_num_; packet->header.timestamp = timestamp_; packet->header.ssrc = ssrc_; - WriteRtpHeader(packet->header, packet->data.data()); + WriteRtpHeader(packet->header, data); ++seq_num_; --num_packets_; @@ -171,8 +171,8 @@ std::unique_ptr FlexfecPacketGenerator::BuildFlexfecPacket( std::unique_ptr packet_with_rtp_header( new AugmentedPacket()); packet_with_rtp_header->data.SetSize(kRtpHeaderSize + packet.data.size()); - WriteRtpHeader(header, packet_with_rtp_header->data.data()); - memcpy(packet_with_rtp_header->data.data() + kRtpHeaderSize, + WriteRtpHeader(header, packet_with_rtp_header->data.MutableData()); + memcpy(packet_with_rtp_header->data.MutableData() + kRtpHeaderSize, packet.data.cdata(), packet.data.size()); return packet_with_rtp_header; @@ -184,19 +184,21 @@ UlpfecPacketGenerator::UlpfecPacketGenerator(uint32_t ssrc) RtpPacketReceived 
UlpfecPacketGenerator::BuildMediaRedPacket( const AugmentedPacket& packet, bool is_recovered) { - RtpPacketReceived red_packet; - // Copy RTP header. + // Create a temporary buffer used to wrap the media packet in RED. + rtc::CopyOnWriteBuffer red_buffer; const size_t kHeaderLength = packet.header.headerLength; - red_packet.Parse(packet.data.cdata(), kHeaderLength); - RTC_DCHECK_EQ(red_packet.headers_size(), kHeaderLength); - uint8_t* rtp_payload = - red_packet.AllocatePayload(packet.data.size() + 1 - kHeaderLength); - // Move payload type into rtp payload. - rtp_payload[0] = red_packet.PayloadType(); + // Append header. + red_buffer.SetData(packet.data.data(), kHeaderLength); + // Find payload type and add it as RED header. + uint8_t media_payload_type = red_buffer[1] & 0x7F; + red_buffer.AppendData({media_payload_type}); + // Append rest of payload/padding. + red_buffer.AppendData( + packet.data.Slice(kHeaderLength, packet.data.size() - kHeaderLength)); + + RtpPacketReceived red_packet; + RTC_CHECK(red_packet.Parse(std::move(red_buffer))); red_packet.SetPayloadType(kRedPayloadType); - // Copy the payload. - memcpy(rtp_payload + 1, packet.data.cdata() + kHeaderLength, - packet.data.size() - kHeaderLength); red_packet.set_recovered(is_recovered); return red_packet; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc index ab0dcb68a..40426f16b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc @@ -25,6 +25,11 @@ namespace { // Maximum number of media packets that can be protected in one batch. constexpr size_t kMaxMediaPackets = 48; // Since we are reusing ULPFEC masks. +// Maximum number of media packets tracked by FEC decoder. 
+// Maintain a sufficiently larger tracking window than |kMaxMediaPackets| +// to account for packet reordering in pacer/ network. +constexpr size_t kMaxTrackedMediaPackets = 4 * kMaxMediaPackets; + // Maximum number of FEC packets stored inside ForwardErrorCorrection. constexpr size_t kMaxFecPackets = kMaxMediaPackets; @@ -72,7 +77,7 @@ size_t FlexfecHeaderSize(size_t packet_mask_size) { } // namespace FlexfecHeaderReader::FlexfecHeaderReader() - : FecHeaderReader(kMaxMediaPackets, kMaxFecPackets) {} + : FecHeaderReader(kMaxTrackedMediaPackets, kMaxFecPackets) {} FlexfecHeaderReader::~FlexfecHeaderReader() = default; @@ -85,7 +90,7 @@ bool FlexfecHeaderReader::ReadFecHeader( RTC_LOG(LS_WARNING) << "Discarding truncated FlexFEC packet."; return false; } - uint8_t* const data = fec_packet->pkt->data.data(); + uint8_t* const data = fec_packet->pkt->data.MutableData(); bool r_bit = (data[0] & 0x80) != 0; if (r_bit) { RTC_LOG(LS_INFO) @@ -249,7 +254,7 @@ void FlexfecHeaderWriter::FinalizeFecHeader( const uint8_t* packet_mask, size_t packet_mask_size, ForwardErrorCorrection::Packet* fec_packet) const { - uint8_t* data = fec_packet->data.data(); + uint8_t* data = fec_packet->data.MutableData(); data[0] &= 0x7f; // Clear R bit. data[0] &= 0xbf; // Clear F bit. ByteWriter::WriteBigEndian(&data[8], kSsrcCount); @@ -260,8 +265,7 @@ void FlexfecHeaderWriter::FinalizeFecHeader( // // We treat the mask parts as unsigned integers with host order endianness // in order to simplify the bit shifting between bytes. - uint8_t* const written_packet_mask = - fec_packet->data.data() + kPacketMaskOffset; + uint8_t* const written_packet_mask = data + kPacketMaskOffset; if (packet_mask_size == kUlpfecPacketMaskSizeLBitSet) { // The packet mask is 48 bits long. 
uint16_t tmp_mask_part0 = diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc index 1812fbf03..da8025d3d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc @@ -31,6 +31,8 @@ namespace webrtc { namespace { // Transport header size in bytes. Assume UDP/IPv4 as a reasonable minimum. constexpr size_t kTransportOverhead = 28; + +constexpr uint16_t kOldSequenceThreshold = 0x3fff; } // namespace ForwardErrorCorrection::Packet::Packet() : data(0), ref_count_(0) {} @@ -151,7 +153,7 @@ int ForwardErrorCorrection::EncodeFec(const PacketList& media_packets, } for (int i = 0; i < num_fec_packets; ++i) { generated_fec_packets_[i].data.EnsureCapacity(IP_PACKET_SIZE); - memset(generated_fec_packets_[i].data.data(), 0, IP_PACKET_SIZE); + memset(generated_fec_packets_[i].data.MutableData(), 0, IP_PACKET_SIZE); // Use this as a marker for untouched packets. generated_fec_packets_[i].data.SetSize(0); fec_packets->push_back(&generated_fec_packets_[i]); @@ -231,7 +233,7 @@ void ForwardErrorCorrection::GenerateFecPayloads( fec_packet->data.SetSize(fec_packet_length); } if (first_protected_packet) { - uint8_t* data = fec_packet->data.data(); + uint8_t* data = fec_packet->data.MutableData(); // Write P, X, CC, M, and PT recovery fields. // Note that bits 0, 1, and 16 are overwritten in FinalizeFecHeaders. memcpy(&data[0], &media_packet_data[0], 2); @@ -508,9 +510,6 @@ void ForwardErrorCorrection::InsertPacket( // This is important for keeping |received_fec_packets_| sorted, and may // also reduce the possibility of incorrect decoding due to sequence number // wrap-around. 
- // TODO(marpan/holmer): We should be able to improve detection/discarding of - // old FEC packets based on timestamp information or better sequence number - // thresholding (e.g., to distinguish between wrap-around and reordering). if (!received_fec_packets_.empty() && received_packet.ssrc == received_fec_packets_.front()->ssrc) { // It only makes sense to detect wrap-around when |received_packet| @@ -521,7 +520,7 @@ void ForwardErrorCorrection::InsertPacket( auto it = received_fec_packets_.begin(); while (it != received_fec_packets_.end()) { uint16_t seq_num_diff = MinDiff(received_packet.seq_num, (*it)->seq_num); - if (seq_num_diff > 0x3fff) { + if (seq_num_diff > kOldSequenceThreshold) { it = received_fec_packets_.erase(it); } else { // No need to keep iterating, since |received_fec_packets_| is sorted. @@ -567,11 +566,11 @@ bool ForwardErrorCorrection::StartPacketRecovery( // Copy bytes corresponding to minimum RTP header size. // Note that the sequence number and SSRC fields will be overwritten // at the end of packet recovery. - memcpy(recovered_packet->pkt->data.data(), fec_packet.pkt->data.cdata(), - kRtpHeaderSize); + memcpy(recovered_packet->pkt->data.MutableData(), + fec_packet.pkt->data.cdata(), kRtpHeaderSize); // Copy remaining FEC payload. if (fec_packet.protection_length > 0) { - memcpy(recovered_packet->pkt->data.data() + kRtpHeaderSize, + memcpy(recovered_packet->pkt->data.MutableData() + kRtpHeaderSize, fec_packet.pkt->data.cdata() + fec_packet.fec_header_size, fec_packet.protection_length); } @@ -581,7 +580,7 @@ bool ForwardErrorCorrection::StartPacketRecovery( bool ForwardErrorCorrection::FinishPacketRecovery( const ReceivedFecPacket& fec_packet, RecoveredPacket* recovered_packet) { - uint8_t* data = recovered_packet->pkt->data.data(); + uint8_t* data = recovered_packet->pkt->data.MutableData(); // Set the RTP version to 2. data[0] |= 0x80; // Set the 1st bit. data[0] &= 0xbf; // Clear the 2nd bit. 
@@ -603,7 +602,7 @@ bool ForwardErrorCorrection::FinishPacketRecovery( } void ForwardErrorCorrection::XorHeaders(const Packet& src, Packet* dst) { - uint8_t* dst_data = dst->data.data(); + uint8_t* dst_data = dst->data.MutableData(); const uint8_t* src_data = src.data.cdata(); // XOR the first 2 bytes of the header: V, P, X, CC, M, PT fields. dst_data[0] ^= src_data[0]; @@ -635,7 +634,7 @@ void ForwardErrorCorrection::XorPayloads(const Packet& src, if (dst_offset + payload_length > dst->data.size()) { dst->data.SetSize(dst_offset + payload_length); } - uint8_t* dst_data = dst->data.data(); + uint8_t* dst_data = dst->data.MutableData(); const uint8_t* src_data = src.data.cdata(); for (size_t i = 0; i < payload_length; ++i) { dst_data[dst_offset + i] ^= src_data[kRtpHeaderSize + i]; @@ -698,9 +697,10 @@ void ForwardErrorCorrection::AttemptRecovery( // this may allow additional packets to be recovered. // Restart for first FEC packet. fec_packet_it = received_fec_packets_.begin(); - } else if (packets_missing == 0) { - // Either all protected packets arrived or have been recovered. We can - // discard this FEC packet. + } else if (packets_missing == 0 || + IsOldFecPacket(**fec_packet_it, recovered_packets)) { + // Either all protected packets arrived or have been recovered, or the FEC + // packet is old. We can discard this FEC packet. 
fec_packet_it = received_fec_packets_.erase(fec_packet_it); } else { fec_packet_it++; @@ -731,11 +731,28 @@ void ForwardErrorCorrection::DiscardOldRecoveredPackets( RTC_DCHECK_LE(recovered_packets->size(), max_media_packets); } -uint16_t ForwardErrorCorrection::ParseSequenceNumber(uint8_t* packet) { +bool ForwardErrorCorrection::IsOldFecPacket( + const ReceivedFecPacket& fec_packet, + const RecoveredPacketList* recovered_packets) { + if (recovered_packets->empty()) { + return false; + } + + const uint16_t back_recovered_seq_num = recovered_packets->back()->seq_num; + const uint16_t last_protected_seq_num = + fec_packet.protected_packets.back()->seq_num; + + // FEC packet is old if its last protected sequence number is much + // older than the latest protected sequence number received. + return (MinDiff(back_recovered_seq_num, last_protected_seq_num) > + kOldSequenceThreshold); +} + +uint16_t ForwardErrorCorrection::ParseSequenceNumber(const uint8_t* packet) { return (packet[2] << 8) + packet[3]; } -uint32_t ForwardErrorCorrection::ParseSsrc(uint8_t* packet) { +uint32_t ForwardErrorCorrection::ParseSsrc(const uint8_t* packet) { return (packet[8] << 24) + (packet[9] << 16) + (packet[10] << 8) + packet[11]; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.h index 566ce7428..b97693d01 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/forward_error_correction.h @@ -235,8 +235,8 @@ class ForwardErrorCorrection { // TODO(brandtr): Remove these functions when the Packet classes // have been refactored. 
- static uint16_t ParseSequenceNumber(uint8_t* packet); - static uint32_t ParseSsrc(uint8_t* packet); + static uint16_t ParseSequenceNumber(const uint8_t* packet); + static uint32_t ParseSsrc(const uint8_t* packet); protected: ForwardErrorCorrection(std::unique_ptr fec_header_reader, @@ -330,6 +330,11 @@ class ForwardErrorCorrection { // for recovering lost packets. void DiscardOldRecoveredPackets(RecoveredPacketList* recovered_packets); + // Checks if the FEC packet is old enough and no longer relevant for + // recovering lost media packets. + bool IsOldFecPacket(const ReceivedFecPacket& fec_packet, + const RecoveredPacketList* recovered_packets); + // These SSRCs are only used by the decoder. const uint32_t ssrc_; const uint32_t protected_media_ssrc_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/h265_sps_parser.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/h265_sps_parser.cc index 6e174f630..a164e5349 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/h265_sps_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/h265_sps_parser.cc @@ -76,7 +76,7 @@ bool H265SpsParser::Parse() { RETURN_FALSE_ON_FAIL(parser.ConsumeBits(4)); // sps_max_sub_layers_minus1: u(3) uint32_t sps_max_sub_layers_minus1 = 0; - RETURN_FALSE_ON_FAIL(parser.ReadBits(&sps_max_sub_layers_minus1, 3)); + RETURN_FALSE_ON_FAIL(parser.ReadBits(3, sps_max_sub_layers_minus1)); // sps_temporal_id_nesting_flag: u(1) RETURN_FALSE_ON_FAIL(parser.ConsumeBits(1)); // profile_tier_level(1, sps_max_sub_layers_minus1). 
We are acutally not @@ -101,8 +101,8 @@ bool H265SpsParser::Parse() { uint32_t sub_layer_level_present = 0; for (uint32_t i = 0; i < sps_max_sub_layers_minus1; i++) { //sublayer_profile_present_flag and sublayer_level_presnet_flag: u(2) - RETURN_FALSE_ON_FAIL(parser.ReadBits(&sub_layer_profile_present, 1)); - RETURN_FALSE_ON_FAIL(parser.ReadBits(&sub_layer_level_present, 1)); + RETURN_FALSE_ON_FAIL(parser.ReadBits(1, sub_layer_profile_present)); + RETURN_FALSE_ON_FAIL(parser.ReadBits(1, sub_layer_level_present)); sub_layer_profile_present_flags.push_back(sub_layer_profile_present); sub_layer_level_present_flags.push_back(sub_layer_level_present); } @@ -132,22 +132,22 @@ bool H265SpsParser::Parse() { } } //sps_seq_parameter_set_id: ue(v) - RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&golomb_ignored)); + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(golomb_ignored)); // chrome_format_idc: ue(v) - RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&chroma_format_idc)); + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(chroma_format_idc)); if (chroma_format_idc == 3) { // seperate_colour_plane_flag: u(1) - RETURN_FALSE_ON_FAIL(parser.ReadBits(&separate_colour_plane_flag, 1)); + RETURN_FALSE_ON_FAIL(parser.ReadBits(1, separate_colour_plane_flag)); } uint32_t pic_width_in_luma_samples = 0; uint32_t pic_height_in_luma_samples = 0; // pic_width_in_luma_samples: ue(v) - RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&pic_width_in_luma_samples)); + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(pic_width_in_luma_samples)); // pic_height_in_luma_samples: ue(v) - RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&pic_height_in_luma_samples)); + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(pic_height_in_luma_samples)); // conformance_window_flag: u(1) uint32_t conformance_window_flag = 0; - RETURN_FALSE_ON_FAIL(parser.ReadBits(&conformance_window_flag, 1)); + RETURN_FALSE_ON_FAIL(parser.ReadBits(1, conformance_window_flag)); uint32_t conf_win_left_offset = 
0; uint32_t conf_win_right_offset = 0; @@ -155,13 +155,13 @@ bool H265SpsParser::Parse() { uint32_t conf_win_bottom_offset = 0; if (conformance_window_flag) { // conf_win_left_offset: ue(v) - RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&conf_win_left_offset)); + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(conf_win_left_offset)); // conf_win_right_offset: ue(v) - RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&conf_win_right_offset)); + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(conf_win_right_offset)); // conf_win_top_offset: ue(v) - RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&conf_win_top_offset)); + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(conf_win_top_offset)); // conf_win_bottom_offset: ue(v) - RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(&conf_win_bottom_offset)); + RETURN_FALSE_ON_FAIL(parser.ReadExponentialGolomb(conf_win_bottom_offset)); } //For enough to get the resolution information. calcaluate according to HEVC spec 7.4.3.2 diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc new file mode 100644 index 000000000..03ea9b815 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/packet_sequencer.h" + +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace { +// RED header is first byte of payload, if present. +constexpr size_t kRedForFecHeaderLength = 1; + +// Timestamps use a 90kHz clock. 
+constexpr uint32_t kTimestampTicksPerMs = 90; +} // namespace + +PacketSequencer::PacketSequencer(uint32_t media_ssrc, + uint32_t rtx_ssrc, + bool require_marker_before_media_padding, + Clock* clock) + : media_ssrc_(media_ssrc), + rtx_ssrc_(rtx_ssrc), + require_marker_before_media_padding_(require_marker_before_media_padding), + clock_(clock), + media_sequence_number_(0), + rtx_sequence_number_(0), + last_payload_type_(-1), + last_rtp_timestamp_(0), + last_capture_time_ms_(0), + last_timestamp_time_ms_(0), + last_packet_marker_bit_(false) {} + +bool PacketSequencer::Sequence(RtpPacketToSend& packet) { + if (packet.packet_type() == RtpPacketMediaType::kPadding && + !PopulatePaddingFields(packet)) { + // This padding packet can't be sent with current state, return without + // updating the sequence number. + return false; + } + + if (packet.Ssrc() == media_ssrc_) { + packet.SetSequenceNumber(media_sequence_number_++); + if (packet.packet_type() != RtpPacketMediaType::kPadding) { + UpdateLastPacketState(packet); + } + return true; + } + + RTC_DCHECK_EQ(packet.Ssrc(), rtx_ssrc_); + packet.SetSequenceNumber(rtx_sequence_number_++); + return true; +} + +void PacketSequencer::SetRtpState(const RtpState& state) { + media_sequence_number_ = state.sequence_number; + last_rtp_timestamp_ = state.timestamp; + last_capture_time_ms_ = state.capture_time_ms; + last_timestamp_time_ms_ = state.last_timestamp_time_ms; +} + +void PacketSequencer::PupulateRtpState(RtpState& state) const { + state.sequence_number = media_sequence_number_; + state.timestamp = last_rtp_timestamp_; + state.capture_time_ms = last_capture_time_ms_; + state.last_timestamp_time_ms = last_timestamp_time_ms_; +} + +void PacketSequencer::UpdateLastPacketState(const RtpPacketToSend& packet) { + // Remember marker bit to determine if padding can be inserted with + // sequence number following |packet|. 
+ last_packet_marker_bit_ = packet.Marker(); + // Remember media payload type to use in the padding packet if rtx is + // disabled. + if (packet.is_red()) { + RTC_DCHECK_GE(packet.payload_size(), kRedForFecHeaderLength); + last_payload_type_ = packet.PayloadBuffer()[0]; + } else { + last_payload_type_ = packet.PayloadType(); + } + // Save timestamps to generate timestamp field and extensions for the padding. + last_rtp_timestamp_ = packet.Timestamp(); + last_timestamp_time_ms_ = clock_->TimeInMilliseconds(); + last_capture_time_ms_ = packet.capture_time_ms(); +} + +bool PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { + if (packet.Ssrc() == media_ssrc_) { + if (last_payload_type_ == -1) { + return false; + } + + // Without RTX we can't send padding in the middle of frames. + // For audio marker bits doesn't mark the end of a frame and frames + // are usually a single packet, so for now we don't apply this rule + // for audio. + if (require_marker_before_media_padding_ && !last_packet_marker_bit_) { + return false; + } + + packet.SetTimestamp(last_rtp_timestamp_); + packet.set_capture_time_ms(last_capture_time_ms_); + packet.SetPayloadType(last_payload_type_); + return true; + } + + RTC_DCHECK_EQ(packet.Ssrc(), rtx_ssrc_); + if (packet.payload_size() > 0) { + // This is payload padding packet, don't update timestamp fields. + return true; + } + + packet.SetTimestamp(last_rtp_timestamp_); + packet.set_capture_time_ms(last_capture_time_ms_); + + // Only change the timestamp of padding packets sent over RTX. + // Padding only packets over RTP has to be sent as part of a media + // frame (and therefore the same timestamp). 
+ int64_t now_ms = clock_->TimeInMilliseconds(); + if (last_timestamp_time_ms_ > 0) { + packet.SetTimestamp(packet.Timestamp() + + (now_ms - last_timestamp_time_ms_) * + kTimestampTicksPerMs); + if (packet.capture_time_ms() > 0) { + packet.set_capture_time_ms(packet.capture_time_ms() + + (now_ms - last_timestamp_time_ms_)); + } + } + + return true; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.h new file mode 100644 index 000000000..67255164f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.h @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_PACKET_SEQUENCER_H_ +#define MODULES_RTP_RTCP_SOURCE_PACKET_SEQUENCER_H_ + +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +// Helper class used to assign RTP sequence numbers and populate some fields for +// padding packets based on the last sequenced packets. +// This class is not thread safe, the caller must provide that. +class PacketSequencer { + public: + // If |require_marker_before_media_padding_| is true, padding packets on the + // media ssrc is not allowed unless the last sequenced media packet had the + // marker bit set (i.e. don't insert padding packets between the first and + // last packets of a video frame). 
+ PacketSequencer(uint32_t media_ssrc, + uint32_t rtx_ssrc, + bool require_marker_before_media_padding, + Clock* clock); + + // Assigns sequence number, and in the case of non-RTX padding also timestamps + // and payload type. + // Returns false if sequencing failed, which it can do for instance if the + // packet to squence is padding on the media ssrc, but the media is mid frame + // (the last marker bit is false). + bool Sequence(RtpPacketToSend& packet); + + void set_media_sequence_number(uint16_t sequence_number) { + media_sequence_number_ = sequence_number; + } + void set_rtx_sequence_number(uint16_t sequence_number) { + rtx_sequence_number_ = sequence_number; + } + + void SetRtpState(const RtpState& state); + void PupulateRtpState(RtpState& state) const; + + uint16_t media_sequence_number() const { return media_sequence_number_; } + uint16_t rtx_sequence_number() const { return rtx_sequence_number_; } + + private: + void UpdateLastPacketState(const RtpPacketToSend& packet); + bool PopulatePaddingFields(RtpPacketToSend& packet); + + const uint32_t media_ssrc_; + const uint32_t rtx_ssrc_; + const bool require_marker_before_media_padding_; + Clock* const clock_; + + uint16_t media_sequence_number_; + uint16_t rtx_sequence_number_; + + int8_t last_payload_type_; + uint32_t last_rtp_timestamp_; + int64_t last_capture_time_ms_; + int64_t last_timestamp_time_ms_; + bool last_packet_marker_bit_; +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_PACKET_SEQUENCER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc index 6ec41a1eb..f5c3eafbf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc @@ -13,9 +13,11 @@ #include #include #include +#include #include #include 
"modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/time_util.h" @@ -23,9 +25,14 @@ #include "system_wrappers/include/clock.h" namespace webrtc { +namespace { +constexpr int64_t kStatisticsTimeoutMs = 8000; +constexpr int64_t kStatisticsProcessIntervalMs = 1000; -const int64_t kStatisticsTimeoutMs = 8000; -const int64_t kStatisticsProcessIntervalMs = 1000; +// Number of seconds since 1900 January 1 00:00 GMT (see +// https://tools.ietf.org/html/rfc868). +constexpr int64_t kNtpJan1970Millisecs = 2'208'988'800'000; +} // namespace StreamStatistician::~StreamStatistician() {} @@ -34,10 +41,14 @@ StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, int max_reordering_threshold) : ssrc_(ssrc), clock_(clock), + delta_internal_unix_epoch_ms_(clock_->CurrentNtpInMilliseconds() - + clock_->TimeInMilliseconds() - + kNtpJan1970Millisecs), incoming_bitrate_(kStatisticsProcessIntervalMs, RateStatistics::kBpsScale), max_reordering_threshold_(max_reordering_threshold), enable_retransmit_detection_(false), + cumulative_loss_is_capped_(false), jitter_q4_(0), cumulative_loss_(0), cumulative_loss_rtcp_offset_(0), @@ -100,7 +111,6 @@ bool StreamStatisticianImpl::UpdateOutOfOrder(const RtpPacketReceived& packet, } void StreamStatisticianImpl::UpdateCounters(const RtpPacketReceived& packet) { - MutexLock lock(&stream_lock_); RTC_DCHECK_EQ(ssrc_, packet.Ssrc()); int64_t now_ms = clock_->TimeInMilliseconds(); @@ -159,47 +169,42 @@ void StreamStatisticianImpl::UpdateJitter(const RtpPacketReceived& packet, void StreamStatisticianImpl::SetMaxReorderingThreshold( int max_reordering_threshold) { - MutexLock lock(&stream_lock_); max_reordering_threshold_ = max_reordering_threshold; } void StreamStatisticianImpl::EnableRetransmitDetection(bool enable) { - MutexLock 
lock(&stream_lock_); enable_retransmit_detection_ = enable; } RtpReceiveStats StreamStatisticianImpl::GetStats() const { - MutexLock lock(&stream_lock_); RtpReceiveStats stats; stats.packets_lost = cumulative_loss_; // TODO(nisse): Can we return a float instead? // Note: internal jitter value is in Q4 and needs to be scaled by 1/16. stats.jitter = jitter_q4_ >> 4; - stats.last_packet_received_timestamp_ms = - receive_counters_.last_packet_received_timestamp_ms; + if (receive_counters_.last_packet_received_timestamp_ms.has_value()) { + stats.last_packet_received_timestamp_ms = + *receive_counters_.last_packet_received_timestamp_ms + + delta_internal_unix_epoch_ms_; + } stats.packet_counter = receive_counters_.transmitted; return stats; } -bool StreamStatisticianImpl::GetActiveStatisticsAndReset( - RtcpStatistics* statistics) { - MutexLock lock(&stream_lock_); - if (clock_->TimeInMilliseconds() - last_receive_time_ms_ >= - kStatisticsTimeoutMs) { +void StreamStatisticianImpl::MaybeAppendReportBlockAndReset( + std::vector& report_blocks) { + int64_t now_ms = clock_->TimeInMilliseconds(); + if (now_ms - last_receive_time_ms_ >= kStatisticsTimeoutMs) { // Not active. - return false; + return; } if (!ReceivedRtpPacket()) { - return false; + return; } - *statistics = CalculateRtcpStatistics(); - - return true; -} - -RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() { - RtcpStatistics stats; + report_blocks.emplace_back(); + rtcp::ReportBlock& stats = report_blocks.back(); + stats.SetMediaSsrc(ssrc_); // Calculate fraction lost. int64_t exp_since_last = received_seq_max_ - last_report_seq_max_; RTC_DCHECK_GE(exp_since_last, 0); @@ -207,41 +212,42 @@ RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() { int32_t lost_since_last = cumulative_loss_ - last_report_cumulative_loss_; if (exp_since_last > 0 && lost_since_last > 0) { // Scale 0 to 255, where 255 is 100% loss. 
- stats.fraction_lost = - static_cast(255 * lost_since_last / exp_since_last); - } else { - stats.fraction_lost = 0; + stats.SetFractionLost(255 * lost_since_last / exp_since_last); } - // TODO(danilchap): Ensure |stats.packets_lost| is clamped to fit in a signed - // 24-bit value. - stats.packets_lost = cumulative_loss_ + cumulative_loss_rtcp_offset_; - if (stats.packets_lost < 0) { + int packets_lost = cumulative_loss_ + cumulative_loss_rtcp_offset_; + if (packets_lost < 0) { // Clamp to zero. Work around to accomodate for senders that misbehave with // negative cumulative loss. - stats.packets_lost = 0; + packets_lost = 0; cumulative_loss_rtcp_offset_ = -cumulative_loss_; } - stats.extended_highest_sequence_number = - static_cast(received_seq_max_); + if (packets_lost > 0x7fffff) { + // Packets lost is a 24 bit signed field, and thus should be clamped, as + // described in https://datatracker.ietf.org/doc/html/rfc3550#appendix-A.3 + if (!cumulative_loss_is_capped_) { + cumulative_loss_is_capped_ = true; + RTC_LOG(LS_WARNING) << "Cumulative loss reached maximum value for ssrc " + << ssrc_; + } + packets_lost = 0x7fffff; + } + stats.SetCumulativeLost(packets_lost); + stats.SetExtHighestSeqNum(received_seq_max_); // Note: internal jitter value is in Q4 and needs to be scaled by 1/16. - stats.jitter = jitter_q4_ >> 4; + stats.SetJitter(jitter_q4_ >> 4); // Only for report blocks in RTCP SR and RR. 
last_report_cumulative_loss_ = cumulative_loss_; last_report_seq_max_ = received_seq_max_; - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "cumulative_loss_pkts", - clock_->TimeInMilliseconds(), + BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "cumulative_loss_pkts", now_ms, cumulative_loss_, ssrc_); - BWE_TEST_LOGGING_PLOT_WITH_SSRC( - 1, "received_seq_max_pkts", clock_->TimeInMilliseconds(), - (received_seq_max_ - received_seq_first_), ssrc_); - - return stats; + BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "received_seq_max_pkts", now_ms, + (received_seq_max_ - received_seq_first_), + ssrc_); } absl::optional StreamStatisticianImpl::GetFractionLostInPercent() const { - MutexLock lock(&stream_lock_); if (!ReceivedRtpPacket()) { return absl::nullopt; } @@ -257,12 +263,10 @@ absl::optional StreamStatisticianImpl::GetFractionLostInPercent() const { StreamDataCounters StreamStatisticianImpl::GetReceiveStreamDataCounters() const { - MutexLock lock(&stream_lock_); return receive_counters_; } uint32_t StreamStatisticianImpl::BitrateReceived() const { - MutexLock lock(&stream_lock_); return incoming_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0); } @@ -295,21 +299,33 @@ bool StreamStatisticianImpl::IsRetransmitOfOldPacket( } std::unique_ptr ReceiveStatistics::Create(Clock* clock) { - return std::make_unique(clock); + return std::make_unique( + clock, [](uint32_t ssrc, Clock* clock, int max_reordering_threshold) { + return std::make_unique( + ssrc, clock, max_reordering_threshold); + }); } -ReceiveStatisticsImpl::ReceiveStatisticsImpl(Clock* clock) +std::unique_ptr ReceiveStatistics::CreateThreadCompatible( + Clock* clock) { + return std::make_unique( + clock, [](uint32_t ssrc, Clock* clock, int max_reordering_threshold) { + return std::make_unique( + ssrc, clock, max_reordering_threshold); + }); +} + +ReceiveStatisticsImpl::ReceiveStatisticsImpl( + Clock* clock, + std::function( + uint32_t ssrc, + Clock* clock, + int max_reordering_threshold)> stream_statistician_factory) : clock_(clock), - 
last_returned_ssrc_(0), + stream_statistician_factory_(std::move(stream_statistician_factory)), + last_returned_ssrc_idx_(0), max_reordering_threshold_(kDefaultMaxReorderingThreshold) {} -ReceiveStatisticsImpl::~ReceiveStatisticsImpl() { - while (!statisticians_.empty()) { - delete statisticians_.begin()->second; - statisticians_.erase(statisticians_.begin()); - } -} - void ReceiveStatisticsImpl::OnRtpPacket(const RtpPacketReceived& packet) { // StreamStatisticianImpl instance is created once and only destroyed when // this whole ReceiveStatisticsImpl is destroyed. StreamStatisticianImpl has @@ -318,34 +334,29 @@ void ReceiveStatisticsImpl::OnRtpPacket(const RtpPacketReceived& packet) { GetOrCreateStatistician(packet.Ssrc())->UpdateCounters(packet); } -StreamStatisticianImpl* ReceiveStatisticsImpl::GetStatistician( +StreamStatistician* ReceiveStatisticsImpl::GetStatistician( uint32_t ssrc) const { - MutexLock lock(&receive_statistics_lock_); const auto& it = statisticians_.find(ssrc); if (it == statisticians_.end()) - return NULL; - return it->second; + return nullptr; + return it->second.get(); } -StreamStatisticianImpl* ReceiveStatisticsImpl::GetOrCreateStatistician( +StreamStatisticianImplInterface* ReceiveStatisticsImpl::GetOrCreateStatistician( uint32_t ssrc) { - MutexLock lock(&receive_statistics_lock_); - StreamStatisticianImpl*& impl = statisticians_[ssrc]; + std::unique_ptr& impl = statisticians_[ssrc]; if (impl == nullptr) { // new element - impl = new StreamStatisticianImpl(ssrc, clock_, max_reordering_threshold_); + impl = + stream_statistician_factory_(ssrc, clock_, max_reordering_threshold_); + all_ssrcs_.push_back(ssrc); } - return impl; + return impl.get(); } void ReceiveStatisticsImpl::SetMaxReorderingThreshold( int max_reordering_threshold) { - std::map statisticians; - { - MutexLock lock(&receive_statistics_lock_); - max_reordering_threshold_ = max_reordering_threshold; - statisticians = statisticians_; - } - for (auto& statistician : 
statisticians) { + max_reordering_threshold_ = max_reordering_threshold; + for (auto& statistician : statisticians_) { statistician.second->SetMaxReorderingThreshold(max_reordering_threshold); } } @@ -364,42 +375,18 @@ void ReceiveStatisticsImpl::EnableRetransmitDetection(uint32_t ssrc, std::vector ReceiveStatisticsImpl::RtcpReportBlocks( size_t max_blocks) { - std::map statisticians; - { - MutexLock lock(&receive_statistics_lock_); - statisticians = statisticians_; - } std::vector result; - result.reserve(std::min(max_blocks, statisticians.size())); - auto add_report_block = [&result](uint32_t media_ssrc, - StreamStatisticianImpl* statistician) { - // Do we have receive statistics to send? - RtcpStatistics stats; - if (!statistician->GetActiveStatisticsAndReset(&stats)) - return; - result.emplace_back(); - rtcp::ReportBlock& block = result.back(); - block.SetMediaSsrc(media_ssrc); - block.SetFractionLost(stats.fraction_lost); - if (!block.SetCumulativeLost(stats.packets_lost)) { - RTC_LOG(LS_WARNING) << "Cumulative lost is oversized."; - result.pop_back(); - return; - } - block.SetExtHighestSeqNum(stats.extended_highest_sequence_number); - block.SetJitter(stats.jitter); - }; + result.reserve(std::min(max_blocks, all_ssrcs_.size())); - const auto start_it = statisticians.upper_bound(last_returned_ssrc_); - for (auto it = start_it; - result.size() < max_blocks && it != statisticians.end(); ++it) - add_report_block(it->first, it->second); - for (auto it = statisticians.begin(); - result.size() < max_blocks && it != start_it; ++it) - add_report_block(it->first, it->second); - - if (!result.empty()) - last_returned_ssrc_ = result.back().source_ssrc(); + size_t ssrc_idx = 0; + for (size_t i = 0; i < all_ssrcs_.size() && result.size() < max_blocks; ++i) { + ssrc_idx = (last_returned_ssrc_idx_ + i + 1) % all_ssrcs_.size(); + const uint32_t media_ssrc = all_ssrcs_[ssrc_idx]; + auto statistician_it = statisticians_.find(media_ssrc); + RTC_DCHECK(statistician_it != 
statisticians_.end()); + statistician_it->second->MaybeAppendReportBlockAndReset(result); + } + last_returned_ssrc_idx_ = ssrc_idx; return result; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h index 41830b0b4..44f5144df 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h @@ -12,98 +12,162 @@ #define MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_ #include -#include +#include +#include +#include +#include #include #include "absl/types/optional.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { -class StreamStatisticianImpl : public StreamStatistician { +// Extends StreamStatistician with methods needed by the implementation. +class StreamStatisticianImplInterface : public StreamStatistician { + public: + virtual ~StreamStatisticianImplInterface() = default; + virtual void MaybeAppendReportBlockAndReset( + std::vector& report_blocks) = 0; + virtual void SetMaxReorderingThreshold(int max_reordering_threshold) = 0; + virtual void EnableRetransmitDetection(bool enable) = 0; + virtual void UpdateCounters(const RtpPacketReceived& packet) = 0; +}; + +// Thread-compatible implementation of StreamStatisticianImplInterface. 
+class StreamStatisticianImpl : public StreamStatisticianImplInterface { public: StreamStatisticianImpl(uint32_t ssrc, Clock* clock, int max_reordering_threshold); ~StreamStatisticianImpl() override; + // Implements StreamStatistician RtpReceiveStats GetStats() const override; - - bool GetActiveStatisticsAndReset(RtcpStatistics* statistics); absl::optional GetFractionLostInPercent() const override; StreamDataCounters GetReceiveStreamDataCounters() const override; uint32_t BitrateReceived() const override; - void SetMaxReorderingThreshold(int max_reordering_threshold); - void EnableRetransmitDetection(bool enable); - + // Implements StreamStatisticianImplInterface + void MaybeAppendReportBlockAndReset( + std::vector& report_blocks) override; + void SetMaxReorderingThreshold(int max_reordering_threshold) override; + void EnableRetransmitDetection(bool enable) override; // Updates StreamStatistician for incoming packets. - void UpdateCounters(const RtpPacketReceived& packet); + void UpdateCounters(const RtpPacketReceived& packet) override; private: bool IsRetransmitOfOldPacket(const RtpPacketReceived& packet, - int64_t now_ms) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(stream_lock_); - RtcpStatistics CalculateRtcpStatistics() - RTC_EXCLUSIVE_LOCKS_REQUIRED(stream_lock_); - void UpdateJitter(const RtpPacketReceived& packet, int64_t receive_time_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(stream_lock_); + int64_t now_ms) const; + void UpdateJitter(const RtpPacketReceived& packet, int64_t receive_time_ms); // Updates StreamStatistician for out of order packets. // Returns true if packet considered to be out of order. bool UpdateOutOfOrder(const RtpPacketReceived& packet, int64_t sequence_number, - int64_t now_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(stream_lock_); + int64_t now_ms); // Checks if this StreamStatistician received any rtp packets. 
- bool ReceivedRtpPacket() const RTC_EXCLUSIVE_LOCKS_REQUIRED(stream_lock_) { - return received_seq_first_ >= 0; - } + bool ReceivedRtpPacket() const { return received_seq_first_ >= 0; } const uint32_t ssrc_; Clock* const clock_; - mutable Mutex stream_lock_; - RateStatistics incoming_bitrate_ RTC_GUARDED_BY(&stream_lock_); + // Delta used to map internal timestamps to Unix epoch ones. + const int64_t delta_internal_unix_epoch_ms_; + RateStatistics incoming_bitrate_; // In number of packets or sequence numbers. - int max_reordering_threshold_ RTC_GUARDED_BY(&stream_lock_); - bool enable_retransmit_detection_ RTC_GUARDED_BY(&stream_lock_); + int max_reordering_threshold_; + bool enable_retransmit_detection_; + bool cumulative_loss_is_capped_; // Stats on received RTP packets. - uint32_t jitter_q4_ RTC_GUARDED_BY(&stream_lock_); + uint32_t jitter_q4_; // Cumulative loss according to RFC 3550, which may be negative (and often is, // if packets are reordered and there are non-RTX retransmissions). - int32_t cumulative_loss_ RTC_GUARDED_BY(&stream_lock_); + int32_t cumulative_loss_; // Offset added to outgoing rtcp reports, to make ensure that the reported // cumulative loss is non-negative. Reports with negative values confuse some // senders, in particular, our own loss-based bandwidth estimator. 
- int32_t cumulative_loss_rtcp_offset_ RTC_GUARDED_BY(&stream_lock_); + int32_t cumulative_loss_rtcp_offset_; - int64_t last_receive_time_ms_ RTC_GUARDED_BY(&stream_lock_); - uint32_t last_received_timestamp_ RTC_GUARDED_BY(&stream_lock_); - SequenceNumberUnwrapper seq_unwrapper_ RTC_GUARDED_BY(&stream_lock_); - int64_t received_seq_first_ RTC_GUARDED_BY(&stream_lock_); - int64_t received_seq_max_ RTC_GUARDED_BY(&stream_lock_); + int64_t last_receive_time_ms_; + uint32_t last_received_timestamp_; + SequenceNumberUnwrapper seq_unwrapper_; + int64_t received_seq_first_; + int64_t received_seq_max_; // Assume that the other side restarted when there are two sequential packets // with large jump from received_seq_max_. - absl::optional received_seq_out_of_order_ - RTC_GUARDED_BY(&stream_lock_); + absl::optional received_seq_out_of_order_; // Current counter values. - StreamDataCounters receive_counters_ RTC_GUARDED_BY(&stream_lock_); + StreamDataCounters receive_counters_; // Counter values when we sent the last report. - int32_t last_report_cumulative_loss_ RTC_GUARDED_BY(&stream_lock_); - int64_t last_report_seq_max_ RTC_GUARDED_BY(&stream_lock_); + int32_t last_report_cumulative_loss_; + int64_t last_report_seq_max_; }; +// Thread-safe implementation of StreamStatisticianImplInterface. 
+class StreamStatisticianLocked : public StreamStatisticianImplInterface { + public: + StreamStatisticianLocked(uint32_t ssrc, + Clock* clock, + int max_reordering_threshold) + : impl_(ssrc, clock, max_reordering_threshold) {} + ~StreamStatisticianLocked() override = default; + + RtpReceiveStats GetStats() const override { + MutexLock lock(&stream_lock_); + return impl_.GetStats(); + } + absl::optional GetFractionLostInPercent() const override { + MutexLock lock(&stream_lock_); + return impl_.GetFractionLostInPercent(); + } + StreamDataCounters GetReceiveStreamDataCounters() const override { + MutexLock lock(&stream_lock_); + return impl_.GetReceiveStreamDataCounters(); + } + uint32_t BitrateReceived() const override { + MutexLock lock(&stream_lock_); + return impl_.BitrateReceived(); + } + void MaybeAppendReportBlockAndReset( + std::vector& report_blocks) override { + MutexLock lock(&stream_lock_); + impl_.MaybeAppendReportBlockAndReset(report_blocks); + } + void SetMaxReorderingThreshold(int max_reordering_threshold) override { + MutexLock lock(&stream_lock_); + return impl_.SetMaxReorderingThreshold(max_reordering_threshold); + } + void EnableRetransmitDetection(bool enable) override { + MutexLock lock(&stream_lock_); + return impl_.EnableRetransmitDetection(enable); + } + void UpdateCounters(const RtpPacketReceived& packet) override { + MutexLock lock(&stream_lock_); + return impl_.UpdateCounters(packet); + } + + private: + mutable Mutex stream_lock_; + StreamStatisticianImpl impl_ RTC_GUARDED_BY(&stream_lock_); +}; + +// Thread-compatible implementation. class ReceiveStatisticsImpl : public ReceiveStatistics { public: - explicit ReceiveStatisticsImpl(Clock* clock); - - ~ReceiveStatisticsImpl() override; + ReceiveStatisticsImpl( + Clock* clock, + std::function( + uint32_t ssrc, + Clock* clock, + int max_reordering_threshold)> stream_statistician_factory); + ~ReceiveStatisticsImpl() override = default; // Implements ReceiveStatisticsProvider. 
std::vector RtcpReportBlocks(size_t max_blocks) override; @@ -112,22 +176,72 @@ class ReceiveStatisticsImpl : public ReceiveStatistics { void OnRtpPacket(const RtpPacketReceived& packet) override; // Implements ReceiveStatistics. - // Note: More specific return type for use in the implementation. - StreamStatisticianImpl* GetStatistician(uint32_t ssrc) const override; + StreamStatistician* GetStatistician(uint32_t ssrc) const override; void SetMaxReorderingThreshold(int max_reordering_threshold) override; void SetMaxReorderingThreshold(uint32_t ssrc, int max_reordering_threshold) override; void EnableRetransmitDetection(uint32_t ssrc, bool enable) override; private: - StreamStatisticianImpl* GetOrCreateStatistician(uint32_t ssrc); + StreamStatisticianImplInterface* GetOrCreateStatistician(uint32_t ssrc); Clock* const clock_; - mutable Mutex receive_statistics_lock_; - uint32_t last_returned_ssrc_; - int max_reordering_threshold_ RTC_GUARDED_BY(receive_statistics_lock_); - std::map statisticians_ - RTC_GUARDED_BY(receive_statistics_lock_); + std::function( + uint32_t ssrc, + Clock* clock, + int max_reordering_threshold)> + stream_statistician_factory_; + // The index within `all_ssrcs_` that was last returned. + size_t last_returned_ssrc_idx_; + std::vector all_ssrcs_; + int max_reordering_threshold_; + std::unordered_map> + statisticians_; }; + +// Thread-safe implementation wrapping access to ReceiveStatisticsImpl with a +// mutex. 
+class ReceiveStatisticsLocked : public ReceiveStatistics { + public: + explicit ReceiveStatisticsLocked( + Clock* clock, + std::function( + uint32_t ssrc, + Clock* clock, + int max_reordering_threshold)> stream_statitician_factory) + : impl_(clock, std::move(stream_statitician_factory)) {} + ~ReceiveStatisticsLocked() override = default; + std::vector RtcpReportBlocks(size_t max_blocks) override { + MutexLock lock(&receive_statistics_lock_); + return impl_.RtcpReportBlocks(max_blocks); + } + void OnRtpPacket(const RtpPacketReceived& packet) override { + MutexLock lock(&receive_statistics_lock_); + return impl_.OnRtpPacket(packet); + } + StreamStatistician* GetStatistician(uint32_t ssrc) const override { + MutexLock lock(&receive_statistics_lock_); + return impl_.GetStatistician(ssrc); + } + void SetMaxReorderingThreshold(int max_reordering_threshold) override { + MutexLock lock(&receive_statistics_lock_); + return impl_.SetMaxReorderingThreshold(max_reordering_threshold); + } + void SetMaxReorderingThreshold(uint32_t ssrc, + int max_reordering_threshold) override { + MutexLock lock(&receive_statistics_lock_); + return impl_.SetMaxReorderingThreshold(ssrc, max_reordering_threshold); + } + void EnableRetransmitDetection(uint32_t ssrc, bool enable) override { + MutexLock lock(&receive_statistics_lock_); + return impl_.EnableRetransmitDetection(ssrc, enable); + } + + private: + mutable Mutex receive_statistics_lock_; + ReceiveStatisticsImpl impl_ RTC_GUARDED_BY(&receive_statistics_lock_); +}; + } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc index 6fed7314c..723064eeb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc @@ 
-15,6 +15,7 @@ #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -51,9 +52,8 @@ bool RemoteNtpTimeEstimator::UpdateRtcpTimestamp(int64_t rtt, // Update extrapolator with the new arrival time. // The extrapolator assumes the ntp time. - int64_t receiver_arrival_time_ms = - clock_->TimeInMilliseconds() + NtpOffsetMs(); - int64_t sender_send_time_ms = Clock::NtpToMs(ntp_secs, ntp_frac); + int64_t receiver_arrival_time_ms = clock_->CurrentNtpInMilliseconds(); + int64_t sender_send_time_ms = NtpTime(ntp_secs, ntp_frac).ToMs(); int64_t sender_arrival_time_ms = sender_send_time_ms + rtt / 2; int64_t remote_to_local_clocks_offset = receiver_arrival_time_ms - sender_arrival_time_ms; @@ -72,16 +72,7 @@ int64_t RemoteNtpTimeEstimator::Estimate(uint32_t rtp_timestamp) { int64_t receiver_capture_ntp_ms = sender_capture_ntp_ms + remote_to_local_clocks_offset; - // TODO(bugs.webrtc.org/11327): Clock::CurrentNtpInMilliseconds() was - // previously used to calculate the offset between the local and the remote - // clock. However, rtc::TimeMillis() + NtpOffsetMs() is now used as the local - // ntp clock value. To preserve the old behavior of this method, the return - // value is adjusted with the difference between the two local ntp clocks. 
int64_t now_ms = clock_->TimeInMilliseconds(); - int64_t offset_between_local_ntp_clocks = - clock_->CurrentNtpInMilliseconds() - now_ms - NtpOffsetMs(); - receiver_capture_ntp_ms += offset_between_local_ntp_clocks; - if (now_ms - last_timing_log_ms_ > kTimingLogIntervalMs) { RTC_LOG(LS_INFO) << "RTP timestamp: " << rtp_timestamp << " in NTP clock: " << sender_capture_ntp_ms @@ -89,6 +80,7 @@ int64_t RemoteNtpTimeEstimator::Estimate(uint32_t rtp_timestamp) { << receiver_capture_ntp_ms; last_timing_log_ms_ = now_ms; } + return receiver_capture_ntp_ms; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h index 9627aac95..6c804bbc7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h @@ -62,7 +62,6 @@ class ExtendedReports : public RtcpPacket { void ParseRrtrBlock(const uint8_t* block, uint16_t block_length); void ParseDlrrBlock(const uint8_t* block, uint16_t block_length); - void ParseVoipMetricBlock(const uint8_t* block, uint16_t block_length); void ParseTargetBitrateBlock(const uint8_t* block, uint16_t block_length); absl::optional rrtr_block_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h index 2603a6715..99f6d12da 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h @@ -11,9 +11,9 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_LOSS_NOTIFICATION_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_LOSS_NOTIFICATION_H_ +#include "absl/base/attributes.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include 
"modules/rtp_rtcp/source/rtcp_packet/psfb.h" -#include "rtc_base/system/unused.h" namespace webrtc { namespace rtcp { @@ -29,14 +29,15 @@ class LossNotification : public Psfb { size_t BlockLength() const override; + ABSL_MUST_USE_RESULT bool Create(uint8_t* packet, size_t* index, size_t max_length, - PacketReadyCallback callback) const override - RTC_WARN_UNUSED_RESULT; + PacketReadyCallback callback) const override; // Parse assumes header is already parsed and validated. - bool Parse(const CommonHeader& packet) RTC_WARN_UNUSED_RESULT; + ABSL_MUST_USE_RESULT + bool Parse(const CommonHeader& packet); // Set all of the values transmitted by the loss notification message. // If the values may not be represented by a loss notification message, @@ -44,9 +45,10 @@ class LossNotification : public Psfb { // when |last_recieved| is ahead of |last_decoded| by more than 0x7fff. // This is because |last_recieved| is represented on the wire as a delta, // and only 15 bits are available for that delta. + ABSL_MUST_USE_RESULT bool Set(uint16_t last_decoded, uint16_t last_received, - bool decodability_flag) RTC_WARN_UNUSED_RESULT; + bool decodability_flag); // RTP sequence number of the first packet belong to the last decoded // non-discardable frame. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc index 1db5eeb55..526acf555 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc @@ -67,22 +67,6 @@ const size_t kMaxNumberOfStoredRrtrs = 300; constexpr TimeDelta kDefaultVideoReportInterval = TimeDelta::Seconds(1); constexpr TimeDelta kDefaultAudioReportInterval = TimeDelta::Seconds(5); -std::set GetRegisteredSsrcs( - const RtpRtcpInterface::Configuration& config) { - std::set ssrcs; - ssrcs.insert(config.local_media_ssrc); - if (config.rtx_send_ssrc) { - ssrcs.insert(*config.rtx_send_ssrc); - } - if (config.fec_generator) { - absl::optional flexfec_ssrc = config.fec_generator->FecSsrc(); - if (flexfec_ssrc) { - ssrcs.insert(*flexfec_ssrc); - } - } - return ssrcs; -} - // Returns true if the |timestamp| has exceeded the |interval * // kRrTimeoutIntervals| period and was reset (set to PlusInfinity()). Returns // false if the timer was either already reset or if it has not expired. @@ -100,6 +84,22 @@ bool ResetTimestampIfExpired(const Timestamp now, } // namespace +RTCPReceiver::RegisteredSsrcs::RegisteredSsrcs( + const RtpRtcpInterface::Configuration& config) { + ssrcs_.push_back(config.local_media_ssrc); + if (config.rtx_send_ssrc) { + ssrcs_.push_back(*config.rtx_send_ssrc); + } + if (config.fec_generator) { + absl::optional flexfec_ssrc = config.fec_generator->FecSsrc(); + if (flexfec_ssrc) { + ssrcs_.push_back(*flexfec_ssrc); + } + } + // Ensure that the RegisteredSsrcs can inline the SSRCs. + RTC_DCHECK_LE(ssrcs_.size(), RTCPReceiver::RegisteredSsrcs::kMaxSsrcs); +} + struct RTCPReceiver::PacketInformation { uint32_t packet_type_flags = 0; // RTCPPacketTypeFlags bit field. 
@@ -116,51 +116,13 @@ struct RTCPReceiver::PacketInformation { std::unique_ptr loss_notification; }; -// Structure for handing TMMBR and TMMBN rtcp messages (RFC5104, section 3.5.4). -struct RTCPReceiver::TmmbrInformation { - struct TimedTmmbrItem { - rtcp::TmmbItem tmmbr_item; - int64_t last_updated_ms; - }; - - int64_t last_time_received_ms = 0; - - bool ready_for_delete = false; - - std::vector tmmbn; - std::map tmmbr; -}; - -// Structure for storing received RRTR RTCP messages (RFC3611, section 4.4). -struct RTCPReceiver::RrtrInformation { - RrtrInformation(uint32_t ssrc, - uint32_t received_remote_mid_ntp_time, - uint32_t local_receive_mid_ntp_time) - : ssrc(ssrc), - received_remote_mid_ntp_time(received_remote_mid_ntp_time), - local_receive_mid_ntp_time(local_receive_mid_ntp_time) {} - - uint32_t ssrc; - // Received NTP timestamp in compact representation. - uint32_t received_remote_mid_ntp_time; - // NTP time when the report was received in compact representation. - uint32_t local_receive_mid_ntp_time; -}; - -struct RTCPReceiver::LastFirStatus { - LastFirStatus(int64_t now_ms, uint8_t sequence_number) - : request_ms(now_ms), sequence_number(sequence_number) {} - int64_t request_ms; - uint8_t sequence_number; -}; - RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, ModuleRtpRtcp* owner) : clock_(config.clock), receiver_only_(config.receiver_only), rtp_rtcp_(owner), main_ssrc_(config.local_media_ssrc), - registered_ssrcs_(GetRegisteredSsrcs(config)), + registered_ssrcs_(config), rtcp_bandwidth_observer_(config.bandwidth_callback), rtcp_intra_frame_observer_(config.intra_frame_callback), rtcp_loss_notification_observer_(config.rtcp_loss_notification_observer), @@ -174,10 +136,12 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, // TODO(bugs.webrtc.org/10774): Remove fallback. 
remote_ssrc_(0), remote_sender_rtp_time_(0), - xr_rrtr_status_(false), + remote_sender_packet_count_(0), + remote_sender_octet_count_(0), + remote_sender_reports_count_(0), + xr_rrtr_status_(config.non_sender_rtt_measurement), xr_rr_rtt_ms_(0), oldest_tmmbr_info_ms_(0), - stats_callback_(config.rtcp_statistics_callback), cname_callback_(config.rtcp_cname_callback), report_block_data_observer_(config.report_block_data_observer), packet_type_counter_observer_(config.rtcp_packet_type_counter_observer), @@ -256,11 +220,6 @@ int32_t RTCPReceiver::RTT(uint32_t remote_ssrc, return 0; } -void RTCPReceiver::SetRtcpXrRrtrStatus(bool enable) { - MutexLock lock(&rtcp_receiver_lock_); - xr_rrtr_status_ = enable; -} - bool RTCPReceiver::GetAndResetXrRrRtt(int64_t* rtt_ms) { RTC_DCHECK(rtt_ms); MutexLock lock(&rtcp_receiver_lock_); @@ -330,7 +289,10 @@ bool RTCPReceiver::NTP(uint32_t* received_ntp_secs, uint32_t* received_ntp_frac, uint32_t* rtcp_arrival_time_secs, uint32_t* rtcp_arrival_time_frac, - uint32_t* rtcp_timestamp) const { + uint32_t* rtcp_timestamp, + uint32_t* remote_sender_packet_count, + uint64_t* remote_sender_octet_count, + uint64_t* remote_sender_reports_count) const { MutexLock lock(&rtcp_receiver_lock_); if (!last_received_sr_ntp_.Valid()) return false; @@ -340,7 +302,6 @@ bool RTCPReceiver::NTP(uint32_t* received_ntp_secs, *received_ntp_secs = remote_sender_ntp_time_.seconds(); if (received_ntp_frac) *received_ntp_frac = remote_sender_ntp_time_.fractions(); - // Rtp time from incoming SenderReport. if (rtcp_timestamp) *rtcp_timestamp = remote_sender_rtp_time_; @@ -351,6 +312,14 @@ bool RTCPReceiver::NTP(uint32_t* received_ntp_secs, if (rtcp_arrival_time_frac) *rtcp_arrival_time_frac = last_received_sr_ntp_.fractions(); + // Counters. 
+ if (remote_sender_packet_count) + *remote_sender_packet_count = remote_sender_packet_count_; + if (remote_sender_octet_count) + *remote_sender_octet_count = remote_sender_octet_count_; + if (remote_sender_reports_count) + *remote_sender_reports_count = remote_sender_reports_count_; + return true; } @@ -377,17 +346,6 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { return last_xr_rtis; } -// We can get multiple receive reports when we receive the report from a CE. -int32_t RTCPReceiver::StatisticsReceived( - std::vector* receive_blocks) const { - RTC_DCHECK(receive_blocks); - MutexLock lock(&rtcp_receiver_lock_); - for (const auto& reports_per_receiver : received_report_blocks_) - for (const auto& report : reports_per_receiver.second) - receive_blocks->push_back(report.second.report_block()); - return 0; -} - std::vector RTCPReceiver::GetLatestReportBlockData() const { std::vector result; MutexLock lock(&rtcp_receiver_lock_); @@ -524,6 +482,9 @@ void RTCPReceiver::HandleSenderReport(const CommonHeader& rtcp_block, remote_sender_ntp_time_ = sender_report.ntp(); remote_sender_rtp_time_ = sender_report.rtp_timestamp(); last_received_sr_ntp_ = TimeMicrosToNtp(clock_->TimeInMicroseconds()); + remote_sender_packet_count_ = sender_report.sender_packet_count(); + remote_sender_octet_count_ = sender_report.sender_octet_count(); + remote_sender_reports_count_++; } else { // We will only store the send report from one source, but // we will store all the receive blocks. @@ -567,7 +528,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, // which the information in this reception report block pertains. // Filter out all report blocks that are not for us. 
- if (registered_ssrcs_.count(report_block.source_ssrc()) == 0) + if (!registered_ssrcs_.contains(report_block.source_ssrc())) return; last_received_rb_ = clock_->CurrentTime(); @@ -719,7 +680,6 @@ void RTCPReceiver::HandleSdes(const CommonHeader& rtcp_block, } for (const rtcp::Sdes::Chunk& chunk : sdes.chunks()) { - received_cnames_[chunk.ssrc] = chunk.cname; if (cname_callback_) cname_callback_->OnCname(chunk.ssrc, chunk.cname); } @@ -783,7 +743,6 @@ void RTCPReceiver::HandleBye(const CommonHeader& rtcp_block) { tmmbr_info->ready_for_delete = true; last_fir_.erase(bye.sender_ssrc()); - received_cnames_.erase(bye.sender_ssrc()); auto it = received_rrtrs_ssrc_it_.find(bye.sender_ssrc()); if (it != received_rrtrs_ssrc_it_.end()) { received_rrtrs_.erase(it->second); @@ -835,7 +794,7 @@ void RTCPReceiver::HandleXrReceiveReferenceTime(uint32_t sender_ssrc, } void RTCPReceiver::HandleXrDlrrReportBlock(const rtcp::ReceiveTimeInfo& rti) { - if (registered_ssrcs_.count(rti.ssrc) == 0) // Not to us. + if (!registered_ssrcs_.contains(rti.ssrc)) // Not to us. return; // Caller should explicitly enable rtt calculation using extended reports. @@ -1058,14 +1017,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( // Might trigger a OnReceivedBandwidthEstimateUpdate. NotifyTmmbrUpdated(); } - uint32_t local_ssrc; - std::set registered_ssrcs; - { - // We don't want to hold this critsect when triggering the callbacks below. 
- MutexLock lock(&rtcp_receiver_lock_); - local_ssrc = main_ssrc_; - registered_ssrcs = registered_ssrcs_; - } + if (!receiver_only_ && (packet_information.packet_type_flags & kRtcpSrReq)) { rtp_rtcp_->OnRequestSendReport(); } @@ -1092,7 +1044,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( RTC_LOG(LS_VERBOSE) << "Incoming FIR from SSRC " << packet_information.remote_ssrc; } - rtcp_intra_frame_observer_->OnReceivedIntraFrameRequest(local_ssrc); + rtcp_intra_frame_observer_->OnReceivedIntraFrameRequest(main_ssrc_); } } if (rtcp_loss_notification_observer_ && @@ -1100,7 +1052,7 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( rtcp::LossNotification* loss_notification = packet_information.loss_notification.get(); RTC_DCHECK(loss_notification); - if (loss_notification->media_ssrc() == local_ssrc) { + if (loss_notification->media_ssrc() == main_ssrc_) { rtcp_loss_notification_observer_->OnReceivedLossNotification( loss_notification->media_ssrc(), loss_notification->last_decoded(), loss_notification->last_received(), @@ -1132,8 +1084,8 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( (packet_information.packet_type_flags & kRtcpTransportFeedback)) { uint32_t media_source_ssrc = packet_information.transport_feedback->media_ssrc(); - if (media_source_ssrc == local_ssrc || - registered_ssrcs.find(media_source_ssrc) != registered_ssrcs.end()) { + if (media_source_ssrc == main_ssrc_ || + registered_ssrcs_.contains(media_source_ssrc)) { transport_feedback_observer_->OnTransportFeedback( *packet_information.transport_feedback); } @@ -1152,18 +1104,6 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( } if (!receiver_only_) { - if (stats_callback_) { - for (const auto& report_block : packet_information.report_blocks) { - RtcpStatistics stats; - stats.packets_lost = report_block.packets_lost; - stats.extended_highest_sequence_number = - report_block.extended_highest_sequence_number; - stats.fraction_lost = report_block.fraction_lost; - stats.jitter = 
report_block.jitter; - - stats_callback_->StatisticsUpdated(stats, report_block.source_ssrc); - } - } if (report_block_data_observer_) { for (const auto& report_block_data : packet_information.report_block_datas) { @@ -1174,20 +1114,6 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( } } -int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC, - char cName[RTCP_CNAME_SIZE]) const { - RTC_DCHECK(cName); - - MutexLock lock(&rtcp_receiver_lock_); - auto received_cname_it = received_cnames_.find(remoteSSRC); - if (received_cname_it == received_cnames_.end()) - return -1; - - size_t length = received_cname_it->second.copy(cName, RTCP_CNAME_SIZE - 1); - cName[length] = 0; - return 0; -} - std::vector RTCPReceiver::TmmbrReceived() { MutexLock lock(&rtcp_receiver_lock_); std::vector candidates; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h index f97fe6129..429df55d4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h @@ -15,6 +15,7 @@ #include #include #include +#include #include #include "api/array_view.h" @@ -23,6 +24,7 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_nack_stats.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -67,15 +69,22 @@ class RTCPReceiver final { void SetRemoteSSRC(uint32_t ssrc); uint32_t RemoteSSRC() const; - // Get received cname. - int32_t CNAME(uint32_t remote_ssrc, char cname[RTCP_CNAME_SIZE]) const; - // Get received NTP. 
+ // The types for the arguments below derive from the specification: + // - `remote_sender_packet_count`: `RTCSentRtpStreamStats.packetsSent` [1] + // - `remote_sender_octet_count`: `RTCSentRtpStreamStats.bytesSent` [1] + // - `remote_sender_reports_count`: + // `RTCRemoteOutboundRtpStreamStats.reportsSent` [2] + // [1] https://www.w3.org/TR/webrtc-stats/#remoteoutboundrtpstats-dict* + // [2] https://www.w3.org/TR/webrtc-stats/#dom-rtcsentrtpstreamstats bool NTP(uint32_t* received_ntp_secs, uint32_t* received_ntp_frac, uint32_t* rtcp_arrival_time_secs, uint32_t* rtcp_arrival_time_frac, - uint32_t* rtcp_timestamp) const; + uint32_t* rtcp_timestamp, + uint32_t* remote_sender_packet_count, + uint64_t* remote_sender_octet_count, + uint64_t* remote_sender_reports_count) const; std::vector ConsumeReceivedXrReferenceTimeInfo(); @@ -86,7 +95,6 @@ class RTCPReceiver final { int64_t* min_rtt_ms, int64_t* max_rtt_ms) const; - void SetRtcpXrRrtrStatus(bool enable); bool GetAndResetXrRrRtt(int64_t* rtt_ms); // Called once per second on the worker thread to do rtt calculations. @@ -94,8 +102,6 @@ class RTCPReceiver final { absl::optional OnPeriodicRttUpdate(Timestamp newer_than, bool sending); - // Get statistics. - int32_t StatisticsReceived(std::vector* receiveBlocks) const; // A snapshot of Report Blocks with additional data of interest to statistics. // Within this list, the sender-source SSRC pair is unique and per-pair the // ReportBlockData represents the latest Report Block that was received for @@ -120,10 +126,68 @@ class RTCPReceiver final { void NotifyTmmbrUpdated(); private: + // A lightweight inlined set of local SSRCs. + class RegisteredSsrcs { + public: + static constexpr size_t kMaxSsrcs = 3; + // Initializes the set of registered local SSRCS by extracting them from the + // provided `config`. + explicit RegisteredSsrcs(const RtpRtcpInterface::Configuration& config); + + // Indicates if `ssrc` is in the set of registered local SSRCs. 
+ bool contains(uint32_t ssrc) const { + return absl::c_linear_search(ssrcs_, ssrc); + } + + private: + absl::InlinedVector ssrcs_; + }; + struct PacketInformation; - struct TmmbrInformation; - struct RrtrInformation; - struct LastFirStatus; + + // Structure for handing TMMBR and TMMBN rtcp messages (RFC5104, + // section 3.5.4). + struct TmmbrInformation { + struct TimedTmmbrItem { + rtcp::TmmbItem tmmbr_item; + int64_t last_updated_ms; + }; + + int64_t last_time_received_ms = 0; + + bool ready_for_delete = false; + + std::vector tmmbn; + std::map tmmbr; + }; + + // Structure for storing received RRTR RTCP messages (RFC3611, section 4.4). + struct RrtrInformation { + RrtrInformation(uint32_t ssrc, + uint32_t received_remote_mid_ntp_time, + uint32_t local_receive_mid_ntp_time) + : ssrc(ssrc), + received_remote_mid_ntp_time(received_remote_mid_ntp_time), + local_receive_mid_ntp_time(local_receive_mid_ntp_time) {} + + uint32_t ssrc; + // Received NTP timestamp in compact representation. + uint32_t received_remote_mid_ntp_time; + // NTP time when the report was received in compact representation. + uint32_t local_receive_mid_ntp_time; + }; + + struct LastFirStatus { + LastFirStatus(int64_t now_ms, uint8_t sequence_number) + : request_ms(now_ms), sequence_number(sequence_number) {} + int64_t request_ms; + uint8_t sequence_number; + }; + + // TODO(boivie): `ReportBlockDataMap` and `ReportBlockMap` should be converted + // to std::unordered_map, but as there are too many tests that assume a + // specific order, it's not easily done. + // RTCP report blocks mapped by remote SSRC. using ReportBlockDataMap = std::map; // RTCP report blocks map mapped by source SSRC. @@ -225,7 +289,8 @@ class RTCPReceiver final { const bool receiver_only_; ModuleRtpRtcp* const rtp_rtcp_; const uint32_t main_ssrc_; - const std::set registered_ssrcs_; + // The set of registered local SSRCs. 
+ const RegisteredSsrcs registered_ssrcs_; RtcpBandwidthObserver* const rtcp_bandwidth_observer_; RtcpIntraFrameObserver* const rtcp_intra_frame_observer_; @@ -243,27 +308,28 @@ class RTCPReceiver final { uint32_t remote_sender_rtp_time_ RTC_GUARDED_BY(rtcp_receiver_lock_); // When did we receive the last send report. NtpTime last_received_sr_ntp_ RTC_GUARDED_BY(rtcp_receiver_lock_); + uint32_t remote_sender_packet_count_ RTC_GUARDED_BY(rtcp_receiver_lock_); + uint64_t remote_sender_octet_count_ RTC_GUARDED_BY(rtcp_receiver_lock_); + uint64_t remote_sender_reports_count_ RTC_GUARDED_BY(rtcp_receiver_lock_); // Received RRTR information in ascending receive time order. std::list received_rrtrs_ RTC_GUARDED_BY(rtcp_receiver_lock_); // Received RRTR information mapped by remote ssrc. - std::map::iterator> + std::unordered_map::iterator> received_rrtrs_ssrc_it_ RTC_GUARDED_BY(rtcp_receiver_lock_); // Estimated rtt, zero when there is no valid estimate. - bool xr_rrtr_status_ RTC_GUARDED_BY(rtcp_receiver_lock_); + const bool xr_rrtr_status_; int64_t xr_rr_rtt_ms_; int64_t oldest_tmmbr_info_ms_ RTC_GUARDED_BY(rtcp_receiver_lock_); // Mapped by remote ssrc. - std::map tmmbr_infos_ + std::unordered_map tmmbr_infos_ RTC_GUARDED_BY(rtcp_receiver_lock_); ReportBlockMap received_report_blocks_ RTC_GUARDED_BY(rtcp_receiver_lock_); - std::map last_fir_ - RTC_GUARDED_BY(rtcp_receiver_lock_); - std::map received_cnames_ + std::unordered_map last_fir_ RTC_GUARDED_BY(rtcp_receiver_lock_); // The last time we received an RTCP Report block for this module. @@ -274,11 +340,7 @@ class RTCPReceiver final { // delivered RTP packet to the remote side. Timestamp last_increased_sequence_number_ = Timestamp::PlusInfinity(); - RtcpStatisticsCallback* const stats_callback_; RtcpCnameCallback* const cname_callback_; - // TODO(hbos): Remove RtcpStatisticsCallback in favor of - // ReportBlockDataObserver; the ReportBlockData contains a superset of the - // RtcpStatistics data. 
ReportBlockDataObserver* const report_block_data_observer_; RtcpPacketTypeCounterObserver* const packet_type_counter_observer_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc index 61e6085bb..ba63fd036 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc @@ -50,36 +50,10 @@ const uint32_t kRtcpAnyExtendedReports = kRtcpXrReceiverReferenceTime | constexpr int32_t kDefaultVideoReportInterval = 1000; constexpr int32_t kDefaultAudioReportInterval = 5000; -class PacketContainer : public rtcp::CompoundPacket { - public: - PacketContainer(Transport* transport, RtcEventLog* event_log) - : transport_(transport), event_log_(event_log) {} - - PacketContainer() = delete; - PacketContainer(const PacketContainer&) = delete; - PacketContainer& operator=(const PacketContainer&) = delete; - - size_t SendPackets(size_t max_payload_length) { - size_t bytes_sent = 0; - Build(max_payload_length, [&](rtc::ArrayView packet) { - if (transport_->SendRtcp(packet.data(), packet.size())) { - bytes_sent += packet.size(); - if (event_log_) { - event_log_->Log(std::make_unique(packet)); - } - } - }); - return bytes_sent; - } - - private: - Transport* transport_; - RtcEventLog* const event_log_; -}; +} // namespace // Helper to put several RTCP packets into lower layer datagram RTCP packet. -// Prefer to use this class instead of PacketContainer. 
-class PacketSender { +class RTCPSender::PacketSender { public: PacketSender(rtcp::RtcpPacket::PacketReadyCallback callback, size_t max_packet_size) @@ -102,8 +76,6 @@ class PacketSender { } } - bool IsEmpty() const { return index_ == 0; } - private: const rtcp::RtcpPacket::PacketReadyCallback callback_; const size_t max_packet_size_; @@ -111,8 +83,6 @@ class PacketSender { uint8_t buffer_[IP_PACKET_SIZE]; }; -} // namespace - RTCPSender::FeedbackState::FeedbackState() : packets_sent(0), media_bytes_sent(0), @@ -173,7 +143,8 @@ RTCPSender::RTCPSender(const RtpRtcpInterface::Configuration& config) packet_oh_send_(0), max_packet_size_(IP_PACKET_SIZE - 28), // IPv4 + UDP by default. - xr_send_receiver_reference_time_enabled_(false), + xr_send_receiver_reference_time_enabled_( + config.non_sender_rtt_measurement), packet_type_counter_observer_(config.rtcp_packet_type_counter_observer), send_video_bitrate_allocation_(false), last_payload_type_(-1) { @@ -216,8 +187,8 @@ bool RTCPSender::Sending() const { return sending_; } -int32_t RTCPSender::SetSendingStatus(const FeedbackState& feedback_state, - bool sending) { +void RTCPSender::SetSendingStatus(const FeedbackState& feedback_state, + bool sending) { bool sendRTCPBye = false; { MutexLock lock(&mutex_rtcp_sender_); @@ -230,9 +201,11 @@ int32_t RTCPSender::SetSendingStatus(const FeedbackState& feedback_state, } sending_ = sending; } - if (sendRTCPBye) - return SendRTCP(feedback_state, kRtcpBye); - return 0; + if (sendRTCPBye) { + if (SendRTCP(feedback_state, kRtcpBye) != 0) { + RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE"; + } + } } int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state, @@ -240,21 +213,42 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state, uint16_t last_received_seq_num, bool decodability_flag, bool buffering_allowed) { - MutexLock lock(&mutex_rtcp_sender_); + int32_t error_code = -1; + auto callback = [&](rtc::ArrayView packet) { + 
transport_->SendRtcp(packet.data(), packet.size()); + error_code = 0; + if (event_log_) { + event_log_->Log(std::make_unique(packet)); + } + }; + absl::optional sender; + { + MutexLock lock(&mutex_rtcp_sender_); - loss_notification_state_.last_decoded_seq_num = last_decoded_seq_num; - loss_notification_state_.last_received_seq_num = last_received_seq_num; - loss_notification_state_.decodability_flag = decodability_flag; + if (!loss_notification_.Set(last_decoded_seq_num, last_received_seq_num, + decodability_flag)) { + return -1; + } - SetFlag(kRtcpLossNotification, /*is_volatile=*/true); + SetFlag(kRtcpLossNotification, /*is_volatile=*/true); - if (buffering_allowed) { - // The loss notification will be batched with additional feedback messages. - return 0; + if (buffering_allowed) { + // The loss notification will be batched with additional feedback + // messages. + return 0; + } + + sender.emplace(callback, max_packet_size_); + auto result = ComputeCompoundRTCPPacket( + feedback_state, RTCPPacketType::kRtcpLossNotification, 0, nullptr, + *sender); + if (result) { + return *result; + } } + sender->Send(); - return SendCompoundRTCPLocked( - feedback_state, {RTCPPacketType::kRtcpLossNotification}, 0, nullptr); + return error_code; } void RTCPSender::SetRemb(int64_t bitrate_bps, std::vector ssrcs) { @@ -280,15 +274,6 @@ bool RTCPSender::TMMBR() const { return IsFlagPresent(RTCPPacketType::kRtcpTmmbr); } -void RTCPSender::SetTMMBRStatus(bool enable) { - MutexLock lock(&mutex_rtcp_sender_); - if (enable) { - SetFlag(RTCPPacketType::kRtcpTmmbr, false); - } else { - ConsumeFlag(RTCPPacketType::kRtcpTmmbr, true); - } -} - void RTCPSender::SetMaxRtpPacketSize(size_t max_packet_size) { MutexLock lock(&mutex_rtcp_sender_); max_packet_size_ = max_packet_size; @@ -337,31 +322,6 @@ int32_t RTCPSender::SetCNAME(const char* c_name) { return 0; } -int32_t RTCPSender::AddMixedCNAME(uint32_t SSRC, const char* c_name) { - RTC_DCHECK(c_name); - RTC_DCHECK_LT(strlen(c_name), 
RTCP_CNAME_SIZE); - MutexLock lock(&mutex_rtcp_sender_); - // One spot is reserved for ssrc_/cname_. - // TODO(danilchap): Add support for more than 30 contributes by sending - // several sdes packets. - if (csrc_cnames_.size() >= rtcp::Sdes::kMaxNumberOfChunks - 1) - return -1; - - csrc_cnames_[SSRC] = c_name; - return 0; -} - -int32_t RTCPSender::RemoveMixedCNAME(uint32_t SSRC) { - MutexLock lock(&mutex_rtcp_sender_); - auto it = csrc_cnames_.find(SSRC); - - if (it == csrc_cnames_.end()) - return -1; - - csrc_cnames_.erase(it); - return 0; -} - bool RTCPSender::TimeToSendRTCPReport(bool sendKeyframeBeforeRTP) const { /* For audio we use a configurable interval (default: 5 seconds) @@ -434,17 +394,10 @@ bool RTCPSender::TimeToSendRTCPReport(bool sendKeyframeBeforeRTP) const { now += RTCP_SEND_BEFORE_KEY_FRAME_MS; } - if (now >= next_time_to_send_rtcp_) { - return true; - } else if (now < 0x0000ffff && - next_time_to_send_rtcp_ > 0xffff0000) { // 65 sec margin - // wrap - return true; - } - return false; + return now >= next_time_to_send_rtcp_; } -std::unique_ptr RTCPSender::BuildSR(const RtcpContext& ctx) { +void RTCPSender::BuildSR(const RtcpContext& ctx, PacketSender& sender) { // Timestamp shouldn't be estimated before first media frame. 
RTC_DCHECK_GE(last_frame_capture_time_ms_, 0); // The timestamp of this RTCP packet should be estimated as the timestamp of @@ -463,69 +416,58 @@ std::unique_ptr RTCPSender::BuildSR(const RtcpContext& ctx) { timestamp_offset_ + last_rtp_timestamp_ + ((ctx.now_us_ + 500) / 1000 - last_frame_capture_time_ms_) * rtp_rate; - rtcp::SenderReport* report = new rtcp::SenderReport(); - report->SetSenderSsrc(ssrc_); - report->SetNtp(TimeMicrosToNtp(ctx.now_us_)); - report->SetRtpTimestamp(rtp_timestamp); - report->SetPacketCount(ctx.feedback_state_.packets_sent); - report->SetOctetCount(ctx.feedback_state_.media_bytes_sent); - report->SetReportBlocks(CreateReportBlocks(ctx.feedback_state_)); - - return std::unique_ptr(report); + rtcp::SenderReport report; + report.SetSenderSsrc(ssrc_); + report.SetNtp(TimeMicrosToNtp(ctx.now_us_)); + report.SetRtpTimestamp(rtp_timestamp); + report.SetPacketCount(ctx.feedback_state_.packets_sent); + report.SetOctetCount(ctx.feedback_state_.media_bytes_sent); + report.SetReportBlocks(CreateReportBlocks(ctx.feedback_state_)); + sender.AppendPacket(report); } -std::unique_ptr RTCPSender::BuildSDES( - const RtcpContext& ctx) { +void RTCPSender::BuildSDES(const RtcpContext& ctx, PacketSender& sender) { size_t length_cname = cname_.length(); RTC_CHECK_LT(length_cname, RTCP_CNAME_SIZE); - rtcp::Sdes* sdes = new rtcp::Sdes(); - sdes->AddCName(ssrc_, cname_); - - for (const auto& it : csrc_cnames_) - RTC_CHECK(sdes->AddCName(it.first, it.second)); - - return std::unique_ptr(sdes); + rtcp::Sdes sdes; + sdes.AddCName(ssrc_, cname_); + sender.AppendPacket(sdes); } -std::unique_ptr RTCPSender::BuildRR(const RtcpContext& ctx) { - rtcp::ReceiverReport* report = new rtcp::ReceiverReport(); - report->SetSenderSsrc(ssrc_); - report->SetReportBlocks(CreateReportBlocks(ctx.feedback_state_)); - - return std::unique_ptr(report); +void RTCPSender::BuildRR(const RtcpContext& ctx, PacketSender& sender) { + rtcp::ReceiverReport report; + report.SetSenderSsrc(ssrc_); + 
report.SetReportBlocks(CreateReportBlocks(ctx.feedback_state_)); + sender.AppendPacket(report); } -std::unique_ptr RTCPSender::BuildPLI(const RtcpContext& ctx) { - rtcp::Pli* pli = new rtcp::Pli(); - pli->SetSenderSsrc(ssrc_); - pli->SetMediaSsrc(remote_ssrc_); +void RTCPSender::BuildPLI(const RtcpContext& ctx, PacketSender& sender) { + rtcp::Pli pli; + pli.SetSenderSsrc(ssrc_); + pli.SetMediaSsrc(remote_ssrc_); ++packet_type_counter_.pli_packets; - - return std::unique_ptr(pli); + sender.AppendPacket(pli); } -std::unique_ptr RTCPSender::BuildFIR(const RtcpContext& ctx) { +void RTCPSender::BuildFIR(const RtcpContext& ctx, PacketSender& sender) { ++sequence_number_fir_; - rtcp::Fir* fir = new rtcp::Fir(); - fir->SetSenderSsrc(ssrc_); - fir->AddRequestTo(remote_ssrc_, sequence_number_fir_); + rtcp::Fir fir; + fir.SetSenderSsrc(ssrc_); + fir.AddRequestTo(remote_ssrc_, sequence_number_fir_); ++packet_type_counter_.fir_packets; - - return std::unique_ptr(fir); + sender.AppendPacket(fir); } -std::unique_ptr RTCPSender::BuildREMB( - const RtcpContext& ctx) { - rtcp::Remb* remb = new rtcp::Remb(); - remb->SetSenderSsrc(ssrc_); - remb->SetBitrateBps(remb_bitrate_); - remb->SetSsrcs(remb_ssrcs_); - - return std::unique_ptr(remb); +void RTCPSender::BuildREMB(const RtcpContext& ctx, PacketSender& sender) { + rtcp::Remb remb; + remb.SetSenderSsrc(ssrc_); + remb.SetBitrateBps(remb_bitrate_); + remb.SetSsrcs(remb_ssrcs_); + sender.AppendPacket(remb); } void RTCPSender::SetTargetBitrate(unsigned int target_bitrate) { @@ -533,10 +475,9 @@ void RTCPSender::SetTargetBitrate(unsigned int target_bitrate) { tmmbr_send_bps_ = target_bitrate; } -std::unique_ptr RTCPSender::BuildTMMBR( - const RtcpContext& ctx) { +void RTCPSender::BuildTMMBR(const RtcpContext& ctx, PacketSender& sender) { if (ctx.feedback_state_.receiver == nullptr) - return nullptr; + return; // Before sending the TMMBR check the received TMMBN, only an owner is // allowed to raise the bitrate: // * If the sender is an 
owner of the TMMBN -> send TMMBR @@ -556,7 +497,7 @@ std::unique_ptr RTCPSender::BuildTMMBR( if (candidate.bitrate_bps() == tmmbr_send_bps_ && candidate.packet_overhead() == packet_oh_send_) { // Do not send the same tuple. - return nullptr; + return; } } if (!tmmbr_owner) { @@ -570,62 +511,53 @@ std::unique_ptr RTCPSender::BuildTMMBR( tmmbr_owner = TMMBRHelp::IsOwner(bounding, ssrc_); if (!tmmbr_owner) { // Did not enter bounding set, no meaning to send this request. - return nullptr; + return; } } } if (!tmmbr_send_bps_) - return nullptr; + return; - rtcp::Tmmbr* tmmbr = new rtcp::Tmmbr(); - tmmbr->SetSenderSsrc(ssrc_); + rtcp::Tmmbr tmmbr; + tmmbr.SetSenderSsrc(ssrc_); rtcp::TmmbItem request; request.set_ssrc(remote_ssrc_); request.set_bitrate_bps(tmmbr_send_bps_); request.set_packet_overhead(packet_oh_send_); - tmmbr->AddTmmbr(request); - - return std::unique_ptr(tmmbr); + tmmbr.AddTmmbr(request); + sender.AppendPacket(tmmbr); } -std::unique_ptr RTCPSender::BuildTMMBN( - const RtcpContext& ctx) { - rtcp::Tmmbn* tmmbn = new rtcp::Tmmbn(); - tmmbn->SetSenderSsrc(ssrc_); +void RTCPSender::BuildTMMBN(const RtcpContext& ctx, PacketSender& sender) { + rtcp::Tmmbn tmmbn; + tmmbn.SetSenderSsrc(ssrc_); for (const rtcp::TmmbItem& tmmbr : tmmbn_to_send_) { if (tmmbr.bitrate_bps() > 0) { - tmmbn->AddTmmbr(tmmbr); + tmmbn.AddTmmbr(tmmbr); } } - - return std::unique_ptr(tmmbn); + sender.AppendPacket(tmmbn); } -std::unique_ptr RTCPSender::BuildAPP(const RtcpContext& ctx) { - rtcp::App* app = new rtcp::App(); - app->SetSenderSsrc(ssrc_); - - return std::unique_ptr(app); +void RTCPSender::BuildAPP(const RtcpContext& ctx, PacketSender& sender) { + rtcp::App app; + app.SetSenderSsrc(ssrc_); + sender.AppendPacket(app); } -std::unique_ptr RTCPSender::BuildLossNotification( - const RtcpContext& ctx) { - auto loss_notification = std::make_unique( - loss_notification_state_.last_decoded_seq_num, - loss_notification_state_.last_received_seq_num, - 
loss_notification_state_.decodability_flag); - loss_notification->SetSenderSsrc(ssrc_); - loss_notification->SetMediaSsrc(remote_ssrc_); - return std::move(loss_notification); +void RTCPSender::BuildLossNotification(const RtcpContext& ctx, + PacketSender& sender) { + loss_notification_.SetSenderSsrc(ssrc_); + loss_notification_.SetMediaSsrc(remote_ssrc_); + sender.AppendPacket(loss_notification_); } -std::unique_ptr RTCPSender::BuildNACK( - const RtcpContext& ctx) { - rtcp::Nack* nack = new rtcp::Nack(); - nack->SetSenderSsrc(ssrc_); - nack->SetMediaSsrc(remote_ssrc_); - nack->SetPacketIds(ctx.nack_list_, ctx.nack_size_); +void RTCPSender::BuildNACK(const RtcpContext& ctx, PacketSender& sender) { + rtcp::Nack nack; + nack.SetSenderSsrc(ssrc_); + nack.SetMediaSsrc(remote_ssrc_); + nack.SetPacketIds(ctx.nack_list_, ctx.nack_size_); // Report stats. for (int idx = 0; idx < ctx.nack_size_; ++idx) { @@ -635,31 +567,29 @@ std::unique_ptr RTCPSender::BuildNACK( packet_type_counter_.unique_nack_requests = nack_stats_.unique_requests(); ++packet_type_counter_.nack_packets; - - return std::unique_ptr(nack); + sender.AppendPacket(nack); } -std::unique_ptr RTCPSender::BuildBYE(const RtcpContext& ctx) { - rtcp::Bye* bye = new rtcp::Bye(); - bye->SetSenderSsrc(ssrc_); - bye->SetCsrcs(csrcs_); - - return std::unique_ptr(bye); +void RTCPSender::BuildBYE(const RtcpContext& ctx, PacketSender& sender) { + rtcp::Bye bye; + bye.SetSenderSsrc(ssrc_); + bye.SetCsrcs(csrcs_); + sender.AppendPacket(bye); } -std::unique_ptr RTCPSender::BuildExtendedReports( - const RtcpContext& ctx) { - std::unique_ptr xr(new rtcp::ExtendedReports()); - xr->SetSenderSsrc(ssrc_); +void RTCPSender::BuildExtendedReports(const RtcpContext& ctx, + PacketSender& sender) { + rtcp::ExtendedReports xr; + xr.SetSenderSsrc(ssrc_); if (!sending_ && xr_send_receiver_reference_time_enabled_) { rtcp::Rrtr rrtr; rrtr.SetNtp(TimeMicrosToNtp(ctx.now_us_)); - xr->SetRrtr(rrtr); + xr.SetRrtr(rrtr); } for (const 
rtcp::ReceiveTimeInfo& rti : ctx.feedback_state_.last_xr_rtis) { - xr->AddDlrrItem(rti); + xr.AddDlrrItem(rti); } if (send_video_bitrate_allocation_) { @@ -674,72 +604,53 @@ std::unique_ptr RTCPSender::BuildExtendedReports( } } - xr->SetTargetBitrate(target_bitrate); + xr.SetTargetBitrate(target_bitrate); send_video_bitrate_allocation_ = false; } - - return std::move(xr); + sender.AppendPacket(xr); } int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state, - RTCPPacketType packetType, + RTCPPacketType packet_type, int32_t nack_size, const uint16_t* nack_list) { - return SendCompoundRTCP( - feedback_state, std::set(&packetType, &packetType + 1), - nack_size, nack_list); -} - -int32_t RTCPSender::SendCompoundRTCP( - const FeedbackState& feedback_state, - const std::set& packet_types, - int32_t nack_size, - const uint16_t* nack_list) { - PacketContainer container(transport_, event_log_); - size_t max_packet_size; - + int32_t error_code = -1; + auto callback = [&](rtc::ArrayView packet) { + if (transport_->SendRtcp(packet.data(), packet.size())) { + error_code = 0; + if (event_log_) { + event_log_->Log(std::make_unique(packet)); + } + } + }; + absl::optional sender; { MutexLock lock(&mutex_rtcp_sender_); - auto result = ComputeCompoundRTCPPacket(feedback_state, packet_types, - nack_size, nack_list, &container); + sender.emplace(callback, max_packet_size_); + auto result = ComputeCompoundRTCPPacket(feedback_state, packet_type, + nack_size, nack_list, *sender); if (result) { return *result; } - max_packet_size = max_packet_size_; } + sender->Send(); - size_t bytes_sent = container.SendPackets(max_packet_size); - return bytes_sent == 0 ? 
-1 : 0; -} - -int32_t RTCPSender::SendCompoundRTCPLocked( - const FeedbackState& feedback_state, - const std::set& packet_types, - int32_t nack_size, - const uint16_t* nack_list) { - PacketContainer container(transport_, event_log_); - auto result = ComputeCompoundRTCPPacket(feedback_state, packet_types, - nack_size, nack_list, &container); - if (result) { - return *result; - } - size_t bytes_sent = container.SendPackets(max_packet_size_); - return bytes_sent == 0 ? -1 : 0; + return error_code; } absl::optional RTCPSender::ComputeCompoundRTCPPacket( const FeedbackState& feedback_state, - const std::set& packet_types, + RTCPPacketType packet_type, int32_t nack_size, const uint16_t* nack_list, - rtcp::CompoundPacket* out_packet) { + PacketSender& sender) { if (method_ == RtcpMode::kOff) { RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled."; return -1; } - // Add all flags as volatile. Non volatile entries will not be overwritten. - // All new volatile flags added will be consumed by the end of this call. - SetFlags(packet_types, true); + // Add the flag as volatile. Non volatile entries will not be overwritten. + // The new volatile flag will be consumed by the end of this call. + SetFlag(packet_type, true); // Prevent sending streams to send SR before any media has been sent. const bool can_calculate_rtp_timestamp = (last_frame_capture_time_ms_ >= 0); @@ -766,37 +677,37 @@ absl::optional RTCPSender::ComputeCompoundRTCPPacket( PrepareReport(feedback_state); - std::unique_ptr packet_bye; + bool create_bye = false; auto it = report_flags_.begin(); while (it != report_flags_.end()) { - auto builder_it = builders_.find(it->type); + uint32_t rtcp_packet_type = it->type; + if (it->is_volatile) { report_flags_.erase(it++); } else { ++it; } + // If there is a BYE, don't append now - save it and append it + // at the end later. 
+ if (rtcp_packet_type == kRtcpBye) { + create_bye = true; + continue; + } + auto builder_it = builders_.find(rtcp_packet_type); if (builder_it == builders_.end()) { - RTC_NOTREACHED() << "Could not find builder for packet type " << it->type; + RTC_NOTREACHED() << "Could not find builder for packet type " + << rtcp_packet_type; } else { BuilderFunc func = builder_it->second; - std::unique_ptr packet = (this->*func)(context); - if (packet == nullptr) - return -1; - // If there is a BYE, don't append now - save it and append it - // at the end later. - if (builder_it->first == kRtcpBye) { - packet_bye = std::move(packet); - } else { - out_packet->Append(std::move(packet)); - } + (this->*func)(context, sender); } } // Append the BYE now at the end - if (packet_bye) { - out_packet->Append(std::move(packet_bye)); + if (create_bye) { + BuildBYE(context, sender); } if (packet_type_counter_observer_ != nullptr) { @@ -896,16 +807,6 @@ void RTCPSender::SetCsrcs(const std::vector& csrcs) { csrcs_ = csrcs; } -void RTCPSender::SendRtcpXrReceiverReferenceTime(bool enable) { - MutexLock lock(&mutex_rtcp_sender_); - xr_send_receiver_reference_time_enabled_ = enable; -} - -bool RTCPSender::RtcpXrReceiverReferenceTime() const { - MutexLock lock(&mutex_rtcp_sender_); - return xr_send_receiver_reference_time_enabled_; -} - void RTCPSender::SetTmmbn(std::vector bounding_set) { MutexLock lock(&mutex_rtcp_sender_); tmmbn_to_send_ = std::move(bounding_set); @@ -920,12 +821,6 @@ void RTCPSender::SetFlag(uint32_t type, bool is_volatile) { } } -void RTCPSender::SetFlags(const std::set& types, - bool is_volatile) { - for (RTCPPacketType type : types) - SetFlag(type, is_volatile); -} - bool RTCPSender::IsFlagPresent(uint32_t type) const { return report_flags_.find(ReportFlag(type, false)) != report_flags_.end(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h index 22b2bb7b7..aab2c9051 
100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_sender.h @@ -20,7 +20,6 @@ #include "absl/types/optional.h" #include "api/call/transport.h" #include "api/video/video_bitrate_allocation.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -28,6 +27,7 @@ #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" @@ -75,8 +75,8 @@ class RTCPSender final { void SetRTCPStatus(RtcpMode method) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); bool Sending() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t SetSendingStatus(const FeedbackState& feedback_state, - bool enabled) + void SetSendingStatus(const FeedbackState& feedback_state, + bool enabled) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); // combine the functions int32_t SetNackStatus(bool enable) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); @@ -100,12 +100,6 @@ class RTCPSender final { int32_t SetCNAME(const char* cName) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t AddMixedCNAME(uint32_t SSRC, const char* c_name) - RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - - int32_t RemoveMixedCNAME(uint32_t SSRC) - RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - bool TimeToSendRTCPReport(bool sendKeyframeBeforeRTP = false) const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); @@ -115,12 +109,6 @@ class RTCPSender final { const uint16_t* nackList = 0) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t 
SendCompoundRTCP(const FeedbackState& feedback_state, - const std::set& packetTypes, - int32_t nackSize = 0, - const uint16_t* nackList = nullptr) - RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - int32_t SendLossNotification(const FeedbackState& feedback_state, uint16_t last_decoded_seq_num, uint16_t last_received_seq_num, @@ -135,20 +123,12 @@ class RTCPSender final { bool TMMBR() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetTMMBRStatus(bool enable) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetMaxRtpPacketSize(size_t max_packet_size) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); void SetTmmbn(std::vector bounding_set) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SendRtcpXrReceiverReferenceTime(bool enable) - RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - - bool RtcpXrReceiverReferenceTime() const - RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); - void SetCsrcs(const std::vector& csrcs) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); @@ -162,20 +142,14 @@ class RTCPSender final { private: class RtcpContext; - - int32_t SendCompoundRTCPLocked(const FeedbackState& feedback_state, - const std::set& packet_types, - int32_t nack_size, - const uint16_t* nack_list) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); + class PacketSender; absl::optional ComputeCompoundRTCPPacket( const FeedbackState& feedback_state, - const std::set& packet_types, + RTCPPacketType packet_type, int32_t nack_size, const uint16_t* nack_list, - rtcp::CompoundPacket* out_packet) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); + PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); // Determine which RTCP messages should be sent and setup flags. 
void PrepareReport(const FeedbackState& feedback_state) @@ -185,36 +159,33 @@ class RTCPSender final { const FeedbackState& feedback_state) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildSR(const RtcpContext& context) + void BuildSR(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildRR(const RtcpContext& context) + void BuildRR(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildSDES(const RtcpContext& context) + void BuildSDES(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildPLI(const RtcpContext& context) + void BuildPLI(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildREMB(const RtcpContext& context) + void BuildREMB(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildTMMBR(const RtcpContext& context) + void BuildTMMBR(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildTMMBN(const RtcpContext& context) + void BuildTMMBN(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildAPP(const RtcpContext& context) + void BuildAPP(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildLossNotification( - const RtcpContext& context) + void BuildLossNotification(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildExtendedReports( - const RtcpContext& context) + void BuildExtendedReports(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildBYE(const 
RtcpContext& context) + void BuildBYE(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildFIR(const RtcpContext& context) + void BuildFIR(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - std::unique_ptr BuildNACK(const RtcpContext& context) + void BuildNACK(const RtcpContext& context, PacketSender& sender) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - private: const bool audio_; const uint32_t ssrc_; Clock* const clock_; @@ -240,8 +211,6 @@ class RTCPSender final { ReceiveStatisticsProvider* receive_statistics_ RTC_GUARDED_BY(mutex_rtcp_sender_); - std::map csrc_cnames_ - RTC_GUARDED_BY(mutex_rtcp_sender_); // send CSRCs std::vector csrcs_ RTC_GUARDED_BY(mutex_rtcp_sender_); @@ -249,14 +218,7 @@ class RTCPSender final { // Full intra request uint8_t sequence_number_fir_ RTC_GUARDED_BY(mutex_rtcp_sender_); - // Loss Notification - struct LossNotificationState { - uint16_t last_decoded_seq_num; - uint16_t last_received_seq_num; - bool decodability_flag; - }; - LossNotificationState loss_notification_state_ - RTC_GUARDED_BY(mutex_rtcp_sender_); + rtcp::LossNotification loss_notification_ RTC_GUARDED_BY(mutex_rtcp_sender_); // REMB int64_t remb_bitrate_ RTC_GUARDED_BY(mutex_rtcp_sender_); @@ -268,8 +230,7 @@ class RTCPSender final { size_t max_packet_size_ RTC_GUARDED_BY(mutex_rtcp_sender_); // True if sending of XR Receiver reference time report is enabled. 
- bool xr_send_receiver_reference_time_enabled_ - RTC_GUARDED_BY(mutex_rtcp_sender_); + const bool xr_send_receiver_reference_time_enabled_; RtcpPacketTypeCounterObserver* const packet_type_counter_observer_; RtcpPacketTypeCounter packet_type_counter_ RTC_GUARDED_BY(mutex_rtcp_sender_); @@ -289,8 +250,6 @@ class RTCPSender final { void SetFlag(uint32_t type, bool is_volatile) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); - void SetFlags(const std::set& types, bool is_volatile) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); bool IsFlagPresent(uint32_t type) const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); bool ConsumeFlag(uint32_t type, bool forced = false) @@ -308,8 +267,7 @@ class RTCPSender final { std::set report_flags_ RTC_GUARDED_BY(mutex_rtcp_sender_); - typedef std::unique_ptr (RTCPSender::*BuilderFunc)( - const RtcpContext&); + typedef void (RTCPSender::*BuilderFunc)(const RtcpContext&, PacketSender&); // Map from RTCPPacketType to builder. std::map builders_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h index 8a8fd6aed..0501b9af7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h @@ -17,6 +17,7 @@ #include "api/task_queue/task_queue_base.h" #include "api/video/video_bitrate_allocation.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -61,6 +62,9 @@ struct RtcpTransceiverConfig { // Maximum packet size outgoing transport accepts. size_t max_packet_size = 1200; + // The clock to use when querying for the NTP time. Should be set. + Clock* clock = nullptr; + // Transport to send rtcp packets to. Should be set. 
Transport* outgoing_transport = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc index cba594dc6..8f0cb349b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc @@ -47,14 +47,14 @@ RtpDependencyDescriptorReader::RtpDependencyDescriptorReader( uint32_t RtpDependencyDescriptorReader::ReadBits(size_t bit_count) { uint32_t value = 0; - if (!buffer_.ReadBits(&value, bit_count)) + if (!buffer_.ReadBits(bit_count, value)) parsing_failed_ = true; return value; } uint32_t RtpDependencyDescriptorReader::ReadNonSymmetric(size_t num_values) { uint32_t value = 0; - if (!buffer_.ReadNonSymmetric(&value, num_values)) + if (!buffer_.ReadNonSymmetric(num_values, value)) parsing_failed_ = true; return value; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc index 6c3966cb9..86f48582a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.cc @@ -177,7 +177,7 @@ size_t RtpPacketizerH264::PacketizeStapA(size_t fragment_index) { return fragment_size; } if (fragment_index == input_fragments_.size() - 1) { - // Last fragment, so StrapA might be the last packet. + // Last fragment, so STAP-A might be the last packet. 
return fragment_size + limits_.last_packet_reduction_len; } return fragment_size; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc index c16dcaf6f..aebe884c0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extension_map.cc @@ -50,6 +50,7 @@ constexpr ExtensionInfo kExtensions[] = { CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), + CreateExtensionInfo(), }; // Because of kRtpExtensionNone, NumberOfExtension is 1 bigger than the actual diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc index b540e4b22..1c3073e90 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -823,4 +823,32 @@ bool InbandComfortNoiseExtension::Write(rtc::ArrayView data, return true; } +// VideoFrameTrackingIdExtension +// +// 0 1 2 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | ID | L=1 | video-frame-tracking-id | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + +constexpr RTPExtensionType VideoFrameTrackingIdExtension::kId; +constexpr uint8_t VideoFrameTrackingIdExtension::kValueSizeBytes; +constexpr const char VideoFrameTrackingIdExtension::kUri[]; + +bool VideoFrameTrackingIdExtension::Parse(rtc::ArrayView data, + uint16_t* video_frame_tracking_id) { + if (data.size() != kValueSizeBytes) { + return false; + } + *video_frame_tracking_id = ByteReader::ReadBigEndian(data.data()); + return true; +} + +bool VideoFrameTrackingIdExtension::Write(rtc::ArrayView data, + uint16_t video_frame_tracking_id) { + 
RTC_DCHECK_EQ(data.size(), kValueSizeBytes); + ByteWriter::WriteBigEndian(data.data(), video_frame_tracking_id); + return true; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h index 1352611fb..f6e7a579a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h @@ -307,5 +307,21 @@ class InbandComfortNoiseExtension { absl::optional level); }; +class VideoFrameTrackingIdExtension { + public: + using value_type = uint16_t; + static constexpr RTPExtensionType kId = kRtpExtensionVideoFrameTrackingId; + static constexpr uint8_t kValueSizeBytes = 2; + static constexpr const char kUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-frame-tracking-id"; + static bool Parse(rtc::ArrayView data, + uint16_t* video_frame_tracking_id); + static size_t ValueSize(uint16_t /*video_frame_tracking_id*/) { + return kValueSizeBytes; + } + static bool Write(rtc::ArrayView data, + uint16_t video_frame_tracking_id); +}; + } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc index 38d29cc2b..84769d0f4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.cc @@ -198,7 +198,8 @@ void RtpPacket::ZeroMutableExtensions() { case RTPExtensionType::kRtpExtensionVideoContentType: case RTPExtensionType::kRtpExtensionVideoLayersAllocation: case RTPExtensionType::kRtpExtensionVideoRotation: - case RTPExtensionType::kRtpExtensionInbandComfortNoise: { + case RTPExtensionType::kRtpExtensionInbandComfortNoise: + case 
RTPExtensionType::kRtpExtensionVideoFrameTrackingId: { // Non-mutable extension. Don't change it. break; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.h index b3e67be7c..aa854f35a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet.h @@ -30,6 +30,8 @@ class RtpPacket { // packet creating and used if available in Parse function. // Adding and getting extensions will fail until |extensions| is // provided via constructor or IdentifyExtensions function. + // |*extensions| is only accessed during construction; the pointer is not + // stored. RtpPacket(); explicit RtpPacket(const ExtensionManager* extensions); RtpPacket(const RtpPacket&); @@ -178,8 +180,10 @@ class RtpPacket { uint16_t SetExtensionLengthMaybeAddZeroPadding(size_t extensions_offset); - uint8_t* WriteAt(size_t offset) { return buffer_.data() + offset; } - void WriteAt(size_t offset, uint8_t byte) { buffer_.data()[offset] = byte; } + uint8_t* WriteAt(size_t offset) { return buffer_.MutableData() + offset; } + void WriteAt(size_t offset, uint8_t byte) { + buffer_.MutableData()[offset] = byte; + } const uint8_t* ReadAt(size_t offset) const { return buffer_.data() + offset; } // Header. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc index 1fbfb7651..508993305 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc @@ -134,7 +134,7 @@ void RtpPacketHistory::PutRtpPacket(std::unique_ptr packet, // Store packet. 
const uint16_t rtp_seq_no = packet->SequenceNumber(); int packet_index = GetPacketIndex(rtp_seq_no); - if (packet_index >= 0u && + if (packet_index >= 0 && static_cast(packet_index) < packet_history_.size() && packet_history_[packet_index].packet_ != nullptr) { RTC_LOG(LS_WARNING) << "Duplicate packet inserted: " << rtp_seq_no; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc index feadee1db..6b2cc7698 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.cc @@ -21,8 +21,10 @@ namespace webrtc { RtpPacketReceived::RtpPacketReceived() = default; -RtpPacketReceived::RtpPacketReceived(const ExtensionManager* extensions) - : RtpPacket(extensions) {} +RtpPacketReceived::RtpPacketReceived( + const ExtensionManager* extensions, + webrtc::Timestamp arrival_time /*= webrtc::Timestamp::MinusInfinity()*/) + : RtpPacket(extensions), arrival_time_(arrival_time) {} RtpPacketReceived::RtpPacketReceived(const RtpPacketReceived& packet) = default; RtpPacketReceived::RtpPacketReceived(RtpPacketReceived&& packet) = default; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h index f5d317668..431d3f52b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h @@ -12,19 +12,26 @@ #include -#include +#include +#include "absl/base/attributes.h" #include "api/array_view.h" +#include "api/ref_counted_base.h" #include "api/rtp_headers.h" +#include "api/scoped_refptr.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtp_packet.h" -#include "system_wrappers/include/ntp_time.h" namespace webrtc { // Class to hold rtp 
packet with metadata for receiver side. +// The metadata is not parsed from the rtp packet, but may be derived from the +// data that is parsed from the rtp packet. class RtpPacketReceived : public RtpPacket { public: RtpPacketReceived(); - explicit RtpPacketReceived(const ExtensionManager* extensions); + explicit RtpPacketReceived( + const ExtensionManager* extensions, + webrtc::Timestamp arrival_time = webrtc::Timestamp::MinusInfinity()); RtpPacketReceived(const RtpPacketReceived& packet); RtpPacketReceived(RtpPacketReceived&& packet); @@ -39,12 +46,17 @@ class RtpPacketReceived : public RtpPacket { // Time in local time base as close as it can to packet arrived on the // network. - int64_t arrival_time_ms() const { return arrival_time_ms_; } - void set_arrival_time_ms(int64_t time) { arrival_time_ms_ = time; } + webrtc::Timestamp arrival_time() const { return arrival_time_; } + void set_arrival_time(webrtc::Timestamp time) { arrival_time_ = time; } - // Estimated from Timestamp() using rtcp Sender Reports. - NtpTime capture_ntp_time() const { return capture_time_; } - void set_capture_ntp_time(NtpTime time) { capture_time_ = time; } + ABSL_DEPRECATED("Use arrival_time() instead") + int64_t arrival_time_ms() const { + return arrival_time_.IsMinusInfinity() ? -1 : arrival_time_.ms(); + } + ABSL_DEPRECATED("Use set_arrival_time() instead") + void set_arrival_time_ms(int64_t time) { + arrival_time_ = webrtc::Timestamp::Millis(time); + } // Flag if packet was recovered via RTX or FEC. bool recovered() const { return recovered_; } @@ -55,21 +67,20 @@ class RtpPacketReceived : public RtpPacket { payload_type_frequency_ = value; } - // Additional data bound to the RTP packet for use in application code, - // outside of WebRTC. - rtc::ArrayView application_data() const { - return application_data_; + // An application can attach arbitrary data to an RTP packet using + // `additional_data`. The additional data does not affect WebRTC processing. 
+ rtc::scoped_refptr additional_data() const { + return additional_data_; } - void set_application_data(rtc::ArrayView data) { - application_data_.assign(data.begin(), data.end()); + void set_additional_data(rtc::scoped_refptr data) { + additional_data_ = std::move(data); } private: - NtpTime capture_time_; - int64_t arrival_time_ms_ = 0; + webrtc::Timestamp arrival_time_ = Timestamp::MinusInfinity(); int payload_type_frequency_ = 0; bool recovered_ = false; - std::vector application_data_; + rtc::scoped_refptr additional_data_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h index 9aaf9a52e..2411deac4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h @@ -13,10 +13,12 @@ #include #include -#include +#include #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/ref_counted_base.h" +#include "api/scoped_refptr.h" #include "api/video/video_timing.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" @@ -24,6 +26,8 @@ namespace webrtc { // Class to hold rtp packet with metadata for sender side. +// The metadata is not send over the wire, but packet sender may use it to +// create rtp header extensions or other data that is sent over the wire. class RtpPacketToSend : public RtpPacket { public: // RtpPacketToSend::Type is deprecated. Use RtpPacketMediaType directly. @@ -64,14 +68,13 @@ class RtpPacketToSend : public RtpPacket { } bool allow_retransmission() { return allow_retransmission_; } - // Additional data bound to the RTP packet for use in application code, - // outside of WebRTC. 
- rtc::ArrayView application_data() const { - return application_data_; + // An application can attach arbitrary data to an RTP packet using + // `additional_data`. The additional data does not affect WebRTC processing. + rtc::scoped_refptr additional_data() const { + return additional_data_; } - - void set_application_data(rtc::ArrayView data) { - application_data_.assign(data.begin(), data.end()); + void set_additional_data(rtc::scoped_refptr data) { + additional_data_ = std::move(data); } void set_packetization_finish_time_ms(int64_t time) { @@ -122,7 +125,7 @@ class RtpPacketToSend : public RtpPacket { absl::optional packet_type_; bool allow_retransmission_ = false; absl::optional retransmitted_sequence_number_; - std::vector application_data_; + rtc::scoped_refptr additional_data_; bool is_first_packet_of_frame_ = false; bool is_key_frame_ = false; bool fec_protect_packet_ = false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index b2268c7d1..5a79f55d3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -24,6 +24,7 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "system_wrappers/include/ntp_time.h" #ifdef _WIN32 // Disable warning C4355: 'this' : used in base member initializer list. @@ -122,20 +123,18 @@ void ModuleRtpRtcpImpl::Process() { // processed RTT for at least |kRtpRtcpRttProcessTimeMs| milliseconds. // Note that LastReceivedReportBlockMs() grabs a lock, so check // |process_rtt| first. 
- if (process_rtt && + if (process_rtt && rtt_stats_ != nullptr && rtcp_receiver_.LastReceivedReportBlockMs() > last_rtt_process_time_) { - std::vector receive_blocks; - rtcp_receiver_.StatisticsReceived(&receive_blocks); - int64_t max_rtt = 0; - for (std::vector::iterator it = receive_blocks.begin(); - it != receive_blocks.end(); ++it) { - int64_t rtt = 0; - rtcp_receiver_.RTT(it->sender_ssrc, &rtt, NULL, NULL, NULL); - max_rtt = (rtt > max_rtt) ? rtt : max_rtt; + int64_t max_rtt_ms = 0; + for (const auto& block : rtcp_receiver_.GetLatestReportBlockData()) { + if (block.last_rtt_ms() > max_rtt_ms) { + max_rtt_ms = block.last_rtt_ms(); + } } // Report the rtt. - if (rtt_stats_ && max_rtt != 0) - rtt_stats_->OnRttUpdate(max_rtt); + if (max_rtt_ms > 0) { + rtt_stats_->OnRttUpdate(max_rtt_ms); + } } // Verify receiver reports are delivered and the reported sequence number @@ -192,7 +191,7 @@ void ModuleRtpRtcpImpl::Process() { if (rtcp_sender_.TimeToSendRTCPReport()) rtcp_sender_.SendRTCP(GetFeedbackState(), kRtcpReport); - if (TMMBR() && rtcp_receiver_.UpdateTmmbrTimers()) { + if (rtcp_sender_.TMMBR() && rtcp_receiver_.UpdateTmmbrTimers()) { rtcp_receiver_.NotifyTmmbrUpdated(); } } @@ -312,8 +311,19 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() { } state.receiver = &rtcp_receiver_; - LastReceivedNTP(&state.last_rr_ntp_secs, &state.last_rr_ntp_frac, - &state.remote_sr); + uint32_t received_ntp_secs = 0; + uint32_t received_ntp_frac = 0; + state.remote_sr = 0; + if (rtcp_receiver_.NTP(&received_ntp_secs, &received_ntp_frac, + /*rtcp_arrival_time_secs=*/&state.last_rr_ntp_secs, + /*rtcp_arrival_time_frac=*/&state.last_rr_ntp_frac, + /*rtcp_timestamp=*/nullptr, + /*remote_sender_packet_count=*/nullptr, + /*remote_sender_octet_count=*/nullptr, + /*remote_sender_reports_count=*/nullptr)) { + state.remote_sr = ((received_ntp_secs & 0x0000ffff) << 16) + + ((received_ntp_frac & 0xffff0000) >> 16); + } state.last_xr_rtis = 
rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); @@ -326,9 +336,7 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() { int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) { if (rtcp_sender_.Sending() != sending) { // Sends RTCP BYE when going from true to false - if (rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending) != 0) { - RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE"; - } + rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending); } return 0; } @@ -467,19 +475,6 @@ int32_t ModuleRtpRtcpImpl::SetCNAME(const char* c_name) { return rtcp_sender_.SetCNAME(c_name); } -int32_t ModuleRtpRtcpImpl::AddMixedCNAME(uint32_t ssrc, const char* c_name) { - return rtcp_sender_.AddMixedCNAME(ssrc, c_name); -} - -int32_t ModuleRtpRtcpImpl::RemoveMixedCNAME(const uint32_t ssrc) { - return rtcp_sender_.RemoveMixedCNAME(ssrc); -} - -int32_t ModuleRtpRtcpImpl::RemoteCNAME(const uint32_t remote_ssrc, - char c_name[RTCP_CNAME_SIZE]) const { - return rtcp_receiver_.CNAME(remote_ssrc, c_name); -} - int32_t ModuleRtpRtcpImpl::RemoteNTP(uint32_t* received_ntpsecs, uint32_t* received_ntpfrac, uint32_t* rtcp_arrival_time_secs, @@ -487,7 +482,10 @@ int32_t ModuleRtpRtcpImpl::RemoteNTP(uint32_t* received_ntpsecs, uint32_t* rtcp_timestamp) const { return rtcp_receiver_.NTP(received_ntpsecs, received_ntpfrac, rtcp_arrival_time_secs, rtcp_arrival_time_frac, - rtcp_timestamp) + rtcp_timestamp, + /*remote_sender_packet_count=*/nullptr, + /*remote_sender_octet_count=*/nullptr, + /*remote_sender_reports_count=*/nullptr) ? 
0 : -1; } @@ -527,48 +525,6 @@ int32_t ModuleRtpRtcpImpl::SendRTCP(RTCPPacketType packet_type) { return rtcp_sender_.SendRTCP(GetFeedbackState(), packet_type); } -int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData( - const uint8_t sub_type, - const uint32_t name, - const uint8_t* data, - const uint16_t length) { - RTC_NOTREACHED() << "Not implemented"; - return -1; -} - -void ModuleRtpRtcpImpl::SetRtcpXrRrtrStatus(bool enable) { - rtcp_receiver_.SetRtcpXrRrtrStatus(enable); - rtcp_sender_.SendRtcpXrReceiverReferenceTime(enable); -} - -bool ModuleRtpRtcpImpl::RtcpXrRrtrStatus() const { - return rtcp_sender_.RtcpXrReceiverReferenceTime(); -} - -// TODO(asapersson): Replace this method with the one below. -int32_t ModuleRtpRtcpImpl::DataCountersRTP(size_t* bytes_sent, - uint32_t* packets_sent) const { - StreamDataCounters rtp_stats; - StreamDataCounters rtx_stats; - rtp_sender_->packet_sender.GetDataCounters(&rtp_stats, &rtx_stats); - - if (bytes_sent) { - // TODO(http://crbug.com/webrtc/10525): Bytes sent should only include - // payload bytes, not header and padding bytes. - *bytes_sent = rtp_stats.transmitted.payload_bytes + - rtp_stats.transmitted.padding_bytes + - rtp_stats.transmitted.header_bytes + - rtx_stats.transmitted.payload_bytes + - rtx_stats.transmitted.padding_bytes + - rtx_stats.transmitted.header_bytes; - } - if (packets_sent) { - *packets_sent = - rtp_stats.transmitted.packets + rtx_stats.transmitted.packets; - } - return 0; -} - void ModuleRtpRtcpImpl::GetSendStreamDataCounters( StreamDataCounters* rtp_counters, StreamDataCounters* rtx_counters) const { @@ -576,16 +532,31 @@ void ModuleRtpRtcpImpl::GetSendStreamDataCounters( } // Received RTCP report. 
-int32_t ModuleRtpRtcpImpl::RemoteRTCPStat( - std::vector* receive_blocks) const { - return rtcp_receiver_.StatisticsReceived(receive_blocks); -} - std::vector ModuleRtpRtcpImpl::GetLatestReportBlockData() const { return rtcp_receiver_.GetLatestReportBlockData(); } +absl::optional +ModuleRtpRtcpImpl::GetSenderReportStats() const { + SenderReportStats stats; + uint32_t remote_timestamp_secs; + uint32_t remote_timestamp_frac; + uint32_t arrival_timestamp_secs; + uint32_t arrival_timestamp_frac; + if (rtcp_receiver_.NTP(&remote_timestamp_secs, &remote_timestamp_frac, + &arrival_timestamp_secs, &arrival_timestamp_frac, + /*rtcp_timestamp=*/nullptr, &stats.packets_sent, + &stats.bytes_sent, &stats.reports_count)) { + stats.last_remote_timestamp.Set(remote_timestamp_secs, + remote_timestamp_frac); + stats.last_arrival_timestamp.Set(arrival_timestamp_secs, + arrival_timestamp_frac); + return stats; + } + return absl::nullopt; +} + // (REMB) Receiver Estimated Max Bitrate. void ModuleRtpRtcpImpl::SetRemb(int64_t bitrate_bps, std::vector ssrcs) { @@ -600,12 +571,6 @@ void ModuleRtpRtcpImpl::SetExtmapAllowMixed(bool extmap_allow_mixed) { rtp_sender_->packet_generator.SetExtmapAllowMixed(extmap_allow_mixed); } -int32_t ModuleRtpRtcpImpl::RegisterSendRtpHeaderExtension( - const RTPExtensionType type, - const uint8_t id) { - return rtp_sender_->packet_generator.RegisterRtpHeaderExtension(type, id); -} - void ModuleRtpRtcpImpl::RegisterRtpHeaderExtension(absl::string_view uri, int id) { bool registered = @@ -622,15 +587,6 @@ void ModuleRtpRtcpImpl::DeregisterSendRtpHeaderExtension( rtp_sender_->packet_generator.DeregisterRtpHeaderExtension(uri); } -// (TMMBR) Temporary Max Media Bit Rate. 
-bool ModuleRtpRtcpImpl::TMMBR() const { - return rtcp_sender_.TMMBR(); -} - -void ModuleRtpRtcpImpl::SetTMMBRStatus(const bool enable) { - rtcp_sender_.SetTMMBRStatus(enable); -} - void ModuleRtpRtcpImpl::SetTmmbn(std::vector bounding_set) { rtcp_sender_.SetTmmbn(std::move(bounding_set)); } @@ -772,23 +728,6 @@ void ModuleRtpRtcpImpl::OnReceivedRtcpReportBlocks( } } -bool ModuleRtpRtcpImpl::LastReceivedNTP( - uint32_t* rtcp_arrival_time_secs, // When we got the last report. - uint32_t* rtcp_arrival_time_frac, - uint32_t* remote_sr) const { - // Remote SR: NTP inside the last received (mid 16 bits from sec and frac). - uint32_t ntp_secs = 0; - uint32_t ntp_frac = 0; - - if (!rtcp_receiver_.NTP(&ntp_secs, &ntp_frac, rtcp_arrival_time_secs, - rtcp_arrival_time_frac, NULL)) { - return false; - } - *remote_sr = - ((ntp_secs & 0x0000ffff) << 16) + ((ntp_frac & 0xffff0000) >> 16); - return true; -} - void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) { { MutexLock lock(&mutex_rtt_); @@ -817,15 +756,4 @@ const RTPSender* ModuleRtpRtcpImpl::RtpSender() const { return rtp_sender_ ? 
&rtp_sender_->packet_generator : nullptr; } -DataRate ModuleRtpRtcpImpl::SendRate() const { - RTC_DCHECK(rtp_sender_); - return rtp_sender_->packet_sender.GetSendRates().Sum(); -} - -DataRate ModuleRtpRtcpImpl::NackOverheadRate() const { - RTC_DCHECK(rtp_sender_); - return rtp_sender_->packet_sender - .GetSendRates()[RtpPacketMediaType::kRetransmission]; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h index 7f7df174f..5bcabc57b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -73,8 +73,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { void SetExtmapAllowMixed(bool extmap_allow_mixed) override; // Register RTP header extension. - int32_t RegisterSendRtpHeaderExtension(RTPExtensionType type, - uint8_t id) override; void RegisterRtpHeaderExtension(absl::string_view uri, int id) override; int32_t DeregisterSendRtpHeaderExtension(RTPExtensionType type) override; void DeregisterSendRtpHeaderExtension(absl::string_view uri) override; @@ -166,10 +164,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { // Set RTCP CName. int32_t SetCNAME(const char* c_name) override; - // Get remote CName. - int32_t RemoteCNAME(uint32_t remote_ssrc, - char c_name[RTCP_CNAME_SIZE]) const override; - // Get remote NTP. int32_t RemoteNTP(uint32_t* received_ntp_secs, uint32_t* received_ntp_frac, @@ -177,10 +171,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { uint32_t* rtcp_arrival_time_frac, uint32_t* rtcp_timestamp) const override; - int32_t AddMixedCNAME(uint32_t ssrc, const char* c_name) override; - - int32_t RemoveMixedCNAME(uint32_t ssrc) override; - // Get RoundTripTime. 
int32_t RTT(uint32_t remote_ssrc, int64_t* rtt, @@ -194,32 +184,21 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { // Normal SR and RR are triggered via the process function. int32_t SendRTCP(RTCPPacketType rtcpPacketType) override; - // Statistics of the amount of data sent and received. - int32_t DataCountersRTP(size_t* bytes_sent, - uint32_t* packets_sent) const override; - void GetSendStreamDataCounters( StreamDataCounters* rtp_counters, StreamDataCounters* rtx_counters) const override; - // Get received RTCP report, report block. - int32_t RemoteRTCPStat( - std::vector* receive_blocks) const override; // A snapshot of the most recent Report Block with additional data of // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. // Within this list, the ReportBlockData::RTCPReportBlock::source_ssrc(), // which is the SSRC of the corresponding outbound RTP stream, is unique. std::vector GetLatestReportBlockData() const override; + absl::optional GetSenderReportStats() const override; // (REMB) Receiver Estimated Max Bitrate. void SetRemb(int64_t bitrate_bps, std::vector ssrcs) override; void UnsetRemb() override; - // (TMMBR) Temporary Max Media Bit Rate. - bool TMMBR() const override; - - void SetTMMBRStatus(bool enable) override; - void SetTmmbn(std::vector bounding_set) override; size_t MaxRtpPacketSize() const override; @@ -238,32 +217,15 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { // requests. void SetStorePacketsStatus(bool enable, uint16_t number_to_store) override; - bool StorePackets() const override; - void SendCombinedRtcpPacket( std::vector> rtcp_packets) override; - // (APP) Application specific data. - int32_t SetRTCPApplicationSpecificData(uint8_t sub_type, - uint32_t name, - const uint8_t* data, - uint16_t length) override; - - // (XR) Receiver reference time report. 
- void SetRtcpXrRrtrStatus(bool enable) override; - - bool RtcpXrRrtrStatus() const override; - // Video part. int32_t SendLossNotification(uint16_t last_decoded_seq_num, uint16_t last_received_seq_num, bool decodability_flag, bool buffering_allowed) override; - bool LastReceivedNTP(uint32_t* NTPsecs, - uint32_t* NTPfrac, - uint32_t* remote_sr) const; - RtpSendRates GetSendRates() const override; void OnReceivedNack( @@ -300,10 +262,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { Clock* clock() const { return clock_; } - // TODO(sprang): Remove when usage is gone. - DataRate SendRate() const; - DataRate NackOverheadRate() const; - private: FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, Rtt); FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, RttForReceiverOnly); @@ -326,6 +284,12 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { bool TimeToSendFullNackList(int64_t now) const; + // Returns true if the module is configured to store packets. + bool StorePackets() const; + + // Returns current Receiver Reference Time Report (RTTR) status. + bool RtcpXrRrtrStatus() const; + std::unique_ptr rtp_sender_; RTCPSender rtcp_sender_; @@ -348,7 +312,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp, public RTCPReceiver::ModuleRtpRtcp { // The processed RTT from RtcpRttStats. 
mutable Mutex mutex_rtt_; - int64_t rtt_ms_; + int64_t rtt_ms_ RTC_GUARDED_BY(mutex_rtt_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc index 88ede3d43..e526bac65 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc @@ -24,6 +24,7 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "system_wrappers/include/ntp_time.h" #ifdef _WIN32 // Disable warning C4355: 'this' : used in base member initializer list. @@ -260,8 +261,19 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() { } state.receiver = &rtcp_receiver_; - LastReceivedNTP(&state.last_rr_ntp_secs, &state.last_rr_ntp_frac, - &state.remote_sr); + uint32_t received_ntp_secs = 0; + uint32_t received_ntp_frac = 0; + state.remote_sr = 0; + if (rtcp_receiver_.NTP(&received_ntp_secs, &received_ntp_frac, + /*rtcp_arrival_time_secs=*/&state.last_rr_ntp_secs, + /*rtcp_arrival_time_frac=*/&state.last_rr_ntp_frac, + /*rtcp_timestamp=*/nullptr, + /*remote_sender_packet_count=*/nullptr, + /*remote_sender_octet_count=*/nullptr, + /*remote_sender_reports_count=*/nullptr)) { + state.remote_sr = ((received_ntp_secs & 0x0000ffff) << 16) + + ((received_ntp_frac & 0xffff0000) >> 16); + } state.last_xr_rtis = rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); @@ -274,9 +286,7 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() { int32_t ModuleRtpRtcpImpl2::SetSendingStatus(const bool sending) { if (rtcp_sender_.Sending() != sending) { // Sends RTCP BYE when going from true to false - if (rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending) != 0) { - RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE"; - } + rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending); } return 0; } 
@@ -436,7 +446,10 @@ int32_t ModuleRtpRtcpImpl2::RemoteNTP(uint32_t* received_ntpsecs, uint32_t* rtcp_timestamp) const { return rtcp_receiver_.NTP(received_ntpsecs, received_ntpfrac, rtcp_arrival_time_secs, rtcp_arrival_time_frac, - rtcp_timestamp) + rtcp_timestamp, + /*remote_sender_packet_count=*/nullptr, + /*remote_sender_octet_count=*/nullptr, + /*remote_sender_reports_count=*/nullptr) ? 0 : -1; } @@ -479,15 +492,6 @@ int32_t ModuleRtpRtcpImpl2::SendRTCP(RTCPPacketType packet_type) { return rtcp_sender_.SendRTCP(GetFeedbackState(), packet_type); } -void ModuleRtpRtcpImpl2::SetRtcpXrRrtrStatus(bool enable) { - rtcp_receiver_.SetRtcpXrRrtrStatus(enable); - rtcp_sender_.SendRtcpXrReceiverReferenceTime(enable); -} - -bool ModuleRtpRtcpImpl2::RtcpXrRrtrStatus() const { - return rtcp_sender_.RtcpXrReceiverReferenceTime(); -} - void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( StreamDataCounters* rtp_counters, StreamDataCounters* rtx_counters) const { @@ -495,16 +499,31 @@ void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( } // Received RTCP report. 
-int32_t ModuleRtpRtcpImpl2::RemoteRTCPStat( - std::vector* receive_blocks) const { - return rtcp_receiver_.StatisticsReceived(receive_blocks); -} - std::vector ModuleRtpRtcpImpl2::GetLatestReportBlockData() const { return rtcp_receiver_.GetLatestReportBlockData(); } +absl::optional +ModuleRtpRtcpImpl2::GetSenderReportStats() const { + SenderReportStats stats; + uint32_t remote_timestamp_secs; + uint32_t remote_timestamp_frac; + uint32_t arrival_timestamp_secs; + uint32_t arrival_timestamp_frac; + if (rtcp_receiver_.NTP(&remote_timestamp_secs, &remote_timestamp_frac, + &arrival_timestamp_secs, &arrival_timestamp_frac, + /*rtcp_timestamp=*/nullptr, &stats.packets_sent, + &stats.bytes_sent, &stats.reports_count)) { + stats.last_remote_timestamp.Set(remote_timestamp_secs, + remote_timestamp_frac); + stats.last_arrival_timestamp.Set(arrival_timestamp_secs, + arrival_timestamp_frac); + return stats; + } + return absl::nullopt; +} + // (REMB) Receiver Estimated Max Bitrate. void ModuleRtpRtcpImpl2::SetRemb(int64_t bitrate_bps, std::vector ssrcs) { @@ -677,23 +696,6 @@ void ModuleRtpRtcpImpl2::OnReceivedRtcpReportBlocks( } } -bool ModuleRtpRtcpImpl2::LastReceivedNTP( - uint32_t* rtcp_arrival_time_secs, // When we got the last report. - uint32_t* rtcp_arrival_time_frac, - uint32_t* remote_sr) const { - // Remote SR: NTP inside the last received (mid 16 bits from sec and frac). 
- uint32_t ntp_secs = 0; - uint32_t ntp_frac = 0; - - if (!rtcp_receiver_.NTP(&ntp_secs, &ntp_frac, rtcp_arrival_time_secs, - rtcp_arrival_time_frac, NULL)) { - return false; - } - *remote_sr = - ((ntp_secs & 0x0000ffff) << 16) + ((ntp_frac & 0xffff0000) >> 16); - return true; -} - void ModuleRtpRtcpImpl2::set_rtt_ms(int64_t rtt_ms) { RTC_DCHECK_RUN_ON(worker_queue_); { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h index 9eb7e3a6d..00f6ff161 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -21,6 +21,7 @@ #include "absl/types/optional.h" #include "api/rtp_headers.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/video/video_bitrate_allocation.h" #include "modules/include/module_fec_types.h" @@ -36,7 +37,7 @@ #include "modules/rtp_rtcp/source/rtp_sender_egress.h" #include "rtc_base/gtest_prod_util.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" @@ -199,14 +200,12 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, StreamDataCounters* rtp_counters, StreamDataCounters* rtx_counters) const override; - // Get received RTCP report, report block. - int32_t RemoteRTCPStat( - std::vector* receive_blocks) const override; // A snapshot of the most recent Report Block with additional data of // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. // Within this list, the ReportBlockData::RTCPReportBlock::source_ssrc(), // which is the SSRC of the corresponding outbound RTP stream, is unique. 
std::vector GetLatestReportBlockData() const override; + absl::optional GetSenderReportStats() const override; // (REMB) Receiver Estimated Max Bitrate. void SetRemb(int64_t bitrate_bps, std::vector ssrcs) override; @@ -230,26 +229,15 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, // requests. void SetStorePacketsStatus(bool enable, uint16_t number_to_store) override; - bool StorePackets() const override; - void SendCombinedRtcpPacket( std::vector> rtcp_packets) override; - // (XR) Receiver reference time report. - void SetRtcpXrRrtrStatus(bool enable) override; - - bool RtcpXrRrtrStatus() const override; - // Video part. int32_t SendLossNotification(uint16_t last_decoded_seq_num, uint16_t last_received_seq_num, bool decodability_flag, bool buffering_allowed) override; - bool LastReceivedNTP(uint32_t* NTPsecs, - uint32_t* NTPfrac, - uint32_t* remote_sr) const; - RtpSendRates GetSendRates() const override; void OnReceivedNack( @@ -291,8 +279,11 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, // check if we need to send RTCP report, send TMMBR updates and fire events. void PeriodicUpdate(); + // Returns true if the module is configured to store packets. + bool StorePackets() const; + TaskQueueBase* const worker_queue_; - SequenceChecker process_thread_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker process_thread_checker_; std::unique_ptr rtp_sender_; @@ -316,7 +307,7 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, // The processed RTT from RtcpRttStats. 
mutable Mutex mutex_rtt_; - int64_t rtt_ms_; + int64_t rtt_ms_ RTC_GUARDED_BY(mutex_rtt_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h index 3bd5d4705..457a99313 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -28,6 +28,7 @@ #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "modules/rtp_rtcp/source/video_fec_generator.h" #include "rtc_base/constructor_magic.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -76,13 +77,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { RtcpRttStats* rtt_stats = nullptr; RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr; // Called on receipt of RTCP report block from remote side. - // TODO(bugs.webrtc.org/10678): Remove RtcpStatisticsCallback in - // favor of ReportBlockDataObserver. // TODO(bugs.webrtc.org/10679): Consider whether we want to use // only getters or only callbacks. If we decide on getters, the // ReportBlockDataObserver should also be removed in favor of // GetLatestReportBlockData(). - RtcpStatisticsCallback* rtcp_statistics_callback = nullptr; RtcpCnameCallback* rtcp_cname_callback = nullptr; ReportBlockDataObserver* report_block_data_observer = nullptr; @@ -144,10 +142,35 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // overhead. bool enable_rtx_padding_prioritization = true; + // Estimate RTT as non-sender as described in + // https://tools.ietf.org/html/rfc3611#section-4.4 and #section-4.5 + bool non_sender_rtt_measurement = false; + private: RTC_DISALLOW_COPY_AND_ASSIGN(Configuration); }; + // Stats for RTCP sender reports (SR) for a specific SSRC. + // Refer to https://tools.ietf.org/html/rfc3550#section-6.4.1. 
+ struct SenderReportStats { + // Arrival NPT timestamp for the last received RTCP SR. + NtpTime last_arrival_timestamp; + // Received (a.k.a., remote) NTP timestamp for the last received RTCP SR. + NtpTime last_remote_timestamp; + // Total number of RTP data packets transmitted by the sender since starting + // transmission up until the time this SR packet was generated. The count + // should be reset if the sender changes its SSRC identifier. + uint32_t packets_sent; + // Total number of payload octets (i.e., not including header or padding) + // transmitted in RTP data packets by the sender since starting transmission + // up until the time this SR packet was generated. The count should be reset + // if the sender changes its SSRC identifier. + uint64_t bytes_sent; + // Total number of RTCP SR blocks received. + // https://www.w3.org/TR/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-reportssent. + uint64_t reports_count; + }; + // ************************************************************************** // Receiver functions // ************************************************************************** @@ -357,23 +380,13 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { StreamDataCounters* rtp_counters, StreamDataCounters* rtx_counters) const = 0; - // Returns received RTCP report block. - // Returns -1 on failure else 0. - // TODO(https://crbug.com/webrtc/10678): Remove this in favor of - // GetLatestReportBlockData(). - virtual int32_t RemoteRTCPStat( - std::vector* receive_blocks) const = 0; // A snapshot of Report Blocks with additional data of interest to statistics. // Within this list, the sender-source SSRC pair is unique and per-pair the // ReportBlockData represents the latest Report Block that was received for // that pair. virtual std::vector GetLatestReportBlockData() const = 0; - - // (XR) Sets Receiver Reference Time Report (RTTR) status. 
- virtual void SetRtcpXrRrtrStatus(bool enable) = 0; - - // Returns current Receiver Reference Time Report (RTTR) status. - virtual bool RtcpXrRrtrStatus() const = 0; + // Returns stats based on the received RTCP SRs. + virtual absl::optional GetSenderReportStats() const = 0; // (REMB) Receiver Estimated Max Bitrate. // Schedules sending REMB on next and following sender/receiver reports. @@ -399,9 +412,6 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // requests. virtual void SetStorePacketsStatus(bool enable, uint16_t numberToStore) = 0; - // Returns true if the module is configured to store packets. - virtual bool StorePackets() const = 0; - virtual void SetVideoBitrateAllocation( const VideoBitrateAllocation& bitrate) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc index 584fced39..8435e5f33 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc @@ -42,7 +42,6 @@ constexpr size_t kMaxPaddingLength = 224; constexpr size_t kMinAudioPaddingLength = 50; constexpr size_t kRtpHeaderLength = 12; constexpr uint16_t kMaxInitRtpSeqNumber = 32767; // 2^15 -1. -constexpr uint32_t kTimestampTicksPerMs = 90; // Min size needed to get payload padding from packet history. constexpr int kMinPayloadPaddingBytes = 50; @@ -122,6 +121,7 @@ bool IsNonVolatile(RTPExtensionType type) { case kRtpExtensionVideoTiming: case kRtpExtensionRepairedRtpStreamId: case kRtpExtensionColorSpace: + case kRtpExtensionVideoFrameTrackingId: return false; case kRtpExtensionNone: case kRtpExtensionNumberOfExtensions: @@ -170,28 +170,25 @@ RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config, paced_sender_(packet_sender), sending_media_(true), // Default to sending media. max_packet_size_(IP_PACKET_SIZE - 28), // Default is IP-v4/UDP. 
- last_payload_type_(-1), rtp_header_extension_map_(config.extmap_allow_mixed), - max_media_packet_header_(kRtpHeaderSize), - max_padding_fec_packet_header_(kRtpHeaderSize), // RTP variables - sequence_number_forced_(false), + sequencer_(config.local_media_ssrc, + config.rtx_send_ssrc.value_or(config.local_media_ssrc), + /*require_marker_before_media_padding_=*/!config.audio, + config.clock), always_send_mid_and_rid_(config.always_send_mid_and_rid), ssrc_has_acked_(false), rtx_ssrc_has_acked_(false), - last_rtp_timestamp_(0), - capture_time_ms_(0), - last_timestamp_time_ms_(0), - last_packet_marker_bit_(false), csrcs_(), rtx_(kRtxOff), supports_bwe_extension_(false), retransmission_rate_limiter_(config.retransmission_rate_limiter) { + UpdateHeaderSizes(); // This random initialization is not intended to be cryptographic strong. timestamp_offset_ = random_.Rand(); // Random start, 16 bits. Can't be 0. - sequence_number_rtx_ = random_.Rand(1, kMaxInitRtpSeqNumber); - sequence_number_ = random_.Rand(1, kMaxInitRtpSeqNumber); + sequencer_.set_rtx_sequence_number(random_.Rand(1, kMaxInitRtpSeqNumber)); + sequencer_.set_media_sequence_number(random_.Rand(1, kMaxInitRtpSeqNumber)); RTC_DCHECK(paced_sender_); RTC_DCHECK(packet_history_); @@ -229,15 +226,6 @@ void RTPSender::SetExtmapAllowMixed(bool extmap_allow_mixed) { rtp_header_extension_map_.SetExtmapAllowMixed(extmap_allow_mixed); } -int32_t RTPSender::RegisterRtpHeaderExtension(RTPExtensionType type, - uint8_t id) { - MutexLock lock(&send_mutex_); - bool registered = rtp_header_extension_map_.RegisterByType(id, type); - supports_bwe_extension_ = HasBweExtension(rtp_header_extension_map_); - UpdateHeaderSizes(); - return registered ? 
0 : -1; -} - bool RTPSender::RegisterRtpHeaderExtension(absl::string_view uri, int id) { MutexLock lock(&send_mutex_); bool registered = rtp_header_extension_map_.RegisterByUri(id, uri); @@ -360,7 +348,11 @@ void RTPSender::OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number) { void RTPSender::OnReceivedAckOnRtxSsrc( int64_t extended_highest_sequence_number) { MutexLock lock(&send_mutex_); + bool update_required = !rtx_ssrc_has_acked_; rtx_ssrc_has_acked_ = true; + if (update_required) { + UpdateHeaderSizes(); + } } void RTPSender::OnReceivedNack( @@ -452,23 +444,11 @@ std::vector> RTPSender::GeneratePadding( std::make_unique(&rtp_header_extension_map_); padding_packet->set_packet_type(RtpPacketMediaType::kPadding); padding_packet->SetMarker(false); - padding_packet->SetTimestamp(last_rtp_timestamp_); - padding_packet->set_capture_time_ms(capture_time_ms_); if (rtx_ == kRtxOff) { - if (last_payload_type_ == -1) { - break; - } - // Without RTX we can't send padding in the middle of frames. - // For audio marker bits doesn't mark the end of a frame and frames - // are usually a single packet, so for now we don't apply this rule - // for audio. - if (!audio_configured_ && !last_packet_marker_bit_) { - break; - } - padding_packet->SetSsrc(ssrc_); - padding_packet->SetPayloadType(last_payload_type_); - padding_packet->SetSequenceNumber(sequence_number_++); + if (!sequencer_.Sequence(*padding_packet)) { + break; + } } else { // Without abs-send-time or transport sequence number a media packet // must be sent before padding so that the timestamps used for @@ -479,24 +459,13 @@ std::vector> RTPSender::GeneratePadding( TransportSequenceNumber::kId))) { break; } - // Only change the timestamp of padding packets sent over RTX. - // Padding only packets over RTP has to be sent as part of a media - // frame (and therefore the same timestamp). 
- int64_t now_ms = clock_->TimeInMilliseconds(); - if (last_timestamp_time_ms_ > 0) { - padding_packet->SetTimestamp(padding_packet->Timestamp() + - (now_ms - last_timestamp_time_ms_) * - kTimestampTicksPerMs); - if (padding_packet->capture_time_ms() > 0) { - padding_packet->set_capture_time_ms( - padding_packet->capture_time_ms() + - (now_ms - last_timestamp_time_ms_)); - } - } + RTC_DCHECK(rtx_ssrc_); padding_packet->SetSsrc(*rtx_ssrc_); - padding_packet->SetSequenceNumber(sequence_number_rtx_++); padding_packet->SetPayloadType(rtx_payload_type_map_.begin()->second); + if (!sequencer_.Sequence(*padding_packet)) { + break; + } } if (rtp_header_extension_map_.IsRegistered(TransportSequenceNumber::kId)) { @@ -561,13 +530,6 @@ size_t RTPSender::ExpectedPerPacketOverhead() const { return max_media_packet_header_; } -uint16_t RTPSender::AllocateSequenceNumber(uint16_t packets_to_send) { - MutexLock lock(&send_mutex_); - uint16_t first_allocated_sequence_number = sequence_number_; - sequence_number_ += packets_to_send; - return first_allocated_sequence_number; -} - std::unique_ptr RTPSender::AllocatePacket() const { MutexLock lock(&send_mutex_); // TODO(danilchap): Find better motivator and value for extra capacity. @@ -614,18 +576,18 @@ bool RTPSender::AssignSequenceNumber(RtpPacketToSend* packet) { MutexLock lock(&send_mutex_); if (!sending_media_) return false; - RTC_DCHECK(packet->Ssrc() == ssrc_); - packet->SetSequenceNumber(sequence_number_++); + return sequencer_.Sequence(*packet); +} - // Remember marker bit to determine if padding can be inserted with - // sequence number following |packet|. - last_packet_marker_bit_ = packet->Marker(); - // Remember payload type to use in the padding packet if rtx is disabled. - last_payload_type_ = packet->PayloadType(); - // Save timestamps to generate timestamp field and extensions for the padding. 
- last_rtp_timestamp_ = packet->Timestamp(); - last_timestamp_time_ms_ = clock_->TimeInMilliseconds(); - capture_time_ms_ = packet->capture_time_ms(); +bool RTPSender::AssignSequenceNumbersAndStoreLastPacketState( + rtc::ArrayView> packets) { + RTC_DCHECK(!packets.empty()); + MutexLock lock(&send_mutex_); + if (!sending_media_) + return false; + for (auto& packet : packets) { + sequencer_.Sequence(*packet); + } return true; } @@ -680,11 +642,10 @@ void RTPSender::SetSequenceNumber(uint16_t seq) { bool updated_sequence_number = false; { MutexLock lock(&send_mutex_); - sequence_number_forced_ = true; - if (sequence_number_ != seq) { + if (sequencer_.media_sequence_number() != seq) { updated_sequence_number = true; } - sequence_number_ = seq; + sequencer_.set_media_sequence_number(seq); } if (updated_sequence_number) { @@ -696,7 +657,7 @@ void RTPSender::SetSequenceNumber(uint16_t seq) { uint16_t RTPSender::SequenceNumber() const { MutexLock lock(&send_mutex_); - return sequence_number_; + return sequencer_.media_sequence_number(); } static void CopyHeaderAndExtensionsToRtxPacket(const RtpPacketToSend& packet, @@ -769,12 +730,12 @@ std::unique_ptr RTPSender::BuildRtxPacket( rtx_packet->SetPayloadType(kv->second); - // Replace sequence number. - rtx_packet->SetSequenceNumber(sequence_number_rtx_++); - // Replace SSRC. rtx_packet->SetSsrc(*rtx_ssrc_); + // Replace sequence number. + sequencer_.Sequence(*rtx_packet); + CopyHeaderAndExtensionsToRtxPacket(packet, rtx_packet.get()); // RTX packets are sent on an SSRC different from the main media, so the @@ -809,8 +770,8 @@ std::unique_ptr RTPSender::BuildRtxPacket( auto payload = packet.payload(); memcpy(rtx_payload + kRtxHeaderSize, payload.data(), payload.size()); - // Add original application data. - rtx_packet->set_application_data(packet.application_data()); + // Add original additional data. + rtx_packet->set_additional_data(packet.additional_data()); // Copy capture time so e.g. TransmissionOffset is correctly set. 
rtx_packet->set_capture_time_ms(packet.capture_time_ms()); @@ -820,12 +781,9 @@ std::unique_ptr RTPSender::BuildRtxPacket( void RTPSender::SetRtpState(const RtpState& rtp_state) { MutexLock lock(&send_mutex_); - sequence_number_ = rtp_state.sequence_number; - sequence_number_forced_ = true; + timestamp_offset_ = rtp_state.start_timestamp; - last_rtp_timestamp_ = rtp_state.timestamp; - capture_time_ms_ = rtp_state.capture_time_ms; - last_timestamp_time_ms_ = rtp_state.last_timestamp_time_ms; + sequencer_.SetRtpState(rtp_state); ssrc_has_acked_ = rtp_state.ssrc_has_acked; UpdateHeaderSizes(); } @@ -834,18 +792,15 @@ RtpState RTPSender::GetRtpState() const { MutexLock lock(&send_mutex_); RtpState state; - state.sequence_number = sequence_number_; state.start_timestamp = timestamp_offset_; - state.timestamp = last_rtp_timestamp_; - state.capture_time_ms = capture_time_ms_; - state.last_timestamp_time_ms = last_timestamp_time_ms_; state.ssrc_has_acked = ssrc_has_acked_; + sequencer_.PupulateRtpState(state); return state; } void RTPSender::SetRtxRtpState(const RtpState& rtp_state) { MutexLock lock(&send_mutex_); - sequence_number_rtx_ = rtp_state.sequence_number; + sequencer_.set_rtx_sequence_number(rtp_state.sequence_number); rtx_ssrc_has_acked_ = rtp_state.ssrc_has_acked; } @@ -853,18 +808,13 @@ RtpState RTPSender::GetRtxRtpState() const { MutexLock lock(&send_mutex_); RtpState state; - state.sequence_number = sequence_number_rtx_; + state.sequence_number = sequencer_.rtx_sequence_number(); state.start_timestamp = timestamp_offset_; state.ssrc_has_acked = rtx_ssrc_has_acked_; return state; } -int64_t RTPSender::LastTimestampTimeMs() const { - MutexLock lock(&send_mutex_); - return last_timestamp_time_ms_; -} - void RTPSender::UpdateHeaderSizes() { const size_t rtp_header_length = kRtpHeaderLength + sizeof(uint32_t) * csrcs_.size(); @@ -874,10 +824,12 @@ void RTPSender::UpdateHeaderSizes() { rtp_header_extension_map_); // RtpStreamId and Mid are treated specially in 
that we check if they - // currently are being sent. RepairedRtpStreamId is still ignored since we - // assume RTX will not make up large enough bitrate to treat overhead - // differently. - const bool send_mid_rid = always_send_mid_and_rid_ || !ssrc_has_acked_; + // currently are being sent. RepairedRtpStreamId is ignored because it is sent + // instead of RtpStreamId on rtx packets and require the same size. + const bool send_mid_rid_on_rtx = + rtx_ssrc_.has_value() && !rtx_ssrc_has_acked_; + const bool send_mid_rid = + always_send_mid_and_rid_ || !ssrc_has_acked_ || send_mid_rid_on_rtx; std::vector non_volatile_extensions; for (auto& extension : audio_configured_ ? AudioExtensionSizes() : VideoExtensionSizes()) { @@ -901,5 +853,9 @@ void RTPSender::UpdateHeaderSizes() { max_media_packet_header_ = rtp_header_length + RtpHeaderExtensionSize(non_volatile_extensions, rtp_header_extension_map_); + // Reserve extra bytes if packet might be resent in an rtx packet. + if (rtx_ssrc_.has_value()) { + max_media_packet_header_ += kRtxHeaderSize; + } } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h index 1580259b3..fbf135049 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.h @@ -26,10 +26,10 @@ #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_packet_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/packet_sequencer.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "rtc_base/deprecation.h" #include "rtc_base/random.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" @@ -78,8 +78,6 @@ class 
RTPSender { RTC_LOCKS_EXCLUDED(send_mutex_); // RTP header extension - int32_t RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id) - RTC_LOCKS_EXCLUDED(send_mutex_); bool RegisterRtpHeaderExtension(absl::string_view uri, int id) RTC_LOCKS_EXCLUDED(send_mutex_); bool IsRtpHeaderExtensionRegistered(RTPExtensionType type) const @@ -139,13 +137,16 @@ class RTPSender { // Return false if sending was turned off. bool AssignSequenceNumber(RtpPacketToSend* packet) RTC_LOCKS_EXCLUDED(send_mutex_); + // Same as AssignSequenceNumber(), but applies sequence numbers atomically to + // a batch of packets. + bool AssignSequenceNumbersAndStoreLastPacketState( + rtc::ArrayView> packets) + RTC_LOCKS_EXCLUDED(send_mutex_); // Maximum header overhead per fec/padding packet. size_t FecOrPaddingPacketMaxRtpHeaderLength() const RTC_LOCKS_EXCLUDED(send_mutex_); // Expected header overhead per media packet. size_t ExpectedPerPacketOverhead() const RTC_LOCKS_EXCLUDED(send_mutex_); - uint16_t AllocateSequenceNumber(uint16_t packets_to_send) - RTC_LOCKS_EXCLUDED(send_mutex_); // Including RTP headers. 
size_t MaxRtpPacketSize() const RTC_LOCKS_EXCLUDED(send_mutex_); @@ -171,8 +172,6 @@ class RTPSender { RTC_LOCKS_EXCLUDED(send_mutex_); RtpState GetRtxRtpState() const RTC_LOCKS_EXCLUDED(send_mutex_); - int64_t LastTimestampTimeMs() const RTC_LOCKS_EXCLUDED(send_mutex_); - private: std::unique_ptr BuildRtxPacket( const RtpPacketToSend& packet); @@ -181,6 +180,9 @@ class RTPSender { void UpdateHeaderSizes() RTC_EXCLUSIVE_LOCKS_REQUIRED(send_mutex_); + void UpdateLastPacketState(const RtpPacketToSend& packet) + RTC_EXCLUSIVE_LOCKS_REQUIRED(send_mutex_); + Clock* const clock_; Random random_ RTC_GUARDED_BY(send_mutex_); @@ -201,17 +203,13 @@ class RTPSender { bool sending_media_ RTC_GUARDED_BY(send_mutex_); size_t max_packet_size_; - int8_t last_payload_type_ RTC_GUARDED_BY(send_mutex_); - RtpHeaderExtensionMap rtp_header_extension_map_ RTC_GUARDED_BY(send_mutex_); size_t max_media_packet_header_ RTC_GUARDED_BY(send_mutex_); size_t max_padding_fec_packet_header_ RTC_GUARDED_BY(send_mutex_); // RTP variables uint32_t timestamp_offset_ RTC_GUARDED_BY(send_mutex_); - bool sequence_number_forced_ RTC_GUARDED_BY(send_mutex_); - uint16_t sequence_number_ RTC_GUARDED_BY(send_mutex_); - uint16_t sequence_number_rtx_ RTC_GUARDED_BY(send_mutex_); + PacketSequencer sequencer_ RTC_GUARDED_BY(send_mutex_); // RID value to send in the RID or RepairedRID header extension. std::string rid_ RTC_GUARDED_BY(send_mutex_); // MID value to send in the MID header extension. @@ -222,10 +220,6 @@ class RTPSender { // when to stop sending the MID and RID header extensions. 
bool ssrc_has_acked_ RTC_GUARDED_BY(send_mutex_); bool rtx_ssrc_has_acked_ RTC_GUARDED_BY(send_mutex_); - uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(send_mutex_); - int64_t capture_time_ms_ RTC_GUARDED_BY(send_mutex_); - int64_t last_timestamp_time_ms_ RTC_GUARDED_BY(send_mutex_); - bool last_packet_marker_bit_ RTC_GUARDED_BY(send_mutex_); std::vector csrcs_ RTC_GUARDED_BY(send_mutex_); int rtx_ RTC_GUARDED_BY(send_mutex_); // Mapping rtx_payload_type_map_[associated] = rtx. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc index aba23ddc4..55dd9ff07 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -250,8 +250,7 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, AddPacketToTransportFeedback(*packet_id, *packet, pacing_info); } - options.application_data.assign(packet->application_data().begin(), - packet->application_data().end()); + options.additional_data = packet->additional_data(); if (packet->packet_type() != RtpPacketMediaType::kPadding && packet->packet_type() != RtpPacketMediaType::kRetransmission) { @@ -413,6 +412,7 @@ void RtpSenderEgress::AddPacketToTransportFeedback( packet_info.ssrc = ssrc_; packet_info.transport_sequence_number = packet_id; packet_info.rtp_sequence_number = packet.SequenceNumber(); + packet_info.rtp_timestamp = packet.Timestamp(); packet_info.length = packet_size; packet_info.pacing_info = pacing_info; packet_info.packet_type = packet.packet_type(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h index 8e36425f2..c767a1fe1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.h @@ -19,6 +19,7 @@ #include "absl/types/optional.h" #include "api/call/transport.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/units/data_rate.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" @@ -29,7 +30,7 @@ #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -127,7 +128,7 @@ class RtpSenderEgress { void PeriodicUpdate(); TaskQueueBase* const worker_queue_; - SequenceChecker pacer_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker pacer_checker_; const uint32_t ssrc_; const absl::optional rtx_ssrc_; const absl::optional flexfec_ssrc_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index 55ba9b054..bd309c7bb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -153,7 +153,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) : (kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers)), last_rotation_(kVideoRotation_0), transmit_color_space_next_frame_(false), - send_allocation_(false), + send_allocation_(SendVideoLayersAllocation::kDontSend), current_playout_delay_{-1, -1}, playout_delay_pending_(false), forced_playout_delay_(LoadVideoPlayoutDelayOverride(config.field_trials)), @@ -169,8 +169,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) absolute_capture_time_sender_(config.clock), 
frame_transformer_delegate_( config.frame_transformer - ? new rtc::RefCountedObject< - RTPSenderVideoFrameTransformerDelegate>( + ? rtc::make_ref_counted( this, config.frame_transformer, rtp_sender_->SSRC(), @@ -293,8 +292,13 @@ void RTPSenderVideo::SetVideoLayersAllocationAfterTransformation( void RTPSenderVideo::SetVideoLayersAllocationInternal( VideoLayersAllocation allocation) { RTC_DCHECK_RUNS_SERIALIZED(&send_checker_); + if (!allocation_ || allocation.active_spatial_layers.size() > + allocation_->active_spatial_layers.size()) { + send_allocation_ = SendVideoLayersAllocation::kSendWithResolution; + } else if (send_allocation_ == SendVideoLayersAllocation::kDontSend) { + send_allocation_ = SendVideoLayersAllocation::kSendWithoutResolution; + } allocation_ = std::move(allocation); - send_allocation_ = true; } void RTPSenderVideo::AddRtpHeaderExtensions( @@ -433,16 +437,19 @@ void RTPSenderVideo::AddRtpHeaderExtensions( } } - if (first_packet && send_allocation_) { - if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { - packet->SetExtension( - allocation_.value()); - } else if (PacketWillLikelyBeRequestedForRestransmitionIfLost( - video_header)) { - VideoLayersAllocation allocation = allocation_.value(); - allocation.resolution_and_frame_rate_is_valid = false; - packet->SetExtension(allocation); - } + if (first_packet && + send_allocation_ != SendVideoLayersAllocation::kDontSend && + (video_header.frame_type == VideoFrameType::kVideoFrameKey || + PacketWillLikelyBeRequestedForRestransmitionIfLost(video_header))) { + VideoLayersAllocation allocation = allocation_.value(); + allocation.resolution_and_frame_rate_is_valid = + send_allocation_ == SendVideoLayersAllocation::kSendWithResolution; + packet->SetExtension(allocation); + } + + if (first_packet && video_header.video_frame_tracking_id) { + packet->SetExtension( + *video_header.video_frame_tracking_id); } } @@ -481,7 +488,7 @@ bool RTPSenderVideo::SendVideo( } if (allocation_) { // Send the 
bitrate allocation on every key frame. - send_allocation_ = true; + send_allocation_ = SendVideoLayersAllocation::kSendWithResolution; } } @@ -645,10 +652,10 @@ bool RTPSenderVideo::SendVideo( if (!packetizer->NextPacket(packet.get())) return false; RTC_DCHECK_LE(packet->payload_size(), expected_payload_capacity); - if (!rtp_sender_->AssignSequenceNumber(packet.get())) - return false; packet->set_allow_retransmission(allow_retransmission); + packet->set_is_key_frame(video_header.frame_type == + VideoFrameType::kVideoFrameKey); // Put packetization finish timestamp into extension. if (packet->HasExtension()) { @@ -665,7 +672,7 @@ bool RTPSenderVideo::SendVideo( red_packet->SetPayloadType(*red_payload_type_); red_packet->set_is_red(true); - // Send |red_packet| instead of |packet| for allocated sequence number. + // Append |red_packet| instead of |packet| to output. red_packet->set_packet_type(RtpPacketMediaType::kVideo); red_packet->set_allow_retransmission(packet->allow_retransmission()); rtp_packets.emplace_back(std::move(red_packet)); @@ -686,6 +693,11 @@ bool RTPSenderVideo::SendVideo( } } + if (!rtp_sender_->AssignSequenceNumbersAndStoreLastPacketState(rtp_packets)) { + // Media not being sent. + return false; + } + LogAndSendToNetwork(std::move(rtp_packets), payload.size()); // Update details about the last sent frame. @@ -704,7 +716,7 @@ bool RTPSenderVideo::SendVideo( // This frame will likely be delivered, no need to populate playout // delay extensions until it changes again. 
playout_delay_pending_ = false; - send_allocation_ = false; + send_allocation_ = SendVideoLayersAllocation::kDontSend; } TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms, "timestamp", diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h index 3f431dfec..06f3d2001 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h @@ -20,6 +20,7 @@ #include "api/array_view.h" #include "api/frame_transformer_interface.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_codec_type.h" @@ -37,7 +38,6 @@ #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -159,6 +159,12 @@ class RTPSenderVideo { int64_t last_frame_time_ms; }; + enum class SendVideoLayersAllocation { + kSendWithResolution, + kSendWithoutResolution, + kDontSend + }; + void SetVideoStructureInternal( const FrameDependencyStructure* video_structure); void SetVideoLayersAllocationInternal(VideoLayersAllocation allocation); @@ -202,7 +208,7 @@ class RTPSenderVideo { absl::optional allocation_ RTC_GUARDED_BY(send_checker_); // Flag indicating if we should send |allocation_|. - bool send_allocation_ RTC_GUARDED_BY(send_checker_); + SendVideoLayersAllocation send_allocation_ RTC_GUARDED_BY(send_checker_); // Current target playout delay. 
VideoPlayoutDelay current_playout_delay_ RTC_GUARDED_BY(send_checker_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index 074b64086..23e66bf75 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -129,9 +129,10 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { MutexLock lock(&sender_lock_); - // The encoder queue gets destroyed after the sender; as long as the sender is - // alive, it's safe to post. - if (!sender_) + // The encoder queue normally gets destroyed after the sender; + // however, it might still be null by the time a previously queued frame + // arrives. + if (!sender_ || !encoder_queue_) return; rtc::scoped_refptr delegate = this; encoder_queue_->PostTask(ToQueuedTask( diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_utility.cc index a3d6d6f7f..a22785fac 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_utility.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_utility.cc @@ -536,6 +536,10 @@ void RtpHeaderParser::ParseOneByteExtensionHeader( RTC_LOG(WARNING) << "Inband comfort noise extension unsupported by " "rtp header parser."; break; + case kRtpExtensionVideoFrameTrackingId: + RTC_LOG(WARNING) + << "VideoFrameTrackingId unsupported by rtp header parser."; + break; case kRtpExtensionNone: case kRtpExtensionNumberOfExtensions: { RTC_NOTREACHED() << "Invalid extension type: " << type; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_header.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_header.h 
index b5934ff8b..aa5ce1980 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_header.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_header.h @@ -89,6 +89,9 @@ struct RTPVideoHeader { VideoPlayoutDelay playout_delay; VideoSendTiming video_timing; absl::optional color_space; + // This field is meant for media quality testing purpose only. When enabled it + // carries the webrtc::VideoFrame id field from the sender to the receiver. + absl::optional video_frame_tracking_id; RTPVideoTypeHeader video_type_header; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc index dbaa36b15..93fb235dc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc @@ -10,10 +10,13 @@ #include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" -#include +#include +#include +#include "absl/algorithm/container.h" #include "api/video/video_layers_allocation.h" -#include "rtc_base/bit_buffer.h" +#include "modules/rtp_rtcp/source/byte_io.h" +#include "rtc_base/checks.h" namespace webrtc { @@ -22,202 +25,360 @@ constexpr const char RtpVideoLayersAllocationExtension::kUri[]; namespace { -// Counts the number of bits used in the binary representation of val. -size_t CountBits(uint64_t val) { - size_t bit_count = 0; - while (val != 0) { - bit_count++; - val >>= 1; +constexpr int kMaxNumRtpStreams = 4; + +// TODO(bugs.webrtc.org/12000): share Leb128 functions with av1 packetizer. +// Returns minimum number of bytes required to store `value`. 
+int Leb128Size(uint32_t value) { + int size = 0; + while (value >= 0x80) { + ++size; + value >>= 7; } - return bit_count; + return size + 1; } -// Counts the number of bits used if `val`is encoded using unsigned exponential -// Golomb encoding. -// TODO(bugs.webrtc.org/12000): Move to bit_buffer.cc if Golomb encoding is used -// in the final version. -size_t SizeExponentialGolomb(uint32_t val) { - if (val == std::numeric_limits::max()) { - return 0; +// Returns number of bytes consumed. +int WriteLeb128(uint32_t value, uint8_t* buffer) { + int size = 0; + while (value >= 0x80) { + buffer[size] = 0x80 | (value & 0x7F); + ++size; + value >>= 7; } - uint64_t val_to_encode = static_cast(val) + 1; - return CountBits(val_to_encode) * 2 - 1; + buffer[size] = value; + ++size; + return size; +} + +// Reads leb128 encoded value and advance read_at by number of bytes consumed. +// Sets read_at to nullptr on error. +uint64_t ReadLeb128(const uint8_t*& read_at, const uint8_t* end) { + uint64_t value = 0; + int fill_bits = 0; + while (read_at != end && fill_bits < 64 - 7) { + uint8_t leb128_byte = *read_at; + value |= uint64_t{leb128_byte & 0x7Fu} << fill_bits; + ++read_at; + fill_bits += 7; + if ((leb128_byte & 0x80) == 0) { + return value; + } + } + // Failed to find terminator leb128 byte. + read_at = nullptr; + return 0; +} + +bool AllocationIsValid(const VideoLayersAllocation& allocation) { + // Since all multivalue fields are stored in (rtp_stream_id, spatial_id) order + // assume `allocation.active_spatial_layers` is already sorted. It is simpler + // to assemble it in the sorted way than to resort during serialization. 
+ if (!absl::c_is_sorted( + allocation.active_spatial_layers, + [](const VideoLayersAllocation::SpatialLayer& lhs, + const VideoLayersAllocation::SpatialLayer& rhs) { + return std::make_tuple(lhs.rtp_stream_index, lhs.spatial_id) < + std::make_tuple(rhs.rtp_stream_index, rhs.spatial_id); + })) { + return false; + } + + int max_rtp_stream_idx = 0; + for (const auto& spatial_layer : allocation.active_spatial_layers) { + if (spatial_layer.rtp_stream_index < 0 || + spatial_layer.rtp_stream_index >= 4) { + return false; + } + if (spatial_layer.spatial_id < 0 || spatial_layer.spatial_id >= 4) { + return false; + } + if (spatial_layer.target_bitrate_per_temporal_layer.empty() || + spatial_layer.target_bitrate_per_temporal_layer.size() > 4) { + return false; + } + if (max_rtp_stream_idx < spatial_layer.rtp_stream_index) { + max_rtp_stream_idx = spatial_layer.rtp_stream_index; + } + if (allocation.resolution_and_frame_rate_is_valid) { + // TODO(danilchap): Add check width and height are no more than 0x10000 + // when width and height become larger type and thus would support maximum + // resolution. 
+ if (spatial_layer.width <= 0) { + return false; + } + if (spatial_layer.height <= 0) { + return false; + } + if (spatial_layer.frame_rate_fps > 255) { + return false; + } + } + } + if (allocation.rtp_stream_index < 0 || + (!allocation.active_spatial_layers.empty() && + allocation.rtp_stream_index > max_rtp_stream_idx)) { + return false; + } + return true; +} + +struct SpatialLayersBitmasks { + int max_rtp_stream_id = 0; + uint8_t spatial_layer_bitmask[kMaxNumRtpStreams] = {}; + bool bitmasks_are_the_same = true; +}; + +SpatialLayersBitmasks SpatialLayersBitmasksPerRtpStream( + const VideoLayersAllocation& allocation) { + RTC_DCHECK(AllocationIsValid(allocation)); + SpatialLayersBitmasks result; + for (const auto& layer : allocation.active_spatial_layers) { + result.spatial_layer_bitmask[layer.rtp_stream_index] |= + (1u << layer.spatial_id); + if (result.max_rtp_stream_id < layer.rtp_stream_index) { + result.max_rtp_stream_id = layer.rtp_stream_index; + } + } + for (int i = 1; i <= result.max_rtp_stream_id; ++i) { + if (result.spatial_layer_bitmask[i] != result.spatial_layer_bitmask[0]) { + result.bitmasks_are_the_same = false; + break; + } + } + return result; } } // namespace -// TODO(bugs.webrtc.org/12000): Review and revise the content and encoding of -// this extension. This is an experimental first version. - -// 0 1 2 -// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | NS|RSID|T|X|Res| Bit encoded data... -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// NS: Number of spatial layers/simulcast streams - 1. 2 bits, thus allowing -// passing number of layers/streams up-to 4. -// RSID: RTP stream id this allocation is sent on, numbered from 0. 2 bits. -// T: indicates if all spatial layers have the same amount of temporal layers. -// X: indicates if resolution and frame rate per spatial layer is present. -// Res: 2 bits reserved for future use. 
-// Bit encoded data: consists of following fields written in order: -// 1) T=1: Nt - 2-bit value of number of temporal layers - 1 -// T=0: NS 2-bit values of numbers of temporal layers - 1 for all spatial -// layers from lower to higher. -// 2) Bitrates: -// One value for each spatial x temporal layer. -// Format: RSID (2-bit) SID(2-bit),folowed by bitrate for all temporal -// layers for the RSID,SID tuple. All bitrates are in kbps. All bitrates are -// total required bitrate to receive the corresponding layer, i.e. in -// simulcast mode they include only corresponding spatial layer, in full-svc -// all lower spatial layers are included. All lower temporal layers are also -// included. All bitrates are written using unsigned Exponential Golomb -// encoding. -// 3) [only if X bit is set]. Encoded width, 16-bit, height, 16-bit, -// max frame rate 8-bit per spatial layer in order from lower to higher. +// +-+-+-+-+-+-+-+-+ +// |RID| NS| sl_bm | +// +-+-+-+-+-+-+-+-+ +// Spatial layer bitmask |sl0_bm |sl1_bm | +// up to 2 bytes |---------------| +// when sl_bm == 0 |sl2_bm |sl3_bm | +// +-+-+-+-+-+-+-+-+ +// Number of temporal |#tl|#tl|#tl|#tl| +// layers per spatial layer :---------------: +// up to 4 bytes | ... | +// +-+-+-+-+-+-+-+-+ +// Target bitrate in kpbs | | +// per temporal layer : ... : +// leb128 encoded | | +// +-+-+-+-+-+-+-+-+ +// Resolution and framerate | | +// 5 bytes per spatial layer + width-1 for + +// (optional) | rid=0, sid=0 | +// +---------------+ +// | | +// + height-1 for + +// | rid=0, sid=0 | +// +---------------+ +// | max framerate | +// +-+-+-+-+-+-+-+-+ +// : ... : +// +-+-+-+-+-+-+-+-+ +// +// RID: RTP stream index this allocation is sent on, numbered from 0. 2 bits. +// NS: Number of RTP streams - 1. 2 bits, thus allowing up-to 4 RTP streams. +// sl_bm: BitMask of the active Spatial Layers when same for all RTP streams or +// 0 otherwise. 4 bits thus allows up to 4 spatial layers per RTP streams. 
+// slX_bm: BitMask of the active Spatial Layers for RTP stream with index=X. +// byte-aligned. When NS < 2, takes ones byte, otherwise uses two bytes. +// #tl: 2-bit value of number of temporal layers-1, thus allowing up-to 4 +// temporal layer per spatial layer. One per spatial layer per RTP stream. +// values are stored in (RTP stream id, spatial id) ascending order. +// zero-padded to byte alignment. +// Target bitrate in kbps. Values are stored using leb128 encoding. +// one value per temporal layer. values are stored in +// (RTP stream id, spatial id, temporal id) ascending order. +// All bitrates are total required bitrate to receive the corresponding +// layer, i.e. in simulcast mode they include only corresponding spatial +// layer, in full-svc all lower spatial layers are included. All lower +// temporal layers are also included. +// Resolution and framerate. +// Optional. Presense is infered from the rtp header extension size. +// Encoded (width - 1), 16-bit, (height - 1), 16-bit, max frame rate 8-bit +// per spatial layer per RTP stream. +// Values are stored in (RTP stream id, spatial id) ascending order. +// +// An empty layer allocation (i.e nothing sent on ssrc) is encoded as +// special case with a single 0 byte. 
bool RtpVideoLayersAllocationExtension::Write( rtc::ArrayView data, const VideoLayersAllocation& allocation) { - RTC_DCHECK_LT(allocation.rtp_stream_index, - VideoLayersAllocation::kMaxSpatialIds); + RTC_DCHECK(AllocationIsValid(allocation)); RTC_DCHECK_GE(data.size(), ValueSize(allocation)); - rtc::BitBufferWriter writer(data.data(), data.size()); - // NS: - if (allocation.active_spatial_layers.empty()) - return false; - writer.WriteBits(allocation.active_spatial_layers.size() - 1, 2); - - // RSID: - writer.WriteBits(allocation.rtp_stream_index, 2); - - // T: - bool num_tls_is_the_same = true; - size_t first_layers_number_of_temporal_layers = - allocation.active_spatial_layers.front() - .target_bitrate_per_temporal_layer.size(); - for (const auto& spatial_layer : allocation.active_spatial_layers) { - if (first_layers_number_of_temporal_layers != - spatial_layer.target_bitrate_per_temporal_layer.size()) { - num_tls_is_the_same = false; - break; - } + if (allocation.active_spatial_layers.empty()) { + data[0] = 0; + return true; } - writer.WriteBits(num_tls_is_the_same ? 1 : 0, 1); - // X: - writer.WriteBits(allocation.resolution_and_frame_rate_is_valid ? 1 : 0, 1); - - // RESERVED: - writer.WriteBits(/*val=*/0, /*bit_count=*/2); - - if (num_tls_is_the_same) { - writer.WriteBits(first_layers_number_of_temporal_layers - 1, 2); + SpatialLayersBitmasks slb = SpatialLayersBitmasksPerRtpStream(allocation); + uint8_t* write_at = data.data(); + // First half of the header byte. + *write_at = (allocation.rtp_stream_index << 6); + // number of rtp stream - 1 is the same as the maximum rtp_stream_id. + *write_at |= slb.max_rtp_stream_id << 4; + if (slb.bitmasks_are_the_same) { + // Second half of the header byte. 
+ *write_at |= slb.spatial_layer_bitmask[0]; } else { - for (const auto& spatial_layer : allocation.active_spatial_layers) { - writer.WriteBits( - spatial_layer.target_bitrate_per_temporal_layer.size() - 1, 2); + // spatial layer bitmasks when they are different for different RTP streams. + *++write_at = + (slb.spatial_layer_bitmask[0] << 4) | slb.spatial_layer_bitmask[1]; + if (slb.max_rtp_stream_id >= 2) { + *++write_at = + (slb.spatial_layer_bitmask[2] << 4) | slb.spatial_layer_bitmask[3]; } } + ++write_at; + { // Number of temporal layers. + int bit_offset = 8; + *write_at = 0; + for (const auto& layer : allocation.active_spatial_layers) { + if (bit_offset == 0) { + bit_offset = 6; + *++write_at = 0; + } else { + bit_offset -= 2; + } + *write_at |= + ((layer.target_bitrate_per_temporal_layer.size() - 1) << bit_offset); + } + ++write_at; + } + + // Target bitrates. for (const auto& spatial_layer : allocation.active_spatial_layers) { - writer.WriteBits(spatial_layer.rtp_stream_index, 2); - writer.WriteBits(spatial_layer.spatial_id, 2); for (const DataRate& bitrate : spatial_layer.target_bitrate_per_temporal_layer) { - writer.WriteExponentialGolomb(bitrate.kbps()); + write_at += WriteLeb128(bitrate.kbps(), write_at); } } if (allocation.resolution_and_frame_rate_is_valid) { for (const auto& spatial_layer : allocation.active_spatial_layers) { - writer.WriteUInt16(spatial_layer.width); - writer.WriteUInt16(spatial_layer.height); - writer.WriteUInt8(spatial_layer.frame_rate_fps); + ByteWriter::WriteBigEndian(write_at, spatial_layer.width - 1); + write_at += 2; + ByteWriter::WriteBigEndian(write_at, spatial_layer.height - 1); + write_at += 2; + *write_at = spatial_layer.frame_rate_fps; + ++write_at; } } + RTC_DCHECK_EQ(write_at - data.data(), ValueSize(allocation)); return true; } bool RtpVideoLayersAllocationExtension::Parse( rtc::ArrayView data, VideoLayersAllocation* allocation) { - if (data.size() == 0) - return false; - rtc::BitBuffer reader(data.data(), 
data.size()); - if (!allocation) + if (data.empty() || allocation == nullptr) { return false; + } + allocation->active_spatial_layers.clear(); - uint32_t val; - // NS: - if (!reader.ReadBits(&val, 2)) - return false; - int active_spatial_layers = val + 1; + const uint8_t* read_at = data.data(); + const uint8_t* const end = data.data() + data.size(); - // RSID: - if (!reader.ReadBits(&val, 2)) - return false; - allocation->rtp_stream_index = val; + if (data.size() == 1 && *read_at == 0) { + allocation->rtp_stream_index = 0; + allocation->resolution_and_frame_rate_is_valid = true; + return true; + } - // T: - if (!reader.ReadBits(&val, 1)) - return false; - bool num_tls_is_constant = (val == 1); + // Header byte. + allocation->rtp_stream_index = *read_at >> 6; + int num_rtp_streams = 1 + ((*read_at >> 4) & 0b11); + uint8_t spatial_layers_bitmasks[kMaxNumRtpStreams]; + spatial_layers_bitmasks[0] = *read_at & 0b1111; - // X: - if (!reader.ReadBits(&val, 1)) - return false; - allocation->resolution_and_frame_rate_is_valid = (val == 1); - - // RESERVED: - if (!reader.ReadBits(&val, 2)) - return false; - - int number_of_temporal_layers[VideoLayersAllocation::kMaxSpatialIds]; - if (num_tls_is_constant) { - if (!reader.ReadBits(&val, 2)) - return false; - for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { - number_of_temporal_layers[sl_idx] = val + 1; + if (spatial_layers_bitmasks[0] != 0) { + for (int i = 1; i < num_rtp_streams; ++i) { + spatial_layers_bitmasks[i] = spatial_layers_bitmasks[0]; } } else { - for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { - if (!reader.ReadBits(&val, 2)) + // Spatial layer bitmasks when they are different for different RTP streams. 
+ if (++read_at == end) { + return false; + } + spatial_layers_bitmasks[0] = *read_at >> 4; + spatial_layers_bitmasks[1] = *read_at & 0b1111; + if (num_rtp_streams > 2) { + if (++read_at == end) { return false; - number_of_temporal_layers[sl_idx] = val + 1; - if (number_of_temporal_layers[sl_idx] > - VideoLayersAllocation::kMaxTemporalIds) + } + spatial_layers_bitmasks[2] = *read_at >> 4; + spatial_layers_bitmasks[3] = *read_at & 0b1111; + } + } + if (++read_at == end) { + return false; + } + + // Read number of temporal layers, + // Create `allocation->active_spatial_layers` while iterating though it. + int bit_offset = 8; + for (int stream_idx = 0; stream_idx < num_rtp_streams; ++stream_idx) { + for (int sid = 0; sid < VideoLayersAllocation::kMaxSpatialIds; ++sid) { + if ((spatial_layers_bitmasks[stream_idx] & (1 << sid)) == 0) { + continue; + } + + if (bit_offset == 0) { + bit_offset = 6; + if (++read_at == end) { + return false; + } + } else { + bit_offset -= 2; + } + int num_temporal_layers = 1 + ((*read_at >> bit_offset) & 0b11); + allocation->active_spatial_layers.emplace_back(); + auto& layer = allocation->active_spatial_layers.back(); + layer.rtp_stream_index = stream_idx; + layer.spatial_id = sid; + layer.target_bitrate_per_temporal_layer.resize(num_temporal_layers, + DataRate::Zero()); + } + } + if (++read_at == end) { + return false; + } + + // Target bitrates. 
+ for (auto& layer : allocation->active_spatial_layers) { + for (DataRate& rate : layer.target_bitrate_per_temporal_layer) { + rate = DataRate::KilobitsPerSec(ReadLeb128(read_at, end)); + if (read_at == nullptr) { return false; + } } } - for (int sl_idx = 0; sl_idx < active_spatial_layers; ++sl_idx) { - allocation->active_spatial_layers.emplace_back(); - auto& spatial_layer = allocation->active_spatial_layers.back(); - auto& temporal_layers = spatial_layer.target_bitrate_per_temporal_layer; - if (!reader.ReadBits(&val, 2)) - return false; - spatial_layer.rtp_stream_index = val; - if (!reader.ReadBits(&val, 2)) - return false; - spatial_layer.spatial_id = val; - for (int tl_idx = 0; tl_idx < number_of_temporal_layers[sl_idx]; ++tl_idx) { - reader.ReadExponentialGolomb(&val); - temporal_layers.push_back(DataRate::KilobitsPerSec(val)); - } + if (read_at == end) { + allocation->resolution_and_frame_rate_is_valid = false; + return true; } - if (allocation->resolution_and_frame_rate_is_valid) { - for (auto& spatial_layer : allocation->active_spatial_layers) { - if (!reader.ReadUInt16(&spatial_layer.width)) - return false; - if (!reader.ReadUInt16(&spatial_layer.height)) - return false; - if (!reader.ReadUInt8(&spatial_layer.frame_rate_fps)) - return false; - } + if (read_at + 5 * allocation->active_spatial_layers.size() != end) { + // data is left, but it size is not what can be used for resolutions and + // framerates. 
+ return false; + } + allocation->resolution_and_frame_rate_is_valid = true; + for (auto& layer : allocation->active_spatial_layers) { + layer.width = 1 + ByteReader::ReadBigEndian(read_at); + read_at += 2; + layer.height = 1 + ByteReader::ReadBigEndian(read_at); + read_at += 2; + layer.frame_rate_fps = *read_at; + ++read_at; } return true; } @@ -225,36 +386,28 @@ bool RtpVideoLayersAllocationExtension::Parse( size_t RtpVideoLayersAllocationExtension::ValueSize( const VideoLayersAllocation& allocation) { if (allocation.active_spatial_layers.empty()) { - return 0; + return 1; } - size_t size_in_bits = 8; // Fixed first byte.¨ - bool num_tls_is_the_same = true; - size_t first_layers_number_of_temporal_layers = - allocation.active_spatial_layers.front() - .target_bitrate_per_temporal_layer.size(); + size_t result = 1; // header + SpatialLayersBitmasks slb = SpatialLayersBitmasksPerRtpStream(allocation); + if (!slb.bitmasks_are_the_same) { + ++result; + if (slb.max_rtp_stream_id >= 2) { + ++result; + } + } + // 2 bits per active spatial layer, rounded up to full byte, i.e. + // 0.25 byte per active spatial layer. + result += (allocation.active_spatial_layers.size() + 3) / 4; for (const auto& spatial_layer : allocation.active_spatial_layers) { - if (first_layers_number_of_temporal_layers != - spatial_layer.target_bitrate_per_temporal_layer.size()) { - num_tls_is_the_same = false; - } - size_in_bits += 4; // RSID, SID tuple. 
- for (const auto& bitrate : - spatial_layer.target_bitrate_per_temporal_layer) { - size_in_bits += SizeExponentialGolomb(bitrate.kbps()); - } - } - if (num_tls_is_the_same) { - size_in_bits += 2; - } else { - for (const auto& spatial_layer : allocation.active_spatial_layers) { - size_in_bits += - 2 * spatial_layer.target_bitrate_per_temporal_layer.size(); + for (DataRate value : spatial_layer.target_bitrate_per_temporal_layer) { + result += Leb128Size(value.kbps()); } } if (allocation.resolution_and_frame_rate_is_valid) { - size_in_bits += allocation.active_spatial_layers.size() * 5 * 8; + result += 5 * allocation.active_spatial_layers.size(); } - return (size_in_bits + 7) / 8; + return result; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.cc index 76d1bb5d8..487369316 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.cc @@ -77,7 +77,7 @@ UlpfecGenerator::UlpfecGenerator(int red_payload_type, fec_(ForwardErrorCorrection::CreateUlpfec(kUnknownSsrc)), num_protected_frames_(0), min_num_media_packets_(1), - keyframe_in_process_(false), + media_contains_keyframe_(false), fec_bitrate_(/*max_window_size_ms=*/1000, RateStatistics::kBpsScale) {} // Used by FlexFecSender, payload types are unused. 
@@ -89,7 +89,7 @@ UlpfecGenerator::UlpfecGenerator(std::unique_ptr fec, fec_(std::move(fec)), num_protected_frames_(0), min_num_media_packets_(1), - keyframe_in_process_(false), + media_contains_keyframe_(false), fec_bitrate_(/*max_window_size_ms=*/1000, RateStatistics::kBpsScale) {} UlpfecGenerator::~UlpfecGenerator() = default; @@ -111,7 +111,7 @@ void UlpfecGenerator::AddPacketAndGenerateFec(const RtpPacketToSend& packet) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); RTC_DCHECK(generated_fec_packets_.empty()); - if (media_packets_.empty()) { + { MutexLock lock(&mutex_); if (pending_params_) { current_params_ = *pending_params_; @@ -123,13 +123,12 @@ void UlpfecGenerator::AddPacketAndGenerateFec(const RtpPacketToSend& packet) { min_num_media_packets_ = 1; } } - - keyframe_in_process_ = packet.is_key_frame(); } - RTC_DCHECK_EQ(packet.is_key_frame(), keyframe_in_process_); - bool complete_frame = false; - const bool marker_bit = packet.Marker(); + if (packet.is_key_frame()) { + media_contains_keyframe_ = true; + } + const bool complete_frame = packet.Marker(); if (media_packets_.size() < kUlpfecMaxMediaPackets) { // Our packet masks can only protect up to |kUlpfecMaxMediaPackets| packets. auto fec_packet = std::make_unique(); @@ -142,9 +141,8 @@ void UlpfecGenerator::AddPacketAndGenerateFec(const RtpPacketToSend& packet) { last_media_packet_ = packet; } - if (marker_bit) { + if (complete_frame) { ++num_protected_frames_; - complete_frame = true; } auto params = CurrentParams(); @@ -154,7 +152,7 @@ void UlpfecGenerator::AddPacketAndGenerateFec(const RtpPacketToSend& packet) { // less than |kMaxExcessOverhead|, and // (2) at least |min_num_media_packets_| media packets is reached. if (complete_frame && - (num_protected_frames_ == params.max_fec_frames || + (num_protected_frames_ >= params.max_fec_frames || (ExcessOverheadBelowMax() && MinimumMediaPacketsReached()))) { // We are not using Unequal Protection feature of the parity erasure code. 
constexpr int kNumImportantPackets = 0; @@ -190,8 +188,8 @@ bool UlpfecGenerator::MinimumMediaPacketsReached() const { const FecProtectionParams& UlpfecGenerator::CurrentParams() const { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - return keyframe_in_process_ ? current_params_.keyframe_params - : current_params_.delta_params; + return media_contains_keyframe_ ? current_params_.keyframe_params + : current_params_.delta_params; } size_t UlpfecGenerator::MaxPacketOverhead() const { @@ -265,6 +263,7 @@ void UlpfecGenerator::ResetState() { last_media_packet_.reset(); generated_fec_packets_.clear(); num_protected_frames_ = 0; + media_contains_keyframe_ = false; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.h index 32ddc6c4b..934a1d5c3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_generator.h @@ -59,6 +59,9 @@ class UlpfecGenerator : public VideoFecGenerator { absl::optional GetRtpState() override { return absl::nullopt; } + // Currently used protection params. + const FecProtectionParams& CurrentParams() const; + private: struct Params { Params(); @@ -90,8 +93,6 @@ class UlpfecGenerator : public VideoFecGenerator { // (e.g. (2k,2m) vs (k,m)) are generally more effective at recovering losses. 
bool MinimumMediaPacketsReached() const; - const FecProtectionParams& CurrentParams() const; - void ResetState(); const int red_payload_type_; @@ -110,7 +111,7 @@ class UlpfecGenerator : public VideoFecGenerator { int num_protected_frames_ RTC_GUARDED_BY(race_checker_); int min_num_media_packets_ RTC_GUARDED_BY(race_checker_); Params current_params_ RTC_GUARDED_BY(race_checker_); - bool keyframe_in_process_ RTC_GUARDED_BY(race_checker_); + bool media_contains_keyframe_ RTC_GUARDED_BY(race_checker_); mutable Mutex mutex_; absl::optional pending_params_ RTC_GUARDED_BY(mutex_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc index 261c8f739..49f483dad 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc @@ -24,6 +24,11 @@ namespace { // Maximum number of media packets that can be protected in one batch. constexpr size_t kMaxMediaPackets = 48; +// Maximum number of media packets tracked by FEC decoder. +// Maintain a sufficiently larger tracking window than |kMaxMediaPackets| +// to account for packet reordering in pacer/ network. +constexpr size_t kMaxTrackedMediaPackets = 4 * kMaxMediaPackets; + // Maximum number of FEC packets stored inside ForwardErrorCorrection. 
constexpr size_t kMaxFecPackets = kMaxMediaPackets; @@ -51,13 +56,13 @@ size_t UlpfecHeaderSize(size_t packet_mask_size) { } // namespace UlpfecHeaderReader::UlpfecHeaderReader() - : FecHeaderReader(kMaxMediaPackets, kMaxFecPackets) {} + : FecHeaderReader(kMaxTrackedMediaPackets, kMaxFecPackets) {} UlpfecHeaderReader::~UlpfecHeaderReader() = default; bool UlpfecHeaderReader::ReadFecHeader( ForwardErrorCorrection::ReceivedFecPacket* fec_packet) const { - uint8_t* data = fec_packet->pkt->data.data(); + uint8_t* data = fec_packet->pkt->data.MutableData(); if (fec_packet->pkt->data.size() < kPacketMaskOffset) { return false; // Truncated packet. } @@ -108,7 +113,7 @@ void UlpfecHeaderWriter::FinalizeFecHeader( const uint8_t* packet_mask, size_t packet_mask_size, ForwardErrorCorrection::Packet* fec_packet) const { - uint8_t* data = fec_packet->data.data(); + uint8_t* data = fec_packet->data.MutableData(); // Set E bit to zero. data[0] &= 0x7f; // Set L bit based on packet mask size. (Note that the packet mask diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc index 26993cabb..fdfa47518 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.cc @@ -37,12 +37,13 @@ UlpfecReceiverImpl::UlpfecReceiverImpl( fec_(ForwardErrorCorrection::CreateUlpfec(ssrc_)) {} UlpfecReceiverImpl::~UlpfecReceiverImpl() { + RTC_DCHECK_RUN_ON(&sequence_checker_); received_packets_.clear(); fec_->ResetState(&recovered_packets_); } FecPacketCounter UlpfecReceiverImpl::GetPacketCounter() const { - MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&sequence_checker_); return packet_counter_; } @@ -77,6 +78,10 @@ FecPacketCounter UlpfecReceiverImpl::GetPacketCounter() const { bool UlpfecReceiverImpl::AddReceivedRedPacket( const RtpPacketReceived& rtp_packet, uint8_t 
ulpfec_payload_type) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // TODO(bugs.webrtc.org/11993): We get here via Call::DeliverRtp, so should be + // moved to the network thread. + if (rtp_packet.Ssrc() != ssrc_) { RTC_LOG(LS_WARNING) << "Received RED packet with different SSRC than expected; dropping."; @@ -87,7 +92,6 @@ bool UlpfecReceiverImpl::AddReceivedRedPacket( "packet size; dropping."; return false; } - MutexLock lock(&mutex_); static constexpr uint8_t kRedHeaderLength = 1; @@ -128,18 +132,19 @@ bool UlpfecReceiverImpl::AddReceivedRedPacket( rtp_packet.Buffer().Slice(rtp_packet.headers_size() + kRedHeaderLength, rtp_packet.payload_size() - kRedHeaderLength); } else { - auto red_payload = rtp_packet.payload().subview(kRedHeaderLength); - received_packet->pkt->data.EnsureCapacity(rtp_packet.headers_size() + - red_payload.size()); + received_packet->pkt->data.EnsureCapacity(rtp_packet.size() - + kRedHeaderLength); // Copy RTP header. received_packet->pkt->data.SetData(rtp_packet.data(), rtp_packet.headers_size()); // Set payload type. - received_packet->pkt->data[1] &= 0x80; // Reset RED payload type. - received_packet->pkt->data[1] += payload_type; // Set media payload type. - // Copy payload data. - received_packet->pkt->data.AppendData(red_payload.data(), - red_payload.size()); + uint8_t& payload_type_byte = received_packet->pkt->data.MutableData()[1]; + payload_type_byte &= 0x80; // Reset RED payload type. + payload_type_byte += payload_type; // Set media payload type. + // Copy payload and padding data, after the RED header. + received_packet->pkt->data.AppendData( + rtp_packet.data() + rtp_packet.headers_size() + kRedHeaderLength, + rtp_packet.size() - rtp_packet.headers_size() - kRedHeaderLength); } if (received_packet->pkt->data.size() > 0) { @@ -150,7 +155,7 @@ bool UlpfecReceiverImpl::AddReceivedRedPacket( // TODO(nisse): Drop always-zero return value. 
int32_t UlpfecReceiverImpl::ProcessReceivedFec() { - mutex_.Lock(); + RTC_DCHECK_RUN_ON(&sequence_checker_); // If we iterate over |received_packets_| and it contains a packet that cause // us to recurse back to this function (for example a RED packet encapsulating @@ -167,10 +172,8 @@ int32_t UlpfecReceiverImpl::ProcessReceivedFec() { // Send received media packet to VCM. if (!received_packet->is_fec) { ForwardErrorCorrection::Packet* packet = received_packet->pkt; - mutex_.Unlock(); recovered_packet_callback_->OnRecoveredPacket(packet->data.data(), packet->data.size()); - mutex_.Lock(); // Create a packet with the buffer to modify it. RtpPacketReceived rtp_packet; const uint8_t* const original_data = packet->data.cdata(); @@ -207,13 +210,10 @@ int32_t UlpfecReceiverImpl::ProcessReceivedFec() { // Set this flag first; in case the recovered packet carries a RED // header, OnRecoveredPacket will recurse back here. recovered_packet->returned = true; - mutex_.Unlock(); recovered_packet_callback_->OnRecoveredPacket(packet->data.data(), packet->data.size()); - mutex_.Lock(); } - mutex_.Unlock(); return 0; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.h index 2bed04274..f59251f84 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/ulpfec_receiver_impl.h @@ -17,12 +17,13 @@ #include #include +#include "api/sequence_checker.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/include/ulpfec_receiver.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -44,17 +45,18 @@ class 
UlpfecReceiverImpl : public UlpfecReceiver { const uint32_t ssrc_; const RtpHeaderExtensionMap extensions_; - mutable Mutex mutex_; - RecoveredPacketReceiver* recovered_packet_callback_; - std::unique_ptr fec_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; + RecoveredPacketReceiver* const recovered_packet_callback_; + const std::unique_ptr fec_; // TODO(nisse): The AddReceivedRedPacket method adds one or two packets to // this list at a time, after which it is emptied by ProcessReceivedFec. It // will make things simpler to merge AddReceivedRedPacket and // ProcessReceivedFec into a single method, and we can then delete this list. std::vector> - received_packets_; - ForwardErrorCorrection::RecoveredPacketList recovered_packets_; - FecPacketCounter packet_counter_; + received_packets_ RTC_GUARDED_BY(&sequence_checker_); + ForwardErrorCorrection::RecoveredPacketList recovered_packets_ + RTC_GUARDED_BY(&sequence_checker_); + FecPacketCounter packet_counter_ RTC_GUARDED_BY(&sequence_checker_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc index 13788025c..e87be031a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc @@ -264,7 +264,7 @@ absl::optional ParseFuaNalu( uint8_t original_nal_header = fnri | original_nal_type; rtp_payload = rtp_payload.Slice(kNalHeaderSize, rtp_payload.size() - kNalHeaderSize); - rtp_payload[0] = original_nal_header; + rtp_payload.MutableData()[0] = original_nal_header; parsed_payload->video_payload = std::move(rtp_payload); } else { parsed_payload->video_payload = diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc 
b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc index 6c2519aa2..253daaa23 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc @@ -298,8 +298,9 @@ absl::optional ParseFuNalu( } rtp_payload = rtp_payload.Slice(1, rtp_payload.size() - 1); - rtp_payload[0] = f | original_nal_type << 1 | layer_id_h; - rtp_payload[1] = layer_id_l_unshifted | tid; + RTC_DCHECK_LT(1, rtp_payload.size()); + rtp_payload.MutableData()[0] = f | original_nal_type << 1 | layer_id_h; + rtp_payload.MutableData()[1] = layer_id_l_unshifted | tid; parsed_payload->video_payload = std::move(rtp_payload); } else { parsed_payload->video_payload = diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc index a719d7ab1..be0500980 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc @@ -40,12 +40,12 @@ constexpr int kFailedToParse = 0; bool ParsePictureId(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { uint32_t picture_id; uint32_t m_bit; - RETURN_FALSE_ON_ERROR(parser->ReadBits(&m_bit, 1)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(1, m_bit)); if (m_bit) { - RETURN_FALSE_ON_ERROR(parser->ReadBits(&picture_id, 15)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(15, picture_id)); vp9->max_picture_id = kMaxTwoBytePictureId; } else { - RETURN_FALSE_ON_ERROR(parser->ReadBits(&picture_id, 7)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(7, picture_id)); vp9->max_picture_id = kMaxOneBytePictureId; } vp9->picture_id = picture_id; @@ -60,10 +60,10 @@ bool ParsePictureId(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { // bool ParseLayerInfoCommon(rtc::BitBuffer* parser, RTPVideoHeaderVP9* 
vp9) { uint32_t t, u_bit, s, d_bit; - RETURN_FALSE_ON_ERROR(parser->ReadBits(&t, 3)); - RETURN_FALSE_ON_ERROR(parser->ReadBits(&u_bit, 1)); - RETURN_FALSE_ON_ERROR(parser->ReadBits(&s, 3)); - RETURN_FALSE_ON_ERROR(parser->ReadBits(&d_bit, 1)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(3, t)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(1, u_bit)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(3, s)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(1, d_bit)); vp9->temporal_idx = t; vp9->temporal_up_switch = u_bit ? true : false; if (s >= kMaxSpatialLayers) @@ -84,7 +84,7 @@ bool ParseLayerInfoCommon(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { bool ParseLayerInfoNonFlexibleMode(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { uint8_t tl0picidx; - RETURN_FALSE_ON_ERROR(parser->ReadUInt8(&tl0picidx)); + RETURN_FALSE_ON_ERROR(parser->ReadUInt8(tl0picidx)); vp9->tl0_pic_idx = tl0picidx; return true; } @@ -117,8 +117,8 @@ bool ParseRefIndices(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { return false; uint32_t p_diff; - RETURN_FALSE_ON_ERROR(parser->ReadBits(&p_diff, 7)); - RETURN_FALSE_ON_ERROR(parser->ReadBits(&n_bit, 1)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(7, p_diff)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(1, n_bit)); vp9->pid_diff[vp9->num_ref_pics] = p_diff; uint32_t scaled_pid = vp9->picture_id; @@ -154,9 +154,9 @@ bool ParseRefIndices(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { // bool ParseSsData(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { uint32_t n_s, y_bit, g_bit; - RETURN_FALSE_ON_ERROR(parser->ReadBits(&n_s, 3)); - RETURN_FALSE_ON_ERROR(parser->ReadBits(&y_bit, 1)); - RETURN_FALSE_ON_ERROR(parser->ReadBits(&g_bit, 1)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(3, n_s)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(1, y_bit)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(1, g_bit)); RETURN_FALSE_ON_ERROR(parser->ConsumeBits(3)); vp9->num_spatial_layers = n_s + 1; vp9->spatial_layer_resolution_present = y_bit ? 
true : false; @@ -164,20 +164,20 @@ bool ParseSsData(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { if (y_bit) { for (size_t i = 0; i < vp9->num_spatial_layers; ++i) { - RETURN_FALSE_ON_ERROR(parser->ReadUInt16(&vp9->width[i])); - RETURN_FALSE_ON_ERROR(parser->ReadUInt16(&vp9->height[i])); + RETURN_FALSE_ON_ERROR(parser->ReadUInt16(vp9->width[i])); + RETURN_FALSE_ON_ERROR(parser->ReadUInt16(vp9->height[i])); } } if (g_bit) { uint8_t n_g; - RETURN_FALSE_ON_ERROR(parser->ReadUInt8(&n_g)); + RETURN_FALSE_ON_ERROR(parser->ReadUInt8(n_g)); vp9->gof.num_frames_in_gof = n_g; } for (size_t i = 0; i < vp9->gof.num_frames_in_gof; ++i) { uint32_t t, u_bit, r; - RETURN_FALSE_ON_ERROR(parser->ReadBits(&t, 3)); - RETURN_FALSE_ON_ERROR(parser->ReadBits(&u_bit, 1)); - RETURN_FALSE_ON_ERROR(parser->ReadBits(&r, 2)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(3, t)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(1, u_bit)); + RETURN_FALSE_ON_ERROR(parser->ReadBits(2, r)); RETURN_FALSE_ON_ERROR(parser->ConsumeBits(2)); vp9->gof.temporal_idx[i] = t; vp9->gof.temporal_up_switch[i] = u_bit ? true : false; @@ -185,7 +185,7 @@ bool ParseSsData(rtc::BitBuffer* parser, RTPVideoHeaderVP9* vp9) { for (uint8_t p = 0; p < vp9->gof.num_ref_pics[i]; ++p) { uint8_t p_diff; - RETURN_FALSE_ON_ERROR(parser->ReadUInt8(&p_diff)); + RETURN_FALSE_ON_ERROR(parser->ReadUInt8(p_diff)); vp9->gof.pid_diff[i][p] = p_diff; } } @@ -214,7 +214,7 @@ int VideoRtpDepacketizerVp9::ParseRtpPayload( // Parse mandatory first byte of payload descriptor. 
rtc::BitBuffer parser(rtp_payload.data(), rtp_payload.size()); uint8_t first_byte; - if (!parser.ReadUInt8(&first_byte)) { + if (!parser.ReadUInt8(first_byte)) { RTC_LOG(LS_ERROR) << "Payload length is zero."; return kFailedToParse; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/include/jvm_android.h b/TMessagesProj/jni/voip/webrtc/modules/utility/include/jvm_android.h index 3caab8776..693ee519e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/include/jvm_android.h +++ b/TMessagesProj/jni/voip/webrtc/modules/utility/include/jvm_android.h @@ -16,8 +16,8 @@ #include #include +#include "api/sequence_checker.h" #include "modules/utility/include/helpers_android.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -34,7 +34,7 @@ class JvmThreadConnector { ~JvmThreadConnector(); private: - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; bool attached_; }; @@ -111,7 +111,7 @@ class JNIEnvironment { std::string JavaToStdString(const jstring& j_string); private: - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; JNIEnv* const jni_; }; @@ -184,7 +184,7 @@ class JVM { private: JNIEnv* jni() const { return GetEnv(jvm_); } - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; JavaVM* const jvm_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/source/jvm_android.cc b/TMessagesProj/jni/voip/webrtc/modules/utility/source/jvm_android.cc index 8e24daa0f..7021e5af4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/source/jvm_android.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/utility/source/jvm_android.cc @@ -136,7 +136,7 @@ NativeRegistration::NativeRegistration(JNIEnv* jni, jclass clazz) NativeRegistration::~NativeRegistration() { RTC_LOG(INFO) << "NativeRegistration::dtor"; - jni_->UnregisterNatives(j_class_); + //jni_->UnregisterNatives(j_class_); CHECK_EXCEPTION(jni_) << "Error during UnregisterNatives"; } diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.cc index 370930692..cdc2fa100 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.cc @@ -48,7 +48,6 @@ ProcessThreadImpl::ProcessThreadImpl(const char* thread_name) ProcessThreadImpl::~ProcessThreadImpl() { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!thread_.get()); RTC_DCHECK(!stop_); while (!delayed_tasks_.empty()) { @@ -69,10 +68,11 @@ void ProcessThreadImpl::Delete() { delete this; } -void ProcessThreadImpl::Start() { +// Doesn't need locking, because the contending thread isn't running. +void ProcessThreadImpl::Start() RTC_NO_THREAD_SAFETY_ANALYSIS { RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!thread_.get()); - if (thread_.get()) + RTC_DCHECK(thread_.empty()); + if (!thread_.empty()) return; RTC_DCHECK(!stop_); @@ -80,47 +80,84 @@ void ProcessThreadImpl::Start() { for (ModuleCallback& m : modules_) m.module->ProcessThreadAttached(this); - thread_.reset( - new rtc::PlatformThread(&ProcessThreadImpl::Run, this, thread_name_)); - thread_->Start(); + thread_ = rtc::PlatformThread::SpawnJoinable( + [this] { + CurrentTaskQueueSetter set_current(this); + while (Process()) { + } + }, + thread_name_); } void ProcessThreadImpl::Stop() { RTC_DCHECK(thread_checker_.IsCurrent()); - if (!thread_.get()) + if (thread_.empty()) return; { - rtc::CritScope lock(&lock_); + // Need to take lock, for synchronization with `thread_`. + MutexLock lock(&mutex_); stop_ = true; } wake_up_.Set(); + thread_.Finalize(); - thread_->Stop(); + StopNoLocks(); +} + +// No locking needed, since this is called after the contending thread is +// stopped. 
+void ProcessThreadImpl::StopNoLocks() RTC_NO_THREAD_SAFETY_ANALYSIS { + RTC_DCHECK(thread_.empty()); stop_ = false; - thread_.reset(); for (ModuleCallback& m : modules_) m.module->ProcessThreadAttached(nullptr); } void ProcessThreadImpl::WakeUp(Module* module) { // Allowed to be called on any thread. - { - rtc::CritScope lock(&lock_); - for (ModuleCallback& m : modules_) { - if (m.module == module) - m.next_callback = kCallProcessImmediately; + auto holds_mutex = [this] { + if (!IsCurrent()) { + return false; } + RTC_DCHECK_RUN_ON(this); + return holds_mutex_; + }; + if (holds_mutex()) { + // Avoid locking if called on the ProcessThread, via a module's Process), + WakeUpNoLocks(module); + } else { + MutexLock lock(&mutex_); + WakeUpInternal(module); } wake_up_.Set(); } +// Must be called only indirectly from Process, which already holds the lock. +void ProcessThreadImpl::WakeUpNoLocks(Module* module) + RTC_NO_THREAD_SAFETY_ANALYSIS { + RTC_DCHECK_RUN_ON(this); + WakeUpInternal(module); +} + +void ProcessThreadImpl::WakeUpInternal(Module* module) { + for (ModuleCallback& m : modules_) { + if (m.module == module) + m.next_callback = kCallProcessImmediately; + } +} + void ProcessThreadImpl::PostTask(std::unique_ptr task) { - // Allowed to be called on any thread. + // Allowed to be called on any thread, except from a module's Process method. 
+ if (IsCurrent()) { + RTC_DCHECK_RUN_ON(this); + RTC_DCHECK(!holds_mutex_) << "Calling ProcessThread::PostTask from " + "Module::Process is not supported"; + } { - rtc::CritScope lock(&lock_); + MutexLock lock(&mutex_); queue_.push(task.release()); } wake_up_.Set(); @@ -131,7 +168,7 @@ void ProcessThreadImpl::PostDelayedTask(std::unique_ptr task, int64_t run_at_ms = rtc::TimeMillis() + milliseconds; bool recalculate_wakeup_time; { - rtc::CritScope lock(&lock_); + MutexLock lock(&mutex_); recalculate_wakeup_time = delayed_tasks_.empty() || run_at_ms < delayed_tasks_.top().run_at_ms; delayed_tasks_.emplace(run_at_ms, std::move(task)); @@ -149,7 +186,7 @@ void ProcessThreadImpl::RegisterModule(Module* module, #if RTC_DCHECK_IS_ON { // Catch programmer error. - rtc::CritScope lock(&lock_); + MutexLock lock(&mutex_); for (const ModuleCallback& mc : modules_) { RTC_DCHECK(mc.module != module) << "Already registered here: " << mc.location.ToString() @@ -163,11 +200,11 @@ void ProcessThreadImpl::RegisterModule(Module* module, // Now that we know the module isn't in the list, we'll call out to notify // the module that it's attached to the worker thread. We don't hold // the lock while we make this call. 
- if (thread_.get()) + if (!thread_.empty()) module->ProcessThreadAttached(this); { - rtc::CritScope lock(&lock_); + MutexLock lock(&mutex_); modules_.push_back(ModuleCallback(module, from)); } @@ -182,7 +219,7 @@ void ProcessThreadImpl::DeRegisterModule(Module* module) { RTC_DCHECK(module); { - rtc::CritScope lock(&lock_); + MutexLock lock(&mutex_); modules_.remove_if( [&module](const ModuleCallback& m) { return m.module == module; }); } @@ -191,21 +228,13 @@ void ProcessThreadImpl::DeRegisterModule(Module* module) { module->ProcessThreadAttached(nullptr); } -// static -void ProcessThreadImpl::Run(void* obj) { - ProcessThreadImpl* impl = static_cast(obj); - CurrentTaskQueueSetter set_current(impl); - while (impl->Process()) { - } -} - bool ProcessThreadImpl::Process() { TRACE_EVENT1("webrtc", "ProcessThreadImpl", "name", thread_name_); int64_t now = rtc::TimeMillis(); int64_t next_checkpoint = now + (1000 * 60); - + RTC_DCHECK_RUN_ON(this); { - rtc::CritScope lock(&lock_); + MutexLock lock(&mutex_); if (stop_) return false; for (ModuleCallback& m : modules_) { @@ -216,6 +245,8 @@ bool ProcessThreadImpl::Process() { if (m.next_callback == 0) m.next_callback = GetNextCallbackTime(m.module, now); + // Set to true for the duration of the calls to modules' Process(). 
+ holds_mutex_ = true; if (m.next_callback <= now || m.next_callback == kCallProcessImmediately) { { @@ -230,6 +261,7 @@ bool ProcessThreadImpl::Process() { int64_t new_now = rtc::TimeMillis(); m.next_callback = GetNextCallbackTime(m.module, new_now); } + holds_mutex_ = false; if (m.next_callback < next_checkpoint) next_checkpoint = m.next_callback; @@ -248,11 +280,11 @@ bool ProcessThreadImpl::Process() { while (!queue_.empty()) { QueuedTask* task = queue_.front(); queue_.pop(); - lock_.Leave(); + mutex_.Unlock(); if (task->Run()) { delete task; } - lock_.Enter(); + mutex_.Lock(); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.h b/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.h index ed9f5c3bf..b667bfc68 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/utility/source/process_thread_impl.h @@ -17,6 +17,7 @@ #include #include +#include "api/sequence_checker.h" #include "api/task_queue/queued_task.h" #include "modules/include/module.h" #include "modules/utility/include/process_thread.h" @@ -24,7 +25,6 @@ #include "rtc_base/event.h" #include "rtc_base/location.h" #include "rtc_base/platform_thread.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -45,7 +45,6 @@ class ProcessThreadImpl : public ProcessThread { void DeRegisterModule(Module* module) override; protected: - static void Run(void* obj); bool Process(); private: @@ -85,25 +84,32 @@ class ProcessThreadImpl : public ProcessThread { typedef std::list ModuleList; void Delete() override; + // The part of Stop processing that doesn't need any locking. 
+ void StopNoLocks(); + void WakeUpNoLocks(Module* module); + void WakeUpInternal(Module* module) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - // Warning: For some reason, if |lock_| comes immediately before |modules_| - // with the current class layout, we will start to have mysterious crashes - // on Mac 10.9 debug. I (Tommi) suspect we're hitting some obscure alignemnt - // issues, but I haven't figured out what they are, if there are alignment - // requirements for mutexes on Mac or if there's something else to it. - // So be careful with changing the layout. - rtc::RecursiveCriticalSection - lock_; // Used to guard modules_, tasks_ and stop_. + // Members protected by this mutex are accessed on the constructor thread and + // on the spawned process thread, and locking is needed only while the process + // thread is running. + Mutex mutex_; - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; rtc::Event wake_up_; - // TODO(pbos): Remove unique_ptr and stop recreating the thread. - std::unique_ptr thread_; + rtc::PlatformThread thread_; - ModuleList modules_; + ModuleList modules_ RTC_GUARDED_BY(mutex_); + // Set to true when calling Process, to allow reentrant calls to WakeUp. + bool holds_mutex_ RTC_GUARDED_BY(this) = false; std::queue queue_; - std::priority_queue delayed_tasks_ RTC_GUARDED_BY(lock_); - bool stop_; + std::priority_queue delayed_tasks_ RTC_GUARDED_BY(mutex_); + // The `stop_` flag is modified only by the construction thread, protected by + // `thread_checker_`. It is read also by the spawned `thread_`. The latter + // thread must take `mutex_` before access, and for thread safety, the + // constructor thread needs to take `mutex_` when it modifies `stop_` and + // `thread_` is running. Annotations like RTC_GUARDED_BY doesn't support this + // usage pattern. 
+ bool stop_ RTC_GUARDED_BY(mutex_); const char* thread_name_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.cc index 3c8fdd20f..b3c976602 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/device_info_linux.cc @@ -42,8 +42,6 @@ int32_t DeviceInfoLinux::Init() { DeviceInfoLinux::~DeviceInfoLinux() {} uint32_t DeviceInfoLinux::NumberOfDevices() { - RTC_LOG(LS_INFO) << __FUNCTION__; - uint32_t count = 0; char device[20]; int fd = -1; @@ -75,8 +73,6 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber, uint32_t deviceUniqueIdUTF8Length, char* /*productUniqueIdUTF8*/, uint32_t /*productUniqueIdUTF8Length*/) { - RTC_LOG(LS_INFO) << __FUNCTION__; - // Travel through /dev/video [0-63] uint32_t count = 0; char device[20]; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.cc b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.cc index 504565f51..10f9713ec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.cc @@ -34,8 +34,7 @@ namespace webrtc { namespace videocapturemodule { rtc::scoped_refptr VideoCaptureImpl::Create( const char* deviceUniqueId) { - rtc::scoped_refptr implementation( - new rtc::RefCountedObject()); + auto implementation = rtc::make_ref_counted(); if (implementation->Init(deviceUniqueId) != 0) return nullptr; @@ -241,12 +240,15 @@ int32_t VideoCaptureModuleV4L2::StartCapture( } // start capture thread; - if (!_captureThread) { + if (_captureThread.empty()) { quit_ = false; - _captureThread.reset( - new rtc::PlatformThread(VideoCaptureModuleV4L2::CaptureThread, this, - "CaptureThread", rtc::kHighPriority)); - 
_captureThread->Start(); + _captureThread = rtc::PlatformThread::SpawnJoinable( + [this] { + while (CaptureProcess()) { + } + }, + "CaptureThread", + rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh)); } // Needed to start UVC camera - from the uvcview application @@ -262,14 +264,13 @@ int32_t VideoCaptureModuleV4L2::StartCapture( } int32_t VideoCaptureModuleV4L2::StopCapture() { - if (_captureThread) { + if (!_captureThread.empty()) { { MutexLock lock(&capture_lock_); quit_ = true; } - // Make sure the capture thread stop stop using the critsect. - _captureThread->Stop(); - _captureThread.reset(); + // Make sure the capture thread stops using the mutex. + _captureThread.Finalize(); } MutexLock lock(&capture_lock_); @@ -357,11 +358,6 @@ bool VideoCaptureModuleV4L2::CaptureStarted() { return _captureStarted; } -void VideoCaptureModuleV4L2::CaptureThread(void* obj) { - VideoCaptureModuleV4L2* capture = static_cast(obj); - while (capture->CaptureProcess()) { - } -} bool VideoCaptureModuleV4L2::CaptureProcess() { int retVal = 0; fd_set rSet; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.h index ddb5d5ba8..fa06d72b8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/linux/video_capture_linux.h @@ -41,8 +41,7 @@ class VideoCaptureModuleV4L2 : public VideoCaptureImpl { bool AllocateVideoBuffers(); bool DeAllocateVideoBuffers(); - // TODO(pbos): Stop using unique_ptr and resetting the thread. 
- std::unique_ptr _captureThread; + rtc::PlatformThread _captureThread; Mutex capture_lock_; bool quit_ RTC_GUARDED_BY(capture_lock_); int32_t _deviceId; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h index 8d2a8f551..0f60092d7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h @@ -13,7 +13,6 @@ #include "api/video/video_rotation.h" #include "api/video/video_sink_interface.h" -#include "modules/include/module.h" #include "modules/video_capture/video_capture_defines.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc new file mode 100644 index 000000000..b15443c56 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/codecs/av1/av1_svc_config.h" + +#include +#include +#include + +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +bool SetAv1SvcConfig(VideoCodec& video_codec) { + RTC_DCHECK_EQ(video_codec.codecType, kVideoCodecAV1); + + if (video_codec.ScalabilityMode().empty()) { + RTC_LOG(LS_INFO) << "No scalability mode set."; + return false; + } + std::unique_ptr structure = + CreateScalabilityStructure(video_codec.ScalabilityMode()); + if (structure == nullptr) { + RTC_LOG(LS_INFO) << "Failed to create structure " + << video_codec.ScalabilityMode(); + return false; + } + ScalableVideoController::StreamLayersConfig info = structure->StreamConfig(); + for (int sl_idx = 0; sl_idx < info.num_spatial_layers; ++sl_idx) { + SpatialLayer& spatial_layer = video_codec.spatialLayers[sl_idx]; + spatial_layer.width = video_codec.width * info.scaling_factor_num[sl_idx] / + info.scaling_factor_den[sl_idx]; + spatial_layer.height = video_codec.height * + info.scaling_factor_num[sl_idx] / + info.scaling_factor_den[sl_idx]; + spatial_layer.maxFramerate = video_codec.maxFramerate; + spatial_layer.numberOfTemporalLayers = info.num_temporal_layers; + spatial_layer.active = true; + } + + if (info.num_spatial_layers == 1) { + SpatialLayer& spatial_layer = video_codec.spatialLayers[0]; + spatial_layer.minBitrate = video_codec.minBitrate; + spatial_layer.maxBitrate = video_codec.maxBitrate; + spatial_layer.targetBitrate = + (video_codec.minBitrate + video_codec.maxBitrate) / 2; + return true; + } + + for (int sl_idx = 0; sl_idx < info.num_spatial_layers; ++sl_idx) { + SpatialLayer& spatial_layer = video_codec.spatialLayers[sl_idx]; + // minBitrate and maxBitrate formulas are copied from vp9 settings and + // are not yet tuned for av1. 
+ const int num_pixels = spatial_layer.width * spatial_layer.height; + int min_bitrate_kbps = (600.0 * std::sqrt(num_pixels) - 95'000.0) / 1000.0; + spatial_layer.minBitrate = std::max(min_bitrate_kbps, 20); + spatial_layer.maxBitrate = 50 + static_cast(1.6 * num_pixels / 1000.0); + spatial_layer.targetBitrate = + (spatial_layer.minBitrate + spatial_layer.maxBitrate) / 2; + } + return true; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.h new file mode 100644 index 000000000..15d94e03a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.h @@ -0,0 +1,22 @@ +/* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_CODECS_AV1_AV1_SVC_CONFIG_H_ +#define MODULES_VIDEO_CODING_CODECS_AV1_AV1_SVC_CONFIG_H_ + +#include "api/video_codecs/video_codec.h" + +namespace webrtc { + +// Fills `video_codec.spatialLayers` using other members. 
+bool SetAv1SvcConfig(VideoCodec& video_codec); + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_CODECS_AV1_AV1_SVC_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc index bedb51937..c187c7202 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder.cc @@ -53,6 +53,7 @@ class LibaomAv1Decoder final : public VideoDecoder { int32_t Release() override; + DecoderInfo GetDecoderInfo() const override; const char* ImplementationName() const override; private: @@ -182,6 +183,13 @@ int32_t LibaomAv1Decoder::Release() { return WEBRTC_VIDEO_CODEC_OK; } +VideoDecoder::DecoderInfo LibaomAv1Decoder::GetDecoderInfo() const { + DecoderInfo info; + info.implementation_name = "libaom"; + info.is_hardware_accelerated = false; + return info; +} + const char* LibaomAv1Decoder::ImplementationName() const { return "libaom"; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc index c1accad55..8c82476b7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -41,9 +41,9 @@ namespace { // Encoder configuration parameters constexpr int kQpMin = 10; -constexpr int kUsageProfile = 1; // 0 = good quality; 1 = real-time. -constexpr int kMinQindex = 58; // Min qindex threshold for QP scaling. -constexpr int kMaxQindex = 180; // Max qindex threshold for QP scaling. +constexpr int kUsageProfile = AOM_USAGE_REALTIME; +constexpr int kMinQindex = 145; // Min qindex threshold for QP scaling. +constexpr int kMaxQindex = 205; // Max qindex threshold for QP scaling. 
constexpr int kBitDepth = 8; constexpr int kLagInFrames = 0; // No look ahead. constexpr int kRtpTicksPerSecond = 90000; @@ -54,18 +54,27 @@ constexpr float kMinimumFrameRate = 1.0; int GetCpuSpeed(int width, int height, int number_of_cores) { // For smaller resolutions, use lower speed setting (get some coding gain at // the cost of increased encoding complexity). - if (number_of_cores > 2 && width * height <= 320 * 180) + if (number_of_cores > 4 && width * height < 320 * 180) return 6; else if (width * height >= 1280 * 720) + return 9; + else if (width * height >= 640 * 360) return 8; else return 7; } +aom_superblock_size_t GetSuperblockSize(int width, int height, int threads) { + int resolution = width * height; + if (threads >= 4 && resolution >= 960 * 540 && resolution < 1920 * 1080) + return AOM_SUPERBLOCK_SIZE_64X64; + else + return AOM_SUPERBLOCK_SIZE_DYNAMIC; +} + class LibaomAv1Encoder final : public VideoEncoder { public: - explicit LibaomAv1Encoder( - std::unique_ptr svc_controller); + LibaomAv1Encoder(); ~LibaomAv1Encoder(); int InitEncode(const VideoCodec* codec_settings, @@ -84,6 +93,9 @@ class LibaomAv1Encoder final : public VideoEncoder { EncoderInfo GetEncoderInfo() const override; private: + // Determine number of encoder threads to use. + int NumberOfThreads(int width, int height, int number_of_cores); + bool SvcEnabled() const { return svc_params_.has_value(); } // Fills svc_params_ memeber value. Returns false on error. 
bool SetSvcParams(ScalableVideoController::StreamLayersConfig svc_config); @@ -129,14 +141,10 @@ int32_t VerifyCodecSettings(const VideoCodec& codec_settings) { return WEBRTC_VIDEO_CODEC_OK; } -LibaomAv1Encoder::LibaomAv1Encoder( - std::unique_ptr svc_controller) - : svc_controller_(std::move(svc_controller)), - inited_(false), +LibaomAv1Encoder::LibaomAv1Encoder() + : inited_(false), frame_for_encode_(nullptr), - encoded_image_callback_(nullptr) { - RTC_DCHECK(svc_controller_); -} + encoded_image_callback_(nullptr) {} LibaomAv1Encoder::~LibaomAv1Encoder() { Release(); @@ -170,11 +178,11 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, return result; } absl::string_view scalability_mode = encoder_settings_.ScalabilityMode(); - // When scalability_mode is not set, keep using svc_controller_ created - // at construction of the encoder. - if (!scalability_mode.empty()) { - svc_controller_ = CreateScalabilityStructure(scalability_mode); + if (scalability_mode.empty()) { + RTC_LOG(LS_WARNING) << "Scalability mode is not set, using 'NONE'."; + scalability_mode = "NONE"; } + svc_controller_ = CreateScalabilityStructure(scalability_mode); if (svc_controller_ == nullptr) { RTC_LOG(LS_WARNING) << "Failed to set scalability mode " << scalability_mode; @@ -187,7 +195,7 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, // Initialize encoder configuration structure with default values aom_codec_err_t ret = - aom_codec_enc_config_default(aom_codec_av1_cx(), &cfg_, 0); + aom_codec_enc_config_default(aom_codec_av1_cx(), &cfg_, kUsageProfile); if (ret != AOM_CODEC_OK) { RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret << " on aom_codec_enc_config_default."; @@ -197,7 +205,8 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, // Overwrite default config with input encoder settings & RTC-relevant values. 
cfg_.g_w = encoder_settings_.width; cfg_.g_h = encoder_settings_.height; - cfg_.g_threads = settings.number_of_cores; + cfg_.g_threads = + NumberOfThreads(cfg_.g_w, cfg_.g_h, settings.number_of_cores); cfg_.g_timebase.num = 1; cfg_.g_timebase.den = kRtpTicksPerSecond; cfg_.rc_target_bitrate = encoder_settings_.maxBitrate; // kilobits/sec. @@ -284,13 +293,13 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, << " on control AV1E_SET_MAX_INTRA_BITRATE_PCT."; return WEBRTC_VIDEO_CODEC_ERROR; } - ret = aom_codec_control(&ctx_, AV1E_SET_COEFF_COST_UPD_FREQ, 2); + ret = aom_codec_control(&ctx_, AV1E_SET_COEFF_COST_UPD_FREQ, 3); if (ret != AOM_CODEC_OK) { RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret << " on control AV1E_SET_COEFF_COST_UPD_FREQ."; return WEBRTC_VIDEO_CODEC_ERROR; } - ret = aom_codec_control(&ctx_, AV1E_SET_MODE_COST_UPD_FREQ, 2); + ret = aom_codec_control(&ctx_, AV1E_SET_MODE_COST_UPD_FREQ, 3); if (ret != AOM_CODEC_OK) { RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret << " on control AV1E_SET_MODE_COST_UPD_FREQ."; @@ -303,9 +312,138 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, return WEBRTC_VIDEO_CODEC_ERROR; } + if (cfg_.g_threads == 4 && cfg_.g_w == 640 && + (cfg_.g_h == 360 || cfg_.g_h == 480)) { + ret = aom_codec_control(&ctx_, AV1E_SET_TILE_ROWS, + static_cast(log2(cfg_.g_threads))); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_TILE_ROWS."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + } else { + ret = aom_codec_control(&ctx_, AV1E_SET_TILE_COLUMNS, + static_cast(log2(cfg_.g_threads))); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_TILE_COLUMNS."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + } + + ret = aom_codec_control(&ctx_, AV1E_SET_ROW_MT, 1); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << 
"LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ROW_MT."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_OBMC, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_OBMC."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_NOISE_SENSITIVITY, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_NOISE_SENSITIVITY."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_WARPED_MOTION, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_WARPED_MOTION."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_GLOBAL_MOTION, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_GLOBAL_MOTION."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_REF_FRAME_MVS, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_REF_FRAME_MVS."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = + aom_codec_control(&ctx_, AV1E_SET_SUPERBLOCK_SIZE, + GetSuperblockSize(cfg_.g_w, cfg_.g_h, cfg_.g_threads)); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_SUPERBLOCK_SIZE."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_CFL_INTRA, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_CFL_INTRA."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = 
aom_codec_control(&ctx_, AV1E_SET_ENABLE_SMOOTH_INTRA, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_SMOOTH_INTRA."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_ANGLE_DELTA, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_ANGLE_DELTA."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_ENABLE_FILTER_INTRA, 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AV1E_SET_ENABLE_FILTER_INTRA."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + + ret = aom_codec_control(&ctx_, AV1E_SET_INTRA_DEFAULT_TX_ONLY, 1); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) + << "LibaomAv1Encoder::EncodeInit returned " << ret + << " on control AOM_CTRL_AV1E_SET_INTRA_DEFAULT_TX_ONLY."; + return WEBRTC_VIDEO_CODEC_ERROR; + } + return WEBRTC_VIDEO_CODEC_OK; } +int LibaomAv1Encoder::NumberOfThreads(int width, + int height, + int number_of_cores) { + // Keep the number of encoder threads equal to the possible number of + // column/row tiles, which is (1, 2, 4, 8). See comments below for + // AV1E_SET_TILE_COLUMNS/ROWS. + if (width * height >= 640 * 360 && number_of_cores > 4) { + return 4; + } else if (width * height >= 320 * 180 && number_of_cores > 2) { + return 2; + } else { +// Use 2 threads for low res on ARM. +#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \ + defined(WEBRTC_ANDROID) + if (width * height >= 320 * 180 && number_of_cores > 2) { + return 2; + } +#endif + // 1 thread less than VGA. + return 1; + } +} + bool LibaomAv1Encoder::SetSvcParams( ScalableVideoController::StreamLayersConfig svc_config) { bool svc_enabled = @@ -440,9 +578,22 @@ int32_t LibaomAv1Encoder::Encode( // Convert input frame to I420, if needed. 
VideoFrame prepped_input_frame = frame; if (prepped_input_frame.video_frame_buffer()->type() != - VideoFrameBuffer::Type::kI420) { + VideoFrameBuffer::Type::kI420 && + prepped_input_frame.video_frame_buffer()->type() != + VideoFrameBuffer::Type::kI420A) { rtc::scoped_refptr converted_buffer( prepped_input_frame.video_frame_buffer()->ToI420()); + // The buffer should now be a mapped I420 or I420A format, but some buffer + // implementations incorrectly return the wrong buffer format, such as + // kNative. As a workaround to this, we perform ToI420() a second time. + // TODO(https://crbug.com/webrtc/12602): When Android buffers have a correct + // ToI420() implementaion, remove his workaround. + if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 && + converted_buffer->type() != VideoFrameBuffer::Type::kI420A) { + converted_buffer = converted_buffer->ToI420(); + RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 || + converted_buffer->type() == VideoFrameBuffer::Type::kI420A); + } prepped_input_frame = VideoFrame(converted_buffer, frame.timestamp(), frame.render_time_ms(), frame.rotation()); } @@ -472,6 +623,15 @@ int32_t LibaomAv1Encoder::Encode( if (SvcEnabled()) { SetSvcLayerId(layer_frame); SetSvcRefFrameConfig(layer_frame); + + aom_codec_err_t ret = + aom_codec_control(&ctx_, AV1E_SET_ERROR_RESILIENT_MODE, + layer_frame.TemporalId() > 0 ? 1 : 0); + if (ret != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "LibaomAv1Encoder::Encode returned " << ret + << " on control AV1E_SET_ERROR_RESILIENT_MODE."; + return WEBRTC_VIDEO_CODEC_ERROR; + } } // Encode a frame. @@ -575,15 +735,8 @@ void LibaomAv1Encoder::SetRates(const RateControlParameters& parameters) { return; } - // Check input target bit rate value. 
- uint32_t rc_target_bitrate_kbps = parameters.bitrate.get_sum_kbps(); - if (encoder_settings_.maxBitrate > 0) - RTC_DCHECK_LE(rc_target_bitrate_kbps, encoder_settings_.maxBitrate); - RTC_DCHECK_GE(rc_target_bitrate_kbps, encoder_settings_.minBitrate); - svc_controller_->OnRatesUpdated(parameters.bitrate); - // Set target bit rate. - cfg_.rc_target_bitrate = rc_target_bitrate_kbps; + cfg_.rc_target_bitrate = parameters.bitrate.get_sum_kbps(); if (SvcEnabled()) { for (int sid = 0; sid < svc_params_->number_spatial_layers; ++sid) { @@ -623,6 +776,15 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { info.is_hardware_accelerated = false; info.scaling_settings = VideoEncoder::ScalingSettings(kMinQindex, kMaxQindex); info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420}; + if (SvcEnabled()) { + for (int sid = 0; sid < svc_params_->number_spatial_layers; ++sid) { + info.fps_allocation[sid].resize(svc_params_->number_temporal_layers); + for (int tid = 0; tid < svc_params_->number_temporal_layers; ++tid) { + info.fps_allocation[sid][tid] = + encoder_settings_.maxFramerate / svc_params_->framerate_factor[tid]; + } + } + } return info; } @@ -631,13 +793,7 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { const bool kIsLibaomAv1EncoderSupported = true; std::unique_ptr CreateLibaomAv1Encoder() { - return std::make_unique( - std::make_unique()); -} - -std::unique_ptr CreateLibaomAv1Encoder( - std::unique_ptr svc_controller) { - return std::make_unique(std::move(svc_controller)); + return std::make_unique(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h index 04a2b65f5..4b0ee28d4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h @@ -14,15 
+14,12 @@ #include "absl/base/attributes.h" #include "api/video_codecs/video_encoder.h" -#include "modules/video_coding/svc/scalable_video_controller.h" namespace webrtc { ABSL_CONST_INIT extern const bool kIsLibaomAv1EncoderSupported; std::unique_ptr CreateLibaomAv1Encoder(); -std::unique_ptr CreateLibaomAv1Encoder( - std::unique_ptr controller); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc index be5b031e8..016d0aa53 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc @@ -45,11 +45,11 @@ bool IsH264CodecSupported() { } // namespace -SdpVideoFormat CreateH264Format(H264::Profile profile, - H264::Level level, +SdpVideoFormat CreateH264Format(H264Profile profile, + H264Level level, const std::string& packetization_mode) { const absl::optional profile_string = - H264::ProfileLevelIdToString(H264::ProfileLevelId(profile, level)); + H264ProfileLevelIdToString(H264ProfileLevelId(profile, level)); RTC_CHECK(profile_string); return SdpVideoFormat( cricket::kH264CodecName, @@ -76,12 +76,14 @@ std::vector SupportedH264Codecs() { // // We support both packetization modes 0 (mandatory) and 1 (optional, // preferred). 
- return { - CreateH264Format(H264::kProfileBaseline, H264::kLevel3_1, "1"), - CreateH264Format(H264::kProfileBaseline, H264::kLevel3_1, "0"), - CreateH264Format(H264::kProfileConstrainedBaseline, H264::kLevel3_1, "1"), - CreateH264Format(H264::kProfileConstrainedBaseline, H264::kLevel3_1, - "0")}; + return {CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, + "1"), + CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, + "0"), + CreateH264Format(H264Profile::kProfileConstrainedBaseline, + H264Level::kLevel3_1, "1"), + CreateH264Format(H264Profile::kProfileConstrainedBaseline, + H264Level::kLevel3_1, "0")}; } std::unique_ptr H264Encoder::Create( diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc index 9002b8746..6f37b52fd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -32,7 +32,6 @@ extern "C" { #include "common_video/include/video_frame_buffer.h" #include "modules/video_coding/codecs/h264/h264_color_space.h" #include "rtc_base/checks.h" -#include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" @@ -294,20 +293,17 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, // the input one. RTC_DCHECK_EQ(av_frame_->reordered_opaque, frame_timestamp_us); - absl::optional qp; // TODO(sakal): Maybe it is possible to get QP directly from FFmpeg. 
- h264_bitstream_parser_.ParseBitstream(input_image.data(), input_image.size()); - int qp_int; - if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) { - qp.emplace(qp_int); - } + h264_bitstream_parser_.ParseBitstream(input_image); + absl::optional qp = h264_bitstream_parser_.GetLastSliceQp(); // Obtain the |video_frame| containing the decoded image. VideoFrame* input_frame = static_cast(av_buffer_get_opaque(av_frame_->buf[0])); RTC_DCHECK(input_frame); - const webrtc::I420BufferInterface* i420_buffer = - input_frame->video_frame_buffer()->GetI420(); + rtc::scoped_refptr frame_buffer = + input_frame->video_frame_buffer(); + const webrtc::I420BufferInterface* i420_buffer = frame_buffer->GetI420(); // When needed, FFmpeg applies cropping by moving plane pointers and adjusting // frame width/height. Ensure that cropped buffers lie within the allocated @@ -334,7 +330,9 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], - av_frame_->linesize[kVPlaneIndex], rtc::KeepRefUntilDone(i420_buffer)); + av_frame_->linesize[kVPlaneIndex], + // To keep reference alive. 
+ [frame_buffer] {}); if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { const I420BufferInterface* cropped_i420 = cropped_buffer->GetI420(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc index ea784c19e..949c51baf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -373,8 +373,19 @@ int32_t H264EncoderImpl::Encode( return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - rtc::scoped_refptr frame_buffer = + rtc::scoped_refptr frame_buffer = input_frame.video_frame_buffer()->ToI420(); + // The buffer should now be a mapped I420 or I420A format, but some buffer + // implementations incorrectly return the wrong buffer format, such as + // kNative. As a workaround to this, we perform ToI420() a second time. + // TODO(https://crbug.com/webrtc/12602): When Android buffers have a correct + // ToI420() implementaion, remove his workaround. + if (frame_buffer->type() != VideoFrameBuffer::Type::kI420 && + frame_buffer->type() != VideoFrameBuffer::Type::kI420A) { + frame_buffer = frame_buffer->ToI420(); + RTC_CHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 || + frame_buffer->type() == VideoFrameBuffer::Type::kI420A); + } bool send_key_frame = false; for (size_t i = 0; i < configurations_.size(); ++i) { @@ -481,9 +492,9 @@ int32_t H264EncoderImpl::Encode( // |encoded_images_[i]._length| == 0. if (encoded_images_[i].size() > 0) { // Parse QP. - h264_bitstream_parser_.ParseBitstream(encoded_images_[i].data(), - encoded_images_[i].size()); - h264_bitstream_parser_.GetLastSliceQp(&encoded_images_[i].qp_); + h264_bitstream_parser_.ParseBitstream(encoded_images_[i]); + encoded_images_[i].qp_ = + h264_bitstream_parser_.GetLastSliceQp().value_or(-1); // Deliver encoded image. 
CodecSpecificInfo codec_specific; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h index 70ca81798..1f8f79606 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h @@ -27,8 +27,8 @@ struct SdpVideoFormat; // Creates an H264 SdpVideoFormat entry with specified paramters. RTC_EXPORT SdpVideoFormat -CreateH264Format(H264::Profile profile, - H264::Level level, +CreateH264Format(H264Profile profile, + H264Level level, const std::string& packetization_mode); // Set to disable the H.264 encoder/decoder implementations that are provided if diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.cc similarity index 51% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.cc index 7bf611715..b24922f92 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/codecs/vp8/libvpx_interface.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include @@ -16,10 +16,10 @@ namespace webrtc { namespace { -class LibvpxVp8Facade : public LibvpxInterface { +class LibvpxFacade : public LibvpxInterface { public: - LibvpxVp8Facade() = default; - ~LibvpxVp8Facade() override = default; + LibvpxFacade() = default; + ~LibvpxFacade() override = default; vpx_image_t* img_alloc(vpx_image_t* img, vpx_img_fmt_t fmt, @@ -93,17 +93,45 @@ class LibvpxVp8Facade : public LibvpxInterface { return vpx_codec_control(ctx, VP8E_SET_ARNR_MAXFRAMES, param); case VP8E_SET_ARNR_STRENGTH: return vpx_codec_control(ctx, VP8E_SET_ARNR_STRENGTH, param); - case VP8E_SET_ARNR_TYPE: - RTC_NOTREACHED() << "VP8E_SET_ARNR_TYPE is deprecated."; - return VPX_CODEC_UNSUP_FEATURE; case VP8E_SET_CQ_LEVEL: return vpx_codec_control(ctx, VP8E_SET_CQ_LEVEL, param); case VP8E_SET_MAX_INTRA_BITRATE_PCT: return vpx_codec_control(ctx, VP8E_SET_MAX_INTRA_BITRATE_PCT, param); + case VP9E_SET_MAX_INTER_BITRATE_PCT: + return vpx_codec_control(ctx, VP9E_SET_MAX_INTER_BITRATE_PCT, param); case VP8E_SET_GF_CBR_BOOST_PCT: return vpx_codec_control(ctx, VP8E_SET_GF_CBR_BOOST_PCT, param); case VP8E_SET_SCREEN_CONTENT_MODE: return vpx_codec_control(ctx, VP8E_SET_SCREEN_CONTENT_MODE, param); + case VP9E_SET_GF_CBR_BOOST_PCT: + return vpx_codec_control(ctx, VP9E_SET_GF_CBR_BOOST_PCT, param); + case VP9E_SET_LOSSLESS: + return vpx_codec_control(ctx, VP9E_SET_LOSSLESS, param); + case VP9E_SET_FRAME_PARALLEL_DECODING: + return vpx_codec_control(ctx, VP9E_SET_FRAME_PARALLEL_DECODING, param); + case VP9E_SET_AQ_MODE: + return vpx_codec_control(ctx, VP9E_SET_AQ_MODE, param); + case VP9E_SET_FRAME_PERIODIC_BOOST: + return vpx_codec_control(ctx, VP9E_SET_FRAME_PERIODIC_BOOST, param); + case VP9E_SET_NOISE_SENSITIVITY: + return vpx_codec_control(ctx, VP9E_SET_NOISE_SENSITIVITY, param); + case VP9E_SET_MIN_GF_INTERVAL: + return 
vpx_codec_control(ctx, VP9E_SET_MIN_GF_INTERVAL, param); + case VP9E_SET_MAX_GF_INTERVAL: + return vpx_codec_control(ctx, VP9E_SET_MAX_GF_INTERVAL, param); + case VP9E_SET_TARGET_LEVEL: + return vpx_codec_control(ctx, VP9E_SET_TARGET_LEVEL, param); + case VP9E_SET_ROW_MT: + return vpx_codec_control(ctx, VP9E_SET_ROW_MT, param); + case VP9E_ENABLE_MOTION_VECTOR_UNIT_TEST: + return vpx_codec_control(ctx, VP9E_ENABLE_MOTION_VECTOR_UNIT_TEST, + param); + case VP9E_SET_SVC_INTER_LAYER_PRED: + return vpx_codec_control(ctx, VP9E_SET_SVC_INTER_LAYER_PRED, param); + case VP9E_SET_SVC_GF_TEMPORAL_REF: + return vpx_codec_control(ctx, VP9E_SET_SVC_GF_TEMPORAL_REF, param); + case VP9E_SET_POSTENCODE_DROP: + return vpx_codec_control(ctx, VP9E_SET_POSTENCODE_DROP, param); default: RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; } @@ -118,14 +146,41 @@ class LibvpxVp8Facade : public LibvpxInterface { return vpx_codec_control(ctx, VP8E_SET_FRAME_FLAGS, param); case VP8E_SET_TEMPORAL_LAYER_ID: return vpx_codec_control(ctx, VP8E_SET_TEMPORAL_LAYER_ID, param); + case VP9E_SET_SVC: + return vpx_codec_control(ctx, VP9E_SET_SVC, param); case VP8E_SET_CPUUSED: return vpx_codec_control(ctx, VP8E_SET_CPUUSED, param); case VP8E_SET_TOKEN_PARTITIONS: return vpx_codec_control(ctx, VP8E_SET_TOKEN_PARTITIONS, param); case VP8E_SET_TUNING: return vpx_codec_control(ctx, VP8E_SET_TUNING, param); + case VP9E_SET_TILE_COLUMNS: + return vpx_codec_control(ctx, VP9E_SET_TILE_COLUMNS, param); + case VP9E_SET_TILE_ROWS: + return vpx_codec_control(ctx, VP9E_SET_TILE_ROWS, param); + case VP9E_SET_TPL: + return vpx_codec_control(ctx, VP9E_SET_TPL, param); + case VP9E_SET_ALT_REF_AQ: + return vpx_codec_control(ctx, VP9E_SET_ALT_REF_AQ, param); + case VP9E_SET_TUNE_CONTENT: + return vpx_codec_control(ctx, VP9E_SET_TUNE_CONTENT, param); + case VP9E_SET_COLOR_SPACE: + return vpx_codec_control(ctx, VP9E_SET_COLOR_SPACE, param); + case VP9E_SET_COLOR_RANGE: + return vpx_codec_control(ctx, 
VP9E_SET_COLOR_RANGE, param); + case VP9E_SET_DELTA_Q_UV: + return vpx_codec_control(ctx, VP9E_SET_DELTA_Q_UV, param); + case VP9E_SET_DISABLE_OVERSHOOT_MAXQ_CBR: + return vpx_codec_control(ctx, VP9E_SET_DISABLE_OVERSHOOT_MAXQ_CBR, + param); + case VP9E_SET_DISABLE_LOOPFILTER: + return vpx_codec_control(ctx, VP9E_SET_DISABLE_LOOPFILTER, param); default: + if (param >= 0) { + // Might be intended for uint32_t but int literal used, try fallback. + return codec_control(ctx, ctrl_id, static_cast(param)); + } RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; } return VPX_CODEC_ERROR; @@ -139,6 +194,10 @@ class LibvpxVp8Facade : public LibvpxInterface { return vpx_codec_control(ctx, VP8E_GET_LAST_QUANTIZER, param); case VP8E_GET_LAST_QUANTIZER_64: return vpx_codec_control(ctx, VP8E_GET_LAST_QUANTIZER_64, param); + case VP9E_SET_RENDER_SIZE: + return vpx_codec_control(ctx, VP9E_SET_RENDER_SIZE, param); + case VP9E_GET_LEVEL: + return vpx_codec_control(ctx, VP9E_GET_LEVEL, param); default: RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; } @@ -151,6 +210,8 @@ class LibvpxVp8Facade : public LibvpxInterface { switch (ctrl_id) { case VP8E_SET_ROI_MAP: return vpx_codec_control(ctx, VP8E_SET_ROI_MAP, param); + case VP9E_SET_ROI_MAP: + return vpx_codec_control(ctx, VP9E_SET_ROI_MAP, param); default: RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; } @@ -163,6 +224,8 @@ class LibvpxVp8Facade : public LibvpxInterface { switch (ctrl_id) { case VP8E_SET_ACTIVEMAP: return vpx_codec_control(ctx, VP8E_SET_ACTIVEMAP, param); + case VP9E_GET_ACTIVEMAP: + return vpx_codec_control(ctx, VP8E_SET_ACTIVEMAP, param); default: RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; } @@ -181,6 +244,98 @@ class LibvpxVp8Facade : public LibvpxInterface { return VPX_CODEC_ERROR; } + vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_extra_cfg_t* param) const override { + switch (ctrl_id) { + case 
VP9E_SET_SVC_PARAMETERS: + return vpx_codec_control_(ctx, VP9E_SET_SVC_PARAMETERS, param); + default: + RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; + } + return VPX_CODEC_ERROR; + } + + vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_frame_drop_t* param) const override { + switch (ctrl_id) { + case VP9E_SET_SVC_FRAME_DROP_LAYER: + return vpx_codec_control_(ctx, VP9E_SET_SVC_FRAME_DROP_LAYER, param); + default: + RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; + } + return VPX_CODEC_ERROR; + } + + vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + void* param) const override { + switch (ctrl_id) { + case VP9E_SET_SVC_PARAMETERS: + return vpx_codec_control_(ctx, VP9E_SET_SVC_PARAMETERS, param); + case VP9E_REGISTER_CX_CALLBACK: + return vpx_codec_control_(ctx, VP9E_REGISTER_CX_CALLBACK, param); + default: + RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; + } + return VPX_CODEC_ERROR; + } + + vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_layer_id_t* param) const override { + switch (ctrl_id) { + case VP9E_SET_SVC_LAYER_ID: + return vpx_codec_control_(ctx, VP9E_SET_SVC_LAYER_ID, param); + case VP9E_GET_SVC_LAYER_ID: + return vpx_codec_control_(ctx, VP9E_GET_SVC_LAYER_ID, param); + default: + RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; + } + return VPX_CODEC_ERROR; + } + + vpx_codec_err_t codec_control( + vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_ref_frame_config_t* param) const override { + switch (ctrl_id) { + case VP9E_SET_SVC_REF_FRAME_CONFIG: + return vpx_codec_control_(ctx, VP9E_SET_SVC_REF_FRAME_CONFIG, param); + case VP9E_GET_SVC_REF_FRAME_CONFIG: + return vpx_codec_control_(ctx, VP9E_GET_SVC_REF_FRAME_CONFIG, param); + default: + RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; + } + return VPX_CODEC_ERROR; + } + + vpx_codec_err_t 
codec_control( + vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_spatial_layer_sync_t* param) const override { + switch (ctrl_id) { + case VP9E_SET_SVC_SPATIAL_LAYER_SYNC: + return vpx_codec_control_(ctx, VP9E_SET_SVC_SPATIAL_LAYER_SYNC, param); + default: + RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; + } + return VPX_CODEC_ERROR; + } + + vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_rc_funcs_t* param) const override { + switch (ctrl_id) { + case VP9E_SET_EXTERNAL_RATE_CONTROL: + return vpx_codec_control_(ctx, VP9E_SET_EXTERNAL_RATE_CONTROL, param); + default: + RTC_NOTREACHED() << "Unsupported libvpx ctrl_id: " << ctrl_id; + } + return VPX_CODEC_ERROR; + } + vpx_codec_err_t codec_encode(vpx_codec_ctx_t* ctx, const vpx_image_t* img, vpx_codec_pts_t pts, @@ -199,12 +354,20 @@ class LibvpxVp8Facade : public LibvpxInterface { const char* codec_error_detail(vpx_codec_ctx_t* ctx) const override { return ::vpx_codec_error_detail(ctx); } + + const char* codec_error(vpx_codec_ctx_t* ctx) const override { + return ::vpx_codec_error(ctx); + } + + const char* codec_err_to_string(vpx_codec_err_t err) const override { + return ::vpx_codec_err_to_string(err); + } }; } // namespace -std::unique_ptr LibvpxInterface::CreateEncoder() { - return std::make_unique(); +std::unique_ptr LibvpxInterface::Create() { + return std::make_unique(); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.h similarity index 70% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.h index 3da38ea24..3dea24dd6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.h +++ 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_INTERFACE_H_ -#define MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_INTERFACE_H_ +#ifndef MODULES_VIDEO_CODING_CODECS_INTERFACE_LIBVPX_INTERFACE_H_ +#define MODULES_VIDEO_CODING_CODECS_INTERFACE_LIBVPX_INTERFACE_H_ #include @@ -22,7 +22,7 @@ namespace webrtc { -// This interface is a proxy to to the static libvpx functions, so that they +// This interface is a proxy to the static libvpx functions, so that they // can be mocked for testing. Currently supports VP8 encoder functions. // TODO(sprang): Extend this to VP8 decoder and VP9 encoder/decoder too. class LibvpxInterface { @@ -81,7 +81,29 @@ class LibvpxInterface { virtual vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, vp8e_enc_control_id ctrl_id, vpx_scaling_mode* param) const = 0; - + virtual vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_extra_cfg_t* param) const = 0; + virtual vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_frame_drop_t* param) const = 0; + virtual vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + void* param) const = 0; + virtual vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_layer_id_t* param) const = 0; + virtual vpx_codec_err_t codec_control( + vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_ref_frame_config_t* param) const = 0; + virtual vpx_codec_err_t codec_control( + vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_svc_spatial_layer_sync_t* param) const = 0; + virtual vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx, + vp8e_enc_control_id ctrl_id, + vpx_rc_funcs_t* param) const = 0; virtual vpx_codec_err_t codec_encode(vpx_codec_ctx_t* ctx, const vpx_image_t* img, 
vpx_codec_pts_t pts, @@ -94,11 +116,13 @@ class LibvpxInterface { vpx_codec_iter_t* iter) const = 0; virtual const char* codec_error_detail(vpx_codec_ctx_t* ctx) const = 0; + virtual const char* codec_error(vpx_codec_ctx_t* ctx) const = 0; + virtual const char* codec_err_to_string(vpx_codec_err_t err) const = 0; // Returns interface wrapping the actual libvpx functions. - static std::unique_ptr CreateEncoder(); + static std::unique_ptr Create(); }; } // namespace webrtc -#endif // MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_INTERFACE_H_ +#endif // MODULES_VIDEO_CODING_CODECS_INTERFACE_LIBVPX_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/mock_libvpx_interface.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/mock_libvpx_interface.h new file mode 100644 index 000000000..6dfe733dd --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/mock_libvpx_interface.h @@ -0,0 +1,147 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_LIBVPX_INTERFACE_H_ +#define MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_LIBVPX_INTERFACE_H_ + +#include "modules/video_coding/codecs/interface/libvpx_interface.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +class MockLibvpxInterface : public LibvpxInterface { + public: + MOCK_METHOD( + vpx_image_t*, + img_alloc, + (vpx_image_t*, vpx_img_fmt_t, unsigned int, unsigned int, unsigned int), + (const, override)); + MOCK_METHOD(vpx_image_t*, + img_wrap, + (vpx_image_t*, + vpx_img_fmt_t, + unsigned int, + unsigned int, + unsigned int, + unsigned char*), + (const, override)); + MOCK_METHOD(void, img_free, (vpx_image_t * img), (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_enc_config_set, + (vpx_codec_ctx_t*, const vpx_codec_enc_cfg_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_enc_config_default, + (vpx_codec_iface_t*, vpx_codec_enc_cfg_t*, unsigned int), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_enc_init, + (vpx_codec_ctx_t*, + vpx_codec_iface_t*, + const vpx_codec_enc_cfg_t*, + vpx_codec_flags_t), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_enc_init_multi, + (vpx_codec_ctx_t*, + vpx_codec_iface_t*, + vpx_codec_enc_cfg_t*, + int, + vpx_codec_flags_t, + vpx_rational_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_destroy, + (vpx_codec_ctx_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, uint32_t), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, int), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, int*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_roi_map*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + 
(vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_active_map*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_scaling_mode*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_svc_extra_cfg_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_svc_frame_drop_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, void*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_svc_layer_id_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, + vp8e_enc_control_id, + vpx_svc_ref_frame_config_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, + vp8e_enc_control_id, + vpx_svc_spatial_layer_sync_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_control, + (vpx_codec_ctx_t*, vp8e_enc_control_id, vpx_rc_funcs_t*), + (const, override)); + MOCK_METHOD(vpx_codec_err_t, + codec_encode, + (vpx_codec_ctx_t*, + const vpx_image_t*, + vpx_codec_pts_t, + uint64_t, + vpx_enc_frame_flags_t, + uint64_t), + (const, override)); + MOCK_METHOD(const vpx_codec_cx_pkt_t*, + codec_get_cx_data, + (vpx_codec_ctx_t*, vpx_codec_iter_t*), + (const, override)); + MOCK_METHOD(const char*, + codec_error_detail, + (vpx_codec_ctx_t*), + (const, override)); + MOCK_METHOD(const char*, codec_error, (vpx_codec_ctx_t*), (const, override)); + MOCK_METHOD(const char*, + codec_err_to_string, + (vpx_codec_err_t), + (const, override)); +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_LIBVPX_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc index b48996cbc..8740884f5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc @@ -54,4 +54,12 @@ int AugmentedVideoFrameBuffer::height() const { rtc::scoped_refptr AugmentedVideoFrameBuffer::ToI420() { return video_frame_buffer_->ToI420(); } + +const I420BufferInterface* AugmentedVideoFrameBuffer::GetI420() const { + // TODO(https://crbug.com/webrtc/12021): When AugmentedVideoFrameBuffer is + // updated to implement the buffer interfaces of relevant + // VideoFrameBuffer::Types, stop overriding GetI420() as a workaround to + // AugmentedVideoFrameBuffer not being the type that is returned by type(). + return video_frame_buffer_->GetI420(); +} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h index c45ab3b2a..d711cd07d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h @@ -45,6 +45,12 @@ class AugmentedVideoFrameBuffer : public VideoFrameBuffer { // Get the I140 Buffer from the underlying frame buffer rtc::scoped_refptr ToI420() final; + // Returns GetI420() of the underlying VideoFrameBuffer. + // TODO(hbos): AugmentedVideoFrameBuffer should not return a type (such as + // kI420) without also implementing that type's interface (i.e. + // I420BufferInterface). Either implement all possible Type's interfaces or + // return kNative. 
+ const I420BufferInterface* GetI420() const final; private: uint16_t augmenting_data_size_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc index 39c14e412..2332fcddf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc @@ -17,14 +17,8 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h" #include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h" -#include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" -namespace { -void KeepBufferRefs(rtc::scoped_refptr, - rtc::scoped_refptr) {} -} // anonymous namespace - namespace webrtc { class MultiplexDecoderAdapter::AdapterDecodedImageCallback @@ -250,12 +244,12 @@ void MultiplexDecoderAdapter::MergeAlphaImages( yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(), yuv_buffer->DataV(), yuv_buffer->StrideV(), alpha_buffer->DataY(), alpha_buffer->StrideY(), - rtc::Bind(&KeepBufferRefs, yuv_buffer, alpha_buffer)); + // To keep references alive. 
+ [yuv_buffer, alpha_buffer] {}); } if (supports_augmenting_data_) { - merged_buffer = rtc::scoped_refptr( - new rtc::RefCountedObject( - merged_buffer, std::move(augmenting_data), augmenting_data_length)); + merged_buffer = rtc::make_ref_counted( + merged_buffer, std::move(augmenting_data), augmenting_data_length); } VideoFrame merged_image = VideoFrame::Builder() diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc index 0fbbc4271..cf862a9eb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc @@ -18,7 +18,6 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "media/base/video_common.h" #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h" -#include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" namespace webrtc { @@ -164,20 +163,38 @@ int MultiplexEncoderAdapter::Encode( return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } + // The input image is forwarded as-is, unless it is a native buffer and + // |supports_augmented_data_| is true in which case we need to map it in order + // to access the underlying AugmentedVideoFrameBuffer. + VideoFrame forwarded_image = input_image; + if (supports_augmented_data_ && + forwarded_image.video_frame_buffer()->type() == + VideoFrameBuffer::Type::kNative) { + auto info = GetEncoderInfo(); + rtc::scoped_refptr mapped_buffer = + forwarded_image.video_frame_buffer()->GetMappedFrameBuffer( + info.preferred_pixel_formats); + if (!mapped_buffer) { + // Unable to map the buffer. 
+ return WEBRTC_VIDEO_CODEC_ERROR; + } + forwarded_image.set_video_frame_buffer(std::move(mapped_buffer)); + } + std::vector adjusted_frame_types; if (key_frame_interval_ > 0 && picture_index_ % key_frame_interval_ == 0) { adjusted_frame_types.push_back(VideoFrameType::kVideoFrameKey); } else { adjusted_frame_types.push_back(VideoFrameType::kVideoFrameDelta); } - const bool has_alpha = input_image.video_frame_buffer()->type() == + const bool has_alpha = forwarded_image.video_frame_buffer()->type() == VideoFrameBuffer::Type::kI420A; std::unique_ptr augmenting_data = nullptr; uint16_t augmenting_data_length = 0; AugmentedVideoFrameBuffer* augmented_video_frame_buffer = nullptr; if (supports_augmented_data_) { augmented_video_frame_buffer = static_cast( - input_image.video_frame_buffer().get()); + forwarded_image.video_frame_buffer().get()); augmenting_data_length = augmented_video_frame_buffer->GetAugmentingDataSize(); augmenting_data = @@ -192,7 +209,7 @@ int MultiplexEncoderAdapter::Encode( MutexLock lock(&mutex_); stashed_images_.emplace( std::piecewise_construct, - std::forward_as_tuple(input_image.timestamp()), + std::forward_as_tuple(forwarded_image.timestamp()), std::forward_as_tuple( picture_index_, has_alpha ? kAlphaCodecStreams : 1, std::move(augmenting_data), augmenting_data_length)); @@ -201,7 +218,8 @@ int MultiplexEncoderAdapter::Encode( ++picture_index_; // Encode YUV - int rv = encoders_[kYUVStream]->Encode(input_image, &adjusted_frame_types); + int rv = + encoders_[kYUVStream]->Encode(forwarded_image, &adjusted_frame_types); // If we do not receive an alpha frame, we send a single frame for this // |picture_index_|. The receiver will receive |frame_count| as 1 which @@ -210,24 +228,27 @@ int MultiplexEncoderAdapter::Encode( return rv; // Encode AXX - const I420ABufferInterface* yuva_buffer = + rtc::scoped_refptr frame_buffer = supports_augmented_data_ - ? 
augmented_video_frame_buffer->GetVideoFrameBuffer()->GetI420A() - : input_image.video_frame_buffer()->GetI420A(); + ? augmented_video_frame_buffer->GetVideoFrameBuffer() + : forwarded_image.video_frame_buffer(); + const I420ABufferInterface* yuva_buffer = frame_buffer->GetI420A(); rtc::scoped_refptr alpha_buffer = - WrapI420Buffer(input_image.width(), input_image.height(), + WrapI420Buffer(forwarded_image.width(), forwarded_image.height(), yuva_buffer->DataA(), yuva_buffer->StrideA(), multiplex_dummy_planes_.data(), yuva_buffer->StrideU(), multiplex_dummy_planes_.data(), yuva_buffer->StrideV(), - rtc::KeepRefUntilDone(input_image.video_frame_buffer())); - VideoFrame alpha_image = VideoFrame::Builder() - .set_video_frame_buffer(alpha_buffer) - .set_timestamp_rtp(input_image.timestamp()) - .set_timestamp_ms(input_image.render_time_ms()) - .set_rotation(input_image.rotation()) - .set_id(input_image.id()) - .set_packet_infos(input_image.packet_infos()) - .build(); + // To keep reference alive. + [frame_buffer] {}); + VideoFrame alpha_image = + VideoFrame::Builder() + .set_video_frame_buffer(alpha_buffer) + .set_timestamp_rtp(forwarded_image.timestamp()) + .set_timestamp_ms(forwarded_image.render_time_ms()) + .set_rotation(forwarded_image.rotation()) + .set_id(forwarded_image.id()) + .set_packet_infos(forwarded_image.packet_infos()) + .build(); rv = encoders_[kAXXStream]->Encode(alpha_image, &adjusted_frame_types); return rv; } @@ -303,9 +324,6 @@ EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage( PayloadStringToCodecType(associated_format_.name); image_component.encoded_image = encodedImage; - // If we don't already own the buffer, make a copy. 
- image_component.encoded_image.Retain(); - MutexLock lock(&mutex_); const auto& stashed_image_itr = stashed_images_.find(encodedImage.Timestamp()); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc index b5652593a..e2d9b1ebd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc @@ -27,10 +27,12 @@ namespace webrtc { DefaultTemporalLayers::PendingFrame::PendingFrame() = default; DefaultTemporalLayers::PendingFrame::PendingFrame( + uint32_t timestamp, bool expired, uint8_t updated_buffers_mask, const DependencyInfo& dependency_info) - : expired(expired), + : timestamp(timestamp), + expired(expired), updated_buffer_mask(updated_buffers_mask), dependency_info(dependency_info) {} @@ -96,8 +98,24 @@ uint8_t GetUpdatedBuffers(const Vp8FrameConfig& config) { } return flags; } + +size_t BufferToIndex(Vp8BufferReference buffer) { + switch (buffer) { + case Vp8FrameConfig::Vp8BufferReference::kLast: + return 0; + case Vp8FrameConfig::Vp8BufferReference::kGolden: + return 1; + case Vp8FrameConfig::Vp8BufferReference::kAltref: + return 2; + case Vp8FrameConfig::Vp8BufferReference::kNone: + RTC_CHECK_NOTREACHED(); + } +} + } // namespace +constexpr size_t DefaultTemporalLayers::kNumReferenceBuffers; + std::vector DefaultTemporalLayers::GetDependencyInfo(size_t num_layers) { // For indexing in the patterns described below (which temporal layers they @@ -225,10 +243,28 @@ DefaultTemporalLayers::GetDependencyInfo(size_t num_layers) { return {{"", {kNone, kNone, kNone}}}; } +std::bitset +DefaultTemporalLayers::DetermineStaticBuffers( + const std::vector& temporal_pattern) { + std::bitset buffers; + buffers.set(); + for (const DependencyInfo& info : temporal_pattern) { + uint8_t updated_buffers = 
GetUpdatedBuffers(info.frame_config); + + for (Vp8BufferReference buffer : kAllBuffers) { + if (static_cast(buffer) & updated_buffers) { + buffers.reset(BufferToIndex(buffer)); + } + } + } + return buffers; +} + DefaultTemporalLayers::DefaultTemporalLayers(int number_of_temporal_layers) : num_layers_(std::max(1, number_of_temporal_layers)), temporal_ids_(GetTemporalIds(num_layers_)), temporal_pattern_(GetDependencyInfo(num_layers_)), + is_static_buffer_(DetermineStaticBuffers(temporal_pattern_)), pattern_idx_(kUninitializedPatternIndex) { RTC_CHECK_GE(kMaxTemporalStreams, number_of_temporal_layers); RTC_CHECK_GE(number_of_temporal_layers, 0); @@ -238,25 +274,12 @@ DefaultTemporalLayers::DefaultTemporalLayers(int number_of_temporal_layers) // wrap at max(temporal_ids_.size(), temporal_pattern_.size()). RTC_DCHECK_LE(temporal_ids_.size(), temporal_pattern_.size()); -#if RTC_DCHECK_IS_ON - checker_ = TemporalLayersChecker::CreateTemporalLayersChecker( - Vp8TemporalLayersType::kFixedPattern, number_of_temporal_layers); -#endif + RTC_DCHECK( + checker_ = TemporalLayersChecker::CreateTemporalLayersChecker( + Vp8TemporalLayersType::kFixedPattern, number_of_temporal_layers)); // Always need to start with a keyframe, so pre-populate all frame counters. 
- for (Vp8BufferReference buffer : kAllBuffers) { - frames_since_buffer_refresh_[buffer] = 0; - } - - kf_buffers_ = {kAllBuffers.begin(), kAllBuffers.end()}; - for (const DependencyInfo& info : temporal_pattern_) { - uint8_t updated_buffers = GetUpdatedBuffers(info.frame_config); - - for (Vp8BufferReference buffer : kAllBuffers) { - if (static_cast(buffer) & updated_buffers) - kf_buffers_.erase(buffer); - } - } + frames_since_buffer_refresh_.fill(0); } DefaultTemporalLayers::~DefaultTemporalLayers() = default; @@ -340,12 +363,12 @@ bool DefaultTemporalLayers::IsSyncFrame(const Vp8FrameConfig& config) const { } if ((config.golden_buffer_flags & BufferFlags::kReference) && - kf_buffers_.find(Vp8BufferReference::kGolden) == kf_buffers_.end()) { + !is_static_buffer_[BufferToIndex(Vp8BufferReference::kGolden)]) { // Referencing a golden frame that contains a non-(base layer|key frame). return false; } if ((config.arf_buffer_flags & BufferFlags::kReference) && - kf_buffers_.find(Vp8BufferReference::kAltref) == kf_buffers_.end()) { + !is_static_buffer_[BufferToIndex(Vp8BufferReference::kAltref)]) { // Referencing an altref frame that contains a non-(base layer|key frame). return false; } @@ -372,8 +395,8 @@ Vp8FrameConfig DefaultTemporalLayers::NextFrameConfig(size_t stream_index, // Start of new pattern iteration, set up clear state by invalidating any // pending frames, so that we don't make an invalid reference to a buffer // containing data from a previous iteration. - for (auto& it : pending_frames_) { - it.second.expired = true; + for (auto& frame : pending_frames_) { + frame.expired = true; } } @@ -401,21 +424,19 @@ Vp8FrameConfig DefaultTemporalLayers::NextFrameConfig(size_t stream_index, // To prevent this data spill over into the next iteration, // the |pedning_frames_| map is reset in loops. If delay is constant, // the relative age should still be OK for the search order. 
- for (Vp8BufferReference buffer : kAllBuffers) { - ++frames_since_buffer_refresh_[buffer]; + for (size_t& n : frames_since_buffer_refresh_) { + ++n; } } // Add frame to set of pending frames, awaiting completion. - pending_frames_[timestamp] = - PendingFrame{false, GetUpdatedBuffers(tl_config), dependency_info}; + pending_frames_.emplace_back(timestamp, false, GetUpdatedBuffers(tl_config), + dependency_info); -#if RTC_DCHECK_IS_ON // Checker does not yet support encoder frame dropping, so validate flags // here before they can be dropped. // TODO(sprang): Update checker to support dropping. RTC_DCHECK(checker_->CheckTemporalConfig(first_frame, tl_config)); -#endif return tl_config; } @@ -426,10 +447,8 @@ void DefaultTemporalLayers::ValidateReferences(BufferFlags* flags, // if it also a dynamically updating one (buffers always just containing // keyframes are always safe to reference). if ((*flags & BufferFlags::kReference) && - kf_buffers_.find(ref) == kf_buffers_.end()) { - auto it = frames_since_buffer_refresh_.find(ref); - if (it == frames_since_buffer_refresh_.end() || - it->second >= pattern_idx_) { + !is_static_buffer_[BufferToIndex(ref)]) { + if (NumFramesSinceBufferRefresh(ref) >= pattern_idx_) { // No valid buffer state, or buffer contains frame that is older than the // current pattern. This reference is not valid, so remove it. 
*flags = static_cast(*flags & ~BufferFlags::kReference); @@ -446,17 +465,17 @@ void DefaultTemporalLayers::UpdateSearchOrder(Vp8FrameConfig* config) { if (config->last_buffer_flags & BufferFlags::kReference) { eligible_buffers.emplace_back( Vp8BufferReference::kLast, - frames_since_buffer_refresh_[Vp8BufferReference::kLast]); + NumFramesSinceBufferRefresh(Vp8BufferReference::kLast)); } if (config->golden_buffer_flags & BufferFlags::kReference) { eligible_buffers.emplace_back( Vp8BufferReference::kGolden, - frames_since_buffer_refresh_[Vp8BufferReference::kGolden]); + NumFramesSinceBufferRefresh(Vp8BufferReference::kGolden)); } if (config->arf_buffer_flags & BufferFlags::kReference) { eligible_buffers.emplace_back( Vp8BufferReference::kAltref, - frames_since_buffer_refresh_[Vp8BufferReference::kAltref]); + NumFramesSinceBufferRefresh(Vp8BufferReference::kAltref)); } std::sort(eligible_buffers.begin(), eligible_buffers.end(), @@ -476,6 +495,23 @@ void DefaultTemporalLayers::UpdateSearchOrder(Vp8FrameConfig* config) { } } +size_t DefaultTemporalLayers::NumFramesSinceBufferRefresh( + Vp8FrameConfig::Vp8BufferReference ref) const { + return frames_since_buffer_refresh_[BufferToIndex(ref)]; +} + +void DefaultTemporalLayers::ResetNumFramesSinceBufferRefresh( + Vp8FrameConfig::Vp8BufferReference ref) { + frames_since_buffer_refresh_[BufferToIndex(ref)] = 0; +} + +void DefaultTemporalLayers::CullPendingFramesBefore(uint32_t timestamp) { + while (!pending_frames_.empty() && + pending_frames_.front().timestamp != timestamp) { + pending_frames_.pop_front(); + } +} + void DefaultTemporalLayers::OnEncodeDone(size_t stream_index, uint32_t rtp_timestamp, size_t size_bytes, @@ -491,17 +527,15 @@ void DefaultTemporalLayers::OnEncodeDone(size_t stream_index, return; } - auto pending_frame = pending_frames_.find(rtp_timestamp); - RTC_DCHECK(pending_frame != pending_frames_.end()); - - PendingFrame& frame = pending_frame->second; + CullPendingFramesBefore(rtp_timestamp); + 
RTC_CHECK(!pending_frames_.empty()); + PendingFrame& frame = pending_frames_.front(); + RTC_DCHECK_EQ(frame.timestamp, rtp_timestamp); const Vp8FrameConfig& frame_config = frame.dependency_info.frame_config; -#if RTC_DCHECK_IS_ON if (is_keyframe) { // Signal key-frame so checker resets state. RTC_DCHECK(checker_->CheckTemporalConfig(true, frame_config)); } -#endif CodecSpecificInfoVP8& vp8_info = info->codecSpecific.VP8; if (num_layers_ == 1) { @@ -515,10 +549,10 @@ void DefaultTemporalLayers::OnEncodeDone(size_t stream_index, vp8_info.layerSync = true; // Keyframes are always sync frames. for (Vp8BufferReference buffer : kAllBuffers) { - if (kf_buffers_.find(buffer) != kf_buffers_.end()) { + if (is_static_buffer_[BufferToIndex(buffer)]) { // Update frame count of all kf-only buffers, regardless of state of // |pending_frames_|. - frames_since_buffer_refresh_[buffer] = 0; + ResetNumFramesSinceBufferRefresh(buffer); } else { // Key-frames update all buffers, this should be reflected when // updating state in FrameEncoded(). 
@@ -558,8 +592,9 @@ void DefaultTemporalLayers::OnEncodeDone(size_t stream_index, vp8_info.updatedBuffers[vp8_info.updatedBuffersCount++] = i; } - if (references || updates) + if (references || updates) { generic_frame_info.encoder_buffers.emplace_back(i, references, updates); + } } // The templates are always present on keyframes, and then refered to by @@ -578,19 +613,20 @@ void DefaultTemporalLayers::OnEncodeDone(size_t stream_index, if (!frame.expired) { for (Vp8BufferReference buffer : kAllBuffers) { if (frame.updated_buffer_mask & static_cast(buffer)) { - frames_since_buffer_refresh_[buffer] = 0; + ResetNumFramesSinceBufferRefresh(buffer); } } } - pending_frames_.erase(pending_frame); + pending_frames_.pop_front(); } void DefaultTemporalLayers::OnFrameDropped(size_t stream_index, uint32_t rtp_timestamp) { - auto pending_frame = pending_frames_.find(rtp_timestamp); - RTC_DCHECK(pending_frame != pending_frames_.end()); - pending_frames_.erase(pending_frame); + CullPendingFramesBefore(rtp_timestamp); + RTC_CHECK(!pending_frames_.empty()); + RTC_DCHECK_EQ(pending_frames_.front().timestamp, rtp_timestamp); + pending_frames_.pop_front(); } void DefaultTemporalLayers::OnPacketLossRateUpdate(float packet_loss_rate) {} diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h index d127d8056..bc6574c54 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.h @@ -15,8 +15,9 @@ #include #include +#include +#include #include -#include #include #include #include @@ -53,13 +54,15 @@ class DefaultTemporalLayers final : public Vp8FrameBufferController { Vp8EncoderConfig UpdateConfiguration(size_t stream_index) override; + // Callbacks methods on frame completion. 
OnEncodeDone() or OnFrameDropped() + // should be called once for each NextFrameConfig() call (using the RTP + // timestamp as ID), and the calls MUST be in the same order. void OnEncodeDone(size_t stream_index, uint32_t rtp_timestamp, size_t size_bytes, bool is_keyframe, int qp, CodecSpecificInfo* info) override; - void OnFrameDropped(size_t stream_index, uint32_t rtp_timestamp) override; void OnPacketLossRateUpdate(float packet_loss_rate) override; @@ -70,6 +73,7 @@ class DefaultTemporalLayers final : public Vp8FrameBufferController { const VideoEncoder::LossNotification& loss_notification) override; private: + static constexpr size_t kNumReferenceBuffers = 3; // Last, golden, altref. struct DependencyInfo { DependencyInfo() = default; DependencyInfo(absl::string_view indication_symbols, @@ -81,29 +85,13 @@ class DefaultTemporalLayers final : public Vp8FrameBufferController { absl::InlinedVector decode_target_indications; Vp8FrameConfig frame_config; }; - - static std::vector GetDependencyInfo(size_t num_layers); - bool IsSyncFrame(const Vp8FrameConfig& config) const; - void ValidateReferences(Vp8FrameConfig::BufferFlags* flags, - Vp8FrameConfig::Vp8BufferReference ref) const; - void UpdateSearchOrder(Vp8FrameConfig* config); - - const size_t num_layers_; - const std::vector temporal_ids_; - const std::vector temporal_pattern_; - // Set of buffers that are never updated except by keyframes. - std::set kf_buffers_; - FrameDependencyStructure GetTemplateStructure(int num_layers) const; - - uint8_t pattern_idx_; - // Updated cumulative bitrates, per temporal layer. - absl::optional> new_bitrates_bps_; - struct PendingFrame { PendingFrame(); - PendingFrame(bool expired, + PendingFrame(uint32_t timestamp, + bool expired, uint8_t updated_buffers_mask, const DependencyInfo& dependency_info); + uint32_t timestamp = 0; // Flag indicating if this frame has expired, ie it belongs to a previous // iteration of the temporal pattern. 
bool expired = false; @@ -113,14 +101,38 @@ class DefaultTemporalLayers final : public Vp8FrameBufferController { // The frame config returned by NextFrameConfig() for this frame. DependencyInfo dependency_info; }; - // Map from rtp timestamp to pending frame status. Reset on pattern loop. - std::map pending_frames_; - // One counter per Vp8BufferReference, indicating number of frames since last + static std::vector GetDependencyInfo(size_t num_layers); + static std::bitset DetermineStaticBuffers( + const std::vector& temporal_pattern); + bool IsSyncFrame(const Vp8FrameConfig& config) const; + void ValidateReferences(Vp8FrameConfig::BufferFlags* flags, + Vp8FrameConfig::Vp8BufferReference ref) const; + void UpdateSearchOrder(Vp8FrameConfig* config); + size_t NumFramesSinceBufferRefresh( + Vp8FrameConfig::Vp8BufferReference ref) const; + void ResetNumFramesSinceBufferRefresh(Vp8FrameConfig::Vp8BufferReference ref); + void CullPendingFramesBefore(uint32_t timestamp); + + const size_t num_layers_; + const std::vector temporal_ids_; + const std::vector temporal_pattern_; + // Per reference buffer flag indicating if it is static, meaning it is only + // updated by key-frames. + const std::bitset is_static_buffer_; + FrameDependencyStructure GetTemplateStructure(int num_layers) const; + + uint8_t pattern_idx_; + // Updated cumulative bitrates, per temporal layer. + absl::optional> new_bitrates_bps_; + + // Status for each pending frame, in + std::deque pending_frames_; + + // One counter per reference buffer, indicating number of frames since last // refresh. For non-base-layer frames (ie golden, altref buffers), this is // reset when the pattern loops. - std::map - frames_since_buffer_refresh_; + std::array frames_since_buffer_refresh_; // Optional utility used to verify reference validity. 
std::unique_ptr checker_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/include/vp8.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/include/vp8.h index 44efbeeb3..d05c3a68d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/include/vp8.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/include/vp8.h @@ -14,10 +14,10 @@ #include #include +#include "absl/base/attributes.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/deprecation.h" namespace webrtc { @@ -40,7 +40,8 @@ class VP8Encoder { static std::unique_ptr Create(); static std::unique_ptr Create(Settings settings); - RTC_DEPRECATED static std::unique_ptr Create( + ABSL_DEPRECATED("") + static std::unique_ptr Create( std::unique_ptr frame_buffer_controller_factory); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc index af48c9253..9d6ffdba9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc @@ -54,13 +54,9 @@ constexpr bool kIsArm = false; #endif absl::optional DefaultDeblockParams() { - if (kIsArm) { - // For ARM, this is only called when deblocking is explicitly enabled, and - // the default strength is set by the ctor. - return LibvpxVp8Decoder::DeblockParams(); - } - // For non-arm, don't use the explicit deblocking settings by default. 
- return absl::nullopt; + return LibvpxVp8Decoder::DeblockParams(/*max_level=*/8, + /*degrade_qp=*/60, + /*min_qp=*/30); } absl::optional @@ -397,6 +393,13 @@ int LibvpxVp8Decoder::Release() { return ret_val; } +VideoDecoder::DecoderInfo LibvpxVp8Decoder::GetDecoderInfo() const { + DecoderInfo info; + info.implementation_name = "libvpx"; + info.is_hardware_accelerated = false; + return info; +} + const char* LibvpxVp8Decoder::ImplementationName() const { return "libvpx"; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h index cf699f183..60295e5d5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h @@ -38,12 +38,16 @@ class LibvpxVp8Decoder : public VideoDecoder { int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override; int Release() override; + DecoderInfo GetDecoderInfo() const override; const char* ImplementationName() const override; struct DeblockParams { - int max_level = 6; // Deblocking strength: [0, 16]. - int degrade_qp = 1; // If QP value is below, start lowering |max_level|. - int min_qp = 0; // If QP value is below, turn off deblocking. + DeblockParams() : max_level(6), degrade_qp(1), min_qp(0) {} + DeblockParams(int max_level, int degrade_qp, int min_qp) + : max_level(max_level), degrade_qp(degrade_qp), min_qp(min_qp) {} + int max_level; // Deblocking strength: [0, 16]. + int degrade_qp; // If QP value is below, start lowering |max_level|. + int min_qp; // If QP value is below, turn off deblocking. 
}; private: diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index 340817658..dd72872ed 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/algorithm/container.h" #include "api/scoped_refptr.h" #include "api/video/video_content_type.h" #include "api/video/video_frame_buffer.h" @@ -49,11 +50,6 @@ constexpr char kVP8IosMaxNumberOfThreadFieldTrial[] = constexpr char kVP8IosMaxNumberOfThreadFieldTrialParameter[] = "max_thread"; #endif -constexpr char kVp8GetEncoderInfoOverrideFieldTrial[] = - "WebRTC-VP8-GetEncoderInfoOverride"; -constexpr char kVp8RequestedResolutionAlignmentFieldTrialParameter[] = - "requested_resolution_alignment"; - constexpr char kVp8ForcePartitionResilience[] = "WebRTC-VP8-ForcePartitionResilience"; @@ -165,25 +161,63 @@ void ApplyVp8EncoderConfigToVpxConfig(const Vp8EncoderConfig& encoder_config, } } -absl::optional GetRequestedResolutionAlignmentOverride() { - const std::string trial_string = - field_trial::FindFullName(kVp8GetEncoderInfoOverrideFieldTrial); - FieldTrialOptional requested_resolution_alignment( - kVp8RequestedResolutionAlignmentFieldTrialParameter); - ParseFieldTrial({&requested_resolution_alignment}, trial_string); - return requested_resolution_alignment.GetOptional(); +bool IsCompatibleVideoFrameBufferType(VideoFrameBuffer::Type left, + VideoFrameBuffer::Type right) { + if (left == VideoFrameBuffer::Type::kI420 || + left == VideoFrameBuffer::Type::kI420A) { + // LibvpxVp8Encoder does not care about the alpha channel, I420A and I420 + // are considered compatible. 
+ return right == VideoFrameBuffer::Type::kI420 || + right == VideoFrameBuffer::Type::kI420A; + } + return left == right; +} + +void SetRawImagePlanes(vpx_image_t* raw_image, VideoFrameBuffer* buffer) { + switch (buffer->type()) { + case VideoFrameBuffer::Type::kI420: + case VideoFrameBuffer::Type::kI420A: { + const I420BufferInterface* i420_buffer = buffer->GetI420(); + RTC_DCHECK(i420_buffer); + raw_image->planes[VPX_PLANE_Y] = + const_cast(i420_buffer->DataY()); + raw_image->planes[VPX_PLANE_U] = + const_cast(i420_buffer->DataU()); + raw_image->planes[VPX_PLANE_V] = + const_cast(i420_buffer->DataV()); + raw_image->stride[VPX_PLANE_Y] = i420_buffer->StrideY(); + raw_image->stride[VPX_PLANE_U] = i420_buffer->StrideU(); + raw_image->stride[VPX_PLANE_V] = i420_buffer->StrideV(); + break; + } + case VideoFrameBuffer::Type::kNV12: { + const NV12BufferInterface* nv12_buffer = buffer->GetNV12(); + RTC_DCHECK(nv12_buffer); + raw_image->planes[VPX_PLANE_Y] = + const_cast(nv12_buffer->DataY()); + raw_image->planes[VPX_PLANE_U] = + const_cast(nv12_buffer->DataUV()); + raw_image->planes[VPX_PLANE_V] = raw_image->planes[VPX_PLANE_U] + 1; + raw_image->stride[VPX_PLANE_Y] = nv12_buffer->StrideY(); + raw_image->stride[VPX_PLANE_U] = nv12_buffer->StrideUV(); + raw_image->stride[VPX_PLANE_V] = nv12_buffer->StrideUV(); + break; + } + default: + RTC_NOTREACHED(); + } } } // namespace std::unique_ptr VP8Encoder::Create() { - return std::make_unique(LibvpxInterface::CreateEncoder(), + return std::make_unique(LibvpxInterface::Create(), VP8Encoder::Settings()); } std::unique_ptr VP8Encoder::Create( VP8Encoder::Settings settings) { - return std::make_unique(LibvpxInterface::CreateEncoder(), + return std::make_unique(LibvpxInterface::Create(), std::move(settings)); } @@ -193,7 +227,7 @@ std::unique_ptr VP8Encoder::Create( VP8Encoder::Settings settings; settings.frame_buffer_controller_factory = std::move(frame_buffer_controller_factory); - return 
std::make_unique(LibvpxInterface::CreateEncoder(), + return std::make_unique(LibvpxInterface::Create(), std::move(settings)); } @@ -230,8 +264,6 @@ LibvpxVp8Encoder::LibvpxVp8Encoder(std::unique_ptr interface, VP8Encoder::Settings settings) : libvpx_(std::move(interface)), rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), - requested_resolution_alignment_override_( - GetRequestedResolutionAlignmentOverride()), frame_buffer_controller_factory_( std::move(settings.frame_buffer_controller_factory)), resolution_bitrate_limits_(std::move(settings.resolution_bitrate_limits)), @@ -945,40 +977,29 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, flags[i] = send_key_frame ? VPX_EFLAG_FORCE_KF : EncodeFlags(tl_configs[i]); } - rtc::scoped_refptr input_image = frame.video_frame_buffer(); - // Since we are extracting raw pointers from |input_image| to - // |raw_images_[0]|, the resolution of these frames must match. - RTC_DCHECK_EQ(input_image->width(), raw_images_[0].d_w); - RTC_DCHECK_EQ(input_image->height(), raw_images_[0].d_h); - switch (input_image->type()) { - case VideoFrameBuffer::Type::kI420: - PrepareI420Image(input_image->GetI420()); - break; - case VideoFrameBuffer::Type::kNV12: - PrepareNV12Image(input_image->GetNV12()); - break; - default: { - rtc::scoped_refptr i420_image = - input_image->ToI420(); - if (!i420_image) { - RTC_LOG(LS_ERROR) << "Failed to convert " - << VideoFrameBufferTypeToString(input_image->type()) - << " image to I420. Can't encode frame."; - return WEBRTC_VIDEO_CODEC_ERROR; - } - input_image = i420_image; - PrepareI420Image(i420_image); - } + // Scale and map buffers and set |raw_images_| to hold pointers to the result. + // Because |raw_images_| are set to hold pointers to the prepared buffers, we + // need to keep these buffers alive through reference counting until after + // encoding is complete. 
+ std::vector> prepared_buffers = + PrepareBuffers(frame.video_frame_buffer()); + if (prepared_buffers.empty()) { + return WEBRTC_VIDEO_CODEC_ERROR; } struct CleanUpOnExit { - explicit CleanUpOnExit(vpx_image_t& raw_image) : raw_image_(raw_image) {} + explicit CleanUpOnExit( + vpx_image_t* raw_image, + std::vector> prepared_buffers) + : raw_image_(raw_image), + prepared_buffers_(std::move(prepared_buffers)) {} ~CleanUpOnExit() { - raw_image_.planes[VPX_PLANE_Y] = nullptr; - raw_image_.planes[VPX_PLANE_U] = nullptr; - raw_image_.planes[VPX_PLANE_V] = nullptr; + raw_image_->planes[VPX_PLANE_Y] = nullptr; + raw_image_->planes[VPX_PLANE_U] = nullptr; + raw_image_->planes[VPX_PLANE_V] = nullptr; } - vpx_image_t& raw_image_; - } clean_up_on_exit(raw_images_[0]); + vpx_image_t* raw_image_; + std::vector> prepared_buffers_; + } clean_up_on_exit(&raw_images_[0], std::move(prepared_buffers)); if (send_key_frame) { // Adapt the size of the key frame when in screenshare with 1 temporal @@ -1062,9 +1083,25 @@ void LibvpxVp8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, int qp = 0; vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp); - frame_buffer_controller_->OnEncodeDone( - stream_idx, timestamp, encoded_images_[encoder_idx].size(), - (pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0, qp, codec_specific); + bool is_keyframe = (pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0; + frame_buffer_controller_->OnEncodeDone(stream_idx, timestamp, + encoded_images_[encoder_idx].size(), + is_keyframe, qp, codec_specific); + if (is_keyframe && codec_specific->template_structure != absl::nullopt) { + // Number of resolutions must match number of spatial layers, VP8 structures + // expected to use single spatial layer. Templates must be ordered by + // spatial_id, so assumption there is exactly one spatial layer is same as + // assumption last template uses spatial_id = 0. 
+ // This check catches potential scenario where template_structure is shared + // across multiple vp8 streams and they are distinguished using spatial_id. + // Assigning single resolution doesn't support such scenario, i.e. assumes + // vp8 simulcast is sent using multiple ssrcs. + RTC_DCHECK(!codec_specific->template_structure->templates.empty()); + RTC_DCHECK_EQ( + codec_specific->template_structure->templates.back().spatial_id, 0); + codec_specific->template_structure->resolutions = { + RenderResolution(pkt.data.frame.width[0], pkt.data.frame.height[0])}; + } } int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, @@ -1173,9 +1210,15 @@ VideoEncoder::EncoderInfo LibvpxVp8Encoder::GetEncoderInfo() const { if (!resolution_bitrate_limits_.empty()) { info.resolution_bitrate_limits = resolution_bitrate_limits_; } - if (requested_resolution_alignment_override_) { + if (encoder_info_override_.requested_resolution_alignment()) { info.requested_resolution_alignment = - *requested_resolution_alignment_override_; + *encoder_info_override_.requested_resolution_alignment(); + info.apply_alignment_to_all_simulcast_layers = + encoder_info_override_.apply_alignment_to_all_simulcast_layers(); + } + if (!encoder_info_override_.resolution_bitrate_limits().empty()) { + info.resolution_bitrate_limits = + encoder_info_override_.resolution_bitrate_limits(); } const bool enable_scaling = @@ -1256,61 +1299,114 @@ void LibvpxVp8Encoder::MaybeUpdatePixelFormat(vpx_img_fmt fmt) { } } -void LibvpxVp8Encoder::PrepareI420Image(const I420BufferInterface* frame) { - RTC_DCHECK(!raw_images_.empty()); - MaybeUpdatePixelFormat(VPX_IMG_FMT_I420); - // Image in vpx_image_t format. - // Input image is const. VP8's raw image is not defined as const. 
- raw_images_[0].planes[VPX_PLANE_Y] = const_cast(frame->DataY()); - raw_images_[0].planes[VPX_PLANE_U] = const_cast(frame->DataU()); - raw_images_[0].planes[VPX_PLANE_V] = const_cast(frame->DataV()); +std::vector> +LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr buffer) { + RTC_DCHECK_EQ(buffer->width(), raw_images_[0].d_w); + RTC_DCHECK_EQ(buffer->height(), raw_images_[0].d_h); + absl::InlinedVector + supported_formats = {VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12}; - raw_images_[0].stride[VPX_PLANE_Y] = frame->StrideY(); - raw_images_[0].stride[VPX_PLANE_U] = frame->StrideU(); - raw_images_[0].stride[VPX_PLANE_V] = frame->StrideV(); - - for (size_t i = 1; i < encoders_.size(); ++i) { - // Scale the image down a number of times by downsampling factor - libyuv::I420Scale( - raw_images_[i - 1].planes[VPX_PLANE_Y], - raw_images_[i - 1].stride[VPX_PLANE_Y], - raw_images_[i - 1].planes[VPX_PLANE_U], - raw_images_[i - 1].stride[VPX_PLANE_U], - raw_images_[i - 1].planes[VPX_PLANE_V], - raw_images_[i - 1].stride[VPX_PLANE_V], raw_images_[i - 1].d_w, - raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y], - raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U], - raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].planes[VPX_PLANE_V], - raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w, - raw_images_[i].d_h, libyuv::kFilterBilinear); + rtc::scoped_refptr mapped_buffer; + if (buffer->type() != VideoFrameBuffer::Type::kNative) { + // |buffer| is already mapped. + mapped_buffer = buffer; + } else { + // Attempt to map to one of the supported formats. + mapped_buffer = buffer->GetMappedFrameBuffer(supported_formats); } -} - -void LibvpxVp8Encoder::PrepareNV12Image(const NV12BufferInterface* frame) { - RTC_DCHECK(!raw_images_.empty()); - MaybeUpdatePixelFormat(VPX_IMG_FMT_NV12); - // Image in vpx_image_t format. - // Input image is const. VP8's raw image is not defined as const. 
- raw_images_[0].planes[VPX_PLANE_Y] = const_cast(frame->DataY()); - raw_images_[0].planes[VPX_PLANE_U] = const_cast(frame->DataUV()); - raw_images_[0].planes[VPX_PLANE_V] = raw_images_[0].planes[VPX_PLANE_U] + 1; - raw_images_[0].stride[VPX_PLANE_Y] = frame->StrideY(); - raw_images_[0].stride[VPX_PLANE_U] = frame->StrideUV(); - raw_images_[0].stride[VPX_PLANE_V] = frame->StrideUV(); - - for (size_t i = 1; i < encoders_.size(); ++i) { - // Scale the image down a number of times by downsampling factor - libyuv::NV12Scale( - raw_images_[i - 1].planes[VPX_PLANE_Y], - raw_images_[i - 1].stride[VPX_PLANE_Y], - raw_images_[i - 1].planes[VPX_PLANE_U], - raw_images_[i - 1].stride[VPX_PLANE_U], raw_images_[i - 1].d_w, - raw_images_[i - 1].d_h, raw_images_[i].planes[VPX_PLANE_Y], - raw_images_[i].stride[VPX_PLANE_Y], raw_images_[i].planes[VPX_PLANE_U], - raw_images_[i].stride[VPX_PLANE_U], raw_images_[i].d_w, - raw_images_[i].d_h, libyuv::kFilterBilinear); - raw_images_[i].planes[VPX_PLANE_V] = raw_images_[i].planes[VPX_PLANE_U] + 1; + if (!mapped_buffer || + (absl::c_find(supported_formats, mapped_buffer->type()) == + supported_formats.end() && + mapped_buffer->type() != VideoFrameBuffer::Type::kI420A)) { + // Unknown pixel format or unable to map, convert to I420 and prepare that + // buffer instead to ensure Scale() is safe to use. + auto converted_buffer = buffer->ToI420(); + if (!converted_buffer) { + RTC_LOG(LS_ERROR) << "Failed to convert " + << VideoFrameBufferTypeToString(buffer->type()) + << " image to I420. Can't encode frame."; + return {}; + } + // The buffer should now be a mapped I420 or I420A format, but some buffer + // implementations incorrectly return the wrong buffer format, such as + // kNative. As a workaround to this, we perform ToI420() a second time. + // TODO(https://crbug.com/webrtc/12602): When Android buffers have a correct + // ToI420() implementaion, remove his workaround. 
+ if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 && + converted_buffer->type() != VideoFrameBuffer::Type::kI420A) { + converted_buffer = converted_buffer->ToI420(); + RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 || + converted_buffer->type() == VideoFrameBuffer::Type::kI420A); + } + // Because |buffer| had to be converted, use |converted_buffer| instead... + buffer = mapped_buffer = converted_buffer; } + + // Maybe update pixel format. + absl::InlinedVector + mapped_type = {mapped_buffer->type()}; + switch (mapped_buffer->type()) { + case VideoFrameBuffer::Type::kI420: + case VideoFrameBuffer::Type::kI420A: + MaybeUpdatePixelFormat(VPX_IMG_FMT_I420); + break; + case VideoFrameBuffer::Type::kNV12: + MaybeUpdatePixelFormat(VPX_IMG_FMT_NV12); + break; + default: + RTC_NOTREACHED(); + } + + // Prepare |raw_images_| from |mapped_buffer| and, if simulcast, scaled + // versions of |buffer|. + std::vector> prepared_buffers; + SetRawImagePlanes(&raw_images_[0], mapped_buffer); + prepared_buffers.push_back(mapped_buffer); + for (size_t i = 1; i < encoders_.size(); ++i) { + // Native buffers should implement optimized scaling and is the preferred + // buffer to scale. But if the buffer isn't native, it should be cheaper to + // scale from the previously prepared buffer which is smaller than |buffer|. + VideoFrameBuffer* buffer_to_scale = + buffer->type() == VideoFrameBuffer::Type::kNative + ? 
buffer.get() + : prepared_buffers.back().get(); + + auto scaled_buffer = + buffer_to_scale->Scale(raw_images_[i].d_w, raw_images_[i].d_h); + if (scaled_buffer->type() == VideoFrameBuffer::Type::kNative) { + auto mapped_scaled_buffer = + scaled_buffer->GetMappedFrameBuffer(mapped_type); + RTC_DCHECK(mapped_scaled_buffer) << "Unable to map the scaled buffer."; + if (!mapped_scaled_buffer) { + RTC_LOG(LS_ERROR) << "Failed to map scaled " + << VideoFrameBufferTypeToString(scaled_buffer->type()) + << " image to " + << VideoFrameBufferTypeToString(mapped_buffer->type()) + << ". Can't encode frame."; + return {}; + } + scaled_buffer = mapped_scaled_buffer; + } + if (!IsCompatibleVideoFrameBufferType(scaled_buffer->type(), + mapped_buffer->type())) { + RTC_LOG(LS_ERROR) << "When scaling " + << VideoFrameBufferTypeToString(buffer_to_scale->type()) + << ", the image was unexpectedly converted to " + << VideoFrameBufferTypeToString(scaled_buffer->type()) + << " instead of " + << VideoFrameBufferTypeToString(mapped_buffer->type()) + << ". 
Can't encode frame."; + RTC_NOTREACHED() << "Scaled buffer type " + << VideoFrameBufferTypeToString(scaled_buffer->type()) + << " is not compatible with mapped buffer type " + << VideoFrameBufferTypeToString(mapped_buffer->type()); + return {}; + } + SetRawImagePlanes(&raw_images_[i], scaled_buffer); + prepared_buffers.push_back(scaled_buffer); + } + return prepared_buffers; } // static diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h index c08b9b088..ed80eacab 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h @@ -21,11 +21,12 @@ #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_frame_config.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/codecs/vp8/libvpx_interface.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/utility/framerate_controller.h" #include "rtc_base/experiments/cpu_speed_experiment.h" +#include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/experiments/rate_control_settings.h" #include "vpx/vp8cx.h" #include "vpx/vpx_encoder.h" @@ -94,17 +95,19 @@ class LibvpxVp8Encoder : public VideoEncoder { bool UpdateVpxConfiguration(size_t stream_index); void MaybeUpdatePixelFormat(vpx_img_fmt fmt); - void PrepareI420Image(const I420BufferInterface* frame); - void PrepareNV12Image(const NV12BufferInterface* frame); + // Prepares |raw_image_| to reference image data of |buffer|, or of mapped or + // scaled versions of |buffer|. 
Returns a list of buffers that got referenced + // as a result, allowing the caller to keep references to them until after + // encoding has finished. On failure to convert the buffer, an empty list is + // returned. + std::vector> PrepareBuffers( + rtc::scoped_refptr buffer); const std::unique_ptr libvpx_; const CpuSpeedExperiment experimental_cpu_speed_config_arm_; const RateControlSettings rate_control_settings_; - // EncoderInfo::requested_resolution_alignment override from field trial. - const absl::optional requested_resolution_alignment_override_; - EncodedImageCallback* encoded_complete_callback_ = nullptr; VideoCodec codec_; bool inited_ = false; @@ -146,6 +149,8 @@ class LibvpxVp8Encoder : public VideoEncoder { int num_steady_state_frames_ = 0; FecControllerOverride* fec_controller_override_ = nullptr; + + const LibvpxVp8EncoderInfoSettings encoder_info_override_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc new file mode 100644 index 000000000..3500ef591 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc @@ -0,0 +1,406 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ * + */ + +#ifdef RTC_ENABLE_VP9 + +#include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h" + +#include + +#include "absl/strings/match.h" +#include "api/transport/field_trial_based_config.h" +#include "api/video/color_space.h" +#include "api/video/i010_buffer.h" +#include "common_video/include/video_frame_buffer.h" +#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "vpx/vp8dx.h" +#include "vpx/vpx_decoder.h" + +namespace webrtc { +namespace { + +// Helper class for extracting VP9 colorspace. +ColorSpace ExtractVP9ColorSpace(vpx_color_space_t space_t, + vpx_color_range_t range_t, + unsigned int bit_depth) { + ColorSpace::PrimaryID primaries = ColorSpace::PrimaryID::kUnspecified; + ColorSpace::TransferID transfer = ColorSpace::TransferID::kUnspecified; + ColorSpace::MatrixID matrix = ColorSpace::MatrixID::kUnspecified; + switch (space_t) { + case VPX_CS_BT_601: + case VPX_CS_SMPTE_170: + primaries = ColorSpace::PrimaryID::kSMPTE170M; + transfer = ColorSpace::TransferID::kSMPTE170M; + matrix = ColorSpace::MatrixID::kSMPTE170M; + break; + case VPX_CS_SMPTE_240: + primaries = ColorSpace::PrimaryID::kSMPTE240M; + transfer = ColorSpace::TransferID::kSMPTE240M; + matrix = ColorSpace::MatrixID::kSMPTE240M; + break; + case VPX_CS_BT_709: + primaries = ColorSpace::PrimaryID::kBT709; + transfer = ColorSpace::TransferID::kBT709; + matrix = ColorSpace::MatrixID::kBT709; + break; + case VPX_CS_BT_2020: + primaries = ColorSpace::PrimaryID::kBT2020; + switch (bit_depth) { + case 8: + transfer = ColorSpace::TransferID::kBT709; + break; + case 10: + transfer = ColorSpace::TransferID::kBT2020_10; + break; + default: + RTC_NOTREACHED(); + break; + } + matrix = ColorSpace::MatrixID::kBT2020_NCL; + break; + case VPX_CS_SRGB: + primaries = ColorSpace::PrimaryID::kBT709; + transfer = ColorSpace::TransferID::kIEC61966_2_1; + matrix = 
ColorSpace::MatrixID::kBT709; + break; + default: + break; + } + + ColorSpace::RangeID range = ColorSpace::RangeID::kInvalid; + switch (range_t) { + case VPX_CR_STUDIO_RANGE: + range = ColorSpace::RangeID::kLimited; + break; + case VPX_CR_FULL_RANGE: + range = ColorSpace::RangeID::kFull; + break; + default: + break; + } + return ColorSpace(primaries, transfer, matrix, range); +} + +} // namespace + +LibvpxVp9Decoder::LibvpxVp9Decoder() + : LibvpxVp9Decoder(FieldTrialBasedConfig()) {} +LibvpxVp9Decoder::LibvpxVp9Decoder(const WebRtcKeyValueConfig& trials) + : decode_complete_callback_(nullptr), + inited_(false), + decoder_(nullptr), + key_frame_required_(true), + preferred_output_format_( + absl::StartsWith(trials.Lookup("WebRTC-NV12Decode"), "Enabled") + ? VideoFrameBuffer::Type::kNV12 + : VideoFrameBuffer::Type::kI420) {} + +LibvpxVp9Decoder::~LibvpxVp9Decoder() { + inited_ = true; // in order to do the actual release + Release(); + int num_buffers_in_use = libvpx_buffer_pool_.GetNumBuffersInUse(); + if (num_buffers_in_use > 0) { + // The frame buffers are reference counted and frames are exposed after + // decoding. There may be valid usage cases where previous frames are still + // referenced after ~LibvpxVp9Decoder that is not a leak. + RTC_LOG(LS_INFO) << num_buffers_in_use + << " Vp9FrameBuffers are still " + "referenced during ~LibvpxVp9Decoder."; + } +} + +int LibvpxVp9Decoder::InitDecode(const VideoCodec* inst, int number_of_cores) { + int ret_val = Release(); + if (ret_val < 0) { + return ret_val; + } + + if (decoder_ == nullptr) { + decoder_ = new vpx_codec_ctx_t; + } + vpx_codec_dec_cfg_t cfg; + memset(&cfg, 0, sizeof(cfg)); + +#ifdef FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION + // We focus on webrtc fuzzing here, not libvpx itself. Use single thread for + // fuzzing, because: + // - libvpx's VP9 single thread decoder is more fuzzer friendly. It detects + // errors earlier than the multi-threads version. 
+ // - Make peak CPU usage under control (not depending on input) + cfg.threads = 1; +#else + if (!inst) { + // No config provided - don't know resolution to decode yet. + // Set thread count to one in the meantime. + cfg.threads = 1; + } else { + // We want to use multithreading when decoding high resolution videos. But + // not too many in order to avoid overhead when many stream are decoded + // concurrently. + // Set 2 thread as target for 1280x720 pixel count, and then scale up + // linearly from there - but cap at physical core count. + // For common resolutions this results in: + // 1 for 360p + // 2 for 720p + // 4 for 1080p + // 8 for 1440p + // 18 for 4K + int num_threads = + std::max(1, 2 * (inst->width * inst->height) / (1280 * 720)); + cfg.threads = std::min(number_of_cores, num_threads); + current_codec_ = *inst; + } +#endif + + num_cores_ = number_of_cores; + + vpx_codec_flags_t flags = 0; + if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { + return WEBRTC_VIDEO_CODEC_MEMORY; + } + + if (!libvpx_buffer_pool_.InitializeVpxUsePool(decoder_)) { + return WEBRTC_VIDEO_CODEC_MEMORY; + } + + inited_ = true; + // Always start with a complete key frame. + key_frame_required_ = true; + if (inst && inst->buffer_pool_size) { + if (!libvpx_buffer_pool_.Resize(*inst->buffer_pool_size) || + !output_buffer_pool_.Resize(*inst->buffer_pool_size)) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + } + + vpx_codec_err_t status = + vpx_codec_control(decoder_, VP9D_SET_LOOP_FILTER_OPT, 1); + if (status != VPX_CODEC_OK) { + RTC_LOG(LS_ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. 
" + << vpx_codec_error(decoder_); + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + + return WEBRTC_VIDEO_CODEC_OK; +} + +int LibvpxVp9Decoder::Decode(const EncodedImage& input_image, + bool missing_frames, + int64_t /*render_time_ms*/) { + if (!inited_) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + if (decode_complete_callback_ == nullptr) { + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + + if (input_image._frameType == VideoFrameType::kVideoFrameKey) { + absl::optional frame_info = + vp9::ParseIntraFrameInfo(input_image.data(), input_image.size()); + if (frame_info) { + if (frame_info->frame_width != current_codec_.width || + frame_info->frame_height != current_codec_.height) { + // Resolution has changed, tear down and re-init a new decoder in + // order to get correct sizing. + Release(); + current_codec_.width = frame_info->frame_width; + current_codec_.height = frame_info->frame_height; + int reinit_status = InitDecode(¤t_codec_, num_cores_); + if (reinit_status != WEBRTC_VIDEO_CODEC_OK) { + RTC_LOG(LS_WARNING) << "Failed to re-init decoder."; + return reinit_status; + } + } + } else { + RTC_LOG(LS_WARNING) << "Failed to parse VP9 header from key-frame."; + } + } + + // Always start with a complete key frame. + if (key_frame_required_) { + if (input_image._frameType != VideoFrameType::kVideoFrameKey) + return WEBRTC_VIDEO_CODEC_ERROR; + key_frame_required_ = false; + } + vpx_codec_iter_t iter = nullptr; + vpx_image_t* img; + const uint8_t* buffer = input_image.data(); + if (input_image.size() == 0) { + buffer = nullptr; // Triggers full frame concealment. + } + // During decode libvpx may get and release buffers from + // |libvpx_buffer_pool_|. In practice libvpx keeps a few (~3-4) buffers alive + // at a time. + if (vpx_codec_decode(decoder_, buffer, + static_cast(input_image.size()), 0, + VPX_DL_REALTIME)) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + // |img->fb_priv| contains the image data, a reference counted Vp9FrameBuffer. 
+ // It may be released by libvpx during future vpx_codec_decode or + // vpx_codec_destroy calls. + img = vpx_codec_get_frame(decoder_, &iter); + int qp; + vpx_codec_err_t vpx_ret = + vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp); + RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK); + int ret = + ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace()); + if (ret != 0) { + return ret; + } + return WEBRTC_VIDEO_CODEC_OK; +} + +int LibvpxVp9Decoder::ReturnFrame( + const vpx_image_t* img, + uint32_t timestamp, + int qp, + const webrtc::ColorSpace* explicit_color_space) { + if (img == nullptr) { + // Decoder OK and nullptr image => No show frame. + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + + // This buffer contains all of |img|'s image data, a reference counted + // Vp9FrameBuffer. (libvpx is done with the buffers after a few + // vpx_codec_decode calls or vpx_codec_destroy). + rtc::scoped_refptr img_buffer = + static_cast(img->fb_priv); + + // The buffer can be used directly by the VideoFrame (without copy) by + // using a Wrapped*Buffer. + rtc::scoped_refptr img_wrapped_buffer; + switch (img->bit_depth) { + case 8: + if (img->fmt == VPX_IMG_FMT_I420) { + if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + rtc::scoped_refptr nv12_buffer = + output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); + if (!nv12_buffer.get()) { + // Buffer pool is full. + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + img_wrapped_buffer = nv12_buffer; + libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], + img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], + img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], + nv12_buffer->MutableDataY(), + nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), + nv12_buffer->StrideUV(), img->d_w, img->d_h); + // No holding onto img_buffer as it's no longer needed and can be + // reused. 
+ } else { + img_wrapped_buffer = WrapI420Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI420Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release |img_buffer|. + [img_buffer] {}); + } + } else if (img->fmt == VPX_IMG_FMT_I444) { + img_wrapped_buffer = WrapI444Buffer( + img->d_w, img->d_h, img->planes[VPX_PLANE_Y], + img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], + img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], + img->stride[VPX_PLANE_V], + // WrappedI444Buffer's mechanism for allowing the release of its + // frame buffer is through a callback function. This is where we + // should release |img_buffer|. + [img_buffer] {}); + } else { + RTC_LOG(LS_ERROR) + << "Unsupported pixel format produced by the decoder: " + << static_cast(img->fmt); + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + break; + case 10: + img_wrapped_buffer = WrapI010Buffer( + img->d_w, img->d_h, + reinterpret_cast(img->planes[VPX_PLANE_Y]), + img->stride[VPX_PLANE_Y] / 2, + reinterpret_cast(img->planes[VPX_PLANE_U]), + img->stride[VPX_PLANE_U] / 2, + reinterpret_cast(img->planes[VPX_PLANE_V]), + img->stride[VPX_PLANE_V] / 2, [img_buffer] {}); + break; + default: + RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: " + << img->bit_depth; + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + + auto builder = VideoFrame::Builder() + .set_video_frame_buffer(img_wrapped_buffer) + .set_timestamp_rtp(timestamp); + if (explicit_color_space) { + builder.set_color_space(*explicit_color_space); + } else { + builder.set_color_space( + ExtractVP9ColorSpace(img->cs, img->range, img->bit_depth)); + } + VideoFrame decoded_image = builder.build(); + + decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp); + return WEBRTC_VIDEO_CODEC_OK; +} + +int 
LibvpxVp9Decoder::RegisterDecodeCompleteCallback( + DecodedImageCallback* callback) { + decode_complete_callback_ = callback; + return WEBRTC_VIDEO_CODEC_OK; +} + +int LibvpxVp9Decoder::Release() { + int ret_val = WEBRTC_VIDEO_CODEC_OK; + + if (decoder_ != nullptr) { + if (inited_) { + // When a codec is destroyed libvpx will release any buffers of + // |libvpx_buffer_pool_| it is currently using. + if (vpx_codec_destroy(decoder_)) { + ret_val = WEBRTC_VIDEO_CODEC_MEMORY; + } + } + delete decoder_; + decoder_ = nullptr; + } + // Releases buffers from the pool. Any buffers not in use are deleted. Buffers + // still referenced externally are deleted once fully released, not returning + // to the pool. + libvpx_buffer_pool_.ClearPool(); + output_buffer_pool_.Release(); + inited_ = false; + return ret_val; +} + +VideoDecoder::DecoderInfo LibvpxVp9Decoder::GetDecoderInfo() const { + DecoderInfo info; + info.implementation_name = "libvpx"; + info.is_hardware_accelerated = false; + return info; +} + +const char* LibvpxVp9Decoder::ImplementationName() const { + return "libvpx"; +} + +} // namespace webrtc + +#endif // RTC_ENABLE_VP9 diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h new file mode 100644 index 000000000..f26f42700 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ * + */ + +#ifndef MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_DECODER_H_ +#define MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_DECODER_H_ + +#ifdef RTC_ENABLE_VP9 + +#include "api/transport/webrtc_key_value_config.h" +#include "api/video_codecs/video_decoder.h" +#include "common_video/include/video_frame_buffer_pool.h" +#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" +#include "vpx/vp8cx.h" + +namespace webrtc { + +class LibvpxVp9Decoder : public VP9Decoder { + public: + LibvpxVp9Decoder(); + explicit LibvpxVp9Decoder(const WebRtcKeyValueConfig& trials); + + virtual ~LibvpxVp9Decoder(); + + int InitDecode(const VideoCodec* inst, int number_of_cores) override; + + int Decode(const EncodedImage& input_image, + bool missing_frames, + int64_t /*render_time_ms*/) override; + + int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override; + + int Release() override; + + DecoderInfo GetDecoderInfo() const override; + const char* ImplementationName() const override; + + private: + int ReturnFrame(const vpx_image_t* img, + uint32_t timestamp, + int qp, + const webrtc::ColorSpace* explicit_color_space); + + // Memory pool used to share buffers between libvpx and webrtc. + Vp9FrameBufferPool libvpx_buffer_pool_; + // Buffer pool used to allocate additionally needed NV12 buffers. + VideoFrameBufferPool output_buffer_pool_; + DecodedImageCallback* decode_complete_callback_; + bool inited_; + vpx_codec_ctx_t* decoder_; + bool key_frame_required_; + VideoCodec current_codec_; + int num_cores_; + + // Decoder should produce this format if possible. 
+ const VideoFrameBuffer::Type preferred_output_format_; +}; +} // namespace webrtc + +#endif // RTC_ENABLE_VP9 + +#endif // MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_DECODER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc similarity index 72% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index c2b1f501f..2a7b125d3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,16 +11,16 @@ #ifdef RTC_ENABLE_VP9 -#include "modules/video_coding/codecs/vp9/vp9_impl.h" +#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include #include #include #include +#include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" -#include "api/transport/field_trial_based_config.h" #include "api/video/color_space.h" #include "api/video/i010_buffer.h" #include "common_video/include/video_frame_buffer.h" @@ -32,17 +32,15 @@ #include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/rate_control_settings.h" -#include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" #include 
"rtc_base/strings/string_builder.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "vpx/vp8cx.h" -#include "vpx/vp8dx.h" -#include "vpx/vpx_decoder.h" #include "vpx/vpx_encoder.h" namespace webrtc { @@ -65,82 +63,6 @@ const int kMaxAllowedPidDiff = 30; constexpr int kLowVp9QpThreshold = 149; constexpr int kHighVp9QpThreshold = 205; -// Only positive speeds, range for real-time coding currently is: 5 - 8. -// Lower means slower/better quality, higher means fastest/lower quality. -int GetCpuSpeed(int width, int height) { -#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) - return 8; -#else - // For smaller resolutions, use lower speed setting (get some coding gain at - // the cost of increased encoding complexity). - if (width * height <= 352 * 288) - return 5; - else - return 7; -#endif -} -// Helper class for extracting VP9 colorspace. -ColorSpace ExtractVP9ColorSpace(vpx_color_space_t space_t, - vpx_color_range_t range_t, - unsigned int bit_depth) { - ColorSpace::PrimaryID primaries = ColorSpace::PrimaryID::kUnspecified; - ColorSpace::TransferID transfer = ColorSpace::TransferID::kUnspecified; - ColorSpace::MatrixID matrix = ColorSpace::MatrixID::kUnspecified; - switch (space_t) { - case VPX_CS_BT_601: - case VPX_CS_SMPTE_170: - primaries = ColorSpace::PrimaryID::kSMPTE170M; - transfer = ColorSpace::TransferID::kSMPTE170M; - matrix = ColorSpace::MatrixID::kSMPTE170M; - break; - case VPX_CS_SMPTE_240: - primaries = ColorSpace::PrimaryID::kSMPTE240M; - transfer = ColorSpace::TransferID::kSMPTE240M; - matrix = ColorSpace::MatrixID::kSMPTE240M; - break; - case VPX_CS_BT_709: - primaries = ColorSpace::PrimaryID::kBT709; - transfer = ColorSpace::TransferID::kBT709; - matrix = ColorSpace::MatrixID::kBT709; - break; - case VPX_CS_BT_2020: - primaries = ColorSpace::PrimaryID::kBT2020; - switch (bit_depth) { - case 8: - transfer = ColorSpace::TransferID::kBT709; 
- break; - case 10: - transfer = ColorSpace::TransferID::kBT2020_10; - break; - default: - RTC_NOTREACHED(); - break; - } - matrix = ColorSpace::MatrixID::kBT2020_NCL; - break; - case VPX_CS_SRGB: - primaries = ColorSpace::PrimaryID::kBT709; - transfer = ColorSpace::TransferID::kIEC61966_2_1; - matrix = ColorSpace::MatrixID::kBT709; - break; - default: - break; - } - - ColorSpace::RangeID range = ColorSpace::RangeID::kInvalid; - switch (range_t) { - case VPX_CR_STUDIO_RANGE: - range = ColorSpace::RangeID::kLimited; - break; - case VPX_CR_FULL_RANGE: - range = ColorSpace::RangeID::kFull; - break; - default: - break; - } - return ColorSpace(primaries, transfer, matrix, range); -} - std::pair GetActiveLayers( const VideoBitrateAllocation& allocation) { for (size_t sl_idx = 0; sl_idx < kMaxSpatialLayers; ++sl_idx) { @@ -264,24 +186,22 @@ vpx_svc_ref_frame_config_t Vp9References( } // namespace -void VP9EncoderImpl::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, - void* user_data) { - VP9EncoderImpl* enc = static_cast(user_data); +void LibvpxVp9Encoder::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, + void* user_data) { + LibvpxVp9Encoder* enc = static_cast(user_data); enc->GetEncodedLayerFrame(pkt); } -VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec) - : VP9EncoderImpl(codec, FieldTrialBasedConfig()) {} - -VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec, - const WebRtcKeyValueConfig& trials) - : encoded_image_(), +LibvpxVp9Encoder::LibvpxVp9Encoder(const cricket::VideoCodec& codec, + std::unique_ptr interface, + const WebRtcKeyValueConfig& trials) + : libvpx_(std::move(interface)), + encoded_image_(), encoded_complete_callback_(nullptr), profile_( ParseSdpForVP9Profile(codec.params).value_or(VP9Profile::kProfile0)), inited_(false), timestamp_(0), - cpu_speed_(3), rc_max_intra_target_(0), encoder_(nullptr), config_(nullptr), @@ -318,27 +238,27 @@ VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec, 
external_ref_ctrl_( !absl::StartsWith(trials.Lookup("WebRTC-Vp9ExternalRefCtrl"), "Disabled")), - per_layer_speed_(ParsePerLayerSpeed(trials)), + performance_flags_(ParsePerformanceFlagsFromTrials(trials)), num_steady_state_frames_(0), config_changed_(true) { codec_ = {}; memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); } -VP9EncoderImpl::~VP9EncoderImpl() { +LibvpxVp9Encoder::~LibvpxVp9Encoder() { Release(); } -void VP9EncoderImpl::SetFecControllerOverride(FecControllerOverride*) { +void LibvpxVp9Encoder::SetFecControllerOverride(FecControllerOverride*) { // Ignored. } -int VP9EncoderImpl::Release() { +int LibvpxVp9Encoder::Release() { int ret_val = WEBRTC_VIDEO_CODEC_OK; if (encoder_ != nullptr) { if (inited_) { - if (vpx_codec_destroy(encoder_)) { + if (libvpx_->codec_destroy(encoder_)) { ret_val = WEBRTC_VIDEO_CODEC_MEMORY; } } @@ -350,20 +270,20 @@ int VP9EncoderImpl::Release() { config_ = nullptr; } if (raw_ != nullptr) { - vpx_img_free(raw_); + libvpx_->img_free(raw_); raw_ = nullptr; } inited_ = false; return ret_val; } -bool VP9EncoderImpl::ExplicitlyConfiguredSpatialLayers() const { +bool LibvpxVp9Encoder::ExplicitlyConfiguredSpatialLayers() const { // We check target_bitrate_bps of the 0th layer to see if the spatial layers // (i.e. bitrates) were explicitly configured. 
return codec_.spatialLayers[0].targetBitrate > 0; } -bool VP9EncoderImpl::SetSvcRates( +bool LibvpxVp9Encoder::SetSvcRates( const VideoBitrateAllocation& bitrate_allocation) { std::pair current_layers = GetActiveLayers(current_bitrate_allocation_); @@ -455,8 +375,6 @@ bool VP9EncoderImpl::SetSvcRates( first_active_layer_ = 0; bool seen_active_layer = false; bool expect_no_more_active_layers = false; - int highest_active_width = 0; - int highest_active_height = 0; for (int i = 0; i < num_spatial_layers_; ++i) { if (config_->ss_target_bitrate[i] > 0) { RTC_DCHECK(!expect_no_more_active_layers) << "Only middle layer is " @@ -466,12 +384,6 @@ bool VP9EncoderImpl::SetSvcRates( } num_active_spatial_layers_ = i + 1; seen_active_layer = true; - highest_active_width = - (svc_params_.scaling_factor_num[i] * config_->g_w) / - svc_params_.scaling_factor_den[i]; - highest_active_height = - (svc_params_.scaling_factor_num[i] * config_->g_h) / - svc_params_.scaling_factor_den[i]; } else { expect_no_more_active_layers = seen_active_layer; } @@ -499,12 +411,11 @@ bool VP9EncoderImpl::SetSvcRates( svc_controller_->OnRatesUpdated(allocation); } current_bitrate_allocation_ = bitrate_allocation; - cpu_speed_ = GetCpuSpeed(highest_active_width, highest_active_height); config_changed_ = true; return true; } -void VP9EncoderImpl::SetRates(const RateControlParameters& parameters) { +void LibvpxVp9Encoder::SetRates(const RateControlParameters& parameters) { if (!inited_) { RTC_LOG(LS_WARNING) << "SetRates() calll while uninitialzied."; return; @@ -527,8 +438,8 @@ void VP9EncoderImpl::SetRates(const RateControlParameters& parameters) { } // TODO(eladalon): s/inst/codec_settings/g. 
-int VP9EncoderImpl::InitEncode(const VideoCodec* inst, - const Settings& settings) { +int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, + const Settings& settings) { if (inst == nullptr) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } @@ -570,6 +481,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, if (&codec_ != inst) { codec_ = *inst; } + memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); force_key_frame_ = true; pics_since_key_ = 0; @@ -590,7 +502,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, is_svc_ = (num_spatial_layers_ > 1 || num_temporal_layers_ > 1); // Populate encoder configuration with default values. - if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) { + if (libvpx_->codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) { return WEBRTC_VIDEO_CODEC_ERROR; } @@ -621,8 +533,8 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, // Creating a wrapper to the image - setting image data to nullptr. Actual // pointer will be set in encode. Setting align to 1, as it is meaningless // (actual memory is not allocated). 
- raw_ = - vpx_img_wrap(nullptr, img_fmt, codec_.width, codec_.height, 1, nullptr); + raw_ = libvpx_->img_wrap(nullptr, img_fmt, codec_.width, codec_.height, 1, + nullptr); raw_->bit_depth = bits_for_storage; config_->g_w = codec_.width; @@ -665,8 +577,6 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, config_->g_threads = NumberOfThreads(config_->g_w, config_->g_h, settings.number_of_cores); - cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h); - is_flexible_mode_ = inst->VP9().flexibleMode; inter_layer_pred_ = inst->VP9().interLayerPred; @@ -731,9 +641,9 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst, return InitAndSetControlSettings(inst); } -int VP9EncoderImpl::NumberOfThreads(int width, - int height, - int number_of_cores) { +int LibvpxVp9Encoder::NumberOfThreads(int width, + int height, + int number_of_cores) { // Keep the number of encoder threads equal to the possible number of column // tiles, which is (1, 2, 4, 8). See comments below for VP9E_SET_TILE_COLUMNS. if (width * height >= 1280 * 720 && number_of_cores > 4) { @@ -753,7 +663,7 @@ int VP9EncoderImpl::NumberOfThreads(int width, } } -int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { +int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { // Set QP-min/max per spatial and temporal layer. int tot_num_layers = num_spatial_layers_ * num_temporal_layers_; for (int i = 0; i < tot_num_layers; ++i) { @@ -819,53 +729,54 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - const vpx_codec_err_t rv = vpx_codec_enc_init( + const vpx_codec_err_t rv = libvpx_->codec_enc_init( encoder_, vpx_codec_vp9_cx(), config_, config_->g_bit_depth == VPX_BITS_8 ? 
0 : VPX_CODEC_USE_HIGHBITDEPTH); if (rv != VPX_CODEC_OK) { - RTC_LOG(LS_ERROR) << "Init error: " << vpx_codec_err_to_string(rv); + RTC_LOG(LS_ERROR) << "Init error: " << libvpx_->codec_err_to_string(rv); return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - if (per_layer_speed_.enabled) { - for (int i = 0; i < num_spatial_layers_; ++i) { - if (codec_.spatialLayers[i].active) { - continue; - } - - if (per_layer_speed_.layers[i] != -1) { - svc_params_.speed_per_layer[i] = per_layer_speed_.layers[i]; - } else { - svc_params_.speed_per_layer[i] = GetCpuSpeed( - codec_.spatialLayers[i].width, codec_.spatialLayers[i].height); - } + UpdatePerformanceFlags(); + RTC_DCHECK_EQ(performance_flags_by_spatial_index_.size(), + static_cast(num_spatial_layers_)); + if (performance_flags_.use_per_layer_speed) { + for (int si = 0; si < num_spatial_layers_; ++si) { + svc_params_.speed_per_layer[si] = + performance_flags_by_spatial_index_[si].base_layer_speed; + svc_params_.loopfilter_ctrl[si] = + performance_flags_by_spatial_index_[si].deblock_mode; } } - vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_); - vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT, - rc_max_intra_target_); - vpx_codec_control(encoder_, VP9E_SET_AQ_MODE, - inst->VP9().adaptiveQpMode ? 3 : 0); + libvpx_->codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT, + rc_max_intra_target_); + libvpx_->codec_control(encoder_, VP9E_SET_AQ_MODE, + inst->VP9().adaptiveQpMode ? 
3 : 0); - vpx_codec_control(encoder_, VP9E_SET_FRAME_PARALLEL_DECODING, 0); - vpx_codec_control(encoder_, VP9E_SET_SVC_GF_TEMPORAL_REF, 0); + libvpx_->codec_control(encoder_, VP9E_SET_FRAME_PARALLEL_DECODING, 0); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_GF_TEMPORAL_REF, 0); if (is_svc_) { - vpx_codec_control(encoder_, VP9E_SET_SVC, 1); - vpx_codec_control(encoder_, VP9E_SET_SVC_PARAMETERS, &svc_params_); + libvpx_->codec_control(encoder_, VP9E_SET_SVC, 1); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_PARAMETERS, &svc_params_); + } + if (!is_svc_ || !performance_flags_.use_per_layer_speed) { + libvpx_->codec_control( + encoder_, VP8E_SET_CPUUSED, + performance_flags_by_spatial_index_.rbegin()->base_layer_speed); } if (num_spatial_layers_ > 1) { switch (inter_layer_pred_) { case InterLayerPredMode::kOn: - vpx_codec_control(encoder_, VP9E_SET_SVC_INTER_LAYER_PRED, 0); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_INTER_LAYER_PRED, 0); break; case InterLayerPredMode::kOff: - vpx_codec_control(encoder_, VP9E_SET_SVC_INTER_LAYER_PRED, 1); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_INTER_LAYER_PRED, 1); break; case InterLayerPredMode::kOnKeyPic: - vpx_codec_control(encoder_, VP9E_SET_SVC_INTER_LAYER_PRED, 2); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_INTER_LAYER_PRED, 2); break; default: RTC_NOTREACHED(); @@ -902,46 +813,47 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) { svc_drop_frame_.framedrop_thresh[i] = config_->rc_dropframe_thresh; } } - vpx_codec_control(encoder_, VP9E_SET_SVC_FRAME_DROP_LAYER, - &svc_drop_frame_); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_FRAME_DROP_LAYER, + &svc_drop_frame_); } // Register callback for getting each spatial layer. 
vpx_codec_priv_output_cx_pkt_cb_pair_t cbp = { - VP9EncoderImpl::EncoderOutputCodedPacketCallback, + LibvpxVp9Encoder::EncoderOutputCodedPacketCallback, reinterpret_cast(this)}; - vpx_codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK, - reinterpret_cast(&cbp)); + libvpx_->codec_control(encoder_, VP9E_REGISTER_CX_CALLBACK, + reinterpret_cast(&cbp)); // Control function to set the number of column tiles in encoding a frame, in // log2 unit: e.g., 0 = 1 tile column, 1 = 2 tile columns, 2 = 4 tile columns. // The number tile columns will be capped by the encoder based on image size // (minimum width of tile column is 256 pixels, maximum is 4096). - vpx_codec_control(encoder_, VP9E_SET_TILE_COLUMNS, (config_->g_threads >> 1)); + libvpx_->codec_control(encoder_, VP9E_SET_TILE_COLUMNS, + static_cast((config_->g_threads >> 1))); // Turn on row-based multithreading. - vpx_codec_control(encoder_, VP9E_SET_ROW_MT, 1); + libvpx_->codec_control(encoder_, VP9E_SET_ROW_MT, 1); #if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) && \ !defined(ANDROID) // Do not enable the denoiser on ARM since optimization is pending. // Denoiser is on by default on other platforms. - vpx_codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, - inst->VP9().denoisingOn ? 1 : 0); + libvpx_->codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, + inst->VP9().denoisingOn ? 1 : 0); #endif if (codec_.mode == VideoCodecMode::kScreensharing) { // Adjust internal parameters to screen content. - vpx_codec_control(encoder_, VP9E_SET_TUNE_CONTENT, 1); + libvpx_->codec_control(encoder_, VP9E_SET_TUNE_CONTENT, 1); } // Enable encoder skip of static/low content blocks. 
- vpx_codec_control(encoder_, VP8E_SET_STATIC_THRESHOLD, 1); + libvpx_->codec_control(encoder_, VP8E_SET_STATIC_THRESHOLD, 1); inited_ = true; config_changed_ = true; return WEBRTC_VIDEO_CODEC_OK; } -uint32_t VP9EncoderImpl::MaxIntraTarget(uint32_t optimal_buffer_size) { +uint32_t LibvpxVp9Encoder::MaxIntraTarget(uint32_t optimal_buffer_size) { // Set max to the optimal buffer level (normalized by target BR), // and scaled by a scale_par. // Max target size = scale_par * optimal_buffer_size * targetBR[Kbps]. @@ -956,8 +868,8 @@ uint32_t VP9EncoderImpl::MaxIntraTarget(uint32_t optimal_buffer_size) { return (target_pct < min_intra_size) ? min_intra_size : target_pct; } -int VP9EncoderImpl::Encode(const VideoFrame& input_image, - const std::vector* frame_types) { +int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, + const std::vector* frame_types) { if (!inited_) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } @@ -1068,19 +980,55 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, } } - vpx_codec_control(encoder_, VP9E_SET_SVC_LAYER_ID, &layer_id); + if (is_svc_ && performance_flags_.use_per_layer_speed) { + // Update speed settings that might depend on temporal index. + bool speed_updated = false; + for (int sl_idx = 0; sl_idx < num_spatial_layers_; ++sl_idx) { + const int target_speed = + layer_id.temporal_layer_id_per_spatial[sl_idx] == 0 + ? performance_flags_by_spatial_index_[sl_idx].base_layer_speed + : performance_flags_by_spatial_index_[sl_idx].high_layer_speed; + if (svc_params_.speed_per_layer[sl_idx] != target_speed) { + svc_params_.speed_per_layer[sl_idx] = target_speed; + speed_updated = true; + } + } + if (speed_updated) { + libvpx_->codec_control(encoder_, VP9E_SET_SVC_PARAMETERS, &svc_params_); + } + } + + libvpx_->codec_control(encoder_, VP9E_SET_SVC_LAYER_ID, &layer_id); if (num_spatial_layers_ > 1) { // Update frame dropping settings as they may change on per-frame basis. 
- vpx_codec_control(encoder_, VP9E_SET_SVC_FRAME_DROP_LAYER, - &svc_drop_frame_); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_FRAME_DROP_LAYER, + &svc_drop_frame_); } if (config_changed_) { - if (vpx_codec_enc_config_set(encoder_, config_)) { + if (libvpx_->codec_enc_config_set(encoder_, config_)) { return WEBRTC_VIDEO_CODEC_ERROR; } - vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_); + + if (!performance_flags_.use_per_layer_speed) { + // Not setting individual speeds per layer, find the highest active + // resolution instead and base the speed on that. + for (int i = num_spatial_layers_ - 1; i >= 0; --i) { + if (config_->ss_target_bitrate[i] > 0) { + int width = (svc_params_.scaling_factor_num[i] * config_->g_w) / + svc_params_.scaling_factor_den[i]; + int height = (svc_params_.scaling_factor_num[i] * config_->g_h) / + svc_params_.scaling_factor_den[i]; + int speed = + std::prev(performance_flags_.settings_by_resolution.lower_bound( + width * height)) + ->second.base_layer_speed; + libvpx_->codec_control(encoder_, VP8E_SET_CPUUSED, speed); + break; + } + } + } config_changed_ = false; } @@ -1093,37 +1041,17 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, // doing this. input_image_ = &input_image; - // Keep reference to buffer until encode completes. - rtc::scoped_refptr video_frame_buffer; + // In case we need to map the buffer, |mapped_buffer| is used to keep it alive + // through reference counting until after encoding has finished. 
+ rtc::scoped_refptr mapped_buffer; const I010BufferInterface* i010_buffer; rtc::scoped_refptr i010_copy; switch (profile_) { case VP9Profile::kProfile0: { - if (input_image.video_frame_buffer()->type() == - VideoFrameBuffer::Type::kNV12) { - const NV12BufferInterface* nv12_buffer = - input_image.video_frame_buffer()->GetNV12(); - video_frame_buffer = nv12_buffer; - MaybeRewrapRawWithFormat(VPX_IMG_FMT_NV12); - raw_->planes[VPX_PLANE_Y] = const_cast(nv12_buffer->DataY()); - raw_->planes[VPX_PLANE_U] = const_cast(nv12_buffer->DataUV()); - raw_->planes[VPX_PLANE_V] = raw_->planes[VPX_PLANE_U] + 1; - raw_->stride[VPX_PLANE_Y] = nv12_buffer->StrideY(); - raw_->stride[VPX_PLANE_U] = nv12_buffer->StrideUV(); - raw_->stride[VPX_PLANE_V] = nv12_buffer->StrideUV(); - } else { - rtc::scoped_refptr i420_buffer = - input_image.video_frame_buffer()->ToI420(); - video_frame_buffer = i420_buffer; - MaybeRewrapRawWithFormat(VPX_IMG_FMT_I420); - // Image in vpx_image_t format. - // Input image is const. VPX's raw image is not defined as const. 
- raw_->planes[VPX_PLANE_Y] = const_cast(i420_buffer->DataY()); - raw_->planes[VPX_PLANE_U] = const_cast(i420_buffer->DataU()); - raw_->planes[VPX_PLANE_V] = const_cast(i420_buffer->DataV()); - raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY(); - raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU(); - raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV(); + mapped_buffer = + PrepareBufferForProfile0(input_image.video_frame_buffer()); + if (!mapped_buffer) { + return WEBRTC_VIDEO_CODEC_ERROR; } break; } @@ -1165,7 +1093,8 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, if (svc_controller_) { vpx_svc_ref_frame_config_t ref_config = Vp9References(layer_frames_); - vpx_codec_control(encoder_, VP9E_SET_SVC_REF_FRAME_CONFIG, &ref_config); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_REF_FRAME_CONFIG, + &ref_config); } else if (external_ref_control_) { vpx_svc_ref_frame_config_t ref_config = SetReferences(force_key_frame_, layer_id.spatial_layer_id); @@ -1178,7 +1107,8 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, } } - vpx_codec_control(encoder_, VP9E_SET_SVC_REF_FRAME_CONFIG, &ref_config); + libvpx_->codec_control(encoder_, VP9E_SET_SVC_REF_FRAME_CONFIG, + &ref_config); } first_frame_in_picture_ = true; @@ -1198,14 +1128,14 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, .GetTargetRate()) : codec_.maxFramerate; uint32_t duration = static_cast(90000 / target_framerate_fps); - const vpx_codec_err_t rv = vpx_codec_encode(encoder_, raw_, timestamp_, - duration, flags, VPX_DL_REALTIME); + const vpx_codec_err_t rv = libvpx_->codec_encode( + encoder_, raw_, timestamp_, duration, flags, VPX_DL_REALTIME); if (rv != VPX_CODEC_OK) { - RTC_LOG(LS_ERROR) << "Encoding error: " << vpx_codec_err_to_string(rv) + RTC_LOG(LS_ERROR) << "Encoding error: " << libvpx_->codec_err_to_string(rv) << "\n" "Details: " - << vpx_codec_error(encoder_) << "\n" - << vpx_codec_error_detail(encoder_); + << libvpx_->codec_error(encoder_) << "\n" + << 
libvpx_->codec_error_detail(encoder_); return WEBRTC_VIDEO_CODEC_ERROR; } timestamp_ += duration; @@ -1218,10 +1148,10 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image, return WEBRTC_VIDEO_CODEC_OK; } -void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, - absl::optional* spatial_idx, - const vpx_codec_cx_pkt& pkt, - uint32_t timestamp) { +bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, + absl::optional* spatial_idx, + const vpx_codec_cx_pkt& pkt, + uint32_t timestamp) { RTC_CHECK(codec_specific != nullptr); codec_specific->codecType = kVideoCodecVP9; CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9); @@ -1236,7 +1166,7 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, } vpx_svc_layer_id_t layer_id = {0}; - vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); + libvpx_->codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); // Can't have keyframe with non-zero temporal layer. 
RTC_DCHECK(pics_since_key_ != 0 || layer_id.temporal_layer_id == 0); @@ -1338,10 +1268,15 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, auto it = absl::c_find_if( layer_frames_, [&](const ScalableVideoController::LayerFrameConfig& config) { - return config.SpatialId() == spatial_idx->value_or(0); + return config.SpatialId() == layer_id.spatial_layer_id; }); - RTC_CHECK(it != layer_frames_.end()) - << "Failed to find spatial id " << spatial_idx->value_or(0); + if (it == layer_frames_.end()) { + RTC_LOG(LS_ERROR) << "Encoder produced a frame for layer S" + << layer_id.spatial_layer_id << "T" + << layer_id.temporal_layer_id + << " that wasn't requested."; + return false; + } codec_specific->generic_frame_info = svc_controller_->OnEncodeDone(*it); if (is_key_frame) { codec_specific->template_structure = @@ -1357,14 +1292,15 @@ void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, } } } + return true; } -void VP9EncoderImpl::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, - const size_t pic_num, - const bool inter_layer_predicted, - CodecSpecificInfoVP9* vp9_info) { +void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, + const size_t pic_num, + const bool inter_layer_predicted, + CodecSpecificInfoVP9* vp9_info) { vpx_svc_layer_id_t layer_id = {0}; - vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); + libvpx_->codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); const bool is_key_frame = (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? 
true : false; @@ -1373,7 +1309,8 @@ void VP9EncoderImpl::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, if (is_svc_) { vpx_svc_ref_frame_config_t enc_layer_conf = {{0}}; - vpx_codec_control(encoder_, VP9E_GET_SVC_REF_FRAME_CONFIG, &enc_layer_conf); + libvpx_->codec_control(encoder_, VP9E_GET_SVC_REF_FRAME_CONFIG, + &enc_layer_conf); int ref_buf_flags = 0; if (enc_layer_conf.reference_last[layer_id.spatial_layer_id]) { @@ -1478,17 +1415,18 @@ void VP9EncoderImpl::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, static_cast(layer_id.temporal_layer_id)); } -void VP9EncoderImpl::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, - const size_t pic_num) { +void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, + const size_t pic_num) { vpx_svc_layer_id_t layer_id = {0}; - vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); + libvpx_->codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); RefFrameBuffer frame_buf(pic_num, layer_id.spatial_layer_id, layer_id.temporal_layer_id); if (is_svc_) { vpx_svc_ref_frame_config_t enc_layer_conf = {{0}}; - vpx_codec_control(encoder_, VP9E_GET_SVC_REF_FRAME_CONFIG, &enc_layer_conf); + libvpx_->codec_control(encoder_, VP9E_GET_SVC_REF_FRAME_CONFIG, + &enc_layer_conf); const int update_buffer_slot = enc_layer_conf.update_buffer_slot[layer_id.spatial_layer_id]; @@ -1518,7 +1456,7 @@ void VP9EncoderImpl::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, } } -vpx_svc_ref_frame_config_t VP9EncoderImpl::SetReferences( +vpx_svc_ref_frame_config_t LibvpxVp9Encoder::SetReferences( bool is_key_pic, size_t first_active_spatial_layer_id) { // kRefBufIdx, kUpdBufIdx need to be updated to support longer GOFs. 
@@ -1612,16 +1550,16 @@ vpx_svc_ref_frame_config_t VP9EncoderImpl::SetReferences( return ref_config; } -int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { +void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { RTC_DCHECK_EQ(pkt->kind, VPX_CODEC_CX_FRAME_PKT); if (pkt->data.frame.sz == 0) { // Ignore dropped frame. - return WEBRTC_VIDEO_CODEC_OK; + return; } vpx_svc_layer_id_t layer_id = {0}; - vpx_codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); + libvpx_->codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); if (layer_buffering_) { // Deliver buffered low spatial layer frame. @@ -1648,8 +1586,12 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { codec_specific_ = {}; absl::optional spatial_index; - PopulateCodecSpecific(&codec_specific_, &spatial_index, *pkt, - input_image_->timestamp()); + if (!PopulateCodecSpecific(&codec_specific_, &spatial_index, *pkt, + input_image_->timestamp())) { + // Drop the frame. 
+ encoded_image_.set_size(0); + return; + } encoded_image_.SetSpatialIndex(spatial_index); UpdateReferenceBuffers(*pkt, pics_since_key_); @@ -1661,7 +1603,7 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { encoded_image_._encodedWidth = pkt->data.frame.width[layer_id.spatial_layer_id]; int qp = -1; - vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp); + libvpx_->codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp); encoded_image_.qp_ = qp; if (!layer_buffering_) { @@ -1669,11 +1611,9 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { num_active_spatial_layers_; DeliverBufferedFrame(end_of_picture); } - - return WEBRTC_VIDEO_CODEC_OK; } -void VP9EncoderImpl::DeliverBufferedFrame(bool end_of_picture) { +void LibvpxVp9Encoder::DeliverBufferedFrame(bool end_of_picture) { if (encoded_image_.size() > 0) { if (num_spatial_layers_ > 1) { // Restore frame dropping settings, as dropping may be temporary forbidden @@ -1714,13 +1654,13 @@ void VP9EncoderImpl::DeliverBufferedFrame(bool end_of_picture) { } } -int VP9EncoderImpl::RegisterEncodeCompleteCallback( +int LibvpxVp9Encoder::RegisterEncodeCompleteCallback( EncodedImageCallback* callback) { encoded_complete_callback_ = callback; return WEBRTC_VIDEO_CODEC_OK; } -VideoEncoder::EncoderInfo VP9EncoderImpl::GetEncoderInfo() const { +VideoEncoder::EncoderInfo LibvpxVp9Encoder::GetEncoderInfo() const { EncoderInfo info; info.supports_native_handle = false; info.implementation_name = "libvpx"; @@ -1767,10 +1707,14 @@ VideoEncoder::EncoderInfo VP9EncoderImpl::GetEncoderInfo() const { VideoFrameBuffer::Type::kNV12}; } } + if (!encoder_info_override_.resolution_bitrate_limits().empty()) { + info.resolution_bitrate_limits = + encoder_info_override_.resolution_bitrate_limits(); + } return info; } -size_t VP9EncoderImpl::SteadyStateSize(int sid, int tid) { +size_t LibvpxVp9Encoder::SteadyStateSize(int sid, int tid) { const size_t bitrate_bps = 
current_bitrate_allocation_.GetBitrate( sid, tid == kNoTemporalIdx ? 0 : tid); const float fps = (codec_.mode == VideoCodecMode::kScreensharing) @@ -1786,8 +1730,8 @@ size_t VP9EncoderImpl::SteadyStateSize(int sid, int tid) { } // static -VP9EncoderImpl::VariableFramerateExperiment -VP9EncoderImpl::ParseVariableFramerateConfig( +LibvpxVp9Encoder::VariableFramerateExperiment +LibvpxVp9Encoder::ParseVariableFramerateConfig( const WebRtcKeyValueConfig& trials) { FieldTrialFlag enabled = FieldTrialFlag("Enabled"); FieldTrialParameter framerate_limit("min_fps", 5.0); @@ -1809,8 +1753,8 @@ VP9EncoderImpl::ParseVariableFramerateConfig( } // static -VP9EncoderImpl::QualityScalerExperiment -VP9EncoderImpl::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { +LibvpxVp9Encoder::QualityScalerExperiment +LibvpxVp9Encoder::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { FieldTrialFlag disabled = FieldTrialFlag("Disabled"); FieldTrialParameter low_qp("low_qp", kLowVp9QpThreshold); FieldTrialParameter high_qp("hihg_qp", kHighVp9QpThreshold); @@ -1826,329 +1770,182 @@ VP9EncoderImpl::ParseQualityScalerConfig(const WebRtcKeyValueConfig& trials) { return config; } -// static -VP9EncoderImpl::SpeedSettings VP9EncoderImpl::ParsePerLayerSpeed( - const WebRtcKeyValueConfig& trials) { - FieldTrialFlag enabled("enabled"); - FieldTrialParameter speeds[kMaxSpatialLayers]{ - {"s0", -1}, {"s1", -1}, {"s2", -1}, {"s3", -1}, {"s4", -1}}; - ParseFieldTrial( - {&enabled, &speeds[0], &speeds[1], &speeds[2], &speeds[3], &speeds[4]}, - trials.Lookup("WebRTC-VP9-PerLayerSpeed")); - return SpeedSettings{enabled.Get(), - {speeds[0].Get(), speeds[1].Get(), speeds[2].Get(), - speeds[3].Get(), speeds[4].Get()}}; +void LibvpxVp9Encoder::UpdatePerformanceFlags() { + const auto find_speed = [&](int min_pixel_count) { + RTC_DCHECK(!performance_flags_.settings_by_resolution.empty()); + auto it = + performance_flags_.settings_by_resolution.upper_bound(min_pixel_count); + return 
std::prev(it)->second; + }; + + performance_flags_by_spatial_index_.clear(); + if (is_svc_) { + for (int si = 0; si < num_spatial_layers_; ++si) { + performance_flags_by_spatial_index_.push_back(find_speed( + codec_.spatialLayers[si].width * codec_.spatialLayers[si].height)); + } + } else { + performance_flags_by_spatial_index_.push_back( + find_speed(codec_.width * codec_.height)); + } } -void VP9EncoderImpl::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) { +// static +LibvpxVp9Encoder::PerformanceFlags +LibvpxVp9Encoder::ParsePerformanceFlagsFromTrials( + const WebRtcKeyValueConfig& trials) { + struct Params : public PerformanceFlags::ParameterSet { + int min_pixel_count = 0; + }; + + FieldTrialStructList trials_list( + {FieldTrialStructMember("min_pixel_count", + [](Params* p) { return &p->min_pixel_count; }), + FieldTrialStructMember("high_layer_speed", + [](Params* p) { return &p->high_layer_speed; }), + FieldTrialStructMember("base_layer_speed", + [](Params* p) { return &p->base_layer_speed; }), + FieldTrialStructMember("deblock_mode", + [](Params* p) { return &p->deblock_mode; })}, + {}); + + FieldTrialFlag per_layer_speed("use_per_layer_speed"); + + ParseFieldTrial({&trials_list, &per_layer_speed}, + trials.Lookup("WebRTC-VP9-PerformanceFlags")); + + PerformanceFlags flags; + flags.use_per_layer_speed = per_layer_speed.Get(); + + constexpr int kMinSpeed = 1; + constexpr int kMaxSpeed = 9; + for (auto& f : trials_list.Get()) { + if (f.base_layer_speed < kMinSpeed || f.base_layer_speed > kMaxSpeed || + f.high_layer_speed < kMinSpeed || f.high_layer_speed > kMaxSpeed || + f.deblock_mode < 0 || f.deblock_mode > 2) { + RTC_LOG(LS_WARNING) << "Ignoring invalid performance flags: " + << "min_pixel_count = " << f.min_pixel_count + << ", high_layer_speed = " << f.high_layer_speed + << ", base_layer_speed = " << f.base_layer_speed + << ", deblock_mode = " << f.deblock_mode; + continue; + } + flags.settings_by_resolution[f.min_pixel_count] = f; + } + + if 
(flags.settings_by_resolution.empty()) { + return GetDefaultPerformanceFlags(); + } + + return flags; +} + +// static +LibvpxVp9Encoder::PerformanceFlags +LibvpxVp9Encoder::GetDefaultPerformanceFlags() { + PerformanceFlags flags; + flags.use_per_layer_speed = false; +#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) + // Speed 8 on all layers for all resolutions. + flags.settings_by_resolution[0] = {8, 8, 0}; +#else + // For smaller resolutions, use lower speed setting (get some coding gain at + // the cost of increased encoding complexity). + flags.settings_by_resolution[0] = {5, 5, 0}; + + // Use speed 7 for QCIF and above. + flags.settings_by_resolution[352 * 288] = {7, 7, 0}; +#endif + return flags; +} + +void LibvpxVp9Encoder::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) { if (!raw_) { - raw_ = vpx_img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, nullptr); + raw_ = libvpx_->img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, + nullptr); } else if (raw_->fmt != fmt) { RTC_LOG(INFO) << "Switching VP9 encoder pixel format to " << (fmt == VPX_IMG_FMT_NV12 ? "NV12" : "I420"); - vpx_img_free(raw_); - raw_ = vpx_img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, nullptr); + libvpx_->img_free(raw_); + raw_ = libvpx_->img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, + nullptr); } // else no-op since the image is already in the right format. } -VP9DecoderImpl::VP9DecoderImpl() : VP9DecoderImpl(FieldTrialBasedConfig()) {} -VP9DecoderImpl::VP9DecoderImpl(const WebRtcKeyValueConfig& trials) - : decode_complete_callback_(nullptr), - inited_(false), - decoder_(nullptr), - key_frame_required_(true), - preferred_output_format_( - absl::StartsWith(trials.Lookup("WebRTC-NV12Decode"), "Enabled") - ? 
VideoFrameBuffer::Type::kNV12 - : VideoFrameBuffer::Type::kI420) {} +rtc::scoped_refptr LibvpxVp9Encoder::PrepareBufferForProfile0( + rtc::scoped_refptr buffer) { + absl::InlinedVector + supported_formats = {VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12}; -VP9DecoderImpl::~VP9DecoderImpl() { - inited_ = true; // in order to do the actual release - Release(); - int num_buffers_in_use = libvpx_buffer_pool_.GetNumBuffersInUse(); - if (num_buffers_in_use > 0) { - // The frame buffers are reference counted and frames are exposed after - // decoding. There may be valid usage cases where previous frames are still - // referenced after ~VP9DecoderImpl that is not a leak. - RTC_LOG(LS_INFO) << num_buffers_in_use - << " Vp9FrameBuffers are still " - "referenced during ~VP9DecoderImpl."; - } -} - -int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) { - int ret_val = Release(); - if (ret_val < 0) { - return ret_val; - } - - if (decoder_ == nullptr) { - decoder_ = new vpx_codec_ctx_t; - } - vpx_codec_dec_cfg_t cfg; - memset(&cfg, 0, sizeof(cfg)); - -#ifdef FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION - // We focus on webrtc fuzzing here, not libvpx itself. Use single thread for - // fuzzing, because: - // - libvpx's VP9 single thread decoder is more fuzzer friendly. It detects - // errors earlier than the multi-threads version. - // - Make peak CPU usage under control (not depending on input) - cfg.threads = 1; -#else - if (!inst) { - // No config provided - don't know resolution to decode yet. - // Set thread count to one in the meantime. - cfg.threads = 1; + rtc::scoped_refptr mapped_buffer; + if (buffer->type() != VideoFrameBuffer::Type::kNative) { + // |buffer| is already mapped. + mapped_buffer = buffer; } else { - // We want to use multithreading when decoding high resolution videos. But - // not too many in order to avoid overhead when many stream are decoded - // concurrently. 
- // Set 2 thread as target for 1280x720 pixel count, and then scale up - // linearly from there - but cap at physical core count. - // For common resolutions this results in: - // 1 for 360p - // 2 for 720p - // 4 for 1080p - // 8 for 1440p - // 18 for 4K - int num_threads = - std::max(1, 2 * (inst->width * inst->height) / (1280 * 720)); - cfg.threads = std::min(number_of_cores, num_threads); - current_codec_ = *inst; + // Attempt to map to one of the supported formats. + mapped_buffer = buffer->GetMappedFrameBuffer(supported_formats); } -#endif - - num_cores_ = number_of_cores; - - vpx_codec_flags_t flags = 0; - if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) { - return WEBRTC_VIDEO_CODEC_MEMORY; - } - - if (!libvpx_buffer_pool_.InitializeVpxUsePool(decoder_)) { - return WEBRTC_VIDEO_CODEC_MEMORY; - } - - inited_ = true; - // Always start with a complete key frame. - key_frame_required_ = true; - if (inst && inst->buffer_pool_size) { - if (!libvpx_buffer_pool_.Resize(*inst->buffer_pool_size) || - !output_buffer_pool_.Resize(*inst->buffer_pool_size)) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + if (!mapped_buffer || + (absl::c_find(supported_formats, mapped_buffer->type()) == + supported_formats.end() && + mapped_buffer->type() != VideoFrameBuffer::Type::kI420A)) { + // Unknown pixel format or unable to map, convert to I420 and prepare that + // buffer instead to ensure Scale() is safe to use. + auto converted_buffer = buffer->ToI420(); + if (!converted_buffer) { + RTC_LOG(LS_ERROR) << "Failed to convert " + << VideoFrameBufferTypeToString(buffer->type()) + << " image to I420. Can't encode frame."; + return {}; } - } - - vpx_codec_err_t status = - vpx_codec_control(decoder_, VP9D_SET_LOOP_FILTER_OPT, 1); - if (status != VPX_CODEC_OK) { - RTC_LOG(LS_ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. 
" - << vpx_codec_error(decoder_); - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - - return WEBRTC_VIDEO_CODEC_OK; -} - -int VP9DecoderImpl::Decode(const EncodedImage& input_image, - bool missing_frames, - int64_t /*render_time_ms*/) { - if (!inited_) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - if (decode_complete_callback_ == nullptr) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - - if (input_image._frameType == VideoFrameType::kVideoFrameKey) { - absl::optional frame_info = - vp9::ParseIntraFrameInfo(input_image.data(), input_image.size()); - if (frame_info) { - if (frame_info->frame_width != current_codec_.width || - frame_info->frame_height != current_codec_.height) { - // Resolution has changed, tear down and re-init a new decoder in - // order to get correct sizing. - Release(); - current_codec_.width = frame_info->frame_width; - current_codec_.height = frame_info->frame_height; - int reinit_status = InitDecode(¤t_codec_, num_cores_); - if (reinit_status != WEBRTC_VIDEO_CODEC_OK) { - RTC_LOG(LS_WARNING) << "Failed to re-init decoder."; - return reinit_status; - } - } - } else { - RTC_LOG(LS_WARNING) << "Failed to parse VP9 header from key-frame."; + // The buffer should now be a mapped I420 or I420A format, but some buffer + // implementations incorrectly return the wrong buffer format, such as + // kNative. As a workaround to this, we perform ToI420() a second time. + // TODO(https://crbug.com/webrtc/12602): When Android buffers have a correct + // ToI420() implementaion, remove his workaround. + if (converted_buffer->type() != VideoFrameBuffer::Type::kI420 && + converted_buffer->type() != VideoFrameBuffer::Type::kI420A) { + converted_buffer = converted_buffer->ToI420(); + RTC_CHECK(converted_buffer->type() == VideoFrameBuffer::Type::kI420 || + converted_buffer->type() == VideoFrameBuffer::Type::kI420A); } + // Because |buffer| had to be converted, use |converted_buffer| instead. 
+ buffer = mapped_buffer = converted_buffer; } - // Always start with a complete key frame. - if (key_frame_required_) { - if (input_image._frameType != VideoFrameType::kVideoFrameKey) - return WEBRTC_VIDEO_CODEC_ERROR; - key_frame_required_ = false; - } - vpx_codec_iter_t iter = nullptr; - vpx_image_t* img; - const uint8_t* buffer = input_image.data(); - if (input_image.size() == 0) { - buffer = nullptr; // Triggers full frame concealment. - } - // During decode libvpx may get and release buffers from - // |libvpx_buffer_pool_|. In practice libvpx keeps a few (~3-4) buffers alive - // at a time. - if (vpx_codec_decode(decoder_, buffer, - static_cast(input_image.size()), 0, - VPX_DL_REALTIME)) { - return WEBRTC_VIDEO_CODEC_ERROR; - } - // |img->fb_priv| contains the image data, a reference counted Vp9FrameBuffer. - // It may be released by libvpx during future vpx_codec_decode or - // vpx_codec_destroy calls. - img = vpx_codec_get_frame(decoder_, &iter); - int qp; - vpx_codec_err_t vpx_ret = - vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp); - RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK); - int ret = - ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace()); - if (ret != 0) { - return ret; - } - return WEBRTC_VIDEO_CODEC_OK; -} - -int VP9DecoderImpl::ReturnFrame( - const vpx_image_t* img, - uint32_t timestamp, - int qp, - const webrtc::ColorSpace* explicit_color_space) { - if (img == nullptr) { - // Decoder OK and nullptr image => No show frame. - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } - - // This buffer contains all of |img|'s image data, a reference counted - // Vp9FrameBuffer. (libvpx is done with the buffers after a few - // vpx_codec_decode calls or vpx_codec_destroy). - Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer = - static_cast(img->fb_priv); - - // The buffer can be used directly by the VideoFrame (without copy) by - // using a Wrapped*Buffer. 
- rtc::scoped_refptr img_wrapped_buffer; - switch (img->bit_depth) { - case 8: - if (img->fmt == VPX_IMG_FMT_I420) { - if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { - rtc::scoped_refptr nv12_buffer = - output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h); - if (!nv12_buffer.get()) { - // Buffer pool is full. - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } - img_wrapped_buffer = nv12_buffer; - libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y], - img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], - img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], - nv12_buffer->MutableDataY(), - nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), - nv12_buffer->StrideUV(), img->d_w, img->d_h); - // No holding onto img_buffer as it's no longer needed and can be - // reused. - } else { - img_wrapped_buffer = WrapI420Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI420Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. This is where we - // should release |img_buffer|. - rtc::KeepRefUntilDone(img_buffer)); - } - } else if (img->fmt == VPX_IMG_FMT_I444) { - img_wrapped_buffer = WrapI444Buffer( - img->d_w, img->d_h, img->planes[VPX_PLANE_Y], - img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U], - img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V], - img->stride[VPX_PLANE_V], - // WrappedI444Buffer's mechanism for allowing the release of its - // frame buffer is through a callback function. This is where we - // should release |img_buffer|. - rtc::KeepRefUntilDone(img_buffer)); - } else { - RTC_LOG(LS_ERROR) - << "Unsupported pixel format produced by the decoder: " - << static_cast(img->fmt); - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; - } + // Prepare |raw_| from |mapped_buffer|. 
+ switch (mapped_buffer->type()) { + case VideoFrameBuffer::Type::kI420: + case VideoFrameBuffer::Type::kI420A: { + MaybeRewrapRawWithFormat(VPX_IMG_FMT_I420); + const I420BufferInterface* i420_buffer = mapped_buffer->GetI420(); + RTC_DCHECK(i420_buffer); + raw_->planes[VPX_PLANE_Y] = const_cast(i420_buffer->DataY()); + raw_->planes[VPX_PLANE_U] = const_cast(i420_buffer->DataU()); + raw_->planes[VPX_PLANE_V] = const_cast(i420_buffer->DataV()); + raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY(); + raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU(); + raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV(); break; - case 10: - img_wrapped_buffer = WrapI010Buffer( - img->d_w, img->d_h, - reinterpret_cast(img->planes[VPX_PLANE_Y]), - img->stride[VPX_PLANE_Y] / 2, - reinterpret_cast(img->planes[VPX_PLANE_U]), - img->stride[VPX_PLANE_U] / 2, - reinterpret_cast(img->planes[VPX_PLANE_V]), - img->stride[VPX_PLANE_V] / 2, rtc::KeepRefUntilDone(img_buffer)); + } + case VideoFrameBuffer::Type::kNV12: { + MaybeRewrapRawWithFormat(VPX_IMG_FMT_NV12); + const NV12BufferInterface* nv12_buffer = mapped_buffer->GetNV12(); + RTC_DCHECK(nv12_buffer); + raw_->planes[VPX_PLANE_Y] = const_cast(nv12_buffer->DataY()); + raw_->planes[VPX_PLANE_U] = const_cast(nv12_buffer->DataUV()); + raw_->planes[VPX_PLANE_V] = raw_->planes[VPX_PLANE_U] + 1; + raw_->stride[VPX_PLANE_Y] = nv12_buffer->StrideY(); + raw_->stride[VPX_PLANE_U] = nv12_buffer->StrideUV(); + raw_->stride[VPX_PLANE_V] = nv12_buffer->StrideUV(); break; + } default: - RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: " - << img->bit_depth; - return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + RTC_NOTREACHED(); } - - auto builder = VideoFrame::Builder() - .set_video_frame_buffer(img_wrapped_buffer) - .set_timestamp_rtp(timestamp); - if (explicit_color_space) { - builder.set_color_space(*explicit_color_space); - } else { - builder.set_color_space( - ExtractVP9ColorSpace(img->cs, img->range, img->bit_depth)); - } - VideoFrame 
decoded_image = builder.build(); - - decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp); - return WEBRTC_VIDEO_CODEC_OK; -} - -int VP9DecoderImpl::RegisterDecodeCompleteCallback( - DecodedImageCallback* callback) { - decode_complete_callback_ = callback; - return WEBRTC_VIDEO_CODEC_OK; -} - -int VP9DecoderImpl::Release() { - int ret_val = WEBRTC_VIDEO_CODEC_OK; - - if (decoder_ != nullptr) { - if (inited_) { - // When a codec is destroyed libvpx will release any buffers of - // |libvpx_buffer_pool_| it is currently using. - if (vpx_codec_destroy(decoder_)) { - ret_val = WEBRTC_VIDEO_CODEC_MEMORY; - } - } - delete decoder_; - decoder_ = nullptr; - } - // Releases buffers from the pool. Any buffers not in use are deleted. Buffers - // still referenced externally are deleted once fully released, not returning - // to the pool. - libvpx_buffer_pool_.ClearPool(); - output_buffer_pool_.Release(); - inited_ = false; - return ret_val; -} - -const char* VP9DecoderImpl::ImplementationName() const { - return "libvpx"; + return mapped_buffer; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h similarity index 68% rename from TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h rename to TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h index 14c3ca8cc..954c044c2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -9,38 +9,37 @@ * */ -#ifndef MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_ -#define MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_ +#ifndef MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_ENCODER_H_ +#define MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_ENCODER_H_ #ifdef RTC_ENABLE_VP9 #include #include -#include #include #include "api/fec_controller_override.h" #include "api/transport/webrtc_key_value_config.h" #include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp9_profile.h" #include "common_video/include/video_frame_buffer_pool.h" -#include "media/base/vp9_profile.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/framerate_controller.h" +#include "rtc_base/experiments/encoder_info_settings.h" #include "vpx/vp8cx.h" -#include "vpx/vpx_decoder.h" -#include "vpx/vpx_encoder.h" namespace webrtc { -class VP9EncoderImpl : public VP9Encoder { +class LibvpxVp9Encoder : public VP9Encoder { public: - explicit VP9EncoderImpl(const cricket::VideoCodec& codec); - VP9EncoderImpl(const cricket::VideoCodec& codec, - const WebRtcKeyValueConfig& trials); + LibvpxVp9Encoder(const cricket::VideoCodec& codec, + std::unique_ptr interface, + const WebRtcKeyValueConfig& trials); - ~VP9EncoderImpl() override; + ~LibvpxVp9Encoder() override; void SetFecControllerOverride( FecControllerOverride* fec_controller_override) override; @@ -66,7 +65,7 @@ class VP9EncoderImpl : public VP9Encoder { // Call encoder initialize function and set control settings. 
int InitAndSetControlSettings(const VideoCodec* inst); - void PopulateCodecSpecific(CodecSpecificInfo* codec_specific, + bool PopulateCodecSpecific(CodecSpecificInfo* codec_specific, absl::optional* spatial_idx, const vpx_codec_cx_pkt& pkt, uint32_t timestamp); @@ -83,7 +82,7 @@ class VP9EncoderImpl : public VP9Encoder { bool ExplicitlyConfiguredSpatialLayers() const; bool SetSvcRates(const VideoBitrateAllocation& bitrate_allocation); - virtual int GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt); + void GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt); // Callback function for outputting packets per spatial layer. static void EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, @@ -104,7 +103,14 @@ class VP9EncoderImpl : public VP9Encoder { size_t SteadyStateSize(int sid, int tid); void MaybeRewrapRawWithFormat(const vpx_img_fmt fmt); + // Prepares |raw_| to reference image data of |buffer|, or of mapped or scaled + // versions of |buffer|. Returns the buffer that got referenced as a result, + // allowing the caller to keep a reference to it until after encoding has + // finished. On failure to convert the buffer, null is returned. 
+ rtc::scoped_refptr PrepareBufferForProfile0( + rtc::scoped_refptr buffer); + const std::unique_ptr libvpx_; EncodedImage encoded_image_; CodecSpecificInfo codec_specific_; EncodedImageCallback* encoded_complete_callback_; @@ -112,7 +118,6 @@ class VP9EncoderImpl : public VP9Encoder { const VP9Profile profile_; bool inited_; int64_t timestamp_; - int cpu_speed_; uint32_t rc_max_intra_target_; vpx_codec_ctx_t* encoder_; vpx_codec_enc_cfg_t* config_; @@ -194,58 +199,50 @@ class VP9EncoderImpl : public VP9Encoder { const WebRtcKeyValueConfig& trials); const bool external_ref_ctrl_; - const struct SpeedSettings { - bool enabled; - int layers[kMaxSpatialLayers]; - } per_layer_speed_; - static SpeedSettings ParsePerLayerSpeed(const WebRtcKeyValueConfig& trials); + // Flags that can affect speed vs quality tradeoff, and are configureable per + // resolution ranges. + struct PerformanceFlags { + // If false, a lookup will be made in |settings_by_resolution| base on the + // highest currently active resolution, and the overall speed then set to + // to the |base_layer_speed| matching that entry. + // If true, each active resolution will have it's speed and deblock_mode set + // based on it resolution, and the high layer speed configured for non + // base temporal layer frames. + bool use_per_layer_speed = false; + + struct ParameterSet { + int base_layer_speed = -1; // Speed setting for TL0. + int high_layer_speed = -1; // Speed setting for TL1-TL3. + // 0 = deblock all temporal layers (TL) + // 1 = disable deblock for top-most TL + // 2 = disable deblock for all TLs + int deblock_mode = 0; + }; + // Map from min pixel count to settings for that resolution and above. + // E.g. if you want some settings A if below wvga (640x360) and some other + // setting B at wvga and above, you'd use map {{0, A}, {230400, B}}. + std::map settings_by_resolution; + }; + // Performance flags, ordered by |min_pixel_count|. 
+ const PerformanceFlags performance_flags_; + // Caching of of |speed_configs_|, where index i maps to the resolution as + // specified in |codec_.spatialLayer[i]|. + std::vector + performance_flags_by_spatial_index_; + void UpdatePerformanceFlags(); + static PerformanceFlags ParsePerformanceFlagsFromTrials( + const WebRtcKeyValueConfig& trials); + static PerformanceFlags GetDefaultPerformanceFlags(); int num_steady_state_frames_; // Only set config when this flag is set. bool config_changed_; + + const LibvpxVp9EncoderInfoSettings encoder_info_override_; }; -class VP9DecoderImpl : public VP9Decoder { - public: - VP9DecoderImpl(); - explicit VP9DecoderImpl(const WebRtcKeyValueConfig& trials); - - virtual ~VP9DecoderImpl(); - - int InitDecode(const VideoCodec* inst, int number_of_cores) override; - - int Decode(const EncodedImage& input_image, - bool missing_frames, - int64_t /*render_time_ms*/) override; - - int RegisterDecodeCompleteCallback(DecodedImageCallback* callback) override; - - int Release() override; - - const char* ImplementationName() const override; - - private: - int ReturnFrame(const vpx_image_t* img, - uint32_t timestamp, - int qp, - const webrtc::ColorSpace* explicit_color_space); - - // Memory pool used to share buffers between libvpx and webrtc. - Vp9FrameBufferPool libvpx_buffer_pool_; - // Buffer pool used to allocate additionally needed NV12 buffers. - VideoFrameBufferPool output_buffer_pool_; - DecodedImageCallback* decode_complete_callback_; - bool inited_; - vpx_codec_ctx_t* decoder_; - bool key_frame_required_; - VideoCodec current_codec_; - int num_cores_; - - // Decoder should produce this format if possible. 
- const VideoFrameBuffer::Type preferred_output_format_; -}; } // namespace webrtc #endif // RTC_ENABLE_VP9 -#endif // MODULES_VIDEO_CODING_CODECS_VP9_VP9_IMPL_H_ +#endif // MODULES_VIDEO_CODING_CODECS_VP9_LIBVPX_VP9_ENCODER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc index 9b0585c05..d9caf0f03 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc @@ -12,8 +12,11 @@ #include +#include "api/transport/field_trial_based_config.h" #include "api/video_codecs/sdp_video_format.h" -#include "modules/video_coding/codecs/vp9/vp9_impl.h" +#include "api/video_codecs/vp9_profile.h" +#include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h" +#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include "rtc_base/checks.h" #include "vpx/vp8cx.h" #include "vpx/vp8dx.h" @@ -63,7 +66,9 @@ std::vector SupportedVP9DecoderCodecs() { std::unique_ptr VP9Encoder::Create() { #ifdef RTC_ENABLE_VP9 - return std::make_unique(cricket::VideoCodec()); + return std::make_unique(cricket::VideoCodec(), + LibvpxInterface::Create(), + FieldTrialBasedConfig()); #else RTC_NOTREACHED(); return nullptr; @@ -73,7 +78,8 @@ std::unique_ptr VP9Encoder::Create() { std::unique_ptr VP9Encoder::Create( const cricket::VideoCodec& codec) { #ifdef RTC_ENABLE_VP9 - return std::make_unique(codec); + return std::make_unique(codec, LibvpxInterface::Create(), + FieldTrialBasedConfig()); #else RTC_NOTREACHED(); return nullptr; @@ -82,7 +88,7 @@ std::unique_ptr VP9Encoder::Create( std::unique_ptr VP9Decoder::Create() { #ifdef RTC_ENABLE_VP9 - return std::make_unique(); + return std::make_unique(); #else RTC_NOTREACHED(); return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc index 4d0a6983a..d1f58b1bb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc @@ -15,7 +15,6 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" #include "vpx/vpx_codec.h" #include "vpx/vpx_decoder.h" #include "vpx/vpx_frame_buffer.h" @@ -68,7 +67,7 @@ Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) { } // Otherwise create one. if (available_buffer == nullptr) { - available_buffer = new rtc::RefCountedObject(); + available_buffer = new Vp9FrameBuffer(); allocated_buffers_.push_back(available_buffer); if (allocated_buffers_.size() > max_num_buffers_) { RTC_LOG(LS_WARNING) diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h index d37a9fc0e..bce10be4d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h @@ -16,9 +16,9 @@ #include +#include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "rtc_base/buffer.h" -#include "rtc_base/ref_count.h" #include "rtc_base/synchronization/mutex.h" struct vpx_codec_ctx; @@ -65,13 +65,14 @@ constexpr size_t kDefaultMaxNumBuffers = 68; // vpx_codec_destroy(decoder_ctx); class Vp9FrameBufferPool { public: - class Vp9FrameBuffer : public rtc::RefCountInterface { + class Vp9FrameBuffer final + : public rtc::RefCountedNonVirtual { public: uint8_t* GetData(); size_t GetDataSize() const; void SetSize(size_t size); - virtual bool HasOneRef() const = 0; + using rtc::RefCountedNonVirtual::HasOneRef; private: // Data as an easily resizable buffer. 
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc index a7a4b8f75..6aa332eb8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.cc @@ -56,7 +56,6 @@ bool VCMDecoderDataBase::DeregisterExternalDecoder(uint8_t payload_type) { // Release it if it was registered and in use. ptr_decoder_.reset(); } - DeregisterReceiveCodec(payload_type); delete it->second; dec_external_map_.erase(it); return true; @@ -73,6 +72,12 @@ void VCMDecoderDataBase::RegisterExternalDecoder(VideoDecoder* external_decoder, dec_external_map_[payload_type] = ext_decoder; } +bool VCMDecoderDataBase::IsExternalDecoderRegistered( + uint8_t payload_type) const { + return payload_type == current_payload_type_ || + FindExternalDecoderItem(payload_type); +} + bool VCMDecoderDataBase::RegisterReceiveCodec(uint8_t payload_type, const VideoCodec* receive_codec, int number_of_cores) { @@ -133,10 +138,6 @@ VCMGenericDecoder* VCMDecoderDataBase::GetDecoder( return ptr_decoder_.get(); } -bool VCMDecoderDataBase::PrefersLateDecoding() const { - return ptr_decoder_ ? 
ptr_decoder_->PrefersLateDecoding() : true; -} - std::unique_ptr VCMDecoderDataBase::CreateAndInitDecoder( const VCMEncodedFrame& frame, VideoCodec* new_codec) const { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.h index abfd81e34..81c68e413 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoder_database.h @@ -44,6 +44,7 @@ class VCMDecoderDataBase { bool DeregisterExternalDecoder(uint8_t payload_type); void RegisterExternalDecoder(VideoDecoder* external_decoder, uint8_t payload_type); + bool IsExternalDecoderRegistered(uint8_t payload_type) const; bool RegisterReceiveCodec(uint8_t payload_type, const VideoCodec* receive_codec, @@ -59,10 +60,6 @@ class VCMDecoderDataBase { const VCMEncodedFrame& frame, VCMDecodedFrameCallback* decoded_frame_callback); - // Returns true if the currently active decoder prefer to decode frames late. - // That means that frames must be decoded near the render times stamp. 
- bool PrefersLateDecoding() const; - private: typedef std::map DecoderMap; typedef std::map ExternalDecoderMap; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoding_state.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoding_state.h index b87fb2d03..ec972949d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoding_state.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/decoding_state.h @@ -11,6 +11,7 @@ #ifndef MODULES_VIDEO_CODING_DECODING_STATE_H_ #define MODULES_VIDEO_CODING_DECODING_STATE_H_ +#include #include #include #include diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.h index f9580ae80..2fac6ce12 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/deprecated/nack_module.h @@ -17,11 +17,11 @@ #include #include +#include "absl/base/attributes.h" #include "api/units/time_delta.h" #include "modules/include/module.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/histogram.h" -#include "rtc_base/deprecation.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -125,7 +125,7 @@ class DEPRECATED_NackModule : public Module { const absl::optional backoff_settings_; }; -using NackModule = RTC_DEPRECATED DEPRECATED_NackModule; +using NackModule ABSL_DEPRECATED("") = DEPRECATED_NackModule; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.cc index 4638771b2..dbad1b933 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.cc @@ -142,6 +142,10 @@ void 
VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) { break; } #endif + case kVideoCodecAV1: { + _codecSpecificInfo.codecType = kVideoCodecAV1; + break; + } default: { _codecSpecificInfo.codecType = kVideoCodecGeneric; break; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.h index a77d42eec..9cc769277 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/encoded_frame.h @@ -21,7 +21,7 @@ namespace webrtc { -class RTC_EXPORT VCMEncodedFrame : protected EncodedImage { +class RTC_EXPORT VCMEncodedFrame : public EncodedImage { public: VCMEncodedFrame(); VCMEncodedFrame(const VCMEncodedFrame&); @@ -52,7 +52,6 @@ class RTC_EXPORT VCMEncodedFrame : protected EncodedImage { using EncodedImage::GetEncodedData; using EncodedImage::NtpTimeMs; using EncodedImage::PacketInfos; - using EncodedImage::Retain; using EncodedImage::set_size; using EncodedImage::SetColorSpace; using EncodedImage::SetEncodedData; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc index afce78766..903b9fb62 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc @@ -102,24 +102,28 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() { RTC_DCHECK_RUN_ON(&callback_checker_); // If this task has not been cancelled, we did not get any new frames // while waiting. Continue with frame delivery. - MutexLock lock(&mutex_); - if (!frames_to_decode_.empty()) { - // We have frames, deliver! - frame_handler_(absl::WrapUnique(GetNextFrame()), kFrameFound); + std::unique_ptr frame; + std::function, ReturnReason)> + frame_handler; + { + MutexLock lock(&mutex_); + if (!frames_to_decode_.empty()) { + // We have frames, deliver! 
+ frame = absl::WrapUnique(GetNextFrame()); + } else if (clock_->TimeInMilliseconds() < latest_return_time_ms_) { + // If there's no frames to decode and there is still time left, it + // means that the frame buffer was cleared between creation and + // execution of this task. Continue waiting for the remaining time. + int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds()); + return TimeDelta::Millis(wait_ms); + } + frame_handler = std::move(frame_handler_); CancelCallback(); - return TimeDelta::Zero(); // Ignored. - } else if (clock_->TimeInMilliseconds() >= latest_return_time_ms_) { - // We have timed out, signal this and stop repeating. - frame_handler_(nullptr, kTimeout); - CancelCallback(); - return TimeDelta::Zero(); // Ignored. - } else { - // If there's no frames to decode and there is still time left, it - // means that the frame buffer was cleared between creation and - // execution of this task. Continue waiting for the remaining time. - int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds()); - return TimeDelta::Millis(wait_ms); } + // Deliver frame, if any. Otherwise signal timeout. + ReturnReason reason = frame ? kFrameFound : kTimeout; + frame_handler(std::move(frame), reason); + return TimeDelta::Zero(); // Ignored. }); } @@ -153,38 +157,44 @@ int64_t FrameBuffer::FindNextFrame(int64_t now_ms) { continue; } - // Only ever return all parts of a superframe. Therefore skip this - // frame if it's not a beginning of a superframe. - if (frame->inter_layer_predicted) { - continue; - } - // Gather all remaining frames for the same superframe. 
std::vector current_superframe; current_superframe.push_back(frame_it); bool last_layer_completed = frame_it->second.frame->is_last_spatial_layer; FrameMap::iterator next_frame_it = frame_it; - while (true) { + while (!last_layer_completed) { ++next_frame_it; - if (next_frame_it == frames_.end() || - next_frame_it->first.picture_id != frame->id.picture_id || + + if (next_frame_it == frames_.end() || !next_frame_it->second.frame) { + break; + } + + if (next_frame_it->second.frame->Timestamp() != frame->Timestamp() || !next_frame_it->second.continuous) { break; } - // Check if the next frame has some undecoded references other than - // the previous frame in the same superframe. - size_t num_allowed_undecoded_refs = - (next_frame_it->second.frame->inter_layer_predicted) ? 1 : 0; - if (next_frame_it->second.num_missing_decodable > - num_allowed_undecoded_refs) { - break; - } - // All frames in the superframe should have the same timestamp. - if (frame->Timestamp() != next_frame_it->second.frame->Timestamp()) { - RTC_LOG(LS_WARNING) << "Frames in a single superframe have different" - " timestamps. Skipping undecodable superframe."; - break; + + if (next_frame_it->second.num_missing_decodable > 0) { + bool has_inter_layer_dependency = false; + for (size_t i = 0; i < EncodedFrame::kMaxFrameReferences && + i < next_frame_it->second.frame->num_references; + ++i) { + if (next_frame_it->second.frame->references[i] >= frame_it->first) { + has_inter_layer_dependency = true; + break; + } + } + + // If the frame has an undecoded dependency that is not within the same + // temporal unit then this frame is not yet ready to be decoded. If it + // is within the same temporal unit then the not yet decoded dependency + // is just a lower spatial frame, which is ok. 
+ if (!has_inter_layer_dependency || + next_frame_it->second.num_missing_decodable > 1) { + break; + } } + current_superframe.push_back(next_frame_it); last_layer_completed = next_frame_it->second.frame->is_last_spatial_layer; } @@ -251,11 +261,11 @@ EncodedFrame* FrameBuffer::GetNextFrame() { // Remove decoded frame and all undecoded frames before it. if (stats_callback_) { - unsigned int dropped_frames = std::count_if( - frames_.begin(), frame_it, - [](const std::pair& frame) { - return frame.second.frame != nullptr; - }); + unsigned int dropped_frames = + std::count_if(frames_.begin(), frame_it, + [](const std::pair& frame) { + return frame.second.frame != nullptr; + }); if (dropped_frames > 0) { stats_callback_->OnDroppedFrames(dropped_frames); } @@ -360,7 +370,7 @@ void FrameBuffer::UpdateRtt(int64_t rtt_ms) { bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const { for (size_t i = 0; i < frame.num_references; ++i) { - if (frame.references[i] >= frame.id.picture_id) + if (frame.references[i] >= frame.Id()) return false; for (size_t j = i + 1; j < frame.num_references; ++j) { @@ -369,9 +379,6 @@ bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const { } } - if (frame.inter_layer_predicted && frame.id.spatial_layer == 0) - return false; - return true; } @@ -383,136 +390,82 @@ void FrameBuffer::CancelCallback() { callback_checker_.Detach(); } -bool FrameBuffer::IsCompleteSuperFrame(const EncodedFrame& frame) { - if (frame.inter_layer_predicted) { - // Check that all previous spatial layers are already inserted. 
- VideoLayerFrameId id = frame.id; - RTC_DCHECK_GT(id.spatial_layer, 0); - --id.spatial_layer; - FrameMap::iterator prev_frame = frames_.find(id); - if (prev_frame == frames_.end() || !prev_frame->second.frame) - return false; - while (prev_frame->second.frame->inter_layer_predicted) { - if (prev_frame == frames_.begin()) - return false; - --prev_frame; - --id.spatial_layer; - if (!prev_frame->second.frame || - prev_frame->first.picture_id != id.picture_id || - prev_frame->first.spatial_layer != id.spatial_layer) { - return false; - } - } - } - - if (!frame.is_last_spatial_layer) { - // Check that all following spatial layers are already inserted. - VideoLayerFrameId id = frame.id; - ++id.spatial_layer; - FrameMap::iterator next_frame = frames_.find(id); - if (next_frame == frames_.end() || !next_frame->second.frame) - return false; - while (!next_frame->second.frame->is_last_spatial_layer) { - ++next_frame; - ++id.spatial_layer; - if (next_frame == frames_.end() || !next_frame->second.frame || - next_frame->first.picture_id != id.picture_id || - next_frame->first.spatial_layer != id.spatial_layer) { - return false; - } - } - } - - return true; -} - int64_t FrameBuffer::InsertFrame(std::unique_ptr frame) { TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame"); RTC_DCHECK(frame); MutexLock lock(&mutex_); - const VideoLayerFrameId& id = frame->id; - int64_t last_continuous_picture_id = - !last_continuous_frame_ ? 
-1 : last_continuous_frame_->picture_id; + int64_t last_continuous_frame_id = last_continuous_frame_.value_or(-1); if (!ValidReferences(*frame)) { - RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" - << id.picture_id << ":" - << static_cast(id.spatial_layer) - << ") has invalid frame references, dropping frame."; - return last_continuous_picture_id; + RTC_LOG(LS_WARNING) << "Frame " << frame->Id() + << " has invalid frame references, dropping frame."; + return last_continuous_frame_id; } if (frames_.size() >= kMaxFramesBuffered) { if (frame->is_keyframe()) { - RTC_LOG(LS_WARNING) << "Inserting keyframe (picture_id:spatial_id) (" - << id.picture_id << ":" - << static_cast(id.spatial_layer) - << ") but buffer is full, clearing" + RTC_LOG(LS_WARNING) << "Inserting keyframe " << frame->Id() + << " but buffer is full, clearing" " buffer and inserting the frame."; ClearFramesAndHistory(); } else { - RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" - << id.picture_id << ":" - << static_cast(id.spatial_layer) - << ") could not be inserted due to the frame " + RTC_LOG(LS_WARNING) << "Frame " << frame->Id() + << " could not be inserted due to the frame " "buffer being full, dropping frame."; - return last_continuous_picture_id; + return last_continuous_frame_id; } } auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId(); auto last_decoded_frame_timestamp = decoded_frames_history_.GetLastDecodedFrameTimestamp(); - if (last_decoded_frame && id <= *last_decoded_frame) { + if (last_decoded_frame && frame->Id() <= *last_decoded_frame) { if (AheadOf(frame->Timestamp(), *last_decoded_frame_timestamp) && frame->is_keyframe()) { - // If this frame has a newer timestamp but an earlier picture id then we - // assume there has been a jump in the picture id due to some encoder + // If this frame has a newer timestamp but an earlier frame id then we + // assume there has been a jump in the frame id due to some encoder // reconfiguration or some 
other reason. Even though this is not according // to spec we can still continue to decode from this frame if it is a // keyframe. RTC_LOG(LS_WARNING) - << "A jump in picture id was detected, clearing buffer."; + << "A jump in frame id was detected, clearing buffer."; ClearFramesAndHistory(); - last_continuous_picture_id = -1; + last_continuous_frame_id = -1; } else { - RTC_LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" - << id.picture_id << ":" - << static_cast(id.spatial_layer) - << ") inserted after frame (" - << last_decoded_frame->picture_id << ":" - << static_cast(last_decoded_frame->spatial_layer) - << ") was handed off for decoding, dropping frame."; - return last_continuous_picture_id; + RTC_LOG(LS_WARNING) << "Frame " << frame->Id() << " inserted after frame " + << *last_decoded_frame + << " was handed off for decoding, dropping frame."; + return last_continuous_frame_id; } } // Test if inserting this frame would cause the order of the frames to become // ambiguous (covering more than half the interval of 2^16). This can happen - // when the picture id make large jumps mid stream. - if (!frames_.empty() && id < frames_.begin()->first && - frames_.rbegin()->first < id) { - RTC_LOG(LS_WARNING) - << "A jump in picture id was detected, clearing buffer."; + // when the frame id make large jumps mid stream. 
+ if (!frames_.empty() && frame->Id() < frames_.begin()->first && + frames_.rbegin()->first < frame->Id()) { + RTC_LOG(LS_WARNING) << "A jump in frame id was detected, clearing buffer."; ClearFramesAndHistory(); - last_continuous_picture_id = -1; + last_continuous_frame_id = -1; } - auto info = frames_.emplace(id, FrameInfo()).first; + auto info = frames_.emplace(frame->Id(), FrameInfo()).first; if (info->second.frame) { - return last_continuous_picture_id; + return last_continuous_frame_id; } if (!UpdateFrameInfoWithIncomingFrame(*frame, info)) - return last_continuous_picture_id; + return last_continuous_frame_id; if (!frame->delayed_by_retransmission()) timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime()); - if (stats_callback_ && IsCompleteSuperFrame(*frame)) { + // It can happen that a frame will be reported as fully received even if a + // lower spatial layer frame is missing. + if (stats_callback_ && frame->is_last_spatial_layer) { stats_callback_->OnCompleteFrame(frame->is_keyframe(), frame->size(), frame->contentType()); } @@ -522,7 +475,7 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr frame) { if (info->second.num_missing_continuous == 0) { info->second.continuous = true; PropagateContinuity(info); - last_continuous_picture_id = last_continuous_frame_->picture_id; + last_continuous_frame_id = *last_continuous_frame_; // Since we now have new continuous frames there might be a better frame // to return from NextFrame. 
@@ -538,7 +491,7 @@ int64_t FrameBuffer::InsertFrame(std::unique_ptr frame) { } } - return last_continuous_picture_id; + return last_continuous_frame_id; } void FrameBuffer::PropagateContinuity(FrameMap::iterator start) { @@ -591,8 +544,6 @@ void FrameBuffer::PropagateDecodability(const FrameInfo& info) { bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame, FrameMap::iterator info) { TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame"); - const VideoLayerFrameId& id = frame.id; - auto last_decoded_frame = decoded_frames_history_.GetLastDecodedFrameId(); RTC_DCHECK(!last_decoded_frame || *last_decoded_frame < info->first); @@ -605,52 +556,34 @@ bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame, // so that |num_missing_continuous| and |num_missing_decodable| can be // decremented as frames become continuous/are decoded. struct Dependency { - VideoLayerFrameId id; + int64_t frame_id; bool continuous; }; std::vector not_yet_fulfilled_dependencies; // Find all dependencies that have not yet been fulfilled. for (size_t i = 0; i < frame.num_references; ++i) { - VideoLayerFrameId ref_key(frame.references[i], frame.id.spatial_layer); // Does |frame| depend on a frame earlier than the last decoded one? - if (last_decoded_frame && ref_key <= *last_decoded_frame) { + if (last_decoded_frame && frame.references[i] <= *last_decoded_frame) { // Was that frame decoded? If not, this |frame| will never become // decodable. 
- if (!decoded_frames_history_.WasDecoded(ref_key)) { + if (!decoded_frames_history_.WasDecoded(frame.references[i])) { int64_t now_ms = clock_->TimeInMilliseconds(); if (last_log_non_decoded_ms_ + kLogNonDecodedIntervalMs < now_ms) { RTC_LOG(LS_WARNING) - << "Frame with (picture_id:spatial_id) (" << id.picture_id << ":" - << static_cast(id.spatial_layer) - << ") depends on a non-decoded frame more previous than" - " the last decoded frame, dropping frame."; + << "Frame " << frame.Id() + << " depends on a non-decoded frame more previous than the last " + "decoded frame, dropping frame."; last_log_non_decoded_ms_ = now_ms; } return false; } } else { - auto ref_info = frames_.find(ref_key); + auto ref_info = frames_.find(frame.references[i]); bool ref_continuous = ref_info != frames_.end() && ref_info->second.continuous; - not_yet_fulfilled_dependencies.push_back({ref_key, ref_continuous}); - } - } - - // Does |frame| depend on the lower spatial layer? - if (frame.inter_layer_predicted) { - VideoLayerFrameId ref_key(frame.id.picture_id, frame.id.spatial_layer - 1); - auto ref_info = frames_.find(ref_key); - - bool lower_layer_decoded = - last_decoded_frame && *last_decoded_frame == ref_key; - bool lower_layer_continuous = - lower_layer_decoded || - (ref_info != frames_.end() && ref_info->second.continuous); - - if (!lower_layer_continuous || !lower_layer_decoded) { not_yet_fulfilled_dependencies.push_back( - {ref_key, lower_layer_continuous}); + {frame.references[i], ref_continuous}); } } @@ -661,7 +594,7 @@ bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame, if (dep.continuous) --info->second.num_missing_continuous; - frames_[dep.id].dependent_frames.push_back(id); + frames_[dep.frame_id].dependent_frames.push_back(frame.Id()); } return true; @@ -697,11 +630,11 @@ void FrameBuffer::UpdateTimingFrameInfo() { void FrameBuffer::ClearFramesAndHistory() { TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory"); if (stats_callback_) { - 
unsigned int dropped_frames = std::count_if( - frames_.begin(), frames_.end(), - [](const std::pair& frame) { - return frame.second.frame != nullptr; - }); + unsigned int dropped_frames = + std::count_if(frames_.begin(), frames_.end(), + [](const std::pair& frame) { + return frame.second.frame != nullptr; + }); if (dropped_frames > 0) { stats_callback_->OnDroppedFrames(dropped_frames); } @@ -725,15 +658,14 @@ EncodedFrame* FrameBuffer::CombineAndDeleteFrames( } auto encoded_image_buffer = EncodedImageBuffer::Create(total_length); uint8_t* buffer = encoded_image_buffer->data(); - first_frame->SetSpatialLayerFrameSize(first_frame->id.spatial_layer, + first_frame->SetSpatialLayerFrameSize(first_frame->SpatialIndex().value_or(0), first_frame->size()); memcpy(buffer, first_frame->data(), first_frame->size()); buffer += first_frame->size(); // Spatial index of combined frame is set equal to spatial index of its top // spatial layer. - first_frame->SetSpatialIndex(last_frame->id.spatial_layer); - first_frame->id.spatial_layer = last_frame->id.spatial_layer; + first_frame->SetSpatialIndex(last_frame->SpatialIndex().value_or(0)); first_frame->video_timing_mutable()->network2_timestamp_ms = last_frame->video_timing().network2_timestamp_ms; @@ -743,8 +675,8 @@ EncodedFrame* FrameBuffer::CombineAndDeleteFrames( // Append all remaining frames to the first one. 
for (size_t i = 1; i < frames.size(); ++i) { EncodedFrame* next_frame = frames[i]; - first_frame->SetSpatialLayerFrameSize(next_frame->id.spatial_layer, - next_frame->size()); + first_frame->SetSpatialLayerFrameSize( + next_frame->SpatialIndex().value_or(0), next_frame->size()); memcpy(buffer, next_frame->data(), next_frame->size()); buffer += next_frame->size(); delete next_frame; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h index 2ed21c4f7..721668a12 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h @@ -18,6 +18,7 @@ #include #include "absl/container/inlined_vector.h" +#include "api/sequence_checker.h" #include "api/video/encoded_frame.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/inter_frame_delay.h" @@ -27,7 +28,7 @@ #include "rtc_base/experiments/rtt_mult_experiment.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -57,7 +58,6 @@ class FrameBuffer { // Insert a frame into the frame buffer. Returns the picture id // of the last continuous frame or -1 if there is no continuous frame. - // TODO(philipel): Return a VideoLayerFrameId and not only the picture id. int64_t InsertFrame(std::unique_ptr frame); // Get the next frame for decoding. Will return at latest after @@ -94,7 +94,7 @@ class FrameBuffer { // Which other frames that have direct unfulfilled dependencies // on this frame. 
- absl::InlinedVector dependent_frames; + absl::InlinedVector dependent_frames; // A frame is continiuous if it has all its referenced/indirectly // referenced frames. @@ -114,7 +114,7 @@ class FrameBuffer { std::unique_ptr frame; }; - using FrameMap = std::map; + using FrameMap = std::map; // Check that the references of |frame| are valid. bool ValidReferences(const EncodedFrame& frame) const; @@ -147,10 +147,6 @@ class FrameBuffer { void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - // Checks if the superframe, which current frame belongs to, is complete. - bool IsCompleteSuperFrame(const EncodedFrame& frame) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - bool HasBadRenderTiming(const EncodedFrame& frame, int64_t now_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -161,8 +157,8 @@ class FrameBuffer { EncodedFrame* CombineAndDeleteFrames( const std::vector& frames) const; - SequenceChecker construction_checker_; - SequenceChecker callback_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker construction_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker callback_checker_; // Stores only undecoded frames. 
FrameMap frames_ RTC_GUARDED_BY(mutex_); @@ -181,8 +177,7 @@ class FrameBuffer { VCMJitterEstimator jitter_estimator_ RTC_GUARDED_BY(mutex_); VCMTiming* const timing_ RTC_GUARDED_BY(mutex_); VCMInterFrameDelay inter_frame_delay_ RTC_GUARDED_BY(mutex_); - absl::optional last_continuous_frame_ - RTC_GUARDED_BY(mutex_); + absl::optional last_continuous_frame_ RTC_GUARDED_BY(mutex_); std::vector frames_to_decode_ RTC_GUARDED_BY(mutex_); bool stopped_ RTC_GUARDED_BY(mutex_); VCMVideoProtection protection_mode_ RTC_GUARDED_BY(mutex_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_dependencies_calculator.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_dependencies_calculator.cc index 6de5081b9..7ca59f779 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_dependencies_calculator.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_dependencies_calculator.cc @@ -17,14 +17,12 @@ #include "absl/algorithm/container.h" #include "absl/container/inlined_vector.h" #include "api/array_view.h" -#include "api/video/video_frame_type.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { absl::InlinedVector FrameDependenciesCalculator::FromBuffersUsage( - VideoFrameType frame_type, int64_t frame_id, rtc::ArrayView buffers_usage) { absl::InlinedVector dependencies; @@ -37,29 +35,28 @@ absl::InlinedVector FrameDependenciesCalculator::FromBuffersUsage( } std::set direct_depenendencies; std::set indirect_depenendencies; - if (frame_type == VideoFrameType::kVideoFrameDelta) { - for (const CodecBufferUsage& buffer_usage : buffers_usage) { - if (!buffer_usage.referenced) { - continue; - } - const BufferUsage& buffer = buffers_[buffer_usage.id]; - if (buffer.frame_id == absl::nullopt) { - RTC_LOG(LS_ERROR) << "Odd configuration: frame " << frame_id - << " references buffer #" << buffer_usage.id - << " that was never updated."; - continue; - } - direct_depenendencies.insert(*buffer.frame_id); - 
indirect_depenendencies.insert(buffer.dependencies.begin(), - buffer.dependencies.end()); + + for (const CodecBufferUsage& buffer_usage : buffers_usage) { + if (!buffer_usage.referenced) { + continue; } - // Reduce references: if frame #3 depends on frame #2 and #1, and frame #2 - // depends on frame #1, then frame #3 needs to depend just on frame #2. - // Though this set diff removes only 1 level of indirection, it seems - // enough for all currently used structures. - absl::c_set_difference(direct_depenendencies, indirect_depenendencies, - std::back_inserter(dependencies)); + const BufferUsage& buffer = buffers_[buffer_usage.id]; + if (buffer.frame_id == absl::nullopt) { + RTC_LOG(LS_ERROR) << "Odd configuration: frame " << frame_id + << " references buffer #" << buffer_usage.id + << " that was never updated."; + continue; + } + direct_depenendencies.insert(*buffer.frame_id); + indirect_depenendencies.insert(buffer.dependencies.begin(), + buffer.dependencies.end()); } + // Reduce references: if frame #3 depends on frame #2 and #1, and frame #2 + // depends on frame #1, then frame #3 needs to depend just on frame #2. + // Though this set diff removes only 1 level of indirection, it seems + // enough for all currently used structures. + absl::c_set_difference(direct_depenendencies, indirect_depenendencies, + std::back_inserter(dependencies)); // Update buffers. 
for (const CodecBufferUsage& buffer_usage : buffers_usage) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_dependencies_calculator.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_dependencies_calculator.h index b70eddfc5..2c4a8502e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_dependencies_calculator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_dependencies_calculator.h @@ -18,7 +18,6 @@ #include "absl/container/inlined_vector.h" #include "absl/types/optional.h" #include "api/array_view.h" -#include "api/video/video_frame_type.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" namespace webrtc { @@ -33,7 +32,6 @@ class FrameDependenciesCalculator { // Calculates frame dependencies based on previous encoder buffer usage. absl::InlinedVector FromBuffersUsage( - VideoFrameType frame_type, int64_t frame_id, rtc::ArrayView buffers_usage); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.cc index 25fd23234..d226dcd01 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.cc @@ -19,7 +19,6 @@ #include "rtc_base/checks.h" namespace webrtc { -namespace video_coding { RtpFrameObject::RtpFrameObject( uint16_t first_seq_num, uint16_t last_seq_num, @@ -69,6 +68,7 @@ RtpFrameObject::RtpFrameObject( rotation_ = rotation; SetColorSpace(color_space); + SetVideoFrameTrackingId(rtp_video_header_.video_frame_tracking_id); content_type_ = content_type; if (timing.flags != VideoSendTiming::kInvalid) { // ntp_time_ms_ may be -1 if not estimated yet. 
This is not a problem, @@ -128,5 +128,4 @@ const RTPVideoHeader& RtpFrameObject::GetRtpVideoHeader() const { return rtp_video_header_; } -} // namespace video_coding } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.h index d812b8fd2..c6f069f24 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_object.h @@ -15,7 +15,6 @@ #include "api/video/encoded_frame.h" namespace webrtc { -namespace video_coding { class RtpFrameObject : public EncodedFrame { public: @@ -64,7 +63,6 @@ class RtpFrameObject : public EncodedFrame { int times_nacked_; }; -} // namespace video_coding } // namespace webrtc #endif // MODULES_VIDEO_CODING_FRAME_OBJECT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc index 79057926f..621fd7397 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.cc @@ -91,18 +91,29 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, "timestamp", decodedImage.timestamp()); // TODO(holmer): We should improve this so that we can handle multiple // callbacks from one call to Decode(). - VCMFrameInformation* frameInfo; + absl::optional frameInfo; int timestamp_map_size = 0; + int dropped_frames = 0; { MutexLock lock(&lock_); + int initial_timestamp_map_size = _timestampMap.Size(); frameInfo = _timestampMap.Pop(decodedImage.timestamp()); timestamp_map_size = _timestampMap.Size(); + // _timestampMap.Pop() erases all frame upto the specified timestamp and + // return the frame info for this timestamp if it exists. Thus, the + // difference in the _timestampMap size before and after Pop() will show + // internally dropped frames. 
+ dropped_frames = + initial_timestamp_map_size - timestamp_map_size - (frameInfo ? 1 : 0); } - if (frameInfo == NULL) { + if (dropped_frames > 0) { + _receiveCallback->OnDroppedFrames(dropped_frames); + } + + if (!frameInfo) { RTC_LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping " "this one."; - _receiveCallback->OnDroppedFrames(1); return; } @@ -110,8 +121,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, decodedImage.set_packet_infos(frameInfo->packet_infos); decodedImage.set_rotation(frameInfo->rotation); - if (low_latency_renderer_enabled_ && frameInfo->playout_delay.min_ms == 0 && - frameInfo->playout_delay.max_ms > 0) { + if (low_latency_renderer_enabled_) { absl::optional max_composition_delay_in_frames = _timing->MaxCompositionDelayInFrames(); if (max_composition_delay_in_frames) { @@ -197,18 +207,30 @@ void VCMDecodedFrameCallback::OnDecoderImplementationName( } void VCMDecodedFrameCallback::Map(uint32_t timestamp, - VCMFrameInformation* frameInfo) { - MutexLock lock(&lock_); - _timestampMap.Add(timestamp, frameInfo); + const VCMFrameInformation& frameInfo) { + int dropped_frames = 0; + { + MutexLock lock(&lock_); + int initial_size = _timestampMap.Size(); + _timestampMap.Add(timestamp, frameInfo); + // If no frame is dropped, the new size should be |initial_size| + 1 + dropped_frames = (initial_size + 1) - _timestampMap.Size(); + } + if (dropped_frames > 0) { + _receiveCallback->OnDroppedFrames(dropped_frames); + } } -int32_t VCMDecodedFrameCallback::Pop(uint32_t timestamp) { - MutexLock lock(&lock_); - if (_timestampMap.Pop(timestamp) == NULL) { - return VCM_GENERAL_ERROR; +void VCMDecodedFrameCallback::ClearTimestampMap() { + int dropped_frames = 0; + { + MutexLock lock(&lock_); + dropped_frames = _timestampMap.Size(); + _timestampMap.Clear(); + } + if (dropped_frames > 0) { + _receiveCallback->OnDroppedFrames(dropped_frames); } - _receiveCallback->OnDroppedFrames(1); - return VCM_OK; } 
VCMGenericDecoder::VCMGenericDecoder(std::unique_ptr decoder) @@ -216,8 +238,6 @@ VCMGenericDecoder::VCMGenericDecoder(std::unique_ptr decoder) VCMGenericDecoder::VCMGenericDecoder(VideoDecoder* decoder, bool isExternal) : _callback(NULL), - _frameInfos(), - _nextFrameInfoIdx(0), decoder_(decoder), _codecType(kVideoCodecGeneric), _isExternal(isExternal), @@ -238,52 +258,56 @@ int32_t VCMGenericDecoder::InitDecode(const VideoCodec* settings, _codecType = settings->codecType; int err = decoder_->InitDecode(settings, numberOfCores); - implementation_name_ = decoder_->ImplementationName(); - RTC_LOG(LS_INFO) << "Decoder implementation: " << implementation_name_; + decoder_info_ = decoder_->GetDecoderInfo(); + RTC_LOG(LS_INFO) << "Decoder implementation: " << decoder_info_.ToString(); + if (_callback) { + _callback->OnDecoderImplementationName( + decoder_info_.implementation_name.c_str()); + } return err; } int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { TRACE_EVENT1("webrtc", "VCMGenericDecoder::Decode", "timestamp", frame.Timestamp()); - _frameInfos[_nextFrameInfoIdx].decodeStart = now; - _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs(); - _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation(); - _frameInfos[_nextFrameInfoIdx].playout_delay = frame.PlayoutDelay(); - _frameInfos[_nextFrameInfoIdx].timing = frame.video_timing(); - _frameInfos[_nextFrameInfoIdx].ntp_time_ms = - frame.EncodedImage().ntp_time_ms_; - _frameInfos[_nextFrameInfoIdx].packet_infos = frame.PacketInfos(); + VCMFrameInformation frame_info; + frame_info.decodeStart = now; + frame_info.renderTimeMs = frame.RenderTimeMs(); + frame_info.rotation = frame.rotation(); + frame_info.timing = frame.video_timing(); + frame_info.ntp_time_ms = frame.EncodedImage().ntp_time_ms_; + frame_info.packet_infos = frame.PacketInfos(); // Set correctly only for key frames. Thus, use latest key frame // content type. 
If the corresponding key frame was lost, decode will fail // and content type will be ignored. if (frame.FrameType() == VideoFrameType::kVideoFrameKey) { - _frameInfos[_nextFrameInfoIdx].content_type = frame.contentType(); + frame_info.content_type = frame.contentType(); _last_keyframe_content_type = frame.contentType(); } else { - _frameInfos[_nextFrameInfoIdx].content_type = _last_keyframe_content_type; + frame_info.content_type = _last_keyframe_content_type; } - _callback->Map(frame.Timestamp(), &_frameInfos[_nextFrameInfoIdx]); + _callback->Map(frame.Timestamp(), frame_info); - _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength; int32_t ret = decoder_->Decode(frame.EncodedImage(), frame.MissingFrame(), frame.RenderTimeMs()); - const char* new_implementation_name = decoder_->ImplementationName(); - if (new_implementation_name != implementation_name_) { - implementation_name_ = new_implementation_name; + VideoDecoder::DecoderInfo decoder_info = decoder_->GetDecoderInfo(); + if (decoder_info != decoder_info_) { RTC_LOG(LS_INFO) << "Changed decoder implementation to: " - << new_implementation_name; + << decoder_info.ToString(); + decoder_info_ = decoder_info; + _callback->OnDecoderImplementationName( + decoder_info.implementation_name.empty() + ? "unknown" + : decoder_info.implementation_name.c_str()); } - _callback->OnDecoderImplementationName(implementation_name_.c_str()); if (ret < WEBRTC_VIDEO_CODEC_OK) { RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp " << frame.Timestamp() << ", error code: " << ret; - _callback->Pop(frame.Timestamp()); - return ret; + _callback->ClearTimestampMap(); } else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT) { - // No output - _callback->Pop(frame.Timestamp()); + // No output. 
+ _callback->ClearTimestampMap(); } return ret; } @@ -291,11 +315,12 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { int32_t VCMGenericDecoder::RegisterDecodeCompleteCallback( VCMDecodedFrameCallback* callback) { _callback = callback; - return decoder_->RegisterDecodeCompleteCallback(callback); -} - -bool VCMGenericDecoder::PrefersLateDecoding() const { - return decoder_->PrefersLateDecoding(); + int32_t ret = decoder_->RegisterDecodeCompleteCallback(callback); + if (callback && !decoder_info_.implementation_name.empty()) { + callback->OnDecoderImplementationName( + decoder_info_.implementation_name.c_str()); + } + return ret; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h index 8481fdc15..8e79cb4e1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/generic_decoder.h @@ -14,6 +14,7 @@ #include #include +#include "api/sequence_checker.h" #include "api/units/time_delta.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -21,7 +22,6 @@ #include "modules/video_coding/timing.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -29,19 +29,6 @@ class VCMReceiveCallback; enum { kDecoderFrameMemoryLength = 10 }; -struct VCMFrameInformation { - int64_t renderTimeMs; - absl::optional decodeStart; - void* userData; - VideoRotation rotation; - VideoContentType content_type; - PlayoutDelay playout_delay; - EncodedImage::Timing timing; - int64_t ntp_time_ms; - RtpPacketInfos packet_infos; - // ColorSpace is not stored here, as it might be modified by decoders. 
-}; - class VCMDecodedFrameCallback : public DecodedImageCallback { public: VCMDecodedFrameCallback(VCMTiming* timing, Clock* clock); @@ -57,11 +44,11 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { void OnDecoderImplementationName(const char* implementation_name); - void Map(uint32_t timestamp, VCMFrameInformation* frameInfo); - int32_t Pop(uint32_t timestamp); + void Map(uint32_t timestamp, const VCMFrameInformation& frameInfo); + void ClearTimestampMap(); private: - rtc::ThreadChecker construction_thread_; + SequenceChecker construction_thread_; // Protect |_timestampMap|. Clock* const _clock; // This callback must be set before the decoder thread starts running @@ -111,20 +98,17 @@ class VCMGenericDecoder { */ int32_t RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback); - bool PrefersLateDecoding() const; bool IsSameDecoder(VideoDecoder* decoder) const { return decoder_.get() == decoder; } private: VCMDecodedFrameCallback* _callback; - VCMFrameInformation _frameInfos[kDecoderFrameMemoryLength]; - uint32_t _nextFrameInfoIdx; std::unique_ptr decoder_; VideoCodecType _codecType; const bool _isExternal; VideoContentType _last_keyframe_content_type; - std::string implementation_name_; + VideoDecoder::DecoderInfo decoder_info_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_interface.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_interface.h index c7834a272..4ca00ade7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/include/video_codec_interface.h @@ -13,19 +13,18 @@ #include +#include "absl/base/attributes.h" #include "absl/types/optional.h" #include "api/video/video_frame.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include 
"modules/include/module_common_types.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #ifndef DISABLE_H265 #include "modules/video_coding/codecs/h265/include/h265_globals.h" #endif #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/include/video_error_codes.h" -#include "rtc_base/deprecation.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -83,7 +82,7 @@ struct CodecSpecificInfoVP9 { uint8_t num_ref_pics; uint8_t p_diff[kMaxVp9RefPics]; - RTC_DEPRECATED bool end_of_picture; + ABSL_DEPRECATED("") bool end_of_picture; }; static_assert(std::is_pod::value, ""); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/loss_notification_controller.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/loss_notification_controller.h index a7a1fb9fe..4d536ba4f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/loss_notification_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/loss_notification_controller.h @@ -17,8 +17,9 @@ #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/sequence_checker.h" #include "modules/include/module_common_types.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -102,7 +103,7 @@ class LossNotificationController { // (Naturally, later frames must also be assemblable to be decodable.) 
std::set decodable_frame_ids_ RTC_GUARDED_BY(sequence_checker_); - SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_module2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_module2.h index 89dd08219..f58f88693 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_module2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/nack_module2.h @@ -17,11 +17,11 @@ #include #include +#include "api/sequence_checker.h" #include "api/units/time_delta.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/histogram.h" #include "rtc_base/numerics/sequence_number_util.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.cc index c32f890f4..9b843d3e2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.cc @@ -34,7 +34,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr, const RTPHeader& rtp_header, const RTPVideoHeader& videoHeader, int64_t ntp_time_ms, - int64_t receive_time_ms) + Timestamp receive_time) : payloadType(rtp_header.payloadType), timestamp(rtp_header.timestamp), ntp_time_ms_(ntp_time_ms), @@ -52,7 +52,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr, videoHeader.is_first_packet_in_frame), #endif video_header(videoHeader), - packet_info(rtp_header, receive_time_ms) { + packet_info(rtp_header, receive_time) { if (is_first_packet_in_frame() && markerBit) { completeNALU = kNaluComplete; } else if (is_first_packet_in_frame()) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.h 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.h index f157e1089..9aa2d5ce0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/rtp_packet_info.h" +#include "api/units/timestamp.h" #include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" @@ -41,7 +42,7 @@ class VCMPacket { const RTPHeader& rtp_header, const RTPVideoHeader& video_header, int64_t ntp_time_ms, - int64_t receive_time_ms); + Timestamp receive_time); ~VCMPacket(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.cc index 8372955ce..1f11fbf81 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.cc @@ -36,34 +36,21 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/mod_ops.h" -#include "system_wrappers/include/clock.h" namespace webrtc { namespace video_coding { PacketBuffer::Packet::Packet(const RtpPacketReceived& rtp_packet, - const RTPVideoHeader& video_header, - int64_t ntp_time_ms, - int64_t receive_time_ms) + const RTPVideoHeader& video_header) : marker_bit(rtp_packet.Marker()), payload_type(rtp_packet.PayloadType()), seq_num(rtp_packet.SequenceNumber()), timestamp(rtp_packet.Timestamp()), - ntp_time_ms(ntp_time_ms), times_nacked(-1), - video_header(video_header), - packet_info(rtp_packet.Ssrc(), - rtp_packet.Csrcs(), - rtp_packet.Timestamp(), - /*audio_level=*/absl::nullopt, - rtp_packet.GetExtension(), - receive_time_ms) {} + video_header(video_header) {} -PacketBuffer::PacketBuffer(Clock* clock, - size_t start_buffer_size, - size_t max_buffer_size) - : clock_(clock), - 
max_size_(max_buffer_size), +PacketBuffer::PacketBuffer(size_t start_buffer_size, size_t max_buffer_size) + : max_size_(max_buffer_size), first_seq_num_(0), first_packet_received_(false), is_cleared_to_first_seq_num_(false), @@ -82,7 +69,6 @@ PacketBuffer::~PacketBuffer() { PacketBuffer::InsertResult PacketBuffer::InsertPacket( std::unique_ptr packet) { PacketBuffer::InsertResult result; - MutexLock lock(&mutex_); uint16_t seq_num = packet->seq_num; size_t index = seq_num % buffer_.size(); @@ -122,14 +108,6 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( } } - int64_t now_ms = clock_->TimeInMilliseconds(); - last_received_packet_ms_ = now_ms; - if (packet->video_header.frame_type == VideoFrameType::kVideoFrameKey || - last_received_keyframe_rtp_timestamp_ == packet->timestamp) { - last_received_keyframe_packet_ms_ = now_ms; - last_received_keyframe_rtp_timestamp_ = packet->timestamp; - } - packet->continuous = false; buffer_[index] = std::move(packet); @@ -140,7 +118,6 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( } void PacketBuffer::ClearTo(uint16_t seq_num) { - MutexLock lock(&mutex_); // We have already cleared past this sequence number, no need to do anything. 
if (is_cleared_to_first_seq_num_ && AheadOf(first_seq_num_, seq_num)) { @@ -177,30 +154,20 @@ void PacketBuffer::ClearTo(uint16_t seq_num) { } void PacketBuffer::Clear() { - MutexLock lock(&mutex_); ClearInternal(); } PacketBuffer::InsertResult PacketBuffer::InsertPadding(uint16_t seq_num) { PacketBuffer::InsertResult result; - MutexLock lock(&mutex_); UpdateMissingPackets(seq_num); result.packets = FindFrames(static_cast(seq_num + 1)); return result; } -absl::optional PacketBuffer::LastReceivedPacketMs() const { - MutexLock lock(&mutex_); - return last_received_packet_ms_; -} - -absl::optional PacketBuffer::LastReceivedKeyframePacketMs() const { - MutexLock lock(&mutex_); - return last_received_keyframe_packet_ms_; -} void PacketBuffer::ForceSpsPpsIdrIsH264Keyframe() { sps_pps_idr_is_h264_keyframe_ = true; } + void PacketBuffer::ClearInternal() { for (auto& entry : buffer_) { entry = nullptr; @@ -208,8 +175,6 @@ void PacketBuffer::ClearInternal() { first_packet_received_ = false; is_cleared_to_first_seq_num_ = false; - last_received_packet_ms_.reset(); - last_received_keyframe_packet_ms_.reset(); newest_inserted_seq_num_.reset(); missing_packets_.clear(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h index e34f7040b..f4dbe3126 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/packet_buffer.h @@ -18,14 +18,13 @@ #include "absl/base/attributes.h" #include "api/rtp_packet_info.h" +#include "api/units/timestamp.h" #include "api/video/encoded_image.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/numerics/sequence_number_util.h" -#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/clock.h" 
namespace webrtc { namespace video_coding { @@ -35,9 +34,7 @@ class PacketBuffer { struct Packet { Packet() = default; Packet(const RtpPacketReceived& rtp_packet, - const RTPVideoHeader& video_header, - int64_t ntp_time_ms, - int64_t receive_time_ms); + const RTPVideoHeader& video_header); Packet(const Packet&) = delete; Packet(Packet&&) = delete; Packet& operator=(const Packet&) = delete; @@ -62,14 +59,10 @@ class PacketBuffer { uint8_t payload_type = 0; uint16_t seq_num = 0; uint32_t timestamp = 0; - // NTP time of the capture time in local timebase in milliseconds. - int64_t ntp_time_ms = -1; int times_nacked = -1; rtc::CopyOnWriteBuffer video_payload; RTPVideoHeader video_header; - - RtpPacketInfo packet_info; }; struct InsertResult { std::vector> packets; @@ -79,72 +72,50 @@ class PacketBuffer { }; // Both |start_buffer_size| and |max_buffer_size| must be a power of 2. - PacketBuffer(Clock* clock, size_t start_buffer_size, size_t max_buffer_size); + PacketBuffer(size_t start_buffer_size, size_t max_buffer_size); ~PacketBuffer(); - ABSL_MUST_USE_RESULT InsertResult InsertPacket(std::unique_ptr packet) - RTC_LOCKS_EXCLUDED(mutex_); - ABSL_MUST_USE_RESULT InsertResult InsertPadding(uint16_t seq_num) - RTC_LOCKS_EXCLUDED(mutex_); - void ClearTo(uint16_t seq_num) RTC_LOCKS_EXCLUDED(mutex_); - void Clear() RTC_LOCKS_EXCLUDED(mutex_); + ABSL_MUST_USE_RESULT InsertResult + InsertPacket(std::unique_ptr packet); + ABSL_MUST_USE_RESULT InsertResult InsertPadding(uint16_t seq_num); + void ClearTo(uint16_t seq_num); + void Clear(); - // Timestamp (not RTP timestamp) of the last received packet/keyframe packet. - absl::optional LastReceivedPacketMs() const - RTC_LOCKS_EXCLUDED(mutex_); - absl::optional LastReceivedKeyframePacketMs() const - RTC_LOCKS_EXCLUDED(mutex_); void ForceSpsPpsIdrIsH264Keyframe(); private: - Clock* const clock_; - - // Clears with |mutex_| taken. 
- void ClearInternal() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void ClearInternal(); // Tries to expand the buffer. - bool ExpandBufferSize() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + bool ExpandBufferSize(); // Test if all previous packets has arrived for the given sequence number. - bool PotentialNewFrame(uint16_t seq_num) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + bool PotentialNewFrame(uint16_t seq_num) const; // Test if all packets of a frame has arrived, and if so, returns packets to // create frames. - std::vector> FindFrames(uint16_t seq_num) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + std::vector> FindFrames(uint16_t seq_num); - void UpdateMissingPackets(uint16_t seq_num) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - - mutable Mutex mutex_; + void UpdateMissingPackets(uint16_t seq_num); // buffer_.size() and max_size_ must always be a power of two. const size_t max_size_; // The fist sequence number currently in the buffer. - uint16_t first_seq_num_ RTC_GUARDED_BY(mutex_); + uint16_t first_seq_num_; // If the packet buffer has received its first packet. - bool first_packet_received_ RTC_GUARDED_BY(mutex_); + bool first_packet_received_; // If the buffer is cleared to |first_seq_num_|. - bool is_cleared_to_first_seq_num_ RTC_GUARDED_BY(mutex_); + bool is_cleared_to_first_seq_num_; // Buffer that holds the the inserted packets and information needed to // determine continuity between them. - std::vector> buffer_ RTC_GUARDED_BY(mutex_); + std::vector> buffer_; - // Timestamp of the last received packet/keyframe packet. 
- absl::optional last_received_packet_ms_ RTC_GUARDED_BY(mutex_); - absl::optional last_received_keyframe_packet_ms_ - RTC_GUARDED_BY(mutex_); - absl::optional last_received_keyframe_rtp_timestamp_ - RTC_GUARDED_BY(mutex_); - - absl::optional newest_inserted_seq_num_ RTC_GUARDED_BY(mutex_); - std::set> missing_packets_ - RTC_GUARDED_BY(mutex_); + absl::optional newest_inserted_seq_num_; + std::set> missing_packets_; // Indicates if we should require SPS, PPS, and IDR for a particular // RTP timestamp to treat the corresponding frame as a keyframe. diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.cc new file mode 100644 index 000000000..9f3d5bb29 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.cc @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/rtp_frame_id_only_ref_finder.h" + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +RtpFrameReferenceFinder::ReturnVector RtpFrameIdOnlyRefFinder::ManageFrame( + std::unique_ptr frame, + int frame_id) { + frame->SetSpatialIndex(0); + frame->SetId(unwrapper_.Unwrap(frame_id & (kFrameIdLength - 1))); + frame->num_references = + frame->frame_type() == VideoFrameType::kVideoFrameKey ? 
0 : 1; + frame->references[0] = frame->Id() - 1; + + RtpFrameReferenceFinder::ReturnVector res; + res.push_back(std::move(frame)); + return res; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.h new file mode 100644 index 000000000..1df4870c5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_RTP_FRAME_ID_ONLY_REF_FINDER_H_ +#define MODULES_VIDEO_CODING_RTP_FRAME_ID_ONLY_REF_FINDER_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "modules/video_coding/frame_object.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/numerics/sequence_number_util.h" + +namespace webrtc { + +class RtpFrameIdOnlyRefFinder { + public: + RtpFrameIdOnlyRefFinder() = default; + + RtpFrameReferenceFinder::ReturnVector ManageFrame( + std::unique_ptr frame, + int frame_id); + + private: + static constexpr int kFrameIdLength = 1 << 15; + SeqNumUnwrapper unwrapper_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_RTP_FRAME_ID_ONLY_REF_FINDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_reference_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_reference_finder.cc index 2a43c275d..a060f8477 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_reference_finder.cc +++ 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_reference_finder.cc @@ -10,18 +10,137 @@ #include "modules/video_coding/rtp_frame_reference_finder.h" -#include -#include +#include -#include "absl/base/macros.h" #include "absl/types/variant.h" #include "modules/video_coding/frame_object.h" -#include "modules/video_coding/packet_buffer.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" +#include "modules/video_coding/rtp_frame_id_only_ref_finder.h" +#include "modules/video_coding/rtp_generic_ref_finder.h" +#include "modules/video_coding/rtp_seq_num_only_ref_finder.h" +#include "modules/video_coding/rtp_vp8_ref_finder.h" +#include "modules/video_coding/rtp_vp9_ref_finder.h" namespace webrtc { -namespace video_coding { +namespace internal { +class RtpFrameReferenceFinderImpl { + public: + RtpFrameReferenceFinderImpl() = default; + + RtpFrameReferenceFinder::ReturnVector ManageFrame( + std::unique_ptr frame); + RtpFrameReferenceFinder::ReturnVector PaddingReceived(uint16_t seq_num); + void ClearTo(uint16_t seq_num); + + private: + using RefFinder = absl::variant; + + template + T& GetRefFinderAs(); + RefFinder ref_finder_; +}; + +RtpFrameReferenceFinder::ReturnVector RtpFrameReferenceFinderImpl::ManageFrame( + std::unique_ptr frame) { + const RTPVideoHeader& video_header = frame->GetRtpVideoHeader(); + + if (video_header.generic.has_value()) { + return GetRefFinderAs().ManageFrame( + std::move(frame), *video_header.generic); + } + + switch (frame->codec_type()) { + case kVideoCodecVP8: { + const RTPVideoHeaderVP8& vp8_header = + absl::get(video_header.video_type_header); + + if (vp8_header.temporalIdx == kNoTemporalIdx || + vp8_header.tl0PicIdx == kNoTl0PicIdx) { + if (vp8_header.pictureId == kNoPictureId) { + return GetRefFinderAs().ManageFrame( + std::move(frame)); + } + + return GetRefFinderAs().ManageFrame( + std::move(frame), vp8_header.pictureId); + } + + return GetRefFinderAs().ManageFrame(std::move(frame)); + } + case kVideoCodecVP9: 
{ + const RTPVideoHeaderVP9& vp9_header = + absl::get(video_header.video_type_header); + + if (vp9_header.temporal_idx == kNoTemporalIdx) { + if (vp9_header.picture_id == kNoPictureId) { + return GetRefFinderAs().ManageFrame( + std::move(frame)); + } + + return GetRefFinderAs().ManageFrame( + std::move(frame), vp9_header.picture_id); + } + + return GetRefFinderAs().ManageFrame(std::move(frame)); + } + case kVideoCodecGeneric: { + if (auto* generic_header = absl::get_if( + &video_header.video_type_header)) { + return GetRefFinderAs().ManageFrame( + std::move(frame), generic_header->picture_id); + } + + return GetRefFinderAs().ManageFrame( + std::move(frame)); + } + default: { + return GetRefFinderAs().ManageFrame( + std::move(frame)); + } + } +} + +RtpFrameReferenceFinder::ReturnVector +RtpFrameReferenceFinderImpl::PaddingReceived(uint16_t seq_num) { + if (auto* ref_finder = absl::get_if(&ref_finder_)) { + return ref_finder->PaddingReceived(seq_num); + } + return {}; +} + +void RtpFrameReferenceFinderImpl::ClearTo(uint16_t seq_num) { + struct ClearToVisitor { + void operator()(absl::monostate& ref_finder) {} + void operator()(RtpGenericFrameRefFinder& ref_finder) {} + void operator()(RtpFrameIdOnlyRefFinder& ref_finder) {} + void operator()(RtpSeqNumOnlyRefFinder& ref_finder) { + ref_finder.ClearTo(seq_num); + } + void operator()(RtpVp8RefFinder& ref_finder) { + ref_finder.ClearTo(seq_num); + } + void operator()(RtpVp9RefFinder& ref_finder) { + ref_finder.ClearTo(seq_num); + } + uint16_t seq_num; + }; + + absl::visit(ClearToVisitor{seq_num}, ref_finder_); +} + +template +T& RtpFrameReferenceFinderImpl::GetRefFinderAs() { + if (auto* ref_finder = absl::get_if(&ref_finder_)) { + return *ref_finder; + } + return ref_finder_.emplace(); +} + +} // namespace internal RtpFrameReferenceFinder::RtpFrameReferenceFinder( OnCompleteFrameCallback* frame_callback) @@ -30,11 +149,9 @@ RtpFrameReferenceFinder::RtpFrameReferenceFinder( 
RtpFrameReferenceFinder::RtpFrameReferenceFinder( OnCompleteFrameCallback* frame_callback, int64_t picture_id_offset) - : last_picture_id_(-1), - current_ss_idx_(0), - cleared_to_seq_num_(-1), + : picture_id_offset_(picture_id_offset), frame_callback_(frame_callback), - picture_id_offset_(picture_id_offset) {} + impl_(std::make_unique()) {} RtpFrameReferenceFinder::~RtpFrameReferenceFinder() = default; @@ -45,736 +162,27 @@ void RtpFrameReferenceFinder::ManageFrame( AheadOf(cleared_to_seq_num_, frame->first_seq_num())) { return; } - - FrameDecision decision = ManageFrameInternal(frame.get()); - - switch (decision) { - case kStash: - if (stashed_frames_.size() > kMaxStashedFrames) - stashed_frames_.pop_back(); - stashed_frames_.push_front(std::move(frame)); - break; - case kHandOff: - HandOffFrame(std::move(frame)); - RetryStashedFrames(); - break; - case kDrop: - break; - } -} - -void RtpFrameReferenceFinder::RetryStashedFrames() { - bool complete_frame = false; - do { - complete_frame = false; - for (auto frame_it = stashed_frames_.begin(); - frame_it != stashed_frames_.end();) { - FrameDecision decision = ManageFrameInternal(frame_it->get()); - - switch (decision) { - case kStash: - ++frame_it; - break; - case kHandOff: - complete_frame = true; - HandOffFrame(std::move(*frame_it)); - ABSL_FALLTHROUGH_INTENDED; - case kDrop: - frame_it = stashed_frames_.erase(frame_it); - } - } - } while (complete_frame); -} - -void RtpFrameReferenceFinder::HandOffFrame( - std::unique_ptr frame) { - frame->id.picture_id += picture_id_offset_; - for (size_t i = 0; i < frame->num_references; ++i) { - frame->references[i] += picture_id_offset_; - } - - frame_callback_->OnCompleteFrame(std::move(frame)); -} - -RtpFrameReferenceFinder::FrameDecision -RtpFrameReferenceFinder::ManageFrameInternal(RtpFrameObject* frame) { - if (const absl::optional& - generic_descriptor = frame->GetRtpVideoHeader().generic) { - return ManageFrameGeneric(frame, *generic_descriptor); - } - - switch 
(frame->codec_type()) { - case kVideoCodecVP8: - return ManageFrameVp8(frame); - case kVideoCodecVP9: - return ManageFrameVp9(frame); - case kVideoCodecGeneric: - if (auto* generic_header = absl::get_if( - &frame->GetRtpVideoHeader().video_type_header)) { - return ManageFramePidOrSeqNum(frame, generic_header->picture_id); - } - ABSL_FALLTHROUGH_INTENDED; - default: - return ManageFramePidOrSeqNum(frame, kNoPictureId); - } + HandOffFrames(impl_->ManageFrame(std::move(frame))); } void RtpFrameReferenceFinder::PaddingReceived(uint16_t seq_num) { - auto clean_padding_to = - stashed_padding_.lower_bound(seq_num - kMaxPaddingAge); - stashed_padding_.erase(stashed_padding_.begin(), clean_padding_to); - stashed_padding_.insert(seq_num); - UpdateLastPictureIdWithPadding(seq_num); - RetryStashedFrames(); + HandOffFrames(impl_->PaddingReceived(seq_num)); } void RtpFrameReferenceFinder::ClearTo(uint16_t seq_num) { cleared_to_seq_num_ = seq_num; - - auto it = stashed_frames_.begin(); - while (it != stashed_frames_.end()) { - if (AheadOf(cleared_to_seq_num_, (*it)->first_seq_num())) { - it = stashed_frames_.erase(it); - } else { - ++it; - } - } + impl_->ClearTo(seq_num); } -void RtpFrameReferenceFinder::UpdateLastPictureIdWithPadding(uint16_t seq_num) { - auto gop_seq_num_it = last_seq_num_gop_.upper_bound(seq_num); - - // If this padding packet "belongs" to a group of pictures that we don't track - // anymore, do nothing. - if (gop_seq_num_it == last_seq_num_gop_.begin()) - return; - --gop_seq_num_it; - - // Calculate the next contiuous sequence number and search for it in - // the padding packets we have stashed. - uint16_t next_seq_num_with_padding = gop_seq_num_it->second.second + 1; - auto padding_seq_num_it = - stashed_padding_.lower_bound(next_seq_num_with_padding); - - // While there still are padding packets and those padding packets are - // continuous, then advance the "last-picture-id-with-padding" and remove - // the stashed padding packet. 
- while (padding_seq_num_it != stashed_padding_.end() && - *padding_seq_num_it == next_seq_num_with_padding) { - gop_seq_num_it->second.second = next_seq_num_with_padding; - ++next_seq_num_with_padding; - padding_seq_num_it = stashed_padding_.erase(padding_seq_num_it); - } - - // In the case where the stream has been continuous without any new keyframes - // for a while there is a risk that new frames will appear to be older than - // the keyframe they belong to due to wrapping sequence number. In order - // to prevent this we advance the picture id of the keyframe every so often. - if (ForwardDiff(gop_seq_num_it->first, seq_num) > 10000) { - auto save = gop_seq_num_it->second; - last_seq_num_gop_.clear(); - last_seq_num_gop_[seq_num] = save; - } -} - -RtpFrameReferenceFinder::FrameDecision -RtpFrameReferenceFinder::ManageFrameGeneric( - RtpFrameObject* frame, - const RTPVideoHeader::GenericDescriptorInfo& descriptor) { - frame->id.picture_id = descriptor.frame_id; - frame->id.spatial_layer = descriptor.spatial_index; - - if (EncodedFrame::kMaxFrameReferences < descriptor.dependencies.size()) { - RTC_LOG(LS_WARNING) << "Too many dependencies in generic descriptor."; - return kDrop; - } - - frame->num_references = descriptor.dependencies.size(); - for (size_t i = 0; i < descriptor.dependencies.size(); ++i) - frame->references[i] = descriptor.dependencies[i]; - - return kHandOff; -} - -RtpFrameReferenceFinder::FrameDecision -RtpFrameReferenceFinder::ManageFramePidOrSeqNum(RtpFrameObject* frame, - int picture_id) { - // If |picture_id| is specified then we use that to set the frame references, - // otherwise we use sequence number. - if (picture_id != kNoPictureId) { - frame->id.picture_id = unwrapper_.Unwrap(picture_id & 0x7FFF); - frame->num_references = - frame->frame_type() == VideoFrameType::kVideoFrameKey ? 
0 : 1; - frame->references[0] = frame->id.picture_id - 1; - return kHandOff; - } - - if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { - last_seq_num_gop_.insert(std::make_pair( - frame->last_seq_num(), - std::make_pair(frame->last_seq_num(), frame->last_seq_num()))); - } - - // We have received a frame but not yet a keyframe, stash this frame. - if (last_seq_num_gop_.empty()) - return kStash; - - // Clean up info for old keyframes but make sure to keep info - // for the last keyframe. - auto clean_to = last_seq_num_gop_.lower_bound(frame->last_seq_num() - 100); - for (auto it = last_seq_num_gop_.begin(); - it != clean_to && last_seq_num_gop_.size() > 1;) { - it = last_seq_num_gop_.erase(it); - } - - // Find the last sequence number of the last frame for the keyframe - // that this frame indirectly references. - auto seq_num_it = last_seq_num_gop_.upper_bound(frame->last_seq_num()); - if (seq_num_it == last_seq_num_gop_.begin()) { - RTC_LOG(LS_WARNING) << "Generic frame with packet range [" - << frame->first_seq_num() << ", " - << frame->last_seq_num() - << "] has no GoP, dropping frame."; - return kDrop; - } - seq_num_it--; - - // Make sure the packet sequence numbers are continuous, otherwise stash - // this frame. - uint16_t last_picture_id_gop = seq_num_it->second.first; - uint16_t last_picture_id_with_padding_gop = seq_num_it->second.second; - if (frame->frame_type() == VideoFrameType::kVideoFrameDelta) { - uint16_t prev_seq_num = frame->first_seq_num() - 1; - - if (prev_seq_num != last_picture_id_with_padding_gop) - return kStash; - } - - RTC_DCHECK(AheadOrAt(frame->last_seq_num(), seq_num_it->first)); - - // Since keyframes can cause reordering we can't simply assign the - // picture id according to some incrementing counter. 
- frame->id.picture_id = frame->last_seq_num(); - frame->num_references = - frame->frame_type() == VideoFrameType::kVideoFrameDelta; - frame->references[0] = rtp_seq_num_unwrapper_.Unwrap(last_picture_id_gop); - if (AheadOf(frame->id.picture_id, last_picture_id_gop)) { - seq_num_it->second.first = frame->id.picture_id; - seq_num_it->second.second = frame->id.picture_id; - } - - UpdateLastPictureIdWithPadding(frame->id.picture_id); - frame->id.picture_id = rtp_seq_num_unwrapper_.Unwrap(frame->id.picture_id); - return kHandOff; -} - -RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp8( - RtpFrameObject* frame) { - const RTPVideoHeader& video_header = frame->GetRtpVideoHeader(); - const RTPVideoHeaderVP8& codec_header = - absl::get(video_header.video_type_header); - - if (codec_header.pictureId == kNoPictureId || - codec_header.temporalIdx == kNoTemporalIdx || - codec_header.tl0PicIdx == kNoTl0PicIdx) { - return ManageFramePidOrSeqNum(frame, codec_header.pictureId); - } - - // Protect against corrupted packets with arbitrary large temporal idx. - if (codec_header.temporalIdx >= kMaxTemporalLayers) - return kDrop; - - frame->id.picture_id = codec_header.pictureId & 0x7FFF; - - if (last_picture_id_ == -1) - last_picture_id_ = frame->id.picture_id; - - // Clean up info about not yet received frames that are too old. - uint16_t old_picture_id = - Subtract(frame->id.picture_id, kMaxNotYetReceivedFrames); - auto clean_frames_to = not_yet_received_frames_.lower_bound(old_picture_id); - not_yet_received_frames_.erase(not_yet_received_frames_.begin(), - clean_frames_to); - // Avoid re-adding picture ids that were just erased. - if (AheadOf(old_picture_id, last_picture_id_)) { - last_picture_id_ = old_picture_id; - } - // Find if there has been a gap in fully received frames and save the picture - // id of those frames in |not_yet_received_frames_|. 
- if (AheadOf(frame->id.picture_id, last_picture_id_)) { - do { - last_picture_id_ = Add(last_picture_id_, 1); - not_yet_received_frames_.insert(last_picture_id_); - } while (last_picture_id_ != frame->id.picture_id); - } - - int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(codec_header.tl0PicIdx & 0xFF); - - // Clean up info for base layers that are too old. - int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxLayerInfo; - auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx); - layer_info_.erase(layer_info_.begin(), clean_layer_info_to); - - if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { - if (codec_header.temporalIdx != 0) { - return kDrop; - } - frame->num_references = 0; - layer_info_[unwrapped_tl0].fill(-1); - UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); - return kHandOff; - } - - auto layer_info_it = layer_info_.find( - codec_header.temporalIdx == 0 ? unwrapped_tl0 - 1 : unwrapped_tl0); - - // If we don't have the base layer frame yet, stash this frame. - if (layer_info_it == layer_info_.end()) - return kStash; - - // A non keyframe base layer frame has been received, copy the layer info - // from the previous base layer frame and set a reference to the previous - // base layer frame. - if (codec_header.temporalIdx == 0) { - layer_info_it = - layer_info_.emplace(unwrapped_tl0, layer_info_it->second).first; - frame->num_references = 1; - int64_t last_pid_on_layer = layer_info_it->second[0]; - - // Is this an old frame that has already been used to update the state? If - // so, drop it. - if (AheadOrAt(last_pid_on_layer, - frame->id.picture_id)) { - return kDrop; - } - - frame->references[0] = last_pid_on_layer; - UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); - return kHandOff; - } - - // Layer sync frame, this frame only references its base layer frame. 
- if (codec_header.layerSync) { - frame->num_references = 1; - int64_t last_pid_on_layer = layer_info_it->second[codec_header.temporalIdx]; - - // Is this an old frame that has already been used to update the state? If - // so, drop it. - if (last_pid_on_layer != -1 && - AheadOrAt(last_pid_on_layer, - frame->id.picture_id)) { - return kDrop; - } - - frame->references[0] = layer_info_it->second[0]; - UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); - return kHandOff; - } - - // Find all references for this frame. - frame->num_references = 0; - for (uint8_t layer = 0; layer <= codec_header.temporalIdx; ++layer) { - // If we have not yet received a previous frame on this temporal layer, - // stash this frame. - if (layer_info_it->second[layer] == -1) - return kStash; - - // If the last frame on this layer is ahead of this frame it means that - // a layer sync frame has been received after this frame for the same - // base layer frame, drop this frame. - if (AheadOf(layer_info_it->second[layer], - frame->id.picture_id)) { - return kDrop; - } - - // If we have not yet received a frame between this frame and the referenced - // frame then we have to wait for that frame to be completed first. 
- auto not_received_frame_it = - not_yet_received_frames_.upper_bound(layer_info_it->second[layer]); - if (not_received_frame_it != not_yet_received_frames_.end() && - AheadOf(frame->id.picture_id, - *not_received_frame_it)) { - return kStash; - } - - if (!(AheadOf(frame->id.picture_id, - layer_info_it->second[layer]))) { - RTC_LOG(LS_WARNING) << "Frame with picture id " << frame->id.picture_id - << " and packet range [" << frame->first_seq_num() - << ", " << frame->last_seq_num() - << "] already received, " - " dropping frame."; - return kDrop; - } - - ++frame->num_references; - frame->references[layer] = layer_info_it->second[layer]; - } - - UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); - return kHandOff; -} - -void RtpFrameReferenceFinder::UpdateLayerInfoVp8(RtpFrameObject* frame, - int64_t unwrapped_tl0, - uint8_t temporal_idx) { - auto layer_info_it = layer_info_.find(unwrapped_tl0); - - // Update this layer info and newer. - while (layer_info_it != layer_info_.end()) { - if (layer_info_it->second[temporal_idx] != -1 && - AheadOf(layer_info_it->second[temporal_idx], - frame->id.picture_id)) { - // The frame was not newer, then no subsequent layer info have to be - // update. - break; - } - - layer_info_it->second[temporal_idx] = frame->id.picture_id; - ++unwrapped_tl0; - layer_info_it = layer_info_.find(unwrapped_tl0); - } - not_yet_received_frames_.erase(frame->id.picture_id); - - UnwrapPictureIds(frame); -} - -RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameVp9( - RtpFrameObject* frame) { - const RTPVideoHeader& video_header = frame->GetRtpVideoHeader(); - const RTPVideoHeaderVP9& codec_header = - absl::get(video_header.video_type_header); - - if (codec_header.picture_id == kNoPictureId || - codec_header.temporal_idx == kNoTemporalIdx) { - return ManageFramePidOrSeqNum(frame, codec_header.picture_id); - } - - // Protect against corrupted packets with arbitrary large temporal idx. 
- if (codec_header.temporal_idx >= kMaxTemporalLayers || - codec_header.spatial_idx >= kMaxSpatialLayers) - return kDrop; - - frame->id.spatial_layer = codec_header.spatial_idx; - frame->inter_layer_predicted = codec_header.inter_layer_predicted; - frame->id.picture_id = codec_header.picture_id & 0x7FFF; - - if (last_picture_id_ == -1) - last_picture_id_ = frame->id.picture_id; - - if (codec_header.flexible_mode) { - if (codec_header.num_ref_pics > EncodedFrame::kMaxFrameReferences) { - return kDrop; - } - frame->num_references = codec_header.num_ref_pics; +void RtpFrameReferenceFinder::HandOffFrames(ReturnVector frames) { + for (auto& frame : frames) { + frame->SetId(frame->Id() + picture_id_offset_); for (size_t i = 0; i < frame->num_references; ++i) { - frame->references[i] = Subtract(frame->id.picture_id, - codec_header.pid_diff[i]); + frame->references[i] += picture_id_offset_; } - UnwrapPictureIds(frame); - return kHandOff; - } - - if (codec_header.tl0_pic_idx == kNoTl0PicIdx) { - RTC_LOG(LS_WARNING) << "TL0PICIDX is expected to be present in " - "non-flexible mode."; - return kDrop; - } - - GofInfo* info; - int64_t unwrapped_tl0 = - tl0_unwrapper_.Unwrap(codec_header.tl0_pic_idx & 0xFF); - if (codec_header.ss_data_available) { - if (codec_header.temporal_idx != 0) { - RTC_LOG(LS_WARNING) << "Received scalability structure on a non base " - "layer frame. Scalability structure ignored."; - } else { - if (codec_header.gof.num_frames_in_gof > kMaxVp9FramesInGof) { - return kDrop; - } - - for (size_t i = 0; i < codec_header.gof.num_frames_in_gof; ++i) { - if (codec_header.gof.num_ref_pics[i] > kMaxVp9RefPics) { - return kDrop; - } - } - - GofInfoVP9 gof = codec_header.gof; - if (gof.num_frames_in_gof == 0) { - RTC_LOG(LS_WARNING) << "Number of frames in GOF is zero. 
Assume " - "that stream has only one temporal layer."; - gof.SetGofInfoVP9(kTemporalStructureMode1); - } - - current_ss_idx_ = Add(current_ss_idx_, 1); - scalability_structures_[current_ss_idx_] = gof; - scalability_structures_[current_ss_idx_].pid_start = frame->id.picture_id; - gof_info_.emplace(unwrapped_tl0, - GofInfo(&scalability_structures_[current_ss_idx_], - frame->id.picture_id)); - } - - const auto gof_info_it = gof_info_.find(unwrapped_tl0); - if (gof_info_it == gof_info_.end()) - return kStash; - - info = &gof_info_it->second; - - if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { - frame->num_references = 0; - FrameReceivedVp9(frame->id.picture_id, info); - UnwrapPictureIds(frame); - return kHandOff; - } - } else if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { - if (frame->id.spatial_layer == 0) { - RTC_LOG(LS_WARNING) << "Received keyframe without scalability structure"; - return kDrop; - } - const auto gof_info_it = gof_info_.find(unwrapped_tl0); - if (gof_info_it == gof_info_.end()) - return kStash; - - info = &gof_info_it->second; - - if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { - frame->num_references = 0; - FrameReceivedVp9(frame->id.picture_id, info); - UnwrapPictureIds(frame); - return kHandOff; - } - } else { - auto gof_info_it = gof_info_.find( - (codec_header.temporal_idx == 0) ? unwrapped_tl0 - 1 : unwrapped_tl0); - - // Gof info for this frame is not available yet, stash this frame. - if (gof_info_it == gof_info_.end()) - return kStash; - - if (codec_header.temporal_idx == 0) { - gof_info_it = gof_info_ - .emplace(unwrapped_tl0, GofInfo(gof_info_it->second.gof, - frame->id.picture_id)) - .first; - } - - info = &gof_info_it->second; - } - - // Clean up info for base layers that are too old. 
- int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxGofSaved; - auto clean_gof_info_to = gof_info_.lower_bound(old_tl0_pic_idx); - gof_info_.erase(gof_info_.begin(), clean_gof_info_to); - - FrameReceivedVp9(frame->id.picture_id, info); - - // Make sure we don't miss any frame that could potentially have the - // up switch flag set. - if (MissingRequiredFrameVp9(frame->id.picture_id, *info)) - return kStash; - - if (codec_header.temporal_up_switch) - up_switch_.emplace(frame->id.picture_id, codec_header.temporal_idx); - - // Clean out old info about up switch frames. - uint16_t old_picture_id = Subtract(frame->id.picture_id, 50); - auto up_switch_erase_to = up_switch_.lower_bound(old_picture_id); - up_switch_.erase(up_switch_.begin(), up_switch_erase_to); - - size_t diff = ForwardDiff(info->gof->pid_start, - frame->id.picture_id); - size_t gof_idx = diff % info->gof->num_frames_in_gof; - - if (info->gof->num_ref_pics[gof_idx] > EncodedFrame::kMaxFrameReferences) { - return kDrop; - } - // Populate references according to the scalability structure. - frame->num_references = info->gof->num_ref_pics[gof_idx]; - for (size_t i = 0; i < frame->num_references; ++i) { - frame->references[i] = Subtract( - frame->id.picture_id, info->gof->pid_diff[gof_idx][i]); - - // If this is a reference to a frame earlier than the last up switch point, - // then ignore this reference. - if (UpSwitchInIntervalVp9(frame->id.picture_id, codec_header.temporal_idx, - frame->references[i])) { - --frame->num_references; - } - } - - // Override GOF references. 
- if (!codec_header.inter_pic_predicted) { - frame->num_references = 0; - } - - UnwrapPictureIds(frame); - return kHandOff; -} - -bool RtpFrameReferenceFinder::MissingRequiredFrameVp9(uint16_t picture_id, - const GofInfo& info) { - size_t diff = - ForwardDiff(info.gof->pid_start, picture_id); - size_t gof_idx = diff % info.gof->num_frames_in_gof; - size_t temporal_idx = info.gof->temporal_idx[gof_idx]; - - if (temporal_idx >= kMaxTemporalLayers) { - RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers - << " temporal " - "layers are supported."; - return true; - } - - // For every reference this frame has, check if there is a frame missing in - // the interval (|ref_pid|, |picture_id|) in any of the lower temporal - // layers. If so, we are missing a required frame. - uint8_t num_references = info.gof->num_ref_pics[gof_idx]; - for (size_t i = 0; i < num_references; ++i) { - uint16_t ref_pid = - Subtract(picture_id, info.gof->pid_diff[gof_idx][i]); - for (size_t l = 0; l < temporal_idx; ++l) { - auto missing_frame_it = missing_frames_for_layer_[l].lower_bound(ref_pid); - if (missing_frame_it != missing_frames_for_layer_[l].end() && - AheadOf(picture_id, *missing_frame_it)) { - return true; - } - } - } - return false; -} - -void RtpFrameReferenceFinder::FrameReceivedVp9(uint16_t picture_id, - GofInfo* info) { - int last_picture_id = info->last_picture_id; - size_t gof_size = std::min(info->gof->num_frames_in_gof, kMaxVp9FramesInGof); - - // If there is a gap, find which temporal layer the missing frames - // belong to and add the frame as missing for that temporal layer. - // Otherwise, remove this frame from the set of missing frames. 
- if (AheadOf(picture_id, last_picture_id)) { - size_t diff = ForwardDiff(info->gof->pid_start, - last_picture_id); - size_t gof_idx = diff % gof_size; - - last_picture_id = Add(last_picture_id, 1); - while (last_picture_id != picture_id) { - gof_idx = (gof_idx + 1) % gof_size; - RTC_CHECK(gof_idx < kMaxVp9FramesInGof); - - size_t temporal_idx = info->gof->temporal_idx[gof_idx]; - if (temporal_idx >= kMaxTemporalLayers) { - RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers - << " temporal " - "layers are supported."; - return; - } - - missing_frames_for_layer_[temporal_idx].insert(last_picture_id); - last_picture_id = Add(last_picture_id, 1); - } - - info->last_picture_id = last_picture_id; - } else { - size_t diff = - ForwardDiff(info->gof->pid_start, picture_id); - size_t gof_idx = diff % gof_size; - RTC_CHECK(gof_idx < kMaxVp9FramesInGof); - - size_t temporal_idx = info->gof->temporal_idx[gof_idx]; - if (temporal_idx >= kMaxTemporalLayers) { - RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers - << " temporal " - "layers are supported."; - return; - } - - missing_frames_for_layer_[temporal_idx].erase(picture_id); + frame_callback_->OnCompleteFrame(std::move(frame)); } } -bool RtpFrameReferenceFinder::UpSwitchInIntervalVp9(uint16_t picture_id, - uint8_t temporal_idx, - uint16_t pid_ref) { - for (auto up_switch_it = up_switch_.upper_bound(pid_ref); - up_switch_it != up_switch_.end() && - AheadOf(picture_id, up_switch_it->first); - ++up_switch_it) { - if (up_switch_it->second < temporal_idx) - return true; - } - - return false; -} - -void RtpFrameReferenceFinder::UnwrapPictureIds(RtpFrameObject* frame) { - for (size_t i = 0; i < frame->num_references; ++i) - frame->references[i] = unwrapper_.Unwrap(frame->references[i]); - frame->id.picture_id = unwrapper_.Unwrap(frame->id.picture_id); -} - -void RtpFrameReferenceFinder::UpdateLastPictureIdWithPaddingH264() { - auto seq_num_it = last_seq_num_gop_.begin(); - - // Check if next sequence number is in a 
stashed padding packet. - uint16_t next_padded_seq_num = seq_num_it->second.second + 1; - auto padding_seq_num_it = stashed_padding_.lower_bound(next_padded_seq_num); - - // Check for more consecutive padding packets to increment - // the "last-picture-id-with-padding" and remove the stashed packets. - while (padding_seq_num_it != stashed_padding_.end() && - *padding_seq_num_it == next_padded_seq_num) { - seq_num_it->second.second = next_padded_seq_num; - ++next_padded_seq_num; - padding_seq_num_it = stashed_padding_.erase(padding_seq_num_it); - } -} - -void RtpFrameReferenceFinder::UpdateLayerInfoH264(RtpFrameObject* frame, - int64_t unwrapped_tl0, - uint8_t temporal_idx) { - auto layer_info_it = layer_info_.find(unwrapped_tl0); - - // Update this layer info and newer. - while (layer_info_it != layer_info_.end()) { - if (layer_info_it->second[temporal_idx] != -1 && - AheadOf(layer_info_it->second[temporal_idx], - frame->id.picture_id)) { - // Not a newer frame. No subsequent layer info needs update. - break; - } - - layer_info_it->second[temporal_idx] = frame->id.picture_id; - ++unwrapped_tl0; - layer_info_it = layer_info_.find(unwrapped_tl0); - } - - for (size_t i = 0; i < frame->num_references; ++i) - frame->references[i] = rtp_seq_num_unwrapper_.Unwrap(frame->references[i]); - frame->id.picture_id = rtp_seq_num_unwrapper_.Unwrap(frame->id.picture_id); -} - -void RtpFrameReferenceFinder::UpdateDataH264(RtpFrameObject* frame, - int64_t unwrapped_tl0, - uint8_t temporal_idx) { - // Update last_seq_num_gop_ entry for last picture id. - auto seq_num_it = last_seq_num_gop_.begin(); - uint16_t last_pic_id = seq_num_it->second.first; - if (AheadOf(frame->id.picture_id, last_pic_id)) { - seq_num_it->second.first = frame->id.picture_id; - seq_num_it->second.second = frame->id.picture_id; - } - UpdateLastPictureIdWithPaddingH264(); - - UpdateLayerInfoH264(frame, unwrapped_tl0, temporal_idx); - - // Remove any current packets from |not_yet_received_seq_num_|. 
- uint16_t last_seq_num_padded = seq_num_it->second.second; - for (uint16_t n = frame->first_seq_num(); AheadOrAt(last_seq_num_padded, n); - ++n) { - not_yet_received_seq_num_.erase(n); - } -} - -} // namespace video_coding } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_reference_finder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_reference_finder.h index ed67b91fe..3577ea828 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_reference_finder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_frame_reference_finder.h @@ -11,24 +11,14 @@ #ifndef MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_ #define MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_ -#include -#include -#include #include -#include -#include -#include "modules/include/module_common_types_public.h" -#include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" -#include "rtc_base/numerics/sequence_number_util.h" -#include "rtc_base/thread_annotations.h" +#include "modules/video_coding/frame_object.h" namespace webrtc { -namespace video_coding { - -class EncodedFrame; -class RtpFrameObject; +namespace internal { +class RtpFrameReferenceFinderImpl; +} // namespace internal // A complete frame is a frame which has received all its packets and all its // references are known. 
@@ -40,6 +30,8 @@ class OnCompleteFrameCallback { class RtpFrameReferenceFinder { public: + using ReturnVector = absl::InlinedVector, 3>; + explicit RtpFrameReferenceFinder(OnCompleteFrameCallback* frame_callback); explicit RtpFrameReferenceFinder(OnCompleteFrameCallback* frame_callback, int64_t picture_id_offset); @@ -61,164 +53,17 @@ class RtpFrameReferenceFinder { void ClearTo(uint16_t seq_num); private: - static const uint16_t kPicIdLength = 1 << 15; - static const uint8_t kMaxTemporalLayers = 5; - static const int kMaxLayerInfo = 50; - static const int kMaxStashedFrames = 100; - static const int kMaxNotYetReceivedFrames = 100; - static const int kMaxGofSaved = 50; - static const int kMaxPaddingAge = 100; - - enum FrameDecision { kStash, kHandOff, kDrop }; - - struct GofInfo { - GofInfo(GofInfoVP9* gof, uint16_t last_picture_id) - : gof(gof), last_picture_id(last_picture_id) {} - GofInfoVP9* gof; - uint16_t last_picture_id; - }; - - // Find the relevant group of pictures and update its "last-picture-id-with - // padding" sequence number. - void UpdateLastPictureIdWithPadding(uint16_t seq_num); - - // Retry stashed frames until no more complete frames are found. - void RetryStashedFrames(); - - void HandOffFrame(std::unique_ptr frame); - - FrameDecision ManageFrameInternal(RtpFrameObject* frame); - - FrameDecision ManageFrameGeneric( - RtpFrameObject* frame, - const RTPVideoHeader::GenericDescriptorInfo& descriptor); - - // Find references for frames with no or very limited information in the - // descriptor. If |picture_id| is unspecified then packet sequence numbers - // will be used to determine the references of the frames. - FrameDecision ManageFramePidOrSeqNum(RtpFrameObject* frame, int picture_id); - - // Find references for Vp8 frames - FrameDecision ManageFrameVp8(RtpFrameObject* frame); - - // Updates necessary layer info state used to determine frame references for - // Vp8. 
- void UpdateLayerInfoVp8(RtpFrameObject* frame, - int64_t unwrapped_tl0, - uint8_t temporal_idx); - - // Find references for Vp9 frames - FrameDecision ManageFrameVp9(RtpFrameObject* frame); - - // Check if we are missing a frame necessary to determine the references - // for this frame. - bool MissingRequiredFrameVp9(uint16_t picture_id, const GofInfo& info); - - // Updates which frames that have been received. If there is a gap, - // missing frames will be added to |missing_frames_for_layer_| or - // if this is an already missing frame then it will be removed. - void FrameReceivedVp9(uint16_t picture_id, GofInfo* info); - - // Check if there is a frame with the up-switch flag set in the interval - // (|pid_ref|, |picture_id|) with temporal layer smaller than |temporal_idx|. - bool UpSwitchInIntervalVp9(uint16_t picture_id, - uint8_t temporal_idx, - uint16_t pid_ref); - - // Unwrap |frame|s picture id and its references to 16 bits. - void UnwrapPictureIds(RtpFrameObject* frame); - - // Find references for H264 frames - FrameDecision ManageFrameH264(RtpFrameObject* frame); - - // Update "last-picture-id-with-padding" sequence number for H264. - void UpdateLastPictureIdWithPaddingH264(); - - // Update H264 layer info state used to determine frame references. - void UpdateLayerInfoH264(RtpFrameObject* frame, - int64_t unwrapped_tl0, - uint8_t temporal_idx); - - // Update H264 state for decodeable frames. - void UpdateDataH264(RtpFrameObject* frame, - int64_t unwrapped_tl0, - uint8_t temporal_idx); - - // For every group of pictures, hold two sequence numbers. The first being - // the sequence number of the last packet of the last completed frame, and - // the second being the sequence number of the last packet of the last - // completed frame advanced by any potential continuous packets of padding. 
- std::map, - DescendingSeqNumComp> - last_seq_num_gop_; - - // Save the last picture id in order to detect when there is a gap in frames - // that have not yet been fully received. - int last_picture_id_; - - // Padding packets that have been received but that are not yet continuous - // with any group of pictures. - std::set> stashed_padding_; - - // Frames earlier than the last received frame that have not yet been - // fully received. - std::set> - not_yet_received_frames_; - - // Sequence numbers of frames earlier than the last received frame that - // have not yet been fully received. - std::set> not_yet_received_seq_num_; - - // Frames that have been fully received but didn't have all the information - // needed to determine their references. - std::deque> stashed_frames_; - - // Holds the information about the last completed frame for a given temporal - // layer given an unwrapped Tl0 picture index. - std::map> layer_info_; - - // Where the current scalability structure is in the - // |scalability_structures_| array. - uint8_t current_ss_idx_; - - // Holds received scalability structures. - std::array scalability_structures_; - - // Holds the the Gof information for a given unwrapped TL0 picture index. - std::map gof_info_; - - // Keep track of which picture id and which temporal layer that had the - // up switch flag set. - std::map> - up_switch_; - - // For every temporal layer, keep a set of which frames that are missing. - std::array>, - kMaxTemporalLayers> - missing_frames_for_layer_; - - // How far frames have been cleared by sequence number. A frame will be - // cleared if it contains a packet with a sequence number older than - // |cleared_to_seq_num_|. - int cleared_to_seq_num_; - - OnCompleteFrameCallback* frame_callback_; - - // Unwrapper used to unwrap generic RTP streams. In a generic stream we derive - // a picture id from the packet sequence number. 
- SeqNumUnwrapper rtp_seq_num_unwrapper_; - - // Unwrapper used to unwrap VP8/VP9 streams which have their picture id - // specified. - SeqNumUnwrapper unwrapper_; - - SeqNumUnwrapper tl0_unwrapper_; + void HandOffFrames(ReturnVector frames); + // How far frames have been cleared out of the buffer by RTP sequence number. + // A frame will be cleared if it contains a packet with a sequence number + // older than |cleared_to_seq_num_|. + int cleared_to_seq_num_ = -1; const int64_t picture_id_offset_; + OnCompleteFrameCallback* frame_callback_; + std::unique_ptr impl_; }; -} // namespace video_coding } // namespace webrtc #endif // MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc new file mode 100644 index 000000000..87fff9c26 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/rtp_generic_ref_finder.h" + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +RtpFrameReferenceFinder::ReturnVector RtpGenericFrameRefFinder::ManageFrame( + std::unique_ptr frame, + const RTPVideoHeader::GenericDescriptorInfo& descriptor) { + // Frame IDs are unwrapped in the RtpVideoStreamReceiver, no need to unwrap + // them here. 
+ frame->SetId(descriptor.frame_id); + frame->SetSpatialIndex(descriptor.spatial_index); + + RtpFrameReferenceFinder::ReturnVector res; + if (EncodedFrame::kMaxFrameReferences < descriptor.dependencies.size()) { + RTC_LOG(LS_WARNING) << "Too many dependencies in generic descriptor."; + return res; + } + + frame->num_references = descriptor.dependencies.size(); + for (size_t i = 0; i < descriptor.dependencies.size(); ++i) { + frame->references[i] = descriptor.dependencies[i]; + } + + res.push_back(std::move(frame)); + return res; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.h new file mode 100644 index 000000000..87d7b5940 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_generic_ref_finder.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CODING_RTP_GENERIC_REF_FINDER_H_ +#define MODULES_VIDEO_CODING_RTP_GENERIC_REF_FINDER_H_ + +#include + +#include "modules/video_coding/frame_object.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" + +namespace webrtc { + +class RtpGenericFrameRefFinder { + public: + RtpGenericFrameRefFinder() = default; + + RtpFrameReferenceFinder::ReturnVector ManageFrame( + std::unique_ptr frame, + const RTPVideoHeader::GenericDescriptorInfo& descriptor); +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_RTP_GENERIC_REF_FINDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc new file mode 100644 index 000000000..4381cf095 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/rtp_seq_num_only_ref_finder.h" + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +RtpFrameReferenceFinder::ReturnVector RtpSeqNumOnlyRefFinder::ManageFrame( + std::unique_ptr frame) { + FrameDecision decision = ManageFrameInternal(frame.get()); + + RtpFrameReferenceFinder::ReturnVector res; + switch (decision) { + case kStash: + if (stashed_frames_.size() > kMaxStashedFrames) + stashed_frames_.pop_back(); + stashed_frames_.push_front(std::move(frame)); + return res; + case kHandOff: + res.push_back(std::move(frame)); + RetryStashedFrames(res); + return res; + case kDrop: + return res; + } + + return res; +} + +RtpSeqNumOnlyRefFinder::FrameDecision +RtpSeqNumOnlyRefFinder::ManageFrameInternal(RtpFrameObject* frame) { + if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { + last_seq_num_gop_.insert(std::make_pair( + frame->last_seq_num(), + std::make_pair(frame->last_seq_num(), frame->last_seq_num()))); + } + + // We have received a frame but not yet a keyframe, stash this frame. + if (last_seq_num_gop_.empty()) + return kStash; + + // Clean up info for old keyframes but make sure to keep info + // for the last keyframe. + auto clean_to = last_seq_num_gop_.lower_bound(frame->last_seq_num() - 100); + for (auto it = last_seq_num_gop_.begin(); + it != clean_to && last_seq_num_gop_.size() > 1;) { + it = last_seq_num_gop_.erase(it); + } + + // Find the last sequence number of the last frame for the keyframe + // that this frame indirectly references. + auto seq_num_it = last_seq_num_gop_.upper_bound(frame->last_seq_num()); + if (seq_num_it == last_seq_num_gop_.begin()) { + RTC_LOG(LS_WARNING) << "Generic frame with packet range [" + << frame->first_seq_num() << ", " + << frame->last_seq_num() + << "] has no GoP, dropping frame."; + return kDrop; + } + seq_num_it--; + + // Make sure the packet sequence numbers are continuous, otherwise stash + // this frame. 
+ uint16_t last_picture_id_gop = seq_num_it->second.first; + uint16_t last_picture_id_with_padding_gop = seq_num_it->second.second; + if (frame->frame_type() == VideoFrameType::kVideoFrameDelta) { + uint16_t prev_seq_num = frame->first_seq_num() - 1; + + if (prev_seq_num != last_picture_id_with_padding_gop) + return kStash; + } + + RTC_DCHECK(AheadOrAt(frame->last_seq_num(), seq_num_it->first)); + + // Since keyframes can cause reordering we can't simply assign the + // picture id according to some incrementing counter. + frame->SetId(frame->last_seq_num()); + frame->num_references = + frame->frame_type() == VideoFrameType::kVideoFrameDelta; + frame->references[0] = rtp_seq_num_unwrapper_.Unwrap(last_picture_id_gop); + if (AheadOf(frame->Id(), last_picture_id_gop)) { + seq_num_it->second.first = frame->Id(); + seq_num_it->second.second = frame->Id(); + } + + UpdateLastPictureIdWithPadding(frame->Id()); + frame->SetSpatialIndex(0); + frame->SetId(rtp_seq_num_unwrapper_.Unwrap(frame->Id())); + return kHandOff; +} + +void RtpSeqNumOnlyRefFinder::RetryStashedFrames( + RtpFrameReferenceFinder::ReturnVector& res) { + bool complete_frame = false; + do { + complete_frame = false; + for (auto frame_it = stashed_frames_.begin(); + frame_it != stashed_frames_.end();) { + FrameDecision decision = ManageFrameInternal(frame_it->get()); + + switch (decision) { + case kStash: + ++frame_it; + break; + case kHandOff: + complete_frame = true; + res.push_back(std::move(*frame_it)); + ABSL_FALLTHROUGH_INTENDED; + case kDrop: + frame_it = stashed_frames_.erase(frame_it); + } + } + } while (complete_frame); +} + +void RtpSeqNumOnlyRefFinder::UpdateLastPictureIdWithPadding(uint16_t seq_num) { + auto gop_seq_num_it = last_seq_num_gop_.upper_bound(seq_num); + + // If this padding packet "belongs" to a group of pictures that we don't track + // anymore, do nothing. 
+ if (gop_seq_num_it == last_seq_num_gop_.begin()) + return; + --gop_seq_num_it; + + // Calculate the next contiuous sequence number and search for it in + // the padding packets we have stashed. + uint16_t next_seq_num_with_padding = gop_seq_num_it->second.second + 1; + auto padding_seq_num_it = + stashed_padding_.lower_bound(next_seq_num_with_padding); + + // While there still are padding packets and those padding packets are + // continuous, then advance the "last-picture-id-with-padding" and remove + // the stashed padding packet. + while (padding_seq_num_it != stashed_padding_.end() && + *padding_seq_num_it == next_seq_num_with_padding) { + gop_seq_num_it->second.second = next_seq_num_with_padding; + ++next_seq_num_with_padding; + padding_seq_num_it = stashed_padding_.erase(padding_seq_num_it); + } + + // In the case where the stream has been continuous without any new keyframes + // for a while there is a risk that new frames will appear to be older than + // the keyframe they belong to due to wrapping sequence number. In order + // to prevent this we advance the picture id of the keyframe every so often. 
+ if (ForwardDiff(gop_seq_num_it->first, seq_num) > 10000) { + auto save = gop_seq_num_it->second; + last_seq_num_gop_.clear(); + last_seq_num_gop_[seq_num] = save; + } +} + +RtpFrameReferenceFinder::ReturnVector RtpSeqNumOnlyRefFinder::PaddingReceived( + uint16_t seq_num) { + auto clean_padding_to = + stashed_padding_.lower_bound(seq_num - kMaxPaddingAge); + stashed_padding_.erase(stashed_padding_.begin(), clean_padding_to); + stashed_padding_.insert(seq_num); + UpdateLastPictureIdWithPadding(seq_num); + RtpFrameReferenceFinder::ReturnVector res; + RetryStashedFrames(res); + return res; +} + +void RtpSeqNumOnlyRefFinder::ClearTo(uint16_t seq_num) { + auto it = stashed_frames_.begin(); + while (it != stashed_frames_.end()) { + if (AheadOf(seq_num, (*it)->first_seq_num())) { + it = stashed_frames_.erase(it); + } else { + ++it; + } + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.h new file mode 100644 index 000000000..ef3c02211 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CODING_RTP_SEQ_NUM_ONLY_REF_FINDER_H_ +#define MODULES_VIDEO_CODING_RTP_SEQ_NUM_ONLY_REF_FINDER_H_ + +#include +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "modules/video_coding/frame_object.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/numerics/sequence_number_util.h" + +namespace webrtc { + +class RtpSeqNumOnlyRefFinder { + public: + RtpSeqNumOnlyRefFinder() = default; + + RtpFrameReferenceFinder::ReturnVector ManageFrame( + std::unique_ptr frame); + RtpFrameReferenceFinder::ReturnVector PaddingReceived(uint16_t seq_num); + void ClearTo(uint16_t seq_num); + + private: + static constexpr int kMaxStashedFrames = 100; + static constexpr int kMaxPaddingAge = 100; + + enum FrameDecision { kStash, kHandOff, kDrop }; + + FrameDecision ManageFrameInternal(RtpFrameObject* frame); + void RetryStashedFrames(RtpFrameReferenceFinder::ReturnVector& res); + void UpdateLastPictureIdWithPadding(uint16_t seq_num); + + // For every group of pictures, hold two sequence numbers. The first being + // the sequence number of the last packet of the last completed frame, and + // the second being the sequence number of the last packet of the last + // completed frame advanced by any potential continuous packets of padding. + std::map, + DescendingSeqNumComp> + last_seq_num_gop_; + + // Padding packets that have been received but that are not yet continuous + // with any group of pictures. + std::set> stashed_padding_; + + // Frames that have been fully received but didn't have all the information + // needed to determine their references. + std::deque> stashed_frames_; + + // Unwrapper used to unwrap generic RTP streams. In a generic stream we derive + // a picture id from the packet sequence number. 
+ SeqNumUnwrapper rtp_seq_num_unwrapper_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_RTP_SEQ_NUM_ONLY_REF_FINDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc new file mode 100644 index 000000000..b448b2330 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc @@ -0,0 +1,246 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/rtp_vp8_ref_finder.h" + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +RtpFrameReferenceFinder::ReturnVector RtpVp8RefFinder::ManageFrame( + std::unique_ptr frame) { + FrameDecision decision = ManageFrameInternal(frame.get()); + + RtpFrameReferenceFinder::ReturnVector res; + switch (decision) { + case kStash: + if (stashed_frames_.size() > kMaxStashedFrames) + stashed_frames_.pop_back(); + stashed_frames_.push_front(std::move(frame)); + return res; + case kHandOff: + res.push_back(std::move(frame)); + RetryStashedFrames(res); + return res; + case kDrop: + return res; + } + + return res; +} + +RtpVp8RefFinder::FrameDecision RtpVp8RefFinder::ManageFrameInternal( + RtpFrameObject* frame) { + const RTPVideoHeader& video_header = frame->GetRtpVideoHeader(); + const RTPVideoHeaderVP8& codec_header = + absl::get(video_header.video_type_header); + + // Protect against corrupted packets with arbitrary large temporal idx. 
+ if (codec_header.temporalIdx >= kMaxTemporalLayers) + return kDrop; + + frame->SetSpatialIndex(0); + frame->SetId(codec_header.pictureId & 0x7FFF); + + if (last_picture_id_ == -1) + last_picture_id_ = frame->Id(); + + // Clean up info about not yet received frames that are too old. + uint16_t old_picture_id = + Subtract(frame->Id(), kMaxNotYetReceivedFrames); + auto clean_frames_to = not_yet_received_frames_.lower_bound(old_picture_id); + not_yet_received_frames_.erase(not_yet_received_frames_.begin(), + clean_frames_to); + // Avoid re-adding picture ids that were just erased. + if (AheadOf(old_picture_id, last_picture_id_)) { + last_picture_id_ = old_picture_id; + } + // Find if there has been a gap in fully received frames and save the picture + // id of those frames in |not_yet_received_frames_|. + if (AheadOf(frame->Id(), last_picture_id_)) { + do { + last_picture_id_ = Add(last_picture_id_, 1); + not_yet_received_frames_.insert(last_picture_id_); + } while (last_picture_id_ != frame->Id()); + } + + int64_t unwrapped_tl0 = tl0_unwrapper_.Unwrap(codec_header.tl0PicIdx & 0xFF); + + // Clean up info for base layers that are too old. + int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxLayerInfo; + auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx); + layer_info_.erase(layer_info_.begin(), clean_layer_info_to); + + if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { + if (codec_header.temporalIdx != 0) { + return kDrop; + } + frame->num_references = 0; + layer_info_[unwrapped_tl0].fill(-1); + UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); + return kHandOff; + } + + auto layer_info_it = layer_info_.find( + codec_header.temporalIdx == 0 ? unwrapped_tl0 - 1 : unwrapped_tl0); + + // If we don't have the base layer frame yet, stash this frame. 
+ if (layer_info_it == layer_info_.end()) + return kStash; + + // A non keyframe base layer frame has been received, copy the layer info + // from the previous base layer frame and set a reference to the previous + // base layer frame. + if (codec_header.temporalIdx == 0) { + layer_info_it = + layer_info_.emplace(unwrapped_tl0, layer_info_it->second).first; + frame->num_references = 1; + int64_t last_pid_on_layer = layer_info_it->second[0]; + + // Is this an old frame that has already been used to update the state? If + // so, drop it. + if (AheadOrAt(last_pid_on_layer, frame->Id())) { + return kDrop; + } + + frame->references[0] = last_pid_on_layer; + UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); + return kHandOff; + } + + // Layer sync frame, this frame only references its base layer frame. + if (codec_header.layerSync) { + frame->num_references = 1; + int64_t last_pid_on_layer = layer_info_it->second[codec_header.temporalIdx]; + + // Is this an old frame that has already been used to update the state? If + // so, drop it. + if (last_pid_on_layer != -1 && + AheadOrAt(last_pid_on_layer, frame->Id())) { + return kDrop; + } + + frame->references[0] = layer_info_it->second[0]; + UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); + return kHandOff; + } + + // Find all references for this frame. + frame->num_references = 0; + for (uint8_t layer = 0; layer <= codec_header.temporalIdx; ++layer) { + // If we have not yet received a previous frame on this temporal layer, + // stash this frame. + if (layer_info_it->second[layer] == -1) + return kStash; + + // If the last frame on this layer is ahead of this frame it means that + // a layer sync frame has been received after this frame for the same + // base layer frame, drop this frame. 
+ if (AheadOf(layer_info_it->second[layer], + frame->Id())) { + return kDrop; + } + + // If we have not yet received a frame between this frame and the referenced + // frame then we have to wait for that frame to be completed first. + auto not_received_frame_it = + not_yet_received_frames_.upper_bound(layer_info_it->second[layer]); + if (not_received_frame_it != not_yet_received_frames_.end() && + AheadOf(frame->Id(), + *not_received_frame_it)) { + return kStash; + } + + if (!(AheadOf(frame->Id(), + layer_info_it->second[layer]))) { + RTC_LOG(LS_WARNING) << "Frame with picture id " << frame->Id() + << " and packet range [" << frame->first_seq_num() + << ", " << frame->last_seq_num() + << "] already received, " + " dropping frame."; + return kDrop; + } + + ++frame->num_references; + frame->references[layer] = layer_info_it->second[layer]; + } + + UpdateLayerInfoVp8(frame, unwrapped_tl0, codec_header.temporalIdx); + return kHandOff; +} + +void RtpVp8RefFinder::UpdateLayerInfoVp8(RtpFrameObject* frame, + int64_t unwrapped_tl0, + uint8_t temporal_idx) { + auto layer_info_it = layer_info_.find(unwrapped_tl0); + + // Update this layer info and newer. + while (layer_info_it != layer_info_.end()) { + if (layer_info_it->second[temporal_idx] != -1 && + AheadOf(layer_info_it->second[temporal_idx], + frame->Id())) { + // The frame was not newer, then no subsequent layer info have to be + // update. 
+ break; + } + + layer_info_it->second[temporal_idx] = frame->Id(); + ++unwrapped_tl0; + layer_info_it = layer_info_.find(unwrapped_tl0); + } + not_yet_received_frames_.erase(frame->Id()); + + UnwrapPictureIds(frame); +} + +void RtpVp8RefFinder::RetryStashedFrames( + RtpFrameReferenceFinder::ReturnVector& res) { + bool complete_frame = false; + do { + complete_frame = false; + for (auto frame_it = stashed_frames_.begin(); + frame_it != stashed_frames_.end();) { + FrameDecision decision = ManageFrameInternal(frame_it->get()); + + switch (decision) { + case kStash: + ++frame_it; + break; + case kHandOff: + complete_frame = true; + res.push_back(std::move(*frame_it)); + ABSL_FALLTHROUGH_INTENDED; + case kDrop: + frame_it = stashed_frames_.erase(frame_it); + } + } + } while (complete_frame); +} + +void RtpVp8RefFinder::UnwrapPictureIds(RtpFrameObject* frame) { + for (size_t i = 0; i < frame->num_references; ++i) + frame->references[i] = unwrapper_.Unwrap(frame->references[i]); + frame->SetId(unwrapper_.Unwrap(frame->Id())); +} + +void RtpVp8RefFinder::ClearTo(uint16_t seq_num) { + auto it = stashed_frames_.begin(); + while (it != stashed_frames_.end()) { + if (AheadOf(seq_num, (*it)->first_seq_num())) { + it = stashed_frames_.erase(it); + } else { + ++it; + } + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.h new file mode 100644 index 000000000..0a6cd7e10 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_RTP_VP8_REF_FINDER_H_ +#define MODULES_VIDEO_CODING_RTP_VP8_REF_FINDER_H_ + +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "modules/video_coding/frame_object.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/numerics/sequence_number_util.h" + +namespace webrtc { + +class RtpVp8RefFinder { + public: + RtpVp8RefFinder() = default; + + RtpFrameReferenceFinder::ReturnVector ManageFrame( + std::unique_ptr frame); + void ClearTo(uint16_t seq_num); + + private: + static constexpr int kFrameIdLength = 1 << 15; + static constexpr int kMaxLayerInfo = 50; + static constexpr int kMaxNotYetReceivedFrames = 100; + static constexpr int kMaxStashedFrames = 100; + static constexpr int kMaxTemporalLayers = 5; + + enum FrameDecision { kStash, kHandOff, kDrop }; + + FrameDecision ManageFrameInternal(RtpFrameObject* frame); + void RetryStashedFrames(RtpFrameReferenceFinder::ReturnVector& res); + void UpdateLayerInfoVp8(RtpFrameObject* frame, + int64_t unwrapped_tl0, + uint8_t temporal_idx); + void UnwrapPictureIds(RtpFrameObject* frame); + + // Save the last picture id in order to detect when there is a gap in frames + // that have not yet been fully received. + int last_picture_id_ = -1; + + // Frames earlier than the last received frame that have not yet been + // fully received. + std::set> + not_yet_received_frames_; + + // Frames that have been fully received but didn't have all the information + // needed to determine their references. + std::deque> stashed_frames_; + + // Holds the information about the last completed frame for a given temporal + // layer given an unwrapped Tl0 picture index. + std::map> layer_info_; + + // Unwrapper used to unwrap VP8/VP9 streams which have their picture id + // specified. 
+ SeqNumUnwrapper unwrapper_; + + SeqNumUnwrapper tl0_unwrapper_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_RTP_VP8_REF_FINDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc new file mode 100644 index 000000000..b44bb2500 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc @@ -0,0 +1,353 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/rtp_vp9_ref_finder.h" + +#include +#include + +#include "rtc_base/logging.h" + +namespace webrtc { + +RtpFrameReferenceFinder::ReturnVector RtpVp9RefFinder::ManageFrame( + std::unique_ptr frame) { + FrameDecision decision = ManageFrameInternal(frame.get()); + + RtpFrameReferenceFinder::ReturnVector res; + switch (decision) { + case kStash: + if (stashed_frames_.size() > kMaxStashedFrames) + stashed_frames_.pop_back(); + stashed_frames_.push_front(std::move(frame)); + return res; + case kHandOff: + res.push_back(std::move(frame)); + RetryStashedFrames(res); + return res; + case kDrop: + return res; + } + + return res; +} + +RtpVp9RefFinder::FrameDecision RtpVp9RefFinder::ManageFrameInternal( + RtpFrameObject* frame) { + const RTPVideoHeader& video_header = frame->GetRtpVideoHeader(); + const RTPVideoHeaderVP9& codec_header = + absl::get(video_header.video_type_header); + + // Protect against corrupted packets with arbitrary large temporal idx. 
+ if (codec_header.temporal_idx >= kMaxTemporalLayers || + codec_header.spatial_idx >= kMaxSpatialLayers) + return kDrop; + + frame->SetSpatialIndex(codec_header.spatial_idx); + frame->SetId(codec_header.picture_id & (kFrameIdLength - 1)); + + if (last_picture_id_ == -1) + last_picture_id_ = frame->Id(); + + if (codec_header.flexible_mode) { + if (codec_header.num_ref_pics > EncodedFrame::kMaxFrameReferences) { + return kDrop; + } + frame->num_references = codec_header.num_ref_pics; + for (size_t i = 0; i < frame->num_references; ++i) { + frame->references[i] = + Subtract(frame->Id(), codec_header.pid_diff[i]); + } + + FlattenFrameIdAndRefs(frame, codec_header.inter_layer_predicted); + return kHandOff; + } + + if (codec_header.tl0_pic_idx == kNoTl0PicIdx) { + RTC_LOG(LS_WARNING) << "TL0PICIDX is expected to be present in " + "non-flexible mode."; + return kDrop; + } + + GofInfo* info; + int64_t unwrapped_tl0 = + tl0_unwrapper_.Unwrap(codec_header.tl0_pic_idx & 0xFF); + if (codec_header.ss_data_available) { + if (codec_header.temporal_idx != 0) { + RTC_LOG(LS_WARNING) << "Received scalability structure on a non base " + "layer frame. Scalability structure ignored."; + } else { + if (codec_header.gof.num_frames_in_gof > kMaxVp9FramesInGof) { + return kDrop; + } + + for (size_t i = 0; i < codec_header.gof.num_frames_in_gof; ++i) { + if (codec_header.gof.num_ref_pics[i] > kMaxVp9RefPics) { + return kDrop; + } + } + + GofInfoVP9 gof = codec_header.gof; + if (gof.num_frames_in_gof == 0) { + RTC_LOG(LS_WARNING) << "Number of frames in GOF is zero. 
Assume " + "that stream has only one temporal layer."; + gof.SetGofInfoVP9(kTemporalStructureMode1); + } + + current_ss_idx_ = Add(current_ss_idx_, 1); + scalability_structures_[current_ss_idx_] = gof; + scalability_structures_[current_ss_idx_].pid_start = frame->Id(); + gof_info_.emplace( + unwrapped_tl0, + GofInfo(&scalability_structures_[current_ss_idx_], frame->Id())); + } + + const auto gof_info_it = gof_info_.find(unwrapped_tl0); + if (gof_info_it == gof_info_.end()) + return kStash; + + info = &gof_info_it->second; + + if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { + frame->num_references = 0; + FrameReceivedVp9(frame->Id(), info); + FlattenFrameIdAndRefs(frame, codec_header.inter_layer_predicted); + return kHandOff; + } + } else if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { + if (frame->SpatialIndex() == 0) { + RTC_LOG(LS_WARNING) << "Received keyframe without scalability structure"; + return kDrop; + } + const auto gof_info_it = gof_info_.find(unwrapped_tl0); + if (gof_info_it == gof_info_.end()) + return kStash; + + info = &gof_info_it->second; + + frame->num_references = 0; + FrameReceivedVp9(frame->Id(), info); + FlattenFrameIdAndRefs(frame, codec_header.inter_layer_predicted); + return kHandOff; + } else { + auto gof_info_it = gof_info_.find( + (codec_header.temporal_idx == 0) ? unwrapped_tl0 - 1 : unwrapped_tl0); + + // Gof info for this frame is not available yet, stash this frame. + if (gof_info_it == gof_info_.end()) + return kStash; + + if (codec_header.temporal_idx == 0) { + gof_info_it = gof_info_ + .emplace(unwrapped_tl0, + GofInfo(gof_info_it->second.gof, frame->Id())) + .first; + } + + info = &gof_info_it->second; + } + + // Clean up info for base layers that are too old. 
+ int64_t old_tl0_pic_idx = unwrapped_tl0 - kMaxGofSaved; + auto clean_gof_info_to = gof_info_.lower_bound(old_tl0_pic_idx); + gof_info_.erase(gof_info_.begin(), clean_gof_info_to); + + FrameReceivedVp9(frame->Id(), info); + + // Make sure we don't miss any frame that could potentially have the + // up switch flag set. + if (MissingRequiredFrameVp9(frame->Id(), *info)) + return kStash; + + if (codec_header.temporal_up_switch) + up_switch_.emplace(frame->Id(), codec_header.temporal_idx); + + // Clean out old info about up switch frames. + uint16_t old_picture_id = Subtract(frame->Id(), 50); + auto up_switch_erase_to = up_switch_.lower_bound(old_picture_id); + up_switch_.erase(up_switch_.begin(), up_switch_erase_to); + + size_t diff = + ForwardDiff(info->gof->pid_start, frame->Id()); + size_t gof_idx = diff % info->gof->num_frames_in_gof; + + if (info->gof->num_ref_pics[gof_idx] > EncodedFrame::kMaxFrameReferences) { + return kDrop; + } + // Populate references according to the scalability structure. + frame->num_references = info->gof->num_ref_pics[gof_idx]; + for (size_t i = 0; i < frame->num_references; ++i) { + frame->references[i] = + Subtract(frame->Id(), info->gof->pid_diff[gof_idx][i]); + + // If this is a reference to a frame earlier than the last up switch point, + // then ignore this reference. + if (UpSwitchInIntervalVp9(frame->Id(), codec_header.temporal_idx, + frame->references[i])) { + --frame->num_references; + } + } + + // Override GOF references. 
+ if (!codec_header.inter_pic_predicted) { + frame->num_references = 0; + } + + FlattenFrameIdAndRefs(frame, codec_header.inter_layer_predicted); + return kHandOff; +} + +bool RtpVp9RefFinder::MissingRequiredFrameVp9(uint16_t picture_id, + const GofInfo& info) { + size_t diff = + ForwardDiff(info.gof->pid_start, picture_id); + size_t gof_idx = diff % info.gof->num_frames_in_gof; + size_t temporal_idx = info.gof->temporal_idx[gof_idx]; + + if (temporal_idx >= kMaxTemporalLayers) { + RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers + << " temporal " + "layers are supported."; + return true; + } + + // For every reference this frame has, check if there is a frame missing in + // the interval (|ref_pid|, |picture_id|) in any of the lower temporal + // layers. If so, we are missing a required frame. + uint8_t num_references = info.gof->num_ref_pics[gof_idx]; + for (size_t i = 0; i < num_references; ++i) { + uint16_t ref_pid = + Subtract(picture_id, info.gof->pid_diff[gof_idx][i]); + for (size_t l = 0; l < temporal_idx; ++l) { + auto missing_frame_it = missing_frames_for_layer_[l].lower_bound(ref_pid); + if (missing_frame_it != missing_frames_for_layer_[l].end() && + AheadOf(picture_id, *missing_frame_it)) { + return true; + } + } + } + return false; +} + +void RtpVp9RefFinder::FrameReceivedVp9(uint16_t picture_id, GofInfo* info) { + int last_picture_id = info->last_picture_id; + size_t gof_size = std::min(info->gof->num_frames_in_gof, kMaxVp9FramesInGof); + + // If there is a gap, find which temporal layer the missing frames + // belong to and add the frame as missing for that temporal layer. + // Otherwise, remove this frame from the set of missing frames. 
+ if (AheadOf(picture_id, last_picture_id)) { + size_t diff = ForwardDiff(info->gof->pid_start, + last_picture_id); + size_t gof_idx = diff % gof_size; + + last_picture_id = Add(last_picture_id, 1); + while (last_picture_id != picture_id) { + gof_idx = (gof_idx + 1) % gof_size; + RTC_CHECK(gof_idx < kMaxVp9FramesInGof); + + size_t temporal_idx = info->gof->temporal_idx[gof_idx]; + if (temporal_idx >= kMaxTemporalLayers) { + RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers + << " temporal " + "layers are supported."; + return; + } + + missing_frames_for_layer_[temporal_idx].insert(last_picture_id); + last_picture_id = Add(last_picture_id, 1); + } + + info->last_picture_id = last_picture_id; + } else { + size_t diff = + ForwardDiff(info->gof->pid_start, picture_id); + size_t gof_idx = diff % gof_size; + RTC_CHECK(gof_idx < kMaxVp9FramesInGof); + + size_t temporal_idx = info->gof->temporal_idx[gof_idx]; + if (temporal_idx >= kMaxTemporalLayers) { + RTC_LOG(LS_WARNING) << "At most " << kMaxTemporalLayers + << " temporal " + "layers are supported."; + return; + } + + missing_frames_for_layer_[temporal_idx].erase(picture_id); + } +} + +bool RtpVp9RefFinder::UpSwitchInIntervalVp9(uint16_t picture_id, + uint8_t temporal_idx, + uint16_t pid_ref) { + for (auto up_switch_it = up_switch_.upper_bound(pid_ref); + up_switch_it != up_switch_.end() && + AheadOf(picture_id, up_switch_it->first); + ++up_switch_it) { + if (up_switch_it->second < temporal_idx) + return true; + } + + return false; +} + +void RtpVp9RefFinder::RetryStashedFrames( + RtpFrameReferenceFinder::ReturnVector& res) { + bool complete_frame = false; + do { + complete_frame = false; + for (auto frame_it = stashed_frames_.begin(); + frame_it != stashed_frames_.end();) { + FrameDecision decision = ManageFrameInternal(frame_it->get()); + + switch (decision) { + case kStash: + ++frame_it; + break; + case kHandOff: + complete_frame = true; + res.push_back(std::move(*frame_it)); + ABSL_FALLTHROUGH_INTENDED; + case 
kDrop: + frame_it = stashed_frames_.erase(frame_it); + } + } + } while (complete_frame); +} + +void RtpVp9RefFinder::FlattenFrameIdAndRefs(RtpFrameObject* frame, + bool inter_layer_predicted) { + for (size_t i = 0; i < frame->num_references; ++i) { + frame->references[i] = + unwrapper_.Unwrap(frame->references[i]) * kMaxSpatialLayers + + *frame->SpatialIndex(); + } + frame->SetId(unwrapper_.Unwrap(frame->Id()) * kMaxSpatialLayers + + *frame->SpatialIndex()); + + if (inter_layer_predicted && + frame->num_references + 1 <= EncodedFrame::kMaxFrameReferences) { + frame->references[frame->num_references] = frame->Id() - 1; + ++frame->num_references; + } +} + +void RtpVp9RefFinder::ClearTo(uint16_t seq_num) { + auto it = stashed_frames_.begin(); + while (it != stashed_frames_.end()) { + if (AheadOf(seq_num, (*it)->first_seq_num())) { + it = stashed_frames_.erase(it); + } else { + ++it; + } + } +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.h new file mode 100644 index 000000000..81008fea8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.h @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CODING_RTP_VP9_REF_FINDER_H_ +#define MODULES_VIDEO_CODING_RTP_VP9_REF_FINDER_H_ + +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "modules/video_coding/frame_object.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/numerics/sequence_number_util.h" + +namespace webrtc { + +class RtpVp9RefFinder { + public: + RtpVp9RefFinder() = default; + + RtpFrameReferenceFinder::ReturnVector ManageFrame( + std::unique_ptr frame); + void ClearTo(uint16_t seq_num); + + private: + static constexpr int kFrameIdLength = 1 << 15; + static constexpr int kMaxGofSaved = 50; + static constexpr int kMaxLayerInfo = 50; + static constexpr int kMaxNotYetReceivedFrames = 100; + static constexpr int kMaxStashedFrames = 100; + static constexpr int kMaxTemporalLayers = 5; + + enum FrameDecision { kStash, kHandOff, kDrop }; + + struct GofInfo { + GofInfo(GofInfoVP9* gof, uint16_t last_picture_id) + : gof(gof), last_picture_id(last_picture_id) {} + GofInfoVP9* gof; + uint16_t last_picture_id; + }; + + FrameDecision ManageFrameInternal(RtpFrameObject* frame); + void RetryStashedFrames(RtpFrameReferenceFinder::ReturnVector& res); + + bool MissingRequiredFrameVp9(uint16_t picture_id, const GofInfo& info); + + void FrameReceivedVp9(uint16_t picture_id, GofInfo* info); + bool UpSwitchInIntervalVp9(uint16_t picture_id, + uint8_t temporal_idx, + uint16_t pid_ref); + + void FlattenFrameIdAndRefs(RtpFrameObject* frame, bool inter_layer_predicted); + + // Save the last picture id in order to detect when there is a gap in frames + // that have not yet been fully received. + int last_picture_id_ = -1; + + // Frames that have been fully received but didn't have all the information + // needed to determine their references. + std::deque> stashed_frames_; + + // Where the current scalability structure is in the + // |scalability_structures_| array. 
+ uint8_t current_ss_idx_ = 0; + + // Holds received scalability structures. + std::array scalability_structures_; + + // Holds the the Gof information for a given unwrapped TL0 picture index. + std::map gof_info_; + + // Keep track of which picture id and which temporal layer that had the + // up switch flag set. + std::map> + up_switch_; + + // For every temporal layer, keep a set of which frames that are missing. + std::array>, + kMaxTemporalLayers> + missing_frames_for_layer_; + + // Unwrapper used to unwrap VP8/VP9 streams which have their picture id + // specified. + SeqNumUnwrapper unwrapper_; + + SeqNumUnwrapper tl0_unwrapper_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_RTP_VP9_REF_FINDER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc index 4b4a23ed2..39710d82f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc @@ -12,16 +12,10 @@ #include #include "absl/strings/string_view.h" +#include "modules/video_coding/svc/scalability_structure_full_svc.h" #include "modules/video_coding/svc/scalability_structure_key_svc.h" -#include "modules/video_coding/svc/scalability_structure_l1t2.h" -#include "modules/video_coding/svc/scalability_structure_l1t3.h" -#include "modules/video_coding/svc/scalability_structure_l2t1.h" -#include "modules/video_coding/svc/scalability_structure_l2t1h.h" -#include "modules/video_coding/svc/scalability_structure_l2t2.h" #include "modules/video_coding/svc/scalability_structure_l2t2_key_shift.h" -#include "modules/video_coding/svc/scalability_structure_l3t1.h" -#include "modules/video_coding/svc/scalability_structure_l3t3.h" -#include "modules/video_coding/svc/scalability_structure_s2t1.h" +#include 
"modules/video_coding/svc/scalability_structure_simulcast.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "rtc_base/checks.h" @@ -41,20 +35,31 @@ std::unique_ptr Create() { return std::make_unique(); } +template +std::unique_ptr CreateH() { + // 1.5:1 scaling, see https://w3c.github.io/webrtc-svc/#scalabilitymodes* + typename T::ScalingFactor factor; + factor.num = 2; + factor.den = 3; + return std::make_unique(factor); +} + constexpr NamedStructureFactory kFactories[] = { {"NONE", Create}, {"L1T2", Create}, {"L1T3", Create}, {"L2T1", Create}, - {"L2T1h", Create}, + {"L2T1h", CreateH}, {"L2T1_KEY", Create}, {"L2T2", Create}, {"L2T2_KEY", Create}, {"L2T2_KEY_SHIFT", Create}, + {"L2T3_KEY", Create}, {"L3T1", Create}, {"L3T3", Create}, {"L3T3_KEY", Create}, {"S2T1", Create}, + {"S3T3", Create}, }; } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc index c489b6050..b89de9933 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc @@ -19,9 +19,6 @@ #include "rtc_base/logging.h" namespace webrtc { -namespace { -enum : int { kKey, kDelta }; -} // namespace constexpr int ScalabilityStructureFullSvc::kMaxNumSpatialLayers; constexpr int ScalabilityStructureFullSvc::kMaxNumTemporalLayers; @@ -29,9 +26,11 @@ constexpr absl::string_view ScalabilityStructureFullSvc::kFramePatternNames[]; ScalabilityStructureFullSvc::ScalabilityStructureFullSvc( int num_spatial_layers, - int num_temporal_layers) + int num_temporal_layers, + ScalingFactor resolution_factor) : num_spatial_layers_(num_spatial_layers), num_temporal_layers_(num_temporal_layers), + resolution_factor_(resolution_factor), 
active_decode_targets_( (uint32_t{1} << (num_spatial_layers * num_temporal_layers)) - 1) { RTC_DCHECK_LE(num_spatial_layers, kMaxNumSpatialLayers); @@ -48,8 +47,10 @@ ScalabilityStructureFullSvc::StreamConfig() const { result.scaling_factor_num[num_spatial_layers_ - 1] = 1; result.scaling_factor_den[num_spatial_layers_ - 1] = 1; for (int sid = num_spatial_layers_ - 1; sid > 0; --sid) { - result.scaling_factor_num[sid - 1] = 1; - result.scaling_factor_den[sid - 1] = 2 * result.scaling_factor_den[sid]; + result.scaling_factor_num[sid - 1] = + resolution_factor_.num * result.scaling_factor_num[sid]; + result.scaling_factor_den[sid - 1] = + resolution_factor_.den * result.scaling_factor_den[sid]; } return result; } @@ -98,6 +99,7 @@ ScalabilityStructureFullSvc::FramePattern ScalabilityStructureFullSvc::NextPattern() const { switch (last_pattern_) { case kNone: + return kKey; case kDeltaT2B: return kDeltaT0; case kDeltaT2A: @@ -110,6 +112,7 @@ ScalabilityStructureFullSvc::NextPattern() const { return kDeltaT2B; } return kDeltaT0; + case kKey: case kDeltaT0: if (TemporalLayerIsActive(2)) { return kDeltaT2A; @@ -119,6 +122,8 @@ ScalabilityStructureFullSvc::NextPattern() const { } return kDeltaT0; } + RTC_NOTREACHED(); + return kNone; } std::vector @@ -139,6 +144,7 @@ ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { absl::optional spatial_dependency_buffer_id; switch (current_pattern) { case kDeltaT0: + case kKey: // Disallow temporal references cross T0 on higher temporal layers. can_reference_t1_frame_for_spatial_id_.reset(); for (int sid = 0; sid < num_spatial_layers_; ++sid) { @@ -150,11 +156,11 @@ ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { } configs.emplace_back(); ScalableVideoController::LayerFrameConfig& config = configs.back(); - config.Id(last_pattern_ == kNone ? 
kKey : kDelta).S(sid).T(0); + config.Id(current_pattern).S(sid).T(0); if (spatial_dependency_buffer_id) { config.Reference(*spatial_dependency_buffer_id); - } else if (last_pattern_ == kNone) { + } else if (current_pattern == kKey) { config.Keyframe(); } @@ -178,7 +184,7 @@ ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { } configs.emplace_back(); ScalableVideoController::LayerFrameConfig& config = configs.back(); - config.Id(kDelta).S(sid).T(1); + config.Id(current_pattern).S(sid).T(1); // Temporal reference. config.Reference(BufferIndex(sid, /*tid=*/0)); // Spatial reference unless this is the lowest active spatial layer. @@ -188,7 +194,6 @@ ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { // No frame reference top layer frame, so no need save it into a buffer. if (num_temporal_layers_ > 2 || sid < num_spatial_layers_ - 1) { config.Update(BufferIndex(sid, /*tid=*/1)); - can_reference_t1_frame_for_spatial_id_.set(sid); } spatial_dependency_buffer_id = BufferIndex(sid, /*tid=*/1); } @@ -202,7 +207,7 @@ ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { } configs.emplace_back(); ScalableVideoController::LayerFrameConfig& config = configs.back(); - config.Id(kDelta).S(sid).T(2); + config.Id(current_pattern).S(sid).T(2); // Temporal reference. if (current_pattern == kDeltaT2B && can_reference_t1_frame_for_spatial_id_[sid]) { @@ -240,12 +245,20 @@ ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { return NextFrameConfig(/*restart=*/true); } - last_pattern_ = current_pattern; return configs; } GenericFrameInfo ScalabilityStructureFullSvc::OnEncodeDone( const LayerFrameConfig& config) { + // When encoder drops all frames for a temporal unit, it is better to reuse + // old temporal pattern rather than switch to next one, thus switch to next + // pattern defered here from the `NextFrameConfig`. + // In particular creating VP9 references rely on this behavior. 
+ last_pattern_ = static_cast(config.Id()); + if (config.TemporalId() == 1) { + can_reference_t1_frame_for_spatial_id_.set(config.SpatialId()); + } + GenericFrameInfo frame_info; frame_info.spatial_id = config.SpatialId(); frame_info.temporal_id = config.TemporalId(); @@ -282,4 +295,104 @@ void ScalabilityStructureFullSvc::OnRatesUpdated( } } +FrameDependencyStructure ScalabilityStructureL1T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 1; + structure.decode_target_protected_by_chain = {0, 0}; + structure.templates.resize(3); + structure.templates[0].T(0).Dtis("SS").ChainDiffs({0}); + structure.templates[1].T(0).Dtis("SS").ChainDiffs({2}).FrameDiffs({2}); + structure.templates[2].T(1).Dtis("-D").ChainDiffs({1}).FrameDiffs({1}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureL1T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 3; + structure.num_chains = 1; + structure.decode_target_protected_by_chain = {0, 0, 0}; + structure.templates.resize(5); + structure.templates[0].T(0).Dtis("SSS").ChainDiffs({0}); + structure.templates[1].T(0).Dtis("SSS").ChainDiffs({4}).FrameDiffs({4}); + structure.templates[2].T(1).Dtis("-DS").ChainDiffs({2}).FrameDiffs({2}); + structure.templates[3].T(2).Dtis("--D").ChainDiffs({1}).FrameDiffs({1}); + structure.templates[4].T(2).Dtis("--D").ChainDiffs({3}).FrameDiffs({1}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureL2T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 1}; + structure.templates.resize(4); + structure.templates[0].S(0).Dtis("SR").ChainDiffs({2, 1}).FrameDiffs({2}); + structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0}); + structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({2, 1}); + 
structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureL2T2::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 4; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 1, 1}; + structure.templates.resize(6); + auto& templates = structure.templates; + templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); + templates[1].S(0).T(0).Dtis("SSRR").ChainDiffs({4, 3}).FrameDiffs({4}); + templates[2].S(0).T(1).Dtis("-D-R").ChainDiffs({2, 1}).FrameDiffs({2}); + templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); + templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({4, 1}); + templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2, 1}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 3; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 1, 2}; + auto& templates = structure.templates; + templates.resize(6); + templates[0].S(0).Dtis("SRR").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + templates[1].S(0).Dtis("SSS").ChainDiffs({0, 0, 0}); + templates[2].S(1).Dtis("-SR").ChainDiffs({1, 1, 1}).FrameDiffs({3, 1}); + templates[3].S(1).Dtis("-SS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + templates[4].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({3, 1}); + templates[5].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureL3T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 9; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2}; + auto& t = structure.templates; + t.resize(15); + // Templates are shown in the order frames following them 
appear in the + // stream, but in `structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. Indexes are written in hex for nicer alignment. + t[0x1].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0}); + t[0x6].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); + t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); + t[0x3].S(0).T(2).Dtis("--D--R--R").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[0x8].S(1).T(2).Dtis("-----D--R").ChainDiffs({4, 3, 2}).FrameDiffs({3, 1}); + t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3, 1}); + t[0x2].S(0).T(1).Dtis("-DS-RR-RR").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[0x7].S(1).T(1).Dtis("----DS-RR").ChainDiffs({7, 6, 5}).FrameDiffs({6, 1}); + t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6, 1}); + t[0x4].S(0).T(2).Dtis("--D--R--R").ChainDiffs({9, 8, 7}).FrameDiffs({3}); + t[0x9].S(1).T(2).Dtis("-----D--R").ChainDiffs({10, 9, 8}).FrameDiffs({3, 1}); + t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3, 1}); + t[0x0].S(0).T(0).Dtis("SSSRRRRRR").ChainDiffs({12, 11, 10}).FrameDiffs({12}); + t[0x5].S(1).T(0).Dtis("---SSSRRR").ChainDiffs({1, 1, 1}).FrameDiffs({12, 1}); + t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({12, 1}); + return structure; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h index d490d6e4a..a3cad0af8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.h @@ -21,7 +21,13 @@ namespace webrtc { class ScalabilityStructureFullSvc : public ScalableVideoController { public: - ScalabilityStructureFullSvc(int 
num_spatial_layers, int num_temporal_layers); + struct ScalingFactor { + int num = 1; + int den = 2; + }; + ScalabilityStructureFullSvc(int num_spatial_layers, + int num_temporal_layers, + ScalingFactor resolution_factor); ~ScalabilityStructureFullSvc() override; StreamLayersConfig StreamConfig() const override; @@ -33,13 +39,14 @@ class ScalabilityStructureFullSvc : public ScalableVideoController { private: enum FramePattern { kNone, + kKey, kDeltaT2A, kDeltaT1, kDeltaT2B, kDeltaT0, }; static constexpr absl::string_view kFramePatternNames[] = { - "None", "DeltaT2A", "DeltaT1", "DeltaT2B", "DeltaT0"}; + "None", "Key", "DeltaT2A", "DeltaT1", "DeltaT2B", "DeltaT0"}; static constexpr int kMaxNumSpatialLayers = 3; static constexpr int kMaxNumTemporalLayers = 3; @@ -61,6 +68,7 @@ class ScalabilityStructureFullSvc : public ScalableVideoController { const int num_spatial_layers_; const int num_temporal_layers_; + const ScalingFactor resolution_factor_; FramePattern last_pattern_ = kNone; std::bitset can_reference_t0_frame_for_spatial_id_ = 0; @@ -68,6 +76,88 @@ class ScalabilityStructureFullSvc : public ScalableVideoController { std::bitset<32> active_decode_targets_; }; +// T1 0 0 +// / / / ... +// T0 0---0---0-- +// Time-> 0 1 2 3 4 +class ScalabilityStructureL1T2 : public ScalabilityStructureFullSvc { + public: + explicit ScalabilityStructureL1T2(ScalingFactor resolution_factor = {}) + : ScalabilityStructureFullSvc(1, 2, resolution_factor) {} + ~ScalabilityStructureL1T2() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +// T2 0 0 0 0 +// | / | / +// T1 / 0 / 0 ... 
+// |_/ |_/ +// T0 0-------0------ +// Time-> 0 1 2 3 4 5 6 7 +class ScalabilityStructureL1T3 : public ScalabilityStructureFullSvc { + public: + explicit ScalabilityStructureL1T3(ScalingFactor resolution_factor = {}) + : ScalabilityStructureFullSvc(1, 3, resolution_factor) {} + ~ScalabilityStructureL1T3() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +// S1 0--0--0- +// | | | ... +// S0 0--0--0- +class ScalabilityStructureL2T1 : public ScalabilityStructureFullSvc { + public: + explicit ScalabilityStructureL2T1(ScalingFactor resolution_factor = {}) + : ScalabilityStructureFullSvc(2, 1, resolution_factor) {} + ~ScalabilityStructureL2T1() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +// S1T1 0 0 +// /| /| / +// S1T0 0-+-0-+-0 +// | | | | | ... +// S0T1 | 0 | 0 | +// |/ |/ |/ +// S0T0 0---0---0-- +// Time-> 0 1 2 3 4 +class ScalabilityStructureL2T2 : public ScalabilityStructureFullSvc { + public: + explicit ScalabilityStructureL2T2(ScalingFactor resolution_factor = {}) + : ScalabilityStructureFullSvc(2, 2, resolution_factor) {} + ~ScalabilityStructureL2T2() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +// S2 0-0-0- +// | | | +// S1 0-0-0-... 
+// | | | +// S0 0-0-0- +// Time-> 0 1 2 +class ScalabilityStructureL3T1 : public ScalabilityStructureFullSvc { + public: + explicit ScalabilityStructureL3T1(ScalingFactor resolution_factor = {}) + : ScalabilityStructureFullSvc(3, 1, resolution_factor) {} + ~ScalabilityStructureL3T1() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +// https://www.w3.org/TR/webrtc-svc/#L3T3* +class ScalabilityStructureL3T3 : public ScalabilityStructureFullSvc { + public: + explicit ScalabilityStructureL3T3(ScalingFactor resolution_factor = {}) + : ScalabilityStructureFullSvc(3, 3, resolution_factor) {} + ~ScalabilityStructureL3T3() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + } // namespace webrtc #endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_FULL_SVC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc index cfc89a379..1cee80e84 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc @@ -22,28 +22,6 @@ #include "rtc_base/logging.h" namespace webrtc { -namespace { -// Values to use as LayerFrameConfig::Id -enum : int { kKey, kDelta }; - -DecodeTargetIndication -Dti(int sid, int tid, const ScalableVideoController::LayerFrameConfig& config) { - if (config.IsKeyframe() || config.Id() == kKey) { - RTC_DCHECK_EQ(config.TemporalId(), 0); - return sid < config.SpatialId() ? 
DecodeTargetIndication::kNotPresent - : DecodeTargetIndication::kSwitch; - } - - if (sid != config.SpatialId() || tid < config.TemporalId()) { - return DecodeTargetIndication::kNotPresent; - } - if (tid == config.TemporalId() && tid > 0) { - return DecodeTargetIndication::kDiscardable; - } - return DecodeTargetIndication::kSwitch; -} - -} // namespace constexpr int ScalabilityStructureKeySvc::kMaxNumSpatialLayers; constexpr int ScalabilityStructureKeySvc::kMaxNumTemporalLayers; @@ -88,6 +66,25 @@ bool ScalabilityStructureKeySvc::TemporalLayerIsActive(int tid) const { return false; } +DecodeTargetIndication ScalabilityStructureKeySvc::Dti( + int sid, + int tid, + const LayerFrameConfig& config) { + if (config.IsKeyframe() || config.Id() == kKey) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + return sid < config.SpatialId() ? DecodeTargetIndication::kNotPresent + : DecodeTargetIndication::kSwitch; + } + + if (sid != config.SpatialId() || tid < config.TemporalId()) { + return DecodeTargetIndication::kNotPresent; + } + if (tid == config.TemporalId() && tid > 0) { + return DecodeTargetIndication::kDiscardable; + } + return DecodeTargetIndication::kSwitch; +} + std::vector ScalabilityStructureKeySvc::KeyframeConfig() { std::vector configs; @@ -129,7 +126,7 @@ ScalabilityStructureKeySvc::T0Config() { continue; } configs.emplace_back(); - configs.back().Id(kDelta).S(sid).T(0).ReferenceAndUpdate( + configs.back().Id(kDeltaT0).S(sid).T(0).ReferenceAndUpdate( BufferIndex(sid, /*tid=*/0)); } return configs; @@ -145,17 +142,16 @@ ScalabilityStructureKeySvc::T1Config() { } configs.emplace_back(); ScalableVideoController::LayerFrameConfig& config = configs.back(); - config.Id(kDelta).S(sid).T(1).Reference(BufferIndex(sid, /*tid=*/0)); + config.Id(kDeltaT1).S(sid).T(1).Reference(BufferIndex(sid, /*tid=*/0)); if (num_temporal_layers_ > 2) { config.Update(BufferIndex(sid, /*tid=*/1)); - can_reference_t1_frame_for_spatial_id_.set(sid); } } return configs; } std::vector 
-ScalabilityStructureKeySvc::T2Config() { +ScalabilityStructureKeySvc::T2Config(FramePattern pattern) { std::vector configs; configs.reserve(num_spatial_layers_); for (int sid = 0; sid < num_spatial_layers_; ++sid) { @@ -164,7 +160,7 @@ ScalabilityStructureKeySvc::T2Config() { } configs.emplace_back(); ScalableVideoController::LayerFrameConfig& config = configs.back(); - config.Id(kDelta).S(sid).T(2); + config.Id(pattern).S(sid).T(2); if (can_reference_t1_frame_for_spatial_id_[sid]) { config.Reference(BufferIndex(sid, /*tid=*/1)); } else { @@ -174,6 +170,37 @@ ScalabilityStructureKeySvc::T2Config() { return configs; } +ScalabilityStructureKeySvc::FramePattern +ScalabilityStructureKeySvc::NextPattern(FramePattern last_pattern) const { + switch (last_pattern) { + case kNone: + return kKey; + case kDeltaT2B: + return kDeltaT0; + case kDeltaT2A: + if (TemporalLayerIsActive(1)) { + return kDeltaT1; + } + return kDeltaT0; + case kDeltaT1: + if (TemporalLayerIsActive(2)) { + return kDeltaT2B; + } + return kDeltaT0; + case kDeltaT0: + case kKey: + if (TemporalLayerIsActive(2)) { + return kDeltaT2A; + } + if (TemporalLayerIsActive(1)) { + return kDeltaT1; + } + return kDeltaT0; + } + RTC_NOTREACHED(); + return kNone; +} + std::vector ScalabilityStructureKeySvc::NextFrameConfig(bool restart) { if (active_decode_targets_.none()) { @@ -185,37 +212,19 @@ ScalabilityStructureKeySvc::NextFrameConfig(bool restart) { last_pattern_ = kNone; } - switch (last_pattern_) { - case kNone: - last_pattern_ = kDeltaT0; + FramePattern current_pattern = NextPattern(last_pattern_); + switch (current_pattern) { + case kKey: return KeyframeConfig(); - case kDeltaT2B: - last_pattern_ = kDeltaT0; - return T0Config(); - case kDeltaT2A: - if (TemporalLayerIsActive(1)) { - last_pattern_ = kDeltaT1; - return T1Config(); - } - last_pattern_ = kDeltaT0; + case kDeltaT0: return T0Config(); case kDeltaT1: - if (TemporalLayerIsActive(2)) { - last_pattern_ = kDeltaT2B; - return T2Config(); - } - 
last_pattern_ = kDeltaT0; - return T0Config(); - case kDeltaT0: - if (TemporalLayerIsActive(2)) { - last_pattern_ = kDeltaT2A; - return T2Config(); - } else if (TemporalLayerIsActive(1)) { - last_pattern_ = kDeltaT1; - return T1Config(); - } - last_pattern_ = kDeltaT0; - return T0Config(); + return T1Config(); + case kDeltaT2A: + case kDeltaT2B: + return T2Config(current_pattern); + case kNone: + break; } RTC_NOTREACHED(); return {}; @@ -223,6 +232,15 @@ ScalabilityStructureKeySvc::NextFrameConfig(bool restart) { GenericFrameInfo ScalabilityStructureKeySvc::OnEncodeDone( const LayerFrameConfig& config) { + // When encoder drops all frames for a temporal unit, it is better to reuse + // old temporal pattern rather than switch to next one, thus switch to next + // pattern defered here from the `NextFrameConfig`. + // In particular creating VP9 references rely on this behavior. + last_pattern_ = static_cast(config.Id()); + if (config.TemporalId() == 1) { + can_reference_t1_frame_for_spatial_id_.set(config.SpatialId()); + } + GenericFrameInfo frame_info; frame_info.spatial_id = config.SpatialId(); frame_info.temporal_id = config.TemporalId(); @@ -301,6 +319,29 @@ FrameDependencyStructure ScalabilityStructureL2T2Key::DependencyStructure() return structure; } +ScalabilityStructureL2T3Key::~ScalabilityStructureL2T3Key() = default; + +FrameDependencyStructure ScalabilityStructureL2T3Key::DependencyStructure() + const { + FrameDependencyStructure structure; + structure.num_decode_targets = 6; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1}; + auto& templates = structure.templates; + templates.resize(10); + templates[0].S(0).T(0).Dtis("SSSSSS").ChainDiffs({0, 0}); + templates[1].S(0).T(0).Dtis("SSS---").ChainDiffs({8, 7}).FrameDiffs({8}); + templates[2].S(0).T(1).Dtis("-DS---").ChainDiffs({4, 3}).FrameDiffs({4}); + templates[3].S(0).T(2).Dtis("--D---").ChainDiffs({2, 1}).FrameDiffs({2}); + 
templates[4].S(0).T(2).Dtis("--D---").ChainDiffs({6, 5}).FrameDiffs({2}); + templates[5].S(1).T(0).Dtis("---SSS").ChainDiffs({1, 1}).FrameDiffs({1}); + templates[6].S(1).T(0).Dtis("---SSS").ChainDiffs({1, 8}).FrameDiffs({8}); + templates[7].S(1).T(1).Dtis("----DS").ChainDiffs({5, 4}).FrameDiffs({4}); + templates[8].S(1).T(2).Dtis("-----D").ChainDiffs({3, 2}).FrameDiffs({2}); + templates[9].S(1).T(2).Dtis("-----D").ChainDiffs({7, 6}).FrameDiffs({2}); + return structure; +} + ScalabilityStructureL3T3Key::~ScalabilityStructureL3T3Key() = default; FrameDependencyStructure ScalabilityStructureL3T3Key::DependencyStructure() diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h index 1d3277b5c..b66f6f83e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.h @@ -32,8 +32,9 @@ class ScalabilityStructureKeySvc : public ScalableVideoController { void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; private: - enum FramePattern { + enum FramePattern : int { kNone, + kKey, kDeltaT0, kDeltaT2A, kDeltaT1, @@ -53,10 +54,16 @@ class ScalabilityStructureKeySvc : public ScalableVideoController { active_decode_targets_.set(sid * num_temporal_layers_ + tid, value); } bool TemporalLayerIsActive(int tid) const; + static DecodeTargetIndication Dti(int sid, + int tid, + const LayerFrameConfig& config); + std::vector KeyframeConfig(); std::vector T0Config(); std::vector T1Config(); - std::vector T2Config(); + std::vector T2Config(FramePattern pattern); + + FramePattern NextPattern(FramePattern last_pattern) const; const int num_spatial_layers_; const int num_temporal_layers_; @@ -94,6 +101,14 @@ class ScalabilityStructureL2T2Key : public ScalabilityStructureKeySvc { FrameDependencyStructure 
DependencyStructure() const override; }; +class ScalabilityStructureL2T3Key : public ScalabilityStructureKeySvc { + public: + ScalabilityStructureL2T3Key() : ScalabilityStructureKeySvc(2, 3) {} + ~ScalabilityStructureL2T3Key() override; + + FrameDependencyStructure DependencyStructure() const override; +}; + class ScalabilityStructureL3T3Key : public ScalabilityStructureKeySvc { public: ScalabilityStructureL3T3Key() : ScalabilityStructureKeySvc(3, 3) {} diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc deleted file mode 100644 index f639e2da6..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/svc/scalability_structure_l1t2.h" - -#include - -#include "api/transport/rtp/dependency_descriptor.h" - -namespace webrtc { - -ScalabilityStructureL1T2::~ScalabilityStructureL1T2() = default; - -FrameDependencyStructure ScalabilityStructureL1T2::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 2; - structure.num_chains = 1; - structure.decode_target_protected_by_chain = {0, 0}; - structure.templates.resize(3); - structure.templates[0].T(0).Dtis("SS").ChainDiffs({0}); - structure.templates[1].T(0).Dtis("SS").ChainDiffs({2}).FrameDiffs({2}); - structure.templates[2].T(1).Dtis("-D").ChainDiffs({1}).FrameDiffs({1}); - return structure; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.h deleted file mode 100644 index d2f81aa11..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ -#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ - -#include "api/transport/rtp/dependency_descriptor.h" -#include "modules/video_coding/svc/scalability_structure_full_svc.h" - -namespace webrtc { - -class ScalabilityStructureL1T2 : public ScalabilityStructureFullSvc { - public: - ScalabilityStructureL1T2() : ScalabilityStructureFullSvc(1, 2) {} - ~ScalabilityStructureL1T2() override; - - FrameDependencyStructure DependencyStructure() const override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc deleted file mode 100644 index 17073344c..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/svc/scalability_structure_l1t3.h" - -#include - -#include "api/transport/rtp/dependency_descriptor.h" - -namespace webrtc { - -ScalabilityStructureL1T3::~ScalabilityStructureL1T3() = default; - -FrameDependencyStructure ScalabilityStructureL1T3::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 3; - structure.num_chains = 1; - structure.decode_target_protected_by_chain = {0, 0, 0}; - structure.templates.resize(5); - structure.templates[0].T(0).Dtis("SSS").ChainDiffs({0}); - structure.templates[1].T(0).Dtis("SSS").ChainDiffs({4}).FrameDiffs({4}); - structure.templates[2].T(1).Dtis("-DS").ChainDiffs({2}).FrameDiffs({2}); - structure.templates[3].T(2).Dtis("--D").ChainDiffs({1}).FrameDiffs({1}); - structure.templates[4].T(2).Dtis("--D").ChainDiffs({3}).FrameDiffs({1}); - return structure; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.h deleted file mode 100644 index 00e48ccc4..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ -#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ - -#include "api/transport/rtp/dependency_descriptor.h" -#include "modules/video_coding/svc/scalability_structure_full_svc.h" - -namespace webrtc { - -// T2 0 0 0 0 -// | / | / -// T1 / 0 / 0 ... -// |_/ |_/ -// T0 0-------0------ -// Time-> 0 1 2 3 4 5 6 7 -class ScalabilityStructureL1T3 : public ScalabilityStructureFullSvc { - public: - ScalabilityStructureL1T3() : ScalabilityStructureFullSvc(1, 3) {} - ~ScalabilityStructureL1T3() override; - - FrameDependencyStructure DependencyStructure() const override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L1T3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc deleted file mode 100644 index efd751665..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/svc/scalability_structure_l2t1.h" - -#include - -#include "api/transport/rtp/dependency_descriptor.h" - -namespace webrtc { - -ScalabilityStructureL2T1::~ScalabilityStructureL2T1() = default; - -FrameDependencyStructure ScalabilityStructureL2T1::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 2; - structure.num_chains = 2; - structure.decode_target_protected_by_chain = {0, 1}; - structure.templates.resize(4); - structure.templates[0].S(0).Dtis("SR").ChainDiffs({2, 1}).FrameDiffs({2}); - structure.templates[1].S(0).Dtis("SS").ChainDiffs({0, 0}); - structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({2, 1}); - structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 1}).FrameDiffs({1}); - return structure; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.h deleted file mode 100644 index 96a0da56d..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.h +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ -#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ - -#include "api/transport/rtp/dependency_descriptor.h" -#include "modules/video_coding/svc/scalability_structure_full_svc.h" - -namespace webrtc { - -// S1 0--0--0- -// | | | ... 
-// S0 0--0--0- -class ScalabilityStructureL2T1 : public ScalabilityStructureFullSvc { - public: - ScalabilityStructureL2T1() : ScalabilityStructureFullSvc(2, 1) {} - ~ScalabilityStructureL2T1() override; - - FrameDependencyStructure DependencyStructure() const override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc deleted file mode 100644 index c4682764a..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/svc/scalability_structure_l2t1h.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -ScalabilityStructureL2T1h::~ScalabilityStructureL2T1h() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureL2T1h::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = 2; - result.num_temporal_layers = 1; - // 1.5:1 scaling, see https://w3c.github.io/webrtc-svc/#scalabilitymodes* - result.scaling_factor_num[0] = 2; - result.scaling_factor_den[0] = 3; - return result; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.h deleted file mode 100644 index 7200a1084..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.h +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ -#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ - -#include "modules/video_coding/svc/scalability_structure_l2t1.h" -#include "modules/video_coding/svc/scalable_video_controller.h" - -namespace webrtc { - -class ScalabilityStructureL2T1h : public ScalabilityStructureL2T1 { - public: - ~ScalabilityStructureL2T1h() override; - - StreamLayersConfig StreamConfig() const override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T1H_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc deleted file mode 100644 index a381ad080..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/svc/scalability_structure_l2t2.h" - -#include - -#include "api/transport/rtp/dependency_descriptor.h" - -namespace webrtc { - -ScalabilityStructureL2T2::~ScalabilityStructureL2T2() = default; - -FrameDependencyStructure ScalabilityStructureL2T2::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 4; - structure.num_chains = 2; - structure.decode_target_protected_by_chain = {0, 0, 1, 1}; - structure.templates.resize(6); - auto& templates = structure.templates; - templates[0].S(0).T(0).Dtis("SSSS").ChainDiffs({0, 0}); - templates[1].S(0).T(0).Dtis("SSRR").ChainDiffs({4, 3}).FrameDiffs({4}); - templates[2].S(0).T(1).Dtis("-D-R").ChainDiffs({2, 1}).FrameDiffs({2}); - templates[3].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({1}); - templates[4].S(1).T(0).Dtis("--SS").ChainDiffs({1, 1}).FrameDiffs({4, 1}); - templates[5].S(1).T(1).Dtis("---D").ChainDiffs({3, 2}).FrameDiffs({2, 1}); - return structure; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.h deleted file mode 100644 index 781ea7e60..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ -#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ - -#include "api/transport/rtp/dependency_descriptor.h" -#include "modules/video_coding/svc/scalability_structure_full_svc.h" - -namespace webrtc { - -// S1T1 0 0 -// /| /| / -// S1T0 0-+-0-+-0 -// | | | | | ... -// S0T1 | 0 | 0 | -// |/ |/ |/ -// S0T0 0---0---0-- -// Time-> 0 1 2 3 4 -class ScalabilityStructureL2T2 : public ScalabilityStructureFullSvc { - public: - ScalabilityStructureL2T2() : ScalabilityStructureFullSvc(2, 2) {} - ~ScalabilityStructureL2T2() override; - - FrameDependencyStructure DependencyStructure() const override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc deleted file mode 100644 index d7a532446..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/svc/scalability_structure_l3t1.h" - -#include - -#include "api/transport/rtp/dependency_descriptor.h" - -namespace webrtc { - -ScalabilityStructureL3T1::~ScalabilityStructureL3T1() = default; - -FrameDependencyStructure ScalabilityStructureL3T1::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 3; - structure.num_chains = 3; - structure.decode_target_protected_by_chain = {0, 1, 2}; - auto& templates = structure.templates; - templates.resize(6); - templates[0].S(0).Dtis("SRR").ChainDiffs({3, 2, 1}).FrameDiffs({3}); - templates[1].S(0).Dtis("SSS").ChainDiffs({0, 0, 0}); - templates[2].S(1).Dtis("-SR").ChainDiffs({1, 1, 1}).FrameDiffs({3, 1}); - templates[3].S(1).Dtis("-SS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); - templates[4].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({3, 1}); - templates[5].S(2).Dtis("--S").ChainDiffs({2, 1, 1}).FrameDiffs({1}); - return structure; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.h deleted file mode 100644 index dea40e96b..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ -#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ - -#include "api/transport/rtp/dependency_descriptor.h" -#include "modules/video_coding/svc/scalability_structure_full_svc.h" - -namespace webrtc { - -// S2 0-0-0- -// | | | -// S1 0-0-0-... -// | | | -// S0 0-0-0- -// Time-> 0 1 2 -class ScalabilityStructureL3T1 : public ScalabilityStructureFullSvc { - public: - ScalabilityStructureL3T1() : ScalabilityStructureFullSvc(3, 1) {} - ~ScalabilityStructureL3T1() override; - - FrameDependencyStructure DependencyStructure() const override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T1_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc deleted file mode 100644 index 932056b0d..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/svc/scalability_structure_l3t3.h" - -#include - -#include "api/transport/rtp/dependency_descriptor.h" - -namespace webrtc { - -ScalabilityStructureL3T3::~ScalabilityStructureL3T3() = default; - -FrameDependencyStructure ScalabilityStructureL3T3::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = 9; - structure.num_chains = 3; - structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2}; - auto& t = structure.templates; - t.resize(15); - // Templates are shown in the order frames following them appear in the - // stream, but in `structure.templates` array templates are sorted by - // (`spatial_id`, `temporal_id`) since that is a dependency descriptor - // requirement. Indexes are written in hex for nicer alignment. - t[0x1].S(0).T(0).Dtis("SSSSSSSSS").ChainDiffs({0, 0, 0}); - t[0x6].S(1).T(0).Dtis("---SSSSSS").ChainDiffs({1, 1, 1}).FrameDiffs({1}); - t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({1}); - t[0x3].S(0).T(2).Dtis("--D--R--R").ChainDiffs({3, 2, 1}).FrameDiffs({3}); - t[0x8].S(1).T(2).Dtis("-----D--R").ChainDiffs({4, 3, 2}).FrameDiffs({3, 1}); - t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3, 1}); - t[0x2].S(0).T(1).Dtis("-DS-RR-RR").ChainDiffs({6, 5, 4}).FrameDiffs({6}); - t[0x7].S(1).T(1).Dtis("----DS-RR").ChainDiffs({7, 6, 5}).FrameDiffs({6, 1}); - t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6, 1}); - t[0x4].S(0).T(2).Dtis("--D--R--R").ChainDiffs({9, 8, 7}).FrameDiffs({3}); - t[0x9].S(1).T(2).Dtis("-----D--R").ChainDiffs({10, 9, 8}).FrameDiffs({3, 1}); - t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3, 1}); - t[0x0].S(0).T(0).Dtis("SSSRRRRRR").ChainDiffs({12, 11, 10}).FrameDiffs({12}); - t[0x5].S(1).T(0).Dtis("---SSSRRR").ChainDiffs({1, 1, 1}).FrameDiffs({12, 1}); - t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 1}).FrameDiffs({12, 1}); - return 
structure; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.h deleted file mode 100644 index 3f42726cc..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.h +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ -#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ - -#include "api/transport/rtp/dependency_descriptor.h" -#include "modules/video_coding/svc/scalability_structure_full_svc.h" - -namespace webrtc { - -// https://aomediacodec.github.io/av1-rtp-spec/#a63-l3t3-full-svc -class ScalabilityStructureL3T3 : public ScalabilityStructureFullSvc { - public: - ScalabilityStructureL3T3() : ScalabilityStructureFullSvc(3, 3) {} - ~ScalabilityStructureL3T3() override; - - FrameDependencyStructure DependencyStructure() const override; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L3T3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc deleted file mode 100644 index 618deb4b3..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "modules/video_coding/svc/scalability_structure_s2t1.h" - -#include -#include - -#include "absl/base/macros.h" -#include "api/transport/rtp/dependency_descriptor.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -constexpr int ScalabilityStructureS2T1::kNumSpatialLayers; - -ScalabilityStructureS2T1::~ScalabilityStructureS2T1() = default; - -ScalableVideoController::StreamLayersConfig -ScalabilityStructureS2T1::StreamConfig() const { - StreamLayersConfig result; - result.num_spatial_layers = kNumSpatialLayers; - result.num_temporal_layers = 1; - result.scaling_factor_num[0] = 1; - result.scaling_factor_den[0] = 2; - return result; -} - -FrameDependencyStructure ScalabilityStructureS2T1::DependencyStructure() const { - FrameDependencyStructure structure; - structure.num_decode_targets = kNumSpatialLayers; - structure.num_chains = kNumSpatialLayers; - structure.decode_target_protected_by_chain = {0, 1}; - structure.templates.resize(4); - structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2}); - structure.templates[1].S(0).Dtis("S-").ChainDiffs({0, 0}); - structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2}); - structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 0}); - return structure; -} - -std::vector -ScalabilityStructureS2T1::NextFrameConfig(bool restart) { - if (restart) { - can_reference_frame_for_spatial_id_.reset(); - } - std::vector configs; - configs.reserve(kNumSpatialLayers); - for (int sid = 0; sid < kNumSpatialLayers; ++sid) { - if (!active_decode_targets_[sid]) { - can_reference_frame_for_spatial_id_.reset(sid); - continue; - } - 
configs.emplace_back(); - LayerFrameConfig& config = configs.back().S(sid); - if (can_reference_frame_for_spatial_id_[sid]) { - config.ReferenceAndUpdate(sid); - } else { - config.Keyframe().Update(sid); - can_reference_frame_for_spatial_id_.set(sid); - } - } - - return configs; -} - -GenericFrameInfo ScalabilityStructureS2T1::OnEncodeDone( - const LayerFrameConfig& config) { - GenericFrameInfo frame_info; - frame_info.spatial_id = config.SpatialId(); - frame_info.temporal_id = config.TemporalId(); - frame_info.encoder_buffers = config.Buffers(); - frame_info.decode_target_indications = { - config.SpatialId() == 0 ? DecodeTargetIndication::kSwitch - : DecodeTargetIndication::kNotPresent, - config.SpatialId() == 1 ? DecodeTargetIndication::kSwitch - : DecodeTargetIndication::kNotPresent, - }; - frame_info.part_of_chain = {config.SpatialId() == 0, config.SpatialId() == 1}; - frame_info.active_decode_targets = active_decode_targets_; - return frame_info; -} - -void ScalabilityStructureS2T1::OnRatesUpdated( - const VideoBitrateAllocation& bitrates) { - active_decode_targets_.set(0, bitrates.GetBitrate(/*sid=*/0, /*tid=*/0) > 0); - active_decode_targets_.set(1, bitrates.GetBitrate(/*sid=*/1, /*tid=*/0) > 0); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.h deleted file mode 100644 index 0f27e480f..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ -#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ - -#include - -#include "api/transport/rtp/dependency_descriptor.h" -#include "api/video/video_bitrate_allocation.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" -#include "modules/video_coding/svc/scalable_video_controller.h" - -namespace webrtc { - -// S1 0--0--0- -// ... -// S0 0--0--0- -class ScalabilityStructureS2T1 : public ScalableVideoController { - public: - ~ScalabilityStructureS2T1() override; - - StreamLayersConfig StreamConfig() const override; - FrameDependencyStructure DependencyStructure() const override; - - std::vector NextFrameConfig(bool restart) override; - GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; - void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; - - private: - static constexpr int kNumSpatialLayers = 2; - - std::bitset can_reference_frame_for_spatial_id_; - std::bitset<32> active_decode_targets_ = 0b11; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_S2T1_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc new file mode 100644 index 000000000..c23606673 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc @@ -0,0 +1,273 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/svc/scalability_structure_simulcast.h" + +#include +#include + +#include "absl/base/macros.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { + +DecodeTargetIndication +Dti(int sid, int tid, const ScalableVideoController::LayerFrameConfig& config) { + if (sid != config.SpatialId() || tid < config.TemporalId()) { + return DecodeTargetIndication::kNotPresent; + } + if (tid == 0) { + RTC_DCHECK_EQ(config.TemporalId(), 0); + return DecodeTargetIndication::kSwitch; + } + if (tid == config.TemporalId()) { + return DecodeTargetIndication::kDiscardable; + } + RTC_DCHECK_GT(tid, config.TemporalId()); + return DecodeTargetIndication::kSwitch; +} + +} // namespace + +constexpr int ScalabilityStructureSimulcast::kMaxNumSpatialLayers; +constexpr int ScalabilityStructureSimulcast::kMaxNumTemporalLayers; + +ScalabilityStructureSimulcast::ScalabilityStructureSimulcast( + int num_spatial_layers, + int num_temporal_layers) + : num_spatial_layers_(num_spatial_layers), + num_temporal_layers_(num_temporal_layers), + active_decode_targets_( + (uint32_t{1} << (num_spatial_layers * num_temporal_layers)) - 1) { + RTC_DCHECK_LE(num_spatial_layers, kMaxNumSpatialLayers); + RTC_DCHECK_LE(num_temporal_layers, kMaxNumTemporalLayers); +} + +ScalabilityStructureSimulcast::~ScalabilityStructureSimulcast() = default; + +ScalableVideoController::StreamLayersConfig +ScalabilityStructureSimulcast::StreamConfig() const { + StreamLayersConfig result; + result.num_spatial_layers = num_spatial_layers_; + result.num_temporal_layers = num_temporal_layers_; + result.scaling_factor_num[num_spatial_layers_ - 1] = 1; + result.scaling_factor_den[num_spatial_layers_ - 1] = 1; + for (int sid = num_spatial_layers_ - 1; sid > 0; --sid) { + result.scaling_factor_num[sid - 1] = 1; 
+ result.scaling_factor_den[sid - 1] = 2 * result.scaling_factor_den[sid]; + } + return result; +} + +bool ScalabilityStructureSimulcast::TemporalLayerIsActive(int tid) const { + if (tid >= num_temporal_layers_) { + return false; + } + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (DecodeTargetIsActive(sid, tid)) { + return true; + } + } + return false; +} + +ScalabilityStructureSimulcast::FramePattern +ScalabilityStructureSimulcast::NextPattern() const { + switch (last_pattern_) { + case kNone: + case kDeltaT2B: + return kDeltaT0; + case kDeltaT2A: + if (TemporalLayerIsActive(1)) { + return kDeltaT1; + } + return kDeltaT0; + case kDeltaT1: + if (TemporalLayerIsActive(2)) { + return kDeltaT2B; + } + return kDeltaT0; + case kDeltaT0: + if (TemporalLayerIsActive(2)) { + return kDeltaT2A; + } + if (TemporalLayerIsActive(1)) { + return kDeltaT1; + } + return kDeltaT0; + } + RTC_NOTREACHED(); + return kDeltaT0; +} + +std::vector +ScalabilityStructureSimulcast::NextFrameConfig(bool restart) { + std::vector configs; + if (active_decode_targets_.none()) { + last_pattern_ = kNone; + return configs; + } + configs.reserve(num_spatial_layers_); + + if (last_pattern_ == kNone || restart) { + can_reference_t0_frame_for_spatial_id_.reset(); + last_pattern_ = kNone; + } + FramePattern current_pattern = NextPattern(); + + switch (current_pattern) { + case kDeltaT0: + // Disallow temporal references cross T0 on higher temporal layers. + can_reference_t1_frame_for_spatial_id_.reset(); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/0)) { + // Next frame from the spatial layer `sid` shouldn't depend on + // potentially old previous frame from the spatial layer `sid`. 
+ can_reference_t0_frame_for_spatial_id_.reset(sid); + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(current_pattern).S(sid).T(0); + + if (can_reference_t0_frame_for_spatial_id_[sid]) { + config.ReferenceAndUpdate(BufferIndex(sid, /*tid=*/0)); + } else { + config.Keyframe().Update(BufferIndex(sid, /*tid=*/0)); + } + can_reference_t0_frame_for_spatial_id_.set(sid); + } + break; + case kDeltaT1: + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/1) || + !can_reference_t0_frame_for_spatial_id_[sid]) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(current_pattern) + .S(sid) + .T(1) + .Reference(BufferIndex(sid, /*tid=*/0)); + // Save frame only if there is a higher temporal layer that may need it. + if (num_temporal_layers_ > 2) { + config.Update(BufferIndex(sid, /*tid=*/1)); + } + } + break; + case kDeltaT2A: + case kDeltaT2B: + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + if (!DecodeTargetIsActive(sid, /*tid=*/2) || + !can_reference_t0_frame_for_spatial_id_[sid]) { + continue; + } + configs.emplace_back(); + ScalableVideoController::LayerFrameConfig& config = configs.back(); + config.Id(current_pattern).S(sid).T(2); + if (can_reference_t1_frame_for_spatial_id_[sid]) { + config.Reference(BufferIndex(sid, /*tid=*/1)); + } else { + config.Reference(BufferIndex(sid, /*tid=*/0)); + } + } + break; + case kNone: + RTC_NOTREACHED(); + break; + } + + return configs; +} + +GenericFrameInfo ScalabilityStructureSimulcast::OnEncodeDone( + const LayerFrameConfig& config) { + last_pattern_ = static_cast(config.Id()); + if (config.TemporalId() == 1) { + can_reference_t1_frame_for_spatial_id_.set(config.SpatialId()); + } + GenericFrameInfo frame_info; + frame_info.spatial_id = config.SpatialId(); + frame_info.temporal_id = config.TemporalId(); + frame_info.encoder_buffers = 
config.Buffers(); + frame_info.decode_target_indications.reserve(num_spatial_layers_ * + num_temporal_layers_); + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + frame_info.decode_target_indications.push_back(Dti(sid, tid, config)); + } + } + frame_info.part_of_chain.assign(num_spatial_layers_, false); + if (config.TemporalId() == 0) { + frame_info.part_of_chain[config.SpatialId()] = true; + } + frame_info.active_decode_targets = active_decode_targets_; + return frame_info; +} + +void ScalabilityStructureSimulcast::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { + for (int sid = 0; sid < num_spatial_layers_; ++sid) { + // Enable/disable spatial layers independetely. + bool active = true; + for (int tid = 0; tid < num_temporal_layers_; ++tid) { + // To enable temporal layer, require bitrates for lower temporal layers. + active = active && bitrates.GetBitrate(sid, tid) > 0; + SetDecodeTargetIsActive(sid, tid, active); + } + } +} + +FrameDependencyStructure ScalabilityStructureS2T1::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.decode_target_protected_by_chain = {0, 1}; + structure.templates.resize(4); + structure.templates[0].S(0).Dtis("S-").ChainDiffs({2, 1}).FrameDiffs({2}); + structure.templates[1].S(0).Dtis("S-").ChainDiffs({0, 0}); + structure.templates[2].S(1).Dtis("-S").ChainDiffs({1, 2}).FrameDiffs({2}); + structure.templates[3].S(1).Dtis("-S").ChainDiffs({1, 0}); + return structure; +} + +FrameDependencyStructure ScalabilityStructureS3T3::DependencyStructure() const { + FrameDependencyStructure structure; + structure.num_decode_targets = 9; + structure.num_chains = 3; + structure.decode_target_protected_by_chain = {0, 0, 0, 1, 1, 1, 2, 2, 2}; + auto& t = structure.templates; + t.resize(15); + // Templates are shown in the order frames following them appear in the + // stream, but in 
`structure.templates` array templates are sorted by + // (`spatial_id`, `temporal_id`) since that is a dependency descriptor + // requirement. Indexes are written in hex for nicer alignment. + t[0x1].S(0).T(0).Dtis("SSS------").ChainDiffs({0, 0, 0}); + t[0x6].S(1).T(0).Dtis("---SSS---").ChainDiffs({1, 0, 0}); + t[0xB].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 0}); + t[0x3].S(0).T(2).Dtis("--D------").ChainDiffs({3, 2, 1}).FrameDiffs({3}); + t[0x8].S(1).T(2).Dtis("-----D---").ChainDiffs({4, 3, 2}).FrameDiffs({3}); + t[0xD].S(2).T(2).Dtis("--------D").ChainDiffs({5, 4, 3}).FrameDiffs({3}); + t[0x2].S(0).T(1).Dtis("-DS------").ChainDiffs({6, 5, 4}).FrameDiffs({6}); + t[0x7].S(1).T(1).Dtis("----DS---").ChainDiffs({7, 6, 5}).FrameDiffs({6}); + t[0xC].S(2).T(1).Dtis("-------DS").ChainDiffs({8, 7, 6}).FrameDiffs({6}); + t[0x4].S(0).T(2).Dtis("--D------").ChainDiffs({9, 8, 7}).FrameDiffs({3}); + t[0x9].S(1).T(2).Dtis("-----D---").ChainDiffs({10, 9, 8}).FrameDiffs({3}); + t[0xE].S(2).T(2).Dtis("--------D").ChainDiffs({11, 10, 9}).FrameDiffs({3}); + t[0x0].S(0).T(0).Dtis("SSS------").ChainDiffs({12, 11, 10}).FrameDiffs({12}); + t[0x5].S(1).T(0).Dtis("---SSS---").ChainDiffs({1, 12, 11}).FrameDiffs({12}); + t[0xA].S(2).T(0).Dtis("------SSS").ChainDiffs({2, 1, 12}).FrameDiffs({12}); + return structure; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.h new file mode 100644 index 000000000..7b57df298 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.h @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_SIMULCAST_H_ +#define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_SIMULCAST_H_ + +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { + +// Scalability structure with multiple independent spatial layers each with the +// same temporal layering. +class ScalabilityStructureSimulcast : public ScalableVideoController { + public: + ScalabilityStructureSimulcast(int num_spatial_layers, + int num_temporal_layers); + ~ScalabilityStructureSimulcast() override; + + StreamLayersConfig StreamConfig() const override; + std::vector NextFrameConfig(bool restart) override; + GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; + + private: + enum FramePattern { + kNone, + kDeltaT2A, + kDeltaT1, + kDeltaT2B, + kDeltaT0, + }; + static constexpr int kMaxNumSpatialLayers = 3; + static constexpr int kMaxNumTemporalLayers = 3; + + // Index of the buffer to store last frame for layer (`sid`, `tid`) + int BufferIndex(int sid, int tid) const { + return tid * num_spatial_layers_ + sid; + } + bool DecodeTargetIsActive(int sid, int tid) const { + return active_decode_targets_[sid * num_temporal_layers_ + tid]; + } + void SetDecodeTargetIsActive(int sid, int tid, bool value) { + active_decode_targets_.set(sid * num_temporal_layers_ + tid, value); + } + FramePattern NextPattern() const; + bool TemporalLayerIsActive(int tid) const; + + const int num_spatial_layers_; + const int num_temporal_layers_; + + FramePattern last_pattern_ = kNone; + 
std::bitset can_reference_t0_frame_for_spatial_id_ = 0; + std::bitset can_reference_t1_frame_for_spatial_id_ = 0; + std::bitset<32> active_decode_targets_; +}; + +// S1 0--0--0- +// ... +// S0 0--0--0- +class ScalabilityStructureS2T1 : public ScalabilityStructureSimulcast { + public: + ScalabilityStructureS2T1() : ScalabilityStructureSimulcast(2, 1) {} + ~ScalabilityStructureS2T1() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +class ScalabilityStructureS3T3 : public ScalabilityStructureSimulcast { + public: + ScalabilityStructureS3T3() : ScalabilityStructureSimulcast(3, 3) {} + ~ScalabilityStructureS3T3() override = default; + + FrameDependencyStructure DependencyStructure() const override; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_SIMULCAST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.cc index 2b0393f9c..aeb4d88f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalability_structure_test_helpers.cc @@ -17,7 +17,6 @@ #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_bitrate_allocation.h" -#include "api/video/video_frame_type.h" #include "modules/video_coding/chain_diff_calculator.h" #include "modules/video_coding/frame_dependencies_calculator.h" #include "modules/video_coding/svc/scalable_video_controller.h" @@ -56,8 +55,6 @@ void ScalabilityStructureWrapper::GenerateFrames( frame_info.chain_diffs = chain_diff_calculator_.From(frame_id, frame_info.part_of_chain); for (int64_t base_frame_id : frame_deps_calculator_.FromBuffersUsage( - is_keyframe ? 
VideoFrameType::kVideoFrameKey - : VideoFrameType::kVideoFrameDelta, frame_id, frame_info.encoder_buffers)) { frame_info.frame_diffs.push_back(frame_id - base_frame_id); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc index 6d8e6e8fc..3934e5780 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc @@ -32,14 +32,28 @@ FrameDependencyStructure ScalableVideoControllerNoLayering::DependencyStructure() const { FrameDependencyStructure structure; structure.num_decode_targets = 1; - FrameDependencyTemplate a_template; - a_template.decode_target_indications = {DecodeTargetIndication::kSwitch}; - structure.templates.push_back(a_template); + structure.num_chains = 1; + structure.decode_target_protected_by_chain = {0}; + + FrameDependencyTemplate key_frame; + key_frame.decode_target_indications = {DecodeTargetIndication::kSwitch}; + key_frame.chain_diffs = {0}; + structure.templates.push_back(key_frame); + + FrameDependencyTemplate delta_frame; + delta_frame.decode_target_indications = {DecodeTargetIndication::kSwitch}; + delta_frame.chain_diffs = {1}; + delta_frame.frame_diffs = {1}; + structure.templates.push_back(delta_frame); + return structure; } std::vector ScalableVideoControllerNoLayering::NextFrameConfig(bool restart) { + if (!enabled_) { + return {}; + } std::vector result(1); if (restart || start_) { result[0].Id(0).Keyframe().Update(0); @@ -61,7 +75,13 @@ GenericFrameInfo ScalableVideoControllerNoLayering::OnEncodeDone( } } frame_info.decode_target_indications = {DecodeTargetIndication::kSwitch}; + frame_info.part_of_chain = {true}; return frame_info; } +void ScalableVideoControllerNoLayering::OnRatesUpdated( + const VideoBitrateAllocation& bitrates) { 
+ enabled_ = bitrates.GetBitrate(0, 0) > 0; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.h index e253ffe84..6d66b61c8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.h @@ -28,10 +28,11 @@ class ScalableVideoControllerNoLayering : public ScalableVideoController { std::vector NextFrameConfig(bool restart) override; GenericFrameInfo OnEncodeDone(const LayerFrameConfig& config) override; - void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override {} + void OnRatesUpdated(const VideoBitrateAllocation& bitrates) override; private: bool start_ = true; + bool enabled_ = true; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc index d79075ff2..f6fb81815 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.cc @@ -24,7 +24,7 @@ VCMTimestampMap::VCMTimestampMap(size_t capacity) VCMTimestampMap::~VCMTimestampMap() {} -void VCMTimestampMap::Add(uint32_t timestamp, VCMFrameInformation* data) { +void VCMTimestampMap::Add(uint32_t timestamp, const VCMFrameInformation& data) { ring_buffer_[next_add_idx_].timestamp = timestamp; ring_buffer_[next_add_idx_].data = data; next_add_idx_ = (next_add_idx_ + 1) % capacity_; @@ -35,18 +35,18 @@ void VCMTimestampMap::Add(uint32_t timestamp, VCMFrameInformation* data) { } } -VCMFrameInformation* VCMTimestampMap::Pop(uint32_t timestamp) { +absl::optional VCMTimestampMap::Pop(uint32_t timestamp) { while (!IsEmpty()) { if (ring_buffer_[next_pop_idx_].timestamp == timestamp) { // 
Found start time for this timestamp. - VCMFrameInformation* data = ring_buffer_[next_pop_idx_].data; - ring_buffer_[next_pop_idx_].data = nullptr; + const VCMFrameInformation& data = ring_buffer_[next_pop_idx_].data; + ring_buffer_[next_pop_idx_].timestamp = 0; next_pop_idx_ = (next_pop_idx_ + 1) % capacity_; return data; } else if (IsNewerTimestamp(ring_buffer_[next_pop_idx_].timestamp, timestamp)) { // The timestamp we are looking for is not in the list. - return nullptr; + return absl::nullopt; } // Not in this position, check next (and forget this position). @@ -54,7 +54,7 @@ VCMFrameInformation* VCMTimestampMap::Pop(uint32_t timestamp) { } // Could not find matching timestamp in list. - return nullptr; + return absl::nullopt; } bool VCMTimestampMap::IsEmpty() const { @@ -69,4 +69,11 @@ size_t VCMTimestampMap::Size() const { : next_add_idx_ + capacity_ - next_pop_idx_; } +void VCMTimestampMap::Clear() { + while (!IsEmpty()) { + ring_buffer_[next_pop_idx_].timestamp = 0; + next_pop_idx_ = (next_pop_idx_ + 1) % capacity_; + } +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h index cfa12573e..dc20a0551 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timestamp_map.h @@ -13,23 +13,42 @@ #include +#include "absl/types/optional.h" +#include "api/rtp_packet_infos.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/video_content_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" + namespace webrtc { -struct VCMFrameInformation; +struct VCMFrameInformation { + int64_t renderTimeMs; + absl::optional decodeStart; + void* userData; + VideoRotation rotation; + VideoContentType content_type; + EncodedImage::Timing timing; + int64_t ntp_time_ms; + RtpPacketInfos packet_infos; + // 
ColorSpace is not stored here, as it might be modified by decoders. +}; class VCMTimestampMap { public: explicit VCMTimestampMap(size_t capacity); ~VCMTimestampMap(); - void Add(uint32_t timestamp, VCMFrameInformation* data); - VCMFrameInformation* Pop(uint32_t timestamp); + void Add(uint32_t timestamp, const VCMFrameInformation& data); + absl::optional Pop(uint32_t timestamp); size_t Size() const; + void Clear(); private: struct TimestampDataTuple { uint32_t timestamp; - VCMFrameInformation* data; + VCMFrameInformation data; }; bool IsEmpty() const; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/decoded_frames_history.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/decoded_frames_history.cc index d15cf26d8..005bb26ea 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/decoded_frames_history.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/decoded_frames_history.cc @@ -18,89 +18,63 @@ namespace webrtc { namespace video_coding { -DecodedFramesHistory::LayerHistory::LayerHistory() = default; -DecodedFramesHistory::LayerHistory::~LayerHistory() = default; - DecodedFramesHistory::DecodedFramesHistory(size_t window_size) - : window_size_(window_size) {} + : buffer_(window_size) {} DecodedFramesHistory::~DecodedFramesHistory() = default; -void DecodedFramesHistory::InsertDecoded(const VideoLayerFrameId& frameid, - uint32_t timestamp) { - last_decoded_frame_ = frameid; +void DecodedFramesHistory::InsertDecoded(int64_t frame_id, uint32_t timestamp) { + last_decoded_frame_ = frame_id; last_decoded_frame_timestamp_ = timestamp; - if (static_cast(layers_.size()) < frameid.spatial_layer + 1) { - size_t old_size = layers_.size(); - layers_.resize(frameid.spatial_layer + 1); + int new_index = FrameIdToIndex(frame_id); - for (size_t i = old_size; i < layers_.size(); ++i) - layers_[i].buffer.resize(window_size_); + RTC_DCHECK(last_frame_id_ < frame_id); - layers_[frameid.spatial_layer].last_picture_id 
= frameid.picture_id; - layers_[frameid.spatial_layer] - .buffer[PictureIdToIndex(frameid.picture_id)] = true; - return; - } + // Clears expired values from the cyclic buffer_. + if (last_frame_id_) { + int64_t id_jump = frame_id - *last_frame_id_; + int last_index = FrameIdToIndex(*last_frame_id_); - int new_index = PictureIdToIndex(frameid.picture_id); - LayerHistory& history = layers_[frameid.spatial_layer]; - - RTC_DCHECK(history.last_picture_id < frameid.picture_id); - - // Clears expired values from the cyclic buffer. - if (history.last_picture_id) { - int64_t id_jump = frameid.picture_id - *history.last_picture_id; - int last_index = PictureIdToIndex(*history.last_picture_id); - - if (id_jump >= window_size_) { - std::fill(history.buffer.begin(), history.buffer.end(), false); + if (id_jump >= static_cast(buffer_.size())) { + std::fill(buffer_.begin(), buffer_.end(), false); } else if (new_index > last_index) { - std::fill(history.buffer.begin() + last_index + 1, - history.buffer.begin() + new_index, false); + std::fill(buffer_.begin() + last_index + 1, buffer_.begin() + new_index, + false); } else { - std::fill(history.buffer.begin() + last_index + 1, history.buffer.end(), - false); - std::fill(history.buffer.begin(), history.buffer.begin() + new_index, - false); + std::fill(buffer_.begin() + last_index + 1, buffer_.end(), false); + std::fill(buffer_.begin(), buffer_.begin() + new_index, false); } } - history.buffer[new_index] = true; - history.last_picture_id = frameid.picture_id; + buffer_[new_index] = true; + last_frame_id_ = frame_id; } -bool DecodedFramesHistory::WasDecoded(const VideoLayerFrameId& frameid) { - // Unseen before spatial layer. 
- if (static_cast(layers_.size()) < frameid.spatial_layer + 1) +bool DecodedFramesHistory::WasDecoded(int64_t frame_id) { + if (!last_frame_id_) return false; - LayerHistory& history = layers_[frameid.spatial_layer]; - - if (!history.last_picture_id) - return false; - - // Reference to the picture_id out of the stored history should happen. - if (frameid.picture_id <= *history.last_picture_id - window_size_) { - RTC_LOG(LS_WARNING) << "Referencing a frame out of the history window. " + // Reference to the picture_id out of the stored should happen. + if (frame_id <= *last_frame_id_ - static_cast(buffer_.size())) { + RTC_LOG(LS_WARNING) << "Referencing a frame out of the window. " "Assuming it was undecoded to avoid artifacts."; return false; } - if (frameid.picture_id > history.last_picture_id) + if (frame_id > last_frame_id_) return false; - return history.buffer[PictureIdToIndex(frameid.picture_id)]; + return buffer_[FrameIdToIndex(frame_id)]; } void DecodedFramesHistory::Clear() { - layers_.clear(); last_decoded_frame_timestamp_.reset(); last_decoded_frame_.reset(); + std::fill(buffer_.begin(), buffer_.end(), false); + last_frame_id_.reset(); } -absl::optional -DecodedFramesHistory::GetLastDecodedFrameId() { +absl::optional DecodedFramesHistory::GetLastDecodedFrameId() { return last_decoded_frame_; } @@ -108,9 +82,9 @@ absl::optional DecodedFramesHistory::GetLastDecodedFrameTimestamp() { return last_decoded_frame_timestamp_; } -int DecodedFramesHistory::PictureIdToIndex(int64_t frame_id) const { - int m = frame_id % window_size_; - return m >= 0 ? m : m + window_size_; +int DecodedFramesHistory::FrameIdToIndex(int64_t frame_id) const { + int m = frame_id % buffer_.size(); + return m >= 0 ? 
m : m + buffer_.size(); } } // namespace video_coding diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/decoded_frames_history.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/decoded_frames_history.h index 7cbe1f5cf..06008dc22 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/decoded_frames_history.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/decoded_frames_history.h @@ -27,31 +27,23 @@ class DecodedFramesHistory { // window_size - how much frames back to the past are actually remembered. explicit DecodedFramesHistory(size_t window_size); ~DecodedFramesHistory(); - // Called for each decoded frame. Assumes picture id's are non-decreasing. - void InsertDecoded(const VideoLayerFrameId& frameid, uint32_t timestamp); - // Query if the following (picture_id, spatial_id) pair was inserted before. - // Should be at most less by window_size-1 than the last inserted picture id. - bool WasDecoded(const VideoLayerFrameId& frameid); + // Called for each decoded frame. Assumes frame id's are non-decreasing. + void InsertDecoded(int64_t frame_id, uint32_t timestamp); + // Query if the following (frame_id, spatial_id) pair was inserted before. + // Should be at most less by window_size-1 than the last inserted frame id. + bool WasDecoded(int64_t frame_id); void Clear(); - absl::optional GetLastDecodedFrameId(); + absl::optional GetLastDecodedFrameId(); absl::optional GetLastDecodedFrameTimestamp(); private: - struct LayerHistory { - LayerHistory(); - ~LayerHistory(); - // Cyclic bitset buffer. Stores last known |window_size| bits. 
- std::vector buffer; - absl::optional last_picture_id; - }; + int FrameIdToIndex(int64_t frame_id) const; - int PictureIdToIndex(int64_t frame_id) const; - - const int window_size_; - std::vector layers_; - absl::optional last_decoded_frame_; + std::vector buffer_; + absl::optional last_frame_id_; + absl::optional last_decoded_frame_; absl::optional last_decoded_frame_timestamp_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc index e3c249947..d297590f8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc @@ -29,6 +29,7 @@ constexpr uint8_t kVp8Header[kCodecTypeBytesCount] = {'V', 'P', '8', '0'}; constexpr uint8_t kVp9Header[kCodecTypeBytesCount] = {'V', 'P', '9', '0'}; constexpr uint8_t kAv1Header[kCodecTypeBytesCount] = {'A', 'V', '0', '1'}; constexpr uint8_t kH264Header[kCodecTypeBytesCount] = {'H', '2', '6', '4'}; +constexpr uint8_t kH265Header[kCodecTypeBytesCount] = {'H', '2', '6', '5'}; } // namespace @@ -164,7 +165,7 @@ absl::optional IvfFileReader::NextFrame() { image.SetTimestamp(static_cast(current_timestamp)); } image.SetEncodedData(payload); - image.SetSpatialIndex(static_cast(layer_sizes.size())); + image.SetSpatialIndex(static_cast(layer_sizes.size()) - 1); for (size_t i = 0; i < layer_sizes.size(); ++i) { image.SetSpatialLayerFrameSize(static_cast(i), layer_sizes[i]); } @@ -197,6 +198,9 @@ absl::optional IvfFileReader::ParseCodecType(uint8_t* buffer, if (memcmp(&buffer[start_pos], kH264Header, kCodecTypeBytesCount) == 0) { return VideoCodecType::kVideoCodecH264; } + if (memcmp(&buffer[start_pos], kH265Header, kCodecTypeBytesCount) == 0) { + return VideoCodecType::kVideoCodecH265; + } has_error_ = true; RTC_LOG(LS_ERROR) << "Unknown codec type: " << std::string( diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc index 496da894a..819613b53 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc @@ -87,6 +87,12 @@ bool IvfFileWriter::WriteHeader() { ivf_header[10] = '6'; ivf_header[11] = '4'; break; + case kVideoCodecH265: + ivf_header[8] = 'H'; + ivf_header[9] = '2'; + ivf_header[10] = '6'; + ivf_header[11] = '5'; + break; default: RTC_LOG(LS_ERROR) << "Unknown CODEC type: " << codec_type_; return false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/qp_parser.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/qp_parser.cc new file mode 100644 index 000000000..18f225447 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/qp_parser.cc @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/utility/qp_parser.h" + +#include "modules/video_coding/utility/vp8_header_parser.h" +#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" + +namespace webrtc { + +absl::optional QpParser::Parse(VideoCodecType codec_type, + size_t spatial_idx, + const uint8_t* frame_data, + size_t frame_size) { + if (frame_data == nullptr || frame_size == 0 || + spatial_idx >= kMaxSimulcastStreams) { + return absl::nullopt; + } + + if (codec_type == kVideoCodecVP8) { + int qp = -1; + if (vp8::GetQp(frame_data, frame_size, &qp)) { + return qp; + } + } else if (codec_type == kVideoCodecVP9) { + int qp = -1; + if (vp9::GetQp(frame_data, frame_size, &qp)) { + return qp; + } + } else if (codec_type == kVideoCodecH264) { + return h264_parsers_[spatial_idx].Parse(frame_data, frame_size); + } + + return absl::nullopt; +} + +absl::optional QpParser::H264QpParser::Parse( + const uint8_t* frame_data, + size_t frame_size) { + MutexLock lock(&mutex_); + bitstream_parser_.ParseBitstream( + rtc::ArrayView(frame_data, frame_size)); + return bitstream_parser_.GetLastSliceQp(); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/qp_parser.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/qp_parser.h new file mode 100644 index 000000000..f132ff933 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/qp_parser.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_ +#define MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_ + +#include "absl/types/optional.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "common_video/h264/h264_bitstream_parser.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +class QpParser { + public: + absl::optional Parse(VideoCodecType codec_type, + size_t spatial_idx, + const uint8_t* frame_data, + size_t frame_size); + + private: + // A thread safe wrapper for H264 bitstream parser. + class H264QpParser { + public: + absl::optional Parse(const uint8_t* frame_data, + size_t frame_size); + + private: + Mutex mutex_; + H264BitstreamParser bitstream_parser_ RTC_GUARDED_BY(mutex_); + }; + + H264QpParser h264_parsers_[kMaxSimulcastStreams]; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.h index 28f225f39..20169a3ce 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/quality_scaler.h @@ -18,12 +18,13 @@ #include "absl/types/optional.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/experiments/quality_scaling_experiment.h" #include "rtc_base/numerics/moving_average.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counted_object.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue.h" namespace webrtc { @@ -82,7 +83,7 @@ class QualityScaler { std::unique_ptr pending_qp_task_ RTC_GUARDED_BY(&task_checker_); QualityScalerQpUsageHandlerInterface* const handler_ RTC_GUARDED_BY(&task_checker_); - SequenceChecker task_checker_; 
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker task_checker_; VideoEncoder::QpThresholds thresholds_ RTC_GUARDED_BY(&task_checker_); const int64_t sampling_period_ms_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc index f8ddd4db4..b33e29695 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc @@ -9,90 +9,195 @@ */ #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include "absl/strings/string_view.h" #include "rtc_base/bit_buffer.h" #include "rtc_base/logging.h" namespace webrtc { -#define RETURN_FALSE_IF_ERROR(x) \ - if (!(x)) { \ - return false; \ +// Evaluates x and returns false if false. +#define RETURN_IF_FALSE(x) \ + if (!(x)) { \ + return false; \ } +// Evaluates x, which is intended to return an optional. If result is nullopt, +// returns false. Else, calls fun() with the dereferenced optional as parameter. +#define READ_OR_RETURN(x, fun) \ + do { \ + if (auto optional_val = (x)) { \ + fun(*optional_val); \ + } else { \ + return false; \ + } \ + } while (false) + namespace vp9 { namespace { const size_t kVp9NumRefsPerFrame = 3; const size_t kVp9MaxRefLFDeltas = 4; const size_t kVp9MaxModeLFDeltas = 2; +const size_t kVp9MinTileWidthB64 = 4; +const size_t kVp9MaxTileWidthB64 = 64; -bool Vp9ReadProfile(rtc::BitBuffer* br, uint8_t* profile) { - uint32_t high_bit; - uint32_t low_bit; - RETURN_FALSE_IF_ERROR(br->ReadBits(&low_bit, 1)); - RETURN_FALSE_IF_ERROR(br->ReadBits(&high_bit, 1)); - *profile = (high_bit << 1) + low_bit; - if (*profile > 2) { - uint32_t reserved_bit; - RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1)); - if (reserved_bit) { - RTC_LOG(LS_WARNING) << "Failed to get QP. 
Unsupported bitstream profile."; +class BitstreamReader { + public: + explicit BitstreamReader(rtc::BitBuffer* buffer) : buffer_(buffer) {} + + // Reads on bit from the input stream and: + // * returns false if bit cannot be read + // * calls f_true() if bit is true, returns return value of that function + // * calls f_else() if bit is false, returns return value of that function + bool IfNextBoolean( + std::function f_true, + std::function f_false = [] { return true; }) { + uint32_t val; + if (!buffer_->ReadBits(1, val)) { return false; } + if (val != 0) { + return f_true(); + } + return f_false(); } - return true; -} -bool Vp9ReadSyncCode(rtc::BitBuffer* br) { - uint32_t sync_code; - RETURN_FALSE_IF_ERROR(br->ReadBits(&sync_code, 24)); - if (sync_code != 0x498342) { - RTC_LOG(LS_WARNING) << "Failed to get QP. Invalid sync code."; - return false; + absl::optional ReadBoolean() { + uint32_t val; + if (!buffer_->ReadBits(1, val)) { + return {}; + } + return {val != 0}; } - return true; -} -bool Vp9ReadColorConfig(rtc::BitBuffer* br, - uint8_t profile, - FrameInfo* frame_info) { - if (profile == 0 || profile == 1) { + // Reads a bit from the input stream and returns: + // * false if bit cannot be read + // * true if bit matches expected_val + // * false if bit does not match expected_val - in which case |error_msg| is + // logged as warning, if provided. + bool VerifyNextBooleanIs(bool expected_val, absl::string_view error_msg) { + uint32_t val; + if (!buffer_->ReadBits(1, val)) { + return false; + } + if ((val != 0) != expected_val) { + if (!error_msg.empty()) { + RTC_LOG(LS_WARNING) << error_msg; + } + return false; + } + return true; + } + + // Reads |bits| bits from the bitstream and interprets them as an unsigned + // integer that gets cast to the type T before returning. + // Returns nullopt if all bits cannot be read. + // If number of bits matches size of data type, the bits parameter may be + // omitted. 
Ex: + // ReadUnsigned(2); // Returns uint8_t with 2 LSB populated. + // ReadUnsigned(); // Returns uint8_t with all 8 bits populated. + template + absl::optional ReadUnsigned(int bits = sizeof(T) * 8) { + RTC_DCHECK_LE(bits, 32); + RTC_DCHECK_LE(bits, sizeof(T) * 8); + uint32_t val; + if (!buffer_->ReadBits(bits, val)) { + return {}; + } + return (static_cast(val)); + } + + // Helper method that reads |num_bits| from the bitstream, returns: + // * false if bits cannot be read. + // * true if |expected_val| matches the read bits + // * false if |expected_val| does not match the read bits, and logs + // |error_msg| as a warning (if provided). + bool VerifyNextUnsignedIs(int num_bits, + uint32_t expected_val, + absl::string_view error_msg) { + uint32_t val; + if (!buffer_->ReadBits(num_bits, val)) { + return false; + } + if (val != expected_val) { + if (!error_msg.empty()) { + RTC_LOG(LS_WARNING) << error_msg; + } + return false; + } + return true; + } + + // Basically the same as ReadUnsigned() - but for signed integers. + // Here |bits| indicates the size of the value - number of bits read from the + // bit buffer is one higher (the sign bit). This is made to matche the spec in + // which eg s(4) = f(1) sign-bit, plus an f(4). + template + absl::optional ReadSigned(int bits = sizeof(T) * 8) { + uint32_t sign; + if (!buffer_->ReadBits(1, sign)) { + return {}; + } + uint32_t val; + if (!buffer_->ReadBits(bits, val)) { + return {}; + } + int64_t sign_val = val; + if (sign != 0) { + sign_val = -sign_val; + } + return {static_cast(sign_val)}; + } + + // Reads |bits| from the bitstream, disregarding their value. + // Returns true if full number of bits were read, false otherwise. 
+ bool ConsumeBits(int bits) { return buffer_->ConsumeBits(bits); } + + private: + rtc::BitBuffer* buffer_; +}; + +bool Vp9ReadColorConfig(BitstreamReader* br, FrameInfo* frame_info) { + if (frame_info->profile == 2 || frame_info->profile == 3) { + READ_OR_RETURN(br->ReadBoolean(), [frame_info](bool ten_or_twelve_bits) { + frame_info->bit_detph = + ten_or_twelve_bits ? BitDept::k12Bit : BitDept::k10Bit; + }); + } else { frame_info->bit_detph = BitDept::k8Bit; - } else if (profile == 2 || profile == 3) { - uint32_t ten_or_twelve_bits; - RETURN_FALSE_IF_ERROR(br->ReadBits(&ten_or_twelve_bits, 1)); - frame_info->bit_detph = - ten_or_twelve_bits ? BitDept::k12Bit : BitDept::k10Bit; } - uint32_t color_space; - RETURN_FALSE_IF_ERROR(br->ReadBits(&color_space, 3)); - frame_info->color_space = static_cast(color_space); - // SRGB is 7. - if (color_space != 7) { - uint32_t color_range; - RETURN_FALSE_IF_ERROR(br->ReadBits(&color_range, 1)); - frame_info->color_range = - color_range ? ColorRange::kFull : ColorRange::kStudio; + READ_OR_RETURN( + br->ReadUnsigned(3), [frame_info](uint8_t color_space) { + frame_info->color_space = static_cast(color_space); + }); - if (profile == 1 || profile == 3) { - uint32_t subsampling_x; - uint32_t subsampling_y; - RETURN_FALSE_IF_ERROR(br->ReadBits(&subsampling_x, 1)); - RETURN_FALSE_IF_ERROR(br->ReadBits(&subsampling_y, 1)); - if (subsampling_x) { - frame_info->sub_sampling = - subsampling_y ? YuvSubsampling::k420 : YuvSubsampling::k422; - } else { - frame_info->sub_sampling = - subsampling_y ? YuvSubsampling::k440 : YuvSubsampling::k444; - } + if (frame_info->color_space != ColorSpace::CS_RGB) { + READ_OR_RETURN(br->ReadBoolean(), [frame_info](bool color_range) { + frame_info->color_range = + color_range ? ColorRange::kFull : ColorRange::kStudio; + }); - uint32_t reserved_bit; - RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1)); - if (reserved_bit) { - RTC_LOG(LS_WARNING) << "Failed to parse header. 
Reserved bit set."; - return false; - } + if (frame_info->profile == 1 || frame_info->profile == 3) { + READ_OR_RETURN(br->ReadUnsigned(2), + [frame_info](uint8_t subsampling) { + switch (subsampling) { + case 0b00: + frame_info->sub_sampling = YuvSubsampling::k444; + break; + case 0b01: + frame_info->sub_sampling = YuvSubsampling::k440; + break; + case 0b10: + frame_info->sub_sampling = YuvSubsampling::k422; + break; + case 0b11: + frame_info->sub_sampling = YuvSubsampling::k420; + break; + } + }); + + RETURN_IF_FALSE(br->VerifyNextBooleanIs( + 0, "Failed to parse header. Reserved bit set.")); } else { // Profile 0 or 2. frame_info->sub_sampling = YuvSubsampling::k420; @@ -100,14 +205,10 @@ bool Vp9ReadColorConfig(rtc::BitBuffer* br, } else { // SRGB frame_info->color_range = ColorRange::kFull; - if (profile == 1 || profile == 3) { + if (frame_info->profile == 1 || frame_info->profile == 3) { frame_info->sub_sampling = YuvSubsampling::k444; - uint32_t reserved_bit; - RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1)); - if (reserved_bit) { - RTC_LOG(LS_WARNING) << "Failed to parse header. Reserved bit set."; - return false; - } + RETURN_IF_FALSE(br->VerifyNextBooleanIs( + 0, "Failed to parse header. Reserved bit set.")); } else { RTC_LOG(LS_WARNING) << "Failed to parse header. 4:4:4 color not supported" " in profile 0 or 2."; @@ -118,44 +219,45 @@ bool Vp9ReadColorConfig(rtc::BitBuffer* br, return true; } -bool Vp9ReadFrameSize(rtc::BitBuffer* br, FrameInfo* frame_info) { - // 16 bits: frame width - 1. - uint16_t frame_width_minus_one; - RETURN_FALSE_IF_ERROR(br->ReadUInt16(&frame_width_minus_one)); - // 16 bits: frame height - 1. - uint16_t frame_height_minus_one; - RETURN_FALSE_IF_ERROR(br->ReadUInt16(&frame_height_minus_one)); - frame_info->frame_width = frame_width_minus_one + 1; - frame_info->frame_height = frame_height_minus_one + 1; +bool Vp9ReadFrameSize(BitstreamReader* br, FrameInfo* frame_info) { + // 16 bits: frame (width|height) - 1. 
+ READ_OR_RETURN(br->ReadUnsigned(), [frame_info](uint16_t width) { + frame_info->frame_width = width + 1; + }); + READ_OR_RETURN(br->ReadUnsigned(), [frame_info](uint16_t height) { + frame_info->frame_height = height + 1; + }); return true; } -bool Vp9ReadRenderSize(rtc::BitBuffer* br, FrameInfo* frame_info) { - uint32_t render_and_frame_size_different; - RETURN_FALSE_IF_ERROR(br->ReadBits(&render_and_frame_size_different, 1)); - if (render_and_frame_size_different) { - // 16 bits: render width - 1. - uint16_t render_width_minus_one; - RETURN_FALSE_IF_ERROR(br->ReadUInt16(&render_width_minus_one)); - // 16 bits: render height - 1. - uint16_t render_height_minus_one; - RETURN_FALSE_IF_ERROR(br->ReadUInt16(&render_height_minus_one)); - frame_info->render_width = render_width_minus_one + 1; - frame_info->render_height = render_height_minus_one + 1; - } else { - frame_info->render_width = frame_info->frame_width; - frame_info->render_height = frame_info->frame_height; - } - return true; +bool Vp9ReadRenderSize(BitstreamReader* br, FrameInfo* frame_info) { + // render_and_frame_size_different + return br->IfNextBoolean( + [&] { + // 16 bits: render (width|height) - 1. + READ_OR_RETURN(br->ReadUnsigned(), + [frame_info](uint16_t width) { + frame_info->render_width = width + 1; + }); + READ_OR_RETURN(br->ReadUnsigned(), + [frame_info](uint16_t height) { + frame_info->render_height = height + 1; + }); + return true; + }, + /*else*/ + [&] { + frame_info->render_height = frame_info->frame_height; + frame_info->render_width = frame_info->frame_width; + return true; + }); } -bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br, FrameInfo* frame_info) { - uint32_t found_ref = 0; - for (size_t i = 0; i < kVp9NumRefsPerFrame; i++) { +bool Vp9ReadFrameSizeFromRefs(BitstreamReader* br, FrameInfo* frame_info) { + bool found_ref = false; + for (size_t i = 0; !found_ref && i < kVp9NumRefsPerFrame; i++) { // Size in refs. 
- RETURN_FALSE_IF_ERROR(br->ReadBits(&found_ref, 1)); - if (found_ref) - break; + READ_OR_RETURN(br->ReadBoolean(), [&](bool ref) { found_ref = ref; }); } if (!found_ref) { @@ -166,83 +268,156 @@ bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br, FrameInfo* frame_info) { return Vp9ReadRenderSize(br, frame_info); } -bool Vp9ReadInterpolationFilter(rtc::BitBuffer* br) { - uint32_t bit; - RETURN_FALSE_IF_ERROR(br->ReadBits(&bit, 1)); - if (bit) - return true; - - return br->ConsumeBits(2); -} - -bool Vp9ReadLoopfilter(rtc::BitBuffer* br) { +bool Vp9ReadLoopfilter(BitstreamReader* br) { // 6 bits: filter level. // 3 bits: sharpness level. - RETURN_FALSE_IF_ERROR(br->ConsumeBits(9)); + RETURN_IF_FALSE(br->ConsumeBits(9)); - uint32_t mode_ref_delta_enabled; - RETURN_FALSE_IF_ERROR(br->ReadBits(&mode_ref_delta_enabled, 1)); - if (mode_ref_delta_enabled) { - uint32_t mode_ref_delta_update; - RETURN_FALSE_IF_ERROR(br->ReadBits(&mode_ref_delta_update, 1)); - if (mode_ref_delta_update) { - uint32_t bit; + return br->IfNextBoolean([&] { // if mode_ref_delta_enabled + return br->IfNextBoolean([&] { // if mode_ref_delta_update for (size_t i = 0; i < kVp9MaxRefLFDeltas; i++) { - RETURN_FALSE_IF_ERROR(br->ReadBits(&bit, 1)); - if (bit) { - RETURN_FALSE_IF_ERROR(br->ConsumeBits(7)); - } + RETURN_IF_FALSE(br->IfNextBoolean([&] { return br->ConsumeBits(7); })); } for (size_t i = 0; i < kVp9MaxModeLFDeltas; i++) { - RETURN_FALSE_IF_ERROR(br->ReadBits(&bit, 1)); - if (bit) { - RETURN_FALSE_IF_ERROR(br->ConsumeBits(7)); - } + RETURN_IF_FALSE(br->IfNextBoolean([&] { return br->ConsumeBits(7); })); } - } + return true; + }); + }); +} + +bool Vp9ReadQp(BitstreamReader* br, FrameInfo* frame_info) { + READ_OR_RETURN(br->ReadUnsigned(), + [frame_info](uint8_t qp) { frame_info->base_qp = qp; }); + + // yuv offsets + for (int i = 0; i < 3; ++i) { + RETURN_IF_FALSE(br->IfNextBoolean([br] { // if delta_coded + return br->ConsumeBits(5); + })); } return true; } -} // namespace -bool Parse(const 
uint8_t* buf, size_t length, int* qp, FrameInfo* frame_info) { - rtc::BitBuffer br(buf, length); +bool Vp9ReadSegmentationParams(BitstreamReader* br) { + constexpr int kVp9MaxSegments = 8; + constexpr int kVp9SegLvlMax = 4; + constexpr int kSegmentationFeatureBits[kVp9SegLvlMax] = {8, 6, 2, 0}; + constexpr bool kSegmentationFeatureSigned[kVp9SegLvlMax] = {1, 1, 0, 0}; - // Frame marker. - uint32_t frame_marker; - RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_marker, 2)); - if (frame_marker != 0x2) { - RTC_LOG(LS_WARNING) << "Failed to parse header. Frame marker should be 2."; - return false; + return br->IfNextBoolean([&] { // segmentation_enabled + return br->IfNextBoolean([&] { // update_map + // Consume probs. + for (int i = 0; i < 7; ++i) { + RETURN_IF_FALSE(br->IfNextBoolean([br] { return br->ConsumeBits(7); })); + } + + return br->IfNextBoolean([&] { // temporal_update + // Consume probs. + for (int i = 0; i < 3; ++i) { + RETURN_IF_FALSE( + br->IfNextBoolean([br] { return br->ConsumeBits(7); })); + } + return true; + }); + }); + }); + + return br->IfNextBoolean([&] { + RETURN_IF_FALSE(br->ConsumeBits(1)); // abs_or_delta + for (int i = 0; i < kVp9MaxSegments; ++i) { + for (int j = 0; j < kVp9SegLvlMax; ++j) { + RETURN_IF_FALSE(br->IfNextBoolean([&] { // feature_enabled + return br->ConsumeBits(kSegmentationFeatureBits[j] + + kSegmentationFeatureSigned[j]); + })); + } + } + return true; + }); +} + +bool Vp9ReadTileInfo(BitstreamReader* br, FrameInfo* frame_info) { + size_t mi_cols = (frame_info->frame_width + 7) >> 3; + size_t sb64_cols = (mi_cols + 7) >> 3; + + size_t min_log2 = 0; + while ((kVp9MaxTileWidthB64 << min_log2) < sb64_cols) { + ++min_log2; } - // Profile. 
- uint8_t profile; - if (!Vp9ReadProfile(&br, &profile)) - return false; - frame_info->profile = profile; + size_t max_log2 = 1; + while ((sb64_cols >> max_log2) >= kVp9MinTileWidthB64) { + ++max_log2; + } + --max_log2; + + size_t cols_log2 = min_log2; + bool done = false; + while (!done && cols_log2 < max_log2) { + RETURN_IF_FALSE(br->IfNextBoolean( + [&] { + ++cols_log2; + return true; + }, + [&] { + done = true; + return true; + })); + } + + // rows_log2; + return br->IfNextBoolean([&] { return br->ConsumeBits(1); }); +} +} // namespace + +bool Parse(const uint8_t* buf, size_t length, FrameInfo* frame_info) { + rtc::BitBuffer bit_buffer(buf, length); + BitstreamReader br(&bit_buffer); + + // Frame marker. + RETURN_IF_FALSE(br.VerifyNextUnsignedIs( + 2, 0x2, "Failed to parse header. Frame marker should be 2.")); + + // Profile has low bit first. + READ_OR_RETURN(br.ReadBoolean(), + [frame_info](bool low) { frame_info->profile = int{low}; }); + READ_OR_RETURN(br.ReadBoolean(), [frame_info](bool high) { + frame_info->profile |= int{high} << 1; + }); + if (frame_info->profile > 2) { + RETURN_IF_FALSE(br.VerifyNextBooleanIs( + false, "Failed to get QP. Unsupported bitstream profile.")); + } // Show existing frame. - uint32_t show_existing_frame; - RETURN_FALSE_IF_ERROR(br.ReadBits(&show_existing_frame, 1)); - if (show_existing_frame) - return false; + RETURN_IF_FALSE(br.IfNextBoolean([&] { + READ_OR_RETURN(br.ReadUnsigned(3), + [frame_info](uint8_t frame_idx) { + frame_info->show_existing_frame = frame_idx; + }); + return true; + })); + if (frame_info->show_existing_frame.has_value()) { + return true; + } - // Frame type: KEY_FRAME(0), INTER_FRAME(1). 
- uint32_t frame_type; - uint32_t show_frame; - uint32_t error_resilient; - RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_type, 1)); - RETURN_FALSE_IF_ERROR(br.ReadBits(&show_frame, 1)); - RETURN_FALSE_IF_ERROR(br.ReadBits(&error_resilient, 1)); - frame_info->show_frame = show_frame; - frame_info->error_resilient = error_resilient; + READ_OR_RETURN(br.ReadBoolean(), [frame_info](bool frame_type) { + // Frame type: KEY_FRAME(0), INTER_FRAME(1). + frame_info->is_keyframe = frame_type == 0; + }); + READ_OR_RETURN(br.ReadBoolean(), [frame_info](bool show_frame) { + frame_info->show_frame = show_frame; + }); + READ_OR_RETURN(br.ReadBoolean(), [frame_info](bool error_resilient) { + frame_info->error_resilient = error_resilient; + }); - if (frame_type == 0) { - // Key-frame. - if (!Vp9ReadSyncCode(&br)) - return false; - if (!Vp9ReadColorConfig(&br, profile, frame_info)) + if (frame_info->is_keyframe) { + RETURN_IF_FALSE(br.VerifyNextUnsignedIs( + 24, 0x498342, "Failed to get QP. Invalid sync code.")); + + if (!Vp9ReadColorConfig(&br, frame_info)) return false; if (!Vp9ReadFrameSize(&br, frame_info)) return false; @@ -250,76 +425,92 @@ bool Parse(const uint8_t* buf, size_t length, int* qp, FrameInfo* frame_info) { return false; } else { // Non-keyframe. - uint32_t intra_only = 0; - if (!show_frame) - RETURN_FALSE_IF_ERROR(br.ReadBits(&intra_only, 1)); - if (!error_resilient) - RETURN_FALSE_IF_ERROR(br.ConsumeBits(2)); // Reset frame context. + bool is_intra_only = false; + if (!frame_info->show_frame) { + READ_OR_RETURN(br.ReadBoolean(), + [&](bool intra_only) { is_intra_only = intra_only; }); + } + if (!frame_info->error_resilient) { + RETURN_IF_FALSE(br.ConsumeBits(2)); // Reset frame context. + } - if (intra_only) { - if (!Vp9ReadSyncCode(&br)) - return false; + if (is_intra_only) { + RETURN_IF_FALSE(br.VerifyNextUnsignedIs( + 24, 0x498342, "Failed to get QP. 
Invalid sync code.")); - if (profile > 0) { - if (!Vp9ReadColorConfig(&br, profile, frame_info)) + if (frame_info->profile > 0) { + if (!Vp9ReadColorConfig(&br, frame_info)) return false; } // Refresh frame flags. - RETURN_FALSE_IF_ERROR(br.ConsumeBits(8)); + RETURN_IF_FALSE(br.ConsumeBits(8)); if (!Vp9ReadFrameSize(&br, frame_info)) return false; if (!Vp9ReadRenderSize(&br, frame_info)) return false; } else { // Refresh frame flags. - RETURN_FALSE_IF_ERROR(br.ConsumeBits(8)); + RETURN_IF_FALSE(br.ConsumeBits(8)); for (size_t i = 0; i < kVp9NumRefsPerFrame; i++) { // 3 bits: Ref frame index. // 1 bit: Ref frame sign biases. - RETURN_FALSE_IF_ERROR(br.ConsumeBits(4)); + RETURN_IF_FALSE(br.ConsumeBits(4)); } if (!Vp9ReadFrameSizeFromRefs(&br, frame_info)) return false; // Allow high precision mv. - RETURN_FALSE_IF_ERROR(br.ConsumeBits(1)); + RETURN_IF_FALSE(br.ConsumeBits(1)); // Interpolation filter. - if (!Vp9ReadInterpolationFilter(&br)) - return false; + RETURN_IF_FALSE(br.IfNextBoolean([] { return true; }, + [&br] { return br.ConsumeBits(2); })); } } - if (!error_resilient) { + if (!frame_info->error_resilient) { // 1 bit: Refresh frame context. // 1 bit: Frame parallel decoding mode. - RETURN_FALSE_IF_ERROR(br.ConsumeBits(2)); + RETURN_IF_FALSE(br.ConsumeBits(2)); } // Frame context index. - RETURN_FALSE_IF_ERROR(br.ConsumeBits(2)); + RETURN_IF_FALSE(br.ConsumeBits(2)); if (!Vp9ReadLoopfilter(&br)) return false; - // Base QP. - uint8_t base_q0; - RETURN_FALSE_IF_ERROR(br.ReadUInt8(&base_q0)); - *qp = base_q0; + // Read base QP. + RETURN_IF_FALSE(Vp9ReadQp(&br, frame_info)); + + const bool kParseFullHeader = false; + if (kParseFullHeader) { + // Currently not used, but will be needed when parsing beyond the + // uncompressed header. 
+ RETURN_IF_FALSE(Vp9ReadSegmentationParams(&br)); + + RETURN_IF_FALSE(Vp9ReadTileInfo(&br, frame_info)); + + RETURN_IF_FALSE(br.ConsumeBits(16)); // header_size_in_bytes + } + return true; } bool GetQp(const uint8_t* buf, size_t length, int* qp) { FrameInfo frame_info; - return Parse(buf, length, qp, &frame_info); + if (!Parse(buf, length, &frame_info)) { + return false; + } + *qp = frame_info.base_qp; + return true; } absl::optional ParseIntraFrameInfo(const uint8_t* buf, size_t length) { - int qp = 0; FrameInfo frame_info; - if (Parse(buf, length, &qp, &frame_info) && frame_info.frame_width > 0) { + if (Parse(buf, length, &frame_info) && frame_info.frame_width > 0) { return frame_info; } return absl::nullopt; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h index a7f04670d..7a5e2c058 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h @@ -65,6 +65,8 @@ enum class YuvSubsampling { struct FrameInfo { int profile = 0; // Profile 0-3 are valid. 
+ absl::optional show_existing_frame; + bool is_keyframe = false; bool show_frame = false; bool error_resilient = false; BitDept bit_detph = BitDept::k8Bit; @@ -75,6 +77,7 @@ struct FrameInfo { int frame_height = 0; int render_width = 0; int render_height = 0; + int base_qp = 0; }; // Parses frame information for a VP9 key-frame or all-intra frame from a diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc index 4777fe51c..90a02e0c2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_codec_initializer.cc @@ -20,6 +20,7 @@ #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocation.h" #include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/codecs/av1/av1_svc_config.h" #include "modules/video_coding/codecs/vp9/svc_config.h" #include "modules/video_coding/include/video_coding_defines.h" #include "rtc_base/checks.h" @@ -56,7 +57,6 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0); VideoCodec video_codec; - memset(&video_codec, 0, sizeof(video_codec)); video_codec.codecType = config.codec_type; switch (config.content_type) { @@ -148,6 +148,12 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( video_codec.maxBitrate = kEncoderMinBitrateKbps; video_codec.maxFramerate = max_framerate; + video_codec.spatialLayers[0] = {0}; + video_codec.spatialLayers[0].width = video_codec.width; + video_codec.spatialLayers[0].height = video_codec.height; + video_codec.spatialLayers[0].maxFramerate = max_framerate; + video_codec.spatialLayers[0].numberOfTemporalLayers = + streams[0].num_temporal_layers.value_or(1); // Set codec specific options if (config.encoder_specific_settings) @@ -255,6 +261,11 @@ VideoCodec 
VideoCodecInitializer::VideoEncoderConfigToVideoCodec( break; } + case kVideoCodecAV1: + if (!SetAv1SvcConfig(video_codec)) { + RTC_LOG(LS_WARNING) << "Failed to configure svc bitrates for av1."; + } + break; case kVideoCodecH264: { if (!config.encoder_specific_settings) *video_codec.H264() = VideoEncoder::GetDefaultH264Settings(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.cc index 049695d75..f19ea5132 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.cc @@ -13,10 +13,10 @@ #include #include +#include "api/sequence_checker.h" #include "api/video/encoded_image.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/timing.h" -#include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -105,7 +105,7 @@ class VideoCodingModuleImpl : public VideoCodingModule { } private: - rtc::ThreadChecker construction_thread_; + SequenceChecker construction_thread_; const std::unique_ptr timing_; vcm::VideoReceiver receiver_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.h index aee6337e5..d74799460 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_coding_impl.h @@ -16,6 +16,7 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/frame_buffer.h" #include "modules/video_coding/generic_decoder.h" @@ -25,9 +26,7 @@ #include "modules/video_coding/timing.h" #include "rtc_base/one_time_event.h" #include "rtc_base/synchronization/mutex.h" -#include 
"rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -97,9 +96,9 @@ class VideoReceiver : public Module { // In builds where DCHECKs aren't enabled, it will return true. bool IsDecoderThreadRunning(); - rtc::ThreadChecker construction_thread_checker_; - rtc::ThreadChecker decoder_thread_checker_; - rtc::ThreadChecker module_thread_checker_; + SequenceChecker construction_thread_checker_; + SequenceChecker decoder_thread_checker_; + SequenceChecker module_thread_checker_; Clock* const clock_; Mutex process_mutex_; VCMTiming* _timing; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc index c2c8f8aa1..43dbc9f0b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver.cc @@ -14,6 +14,7 @@ #include #include "api/rtp_headers.h" +#include "api/sequence_checker.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" #include "modules/utility/include/process_thread.h" @@ -33,7 +34,6 @@ #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/one_time_event.h" -#include "rtc_base/thread_checker.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" @@ -173,8 +173,7 @@ int32_t VideoReceiver::RegisterPacketRequestCallback( // Should be called as often as possible to get the most out of the decoder. 
int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) { RTC_DCHECK_RUN_ON(&decoder_thread_checker_); - VCMEncodedFrame* frame = _receiver.FrameForDecoding( - maxWaitTimeMs, _codecDataBase.PrefersLateDecoding()); + VCMEncodedFrame* frame = _receiver.FrameForDecoding(maxWaitTimeMs, true); if (!frame) return VCM_FRAME_NOT_READY; @@ -280,7 +279,7 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload, // Callers don't provide any ntp time. const VCMPacket packet(incomingPayload, payloadLength, rtp_header, video_header, /*ntp_time_ms=*/0, - clock_->TimeInMilliseconds()); + clock_->CurrentTime()); int32_t ret = _receiver.InsertPacket(packet); // TODO(holmer): Investigate if this somehow should use the key frame diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.cc index 6b3cb6367..b893b954b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.cc @@ -33,18 +33,18 @@ VideoReceiver2::VideoReceiver2(Clock* clock, VCMTiming* timing) timing_(timing), decodedFrameCallback_(timing_, clock_), codecDataBase_() { - decoder_thread_checker_.Detach(); + decoder_sequence_checker_.Detach(); } VideoReceiver2::~VideoReceiver2() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(&construction_sequence_checker_); } // Register a receive callback. Will be called whenever there is a new frame // ready for rendering. int32_t VideoReceiver2::RegisterReceiveCallback( VCMReceiveCallback* receiveCallback) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(&construction_sequence_checker_); RTC_DCHECK(!IsDecoderThreadRunning()); // This value is set before the decoder thread starts and unset after // the decoder thread has been stopped. 
@@ -52,20 +52,35 @@ int32_t VideoReceiver2::RegisterReceiveCallback( return VCM_OK; } -// Register an externally defined decoder object. +// Register an externally defined decoder object. This may be called on either +// the construction sequence or the decoder sequence to allow for lazy creation +// of video decoders. If called on the decoder sequence |externalDecoder| cannot +// be a nullptr. It's the responsibility of the caller to make sure that the +// access from the two sequences are mutually exclusive. void VideoReceiver2::RegisterExternalDecoder(VideoDecoder* externalDecoder, uint8_t payloadType) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); - RTC_DCHECK(!IsDecoderThreadRunning()); + if (IsDecoderThreadRunning()) { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); + // Don't allow deregistering decoders on the decoder thread. + RTC_DCHECK(externalDecoder != nullptr); + } else { + RTC_DCHECK_RUN_ON(&construction_sequence_checker_); + } + if (externalDecoder == nullptr) { - RTC_CHECK(codecDataBase_.DeregisterExternalDecoder(payloadType)); + codecDataBase_.DeregisterExternalDecoder(payloadType); return; } codecDataBase_.RegisterExternalDecoder(externalDecoder, payloadType); } +bool VideoReceiver2::IsExternalDecoderRegistered(uint8_t payloadType) const { + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); + return codecDataBase_.IsExternalDecoderRegistered(payloadType); +} + void VideoReceiver2::DecoderThreadStarting() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(&construction_sequence_checker_); RTC_DCHECK(!IsDecoderThreadRunning()); #if RTC_DCHECK_IS_ON decoder_thread_is_running_ = true; @@ -73,17 +88,17 @@ void VideoReceiver2::DecoderThreadStarting() { } void VideoReceiver2::DecoderThreadStopped() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(&construction_sequence_checker_); RTC_DCHECK(IsDecoderThreadRunning()); #if RTC_DCHECK_IS_ON decoder_thread_is_running_ = false; - 
decoder_thread_checker_.Detach(); + decoder_sequence_checker_.Detach(); #endif } // Must be called from inside the receive side critical section. int32_t VideoReceiver2::Decode(const VCMEncodedFrame* frame) { - RTC_DCHECK_RUN_ON(&decoder_thread_checker_); + RTC_DCHECK_RUN_ON(&decoder_sequence_checker_); TRACE_EVENT0("webrtc", "VideoReceiver2::Decode"); // Change decoder if payload type has changed VCMGenericDecoder* decoder = @@ -98,7 +113,7 @@ int32_t VideoReceiver2::Decode(const VCMEncodedFrame* frame) { int32_t VideoReceiver2::RegisterReceiveCodec(uint8_t payload_type, const VideoCodec* receiveCodec, int32_t numberOfCores) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(&construction_sequence_checker_); RTC_DCHECK(!IsDecoderThreadRunning()); if (receiveCodec == nullptr) { return VCM_PARAMETER_ERROR; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.h index c7b7b80b6..0c3fe1a25 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/video_receiver2.h @@ -11,11 +11,11 @@ #ifndef MODULES_VIDEO_CODING_VIDEO_RECEIVER2_H_ #define MODULES_VIDEO_CODING_VIDEO_RECEIVER2_H_ +#include "api/sequence_checker.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/generic_decoder.h" #include "modules/video_coding/timing.h" -#include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -36,6 +36,7 @@ class VideoReceiver2 { void RegisterExternalDecoder(VideoDecoder* externalDecoder, uint8_t payloadType); + bool IsExternalDecoderRegistered(uint8_t payloadType) const; int32_t RegisterReceiveCallback(VCMReceiveCallback* receiveCallback); int32_t Decode(const webrtc::VCMEncodedFrame* frame); @@ -54,8 +55,8 @@ class VideoReceiver2 { // In builds where DCHECKs 
aren't enabled, it will return true. bool IsDecoderThreadRunning(); - rtc::ThreadChecker construction_thread_checker_; - rtc::ThreadChecker decoder_thread_checker_; + SequenceChecker construction_sequence_checker_; + SequenceChecker decoder_sequence_checker_; Clock* const clock_; VCMTiming* timing_; VCMDecodedFrameCallback decodedFrameCallback_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.h index 0db50471e..1d574f4a4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter.h @@ -30,11 +30,6 @@ class DenoiserFilter { CpuType* cpu_type); virtual ~DenoiserFilter() {} - - virtual void CopyMem16x16(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride) = 0; virtual uint32_t Variance16x8(const uint8_t* a, int a_stride, const uint8_t* b, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc index b1831a677..5411e556e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc @@ -15,17 +15,6 @@ namespace webrtc { -void DenoiserFilterC::CopyMem16x16(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride) { - for (int i = 0; i < 16; i++) { - memcpy(dst, src, 16); - src += src_stride; - dst += dst_stride; - } -} - uint32_t DenoiserFilterC::Variance16x8(const uint8_t* a, int a_stride, const uint8_t* b, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.h index f05663e1a..5633c171f 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_c.h @@ -20,10 +20,6 @@ namespace webrtc { class DenoiserFilterC : public DenoiserFilter { public: DenoiserFilterC() {} - void CopyMem16x16(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride) override; uint32_t Variance16x8(const uint8_t* a, int a_stride, const uint8_t* b, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.cc index 4eabe02ea..e1e6ed4f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.cc @@ -64,19 +64,6 @@ static void VarianceNeonW8(const uint8_t* a, static_cast(HorizontalAddS32x4(vaddq_s32(v_sse_lo, v_sse_hi))); } -void DenoiserFilterNEON::CopyMem16x16(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride) { - uint8x16_t qtmp; - for (int r = 0; r < 16; r++) { - qtmp = vld1q_u8(src); - vst1q_u8(dst, qtmp); - src += src_stride; - dst += dst_stride; - } -} - uint32_t DenoiserFilterNEON::Variance16x8(const uint8_t* a, int a_stride, const uint8_t* b, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.h index decbd41c0..4d9f271e5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_neon.h @@ -18,10 +18,6 @@ namespace webrtc { class DenoiserFilterNEON : public DenoiserFilter { public: DenoiserFilterNEON() {} - void CopyMem16x16(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride) override; uint32_t Variance16x8(const uint8_t* a, int 
a_stride, const uint8_t* b, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc index 281169bcc..5ca5f0cf3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc @@ -100,18 +100,6 @@ static uint32_t AbsSumDiff16x1(__m128i acc_diff) { return sum_diff; } -// TODO(jackychen): Optimize this function using SSE2. -void DenoiserFilterSSE2::CopyMem16x16(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride) { - for (int i = 0; i < 16; i++) { - memcpy(dst, src, 16); - src += src_stride; - dst += dst_stride; - } -} - uint32_t DenoiserFilterSSE2::Variance16x8(const uint8_t* src, int src_stride, const uint8_t* ref, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.h index 6fb7279a2..8fe4b905a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/denoiser_filter_sse2.h @@ -20,10 +20,6 @@ namespace webrtc { class DenoiserFilterSSE2 : public DenoiserFilter { public: DenoiserFilterSSE2() {} - void CopyMem16x16(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride) override; uint32_t Variance16x8(const uint8_t* a, int a_stride, const uint8_t* b, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc b/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc index 3a1812514..1d844e61d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/video_denoiser.cc @@ -19,17 +19,6 @@ namespace webrtc { #if DISPLAY || DISPLAYNEON 
-static void CopyMem8x8(const uint8_t* src, - int src_stride, - uint8_t* dst, - int dst_stride) { - for (int i = 0; i < 8; i++) { - memcpy(dst, src, 8); - src += src_stride; - dst += dst_stride; - } -} - static void ShowRect(const std::unique_ptr& filter, const std::unique_ptr& d_status, const std::unique_ptr& moving_edge_red, @@ -58,16 +47,16 @@ static void ShowRect(const std::unique_ptr& filter, memset(uv_tmp, 200, 8 * 8); if (d_status[mb_index] == 1) { // Paint to red. - CopyMem8x8(mb_src_u, stride_u_src, mb_dst_u, stride_u_dst); - CopyMem8x8(uv_tmp, 8, mb_dst_v, stride_v_dst); + libyuv::CopyPlane(mb_src_u, stride_u_src, mb_dst_u, stride_u_dst, 8, 8); + libyuv::CopyPlane(uv_tmp, 8, mb_dst_v, stride_v_dst, 8, 8); } else if (moving_edge_red[mb_row * mb_cols_ + mb_col] && x_density[mb_col] * y_density[mb_row]) { // Paint to blue. - CopyMem8x8(uv_tmp, 8, mb_dst_u, stride_u_dst); - CopyMem8x8(mb_src_v, stride_v_src, mb_dst_v, stride_v_dst); + libyuv::CopyPlane(uv_tmp, 8, mb_dst_u, stride_u_dst, 8, 8); + libyuv::CopyPlane(mb_src_v, stride_v_src, mb_dst_v, stride_v_dst, 8, 8); } else { - CopyMem8x8(mb_src_u, stride_u_src, mb_dst_u, stride_u_dst); - CopyMem8x8(mb_src_v, stride_v_src, mb_dst_v, stride_v_dst); + libyuv::CopyPlane(mb_src_u, stride_u_src, mb_dst_u, stride_u_dst, 8, 8); + libyuv::CopyPlane(mb_src_v, stride_v_src, mb_dst_v, stride_v_dst, 8, 8); } } } @@ -194,7 +183,7 @@ void VideoDenoiser::CopySrcOnMOB(const uint8_t* y_src, (x_density_[mb_col] * y_density_[mb_row] && moving_object_[mb_row * mb_cols_ + mb_col])) { // Copy y source. 
- filter_->CopyMem16x16(mb_src, stride_src, mb_dst, stride_dst); + libyuv::CopyPlane(mb_src, stride_src, mb_dst, stride_dst, 16, 16); } } } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/OWNERS b/TMessagesProj/jni/voip/webrtc/net/dcsctp/OWNERS new file mode 100644 index 000000000..06a0f8617 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/OWNERS @@ -0,0 +1,2 @@ +boivie@webrtc.org +orphis@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/internal_types.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/internal_types.h new file mode 100644 index 000000000..b651d45d9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/internal_types.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_COMMON_INTERNAL_TYPES_H_ +#define NET_DCSCTP_COMMON_INTERNAL_TYPES_H_ + +#include + +#include "net/dcsctp/public/strong_alias.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// Stream Sequence Number (SSN) +using SSN = StrongAlias; + +// Message Identifier (MID) +using MID = StrongAlias; + +// Fragment Sequence Number (FSN) +using FSN = StrongAlias; + +// Transmission Sequence Number (TSN) +using TSN = StrongAlias; + +// Reconfiguration Request Sequence Number +using ReconfigRequestSN = StrongAlias; + +// Verification Tag, used for packet validation. +using VerificationTag = StrongAlias; + +// Tie Tag, used as a nonce when connecting. +using TieTag = StrongAlias; + +// Hasher for separated ordered/unordered stream identifiers. 
+struct UnorderedStreamHash { + size_t operator()(const std::pair& p) const { + return std::hash{}(*p.first) ^ + (std::hash{}(*p.second) << 1); + } +}; + +} // namespace dcsctp +#endif // NET_DCSCTP_COMMON_INTERNAL_TYPES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/math.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/math.h new file mode 100644 index 000000000..12f690ed5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/math.h @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_COMMON_MATH_H_ +#define NET_DCSCTP_COMMON_MATH_H_ + +namespace dcsctp { + +// Rounds up `val` to the nearest value that is divisible by four. Frequently +// used to e.g. pad chunks or parameters to an even 32-bit offset. +template +IntType RoundUpTo4(IntType val) { + return (val + 3) & ~3; +} + +// Similarly, rounds down `val` to the nearest value that is divisible by four. +template +IntType RoundDownTo4(IntType val) { + return val & ~3; +} + +// Returns true if `val` is divisible by four. +template +bool IsDivisibleBy4(IntType val) { + return (val & 3) == 0; +} + +} // namespace dcsctp + +#endif // NET_DCSCTP_COMMON_MATH_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/pair_hash.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/pair_hash.h new file mode 100644 index 000000000..62af8b422 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/pair_hash.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_COMMON_PAIR_HASH_H_ +#define NET_DCSCTP_COMMON_PAIR_HASH_H_ + +#include + +#include +#include + +namespace dcsctp { + +// A custom hash function for std::pair, to be able to be used as key in a +// std::unordered_map. If absl::flat_hash_map would ever be used, this is +// unnecessary as it already has a hash function for std::pair. +struct PairHash { + template + size_t operator()(const std::pair& p) const { + return (3 * std::hash{}(p.first)) ^ std::hash{}(p.second); + } +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_COMMON_PAIR_HASH_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/sequence_numbers.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/sequence_numbers.h new file mode 100644 index 000000000..52b638b54 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/sequence_numbers.h @@ -0,0 +1,166 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_COMMON_SEQUENCE_NUMBERS_H_ +#define NET_DCSCTP_COMMON_SEQUENCE_NUMBERS_H_ + +#include +#include +#include + +#include "net/dcsctp/common/internal_types.h" + +namespace dcsctp { + +// UnwrappedSequenceNumber handles wrapping sequence numbers and unwraps them to +// an int64_t value space, to allow wrapped sequence numbers to be easily +// compared for ordering. +// +// Sequence numbers are expected to be monotonically increasing, but they do not +// need to be unwrapped in order, as long as the difference to the previous one +// is not larger than half the range of the wrapped sequence number. +// +// The WrappedType must be a StrongAlias type. +template +class UnwrappedSequenceNumber { + public: + static_assert( + !std::numeric_limits::is_signed, + "The wrapped type must be unsigned"); + static_assert( + std::numeric_limits::max() < + std::numeric_limits::max(), + "The wrapped type must be less than the int64_t value space"); + + // The unwrapper is a sort of factory and converts wrapped sequence numbers to + // unwrapped ones. + class Unwrapper { + public: + Unwrapper() : largest_(kValueLimit) {} + Unwrapper(const Unwrapper&) = default; + Unwrapper& operator=(const Unwrapper&) = default; + + // Given a wrapped `value`, and with knowledge of its current last seen + // largest number, will return a value that can be compared using normal + // operators, such as less-than, greater-than etc. + // + // This will also update the Unwrapper's state, to track the last seen + // largest value. + UnwrappedSequenceNumber Unwrap(WrappedType value) { + WrappedType wrapped_largest = + static_cast(largest_ % kValueLimit); + int64_t result = largest_ + Delta(value, wrapped_largest); + if (largest_ < result) { + largest_ = result; + } + return UnwrappedSequenceNumber(result); + } + + // Similar to `Unwrap`, but will not update the Unwrappers's internal state. 
+ UnwrappedSequenceNumber PeekUnwrap(WrappedType value) const { + WrappedType uint32_largest = + static_cast(largest_ % kValueLimit); + int64_t result = largest_ + Delta(value, uint32_largest); + return UnwrappedSequenceNumber(result); + } + + // Resets the Unwrapper to its pristine state. Used when a sequence number + // is to be reset to zero. + void Reset() { largest_ = kValueLimit; } + + private: + static int64_t Delta(WrappedType value, WrappedType prev_value) { + static constexpr typename WrappedType::UnderlyingType kBreakpoint = + kValueLimit / 2; + typename WrappedType::UnderlyingType diff = *value - *prev_value; + diff %= kValueLimit; + if (diff < kBreakpoint) { + return static_cast(diff); + } + return static_cast(diff) - kValueLimit; + } + + int64_t largest_; + }; + + // Returns the wrapped value this type represents. + WrappedType Wrap() const { + return static_cast(value_ % kValueLimit); + } + + template + friend H AbslHashValue(H state, + const UnwrappedSequenceNumber& hash) { + return H::combine(std::move(state), hash.value_); + } + + bool operator==(const UnwrappedSequenceNumber& other) const { + return value_ == other.value_; + } + bool operator!=(const UnwrappedSequenceNumber& other) const { + return value_ != other.value_; + } + bool operator<(const UnwrappedSequenceNumber& other) const { + return value_ < other.value_; + } + bool operator>(const UnwrappedSequenceNumber& other) const { + return value_ > other.value_; + } + bool operator>=(const UnwrappedSequenceNumber& other) const { + return value_ >= other.value_; + } + bool operator<=(const UnwrappedSequenceNumber& other) const { + return value_ <= other.value_; + } + + // Increments the value. + void Increment() { ++value_; } + + // Returns the next value relative to this sequence number. + UnwrappedSequenceNumber next_value() const { + return UnwrappedSequenceNumber(value_ + 1); + } + + // Returns a new sequence number based on `value`, and adding `delta` (which + // may be negative). 
+ static UnwrappedSequenceNumber AddTo( + UnwrappedSequenceNumber value, + int delta) { + return UnwrappedSequenceNumber(value.value_ + delta); + } + + // Returns the absolute difference between `lhs` and `rhs`. + static typename WrappedType::UnderlyingType Difference( + UnwrappedSequenceNumber lhs, + UnwrappedSequenceNumber rhs) { + return (lhs.value_ > rhs.value_) ? (lhs.value_ - rhs.value_) + : (rhs.value_ - lhs.value_); + } + + private: + explicit UnwrappedSequenceNumber(int64_t value) : value_(value) {} + static constexpr int64_t kValueLimit = + static_cast(1) + << std::numeric_limits::digits; + + int64_t value_; +}; + +// Unwrapped Transmission Sequence Numbers (TSN) +using UnwrappedTSN = UnwrappedSequenceNumber; + +// Unwrapped Stream Sequence Numbers (SSN) +using UnwrappedSSN = UnwrappedSequenceNumber; + +// Unwrapped Message Identifier (MID) +using UnwrappedMID = UnwrappedSequenceNumber; + +} // namespace dcsctp + +#endif // NET_DCSCTP_COMMON_SEQUENCE_NUMBERS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/str_join.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/str_join.h new file mode 100644 index 000000000..04517827b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/common/str_join.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_COMMON_STR_JOIN_H_ +#define NET_DCSCTP_COMMON_STR_JOIN_H_ + +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +template +std::string StrJoin(const Range& seq, absl::string_view delimiter) { + rtc::StringBuilder sb; + int idx = 0; + + for (const typename Range::value_type& elem : seq) { + if (idx > 0) { + sb << delimiter; + } + sb << elem; + + ++idx; + } + return sb.Release(); +} + +template +std::string StrJoin(const Range& seq, + absl::string_view delimiter, + const Functor& fn) { + rtc::StringBuilder sb; + int idx = 0; + + for (const typename Range::value_type& elem : seq) { + if (idx > 0) { + sb << delimiter; + } + fn(sb, elem); + + ++idx; + } + return sb.Release(); +} + +} // namespace dcsctp + +#endif // NET_DCSCTP_COMMON_STR_JOIN_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.cc new file mode 100644 index 000000000..b4b6224ec --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.cc @@ -0,0 +1,460 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/fuzzers/dcsctp_fuzzers.h" + +#include +#include +#include + +#include "net/dcsctp/common/math.h" +#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" +#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h" +#include "net/dcsctp/packet/chunk/data_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/chunk/shutdown_chunk.h" +#include "net/dcsctp/packet/error_cause/protocol_violation_cause.h" +#include "net/dcsctp/packet/error_cause/user_initiated_abort_cause.h" +#include "net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h" +#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/state_cookie_parameter.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/types.h" +#include "net/dcsctp/socket/dcsctp_socket.h" +#include "net/dcsctp/socket/state_cookie.h" +#include "rtc_base/logging.h" + +namespace dcsctp { +namespace dcsctp_fuzzers { +namespace { +static constexpr int kRandomValue = FuzzerCallbacks::kRandomValue; +static constexpr size_t kMinInputLength = 5; +static constexpr size_t kMaxInputLength = 1024; + +// A starting state for the socket, when fuzzing. 
+enum class StartingState : int { + kConnectNotCalled, + // When socket initiating Connect + kConnectCalled, + kReceivedInitAck, + kReceivedCookieAck, + // When socket initiating Shutdown + kShutdownCalled, + kReceivedShutdownAck, + // When peer socket initiated Connect + kReceivedInit, + kReceivedCookieEcho, + // When peer initiated Shutdown + kReceivedShutdown, + kReceivedShutdownComplete, + kNumberOfStates, +}; + +// State about the current fuzzing iteration +class FuzzState { + public: + explicit FuzzState(rtc::ArrayView data) : data_(data) {} + + uint8_t GetByte() { + uint8_t value = 0; + if (offset_ < data_.size()) { + value = data_[offset_]; + ++offset_; + } + return value; + } + + TSN GetNextTSN() { return TSN(tsn_++); } + MID GetNextMID() { return MID(mid_++); } + + bool empty() const { return offset_ >= data_.size(); } + + private: + uint32_t tsn_ = kRandomValue; + uint32_t mid_ = 0; + rtc::ArrayView data_; + size_t offset_ = 0; +}; + +void SetSocketState(DcSctpSocketInterface& socket, + FuzzerCallbacks& socket_cb, + StartingState state) { + // We'll use another temporary peer socket for the establishment. 
+ FuzzerCallbacks peer_cb; + DcSctpSocket peer("peer", peer_cb, nullptr, {}); + + switch (state) { + case StartingState::kConnectNotCalled: + return; + case StartingState::kConnectCalled: + socket.Connect(); + return; + case StartingState::kReceivedInitAck: + socket.Connect(); + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK + return; + case StartingState::kReceivedCookieAck: + socket.Connect(); + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK + return; + case StartingState::kShutdownCalled: + socket.Connect(); + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK + socket.Shutdown(); + return; + case StartingState::kReceivedShutdownAck: + socket.Connect(); + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK + socket.Shutdown(); + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // SHUTDOWN + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // SHUTDOWN_ACK + return; + case StartingState::kReceivedInit: + peer.Connect(); + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT + return; + case StartingState::kReceivedCookieEcho: + peer.Connect(); + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT_ACK + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ECHO + return; + case 
StartingState::kReceivedShutdown: + socket.Connect(); + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK + peer.Shutdown(); + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // SHUTDOWN + return; + case StartingState::kReceivedShutdownComplete: + socket.Connect(); + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // INIT + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // INIT_ACK + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // COOKIE_ECHO + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // COOKIE_ACK + peer.Shutdown(); + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // SHUTDOWN + peer.ReceivePacket(socket_cb.ConsumeSentPacket()); // SHUTDOWN_ACK + socket.ReceivePacket(peer_cb.ConsumeSentPacket()); // SHUTDOWN_COMPLETE + return; + case StartingState::kNumberOfStates: + RTC_CHECK(false); + return; + } +} + +void MakeDataChunk(FuzzState& state, SctpPacket::Builder& b) { + DataChunk::Options options; + options.is_unordered = IsUnordered(state.GetByte() != 0); + options.is_beginning = Data::IsBeginning(state.GetByte() != 0); + options.is_end = Data::IsEnd(state.GetByte() != 0); + b.Add(DataChunk(state.GetNextTSN(), StreamID(state.GetByte()), + SSN(state.GetByte()), PPID(53), std::vector(10), + options)); +} + +void MakeInitChunk(FuzzState& state, SctpPacket::Builder& b) { + Parameters::Builder builder; + builder.Add(ForwardTsnSupportedParameter()); + + b.Add(InitChunk(VerificationTag(kRandomValue), 10000, 1000, 1000, + TSN(kRandomValue), builder.Build())); +} + +void MakeInitAckChunk(FuzzState& state, SctpPacket::Builder& b) { + Parameters::Builder builder; + builder.Add(ForwardTsnSupportedParameter()); + + uint8_t state_cookie[] = {1, 2, 3, 4, 5}; + Parameters::Builder params_builder = + 
Parameters::Builder().Add(StateCookieParameter(state_cookie)); + + b.Add(InitAckChunk(VerificationTag(kRandomValue), 10000, 1000, 1000, + TSN(kRandomValue), builder.Build())); +} + +void MakeSackChunk(FuzzState& state, SctpPacket::Builder& b) { + std::vector gap_ack_blocks; + uint16_t last_end = 0; + while (gap_ack_blocks.size() < 20) { + uint8_t delta_start = state.GetByte(); + if (delta_start < 0x80) { + break; + } + uint8_t delta_end = state.GetByte(); + + uint16_t start = last_end + delta_start; + uint16_t end = start + delta_end; + last_end = end; + gap_ack_blocks.emplace_back(start, end); + } + + TSN cum_ack_tsn(kRandomValue + state.GetByte()); + b.Add(SackChunk(cum_ack_tsn, 10000, std::move(gap_ack_blocks), {})); +} + +void MakeHeartbeatRequestChunk(FuzzState& state, SctpPacket::Builder& b) { + uint8_t info[] = {1, 2, 3, 4, 5}; + b.Add(HeartbeatRequestChunk( + Parameters::Builder().Add(HeartbeatInfoParameter(info)).Build())); +} + +void MakeHeartbeatAckChunk(FuzzState& state, SctpPacket::Builder& b) { + std::vector info(8); + b.Add(HeartbeatRequestChunk( + Parameters::Builder().Add(HeartbeatInfoParameter(info)).Build())); +} + +void MakeAbortChunk(FuzzState& state, SctpPacket::Builder& b) { + b.Add(AbortChunk( + /*filled_in_verification_tag=*/true, + Parameters::Builder().Add(UserInitiatedAbortCause("Fuzzing")).Build())); +} + +void MakeErrorChunk(FuzzState& state, SctpPacket::Builder& b) { + b.Add(ErrorChunk( + Parameters::Builder().Add(ProtocolViolationCause("Fuzzing")).Build())); +} + +void MakeCookieEchoChunk(FuzzState& state, SctpPacket::Builder& b) { + std::vector cookie(StateCookie::kCookieSize); + b.Add(CookieEchoChunk(cookie)); +} + +void MakeCookieAckChunk(FuzzState& state, SctpPacket::Builder& b) { + b.Add(CookieAckChunk()); +} + +void MakeShutdownChunk(FuzzState& state, SctpPacket::Builder& b) { + b.Add(ShutdownChunk(state.GetNextTSN())); +} + +void MakeShutdownAckChunk(FuzzState& state, SctpPacket::Builder& b) { + b.Add(ShutdownAckChunk()); +} + 
+void MakeShutdownCompleteChunk(FuzzState& state, SctpPacket::Builder& b) { + b.Add(ShutdownCompleteChunk(false)); +} + +void MakeReConfigChunk(FuzzState& state, SctpPacket::Builder& b) { + std::vector streams = {StreamID(state.GetByte())}; + Parameters::Builder params_builder = + Parameters::Builder().Add(OutgoingSSNResetRequestParameter( + ReconfigRequestSN(kRandomValue), ReconfigRequestSN(kRandomValue), + state.GetNextTSN(), streams)); + b.Add(ReConfigChunk(params_builder.Build())); +} + +void MakeForwardTsnChunk(FuzzState& state, SctpPacket::Builder& b) { + std::vector skipped_streams; + for (;;) { + uint8_t stream = state.GetByte(); + if (skipped_streams.size() > 20 || stream < 0x80) { + break; + } + skipped_streams.emplace_back(StreamID(stream), SSN(state.GetByte())); + } + b.Add(ForwardTsnChunk(state.GetNextTSN(), std::move(skipped_streams))); +} + +void MakeIDataChunk(FuzzState& state, SctpPacket::Builder& b) { + DataChunk::Options options; + options.is_unordered = IsUnordered(state.GetByte() != 0); + options.is_beginning = Data::IsBeginning(state.GetByte() != 0); + options.is_end = Data::IsEnd(state.GetByte() != 0); + b.Add(IDataChunk(state.GetNextTSN(), StreamID(state.GetByte()), + state.GetNextMID(), PPID(53), FSN(0), + std::vector(10), options)); +} + +void MakeIForwardTsnChunk(FuzzState& state, SctpPacket::Builder& b) { + std::vector skipped_streams; + for (;;) { + uint8_t stream = state.GetByte(); + if (skipped_streams.size() > 20 || stream < 0x80) { + break; + } + skipped_streams.emplace_back(StreamID(stream), SSN(state.GetByte())); + } + b.Add(IForwardTsnChunk(state.GetNextTSN(), std::move(skipped_streams))); +} + +class RandomFuzzedChunk : public Chunk { + public: + explicit RandomFuzzedChunk(FuzzState& state) : state_(state) {} + + void SerializeTo(std::vector& out) const override { + size_t bytes = state_.GetByte(); + for (size_t i = 0; i < bytes; ++i) { + out.push_back(state_.GetByte()); + } + } + + std::string ToString() const override { return 
std::string("RANDOM_FUZZED"); } + + private: + FuzzState& state_; +}; + +void MakeChunkWithRandomContent(FuzzState& state, SctpPacket::Builder& b) { + b.Add(RandomFuzzedChunk(state)); +} + +std::vector GeneratePacket(FuzzState& state) { + DcSctpOptions options; + // Setting a fixed limit to not be dependent on the defaults, which may + // change. + options.mtu = 2048; + SctpPacket::Builder builder(VerificationTag(kRandomValue), options); + + // The largest expected serialized chunk, as created by fuzzers. + static constexpr size_t kMaxChunkSize = 256; + + for (int i = 0; i < 5 && builder.bytes_remaining() > kMaxChunkSize; ++i) { + switch (state.GetByte()) { + case 1: + MakeDataChunk(state, builder); + break; + case 2: + MakeInitChunk(state, builder); + break; + case 3: + MakeInitAckChunk(state, builder); + break; + case 4: + MakeSackChunk(state, builder); + break; + case 5: + MakeHeartbeatRequestChunk(state, builder); + break; + case 6: + MakeHeartbeatAckChunk(state, builder); + break; + case 7: + MakeAbortChunk(state, builder); + break; + case 8: + MakeErrorChunk(state, builder); + break; + case 9: + MakeCookieEchoChunk(state, builder); + break; + case 10: + MakeCookieAckChunk(state, builder); + break; + case 11: + MakeShutdownChunk(state, builder); + break; + case 12: + MakeShutdownAckChunk(state, builder); + break; + case 13: + MakeShutdownCompleteChunk(state, builder); + break; + case 14: + MakeReConfigChunk(state, builder); + break; + case 15: + MakeForwardTsnChunk(state, builder); + break; + case 16: + MakeIDataChunk(state, builder); + break; + case 17: + MakeIForwardTsnChunk(state, builder); + break; + case 18: + MakeChunkWithRandomContent(state, builder); + break; + default: + break; + } + } + std::vector packet = builder.Build(); + return packet; +} +} // namespace + +void FuzzSocket(DcSctpSocketInterface& socket, + FuzzerCallbacks& cb, + rtc::ArrayView data) { + if (data.size() < kMinInputLength || data.size() > kMaxInputLength) { + return; + } + if 
(data[0] >= static_cast(StartingState::kNumberOfStates)) { + return; + } + + // Set the socket in a specified valid starting state + SetSocketState(socket, cb, static_cast(data[0])); + + FuzzState state(data.subview(1)); + + while (!state.empty()) { + switch (state.GetByte()) { + case 1: + // Generate a valid SCTP packet (based on fuzz data) and "receive it". + socket.ReceivePacket(GeneratePacket(state)); + break; + case 2: + socket.Connect(); + break; + case 3: + socket.Shutdown(); + break; + case 4: + socket.Close(); + break; + case 5: { + StreamID streams[] = {StreamID(state.GetByte())}; + socket.ResetStreams(streams); + } break; + case 6: { + uint8_t flags = state.GetByte(); + SendOptions options; + options.unordered = IsUnordered(flags & 0x01); + options.max_retransmissions = + (flags & 0x02) != 0 ? absl::make_optional(0) : absl::nullopt; + size_t payload_exponent = (flags >> 2) % 16; + size_t payload_size = static_cast(1) << payload_exponent; + socket.Send(DcSctpMessage(StreamID(state.GetByte()), PPID(53), + std::vector(payload_size)), + options); + break; + } + case 7: { + // Expire an active timeout/timer. + uint8_t timeout_idx = state.GetByte(); + absl::optional timeout_id = cb.ExpireTimeout(timeout_idx); + if (timeout_id.has_value()) { + socket.HandleTimeout(*timeout_id); + } + break; + } + default: + break; + } + } +} +} // namespace dcsctp_fuzzers +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h new file mode 100644 index 000000000..0a69bf0f8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_FUZZERS_DCSCTP_FUZZERS_H_ +#define NET_DCSCTP_FUZZERS_DCSCTP_FUZZERS_H_ + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "net/dcsctp/public/dcsctp_socket.h" + +namespace dcsctp { +namespace dcsctp_fuzzers { + +// A fake timeout used during fuzzing. +class FuzzerTimeout : public Timeout { + public: + explicit FuzzerTimeout(std::set& active_timeouts) + : active_timeouts_(active_timeouts) {} + + void Start(DurationMs duration_ms, TimeoutID timeout_id) override { + // Start is only allowed to be called on stopped or expired timeouts. + if (timeout_id_.has_value()) { + // It has been started before, but maybe it expired. Ensure that it's not + // running at least. + RTC_DCHECK(active_timeouts_.find(*timeout_id_) == active_timeouts_.end()); + } + timeout_id_ = timeout_id; + RTC_DCHECK(active_timeouts_.insert(timeout_id).second); + } + + void Stop() override { + // Stop is only allowed to be called on active timeouts. Not stopped or + // expired. + RTC_DCHECK(timeout_id_.has_value()); + RTC_DCHECK(active_timeouts_.erase(*timeout_id_) == 1); + timeout_id_ = absl::nullopt; + } + + // A set of all active timeouts, managed by `FuzzerCallbacks`. + std::set& active_timeouts_; + // If present, the timout is active and will expire reported as `timeout_id`. 
+ absl::optional timeout_id_; +}; + +class FuzzerCallbacks : public DcSctpSocketCallbacks { + public: + static constexpr int kRandomValue = 42; + void SendPacket(rtc::ArrayView data) override { + sent_packets_.emplace_back(std::vector(data.begin(), data.end())); + } + std::unique_ptr CreateTimeout() override { + return std::make_unique(active_timeouts_); + } + TimeMs TimeMillis() override { return TimeMs(42); } + uint32_t GetRandomInt(uint32_t low, uint32_t high) override { + return kRandomValue; + } + void OnMessageReceived(DcSctpMessage message) override {} + void OnError(ErrorKind error, absl::string_view message) override {} + void OnAborted(ErrorKind error, absl::string_view message) override {} + void OnConnected() override {} + void OnClosed() override {} + void OnConnectionRestarted() override {} + void OnStreamsResetFailed(rtc::ArrayView outgoing_streams, + absl::string_view reason) override {} + void OnStreamsResetPerformed( + rtc::ArrayView outgoing_streams) override {} + void OnIncomingStreamsReset( + rtc::ArrayView incoming_streams) override {} + void NotifyOutgoingMessageBufferEmpty() override {} + + std::vector ConsumeSentPacket() { + if (sent_packets_.empty()) { + return {}; + } + std::vector ret = sent_packets_.front(); + sent_packets_.pop_front(); + return ret; + } + + // Given an index among the active timeouts, will expire that one. + absl::optional ExpireTimeout(size_t index) { + if (index < active_timeouts_.size()) { + auto it = active_timeouts_.begin(); + std::advance(it, index); + TimeoutID timeout_id = *it; + active_timeouts_.erase(it); + return timeout_id; + } + return absl::nullopt; + } + + private: + // Needs to be ordered, to allow fuzzers to expire timers. + std::set active_timeouts_; + std::deque> sent_packets_; +}; + +// Given some fuzzing `data` will send packets to the socket as well as calling +// API methods. 
+void FuzzSocket(DcSctpSocketInterface& socket, + FuzzerCallbacks& cb, + rtc::ArrayView data); + +} // namespace dcsctp_fuzzers +} // namespace dcsctp +#endif // NET_DCSCTP_FUZZERS_DCSCTP_FUZZERS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_reader.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_reader.h new file mode 100644 index 000000000..b87648886 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_reader.h @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef NET_DCSCTP_PACKET_BOUNDED_BYTE_READER_H_ +#define NET_DCSCTP_PACKET_BOUNDED_BYTE_READER_H_ + +#include + +#include "api/array_view.h" + +namespace dcsctp { + +// TODO(boivie): These generic functions - and possibly this entire class - +// could be a candidate to have added to rtc_base/. They should use compiler +// intrinsics as well. +namespace internal { +// Loads a 8-bit unsigned word at `data`. +inline uint8_t LoadBigEndian8(const uint8_t* data) { + return data[0]; +} + +// Loads a 16-bit unsigned word at `data`. +inline uint16_t LoadBigEndian16(const uint8_t* data) { + return (data[0] << 8) | data[1]; +} + +// Loads a 32-bit unsigned word at `data`. +inline uint32_t LoadBigEndian32(const uint8_t* data) { + return (data[0] << 24) | (data[1] << 16) | (data[2] << 8) | data[3]; +} +} // namespace internal + +// BoundedByteReader wraps an ArrayView and divides it into two parts; A fixed +// size - which is the template parameter - and a variable size, which is what +// remains in `data` after the `FixedSize`. 
+// +// The BoundedByteReader provides methods to load/read big endian numbers from +// the FixedSize portion of the buffer, and these are read with static bounds +// checking, to avoid out-of-bounds accesses without a run-time penalty. +// +// The variable sized portion can either be used to create sub-readers, which +// themselves would provide compile-time bounds-checking, or the entire variable +// sized portion can be retrieved as an ArrayView. +template +class BoundedByteReader { + public: + explicit BoundedByteReader(rtc::ArrayView data) : data_(data) { + RTC_DCHECK(data.size() >= FixedSize); + } + + template + uint8_t Load8() const { + static_assert(offset + sizeof(uint8_t) <= FixedSize, "Out-of-bounds"); + return internal::LoadBigEndian8(&data_[offset]); + } + + template + uint16_t Load16() const { + static_assert(offset + sizeof(uint16_t) <= FixedSize, "Out-of-bounds"); + static_assert((offset % sizeof(uint16_t)) == 0, "Unaligned access"); + return internal::LoadBigEndian16(&data_[offset]); + } + + template + uint32_t Load32() const { + static_assert(offset + sizeof(uint32_t) <= FixedSize, "Out-of-bounds"); + static_assert((offset % sizeof(uint32_t)) == 0, "Unaligned access"); + return internal::LoadBigEndian32(&data_[offset]); + } + + template + BoundedByteReader sub_reader(size_t variable_offset) const { + RTC_DCHECK(FixedSize + variable_offset + SubSize <= data_.size()); + + rtc::ArrayView sub_span = + data_.subview(FixedSize + variable_offset, SubSize); + return BoundedByteReader(sub_span); + } + + size_t variable_data_size() const { return data_.size() - FixedSize; } + + rtc::ArrayView variable_data() const { + return data_.subview(FixedSize, data_.size() - FixedSize); + } + + private: + const rtc::ArrayView data_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_BOUNDED_BYTE_READER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_writer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_writer.h 
new file mode 100644 index 000000000..4e547b052 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/bounded_byte_writer.h @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef NET_DCSCTP_PACKET_BOUNDED_BYTE_WRITER_H_ +#define NET_DCSCTP_PACKET_BOUNDED_BYTE_WRITER_H_ + +#include + +#include "api/array_view.h" + +namespace dcsctp { + +// TODO(boivie): These generic functions - and possibly this entire class - +// could be a candidate to have added to rtc_base/. They should use compiler +// intrinsics as well. +namespace internal { +// Stores a 8-bit unsigned word at `data`. +inline void StoreBigEndian8(uint8_t* data, uint8_t val) { + data[0] = val; +} + +// Stores a 16-bit unsigned word at `data`. +inline void StoreBigEndian16(uint8_t* data, uint16_t val) { + data[0] = val >> 8; + data[1] = val; +} + +// Stores a 32-bit unsigned word at `data`. +inline void StoreBigEndian32(uint8_t* data, uint32_t val) { + data[0] = val >> 24; + data[1] = val >> 16; + data[2] = val >> 8; + data[3] = val; +} +} // namespace internal + +// BoundedByteWriter wraps an ArrayView and divides it into two parts; A fixed +// size - which is the template parameter - and a variable size, which is what +// remains in `data` after the `FixedSize`. +// +// The BoundedByteWriter provides methods to write big endian numbers to the +// FixedSize portion of the buffer, and these are written with static bounds +// checking, to avoid out-of-bounds accesses without a run-time penalty. 
+// +// The variable sized portion can either be used to create sub-writers, which +// themselves would provide compile-time bounds-checking, or data can be copied +// to it. +template +class BoundedByteWriter { + public: + explicit BoundedByteWriter(rtc::ArrayView data) : data_(data) { + RTC_DCHECK(data.size() >= FixedSize); + } + + template + void Store8(uint8_t value) { + static_assert(offset + sizeof(uint8_t) <= FixedSize, "Out-of-bounds"); + internal::StoreBigEndian8(&data_[offset], value); + } + + template + void Store16(uint16_t value) { + static_assert(offset + sizeof(uint16_t) <= FixedSize, "Out-of-bounds"); + static_assert((offset % sizeof(uint16_t)) == 0, "Unaligned access"); + internal::StoreBigEndian16(&data_[offset], value); + } + + template + void Store32(uint32_t value) { + static_assert(offset + sizeof(uint32_t) <= FixedSize, "Out-of-bounds"); + static_assert((offset % sizeof(uint32_t)) == 0, "Unaligned access"); + internal::StoreBigEndian32(&data_[offset], value); + } + + template + BoundedByteWriter sub_writer(size_t variable_offset) { + RTC_DCHECK(FixedSize + variable_offset + SubSize <= data_.size()); + + return BoundedByteWriter( + data_.subview(FixedSize + variable_offset, SubSize)); + } + + void CopyToVariableData(rtc::ArrayView source) { + memcpy(data_.data() + FixedSize, source.data(), + std::min(source.size(), data_.size() - FixedSize)); + } + + private: + rtc::ArrayView data_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_BOUNDED_BYTE_WRITER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/abort_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/abort_chunk.cc new file mode 100644 index 000000000..8348eb96a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/abort_chunk.cc @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/abort_chunk.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.7 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 6 |Reserved |T| Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / zero or more Error Causes / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int AbortChunk::kType; + +absl::optional AbortChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + absl::optional error_causes = + Parameters::Parse(reader->variable_data()); + if (!error_causes.has_value()) { + return absl::nullopt; + } + uint8_t flags = reader->Load8<1>(); + bool filled_in_verification_tag = (flags & (1 << kFlagsBitT)) == 0; + return AbortChunk(filled_in_verification_tag, *std::move(error_causes)); +} + +void AbortChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView error_causes = error_causes_.data(); + BoundedByteWriter writer = AllocateTLV(out, error_causes.size()); + writer.Store8<1>(filled_in_verification_tag_ ? 
0 : (1 << kFlagsBitT)); + writer.CopyToVariableData(error_causes); +} + +std::string AbortChunk::ToString() const { + return "ABORT"; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/abort_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/abort_chunk.h new file mode 100644 index 000000000..1408a75e8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/abort_chunk.h @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_ABORT_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_ABORT_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.7 +struct AbortChunkConfig : ChunkConfig { + static constexpr int kType = 6; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class AbortChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = AbortChunkConfig::kType; + + AbortChunk(bool filled_in_verification_tag, Parameters error_causes) + : filled_in_verification_tag_(filled_in_verification_tag), + error_causes_(std::move(error_causes)) {} + + AbortChunk(AbortChunk&& other) = default; + AbortChunk& operator=(AbortChunk&& other) = default; + + static absl::optional Parse(rtc::ArrayView data); + + void SerializeTo(std::vector& out) const 
override; + std::string ToString() const override; + + bool filled_in_verification_tag() const { + return filled_in_verification_tag_; + } + + const Parameters& error_causes() const { return error_causes_; } + + private: + static constexpr int kFlagsBitT = 0; + bool filled_in_verification_tag_; + Parameters error_causes_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_ABORT_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/chunk.cc new file mode 100644 index 000000000..832ab8228 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/chunk.cc @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/chunk/chunk.h" + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/math.h" +#include "net/dcsctp/packet/chunk/abort_chunk.h" +#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" +#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h" +#include "net/dcsctp/packet/chunk/data_chunk.h" +#include "net/dcsctp/packet/chunk/error_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h" +#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h" +#include "net/dcsctp/packet/chunk/idata_chunk.h" +#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/init_ack_chunk.h" +#include "net/dcsctp/packet/chunk/init_chunk.h" +#include "net/dcsctp/packet/chunk/reconfig_chunk.h" +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_ack_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_complete_chunk.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +template +bool ParseAndPrint(uint8_t chunk_type, + rtc::ArrayView data, + rtc::StringBuilder& sb) { + if (chunk_type == Chunk::kType) { + absl::optional c = Chunk::Parse(data); + if (c.has_value()) { + sb << c->ToString(); + } else { + sb << "Failed to parse chunk of type " << chunk_type; + } + return true; + } + return false; +} + +std::string DebugConvertChunkToString(rtc::ArrayView data) { + rtc::StringBuilder sb; + + if (data.empty()) { + sb << "Failed to parse chunk due to empty data"; + } else { + uint8_t chunk_type = data[0]; + if (!ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, 
sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb) && + !ParseAndPrint(chunk_type, data, sb)) { + sb << "Unhandled chunk type: " << static_cast(chunk_type); + } + } + return sb.Release(); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/chunk.h new file mode 100644 index 000000000..687aa1daa --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/chunk.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_CHUNK_H_ + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// Base class for all SCTP chunks +class Chunk { + public: + Chunk() {} + virtual ~Chunk() = default; + + // Chunks can contain data payloads that shouldn't be copied unnecessarily. 
+ Chunk(Chunk&& other) = default; + Chunk& operator=(Chunk&& other) = default; + Chunk(const Chunk&) = delete; + Chunk& operator=(const Chunk&) = delete; + + // Serializes the chunk to `out`, growing it as necessary. + virtual void SerializeTo(std::vector& out) const = 0; + + // Returns a human readable description of this chunk and its parameters. + virtual std::string ToString() const = 0; +}; + +// Introspects the chunk in `data` and returns a human readable textual +// representation of it, to be used in debugging. +std::string DebugConvertChunkToString(rtc::ArrayView data); + +struct ChunkConfig { + static constexpr int kTypeSizeInBytes = 1; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_ack_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_ack_chunk.cc new file mode 100644 index 000000000..4839969cc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_ack_chunk.cc @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.12 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 11 |Chunk Flags | Length = 4 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int CookieAckChunk::kType; + +absl::optional CookieAckChunk::Parse( + rtc::ArrayView data) { + if (!ParseTLV(data).has_value()) { + return absl::nullopt; + } + return CookieAckChunk(); +} + +void CookieAckChunk::SerializeTo(std::vector& out) const { + AllocateTLV(out); +} + +std::string CookieAckChunk::ToString() const { + return "COOKIE-ACK"; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_ack_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_ack_chunk.h new file mode 100644 index 000000000..f7d4a33f7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_ack_chunk.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_COOKIE_ACK_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_COOKIE_ACK_CHUNK_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.12 +struct CookieAckChunkConfig : ChunkConfig { + static constexpr int kType = 11; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class CookieAckChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = CookieAckChunkConfig::kType; + + CookieAckChunk() {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_COOKIE_ACK_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_echo_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_echo_chunk.cc new file mode 100644 index 000000000..a01d0b13c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_echo_chunk.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.11 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 10 |Chunk Flags | Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / Cookie / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int CookieEchoChunk::kType; + +absl::optional CookieEchoChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + return CookieEchoChunk(reader->variable_data()); +} + +void CookieEchoChunk::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out, cookie_.size()); + writer.CopyToVariableData(cookie_); +} + +std::string CookieEchoChunk::ToString() const { + return "COOKIE-ECHO"; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_echo_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_echo_chunk.h new file mode 100644 index 000000000..8cb80527f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/cookie_echo_chunk.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_COOKIE_ECHO_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_COOKIE_ECHO_CHUNK_H_ +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.11 +struct CookieEchoChunkConfig : ChunkConfig { + static constexpr int kType = 10; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class CookieEchoChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = CookieEchoChunkConfig::kType; + + explicit CookieEchoChunk(rtc::ArrayView cookie) + : cookie_(cookie.begin(), cookie.end()) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + rtc::ArrayView cookie() const { return cookie_; } + + private: + std::vector cookie_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_COOKIE_ECHO_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_chunk.cc new file mode 100644 index 000000000..cf65f53d2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_chunk.cc @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/chunk/data_chunk.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/chunk/data_common.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.1 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 0 | Reserved|U|B|E| Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | TSN | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Identifier S | Stream Sequence Number n | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Payload Protocol Identifier | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / User Data (seq n of Stream S) / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int DataChunk::kType; + +absl::optional DataChunk::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + uint8_t flags = reader->Load8<1>(); + TSN tsn(reader->Load32<4>()); + StreamID stream_identifier(reader->Load16<8>()); + SSN ssn(reader->Load16<10>()); + PPID ppid(reader->Load32<12>()); + + Options options; + options.is_end = Data::IsEnd((flags & (1 << kFlagsBitEnd)) != 0); + options.is_beginning = + Data::IsBeginning((flags & (1 << kFlagsBitBeginning)) != 0); + options.is_unordered = IsUnordered((flags & (1 << kFlagsBitUnordered)) != 0); + options.immediate_ack = + ImmediateAckFlag((flags & (1 << kFlagsBitImmediateAck)) != 0); + + return DataChunk(tsn, stream_identifier, ssn, ppid, + std::vector(reader->variable_data().begin(), + 
reader->variable_data().end()), + options); +} + +void DataChunk::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out, payload().size()); + + writer.Store8<1>( + (*options().is_end ? (1 << kFlagsBitEnd) : 0) | + (*options().is_beginning ? (1 << kFlagsBitBeginning) : 0) | + (*options().is_unordered ? (1 << kFlagsBitUnordered) : 0) | + (*options().immediate_ack ? (1 << kFlagsBitImmediateAck) : 0)); + writer.Store32<4>(*tsn()); + writer.Store16<8>(*stream_id()); + writer.Store16<10>(*ssn()); + writer.Store32<12>(*ppid()); + + writer.CopyToVariableData(payload()); +} + +std::string DataChunk::ToString() const { + rtc::StringBuilder sb; + sb << "DATA, type=" << (options().is_unordered ? "unordered" : "ordered") + << "::" + << (*options().is_beginning && *options().is_end + ? "complete" + : *options().is_beginning ? "first" + : *options().is_end ? "last" : "middle") + << ", tsn=" << *tsn() << ", stream_id=" << *stream_id() + << ", ppid=" << *ppid() << ", length=" << payload().size(); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_chunk.h new file mode 100644 index 000000000..12bb05f2c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_chunk.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_DATA_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_DATA_CHUNK_H_ +#include +#include + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/chunk/data_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.1 +struct DataChunkConfig : ChunkConfig { + static constexpr int kType = 0; + static constexpr size_t kHeaderSize = 16; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class DataChunk : public AnyDataChunk, public TLVTrait { + public: + static constexpr int kType = DataChunkConfig::kType; + + // Exposed to allow the retransmission queue to make room for the correct + // header size. + static constexpr size_t kHeaderSize = DataChunkConfig::kHeaderSize; + + DataChunk(TSN tsn, + StreamID stream_id, + SSN ssn, + PPID ppid, + std::vector payload, + const Options& options) + : AnyDataChunk(tsn, + stream_id, + ssn, + MID(0), + FSN(0), + ppid, + std::move(payload), + options) {} + + DataChunk(TSN tsn, Data&& data, bool immediate_ack) + : AnyDataChunk(tsn, std::move(data), immediate_ack) {} + + static absl::optional Parse(rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_DATA_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_common.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_common.h new file mode 100644 index 000000000..b15a03459 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/data_common.h @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_DATA_COMMON_H_ +#define NET_DCSCTP_PACKET_CHUNK_DATA_COMMON_H_ +#include + +#include +#include + +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/data.h" + +namespace dcsctp { + +// Base class for DataChunk and IDataChunk +class AnyDataChunk : public Chunk { + public: + // Represents the "immediate ack" flag on DATA/I-DATA, from RFC7053. + using ImmediateAckFlag = StrongAlias; + + // Data chunk options. + // See https://tools.ietf.org/html/rfc4960#section-3.3.1 + struct Options { + Data::IsEnd is_end = Data::IsEnd(false); + Data::IsBeginning is_beginning = Data::IsBeginning(false); + IsUnordered is_unordered = IsUnordered(false); + ImmediateAckFlag immediate_ack = ImmediateAckFlag(false); + }; + + TSN tsn() const { return tsn_; } + + Options options() const { + Options options; + options.is_end = data_.is_end; + options.is_beginning = data_.is_beginning; + options.is_unordered = data_.is_unordered; + options.immediate_ack = immediate_ack_; + return options; + } + + StreamID stream_id() const { return data_.stream_id; } + SSN ssn() const { return data_.ssn; } + MID message_id() const { return data_.message_id; } + FSN fsn() const { return data_.fsn; } + PPID ppid() const { return data_.ppid; } + rtc::ArrayView payload() const { return data_.payload; } + + // Extracts the Data from the chunk, as a destructive action. 
+ Data extract() && { return std::move(data_); } + + AnyDataChunk(TSN tsn, + StreamID stream_id, + SSN ssn, + MID message_id, + FSN fsn, + PPID ppid, + std::vector payload, + const Options& options) + : tsn_(tsn), + data_(stream_id, + ssn, + message_id, + fsn, + ppid, + std::move(payload), + options.is_beginning, + options.is_end, + options.is_unordered), + immediate_ack_(options.immediate_ack) {} + + AnyDataChunk(TSN tsn, Data data, bool immediate_ack) + : tsn_(tsn), data_(std::move(data)), immediate_ack_(immediate_ack) {} + + protected: + // Bits in `flags` header field. + static constexpr int kFlagsBitEnd = 0; + static constexpr int kFlagsBitBeginning = 1; + static constexpr int kFlagsBitUnordered = 2; + static constexpr int kFlagsBitImmediateAck = 3; + + private: + TSN tsn_; + Data data_; + ImmediateAckFlag immediate_ack_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_DATA_COMMON_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/error_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/error_chunk.cc new file mode 100644 index 000000000..baac0c558 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/error_chunk.cc @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/chunk/error_chunk.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 9 | Chunk Flags | Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / one or more Error Causes / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ErrorChunk::kType; + +absl::optional ErrorChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + absl::optional error_causes = + Parameters::Parse(reader->variable_data()); + if (!error_causes.has_value()) { + return absl::nullopt; + } + return ErrorChunk(*std::move(error_causes)); +} + +void ErrorChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView error_causes = error_causes_.data(); + BoundedByteWriter writer = AllocateTLV(out, error_causes.size()); + writer.CopyToVariableData(error_causes); +} + +std::string ErrorChunk::ToString() const { + return "ERROR"; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/error_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/error_chunk.h new file mode 100644 index 000000000..96122cff6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/error_chunk.h @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_ERROR_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_ERROR_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10 +struct ErrorChunkConfig : ChunkConfig { + static constexpr int kType = 9; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 4; +}; + +class ErrorChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = ErrorChunkConfig::kType; + + explicit ErrorChunk(Parameters error_causes) + : error_causes_(std::move(error_causes)) {} + + ErrorChunk(ErrorChunk&& other) = default; + ErrorChunk& operator=(ErrorChunk&& other) = default; + + static absl::optional Parse(rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + const Parameters& error_causes() const { return error_causes_; } + + private: + Parameters error_causes_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_ERROR_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.cc new file mode 100644 index 000000000..f01505094 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.cc @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" + +#include +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc3758#section-3.2 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 192 | Flags = 0x00 | Length = Variable | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | New Cumulative TSN | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream-1 | Stream Sequence-1 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ / +// / \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream-N | Stream Sequence-N | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ForwardTsnChunk::kType; + +absl::optional ForwardTsnChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + TSN new_cumulative_tsn(reader->Load32<4>()); + + size_t streams_skipped = + reader->variable_data_size() / kSkippedStreamBufferSize; + + std::vector skipped_streams; + skipped_streams.reserve(streams_skipped); + for (size_t i = 0; i < streams_skipped; ++i) { 
+ BoundedByteReader sub_reader = + reader->sub_reader(i * + kSkippedStreamBufferSize); + + StreamID stream_id(sub_reader.Load16<0>()); + SSN ssn(sub_reader.Load16<2>()); + skipped_streams.emplace_back(stream_id, ssn); + } + return ForwardTsnChunk(new_cumulative_tsn, std::move(skipped_streams)); +} + +void ForwardTsnChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView skipped = skipped_streams(); + size_t variable_size = skipped.size() * kSkippedStreamBufferSize; + BoundedByteWriter writer = AllocateTLV(out, variable_size); + + writer.Store32<4>(*new_cumulative_tsn()); + for (size_t i = 0; i < skipped.size(); ++i) { + BoundedByteWriter sub_writer = + writer.sub_writer(i * + kSkippedStreamBufferSize); + sub_writer.Store16<0>(*skipped[i].stream_id); + sub_writer.Store16<2>(*skipped[i].ssn); + } +} + +std::string ForwardTsnChunk::ToString() const { + rtc::StringBuilder sb; + sb << "FORWARD-TSN, new_cumulative_tsn=" << *new_cumulative_tsn(); + return sb.str(); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.h new file mode 100644 index 000000000..b9ef666f4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_chunk.h @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_FORWARD_TSN_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_FORWARD_TSN_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc3758#section-3.2 +struct ForwardTsnChunkConfig : ChunkConfig { + static constexpr int kType = 192; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 4; +}; + +class ForwardTsnChunk : public AnyForwardTsnChunk, + public TLVTrait { + public: + static constexpr int kType = ForwardTsnChunkConfig::kType; + + ForwardTsnChunk(TSN new_cumulative_tsn, + std::vector skipped_streams) + : AnyForwardTsnChunk(new_cumulative_tsn, std::move(skipped_streams)) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + private: + static constexpr size_t kSkippedStreamBufferSize = 4; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_FORWARD_TSN_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_common.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_common.h new file mode 100644 index 000000000..37bd2aaff --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/forward_tsn_common.h @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_FORWARD_TSN_COMMON_H_ +#define NET_DCSCTP_PACKET_CHUNK_FORWARD_TSN_COMMON_H_ +#include + +#include +#include + +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" + +namespace dcsctp { + +// Base class for both ForwardTsnChunk and IForwardTsnChunk +class AnyForwardTsnChunk : public Chunk { + public: + struct SkippedStream { + SkippedStream(StreamID stream_id, SSN ssn) + : stream_id(stream_id), ssn(ssn), unordered(false), message_id(0) {} + SkippedStream(IsUnordered unordered, StreamID stream_id, MID message_id) + : stream_id(stream_id), + ssn(0), + unordered(unordered), + message_id(message_id) {} + + StreamID stream_id; + + // Set for FORWARD_TSN + SSN ssn; + + // Set for I-FORWARD_TSN + IsUnordered unordered; + MID message_id; + + bool operator==(const SkippedStream& other) const { + return stream_id == other.stream_id && ssn == other.ssn && + unordered == other.unordered && message_id == other.message_id; + } + }; + + AnyForwardTsnChunk(TSN new_cumulative_tsn, + std::vector skipped_streams) + : new_cumulative_tsn_(new_cumulative_tsn), + skipped_streams_(std::move(skipped_streams)) {} + + TSN new_cumulative_tsn() const { return new_cumulative_tsn_; } + + rtc::ArrayView skipped_streams() const { + return skipped_streams_; + } + + private: + TSN new_cumulative_tsn_; + std::vector skipped_streams_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_FORWARD_TSN_COMMON_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc new file mode 100644 index 000000000..3cbcd09c7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.6 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 5 | Chunk Flags | Heartbeat Ack Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / Heartbeat Information TLV (Variable-Length) / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int HeartbeatAckChunk::kType; + +absl::optional HeartbeatAckChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + absl::optional parameters = + Parameters::Parse(reader->variable_data()); + if (!parameters.has_value()) { + return absl::nullopt; + } + return HeartbeatAckChunk(*std::move(parameters)); +} + +void HeartbeatAckChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView parameters = parameters_.data(); + BoundedByteWriter writer = AllocateTLV(out, parameters.size()); + writer.CopyToVariableData(parameters); +} + +std::string HeartbeatAckChunk::ToString() const { + return "HEARTBEAT-ACK"; +} +} // namespace dcsctp diff --git 
a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_ack_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_ack_chunk.h new file mode 100644 index 000000000..a6479f78b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_ack_chunk.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_HEARTBEAT_ACK_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_HEARTBEAT_ACK_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.6 +struct HeartbeatAckChunkConfig : ChunkConfig { + static constexpr int kType = 5; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class HeartbeatAckChunk : public Chunk, + public TLVTrait { + public: + static constexpr int kType = HeartbeatAckChunkConfig::kType; + + explicit HeartbeatAckChunk(Parameters parameters) + : parameters_(std::move(parameters)) {} + + HeartbeatAckChunk(HeartbeatAckChunk&& other) = default; + HeartbeatAckChunk& operator=(HeartbeatAckChunk&& other) = default; + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + const Parameters& parameters() const { return 
parameters_; } + + absl::optional info() const { + return parameters_.get(); + } + + private: + Parameters parameters_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_HEARTBEAT_ACK_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc new file mode 100644 index 000000000..d759d6b16 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.5 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 4 | Chunk Flags | Heartbeat Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / Heartbeat Information TLV (Variable-Length) / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int HeartbeatRequestChunk::kType; + +absl::optional HeartbeatRequestChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) 
{ + return absl::nullopt; + } + + absl::optional parameters = + Parameters::Parse(reader->variable_data()); + if (!parameters.has_value()) { + return absl::nullopt; + } + return HeartbeatRequestChunk(*std::move(parameters)); +} + +void HeartbeatRequestChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView parameters = parameters_.data(); + BoundedByteWriter writer = AllocateTLV(out, parameters.size()); + writer.CopyToVariableData(parameters); +} + +std::string HeartbeatRequestChunk::ToString() const { + return "HEARTBEAT"; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_request_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_request_chunk.h new file mode 100644 index 000000000..fe2ce1950 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/heartbeat_request_chunk.h @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_HEARTBEAT_REQUEST_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_HEARTBEAT_REQUEST_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { +// https://tools.ietf.org/html/rfc4960#section-3.3.5 +struct HeartbeatRequestChunkConfig : ChunkConfig { + static constexpr int kType = 4; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class HeartbeatRequestChunk : public Chunk, + public TLVTrait { + public: + static constexpr int kType = HeartbeatRequestChunkConfig::kType; + + explicit HeartbeatRequestChunk(Parameters parameters) + : parameters_(std::move(parameters)) {} + + HeartbeatRequestChunk(HeartbeatRequestChunk&& other) = default; + HeartbeatRequestChunk& operator=(HeartbeatRequestChunk&& other) = default; + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + const Parameters& parameters() const { return parameters_; } + Parameters extract_parameters() && { return std::move(parameters_); } + absl::optional info() const { + return parameters_.get(); + } + + private: + Parameters parameters_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_HEARTBEAT_REQUEST_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/idata_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/idata_chunk.cc new file mode 100644 index 000000000..378c52790 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/idata_chunk.cc @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/idata_chunk.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/chunk/data_common.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc8260#section-2.1 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 64 | Res |I|U|B|E| Length = Variable | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | TSN | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Identifier | Reserved | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Message Identifier | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Payload Protocol Identifier / Fragment Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / User Data / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int IDataChunk::kType; + +absl::optional IDataChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + uint8_t flags = reader->Load8<1>(); + TSN tsn(reader->Load32<4>()); + StreamID stream_identifier(reader->Load16<8>()); + MID message_id(reader->Load32<12>()); + uint32_t ppid_or_fsn = reader->Load32<16>(); + + Options options; + 
options.is_end = Data::IsEnd((flags & (1 << kFlagsBitEnd)) != 0); + options.is_beginning = + Data::IsBeginning((flags & (1 << kFlagsBitBeginning)) != 0); + options.is_unordered = IsUnordered((flags & (1 << kFlagsBitUnordered)) != 0); + options.immediate_ack = + ImmediateAckFlag((flags & (1 << kFlagsBitImmediateAck)) != 0); + + return IDataChunk(tsn, stream_identifier, message_id, + PPID(options.is_beginning ? ppid_or_fsn : 0), + FSN(options.is_beginning ? 0 : ppid_or_fsn), + std::vector(reader->variable_data().begin(), + reader->variable_data().end()), + options); +} + +void IDataChunk::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out, payload().size()); + + writer.Store8<1>( + (*options().is_end ? (1 << kFlagsBitEnd) : 0) | + (*options().is_beginning ? (1 << kFlagsBitBeginning) : 0) | + (*options().is_unordered ? (1 << kFlagsBitUnordered) : 0) | + (*options().immediate_ack ? (1 << kFlagsBitImmediateAck) : 0)); + writer.Store32<4>(*tsn()); + writer.Store16<8>(*stream_id()); + writer.Store32<12>(*message_id()); + writer.Store32<16>(options().is_beginning ? *ppid() : *fsn()); + writer.CopyToVariableData(payload()); +} + +std::string IDataChunk::ToString() const { + rtc::StringBuilder sb; + sb << "I-DATA, type=" << (options().is_unordered ? "unordered" : "ordered") + << "::" + << (*options().is_beginning && *options().is_end + ? "complete" + : *options().is_beginning ? "first" + : *options().is_end ? 
"last" : "middle") + << ", tsn=" << *tsn() << ", stream_id=" << *stream_id() + << ", message_id=" << *message_id(); + + if (*options().is_beginning) { + sb << ", ppid=" << *ppid(); + } else { + sb << ", fsn=" << *fsn(); + } + sb << ", length=" << payload().size(); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/idata_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/idata_chunk.h new file mode 100644 index 000000000..8cdf2a1fc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/idata_chunk.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_IDATA_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_IDATA_CHUNK_H_ +#include +#include + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/chunk/data_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc8260#section-2.1 +struct IDataChunkConfig : ChunkConfig { + static constexpr int kType = 64; + static constexpr size_t kHeaderSize = 20; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class IDataChunk : public AnyDataChunk, public TLVTrait { + public: + static constexpr int kType = IDataChunkConfig::kType; + + // Exposed to allow the retransmission queue to make room for the correct + // header size. 
+ static constexpr size_t kHeaderSize = IDataChunkConfig::kHeaderSize; + IDataChunk(TSN tsn, + StreamID stream_id, + MID message_id, + PPID ppid, + FSN fsn, + std::vector payload, + const Options& options) + : AnyDataChunk(tsn, + stream_id, + SSN(0), + message_id, + fsn, + ppid, + std::move(payload), + options) {} + + explicit IDataChunk(TSN tsn, Data&& data, bool immediate_ack) + : AnyDataChunk(tsn, std::move(data), immediate_ack) {} + + static absl::optional Parse(rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_IDATA_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc new file mode 100644 index 000000000..a647a8bf8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" + +#include +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc8260#section-2.3.1 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 194 | Flags = 0x00 | Length = Variable | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | New Cumulative TSN | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Identifier | Reserved |U| +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Message Identifier | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / / +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Identifier | Reserved |U| +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Message Identifier | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int IForwardTsnChunk::kType; + +absl::optional IForwardTsnChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + TSN new_cumulative_tsn(reader->Load32<4>()); + + size_t streams_skipped = + reader->variable_data_size() / kSkippedStreamBufferSize; + std::vector skipped_streams; + skipped_streams.reserve(streams_skipped); + size_t offset = 0; + for (size_t i = 0; i < streams_skipped; ++i) { + BoundedByteReader sub_reader = + reader->sub_reader(offset); + + StreamID stream_id(sub_reader.Load16<0>()); + IsUnordered 
unordered(sub_reader.Load8<3>() & 0x01); + MID message_id(sub_reader.Load32<4>()); + skipped_streams.emplace_back(unordered, stream_id, message_id); + offset += kSkippedStreamBufferSize; + } + RTC_DCHECK(offset == reader->variable_data_size()); + return IForwardTsnChunk(new_cumulative_tsn, std::move(skipped_streams)); +} + +void IForwardTsnChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView skipped = skipped_streams(); + size_t variable_size = skipped.size() * kSkippedStreamBufferSize; + BoundedByteWriter writer = AllocateTLV(out, variable_size); + + writer.Store32<4>(*new_cumulative_tsn()); + size_t offset = 0; + for (size_t i = 0; i < skipped.size(); ++i) { + BoundedByteWriter sub_writer = + writer.sub_writer(offset); + + sub_writer.Store16<0>(*skipped[i].stream_id); + sub_writer.Store8<3>(skipped[i].unordered ? 1 : 0); + sub_writer.Store32<4>(*skipped[i].message_id); + offset += kSkippedStreamBufferSize; + } + RTC_DCHECK(offset == variable_size); +} + +std::string IForwardTsnChunk::ToString() const { + rtc::StringBuilder sb; + sb << "I-FORWARD-TSN, new_cumulative_tsn=" << *new_cumulative_tsn(); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/iforward_tsn_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/iforward_tsn_chunk.h new file mode 100644 index 000000000..54d23f7a8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/iforward_tsn_chunk.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_IFORWARD_TSN_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_IFORWARD_TSN_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc8260#section-2.3.1 +struct IForwardTsnChunkConfig : ChunkConfig { + static constexpr int kType = 194; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 8; +}; + +class IForwardTsnChunk : public AnyForwardTsnChunk, + public TLVTrait { + public: + static constexpr int kType = IForwardTsnChunkConfig::kType; + + IForwardTsnChunk(TSN new_cumulative_tsn, + std::vector skipped_streams) + : AnyForwardTsnChunk(new_cumulative_tsn, std::move(skipped_streams)) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + private: + static constexpr size_t kSkippedStreamBufferSize = 8; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_IFORWARD_TSN_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_ack_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_ack_chunk.cc new file mode 100644 index 000000000..c7ef9da1f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_ack_chunk.cc @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/chunk/init_ack_chunk.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_format.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.3 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 2 | Chunk Flags | Chunk Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Initiate Tag | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Advertised Receiver Window Credit | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Number of Outbound Streams | Number of Inbound Streams | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Initial TSN | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / Optional/Variable-Length Parameters / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int InitAckChunk::kType; + +absl::optional InitAckChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + VerificationTag initiate_tag(reader->Load32<4>()); + uint32_t a_rwnd = reader->Load32<8>(); + uint16_t nbr_outbound_streams = reader->Load16<12>(); + uint16_t nbr_inbound_streams = reader->Load16<14>(); + TSN initial_tsn(reader->Load32<16>()); + absl::optional parameters = + Parameters::Parse(reader->variable_data()); + if (!parameters.has_value()) { + return absl::nullopt; + } + return InitAckChunk(initiate_tag, a_rwnd, nbr_outbound_streams, + nbr_inbound_streams, initial_tsn, 
*std::move(parameters)); +} + +void InitAckChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView parameters = parameters_.data(); + BoundedByteWriter writer = AllocateTLV(out, parameters.size()); + + writer.Store32<4>(*initiate_tag_); + writer.Store32<8>(a_rwnd_); + writer.Store16<12>(nbr_outbound_streams_); + writer.Store16<14>(nbr_inbound_streams_); + writer.Store32<16>(*initial_tsn_); + writer.CopyToVariableData(parameters); +} + +std::string InitAckChunk::ToString() const { + return rtc::StringFormat("INIT_ACK, initiate_tag=0x%0x, initial_tsn=%u", + *initiate_tag(), *initial_tsn()); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_ack_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_ack_chunk.h new file mode 100644 index 000000000..6fcf64b2e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_ack_chunk.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_INIT_ACK_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_INIT_ACK_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.3 +struct InitAckChunkConfig : ChunkConfig { + static constexpr int kType = 2; + static constexpr size_t kHeaderSize = 20; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class InitAckChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = InitAckChunkConfig::kType; + + InitAckChunk(VerificationTag initiate_tag, + uint32_t a_rwnd, + uint16_t nbr_outbound_streams, + uint16_t nbr_inbound_streams, + TSN initial_tsn, + Parameters parameters) + : initiate_tag_(initiate_tag), + a_rwnd_(a_rwnd), + nbr_outbound_streams_(nbr_outbound_streams), + nbr_inbound_streams_(nbr_inbound_streams), + initial_tsn_(initial_tsn), + parameters_(std::move(parameters)) {} + + InitAckChunk(InitAckChunk&& other) = default; + InitAckChunk& operator=(InitAckChunk&& other) = default; + + static absl::optional Parse(rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + VerificationTag initiate_tag() const { return initiate_tag_; } + uint32_t a_rwnd() const { return a_rwnd_; } + uint16_t nbr_outbound_streams() const { return nbr_outbound_streams_; } + uint16_t nbr_inbound_streams() const { return nbr_inbound_streams_; } + TSN initial_tsn() const { return initial_tsn_; } + const Parameters& parameters() const { return parameters_; } + + private: + VerificationTag initiate_tag_; + uint32_t a_rwnd_; + uint16_t nbr_outbound_streams_; + uint16_t nbr_inbound_streams_; + TSN initial_tsn_; + Parameters parameters_; +}; + +} // namespace dcsctp + +#endif // 
NET_DCSCTP_PACKET_CHUNK_INIT_ACK_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_chunk.cc new file mode 100644 index 000000000..803010707 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_chunk.cc @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/init_chunk.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_format.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.2 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 1 | Chunk Flags | Chunk Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Initiate Tag | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Advertised Receiver Window Credit (a_rwnd) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Number of Outbound Streams | Number of Inbound Streams | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Initial TSN | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / Optional/Variable-Length Parameters / +// 
\ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int InitChunk::kType; + +absl::optional InitChunk::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + VerificationTag initiate_tag(reader->Load32<4>()); + uint32_t a_rwnd = reader->Load32<8>(); + uint16_t nbr_outbound_streams = reader->Load16<12>(); + uint16_t nbr_inbound_streams = reader->Load16<14>(); + TSN initial_tsn(reader->Load32<16>()); + + absl::optional parameters = + Parameters::Parse(reader->variable_data()); + if (!parameters.has_value()) { + return absl::nullopt; + } + return InitChunk(initiate_tag, a_rwnd, nbr_outbound_streams, + nbr_inbound_streams, initial_tsn, *std::move(parameters)); +} + +void InitChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView parameters = parameters_.data(); + BoundedByteWriter writer = AllocateTLV(out, parameters.size()); + + writer.Store32<4>(*initiate_tag_); + writer.Store32<8>(a_rwnd_); + writer.Store16<12>(nbr_outbound_streams_); + writer.Store16<14>(nbr_inbound_streams_); + writer.Store32<16>(*initial_tsn_); + + writer.CopyToVariableData(parameters); +} + +std::string InitChunk::ToString() const { + return rtc::StringFormat("INIT, initiate_tag=0x%0x, initial_tsn=%u", + *initiate_tag(), *initial_tsn()); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_chunk.h new file mode 100644 index 000000000..38f9994ca --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/init_chunk.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_INIT_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_INIT_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.2 +struct InitChunkConfig : ChunkConfig { + static constexpr int kType = 1; + static constexpr size_t kHeaderSize = 20; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class InitChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = InitChunkConfig::kType; + + InitChunk(VerificationTag initiate_tag, + uint32_t a_rwnd, + uint16_t nbr_outbound_streams, + uint16_t nbr_inbound_streams, + TSN initial_tsn, + Parameters parameters) + : initiate_tag_(initiate_tag), + a_rwnd_(a_rwnd), + nbr_outbound_streams_(nbr_outbound_streams), + nbr_inbound_streams_(nbr_inbound_streams), + initial_tsn_(initial_tsn), + parameters_(std::move(parameters)) {} + + InitChunk(InitChunk&& other) = default; + InitChunk& operator=(InitChunk&& other) = default; + + static absl::optional Parse(rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + VerificationTag initiate_tag() const { return initiate_tag_; } + uint32_t a_rwnd() const { return a_rwnd_; } + uint16_t nbr_outbound_streams() const { return nbr_outbound_streams_; } + uint16_t nbr_inbound_streams() const { return nbr_inbound_streams_; } + TSN initial_tsn() const { return initial_tsn_; } + const Parameters& parameters() const { return parameters_; } + + private: + VerificationTag initiate_tag_; + uint32_t a_rwnd_; + uint16_t nbr_outbound_streams_; + uint16_t nbr_inbound_streams_; + TSN initial_tsn_; + 
Parameters parameters_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_INIT_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/reconfig_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/reconfig_chunk.cc new file mode 100644 index 000000000..f39f3b619 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/reconfig_chunk.cc @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/reconfig_chunk.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-3.1 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 130 | Chunk Flags | Chunk Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / Re-configuration Parameter / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / Re-configuration Parameter (optional) / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ReConfigChunk::kType; + +absl::optional ReConfigChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + 
absl::optional parameters = + Parameters::Parse(reader->variable_data()); + if (!parameters.has_value()) { + return absl::nullopt; + } + + return ReConfigChunk(*std::move(parameters)); +} + +void ReConfigChunk::SerializeTo(std::vector& out) const { + rtc::ArrayView parameters = parameters_.data(); + BoundedByteWriter writer = AllocateTLV(out, parameters.size()); + writer.CopyToVariableData(parameters); +} + +std::string ReConfigChunk::ToString() const { + return "RE-CONFIG"; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/reconfig_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/reconfig_chunk.h new file mode 100644 index 000000000..9d2539a51 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/reconfig_chunk.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_RECONFIG_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_RECONFIG_CHUNK_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-3.1 +struct ReConfigChunkConfig : ChunkConfig { + static constexpr int kType = 130; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class ReConfigChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = ReConfigChunkConfig::kType; + + explicit ReConfigChunk(Parameters parameters) + : parameters_(std::move(parameters)) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + const Parameters& parameters() const { return parameters_; } + Parameters extract_parameters() { return std::move(parameters_); } + + private: + Parameters parameters_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_RECONFIG_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/sack_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/sack_chunk.cc new file mode 100644 index 000000000..d80e43008 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/sack_chunk.cc @@ -0,0 +1,155 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/chunk/sack_chunk.h" + +#include + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/str_join.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.4 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 3 |Chunk Flags | Chunk Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cumulative TSN Ack | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Advertised Receiver Window Credit (a_rwnd) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Number of Gap Ack Blocks = N | Number of Duplicate TSNs = X | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Gap Ack Block #1 Start | Gap Ack Block #1 End | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / / +// \ ... \ +// / / +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Gap Ack Block #N Start | Gap Ack Block #N End | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Duplicate TSN 1 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / / +// \ ... 
\ +// / / +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Duplicate TSN X | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int SackChunk::kType; + +absl::optional SackChunk::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + TSN tsn_ack(reader->Load32<4>()); + uint32_t a_rwnd = reader->Load32<8>(); + uint16_t nbr_of_gap_blocks = reader->Load16<12>(); + uint16_t nbr_of_dup_tsns = reader->Load16<14>(); + + if (reader->variable_data_size() != nbr_of_gap_blocks * kGapAckBlockSize + + nbr_of_dup_tsns * kDupTsnBlockSize) { + RTC_DLOG(LS_WARNING) << "Invalid number of gap blocks or duplicate TSNs"; + return absl::nullopt; + } + + std::vector gap_ack_blocks; + gap_ack_blocks.reserve(nbr_of_gap_blocks); + size_t offset = 0; + for (int i = 0; i < nbr_of_gap_blocks; ++i) { + BoundedByteReader sub_reader = + reader->sub_reader(offset); + + uint16_t start = sub_reader.Load16<0>(); + uint16_t end = sub_reader.Load16<2>(); + gap_ack_blocks.emplace_back(start, end); + offset += kGapAckBlockSize; + } + + std::set duplicate_tsns; + for (int i = 0; i < nbr_of_dup_tsns; ++i) { + BoundedByteReader sub_reader = + reader->sub_reader(offset); + + duplicate_tsns.insert(TSN(sub_reader.Load32<0>())); + offset += kDupTsnBlockSize; + } + RTC_DCHECK(offset == reader->variable_data_size()); + + return SackChunk(tsn_ack, a_rwnd, gap_ack_blocks, duplicate_tsns); +} + +void SackChunk::SerializeTo(std::vector& out) const { + int nbr_of_gap_blocks = gap_ack_blocks_.size(); + int nbr_of_dup_tsns = duplicate_tsns_.size(); + size_t variable_size = + nbr_of_gap_blocks * kGapAckBlockSize + nbr_of_dup_tsns * kDupTsnBlockSize; + BoundedByteWriter writer = AllocateTLV(out, variable_size); + + writer.Store32<4>(*cumulative_tsn_ack_); + writer.Store32<8>(a_rwnd_); + writer.Store16<12>(nbr_of_gap_blocks); + writer.Store16<14>(nbr_of_dup_tsns); + + size_t offset = 0; + 
for (int i = 0; i < nbr_of_gap_blocks; ++i) { + BoundedByteWriter sub_writer = + writer.sub_writer(offset); + + sub_writer.Store16<0>(gap_ack_blocks_[i].start); + sub_writer.Store16<2>(gap_ack_blocks_[i].end); + offset += kGapAckBlockSize; + } + + for (TSN tsn : duplicate_tsns_) { + BoundedByteWriter sub_writer = + writer.sub_writer(offset); + + sub_writer.Store32<0>(*tsn); + offset += kDupTsnBlockSize; + } + + RTC_DCHECK(offset == variable_size); +} + +std::string SackChunk::ToString() const { + rtc::StringBuilder sb; + sb << "SACK, cum_ack_tsn=" << *cumulative_tsn_ack() + << ", a_rwnd=" << a_rwnd(); + for (const GapAckBlock& gap : gap_ack_blocks_) { + uint32_t first = *cumulative_tsn_ack_ + gap.start; + uint32_t last = *cumulative_tsn_ack_ + gap.end; + sb << ", gap=" << first << "--" << last; + } + if (!duplicate_tsns_.empty()) { + sb << ", dup_tsns=" + << StrJoin(duplicate_tsns(), ",", + [](rtc::StringBuilder& sb, TSN tsn) { sb << *tsn; }); + } + + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/sack_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/sack_chunk.h new file mode 100644 index 000000000..e6758fa33 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/sack_chunk.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_CHUNK_SACK_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_SACK_CHUNK_H_ +#include + +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.4 +struct SackChunkConfig : ChunkConfig { + static constexpr int kType = 3; + static constexpr size_t kHeaderSize = 16; + static constexpr size_t kVariableLengthAlignment = 4; +}; + +class SackChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = SackChunkConfig::kType; + + struct GapAckBlock { + GapAckBlock(uint16_t start, uint16_t end) : start(start), end(end) {} + + uint16_t start; + uint16_t end; + + bool operator==(const GapAckBlock& other) const { + return start == other.start && end == other.end; + } + }; + + SackChunk(TSN cumulative_tsn_ack, + uint32_t a_rwnd, + std::vector gap_ack_blocks, + std::set duplicate_tsns) + : cumulative_tsn_ack_(cumulative_tsn_ack), + a_rwnd_(a_rwnd), + gap_ack_blocks_(std::move(gap_ack_blocks)), + duplicate_tsns_(std::move(duplicate_tsns)) {} + static absl::optional Parse(rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + TSN cumulative_tsn_ack() const { return cumulative_tsn_ack_; } + uint32_t a_rwnd() const { return a_rwnd_; } + rtc::ArrayView gap_ack_blocks() const { + return gap_ack_blocks_; + } + const std::set& duplicate_tsns() const { return duplicate_tsns_; } + + private: + static constexpr size_t kGapAckBlockSize = 4; + static constexpr size_t kDupTsnBlockSize = 4; + + const TSN cumulative_tsn_ack_; + const uint32_t a_rwnd_; + std::vector gap_ack_blocks_; + std::set duplicate_tsns_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_SACK_CHUNK_H_ diff --git 
a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc new file mode 100644 index 000000000..d42aceead --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/shutdown_ack_chunk.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.9 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 8 |Chunk Flags | Length = 4 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ShutdownAckChunk::kType; + +absl::optional ShutdownAckChunk::Parse( + rtc::ArrayView data) { + if (!ParseTLV(data).has_value()) { + return absl::nullopt; + } + return ShutdownAckChunk(); +} + +void ShutdownAckChunk::SerializeTo(std::vector& out) const { + AllocateTLV(out); +} + +std::string ShutdownAckChunk::ToString() const { + return "SHUTDOWN-ACK"; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_ack_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_ack_chunk.h new file mode 100644 index 000000000..29c1a98be --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_ack_chunk.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_ACK_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_ACK_CHUNK_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.9 +struct ShutdownAckChunkConfig : ChunkConfig { + static constexpr int kType = 8; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class ShutdownAckChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = ShutdownAckChunkConfig::kType; + + ShutdownAckChunk() {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_ACK_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_chunk.cc new file mode 100644 index 000000000..59f806f7f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_chunk.cc @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/shutdown_chunk.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.8 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 7 | Chunk Flags | Length = 8 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cumulative TSN Ack | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ShutdownChunk::kType; + +absl::optional ShutdownChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + TSN cumulative_tsn_ack(reader->Load32<4>()); + return ShutdownChunk(cumulative_tsn_ack); +} + +void ShutdownChunk::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out); + writer.Store32<4>(*cumulative_tsn_ack_); +} + +std::string ShutdownChunk::ToString() const { + return "SHUTDOWN"; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_chunk.h new file mode 100644 index 000000000..8148cca28 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_chunk.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_CHUNK_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.8 +struct ShutdownChunkConfig : ChunkConfig { + static constexpr int kType = 7; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class ShutdownChunk : public Chunk, public TLVTrait { + public: + static constexpr int kType = ShutdownChunkConfig::kType; + + explicit ShutdownChunk(TSN cumulative_tsn_ack) + : cumulative_tsn_ack_(cumulative_tsn_ack) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + TSN cumulative_tsn_ack() const { return cumulative_tsn_ack_; } + + private: + TSN cumulative_tsn_ack_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc new file mode 100644 index 000000000..3f5485743 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk/shutdown_complete_chunk.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.13 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 14 |Reserved |T| Length = 4 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ShutdownCompleteChunk::kType; + +absl::optional ShutdownCompleteChunk::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + uint8_t flags = reader->Load8<1>(); + bool tag_reflected = (flags & (1 << kFlagsBitT)) != 0; + return ShutdownCompleteChunk(tag_reflected); +} + +void ShutdownCompleteChunk::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out); + writer.Store8<1>(tag_reflected_ ? (1 << kFlagsBitT) : 0); +} + +std::string ShutdownCompleteChunk::ToString() const { + return "SHUTDOWN-COMPLETE"; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_complete_chunk.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_complete_chunk.h new file mode 100644 index 000000000..46d28e88d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk/shutdown_complete_chunk.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_COMPLETE_CHUNK_H_ +#define NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_COMPLETE_CHUNK_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.13 +struct ShutdownCompleteChunkConfig : ChunkConfig { + static constexpr int kType = 14; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class ShutdownCompleteChunk : public Chunk, + public TLVTrait { + public: + static constexpr int kType = ShutdownCompleteChunkConfig::kType; + + explicit ShutdownCompleteChunk(bool tag_reflected) + : tag_reflected_(tag_reflected) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + bool tag_reflected() const { return tag_reflected_; } + + private: + static constexpr int kFlagsBitT = 0; + bool tag_reflected_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_SHUTDOWN_COMPLETE_CHUNK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk_validators.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk_validators.cc new file mode 100644 index 000000000..48d351827 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk_validators.cc @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/chunk_validators.h" + +#include +#include +#include + +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "rtc_base/logging.h" + +namespace dcsctp { + +SackChunk ChunkValidators::Clean(SackChunk&& sack) { + if (Validate(sack)) { + return std::move(sack); + } + + RTC_DLOG(LS_WARNING) << "Received SACK is malformed; cleaning it"; + + std::vector gap_ack_blocks; + gap_ack_blocks.reserve(sack.gap_ack_blocks().size()); + + // First: Only keep blocks that are sane + for (const SackChunk::GapAckBlock& gap_ack_block : sack.gap_ack_blocks()) { + if (gap_ack_block.end > gap_ack_block.start) { + gap_ack_blocks.emplace_back(gap_ack_block); + } + } + + // Not more than at most one remaining? Exit early. + if (gap_ack_blocks.size() <= 1) { + return SackChunk(sack.cumulative_tsn_ack(), sack.a_rwnd(), + std::move(gap_ack_blocks), sack.duplicate_tsns()); + } + + // Sort the intervals by their start value, to aid in the merging below. + absl::c_sort(gap_ack_blocks, [&](const SackChunk::GapAckBlock& a, + const SackChunk::GapAckBlock& b) { + return a.start < b.start; + }); + + // Merge overlapping ranges. 
+ std::vector merged; + merged.reserve(gap_ack_blocks.size()); + merged.push_back(gap_ack_blocks[0]); + + for (size_t i = 1; i < gap_ack_blocks.size(); ++i) { + if (merged.back().end + 1 >= gap_ack_blocks[i].start) { + merged.back().end = std::max(merged.back().end, gap_ack_blocks[i].end); + } else { + merged.push_back(gap_ack_blocks[i]); + } + } + + return SackChunk(sack.cumulative_tsn_ack(), sack.a_rwnd(), std::move(merged), + sack.duplicate_tsns()); +} + +bool ChunkValidators::Validate(const SackChunk& sack) { + if (sack.gap_ack_blocks().empty()) { + return true; + } + + // Ensure that gap-ack-blocks are sorted, has an "end" that is not before + // "start" and are non-overlapping and non-adjacent. + uint16_t prev_end = 0; + for (const SackChunk::GapAckBlock& gap_ack_block : sack.gap_ack_blocks()) { + if (gap_ack_block.end < gap_ack_block.start) { + return false; + } + if (gap_ack_block.start <= (prev_end + 1)) { + return false; + } + prev_end = gap_ack_block.end; + } + return true; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk_validators.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk_validators.h new file mode 100644 index 000000000..b11848a16 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/chunk_validators.h @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CHUNK_VALIDATORS_H_ +#define NET_DCSCTP_PACKET_CHUNK_VALIDATORS_H_ + +#include "net/dcsctp/packet/chunk/sack_chunk.h" + +namespace dcsctp { +// Validates and cleans SCTP chunks. 
+class ChunkValidators { + public: + // Given a SackChunk, will return `true` if it's valid, and `false` if not. + static bool Validate(const SackChunk& sack); + + // Given a SackChunk, it will return a cleaned and validated variant of it. + // RFC4960 doesn't say anything about validity of SACKs or if the Gap ACK + // blocks must be sorted, and non-overlapping. While they always are in + // well-behaving implementations, this can't be relied on. + // + // This method internally calls `Validate`, which means that you can always + // pass a SackChunk to this method (valid or not), and use the results. + static SackChunk Clean(SackChunk&& sack); +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CHUNK_VALIDATORS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/crc32c.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/crc32c.cc new file mode 100644 index 000000000..e3f0dc1d1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/crc32c.cc @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/crc32c.h" + +#include + +#include "third_party/crc32c/src/include/crc32c/crc32c.h" + +namespace dcsctp { + +uint32_t GenerateCrc32C(rtc::ArrayView data) { + uint32_t crc32c = crc32c_value(data.data(), data.size()); + + // Byte swapping for little endian byte order: + uint8_t byte0 = crc32c; + uint8_t byte1 = crc32c >> 8; + uint8_t byte2 = crc32c >> 16; + uint8_t byte3 = crc32c >> 24; + crc32c = ((byte0 << 24) | (byte1 << 16) | (byte2 << 8) | byte3); + return crc32c; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/crc32c.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/crc32c.h new file mode 100644 index 000000000..a969e1b26 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/crc32c.h @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_CRC32C_H_ +#define NET_DCSCTP_PACKET_CRC32C_H_ + +#include + +#include "api/array_view.h" + +namespace dcsctp { + +// Generates the CRC32C checksum of `data`. +uint32_t GenerateCrc32C(rtc::ArrayView data); + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_CRC32C_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/data.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/data.h new file mode 100644 index 000000000..f2d2e7490 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/data.h @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_DATA_H_ +#define NET_DCSCTP_PACKET_DATA_H_ + +#include +#include +#include + +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// Represents data that is either received and extracted from a DATA/I-DATA +// chunk, or data that is supposed to be sent, and wrapped in a DATA/I-DATA +// chunk (depending on peer capabilities). +// +// The data wrapped in this structure is actually the same as the DATA/I-DATA +// chunk (actually the union of them), but to avoid having all components be +// aware of the implementation details of the different chunks, this abstraction +// is used instead. A notable difference is also that it doesn't carry a +// Transmission Sequence Number (TSN), as that is not known when a chunk is +// created (assigned late, just when sending), and that the TSNs in DATA/I-DATA +// are wrapped numbers, and within the library, unwrapped sequence numbers are +// preferably used. +struct Data { + // Indicates if a chunk is the first in a fragmented message and maps to the + // "beginning" flag in DATA/I-DATA chunk. + using IsBeginning = StrongAlias; + + // Indicates if a chunk is the last in a fragmented message and maps to the + // "end" flag in DATA/I-DATA chunk. 
+ using IsEnd = StrongAlias; + + Data(StreamID stream_id, + SSN ssn, + MID message_id, + FSN fsn, + PPID ppid, + std::vector payload, + IsBeginning is_beginning, + IsEnd is_end, + IsUnordered is_unordered) + : stream_id(stream_id), + ssn(ssn), + message_id(message_id), + fsn(fsn), + ppid(ppid), + payload(std::move(payload)), + is_beginning(is_beginning), + is_end(is_end), + is_unordered(is_unordered) {} + + // Move-only, to avoid accidental copies. + Data(Data&& other) = default; + Data& operator=(Data&& other) = default; + + // Creates a copy of this `Data` object. + Data Clone() const { + return Data(stream_id, ssn, message_id, fsn, ppid, payload, is_beginning, + is_end, is_unordered); + } + + // The size of this data, which translates to the size of its payload. + size_t size() const { return payload.size(); } + + // Stream Identifier. + StreamID stream_id; + + // Stream Sequence Number (SSN), per stream, for ordered chunks. Defined by + // RFC4960 and used only in DATA chunks (not I-DATA). + SSN ssn; + + // Message Identifier (MID) per stream and ordered/unordered. Defined by + // RFC8260, and used together with options.is_unordered and stream_id to + // uniquely identify a message. Used only in I-DATA chunks (not DATA). + MID message_id; + // Fragment Sequence Number (FSN) per stream and ordered/unordered, as above. + FSN fsn; + + // Payload Protocol Identifier (PPID). + PPID ppid; + + // The actual data payload. + std::vector payload; + + // If this data represents the first, last or a middle chunk. + IsBeginning is_beginning; + IsEnd is_end; + // If this data is sent/received unordered. 
+ IsUnordered is_unordered; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_DATA_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc new file mode 100644 index 000000000..ef67c2a49 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.10 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=10 | Cause Length=4 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int CookieReceivedWhileShuttingDownCause::kType; + +absl::optional +CookieReceivedWhileShuttingDownCause::Parse( + rtc::ArrayView data) { + if (!ParseTLV(data).has_value()) { + return absl::nullopt; + } + return CookieReceivedWhileShuttingDownCause(); +} + +void CookieReceivedWhileShuttingDownCause::SerializeTo( + std::vector& out) const { + AllocateTLV(out); +} + +std::string CookieReceivedWhileShuttingDownCause::ToString() const { + return "Cookie Received While Shutting Down"; +} +} // namespace dcsctp diff --git 
a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h new file mode 100644 index 000000000..362f181fb --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_COOKIE_RECEIVED_WHILE_SHUTTING_DOWN_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_COOKIE_RECEIVED_WHILE_SHUTTING_DOWN_CAUSE_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.10 +struct CookieReceivedWhileShuttingDownCauseConfig : public ParameterConfig { + static constexpr int kType = 10; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class CookieReceivedWhileShuttingDownCause + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = + CookieReceivedWhileShuttingDownCauseConfig::kType; + + CookieReceivedWhileShuttingDownCause() {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_COOKIE_RECEIVED_WHILE_SHUTTING_DOWN_CAUSE_H_ diff 
--git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/error_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/error_cause.cc new file mode 100644 index 000000000..dcd07472e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/error_cause.cc @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/error_cause.h" + +#include + +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/math.h" +#include "net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h" +#include "net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h" +#include "net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h" +#include "net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h" +#include "net/dcsctp/packet/error_cause/no_user_data_cause.h" +#include "net/dcsctp/packet/error_cause/out_of_resource_error_cause.h" +#include "net/dcsctp/packet/error_cause/protocol_violation_cause.h" +#include "net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h" +#include "net/dcsctp/packet/error_cause/stale_cookie_error_cause.h" +#include "net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h" +#include "net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h" +#include "net/dcsctp/packet/error_cause/unresolvable_address_cause.h" +#include "net/dcsctp/packet/error_cause/user_initiated_abort_cause.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + 
+template +bool ParseAndPrint(ParameterDescriptor descriptor, rtc::StringBuilder& sb) { + if (descriptor.type == ErrorCause::kType) { + absl::optional p = ErrorCause::Parse(descriptor.data); + if (p.has_value()) { + sb << p->ToString(); + } else { + sb << "Failed to parse error cause of type " << ErrorCause::kType; + } + return true; + } + return false; +} + +std::string ErrorCausesToString(const Parameters& parameters) { + rtc::StringBuilder sb; + + std::vector descriptors = parameters.descriptors(); + for (size_t i = 0; i < descriptors.size(); ++i) { + if (i > 0) { + sb << "\n"; + } + + const ParameterDescriptor& d = descriptors[i]; + if (!ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb) && + !ParseAndPrint(d, sb)) { + sb << "Unhandled parameter of type: " << d.type; + } + } + + return sb.Release(); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/error_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/error_cause.h new file mode 100644 index 000000000..fa2bf8147 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/error_cause.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_ERROR_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_ERROR_CAUSE_H_ + +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// Converts the Error Causes in `parameters` to a human readable string, +// to be used in error reporting and logging. +std::string ErrorCausesToString(const Parameters& parameters); + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_ERROR_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc new file mode 100644 index 000000000..018754422 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.7 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=7 | Cause Length=4 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int InvalidMandatoryParameterCause::kType; + +absl::optional +InvalidMandatoryParameterCause::Parse(rtc::ArrayView data) { + if (!ParseTLV(data).has_value()) { + return absl::nullopt; + } + return InvalidMandatoryParameterCause(); +} + +void InvalidMandatoryParameterCause::SerializeTo( + std::vector& out) const { + AllocateTLV(out); +} + +std::string InvalidMandatoryParameterCause::ToString() const { + return "Invalid Mandatory Parameter"; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h new file mode 100644 index 000000000..e192b5a42 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_INVALID_MANDATORY_PARAMETER_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_INVALID_MANDATORY_PARAMETER_CAUSE_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.7 +struct InvalidMandatoryParameterCauseConfig : public ParameterConfig { + static constexpr int kType = 7; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class InvalidMandatoryParameterCause + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = InvalidMandatoryParameterCauseConfig::kType; + + InvalidMandatoryParameterCause() {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_INVALID_MANDATORY_PARAMETER_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc new file mode 100644 index 000000000..b2ddd6f4e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.1 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=1 | Cause Length=8 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Identifier | (Reserved) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int InvalidStreamIdentifierCause::kType; + +absl::optional +InvalidStreamIdentifierCause::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + StreamID stream_id(reader->Load16<4>()); + return InvalidStreamIdentifierCause(stream_id); +} + +void InvalidStreamIdentifierCause::SerializeTo( + std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out); + + writer.Store16<4>(*stream_id_); +} + +std::string InvalidStreamIdentifierCause::ToString() const { + rtc::StringBuilder sb; + sb << "Invalid Stream Identifier, stream_id=" << *stream_id_; + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h new file mode 100644 index 000000000..b7dfe177b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_INVALID_STREAM_IDENTIFIER_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_INVALID_STREAM_IDENTIFIER_CAUSE_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.1 +struct InvalidStreamIdentifierCauseConfig : public ParameterConfig { + static constexpr int kType = 1; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class InvalidStreamIdentifierCause + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = InvalidStreamIdentifierCauseConfig::kType; + + explicit InvalidStreamIdentifierCause(StreamID stream_id) + : stream_id_(stream_id) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + StreamID stream_id() const { return stream_id_; } + + private: + StreamID stream_id_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_INVALID_STREAM_IDENTIFIER_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc new file mode 100644 index 000000000..c4e2961bd --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h" + +#include + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/str_join.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.2 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=2 | Cause Length=8+N*2 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Number of missing params=N | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Missing Param Type #1 | Missing Param Type #2 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Missing Param Type #N-1 | Missing Param Type #N | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int MissingMandatoryParameterCause::kType; + +absl::optional +MissingMandatoryParameterCause::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + uint32_t count = reader->Load32<4>(); + if (reader->variable_data_size() != count * kMissingParameterSize) { + RTC_DLOG(LS_WARNING) << "Invalid 
number of missing parameters"; + return absl::nullopt; + } + + std::vector missing_parameter_types; + missing_parameter_types.reserve(count); + for (size_t i = 0; i < count; ++i) { + BoundedByteReader sub_reader = + reader->sub_reader(i * kMissingParameterSize); + + missing_parameter_types.push_back(sub_reader.Load16<0>()); + } + return MissingMandatoryParameterCause(missing_parameter_types); +} + +void MissingMandatoryParameterCause::SerializeTo( + std::vector& out) const { + size_t variable_size = + missing_parameter_types_.size() * kMissingParameterSize; + BoundedByteWriter writer = AllocateTLV(out, variable_size); + + writer.Store32<4>(missing_parameter_types_.size()); + + for (size_t i = 0; i < missing_parameter_types_.size(); ++i) { + BoundedByteWriter sub_writer = + writer.sub_writer(i * kMissingParameterSize); + + sub_writer.Store16<0>(missing_parameter_types_[i]); + } +} + +std::string MissingMandatoryParameterCause::ToString() const { + rtc::StringBuilder sb; + sb << "Missing Mandatory Parameter, missing_parameter_types=" + << StrJoin(missing_parameter_types_, ","); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h new file mode 100644 index 000000000..443542429 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_MISSING_MANDATORY_PARAMETER_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_MISSING_MANDATORY_PARAMETER_CAUSE_H_ +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.2 +struct MissingMandatoryParameterCauseConfig : public ParameterConfig { + static constexpr int kType = 2; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 2; +}; + +class MissingMandatoryParameterCause + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = MissingMandatoryParameterCauseConfig::kType; + + explicit MissingMandatoryParameterCause( + rtc::ArrayView missing_parameter_types) + : missing_parameter_types_(missing_parameter_types.begin(), + missing_parameter_types.end()) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + rtc::ArrayView missing_parameter_types() const { + return missing_parameter_types_; + } + + private: + static constexpr size_t kMissingParameterSize = 2; + std::vector missing_parameter_types_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_MISSING_MANDATORY_PARAMETER_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/no_user_data_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/no_user_data_cause.cc new file mode 100644 index 000000000..2853915b0 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/no_user_data_cause.cc @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/no_user_data_cause.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.9 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=9 | Cause Length=8 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / TSN value / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int NoUserDataCause::kType; + +absl::optional NoUserDataCause::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + TSN tsn(reader->Load32<4>()); + return NoUserDataCause(tsn); +} + +void NoUserDataCause::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out); + writer.Store32<4>(*tsn_); +} + +std::string NoUserDataCause::ToString() const { + rtc::StringBuilder sb; + sb << "No User Data, tsn=" << *tsn_; + return sb.Release(); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/no_user_data_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/no_user_data_cause.h new file mode 100644 index 000000000..1087dcc97 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/no_user_data_cause.h @@ -0,0 +1,53 @@ +/* + * Copyright 
(c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_NO_USER_DATA_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_NO_USER_DATA_CAUSE_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.9 +struct NoUserDataCauseConfig : public ParameterConfig { + static constexpr int kType = 9; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class NoUserDataCause : public Parameter, + public TLVTrait { + public: + static constexpr int kType = NoUserDataCauseConfig::kType; + + explicit NoUserDataCause(TSN tsn) : tsn_(tsn) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + TSN tsn() const { return tsn_; } + + private: + TSN tsn_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_NO_USER_DATA_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc new file mode 100644 index 000000000..e5c7c0e78 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/out_of_resource_error_cause.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.4 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=4 | Cause Length=4 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int OutOfResourceErrorCause::kType; + +absl::optional OutOfResourceErrorCause::Parse( + rtc::ArrayView data) { + if (!ParseTLV(data).has_value()) { + return absl::nullopt; + } + return OutOfResourceErrorCause(); +} + +void OutOfResourceErrorCause::SerializeTo(std::vector& out) const { + AllocateTLV(out); +} + +std::string OutOfResourceErrorCause::ToString() const { + return "Out Of Resource"; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/out_of_resource_error_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/out_of_resource_error_cause.h new file mode 100644 index 000000000..fc798ca4a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/out_of_resource_error_cause.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_OUT_OF_RESOURCE_ERROR_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_OUT_OF_RESOURCE_ERROR_CAUSE_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.4 +struct OutOfResourceParameterConfig : public ParameterConfig { + static constexpr int kType = 4; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class OutOfResourceErrorCause : public Parameter, + public TLVTrait { + public: + static constexpr int kType = OutOfResourceParameterConfig::kType; + + OutOfResourceErrorCause() {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_OUT_OF_RESOURCE_ERROR_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/protocol_violation_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/protocol_violation_cause.cc new file mode 100644 index 000000000..1b8d423af --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/protocol_violation_cause.cc @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/error_cause/protocol_violation_cause.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.13 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=13 | Cause Length=Variable | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / Additional Information / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ProtocolViolationCause::kType; + +absl::optional ProtocolViolationCause::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + return ProtocolViolationCause( + std::string(reinterpret_cast(reader->variable_data().data()), + reader->variable_data().size())); +} + +void ProtocolViolationCause::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = + AllocateTLV(out, additional_information_.size()); + writer.CopyToVariableData(rtc::MakeArrayView( + reinterpret_cast(additional_information_.data()), + additional_information_.size())); +} + +std::string ProtocolViolationCause::ToString() const { + rtc::StringBuilder sb; + sb << "Protocol Violation, additional_information=" + << additional_information_; + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/protocol_violation_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/protocol_violation_cause.h new file mode 100644 index 000000000..3081e1f28 --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/protocol_violation_cause.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_PROTOCOL_VIOLATION_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_PROTOCOL_VIOLATION_CAUSE_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.13 +struct ProtocolViolationCauseConfig : public ParameterConfig { + static constexpr int kType = 13; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class ProtocolViolationCause : public Parameter, + public TLVTrait { + public: + static constexpr int kType = ProtocolViolationCauseConfig::kType; + + explicit ProtocolViolationCause(absl::string_view additional_information) + : additional_information_(additional_information) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + absl::string_view additional_information() const { + return additional_information_; + } + + private: + std::string additional_information_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_PROTOCOL_VIOLATION_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc 
b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc new file mode 100644 index 000000000..abe5de621 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.11 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=11 | Cause Length=Variable | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / New Address TLVs / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int RestartOfAnAssociationWithNewAddressesCause::kType; + +absl::optional +RestartOfAnAssociationWithNewAddressesCause::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + return RestartOfAnAssociationWithNewAddressesCause(reader->variable_data()); +} + +void RestartOfAnAssociationWithNewAddressesCause::SerializeTo( + std::vector& out) const { + BoundedByteWriter writer = + 
AllocateTLV(out, new_address_tlvs_.size()); + writer.CopyToVariableData(new_address_tlvs_); +} + +std::string RestartOfAnAssociationWithNewAddressesCause::ToString() const { + return "Restart of an Association with New Addresses"; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h new file mode 100644 index 000000000..a1cccdc8a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_RESTART_OF_AN_ASSOCIATION_WITH_NEW_ADDRESS_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_RESTART_OF_AN_ASSOCIATION_WITH_NEW_ADDRESS_CAUSE_H_ +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.11 +struct RestartOfAnAssociationWithNewAddressesCauseConfig + : public ParameterConfig { + static constexpr int kType = 11; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class RestartOfAnAssociationWithNewAddressesCause + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = + RestartOfAnAssociationWithNewAddressesCauseConfig::kType; + + explicit RestartOfAnAssociationWithNewAddressesCause( + rtc::ArrayView new_address_tlvs) + : new_address_tlvs_(new_address_tlvs.begin(), new_address_tlvs.end()) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + rtc::ArrayView new_address_tlvs() const { + return new_address_tlvs_; + } + + private: + std::vector new_address_tlvs_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_RESTART_OF_AN_ASSOCIATION_WITH_NEW_ADDRESS_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc new file mode 100644 index 000000000..d77d8488f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/stale_cookie_error_cause.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.3 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=3 | Cause Length=8 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Measure of Staleness (usec.) 
| +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int StaleCookieErrorCause::kType; + +absl::optional StaleCookieErrorCause::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + uint32_t staleness_us = reader->Load32<4>(); + return StaleCookieErrorCause(staleness_us); +} + +void StaleCookieErrorCause::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out); + writer.Store32<4>(staleness_us_); +} + +std::string StaleCookieErrorCause::ToString() const { + rtc::StringBuilder sb; + sb << "Stale Cookie Error, staleness_us=" << staleness_us_; + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/stale_cookie_error_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/stale_cookie_error_cause.h new file mode 100644 index 000000000..d8b7b5b5b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/stale_cookie_error_cause.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_STALE_COOKIE_ERROR_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_STALE_COOKIE_ERROR_CAUSE_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.3 +struct StaleCookieParameterConfig : public ParameterConfig { + static constexpr int kType = 3; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class StaleCookieErrorCause : public Parameter, + public TLVTrait { + public: + static constexpr int kType = StaleCookieParameterConfig::kType; + + explicit StaleCookieErrorCause(uint32_t staleness_us) + : staleness_us_(staleness_us) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + uint16_t staleness_us() const { return staleness_us_; } + + private: + uint32_t staleness_us_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_STALE_COOKIE_ERROR_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc new file mode 100644 index 000000000..04b960d99 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h" + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.6 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=6 | Cause Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / Unrecognized Chunk / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int UnrecognizedChunkTypeCause::kType; + +absl::optional UnrecognizedChunkTypeCause::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + std::vector unrecognized_chunk(reader->variable_data().begin(), + reader->variable_data().end()); + return UnrecognizedChunkTypeCause(std::move(unrecognized_chunk)); +} + +void UnrecognizedChunkTypeCause::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = + AllocateTLV(out, unrecognized_chunk_.size()); + writer.CopyToVariableData(unrecognized_chunk_); +} + +std::string UnrecognizedChunkTypeCause::ToString() const { + rtc::StringBuilder sb; + sb << "Unrecognized Chunk Type, chunk_type="; + if (!unrecognized_chunk_.empty()) { + sb << static_cast(unrecognized_chunk_[0]); + } else { + sb << ""; + } + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h new file mode 100644 index 000000000..26d3d3b8f --- 
/dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_UNRECOGNIZED_CHUNK_TYPE_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_UNRECOGNIZED_CHUNK_TYPE_CAUSE_H_ +#include +#include + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.6 +struct UnrecognizedChunkTypeCauseConfig : public ParameterConfig { + static constexpr int kType = 6; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class UnrecognizedChunkTypeCause + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = UnrecognizedChunkTypeCauseConfig::kType; + + explicit UnrecognizedChunkTypeCause(std::vector unrecognized_chunk) + : unrecognized_chunk_(std::move(unrecognized_chunk)) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + rtc::ArrayView unrecognized_chunk() const { + return unrecognized_chunk_; + } + + private: + std::vector unrecognized_chunk_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_UNRECOGNIZED_CHUNK_TYPE_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc 
b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc new file mode 100644 index 000000000..80001a9ea --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.8 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=8 | Cause Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / Unrecognized Parameters / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int UnrecognizedParametersCause::kType; + +absl::optional UnrecognizedParametersCause::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + return UnrecognizedParametersCause(reader->variable_data()); +} + +void UnrecognizedParametersCause::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = + AllocateTLV(out, unrecognized_parameters_.size()); + writer.CopyToVariableData(unrecognized_parameters_); +} + +std::string UnrecognizedParametersCause::ToString() const { + return "Unrecognized Parameters"; +} + +} // 
namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h new file mode 100644 index 000000000..ebec5ed4c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_UNRECOGNIZED_PARAMETER_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_UNRECOGNIZED_PARAMETER_CAUSE_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.8 +struct UnrecognizedParametersCauseConfig : public ParameterConfig { + static constexpr int kType = 8; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class UnrecognizedParametersCause + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = UnrecognizedParametersCauseConfig::kType; + + explicit UnrecognizedParametersCause( + rtc::ArrayView unrecognized_parameters) + : unrecognized_parameters_(unrecognized_parameters.begin(), + unrecognized_parameters.end()) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + rtc::ArrayView unrecognized_parameters() 
const { + return unrecognized_parameters_; + } + + private: + std::vector unrecognized_parameters_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_UNRECOGNIZED_PARAMETER_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc new file mode 100644 index 000000000..8108d31aa --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/unresolvable_address_cause.h" + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.5 + +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=5 | Cause Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / Unresolvable Address / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int UnresolvableAddressCause::kType; + +absl::optional UnresolvableAddressCause::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + return UnresolvableAddressCause(reader->variable_data()); +} + +void 
UnresolvableAddressCause::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = + AllocateTLV(out, unresolvable_address_.size()); + writer.CopyToVariableData(unresolvable_address_); +} + +std::string UnresolvableAddressCause::ToString() const { + return "Unresolvable Address"; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unresolvable_address_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unresolvable_address_cause.h new file mode 100644 index 000000000..c63b3779e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/unresolvable_address_cause.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_UNRESOLVABLE_ADDRESS_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_UNRESOLVABLE_ADDRESS_CAUSE_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.5 +struct UnresolvableAddressCauseConfig : public ParameterConfig { + static constexpr int kType = 5; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class UnresolvableAddressCause + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = UnresolvableAddressCauseConfig::kType; + + explicit UnresolvableAddressCause( + rtc::ArrayView unresolvable_address) + : unresolvable_address_(unresolvable_address.begin(), + unresolvable_address.end()) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + rtc::ArrayView unresolvable_address() const { + return unresolvable_address_; + } + + private: + std::vector unresolvable_address_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_UNRESOLVABLE_ADDRESS_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc new file mode 100644 index 000000000..da99aacbf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/error_cause/user_initiated_abort_cause.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.12 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Cause Code=12 | Cause Length=Variable | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / Upper Layer Abort Reason / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int UserInitiatedAbortCause::kType; + +absl::optional UserInitiatedAbortCause::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + if (reader->variable_data().empty()) { + return UserInitiatedAbortCause(""); + } + return UserInitiatedAbortCause( + std::string(reinterpret_cast(reader->variable_data().data()), + reader->variable_data().size())); +} + +void UserInitiatedAbortCause::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = + AllocateTLV(out, upper_layer_abort_reason_.size()); + writer.CopyToVariableData(rtc::MakeArrayView( + reinterpret_cast(upper_layer_abort_reason_.data()), + upper_layer_abort_reason_.size())); +} + +std::string UserInitiatedAbortCause::ToString() const { + rtc::StringBuilder sb; + sb << "User-Initiated Abort, reason=" << upper_layer_abort_reason_; + return sb.Release(); +} + +} // namespace dcsctp diff --git 
a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/user_initiated_abort_cause.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/user_initiated_abort_cause.h new file mode 100644 index 000000000..9eb16657b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/error_cause/user_initiated_abort_cause.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_ERROR_CAUSE_USER_INITIATED_ABORT_CAUSE_H_ +#define NET_DCSCTP_PACKET_ERROR_CAUSE_USER_INITIATED_ABORT_CAUSE_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.10.12 +struct UserInitiatedAbortCauseConfig : public ParameterConfig { + static constexpr int kType = 12; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class UserInitiatedAbortCause : public Parameter, + public TLVTrait { + public: + static constexpr int kType = UserInitiatedAbortCauseConfig::kType; + + explicit UserInitiatedAbortCause(absl::string_view upper_layer_abort_reason) + : upper_layer_abort_reason_(upper_layer_abort_reason) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + absl::string_view upper_layer_abort_reason() const { + return upper_layer_abort_reason_; + } + + private: + std::string upper_layer_abort_reason_; 
+}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_ERROR_CAUSE_USER_INITIATED_ABORT_CAUSE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc new file mode 100644 index 000000000..c33e3e11f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.6 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Parameter Type = 18 | Parameter Length = 12 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Re-configuration Request Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Number of new streams | Reserved | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int AddIncomingStreamsRequestParameter::kType; + 
+absl::optional +AddIncomingStreamsRequestParameter::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + ReconfigRequestSN request_sequence_number(reader->Load32<4>()); + uint16_t nbr_of_new_streams = reader->Load16<8>(); + + return AddIncomingStreamsRequestParameter(request_sequence_number, + nbr_of_new_streams); +} + +void AddIncomingStreamsRequestParameter::SerializeTo( + std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out); + writer.Store32<4>(*request_sequence_number_); + writer.Store16<8>(nbr_of_new_streams_); +} + +std::string AddIncomingStreamsRequestParameter::ToString() const { + rtc::StringBuilder sb; + sb << "Add Incoming Streams Request, req_seq_nbr=" + << *request_sequence_number(); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h new file mode 100644 index 000000000..3859eb3f7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_ADD_INCOMING_STREAMS_REQUEST_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_ADD_INCOMING_STREAMS_REQUEST_PARAMETER_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.6 +struct AddIncomingStreamsRequestParameterConfig : ParameterConfig { + static constexpr int kType = 18; + static constexpr size_t kHeaderSize = 12; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class AddIncomingStreamsRequestParameter + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = AddIncomingStreamsRequestParameterConfig::kType; + + explicit AddIncomingStreamsRequestParameter( + ReconfigRequestSN request_sequence_number, + uint16_t nbr_of_new_streams) + : request_sequence_number_(request_sequence_number), + nbr_of_new_streams_(nbr_of_new_streams) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + ReconfigRequestSN request_sequence_number() const { + return request_sequence_number_; + } + uint16_t nbr_of_new_streams() const { return nbr_of_new_streams_; } + + private: + ReconfigRequestSN request_sequence_number_; + uint16_t nbr_of_new_streams_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_ADD_INCOMING_STREAMS_REQUEST_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc new file mode 100644 index 000000000..4787ee971 --- /dev/null +++ 
b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.5 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Parameter Type = 17 | Parameter Length = 12 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Re-configuration Request Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Number of new streams | Reserved | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int AddOutgoingStreamsRequestParameter::kType; + +absl::optional +AddOutgoingStreamsRequestParameter::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + ReconfigRequestSN request_sequence_number(reader->Load32<4>()); + uint16_t nbr_of_new_streams = reader->Load16<8>(); + + return AddOutgoingStreamsRequestParameter(request_sequence_number, + nbr_of_new_streams); +} + +void 
AddOutgoingStreamsRequestParameter::SerializeTo( + std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out); + writer.Store32<4>(*request_sequence_number_); + writer.Store16<8>(nbr_of_new_streams_); +} + +std::string AddOutgoingStreamsRequestParameter::ToString() const { + rtc::StringBuilder sb; + sb << "Add Outgoing Streams Request, req_seq_nbr=" + << *request_sequence_number(); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h new file mode 100644 index 000000000..01e8f91cf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_ADD_OUTGOING_STREAMS_REQUEST_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_ADD_OUTGOING_STREAMS_REQUEST_PARAMETER_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.5 +struct AddOutgoingStreamsRequestParameterConfig : ParameterConfig { + static constexpr int kType = 17; + static constexpr size_t kHeaderSize = 12; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class AddOutgoingStreamsRequestParameter + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = AddOutgoingStreamsRequestParameterConfig::kType; + + explicit AddOutgoingStreamsRequestParameter( + ReconfigRequestSN request_sequence_number, + uint16_t nbr_of_new_streams) + : request_sequence_number_(request_sequence_number), + nbr_of_new_streams_(nbr_of_new_streams) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + ReconfigRequestSN request_sequence_number() const { + return request_sequence_number_; + } + uint16_t nbr_of_new_streams() const { return nbr_of_new_streams_; } + + private: + ReconfigRequestSN request_sequence_number_; + uint16_t nbr_of_new_streams_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_ADD_OUTGOING_STREAMS_REQUEST_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc new file mode 100644 index 000000000..7dd8e1923 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc @@ -0,0 +1,45 
@@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc3758#section-3.1 + +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Parameter Type = 49152 | Parameter Length = 4 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ForwardTsnSupportedParameter::kType; + +absl::optional +ForwardTsnSupportedParameter::Parse(rtc::ArrayView data) { + if (!ParseTLV(data).has_value()) { + return absl::nullopt; + } + return ForwardTsnSupportedParameter(); +} + +void ForwardTsnSupportedParameter::SerializeTo( + std::vector& out) const { + AllocateTLV(out); +} + +std::string ForwardTsnSupportedParameter::ToString() const { + return "Forward TSN Supported"; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h new file mode 100644 index 000000000..d4cff4ac2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_FORWARD_TSN_SUPPORTED_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_FORWARD_TSN_SUPPORTED_PARAMETER_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc3758#section-3.1 +struct ForwardTsnSupportedParameterConfig : ParameterConfig { + static constexpr int kType = 49152; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class ForwardTsnSupportedParameter + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = ForwardTsnSupportedParameterConfig::kType; + + ForwardTsnSupportedParameter() {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_FORWARD_TSN_SUPPORTED_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc new file mode 100644 index 000000000..918976d30 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.5 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 4 | Chunk Flags | Heartbeat Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// \ \ +// / Heartbeat Information TLV (Variable-Length) / +// \ \ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int HeartbeatInfoParameter::kType; + +absl::optional HeartbeatInfoParameter::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + return HeartbeatInfoParameter(reader->variable_data()); +} + +void HeartbeatInfoParameter::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out, info_.size()); + writer.CopyToVariableData(info_); +} + +std::string HeartbeatInfoParameter::ToString() const { + rtc::StringBuilder sb; + sb << "Heartbeat Info parameter (info_length=" << info_.size() << ")"; + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/heartbeat_info_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/heartbeat_info_parameter.h new file mode 100644 index 000000000..ec503a94b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/heartbeat_info_parameter.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 
2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_HEARTBEAT_INFO_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_HEARTBEAT_INFO_PARAMETER_H_ +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.5 +struct HeartbeatInfoParameterConfig : ParameterConfig { + static constexpr int kType = 1; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class HeartbeatInfoParameter : public Parameter, + public TLVTrait { + public: + static constexpr int kType = HeartbeatInfoParameterConfig::kType; + + explicit HeartbeatInfoParameter(rtc::ArrayView info) + : info_(info.begin(), info.end()) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + rtc::ArrayView info() const { return info_; } + + private: + std::vector info_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_HEARTBEAT_INFO_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc new file mode 100644 index 000000000..6191adfe9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc @@ -0,0 +1,89 @@ +/* + * Copyright (c) 
2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h" + +#include + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.2 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Parameter Type = 14 | Parameter Length = 8 + 2 * N | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Re-configuration Request Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Number 1 (optional) | Stream Number 2 (optional) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / ...... 
/ +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Number N-1 (optional) | Stream Number N (optional) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int IncomingSSNResetRequestParameter::kType; + +absl::optional +IncomingSSNResetRequestParameter::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + ReconfigRequestSN request_sequence_number(reader->Load32<4>()); + + size_t stream_count = reader->variable_data_size() / kStreamIdSize; + std::vector stream_ids; + stream_ids.reserve(stream_count); + for (size_t i = 0; i < stream_count; ++i) { + BoundedByteReader sub_reader = + reader->sub_reader(i * kStreamIdSize); + + stream_ids.push_back(StreamID(sub_reader.Load16<0>())); + } + + return IncomingSSNResetRequestParameter(request_sequence_number, + std::move(stream_ids)); +} + +void IncomingSSNResetRequestParameter::SerializeTo( + std::vector& out) const { + size_t variable_size = stream_ids_.size() * kStreamIdSize; + BoundedByteWriter writer = AllocateTLV(out, variable_size); + + writer.Store32<4>(*request_sequence_number_); + + for (size_t i = 0; i < stream_ids_.size(); ++i) { + BoundedByteWriter sub_writer = + writer.sub_writer(i * kStreamIdSize); + sub_writer.Store16<0>(*stream_ids_[i]); + } +} + +std::string IncomingSSNResetRequestParameter::ToString() const { + rtc::StringBuilder sb; + sb << "Incoming SSN Reset Request, req_seq_nbr=" + << *request_sequence_number(); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h new file mode 100644 index 000000000..18963efaf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h @@ -0,0 +1,66 @@ +/* + * Copyright 
(c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_INCOMING_SSN_RESET_REQUEST_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_INCOMING_SSN_RESET_REQUEST_PARAMETER_H_ +#include + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.2 +struct IncomingSSNResetRequestParameterConfig : ParameterConfig { + static constexpr int kType = 14; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 2; +}; + +class IncomingSSNResetRequestParameter + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = IncomingSSNResetRequestParameterConfig::kType; + + explicit IncomingSSNResetRequestParameter( + ReconfigRequestSN request_sequence_number, + std::vector stream_ids) + : request_sequence_number_(request_sequence_number), + stream_ids_(std::move(stream_ids)) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + ReconfigRequestSN request_sequence_number() const { + return request_sequence_number_; + } + rtc::ArrayView stream_ids() const { return stream_ids_; } + + private: + static constexpr size_t kStreamIdSize = sizeof(uint16_t); + + ReconfigRequestSN request_sequence_number_; + std::vector stream_ids_; +}; + +} // namespace dcsctp + +#endif // 
NET_DCSCTP_PACKET_PARAMETER_INCOMING_SSN_RESET_REQUEST_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc new file mode 100644 index 000000000..c25a2426b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" + +#include + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "net/dcsctp/public/types.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.1 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Parameter Type = 13 | Parameter Length = 16 + 2 * N | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Re-configuration Request Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Re-configuration Response Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Sender's Last Assigned TSN | +// 
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Number 1 (optional) | Stream Number 2 (optional) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// / ...... / +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Stream Number N-1 (optional) | Stream Number N (optional) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int OutgoingSSNResetRequestParameter::kType; + +absl::optional +OutgoingSSNResetRequestParameter::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + ReconfigRequestSN request_sequence_number(reader->Load32<4>()); + ReconfigRequestSN response_sequence_number(reader->Load32<8>()); + TSN sender_last_assigned_tsn(reader->Load32<12>()); + + size_t stream_count = reader->variable_data_size() / kStreamIdSize; + std::vector stream_ids; + stream_ids.reserve(stream_count); + for (size_t i = 0; i < stream_count; ++i) { + BoundedByteReader sub_reader = + reader->sub_reader(i * kStreamIdSize); + + stream_ids.push_back(StreamID(sub_reader.Load16<0>())); + } + + return OutgoingSSNResetRequestParameter( + request_sequence_number, response_sequence_number, + sender_last_assigned_tsn, std::move(stream_ids)); +} + +void OutgoingSSNResetRequestParameter::SerializeTo( + std::vector& out) const { + size_t variable_size = stream_ids_.size() * kStreamIdSize; + BoundedByteWriter writer = AllocateTLV(out, variable_size); + + writer.Store32<4>(*request_sequence_number_); + writer.Store32<8>(*response_sequence_number_); + writer.Store32<12>(*sender_last_assigned_tsn_); + + for (size_t i = 0; i < stream_ids_.size(); ++i) { + BoundedByteWriter sub_writer = + writer.sub_writer(i * kStreamIdSize); + sub_writer.Store16<0>(*stream_ids_[i]); + } +} + +std::string OutgoingSSNResetRequestParameter::ToString() const { + rtc::StringBuilder sb; + sb << "Outgoing SSN Reset Request, req_seq_nbr=" << 
*request_sequence_number() + << ", resp_seq_nbr=" << *response_sequence_number() + << ", sender_last_asg_tsn=" << *sender_last_assigned_tsn(); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h new file mode 100644 index 000000000..6eb44e079 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_OUTGOING_SSN_RESET_REQUEST_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_OUTGOING_SSN_RESET_REQUEST_PARAMETER_H_ +#include +#include + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.1 +struct OutgoingSSNResetRequestParameterConfig : ParameterConfig { + static constexpr int kType = 13; + static constexpr size_t kHeaderSize = 16; + static constexpr size_t kVariableLengthAlignment = 2; +}; + +class OutgoingSSNResetRequestParameter + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = OutgoingSSNResetRequestParameterConfig::kType; + + explicit OutgoingSSNResetRequestParameter( + ReconfigRequestSN 
request_sequence_number, + ReconfigRequestSN response_sequence_number, + TSN sender_last_assigned_tsn, + std::vector stream_ids) + : request_sequence_number_(request_sequence_number), + response_sequence_number_(response_sequence_number), + sender_last_assigned_tsn_(sender_last_assigned_tsn), + stream_ids_(std::move(stream_ids)) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + ReconfigRequestSN request_sequence_number() const { + return request_sequence_number_; + } + ReconfigRequestSN response_sequence_number() const { + return response_sequence_number_; + } + TSN sender_last_assigned_tsn() const { return sender_last_assigned_tsn_; } + rtc::ArrayView stream_ids() const { return stream_ids_; } + + private: + static constexpr size_t kStreamIdSize = sizeof(uint16_t); + + ReconfigRequestSN request_sequence_number_; + ReconfigRequestSN response_sequence_number_; + TSN sender_last_assigned_tsn_; + std::vector stream_ids_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_OUTGOING_SSN_RESET_REQUEST_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/parameter.cc new file mode 100644 index 000000000..b3b2bffef --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/parameter.cc @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/parameter/parameter.h" + +#include + +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/math.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h" +#include "net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h" +#include "net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h" +#include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h" +#include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" +#include "net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/state_cookie_parameter.h" +#include "net/dcsctp/packet/parameter/supported_extensions_parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +constexpr size_t kParameterHeaderSize = 4; + +Parameters::Builder& Parameters::Builder::Add(const Parameter& p) { + // https://tools.ietf.org/html/rfc4960#section-3.2.1 + // "If the length of the parameter is not a multiple of 4 bytes, the sender + // pads the parameter at the end (i.e., after the Parameter Value field) with + // all zero bytes." 
+ if (data_.size() % 4 != 0) { + data_.resize(RoundUpTo4(data_.size())); + } + + p.SerializeTo(data_); + return *this; +} + +std::vector Parameters::descriptors() const { + rtc::ArrayView span(data_); + std::vector result; + while (!span.empty()) { + BoundedByteReader header(span); + uint16_t type = header.Load16<0>(); + uint16_t length = header.Load16<2>(); + result.emplace_back(type, span.subview(0, length)); + size_t length_with_padding = RoundUpTo4(length); + if (length_with_padding > span.size()) { + break; + } + span = span.subview(length_with_padding); + } + return result; +} + +absl::optional Parameters::Parse( + rtc::ArrayView data) { + // Validate the parameter descriptors + rtc::ArrayView span(data); + while (!span.empty()) { + if (span.size() < kParameterHeaderSize) { + RTC_DLOG(LS_WARNING) << "Insufficient parameter length"; + return absl::nullopt; + } + BoundedByteReader header(span); + uint16_t length = header.Load16<2>(); + if (length < kParameterHeaderSize || length > span.size()) { + RTC_DLOG(LS_WARNING) << "Invalid parameter length field"; + return absl::nullopt; + } + size_t length_with_padding = RoundUpTo4(length); + if (length_with_padding > span.size()) { + break; + } + span = span.subview(length_with_padding); + } + return Parameters(std::vector(data.begin(), data.end())); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/parameter.h new file mode 100644 index 000000000..e8fa67c8f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/parameter.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_PARAMETER_H_ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +class Parameter { + public: + Parameter() {} + virtual ~Parameter() = default; + + Parameter(const Parameter& other) = default; + Parameter& operator=(const Parameter& other) = default; + + virtual void SerializeTo(std::vector& out) const = 0; + virtual std::string ToString() const = 0; +}; + +struct ParameterDescriptor { + ParameterDescriptor(uint16_t type, rtc::ArrayView data) + : type(type), data(data) {} + uint16_t type; + rtc::ArrayView data; +}; + +class Parameters { + public: + class Builder { + public: + Builder() {} + Builder& Add(const Parameter& p); + Parameters Build() { return Parameters(std::move(data_)); } + + private: + std::vector data_; + }; + + static absl::optional Parse(rtc::ArrayView data); + + Parameters() {} + Parameters(Parameters&& other) = default; + Parameters& operator=(Parameters&& other) = default; + + rtc::ArrayView data() const { return data_; } + std::vector descriptors() const; + + template + absl::optional

get() const { + static_assert(std::is_base_of::value, + "Template parameter not derived from Parameter"); + for (const auto& p : descriptors()) { + if (p.type == P::kType) { + return P::Parse(p.data); + } + } + return absl::nullopt; + } + + private: + explicit Parameters(std::vector data) : data_(std::move(data)) {} + std::vector data_; +}; + +struct ParameterConfig { + static constexpr int kTypeSizeInBytes = 2; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc new file mode 100644 index 000000000..fafb204ac --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc @@ -0,0 +1,152 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" + +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.4 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Parameter Type = 16 | Parameter Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Re-configuration Response Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Result | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Sender's Next TSN (optional) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Receiver's Next TSN (optional) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int ReconfigurationResponseParameter::kType; + +absl::string_view ToString(ReconfigurationResponseParameter::Result result) { + switch (result) { + case ReconfigurationResponseParameter::Result::kSuccessNothingToDo: + return "Success: nothing to do"; + case ReconfigurationResponseParameter::Result::kSuccessPerformed: + return "Success: performed"; + case ReconfigurationResponseParameter::Result::kDenied: + return "Denied"; + case ReconfigurationResponseParameter::Result::kErrorWrongSSN: + return "Error: wrong ssn"; + case ReconfigurationResponseParameter::Result:: + kErrorRequestAlreadyInProgress: + return "Error: request already in progress"; + case ReconfigurationResponseParameter::Result::kErrorBadSequenceNumber: + return "Error: bad sequence 
number"; + case ReconfigurationResponseParameter::Result::kInProgress: + return "In progress"; + } +} + +absl::optional +ReconfigurationResponseParameter::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + ReconfigRequestSN response_sequence_number(reader->Load32<4>()); + Result result; + uint32_t result_nbr = reader->Load32<8>(); + switch (result_nbr) { + case 0: + result = ReconfigurationResponseParameter::Result::kSuccessNothingToDo; + break; + case 1: + result = ReconfigurationResponseParameter::Result::kSuccessPerformed; + break; + case 2: + result = ReconfigurationResponseParameter::Result::kDenied; + break; + case 3: + result = ReconfigurationResponseParameter::Result::kErrorWrongSSN; + break; + case 4: + result = ReconfigurationResponseParameter::Result:: + kErrorRequestAlreadyInProgress; + break; + case 5: + result = + ReconfigurationResponseParameter::Result::kErrorBadSequenceNumber; + break; + case 6: + result = ReconfigurationResponseParameter::Result::kInProgress; + break; + default: + RTC_DLOG(LS_WARNING) << "Invalid reconfig response result: " + << result_nbr; + return absl::nullopt; + } + + if (reader->variable_data().empty()) { + return ReconfigurationResponseParameter(response_sequence_number, result); + } else if (reader->variable_data_size() != kNextTsnHeaderSize) { + RTC_DLOG(LS_WARNING) << "Invalid parameter size"; + return absl::nullopt; + } + + BoundedByteReader sub_reader = + reader->sub_reader(0); + + TSN sender_next_tsn(sub_reader.Load32<0>()); + TSN receiver_next_tsn(sub_reader.Load32<4>()); + + return ReconfigurationResponseParameter(response_sequence_number, result, + sender_next_tsn, receiver_next_tsn); +} + +void ReconfigurationResponseParameter::SerializeTo( + std::vector& out) const { + size_t variable_size = + (sender_next_tsn().has_value() ? 
kNextTsnHeaderSize : 0); + BoundedByteWriter writer = AllocateTLV(out, variable_size); + + writer.Store32<4>(*response_sequence_number_); + uint32_t result_nbr = + static_cast::type>(result_); + writer.Store32<8>(result_nbr); + + if (sender_next_tsn().has_value()) { + BoundedByteWriter sub_writer = + writer.sub_writer(0); + + sub_writer.Store32<0>(sender_next_tsn_.has_value() ? **sender_next_tsn_ + : 0); + sub_writer.Store32<4>(receiver_next_tsn_.has_value() ? **receiver_next_tsn_ + : 0); + } +} + +std::string ReconfigurationResponseParameter::ToString() const { + rtc::StringBuilder sb; + sb << "Re-configuration Response, resp_seq_nbr=" + << *response_sequence_number(); + return sb.Release(); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.h new file mode 100644 index 000000000..c5a68acb3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_RECONFIGURATION_RESPONSE_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_RECONFIGURATION_RESPONSE_PARAMETER_H_ +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.4 +struct ReconfigurationResponseParameterConfig : ParameterConfig { + static constexpr int kType = 16; + static constexpr size_t kHeaderSize = 12; + static constexpr size_t kVariableLengthAlignment = 4; +}; + +class ReconfigurationResponseParameter + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = ReconfigurationResponseParameterConfig::kType; + + enum class Result { + kSuccessNothingToDo = 0, + kSuccessPerformed = 1, + kDenied = 2, + kErrorWrongSSN = 3, + kErrorRequestAlreadyInProgress = 4, + kErrorBadSequenceNumber = 5, + kInProgress = 6, + }; + + ReconfigurationResponseParameter(ReconfigRequestSN response_sequence_number, + Result result) + : response_sequence_number_(response_sequence_number), + result_(result), + sender_next_tsn_(absl::nullopt), + receiver_next_tsn_(absl::nullopt) {} + + explicit ReconfigurationResponseParameter( + ReconfigRequestSN response_sequence_number, + Result result, + TSN sender_next_tsn, + TSN receiver_next_tsn) + : response_sequence_number_(response_sequence_number), + result_(result), + sender_next_tsn_(sender_next_tsn), + receiver_next_tsn_(receiver_next_tsn) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + ReconfigRequestSN response_sequence_number() const { + return response_sequence_number_; + } + Result result() const { return result_; } + absl::optional sender_next_tsn() const 
{ return sender_next_tsn_; } + absl::optional receiver_next_tsn() const { return receiver_next_tsn_; } + + private: + static constexpr size_t kNextTsnHeaderSize = 8; + ReconfigRequestSN response_sequence_number_; + Result result_; + absl::optional sender_next_tsn_; + absl::optional receiver_next_tsn_; +}; + +absl::string_view ToString(ReconfigurationResponseParameter::Result result); + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_RECONFIGURATION_RESPONSE_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc new file mode 100644 index 000000000..d656e0db8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.3 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Parameter Type = 15 | Parameter Length = 8 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Re-configuration Request Sequence Number | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int SSNTSNResetRequestParameter::kType; + +absl::optional SSNTSNResetRequestParameter::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + ReconfigRequestSN request_sequence_number(reader->Load32<4>()); + + return SSNTSNResetRequestParameter(request_sequence_number); +} + +void SSNTSNResetRequestParameter::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out); + writer.Store32<4>(*request_sequence_number_); +} + +std::string SSNTSNResetRequestParameter::ToString() const { + rtc::StringBuilder sb; + sb << "SSN/TSN Reset Request, req_seq_nbr=" << *request_sequence_number(); + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h new file mode 100644 index 000000000..e31d7ebe8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 
2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_SSN_TSN_RESET_REQUEST_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_SSN_TSN_RESET_REQUEST_PARAMETER_H_ +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc6525#section-4.3 +struct SSNTSNResetRequestParameterConfig : ParameterConfig { + static constexpr int kType = 15; + static constexpr size_t kHeaderSize = 8; + static constexpr size_t kVariableLengthAlignment = 0; +}; + +class SSNTSNResetRequestParameter + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = SSNTSNResetRequestParameterConfig::kType; + + explicit SSNTSNResetRequestParameter( + ReconfigRequestSN request_sequence_number) + : request_sequence_number_(request_sequence_number) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + ReconfigRequestSN request_sequence_number() const { + return request_sequence_number_; + } + + private: + ReconfigRequestSN request_sequence_number_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_SSN_TSN_RESET_REQUEST_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/state_cookie_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/state_cookie_parameter.cc new file mode 100644 index 
000000000..9777aa666 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/state_cookie_parameter.cc @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/parameter/state_cookie_parameter.h" + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.3.1 + +constexpr int StateCookieParameter::kType; + +absl::optional StateCookieParameter::Parse( + rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + return StateCookieParameter(reader->variable_data()); +} + +void StateCookieParameter::SerializeTo(std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out, data_.size()); + writer.CopyToVariableData(data_); +} + +std::string StateCookieParameter::ToString() const { + rtc::StringBuilder sb; + sb << "State Cookie parameter (cookie_length=" << data_.size() << ")"; + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/state_cookie_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/state_cookie_parameter.h new file mode 100644 index 000000000..f4355495e --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/state_cookie_parameter.h @@ -0,0 +1,55 
@@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_STATE_COOKIE_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_STATE_COOKIE_PARAMETER_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc4960#section-3.3.3.1 +struct StateCookieParameterConfig : ParameterConfig { + static constexpr int kType = 7; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class StateCookieParameter : public Parameter, + public TLVTrait { + public: + static constexpr int kType = StateCookieParameterConfig::kType; + + explicit StateCookieParameter(rtc::ArrayView data) + : data_(data.begin(), data.end()) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + rtc::ArrayView data() const { return data_; } + + private: + std::vector data_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_STATE_COOKIE_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/supported_extensions_parameter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/supported_extensions_parameter.cc new file mode 100644 index 000000000..6a8fb214d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/supported_extensions_parameter.cc @@ -0,0 +1,65 @@ +/* + * Copyright 
(c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/parameter/supported_extensions_parameter.h" + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/str_join.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc5061#section-4.2.7 + +// 0 1 2 3 +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Parameter Type = 0x8008 | Parameter Length | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | CHUNK TYPE 1 | CHUNK TYPE 2 | CHUNK TYPE 3 | CHUNK TYPE 4 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | .... 
| +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | CHUNK TYPE N | PAD | PAD | PAD | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +constexpr int SupportedExtensionsParameter::kType; + +absl::optional +SupportedExtensionsParameter::Parse(rtc::ArrayView data) { + absl::optional> reader = ParseTLV(data); + if (!reader.has_value()) { + return absl::nullopt; + } + + std::vector chunk_types(reader->variable_data().begin(), + reader->variable_data().end()); + return SupportedExtensionsParameter(std::move(chunk_types)); +} + +void SupportedExtensionsParameter::SerializeTo( + std::vector& out) const { + BoundedByteWriter writer = AllocateTLV(out, chunk_types_.size()); + writer.CopyToVariableData(chunk_types_); +} + +std::string SupportedExtensionsParameter::ToString() const { + rtc::StringBuilder sb; + sb << "Supported Extensions (" << StrJoin(chunk_types_, ", ") << ")"; + return sb.Release(); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/supported_extensions_parameter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/supported_extensions_parameter.h new file mode 100644 index 000000000..5689fd803 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/parameter/supported_extensions_parameter.h @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_PARAMETER_SUPPORTED_EXTENSIONS_PARAMETER_H_ +#define NET_DCSCTP_PACKET_PARAMETER_SUPPORTED_EXTENSIONS_PARAMETER_H_ +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/tlv_trait.h" + +namespace dcsctp { + +// https://tools.ietf.org/html/rfc5061#section-4.2.7 +struct SupportedExtensionsParameterConfig : ParameterConfig { + static constexpr int kType = 0x8008; + static constexpr size_t kHeaderSize = 4; + static constexpr size_t kVariableLengthAlignment = 1; +}; + +class SupportedExtensionsParameter + : public Parameter, + public TLVTrait { + public: + static constexpr int kType = SupportedExtensionsParameterConfig::kType; + + explicit SupportedExtensionsParameter(std::vector chunk_types) + : chunk_types_(std::move(chunk_types)) {} + + static absl::optional Parse( + rtc::ArrayView data); + + void SerializeTo(std::vector& out) const override; + std::string ToString() const override; + + bool supports(uint8_t chunk_type) const { + return std::find(chunk_types_.begin(), chunk_types_.end(), chunk_type) != + chunk_types_.end(); + } + + rtc::ArrayView chunk_types() const { return chunk_types_; } + + private: + std::vector chunk_types_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_PARAMETER_SUPPORTED_EXTENSIONS_PARAMETER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/sctp_packet.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/sctp_packet.cc new file mode 100644 index 000000000..da06ccf86 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/sctp_packet.cc @@ -0,0 +1,184 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/packet/sctp_packet.h" + +#include + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/math.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/crc32c.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_format.h" + +namespace dcsctp { +namespace { +constexpr size_t kMaxUdpPacketSize = 65535; +constexpr size_t kChunkTlvHeaderSize = 4; +constexpr size_t kExpectedDescriptorCount = 4; +} // namespace + +/* + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | Source Port Number | Destination Port Number | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | Verification Tag | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | Checksum | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +*/ + +SctpPacket::Builder::Builder(VerificationTag verification_tag, + const DcSctpOptions& options) + : verification_tag_(verification_tag), + source_port_(options.local_port), + dest_port_(options.remote_port), + max_packet_size_(RoundDownTo4(options.mtu)) {} + +SctpPacket::Builder& SctpPacket::Builder::Add(const Chunk& chunk) { + if (out_.empty()) { + out_.reserve(max_packet_size_); + out_.resize(SctpPacket::kHeaderSize); + BoundedByteWriter buffer(out_); + buffer.Store16<0>(source_port_); + buffer.Store16<2>(dest_port_); + buffer.Store32<4>(*verification_tag_); + // Checksum is at offset 8 - written when calling 
Build(); + } + RTC_DCHECK(IsDivisibleBy4(out_.size())); + + chunk.SerializeTo(out_); + if (out_.size() % 4 != 0) { + out_.resize(RoundUpTo4(out_.size())); + } + + RTC_DCHECK(out_.size() <= max_packet_size_) + << "Exceeded max size, data=" << out_.size() + << ", max_size=" << max_packet_size_; + return *this; +} + +size_t SctpPacket::Builder::bytes_remaining() const { + if (out_.empty()) { + // The packet header (CommonHeader) hasn't been written yet: + return max_packet_size_ - kHeaderSize; + } else if (out_.size() > max_packet_size_) { + RTC_DCHECK(false) << "Exceeded max size, data=" << out_.size() + << ", max_size=" << max_packet_size_; + return 0; + } + return max_packet_size_ - out_.size(); +} + +std::vector SctpPacket::Builder::Build() { + std::vector out; + out_.swap(out); + + if (!out.empty()) { + uint32_t crc = GenerateCrc32C(out); + BoundedByteWriter(out).Store32<8>(crc); + } + + RTC_DCHECK(out.size() <= max_packet_size_) + << "Exceeded max size, data=" << out.size() + << ", max_size=" << max_packet_size_; + + return out; +} + +absl::optional SctpPacket::Parse( + rtc::ArrayView data, + bool disable_checksum_verification) { + if (data.size() < kHeaderSize + kChunkTlvHeaderSize || + data.size() > kMaxUdpPacketSize) { + RTC_DLOG(LS_WARNING) << "Invalid packet size"; + return absl::nullopt; + } + + BoundedByteReader reader(data); + + CommonHeader common_header; + common_header.source_port = reader.Load16<0>(); + common_header.destination_port = reader.Load16<2>(); + common_header.verification_tag = VerificationTag(reader.Load32<4>()); + common_header.checksum = reader.Load32<8>(); + + // Create a copy of the packet, which will be held by this object. + std::vector data_copy = + std::vector(data.begin(), data.end()); + + // Verify the checksum. The checksum field must be zero when that's done. 
+ BoundedByteWriter(data_copy).Store32<8>(0); + uint32_t calculated_checksum = GenerateCrc32C(data_copy); + if (!disable_checksum_verification && + calculated_checksum != common_header.checksum) { + RTC_DLOG(LS_WARNING) << rtc::StringFormat( + "Invalid packet checksum, packet_checksum=0x%08x, " + "calculated_checksum=0x%08x", + common_header.checksum, calculated_checksum); + return absl::nullopt; + } + // Restore the checksum in the header. + BoundedByteWriter(data_copy).Store32<8>(common_header.checksum); + + // Validate and parse the chunk headers in the message. + /* + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | Chunk Type | Chunk Flags | Chunk Length | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + */ + + std::vector descriptors; + descriptors.reserve(kExpectedDescriptorCount); + rtc::ArrayView descriptor_data = + rtc::ArrayView(data_copy).subview(kHeaderSize); + while (!descriptor_data.empty()) { + if (descriptor_data.size() < kChunkTlvHeaderSize) { + RTC_DLOG(LS_WARNING) << "Too small chunk"; + return absl::nullopt; + } + BoundedByteReader chunk_header(descriptor_data); + uint8_t type = chunk_header.Load8<0>(); + uint8_t flags = chunk_header.Load8<1>(); + uint16_t length = chunk_header.Load16<2>(); + uint16_t padded_length = RoundUpTo4(length); + if (padded_length > descriptor_data.size()) { + RTC_DLOG(LS_WARNING) << "Too large chunk. length=" << length + << ", remaining=" << descriptor_data.size(); + return absl::nullopt; + } else if (padded_length < kChunkTlvHeaderSize) { + RTC_DLOG(LS_WARNING) << "Too small chunk. 
length=" << length; + return absl::nullopt; + } + descriptors.emplace_back(type, flags, + descriptor_data.subview(0, padded_length)); + descriptor_data = descriptor_data.subview(padded_length); + } + + // Note that iterators (and pointer) are guaranteed to be stable when moving a + // std::vector, and `descriptors` have pointers to within `data_copy`. + return SctpPacket(common_header, std::move(data_copy), + std::move(descriptors)); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/sctp_packet.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/sctp_packet.h new file mode 100644 index 000000000..2600caf7a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/sctp_packet.h @@ -0,0 +1,122 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PACKET_SCTP_PACKET_H_ +#define NET_DCSCTP_PACKET_SCTP_PACKET_H_ + +#include + +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/public/dcsctp_options.h" + +namespace dcsctp { + +// The "Common Header", which every SCTP packet starts with, and is described in +// https://tools.ietf.org/html/rfc4960#section-3.1. +struct CommonHeader { + uint16_t source_port; + uint16_t destination_port; + VerificationTag verification_tag; + uint32_t checksum; +}; + +// Represents an immutable (received or to-be-sent) SCTP packet. 
+class SctpPacket { + public: + static constexpr size_t kHeaderSize = 12; + + struct ChunkDescriptor { + ChunkDescriptor(uint8_t type, + uint8_t flags, + rtc::ArrayView data) + : type(type), flags(flags), data(data) {} + uint8_t type; + uint8_t flags; + rtc::ArrayView data; + }; + + SctpPacket(SctpPacket&& other) = default; + SctpPacket& operator=(SctpPacket&& other) = default; + SctpPacket(const SctpPacket&) = delete; + SctpPacket& operator=(const SctpPacket&) = delete; + + // Used for building SctpPacket, as those are immutable. + class Builder { + public: + Builder(VerificationTag verification_tag, const DcSctpOptions& options); + + Builder(Builder&& other) = default; + Builder& operator=(Builder&& other) = default; + + // Adds a chunk to the to-be-built SCTP packet. + Builder& Add(const Chunk& chunk); + + // The number of bytes remaining in the packet for chunk storage until the + // packet reaches its maximum size. + size_t bytes_remaining() const; + + // Indicates if any packets have been added to the builder. + bool empty() const { return out_.empty(); } + + // Returns the payload of the build SCTP packet. The Builder will be cleared + // after having called this function, and can be used to build a new packet. + std::vector Build(); + + private: + void WritePacketHeader(); + VerificationTag verification_tag_; + uint16_t source_port_; + uint16_t dest_port_; + // The maximum packet size is always even divisible by four, as chunks are + // always padded to a size even divisible by four. + size_t max_packet_size_; + std::vector out_; + }; + + // Parses `data` as an SCTP packet and returns it if it validates. + static absl::optional Parse( + rtc::ArrayView data, + bool disable_checksum_verification = false); + + // Returns the SCTP common header. + const CommonHeader& common_header() const { return common_header_; } + + // Returns the chunks (types and offsets) within the packet. 
+ rtc::ArrayView descriptors() const { + return descriptors_; + } + + private: + SctpPacket(const CommonHeader& common_header, + std::vector data, + std::vector descriptors) + : common_header_(common_header), + data_(std::move(data)), + descriptors_(std::move(descriptors)) {} + + CommonHeader common_header_; + + // As the `descriptors_` refer to offset within data, and since SctpPacket is + // movable, `data` needs to be pointer stable, which it is according to + // http://www.open-std.org/JTC1/SC22/WG21/docs/lwg-active.html#2321 + std::vector data_; + // The chunks and their offsets within `data_ `. + std::vector descriptors_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_SCTP_PACKET_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/tlv_trait.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/tlv_trait.cc new file mode 100644 index 000000000..493b6a461 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/tlv_trait.cc @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/packet/tlv_trait.h" + +#include "rtc_base/logging.h" + +namespace dcsctp { +namespace tlv_trait_impl { +void ReportInvalidSize(size_t actual_size, size_t expected_size) { + RTC_DLOG(LS_WARNING) << "Invalid size (" << actual_size + << ", expected minimum " << expected_size << " bytes)"; +} + +void ReportInvalidType(int actual_type, int expected_type) { + RTC_DLOG(LS_WARNING) << "Invalid type (" << actual_type << ", expected " + << expected_type << ")"; +} + +void ReportInvalidFixedLengthField(size_t value, size_t expected) { + RTC_DLOG(LS_WARNING) << "Invalid length field (" << value << ", expected " + << expected << " bytes)"; +} + +void ReportInvalidVariableLengthField(size_t value, size_t available) { + RTC_DLOG(LS_WARNING) << "Invalid length field (" << value << ", available " + << available << " bytes)"; +} + +void ReportInvalidPadding(size_t padding_bytes) { + RTC_DLOG(LS_WARNING) << "Invalid padding (" << padding_bytes << " bytes)"; +} + +void ReportInvalidLengthMultiple(size_t length, size_t alignment) { + RTC_DLOG(LS_WARNING) << "Invalid length field (" << length + << ", expected an even multiple of " << alignment + << " bytes)"; +} +} // namespace tlv_trait_impl +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/tlv_trait.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/tlv_trait.h new file mode 100644 index 000000000..a3c728efd --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/packet/tlv_trait.h @@ -0,0 +1,165 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PACKET_TLV_TRAIT_H_ +#define NET_DCSCTP_PACKET_TLV_TRAIT_H_ + +#include +#include + +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" + +namespace dcsctp { +namespace tlv_trait_impl { +// Logging functions, only to be used by TLVTrait, which is a templated class. +void ReportInvalidSize(size_t actual_size, size_t expected_size); +void ReportInvalidType(int actual_type, int expected_type); +void ReportInvalidFixedLengthField(size_t value, size_t expected); +void ReportInvalidVariableLengthField(size_t value, size_t available); +void ReportInvalidPadding(size_t padding_bytes); +void ReportInvalidLengthMultiple(size_t length, size_t alignment); +} // namespace tlv_trait_impl + +// Various entities in SCTP are padded data blocks, with a type and length +// field at fixed offsets, all stored in a 4-byte header. +// +// See e.g. https://tools.ietf.org/html/rfc4960#section-3.2 and +// https://tools.ietf.org/html/rfc4960#section-3.2.1 +// +// These are helper classes for writing and parsing that data, which in SCTP is +// called Type-Length-Value, or TLV. +// +// This templated class is configurable - a struct passed in as template +// parameter with the following expected members: +// * kType - The type field's value +// * kTypeSizeInBytes - The type field's width in bytes. +// Either 1 or 2. +// * kHeaderSize - The fixed size header +// * kVariableLengthAlignment - The size alignment on the variable data. Set +// to zero (0) if no variable data is used. +// +// This class is to be used as a trait +// (https://en.wikipedia.org/wiki/Trait_(computer_programming)) that adds a few +// public and protected members and which a class inherits from when it +// represents a type-length-value object. 
+template +class TLVTrait { + private: + static constexpr size_t kTlvHeaderSize = 4; + + protected: + static constexpr size_t kHeaderSize = Config::kHeaderSize; + + static_assert(Config::kTypeSizeInBytes == 1 || Config::kTypeSizeInBytes == 2, + "kTypeSizeInBytes must be 1 or 2"); + static_assert(Config::kHeaderSize >= kTlvHeaderSize, + "HeaderSize must be >= 4 bytes"); + static_assert((Config::kHeaderSize % 4 == 0), + "kHeaderSize must be an even multiple of 4 bytes"); + static_assert((Config::kVariableLengthAlignment == 0 || + Config::kVariableLengthAlignment == 1 || + Config::kVariableLengthAlignment == 2 || + Config::kVariableLengthAlignment == 4 || + Config::kVariableLengthAlignment == 8), + "kVariableLengthAlignment must be an allowed value"); + + // Validates the data with regards to size, alignment and type. + // If valid, returns a bounded buffer. + static absl::optional> ParseTLV( + rtc::ArrayView data) { + if (data.size() < Config::kHeaderSize) { + tlv_trait_impl::ReportInvalidSize(data.size(), Config::kHeaderSize); + return absl::nullopt; + } + BoundedByteReader tlv_header(data); + + const int type = (Config::kTypeSizeInBytes == 1) + ? tlv_header.template Load8<0>() + : tlv_header.template Load16<0>(); + + if (type != Config::kType) { + tlv_trait_impl::ReportInvalidType(type, Config::kType); + return absl::nullopt; + } + const uint16_t length = tlv_header.template Load16<2>(); + if (Config::kVariableLengthAlignment == 0) { + // Don't expect any variable length data at all. + if (length != Config::kHeaderSize || data.size() != Config::kHeaderSize) { + tlv_trait_impl::ReportInvalidFixedLengthField(length, + Config::kHeaderSize); + return absl::nullopt; + } + } else { + // Expect variable length data - verify its size alignment. 
+ if (length > data.size() || length < Config::kHeaderSize) { + tlv_trait_impl::ReportInvalidVariableLengthField(length, data.size()); + return absl::nullopt; + } + const size_t padding = data.size() - length; + if (padding > 3) { + // https://tools.ietf.org/html/rfc4960#section-3.2 + // "This padding MUST NOT be more than 3 bytes in total" + tlv_trait_impl::ReportInvalidPadding(padding); + return absl::nullopt; + } + if (!ValidateLengthAlignment(length, Config::kVariableLengthAlignment)) { + tlv_trait_impl::ReportInvalidLengthMultiple( + length, Config::kVariableLengthAlignment); + return absl::nullopt; + } + } + return BoundedByteReader(data.subview(0, length)); + } + + // Allocates space for data with a static header size, as defined by + // `Config::kHeaderSize` and a variable footer, as defined by `variable_size` + // (which may be 0) and writes the type and length in the header. + static BoundedByteWriter AllocateTLV( + std::vector& out, + size_t variable_size = 0) { + const size_t offset = out.size(); + const size_t size = Config::kHeaderSize + variable_size; + out.resize(offset + size); + + BoundedByteWriter tlv_header( + rtc::ArrayView(out.data() + offset, kTlvHeaderSize)); + if (Config::kTypeSizeInBytes == 1) { + tlv_header.template Store8<0>(static_cast(Config::kType)); + } else { + tlv_header.template Store16<0>(Config::kType); + } + tlv_header.template Store16<2>(size); + + return BoundedByteWriter( + rtc::ArrayView(out.data() + offset, size)); + } + + private: + static bool ValidateLengthAlignment(uint16_t length, size_t alignment) { + // This is to avoid MSVC believing there could be a "mod by zero", when it + // certainly can't. 
+ if (alignment == 0) { + return true; + } + return (length % alignment) == 0; + } +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PACKET_TLV_TRAIT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_message.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_message.h new file mode 100644 index 000000000..38e676391 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_message.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PUBLIC_DCSCTP_MESSAGE_H_ +#define NET_DCSCTP_PUBLIC_DCSCTP_MESSAGE_H_ + +#include +#include +#include + +#include "api/array_view.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// An SCTP message is a group of bytes sent and received as a whole on a +// specified stream identifier (`stream_id`), and with a payload protocol +// identifier (`ppid`). +class DcSctpMessage { + public: + DcSctpMessage(StreamID stream_id, PPID ppid, std::vector payload) + : stream_id_(stream_id), ppid_(ppid), payload_(std::move(payload)) {} + + DcSctpMessage(DcSctpMessage&& other) = default; + DcSctpMessage& operator=(DcSctpMessage&& other) = default; + DcSctpMessage(const DcSctpMessage&) = delete; + DcSctpMessage& operator=(const DcSctpMessage&) = delete; + + // The stream identifier to which the message is sent. + StreamID stream_id() const { return stream_id_; } + + // The payload protocol identifier (ppid) associated with the message. + PPID ppid() const { return ppid_; } + + // The payload of the message. 
+ rtc::ArrayView payload() const { return payload_; } + + // When destructing the message, extracts the payload. + std::vector ReleasePayload() && { return std::move(payload_); } + + private: + StreamID stream_id_; + PPID ppid_; + std::vector payload_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PUBLIC_DCSCTP_MESSAGE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_options.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_options.h new file mode 100644 index 000000000..4f5b50cf5 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_options.h @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PUBLIC_DCSCTP_OPTIONS_H_ +#define NET_DCSCTP_PUBLIC_DCSCTP_OPTIONS_H_ + +#include +#include + +#include "net/dcsctp/public/types.h" + +namespace dcsctp { +struct DcSctpOptions { + // The largest safe SCTP packet. Starting from the minimum guaranteed MTU + // value of 1280 for IPv6 (which may not support fragmentation), take off 85 + // bytes for DTLS/TURN/TCP/IP and ciphertext overhead. + // + // Additionally, it's possible that TURN adds an additional 4 bytes of + // overhead after a channel has been established, so an additional 4 bytes is + // subtracted + // + // 1280 IPV6 MTU + // -40 IPV6 header + // -8 UDP + // -24 GCM Cipher + // -13 DTLS record header + // -4 TURN ChannelData + // = 1191 bytes. + static constexpr size_t kMaxSafeMTUSize = 1191; + + // The local port for which the socket is supposed to be bound to. 
Incoming + // packets will be verified that they are sent to this port number and all + // outgoing packets will have this port number as source port. + int local_port = 5000; + + // The remote port to send packets to. All outgoing packets will have this + // port number as destination port. + int remote_port = 5000; + + // The announced maximum number of incoming streams. Note that this value is + // constant and can't be currently increased in run-time as "Add Incoming + // Streams Request" in RFC6525 isn't supported. + // + // The socket implementation doesn't have any per-stream fixed costs, which is + // why the default value is set to be the maximum value. + uint16_t announced_maximum_incoming_streams = 65535; + + // The announced maximum number of outgoing streams. Note that this value is + // constant and can't be currently increased in run-time as "Add Outgoing + // Streams Request" in RFC6525 isn't supported. + // + // The socket implementation doesn't have any per-stream fixed costs, which is + // why the default value is set to be the maximum value. + uint16_t announced_maximum_outgoing_streams = 65535; + + // Maximum SCTP packet size. The library will limit the size of generated + // packets to be less than or equal to this number. This does not include any + // overhead of DTLS, TURN, UDP or IP headers. + size_t mtu = kMaxSafeMTUSize; + + // The largest allowed message payload to be sent. Messages will be rejected + // if their payload is larger than this value. Note that this doesn't affect + // incoming messages, which may larger than this value (but smaller than + // `max_receiver_window_buffer_size`). + size_t max_message_size = 256 * 1024; + + // Maximum received window buffer size. This should be a bit larger than the + // largest sized message you want to be able to receive. This essentially + // limits the memory usage on the receive side. Note that memory is allocated + // dynamically, and this represents the maximum amount of buffered data. 
The + // actual memory usage of the library will be smaller in normal operation, and + // will be larger than this due to other allocations and overhead if the + // buffer is fully utilized. + size_t max_receiver_window_buffer_size = 5 * 1024 * 1024; + + // Maximum send buffer size. It will not be possible to queue more data than + // this before sending it. + size_t max_send_buffer_size = 2 * 1024 * 1024; + + // Max allowed RTT value. When the RTT is measured and it's found to be larger + // than this value, it will be discarded and not used for e.g. any RTO + // calculation. The default value is an extreme maximum but can be adapted + // to better match the environment. + DurationMs rtt_max = DurationMs(8000); + + // Initial RTO value. + DurationMs rto_initial = DurationMs(500); + + // Maximum RTO value. + DurationMs rto_max = DurationMs(800); + + // Minimum RTO value. This must be larger than an expected peer delayed ack + // timeout. + DurationMs rto_min = DurationMs(220); + + // T1-init timeout. + DurationMs t1_init_timeout = DurationMs(1000); + + // T1-cookie timeout. + DurationMs t1_cookie_timeout = DurationMs(1000); + + // T2-shutdown timeout. + DurationMs t2_shutdown_timeout = DurationMs(1000); + + // Hearbeat interval (on idle connections only). + DurationMs heartbeat_interval = DurationMs(30000); + + // The maximum time when a SACK will be sent from the arrival of an + // unacknowledged packet. Whatever is smallest of RTO/2 and this will be used. + DurationMs delayed_ack_max_timeout = DurationMs(200); + + // Do slow start as TCP - double cwnd instead of increasing it by MTU. + bool slow_start_tcp_style = false; + + // The initial congestion window size, in number of MTUs. + // See https://tools.ietf.org/html/rfc4960#section-7.2.1 which defaults at ~3 + // and https://research.google/pubs/pub36640/ which argues for at least ten + // segments. + size_t cwnd_mtus_initial = 10; + + // The minimum congestion window size, in number of MTUs. 
+ // See https://tools.ietf.org/html/rfc4960#section-7.2.3. + size_t cwnd_mtus_min = 4; + + // Maximum Data Retransmit Attempts (per DATA chunk). + int max_retransmissions = 10; + + // Max.Init.Retransmits (https://tools.ietf.org/html/rfc4960#section-15) + int max_init_retransmits = 8; + + // RFC3758 Partial Reliability Extension + bool enable_partial_reliability = true; + + // RFC8260 Stream Schedulers and User Message Interleaving + bool enable_message_interleaving = false; + + // If RTO should be added to heartbeat_interval + bool heartbeat_interval_include_rtt = true; + + // Disables SCTP packet crc32 verification. Useful when running with fuzzers. + bool disable_checksum_verification = false; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PUBLIC_DCSCTP_OPTIONS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h new file mode 100644 index 000000000..1c2fb97f7 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h @@ -0,0 +1,332 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PUBLIC_DCSCTP_SOCKET_H_ +#define NET_DCSCTP_PUBLIC_DCSCTP_SOCKET_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/packet_observer.h" +#include "net/dcsctp/public/timeout.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// The socket/association state +enum class SocketState { + // The socket is closed. + kClosed, + // The socket has initiated a connection, which is not yet established. Note + // that for incoming connections and for reconnections when the socket is + // already connected, the socket will not transition to this state. + kConnecting, + // The socket is connected, and the connection is established. + kConnected, + // The socket is shutting down, and the connection is not yet closed. + kShuttingDown, +}; + +// Send options for sending messages +struct SendOptions { + // If the message should be sent with unordered message delivery. + IsUnordered unordered = IsUnordered(false); + + // If set, will discard messages that haven't been correctly sent and + // received before the lifetime has expired. This is only available if the + // peer supports Partial Reliability Extension (RFC3758). + absl::optional lifetime = absl::nullopt; + + // If set, limits the number of retransmissions. This is only available + // if the peer supports Partial Reliability Extension (RFC3758). + absl::optional max_retransmissions = absl::nullopt; +}; + +enum class ErrorKind { + // Indicates that no error has occurred. This will never be the case when + // `OnError` or `OnAborted` is called. + kNoError, + // There have been too many retries or timeouts, and the library has given up. + kTooManyRetries, + // A command was received that is only possible to execute when the socket is + // connected, which it is not. 
+ kNotConnected, + // Parsing of the command or its parameters failed. + kParseFailed, + // Commands are received in the wrong sequence, which indicates a + // synchronisation mismatch between the peers. + kWrongSequence, + // The peer has reported an issue using ERROR or ABORT command. + kPeerReported, + // The peer has performed a protocol violation. + kProtocolViolation, + // The receive or send buffers have been exhausted. + kResourceExhaustion, + // The client has performed an invalid operation. + kUnsupportedOperation, +}; + +inline constexpr absl::string_view ToString(ErrorKind error) { + switch (error) { + case ErrorKind::kNoError: + return "NO_ERROR"; + case ErrorKind::kTooManyRetries: + return "TOO_MANY_RETRIES"; + case ErrorKind::kNotConnected: + return "NOT_CONNECTED"; + case ErrorKind::kParseFailed: + return "PARSE_FAILED"; + case ErrorKind::kWrongSequence: + return "WRONG_SEQUENCE"; + case ErrorKind::kPeerReported: + return "PEER_REPORTED"; + case ErrorKind::kProtocolViolation: + return "PROTOCOL_VIOLATION"; + case ErrorKind::kResourceExhaustion: + return "RESOURCE_EXHAUSTION"; + case ErrorKind::kUnsupportedOperation: + return "UNSUPPORTED_OPERATION"; + } +} + +enum class SendStatus { + // The message was enqueued successfully. As sending the message is done + // asynchronously, this is no guarantee that the message has been actually + // sent. + kSuccess, + // The message was rejected as the payload was empty (which is not allowed in + // SCTP). + kErrorMessageEmpty, + // The message was rejected as the payload was larger than what has been set + // as `DcSctpOptions.max_message_size`. + kErrorMessageTooLarge, + // The message could not be enqueued as the socket is out of resources. This + // mainly indicates that the send queue is full. + kErrorResourceExhaustion, + // The message could not be sent as the socket is shutting down. 
+ kErrorShuttingDown, +}; + +inline constexpr absl::string_view ToString(SendStatus error) { + switch (error) { + case SendStatus::kSuccess: + return "SUCCESS"; + case SendStatus::kErrorMessageEmpty: + return "ERROR_MESSAGE_EMPTY"; + case SendStatus::kErrorMessageTooLarge: + return "ERROR_MESSAGE_TOO_LARGE"; + case SendStatus::kErrorResourceExhaustion: + return "ERROR_RESOURCE_EXHAUSTION"; + case SendStatus::kErrorShuttingDown: + return "ERROR_SHUTTING_DOWN"; + } +} + +// Return value of ResetStreams. +enum class ResetStreamsStatus { + // If the connection is not yet established, this will be returned. + kNotConnected, + // Indicates that ResetStreams operation has been successfully initiated. + kPerformed, + // Indicates that ResetStreams has failed as it's not supported by the peer. + kNotSupported, +}; + +inline constexpr absl::string_view ToString(ResetStreamsStatus error) { + switch (error) { + case ResetStreamsStatus::kNotConnected: + return "NOT_CONNECTED"; + case ResetStreamsStatus::kPerformed: + return "PERFORMED"; + case ResetStreamsStatus::kNotSupported: + return "NOT_SUPPORTED"; + } +} + +// Callbacks that the DcSctpSocket will be done synchronously to the owning +// client. It is allowed to call back into the library from callbacks that start +// with "On". It has been explicitly documented when it's not allowed to call +// back into this library from within a callback. +// +// Theses callbacks are only synchronously triggered as a result of the client +// calling a public method in `DcSctpSocketInterface`. +class DcSctpSocketCallbacks { + public: + virtual ~DcSctpSocketCallbacks() = default; + + // Called when the library wants the packet serialized as `data` to be sent. + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual void SendPacket(rtc::ArrayView data) = 0; + + // Called when the library wants to create a Timeout. The callback must return + // an object that implements that interface. 
+ // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual std::unique_ptr CreateTimeout() = 0; + + // Returns the current time in milliseconds (from any epoch). + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual TimeMs TimeMillis() = 0; + + // Called when the library needs a random number uniformly distributed between + // `low` (inclusive) and `high` (exclusive). The random numbers used by the + // library are not used for cryptographic purposes. There are no requirements + // that the random number generator must be secure. + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual uint32_t GetRandomInt(uint32_t low, uint32_t high) = 0; + + // Triggered when the outgoing message buffer is empty, meaning that there are + // no more queued messages, but there can still be packets in-flight or to be + // retransmitted. (in contrast to SCTP_SENDER_DRY_EVENT). + // TODO(boivie): This is currently only used in benchmarks to have a steady + // flow of packets to send + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. + virtual void NotifyOutgoingMessageBufferEmpty() = 0; + + // Called when the library has received an SCTP message in full and delivers + // it to the upper layer. + // + // It is allowed to call into this library from within this callback. + virtual void OnMessageReceived(DcSctpMessage message) = 0; + + // Triggered when an non-fatal error is reported by either this library or + // from the other peer (by sending an ERROR command). These should be logged, + // but no other action need to be taken as the association is still viable. + // + // It is allowed to call into this library from within this callback. 
+ virtual void OnError(ErrorKind error, absl::string_view message) = 0; + + // Triggered when the socket has aborted - either as decided by this socket + // due to e.g. too many retransmission attempts, or by the peer when + // receiving an ABORT command. No other callbacks will be done after this + // callback, unless reconnecting. + // + // It is allowed to call into this library from within this callback. + virtual void OnAborted(ErrorKind error, absl::string_view message) = 0; + + // Called when calling `Connect` succeeds, but also for incoming successful + // connection attempts. + // + // It is allowed to call into this library from within this callback. + virtual void OnConnected() = 0; + + // Called when the socket is closed in a controlled way. No other + // callbacks will be done after this callback, unless reconnecting. + // + // It is allowed to call into this library from within this callback. + virtual void OnClosed() = 0; + + // On connection restarted (by peer). This is just a notification, and the + // association is expected to work fine after this call, but there could have + // been packet loss as a result of restarting the association. + // + // It is allowed to call into this library from within this callback. + virtual void OnConnectionRestarted() = 0; + + // Indicates that a stream reset request has failed. + // + // It is allowed to call into this library from within this callback. + virtual void OnStreamsResetFailed( + rtc::ArrayView outgoing_streams, + absl::string_view reason) = 0; + + // Indicates that a stream reset request has been performed. + // + // It is allowed to call into this library from within this callback. + virtual void OnStreamsResetPerformed( + rtc::ArrayView outgoing_streams) = 0; + + // When a peer has reset some of its outgoing streams, this will be called. An + // empty list indicates that all streams have been reset. + // + // It is allowed to call into this library from within this callback. 
+ virtual void OnIncomingStreamsReset( + rtc::ArrayView incoming_streams) = 0; +}; + +// The DcSctpSocket implementation implements the following interface. +class DcSctpSocketInterface { + public: + virtual ~DcSctpSocketInterface() = default; + + // To be called when an incoming SCTP packet is to be processed. + virtual void ReceivePacket(rtc::ArrayView data) = 0; + + // To be called when a timeout has expired. The `timeout_id` is provided + // when the timeout was initiated. + virtual void HandleTimeout(TimeoutID timeout_id) = 0; + + // Connects the socket. This is an asynchronous operation, and + // `DcSctpSocketCallbacks::OnConnected` will be called on success. + virtual void Connect() = 0; + + // Gracefully shutdowns the socket and sends all outstanding data. This is an + // asynchronous operation and `DcSctpSocketCallbacks::OnClosed` will be called + // on success. + virtual void Shutdown() = 0; + + // Closes the connection non-gracefully. Will send ABORT if the connection is + // not already closed. No callbacks will be made after Close() has returned. + virtual void Close() = 0; + + // The socket state. + virtual SocketState state() const = 0; + + // The options it was created with. + virtual const DcSctpOptions& options() const = 0; + + // Update the options max_message_size. + virtual void SetMaxMessageSize(size_t max_message_size) = 0; + + // Sends the message `message` using the provided send options. + // Sending a message is an asynchrous operation, and the `OnError` callback + // may be invoked to indicate any errors in sending the message. + // + // The association does not have to be established before calling this method. + // If it's called before there is an established association, the message will + // be queued. 
+ virtual SendStatus Send(DcSctpMessage message, + const SendOptions& send_options) = 0; + + // Resetting streams is an asynchronous operation and the results will + // be notified using `DcSctpSocketCallbacks::OnStreamsResetDone()` on success + // and `DcSctpSocketCallbacks::OnStreamsResetFailed()` on failure. Note that + // only outgoing streams can be reset. + // + // When it's known that the peer has reset its own outgoing streams, + // `DcSctpSocketCallbacks::OnIncomingStreamReset` is called. + // + // Note that resetting a stream will also remove all queued messages on those + // streams, but will ensure that the currently sent message (if any) is fully + // sent before closing the stream. + // + // Resetting streams can only be done on an established association that + // supports stream resetting. Calling this method on e.g. a closed association + // or streams that don't support resetting will not perform any operation. + virtual ResetStreamsStatus ResetStreams( + rtc::ArrayView outgoing_streams) = 0; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PUBLIC_DCSCTP_SOCKET_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/packet_observer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/packet_observer.h new file mode 100644 index 000000000..fe7567824 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/packet_observer.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_PUBLIC_PACKET_OBSERVER_H_ +#define NET_DCSCTP_PUBLIC_PACKET_OBSERVER_H_ + +#include + +#include "api/array_view.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// A PacketObserver can be attached to a socket and will be called for +// all sent and received packets. +class PacketObserver { + public: + virtual ~PacketObserver() = default; + // Called when a packet is sent, with the current time (in milliseconds) as + // `now`, and the packet payload as `payload`. + virtual void OnSentPacket(TimeMs now, + rtc::ArrayView payload) = 0; + + // Called when a packet is received, with the current time (in milliseconds) + // as `now`, and the packet payload as `payload`. + virtual void OnReceivedPacket(TimeMs now, + rtc::ArrayView payload) = 0; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_PUBLIC_PACKET_OBSERVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/strong_alias.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/strong_alias.h new file mode 100644 index 000000000..96678442b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/strong_alias.h @@ -0,0 +1,85 @@ +/* + * Copyright 2019 The Chromium Authors. All rights reserved. + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PUBLIC_STRONG_ALIAS_H_ +#define NET_DCSCTP_PUBLIC_STRONG_ALIAS_H_ + +#include +#include + +namespace dcsctp { + +// This is a copy of +// https://source.chromium.org/chromium/chromium/src/+/master:base/types/strong_alias.h +// as the API (and internals) are using type-safe integral identifiers, but this +// library can't depend on that file. 
The ostream operator has been removed +// per WebRTC library conventions, and the underlying type is exposed. + +template +class StrongAlias { + public: + using UnderlyingType = TheUnderlyingType; + constexpr StrongAlias() = default; + constexpr explicit StrongAlias(const UnderlyingType& v) : value_(v) {} + constexpr explicit StrongAlias(UnderlyingType&& v) noexcept + : value_(std::move(v)) {} + + constexpr UnderlyingType* operator->() { return &value_; } + constexpr const UnderlyingType* operator->() const { return &value_; } + + constexpr UnderlyingType& operator*() & { return value_; } + constexpr const UnderlyingType& operator*() const& { return value_; } + constexpr UnderlyingType&& operator*() && { return std::move(value_); } + constexpr const UnderlyingType&& operator*() const&& { + return std::move(value_); + } + + constexpr UnderlyingType& value() & { return value_; } + constexpr const UnderlyingType& value() const& { return value_; } + constexpr UnderlyingType&& value() && { return std::move(value_); } + constexpr const UnderlyingType&& value() const&& { return std::move(value_); } + + constexpr explicit operator const UnderlyingType&() const& { return value_; } + + constexpr bool operator==(const StrongAlias& other) const { + return value_ == other.value_; + } + constexpr bool operator!=(const StrongAlias& other) const { + return value_ != other.value_; + } + constexpr bool operator<(const StrongAlias& other) const { + return value_ < other.value_; + } + constexpr bool operator<=(const StrongAlias& other) const { + return value_ <= other.value_; + } + constexpr bool operator>(const StrongAlias& other) const { + return value_ > other.value_; + } + constexpr bool operator>=(const StrongAlias& other) const { + return value_ >= other.value_; + } + + // Hasher to use in std::unordered_map, std::unordered_set, etc. 
+ struct Hasher { + using argument_type = StrongAlias; + using result_type = std::size_t; + result_type operator()(const argument_type& id) const { + return std::hash()(id.value()); + } + }; + + protected: + UnderlyingType value_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PUBLIC_STRONG_ALIAS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/timeout.h new file mode 100644 index 000000000..64ba35109 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/timeout.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PUBLIC_TIMEOUT_H_ +#define NET_DCSCTP_PUBLIC_TIMEOUT_H_ + +#include + +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// A very simple timeout that can be started and stopped. When started, +// it will be given a unique `timeout_id` which should be provided to +// `DcSctpSocket::HandleTimeout` when it expires. +class Timeout { + public: + virtual ~Timeout() = default; + + // Called to start time timeout, with the duration in milliseconds as + // `duration` and with the timeout identifier as `timeout_id`, which - if + // the timeout expires - shall be provided to `DcSctpSocket::HandleTimeout`. + // + // `Start` and `Stop` will always be called in pairs. In other words will + // ´Start` never be called twice, without a call to `Stop` in between. + virtual void Start(DurationMs duration, TimeoutID timeout_id) = 0; + + // Called to stop the running timeout. + // + // `Start` and `Stop` will always be called in pairs. 
In other words will + // ´Start` never be called twice, without a call to `Stop` in between. + // + // `Stop` will always be called prior to releasing this object. + virtual void Stop() = 0; + + // Called to restart an already running timeout, with the `duration` and + // `timeout_id` parameters as described in `Start`. This can be overridden by + // the implementation to restart it more efficiently. + virtual void Restart(DurationMs duration, TimeoutID timeout_id) { + Stop(); + Start(duration, timeout_id); + } +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_PUBLIC_TIMEOUT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/types.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/types.h new file mode 100644 index 000000000..d516daffe --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/types.h @@ -0,0 +1,110 @@ +/* + * Copyright 2019 The Chromium Authors. All rights reserved. + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_PUBLIC_TYPES_H_ +#define NET_DCSCTP_PUBLIC_TYPES_H_ + +#include +#include + +#include "net/dcsctp/public/strong_alias.h" + +namespace dcsctp { + +// Stream Identifier +using StreamID = StrongAlias; + +// Payload Protocol Identifier (PPID) +using PPID = StrongAlias; + +// Timeout Identifier +using TimeoutID = StrongAlias; + +// Indicates if a message is allowed to be received out-of-order compared to +// other messages on the same stream. +using IsUnordered = StrongAlias; + +// Duration, as milliseconds. Overflows after 24 days. 
+class DurationMs : public StrongAlias { + public: + constexpr explicit DurationMs(const UnderlyingType& v) + : StrongAlias(v) {} + + // Convenience methods for working with time. + constexpr DurationMs& operator+=(DurationMs d) { + value_ += d.value_; + return *this; + } + constexpr DurationMs& operator-=(DurationMs d) { + value_ -= d.value_; + return *this; + } + template + constexpr DurationMs& operator*=(T factor) { + value_ *= factor; + return *this; + } +}; + +constexpr inline DurationMs operator+(DurationMs lhs, DurationMs rhs) { + return lhs += rhs; +} +constexpr inline DurationMs operator-(DurationMs lhs, DurationMs rhs) { + return lhs -= rhs; +} +template +constexpr inline DurationMs operator*(DurationMs lhs, T rhs) { + return lhs *= rhs; +} +template +constexpr inline DurationMs operator*(T lhs, DurationMs rhs) { + return rhs *= lhs; +} +constexpr inline int32_t operator/(DurationMs lhs, DurationMs rhs) { + return lhs.value() / rhs.value(); +} + +// Represents time, in milliseconds since a client-defined epoch. +class TimeMs : public StrongAlias { + public: + constexpr explicit TimeMs(const UnderlyingType& v) + : StrongAlias(v) {} + + // Convenience methods for working with time. 
+ constexpr TimeMs& operator+=(DurationMs d) { + value_ += *d; + return *this; + } + constexpr TimeMs& operator-=(DurationMs d) { + value_ -= *d; + return *this; + } + + static constexpr TimeMs InfiniteFuture() { + return TimeMs(std::numeric_limits::max()); + } +}; + +constexpr inline TimeMs operator+(TimeMs lhs, DurationMs rhs) { + return lhs += rhs; +} +constexpr inline TimeMs operator+(DurationMs lhs, TimeMs rhs) { + return rhs += lhs; +} +constexpr inline TimeMs operator-(TimeMs lhs, DurationMs rhs) { + return lhs -= rhs; +} +constexpr inline DurationMs operator-(TimeMs lhs, TimeMs rhs) { + return DurationMs(*lhs - *rhs); +} + +} // namespace dcsctp + +#endif // NET_DCSCTP_PUBLIC_TYPES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.cc new file mode 100644 index 000000000..68a4895ec --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.cc @@ -0,0 +1,286 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/rx/data_tracker.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "net/dcsctp/timer/timer.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +bool DataTracker::IsTSNValid(TSN tsn) const { + UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.PeekUnwrap(tsn); + + // Note that this method doesn't return `false` for old DATA chunks, as those + // are actually valid, and receiving those may affect the generated SACK + // response (by setting "duplicate TSNs"). + + uint32_t difference = + UnwrappedTSN::Difference(unwrapped_tsn, last_cumulative_acked_tsn_); + if (difference > kMaxAcceptedOutstandingFragments) { + return false; + } + return true; +} + +void DataTracker::Observe(TSN tsn, + AnyDataChunk::ImmediateAckFlag immediate_ack) { + UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.Unwrap(tsn); + + // IsTSNValid must be called prior to calling this method. + RTC_DCHECK( + UnwrappedTSN::Difference(unwrapped_tsn, last_cumulative_acked_tsn_) <= + kMaxAcceptedOutstandingFragments); + + // Old chunk already seen before? + if (unwrapped_tsn <= last_cumulative_acked_tsn_) { + duplicate_tsns_.insert(unwrapped_tsn.Wrap()); + return; + } + + if (unwrapped_tsn == last_cumulative_acked_tsn_.next_value()) { + last_cumulative_acked_tsn_ = unwrapped_tsn; + // The cumulative acked tsn may be moved even further, if a gap was filled. + while (!additional_tsns_.empty() && + *additional_tsns_.begin() == + last_cumulative_acked_tsn_.next_value()) { + last_cumulative_acked_tsn_.Increment(); + additional_tsns_.erase(additional_tsns_.begin()); + } + } else { + bool inserted = additional_tsns_.insert(unwrapped_tsn).second; + if (!inserted) { + // Already seen before. 
+ duplicate_tsns_.insert(unwrapped_tsn.Wrap()); + } + } + + // https://tools.ietf.org/html/rfc4960#section-6.7 + // "Upon the reception of a new DATA chunk, an endpoint shall examine the + // continuity of the TSNs received. If the endpoint detects a gap in + // the received DATA chunk sequence, it SHOULD send a SACK with Gap Ack + // Blocks immediately. The data receiver continues sending a SACK after + // receipt of each SCTP packet that doesn't fill the gap." + if (!additional_tsns_.empty()) { + UpdateAckState(AckState::kImmediate, "packet loss"); + } + + // https://tools.ietf.org/html/rfc7053#section-5.2 + // "Upon receipt of an SCTP packet containing a DATA chunk with the I + // bit set, the receiver SHOULD NOT delay the sending of the corresponding + // SACK chunk, i.e., the receiver SHOULD immediately respond with the + // corresponding SACK chunk." + if (*immediate_ack) { + UpdateAckState(AckState::kImmediate, "immediate-ack bit set"); + } + + if (!seen_packet_) { + // https://tools.ietf.org/html/rfc4960#section-5.1 + // "After the reception of the first DATA chunk in an association the + // endpoint MUST immediately respond with a SACK to acknowledge the DATA + // chunk." + seen_packet_ = true; + UpdateAckState(AckState::kImmediate, "first DATA chunk"); + } + + // https://tools.ietf.org/html/rfc4960#section-6.2 + // "Specifically, an acknowledgement SHOULD be generated for at least + // every second packet (not every second DATA chunk) received, and SHOULD be + // generated within 200 ms of the arrival of any unacknowledged DATA chunk." 
+ if (ack_state_ == AckState::kIdle) { + UpdateAckState(AckState::kBecomingDelayed, "received DATA when idle"); + } else if (ack_state_ == AckState::kDelayed) { + UpdateAckState(AckState::kImmediate, "received DATA when already delayed"); + } +} + +void DataTracker::HandleForwardTsn(TSN new_cumulative_ack) { + // ForwardTSN is sent to make the receiver (this socket) "forget" about partly + // received (or not received at all) data, up until `new_cumulative_ack`. + + UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.Unwrap(new_cumulative_ack); + UnwrappedTSN prev_last_cum_ack_tsn = last_cumulative_acked_tsn_; + + // Old chunk already seen before? + if (unwrapped_tsn <= last_cumulative_acked_tsn_) { + // https://tools.ietf.org/html/rfc3758#section-3.6 + // "Note, if the "New Cumulative TSN" value carried in the arrived + // FORWARD TSN chunk is found to be behind or at the current cumulative TSN + // point, the data receiver MUST treat this FORWARD TSN as out-of-date and + // MUST NOT update its Cumulative TSN. The receiver SHOULD send a SACK to + // its peer (the sender of the FORWARD TSN) since such a duplicate may + // indicate the previous SACK was lost in the network." + UpdateAckState(AckState::kImmediate, + "FORWARD_TSN new_cumulative_tsn was behind"); + return; + } + + // https://tools.ietf.org/html/rfc3758#section-3.6 + // "When a FORWARD TSN chunk arrives, the data receiver MUST first update + // its cumulative TSN point to the value carried in the FORWARD TSN chunk, and + // then MUST further advance its cumulative TSN point locally if possible, as + // shown by the following example..." + + // The `new_cumulative_ack` will become the current + // `last_cumulative_acked_tsn_`, and if there have been prior "gaps" that are + // now overlapping with the new value, remove them. 
+ last_cumulative_acked_tsn_ = unwrapped_tsn; + int erased_additional_tsns = std::distance( + additional_tsns_.begin(), additional_tsns_.upper_bound(unwrapped_tsn)); + additional_tsns_.erase(additional_tsns_.begin(), + additional_tsns_.upper_bound(unwrapped_tsn)); + + // See if the `last_cumulative_acked_tsn_` can be moved even further: + while (!additional_tsns_.empty() && + *additional_tsns_.begin() == last_cumulative_acked_tsn_.next_value()) { + last_cumulative_acked_tsn_.Increment(); + additional_tsns_.erase(additional_tsns_.begin()); + ++erased_additional_tsns; + } + + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "FORWARD_TSN, cum_ack_tsn=" + << *prev_last_cum_ack_tsn.Wrap() << "->" + << *new_cumulative_ack << "->" + << *last_cumulative_acked_tsn_.Wrap() << ", removed " + << erased_additional_tsns << " additional TSNs"; + + // https://tools.ietf.org/html/rfc3758#section-3.6 + // "Any time a FORWARD TSN chunk arrives, for the purposes of sending a + // SACK, the receiver MUST follow the same rules as if a DATA chunk had been + // received (i.e., follow the delayed sack rules specified in ..." + if (ack_state_ == AckState::kIdle) { + UpdateAckState(AckState::kBecomingDelayed, + "received FORWARD_TSN when idle"); + } else if (ack_state_ == AckState::kDelayed) { + UpdateAckState(AckState::kImmediate, + "received FORWARD_TSN when already delayed"); + } +} + +SackChunk DataTracker::CreateSelectiveAck(size_t a_rwnd) { + // Note that in SCTP, the receiver side is allowed to discard received data + // and signal that to the sender, but only chunks that have previously been + // reported in the gap-ack-blocks. However, this implementation will never do + // that. So this SACK produced is more like a NR-SACK as explained in + // https://ieeexplore.ieee.org/document/4697037 and which there is an RFC + // draft at https://tools.ietf.org/html/draft-tuexen-tsvwg-sctp-multipath-17. 
+ std::set duplicate_tsns; + duplicate_tsns_.swap(duplicate_tsns); + + return SackChunk(last_cumulative_acked_tsn_.Wrap(), a_rwnd, + CreateGapAckBlocks(), std::move(duplicate_tsns)); +} + +std::vector DataTracker::CreateGapAckBlocks() const { + // This method will calculate the gaps between blocks of contiguous values in + // `additional_tsns_`, in the same format as the SACK chunk expects it; + // offsets from the "cumulative ack TSN value". + std::vector gap_ack_blocks; + + absl::optional first_tsn_in_block = absl::nullopt; + absl::optional last_tsn_in_block = absl::nullopt; + + auto flush = [&]() { + if (first_tsn_in_block.has_value()) { + auto start_diff = UnwrappedTSN::Difference(*first_tsn_in_block, + last_cumulative_acked_tsn_); + auto end_diff = UnwrappedTSN::Difference(*last_tsn_in_block, + last_cumulative_acked_tsn_); + gap_ack_blocks.emplace_back(static_cast(start_diff), + static_cast(end_diff)); + first_tsn_in_block = absl::nullopt; + last_tsn_in_block = absl::nullopt; + } + }; + for (UnwrappedTSN tsn : additional_tsns_) { + if (last_tsn_in_block.has_value() && + last_tsn_in_block->next_value() == tsn) { + // Continuing the same block. + last_tsn_in_block = tsn; + } else { + // New block, or a gap from the old block's last value. 
+ flush(); + first_tsn_in_block = tsn; + last_tsn_in_block = tsn; + } + } + flush(); + return gap_ack_blocks; +} + +bool DataTracker::ShouldSendAck(bool also_if_delayed) { + if (ack_state_ == AckState::kImmediate || + (also_if_delayed && (ack_state_ == AckState::kBecomingDelayed || + ack_state_ == AckState::kDelayed))) { + UpdateAckState(AckState::kIdle, "sending SACK"); + return true; + } + + return false; +} + +bool DataTracker::will_increase_cum_ack_tsn(TSN tsn) const { + UnwrappedTSN unwrapped = tsn_unwrapper_.PeekUnwrap(tsn); + return unwrapped == last_cumulative_acked_tsn_.next_value(); +} + +void DataTracker::ForceImmediateSack() { + ack_state_ = AckState::kImmediate; +} + +void DataTracker::HandleDelayedAckTimerExpiry() { + UpdateAckState(AckState::kImmediate, "delayed ack timer expired"); +} + +void DataTracker::ObservePacketEnd() { + if (ack_state_ == AckState::kBecomingDelayed) { + UpdateAckState(AckState::kDelayed, "packet end"); + } +} + +void DataTracker::UpdateAckState(AckState new_state, absl::string_view reason) { + if (new_state != ack_state_) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "State changed from " + << ToString(ack_state_) << " to " + << ToString(new_state) << " due to " << reason; + if (ack_state_ == AckState::kDelayed) { + delayed_ack_timer_.Stop(); + } else if (new_state == AckState::kDelayed) { + delayed_ack_timer_.Start(); + } + ack_state_ = new_state; + } +} + +absl::string_view DataTracker::ToString(AckState ack_state) { + switch (ack_state) { + case AckState::kIdle: + return "IDLE"; + case AckState::kBecomingDelayed: + return "BECOMING_DELAYED"; + case AckState::kDelayed: + return "DELAYED"; + case AckState::kImmediate: + return "IMMEDIATE"; + } +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.h new file mode 100644 index 000000000..f5deaf147 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/data_tracker.h 
@@ -0,0 +1,133 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_RX_DATA_TRACKER_H_ +#define NET_DCSCTP_RX_DATA_TRACKER_H_ + +#include +#include + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/data_common.h" +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/timer/timer.h" + +namespace dcsctp { + +// Keeps track of received DATA chunks and handles all logic for _when_ to +// create SACKs and also _how_ to generate them. +// +// It only uses TSNs to track delivery and doesn't need to be aware of streams. +// +// SACKs are optimally sent every second packet on connections with no packet +// loss. When packet loss is detected, it's sent for every packet. When SACKs +// are not sent directly, a timer is used to send a SACK delayed (by RTO/2, or +// 200ms, whatever is smallest). +class DataTracker { + public: + // The maximum number of accepted in-flight DATA chunks. This indicates the + // maximum difference from this buffer's last cumulative ack TSN, and any + // received data. Data received beyond this limit will be dropped, which will + // force the transmitter to send data that actually increases the last + // cumulative acked TSN. 
+ static constexpr uint32_t kMaxAcceptedOutstandingFragments = 256; + + explicit DataTracker(absl::string_view log_prefix, + Timer* delayed_ack_timer, + TSN peer_initial_tsn) + : log_prefix_(std::string(log_prefix) + "dtrack: "), + delayed_ack_timer_(*delayed_ack_timer), + last_cumulative_acked_tsn_( + tsn_unwrapper_.Unwrap(TSN(*peer_initial_tsn - 1))) {} + + // Indicates if the provided TSN is valid. If this return false, the data + // should be dropped and not added to any other buffers, which essentially + // means that there is intentional packet loss. + bool IsTSNValid(TSN tsn) const; + + // Call for every incoming data chunk. + void Observe(TSN tsn, + AnyDataChunk::ImmediateAckFlag immediate_ack = + AnyDataChunk::ImmediateAckFlag(false)); + // Called at the end of processing an SCTP packet. + void ObservePacketEnd(); + + // Called for incoming FORWARD-TSN/I-FORWARD-TSN chunks + void HandleForwardTsn(TSN new_cumulative_ack); + + // Indicates if a SACK should be sent. There may be other reasons to send a + // SACK, but if this function indicates so, it should be sent as soon as + // possible. Calling this function will make it clear a flag so that if it's + // called again, it will probably return false. + // + // If the delayed ack timer is running, this method will return false _unless_ + // `also_if_delayed` is set to true. Then it will return true as well. + bool ShouldSendAck(bool also_if_delayed = false); + + // Returns the last cumulative ack TSN - the last seen data chunk's TSN + // value before any packet loss was detected. + TSN last_cumulative_acked_tsn() const { + return TSN(last_cumulative_acked_tsn_.Wrap()); + } + + // Returns true if the received `tsn` would increase the cumulative ack TSN. + bool will_increase_cum_ack_tsn(TSN tsn) const; + + // Forces `ShouldSendSack` to return true. + void ForceImmediateSack(); + + // Note that this will clear `duplicates_`, so every SackChunk that is + // consumed must be sent. 
+ SackChunk CreateSelectiveAck(size_t a_rwnd); + + void HandleDelayedAckTimerExpiry(); + + private: + enum class AckState { + // No need to send an ACK. + kIdle, + + // Has received data chunks (but not yet end of packet). + kBecomingDelayed, + + // Has received data chunks and the end of a packet. Delayed ack timer is + // running and a SACK will be sent on expiry, or if DATA is sent, or after + // next packet with data. + kDelayed, + + // Send a SACK immediately after handling this packet. + kImmediate, + }; + std::vector CreateGapAckBlocks() const; + void UpdateAckState(AckState new_state, absl::string_view reason); + static absl::string_view ToString(AckState ack_state); + + const std::string log_prefix_; + // If a packet has ever been seen. + bool seen_packet_ = false; + Timer& delayed_ack_timer_; + AckState ack_state_ = AckState::kIdle; + UnwrappedTSN::Unwrapper tsn_unwrapper_; + + // All TSNs up until (and including) this value have been seen. + UnwrappedTSN last_cumulative_acked_tsn_; + // Received TSNs that are not directly following `last_cumulative_acked_tsn_`. + std::set additional_tsns_; + std::set duplicate_tsns_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_RX_DATA_TRACKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc new file mode 100644 index 000000000..581b9fcc4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc @@ -0,0 +1,245 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/rx/reassembly_queue.h" + +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/common/str_join.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/rx/reassembly_streams.h" +#include "net/dcsctp/rx/traditional_reassembly_streams.h" +#include "rtc_base/logging.h" + +namespace dcsctp { +ReassemblyQueue::ReassemblyQueue(absl::string_view log_prefix, + TSN peer_initial_tsn, + size_t max_size_bytes) + : log_prefix_(std::string(log_prefix) + "reasm: "), + max_size_bytes_(max_size_bytes), + watermark_bytes_(max_size_bytes * kHighWatermarkLimit), + last_assembled_tsn_watermark_( + tsn_unwrapper_.Unwrap(TSN(*peer_initial_tsn - 1))), + streams_(std::make_unique( + log_prefix_, + [this](rtc::ArrayView tsns, + DcSctpMessage message) { + AddReassembledMessage(tsns, std::move(message)); + })) {} + +void ReassemblyQueue::Add(TSN tsn, Data data) { + RTC_DCHECK(IsConsistent()); + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "added tsn=" << *tsn + << ", stream=" << *data.stream_id << ":" + << *data.message_id << ":" << *data.fsn << ", type=" + << (data.is_beginning && data.is_end + ? "complete" + : data.is_beginning + ? "first" + : data.is_end ? 
"last" : "middle"); + + UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.Unwrap(tsn); + + if (unwrapped_tsn <= last_assembled_tsn_watermark_ || + delivered_tsns_.find(unwrapped_tsn) != delivered_tsns_.end()) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "Chunk has already been delivered - skipping"; + return; + } + + // If a stream reset has been received with a "sender's last assigned tsn" in + // the future, the socket is in "deferred reset processing" mode and must + // buffer chunks until it's exited. + if (deferred_reset_streams_.has_value() && + unwrapped_tsn > + tsn_unwrapper_.Unwrap( + deferred_reset_streams_->req.sender_last_assigned_tsn())) { + RTC_DLOG(LS_VERBOSE) + << log_prefix_ << "Deferring chunk with tsn=" << *tsn + << " until cum_ack_tsn=" + << *deferred_reset_streams_->req.sender_last_assigned_tsn(); + // https://tools.ietf.org/html/rfc6525#section-5.2.2 + // "In this mode, any data arriving with a TSN larger than the + // Sender's Last Assigned TSN for the affected stream(s) MUST be queued + // locally and held until the cumulative acknowledgment point reaches the + // Sender's Last Assigned TSN." + queued_bytes_ += data.size(); + deferred_reset_streams_->deferred_chunks.emplace_back( + std::make_pair(tsn, std::move(data))); + } else { + queued_bytes_ += streams_->Add(unwrapped_tsn, std::move(data)); + } + + // https://tools.ietf.org/html/rfc4960#section-6.9 + // "Note: If the data receiver runs out of buffer space while still + // waiting for more fragments to complete the reassembly of the message, it + // should dispatch part of its inbound message through a partial delivery + // API (see Section 10), freeing some of its receive buffer space so that + // the rest of the message may be received." + + // TODO(boivie): Support EOR flag and partial delivery? 
+ RTC_DCHECK(IsConsistent()); +} + +ReconfigurationResponseParameter::Result ReassemblyQueue::ResetStreams( + const OutgoingSSNResetRequestParameter& req, + TSN cum_tsn_ack) { + RTC_DCHECK(IsConsistent()); + if (deferred_reset_streams_.has_value()) { + // In deferred mode already. + return ReconfigurationResponseParameter::Result::kInProgress; + } else if (req.request_sequence_number() <= + last_completed_reset_req_seq_nbr_) { + // Already performed at some time previously. + return ReconfigurationResponseParameter::Result::kSuccessPerformed; + } + + UnwrappedTSN sla_tsn = tsn_unwrapper_.Unwrap(req.sender_last_assigned_tsn()); + UnwrappedTSN unwrapped_cum_tsn_ack = tsn_unwrapper_.Unwrap(cum_tsn_ack); + + // https://tools.ietf.org/html/rfc6525#section-5.2.2 + // "If the Sender's Last Assigned TSN is greater than the + // cumulative acknowledgment point, then the endpoint MUST enter "deferred + // reset processing"." + if (sla_tsn > unwrapped_cum_tsn_ack) { + RTC_DLOG(LS_VERBOSE) + << log_prefix_ + << "Entering deferred reset processing mode until cum_tsn_ack=" + << *req.sender_last_assigned_tsn(); + deferred_reset_streams_ = absl::make_optional(req); + return ReconfigurationResponseParameter::Result::kInProgress; + } + + // https://tools.ietf.org/html/rfc6525#section-5.2.2 + // "... streams MUST be reset to 0 as the next expected SSN." 
+ streams_->ResetStreams(req.stream_ids()); + last_completed_reset_req_seq_nbr_ = req.request_sequence_number(); + RTC_DCHECK(IsConsistent()); + return ReconfigurationResponseParameter::Result::kSuccessPerformed; +} + +bool ReassemblyQueue::MaybeResetStreamsDeferred(TSN cum_ack_tsn) { + RTC_DCHECK(IsConsistent()); + if (deferred_reset_streams_.has_value()) { + UnwrappedTSN unwrapped_cum_ack_tsn = tsn_unwrapper_.Unwrap(cum_ack_tsn); + UnwrappedTSN unwrapped_sla_tsn = tsn_unwrapper_.Unwrap( + deferred_reset_streams_->req.sender_last_assigned_tsn()); + if (unwrapped_cum_ack_tsn >= unwrapped_sla_tsn) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "Leaving deferred reset processing with tsn=" + << *cum_ack_tsn << ", feeding back " + << deferred_reset_streams_->deferred_chunks.size() + << " chunks"; + // https://tools.ietf.org/html/rfc6525#section-5.2.2 + // "... streams MUST be reset to 0 as the next expected SSN." + streams_->ResetStreams(deferred_reset_streams_->req.stream_ids()); + std::vector> deferred_chunks = + std::move(deferred_reset_streams_->deferred_chunks); + // The response will not be sent now, but as a reply to the retried + // request, which will come as "in progress" has been sent prior. + last_completed_reset_req_seq_nbr_ = + deferred_reset_streams_->req.request_sequence_number(); + deferred_reset_streams_ = absl::nullopt; + + // https://tools.ietf.org/html/rfc6525#section-5.2.2 + // "Any queued TSNs (queued at step E2) MUST now be released and processed + // normally." + for (auto& p : deferred_chunks) { + const TSN& tsn = p.first; + Data& data = p.second; + queued_bytes_ -= data.size(); + Add(tsn, std::move(data)); + } + + RTC_DCHECK(IsConsistent()); + return true; + } else { + RTC_DLOG(LS_VERBOSE) << "Staying in deferred reset processing. 
tsn=" + << *cum_ack_tsn; + } + } + + return false; +} + +std::vector ReassemblyQueue::FlushMessages() { + std::vector ret; + reassembled_messages_.swap(ret); + return ret; +} + +void ReassemblyQueue::AddReassembledMessage( + rtc::ArrayView tsns, + DcSctpMessage message) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Assembled message from TSN=[" + << StrJoin(tsns, ",", + [](rtc::StringBuilder& sb, UnwrappedTSN tsn) { + sb << *tsn.Wrap(); + }) + << "], message; stream_id=" << *message.stream_id() + << ", ppid=" << *message.ppid() + << ", payload=" << message.payload().size() << " bytes"; + + for (const UnwrappedTSN tsn : tsns) { + // Update watermark, or insert into delivered_tsns_ + if (tsn == last_assembled_tsn_watermark_.next_value()) { + last_assembled_tsn_watermark_.Increment(); + } else { + delivered_tsns_.insert(tsn); + } + } + + // With new TSNs in delivered_tsns, gaps might be filled. + while (!delivered_tsns_.empty() && + *delivered_tsns_.begin() == + last_assembled_tsn_watermark_.next_value()) { + last_assembled_tsn_watermark_.Increment(); + delivered_tsns_.erase(delivered_tsns_.begin()); + } + + reassembled_messages_.emplace_back(std::move(message)); +} + +void ReassemblyQueue::Handle(const AnyForwardTsnChunk& forward_tsn) { + RTC_DCHECK(IsConsistent()); + UnwrappedTSN tsn = tsn_unwrapper_.Unwrap(forward_tsn.new_cumulative_tsn()); + + last_assembled_tsn_watermark_ = std::max(last_assembled_tsn_watermark_, tsn); + delivered_tsns_.erase(delivered_tsns_.begin(), + delivered_tsns_.upper_bound(tsn)); + + queued_bytes_ -= + streams_->HandleForwardTsn(tsn, forward_tsn.skipped_streams()); + RTC_DCHECK(IsConsistent()); +} + +bool ReassemblyQueue::IsConsistent() const { + // Allow queued_bytes_ to be larger than max_size_bytes, as it's not actively + // enforced in this class. This comparison will still trigger if queued_bytes_ + // became "negative". 
+ return (queued_bytes_ >= 0 && queued_bytes_ <= 2 * max_size_bytes_); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.h new file mode 100644 index 000000000..b752e53ac --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.h @@ -0,0 +1,163 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_RX_REASSEMBLY_QUEUE_H_ +#define NET_DCSCTP_RX_REASSEMBLY_QUEUE_H_ + +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/rx/reassembly_streams.h" + +namespace dcsctp { + +// Contains the received DATA chunks that haven't yet been reassembled, and +// reassembles chunks when possible. +// +// The actual assembly is handled by an implementation of the +// `ReassemblyStreams` interface. 
+// +// Except for reassembling fragmented messages, this class will also handle two +// less common operations; To handle the receiver-side of partial reliability +// (limited number of retransmissions or limited message lifetime) as well as +// stream resetting, which is used when a sender wishes to close a data channel. +// +// Partial reliability is handled when a FORWARD-TSN or I-FORWARD-TSN chunk is +// received, and it will simply delete any chunks matching the parameters in +// that chunk. This is mainly implemented in ReassemblyStreams. +// +// Resetting streams is handled when a RECONFIG chunks is received, with an +// "Outgoing SSN Reset Request" parameter. That parameter will contain a list of +// streams to reset, and a `sender_last_assigned_tsn`. If this TSN is not yet +// seen, the stream cannot be directly reset, and this class will respond that +// the reset is "deferred". But if this TSN provided is known, the stream can be +// immediately be reset. +// +// The ReassemblyQueue has a maximum size, as it would otherwise be an DoS +// attack vector where a peer could consume all memory of the other peer by +// sending a lot of ordered chunks, but carefully withholding an early one. It +// also has a watermark limit, which the caller can query is the number of bytes +// is above that limit. This is used by the caller to be selective in what to +// add to the reassembly queue, so that it's not exhausted. The caller is +// expected to call `is_full` prior to adding data to the queue and to act +// accordingly if the queue is full. +class ReassemblyQueue { + public: + // When the queue is filled over this fraction (of its maximum size), the + // socket should restrict incoming data to avoid filling up the queue. + static constexpr float kHighWatermarkLimit = 0.9; + + ReassemblyQueue(absl::string_view log_prefix, + TSN peer_initial_tsn, + size_t max_size_bytes); + + // Adds a data chunk to the queue, with a `tsn` and other parameters in + // `data`. 
+ void Add(TSN tsn, Data data); + + // Indicates if the reassembly queue has any reassembled messages that can be + // retrieved by calling `FlushMessages`. + bool HasMessages() const { return !reassembled_messages_.empty(); } + + // Returns any reassembled messages. + std::vector FlushMessages(); + + // Handle a ForwardTSN chunk, when the sender has indicated that the received + // (this class) should forget about some chunks. This is used to implement + // partial reliability. + void Handle(const AnyForwardTsnChunk& forward_tsn); + + // Given the reset stream request and the current cum_tsn_ack, might either + // reset the streams directly (returns kSuccessPerformed), or at a later time, + // by entering the "deferred reset processing" mode (returns kInProgress). + ReconfigurationResponseParameter::Result ResetStreams( + const OutgoingSSNResetRequestParameter& req, + TSN cum_tsn_ack); + + // Given the current (updated) cum_tsn_ack, might leave "defererred reset + // processing" mode and reset streams. Returns true if so. + bool MaybeResetStreamsDeferred(TSN cum_ack_tsn); + + // The number of payload bytes that have been queued. Note that the actual + // memory usage is higher due to additional overhead of tracking received + // data. + size_t queued_bytes() const { return queued_bytes_; } + + // The remaining bytes until the queue is full. + size_t remaining_bytes() const { return max_size_bytes_ - queued_bytes_; } + + // Indicates if the queue is full. Data should not be added to the queue when + // it's full. + bool is_full() const { return queued_bytes_ >= max_size_bytes_; } + + // Indicates if the queue is above the watermark limit, which is a certain + // percentage of its size. + bool is_above_watermark() const { return queued_bytes_ >= watermark_bytes_; } + + // Returns the watermark limit, in bytes. 
+ size_t watermark_bytes() const { return watermark_bytes_; } + + private: + bool IsConsistent() const; + void AddReassembledMessage(rtc::ArrayView tsns, + DcSctpMessage message); + + struct DeferredResetStreams { + explicit DeferredResetStreams(OutgoingSSNResetRequestParameter req) + : req(std::move(req)) {} + OutgoingSSNResetRequestParameter req; + std::vector> deferred_chunks; + }; + + const std::string log_prefix_; + const size_t max_size_bytes_; + const size_t watermark_bytes_; + UnwrappedTSN::Unwrapper tsn_unwrapper_; + + // Whenever a message has been assembled, either increase + // `last_assembled_tsn_watermark_` or - if there are gaps - add the message's + // TSNs into delivered_tsns_ so that messages are not re-delivered on + // duplicate chunks. + UnwrappedTSN last_assembled_tsn_watermark_; + std::set delivered_tsns_; + // Messages that have been reassembled, and will be returned by + // `FlushMessages`. + std::vector reassembled_messages_; + + // If present, "deferred reset processing" mode is active. + absl::optional deferred_reset_streams_; + + // Contains the last request sequence number of the + // OutgoingSSNResetRequestParameter that was performed. + ReconfigRequestSN last_completed_reset_req_seq_nbr_ = ReconfigRequestSN(0); + + // The number of "payload bytes" that are in this queue, in total. + size_t queued_bytes_ = 0; + + // The actual implementation of ReassemblyStreams. + std::unique_ptr streams_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_RX_REASSEMBLY_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.h new file mode 100644 index 000000000..a8b42b5a2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_streams.h @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_RX_REASSEMBLY_STREAMS_H_ +#define NET_DCSCTP_RX_REASSEMBLY_STREAMS_H_ + +#include +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/dcsctp_message.h" + +namespace dcsctp { + +// Implementations of this interface will be called when data is received, when +// data should be skipped/forgotten or when sequence number should be reset. +// +// As a result of these operations - mainly when data is received - the +// implementations of this interface should notify when a message has been +// assembled, by calling the provided callback of type `OnAssembledMessage`. How +// it assembles messages will depend on e.g. if a message was sent on an ordered +// or unordered stream. +// +// Implementations will - for each operation - indicate how much additional +// memory that has been used as a result of performing the operation. This is +// used to limit the maximum amount of memory used, to prevent out-of-memory +// situations. +class ReassemblyStreams { + public: + // This callback will be provided as an argument to the constructor of the + // concrete class implementing this interface and should be called when a + // message has been assembled as well as indicating from which TSNs this + // message was assembled from. + using OnAssembledMessage = + std::function tsns, + DcSctpMessage message)>; + + virtual ~ReassemblyStreams() = default; + + // Adds a data chunk to a stream as identified in `data`. 
+ // If it was the last remaining chunk in a message, reassemble one (or + // several, in case of ordered chunks) messages. + // + // Returns the additional number of bytes added to the queue as a result of + // performing this operation. If this addition resulted in messages being + // assembled and delivered, this may be negative. + virtual int Add(UnwrappedTSN tsn, Data data) = 0; + + // Called for incoming FORWARD-TSN/I-FORWARD-TSN chunks - when the sender + // wishes the received to skip/forget about data up until the provided TSN. + // This is used to implement partial reliability, such as limiting the number + // of retransmissions or the an expiration duration. As a result of skipping + // data, this may result in the implementation being able to assemble messages + // in ordered streams. + // + // Returns the number of bytes removed from the queue as a result of + // this operation. + virtual size_t HandleForwardTsn( + UnwrappedTSN new_cumulative_ack_tsn, + rtc::ArrayView + skipped_streams) = 0; + + // Called for incoming (possibly deferred) RE_CONFIG chunks asking for + // either a few streams, or all streams (when the list is empty) to be + // reset - to have their next SSN or Message ID to be zero. + virtual void ResetStreams(rtc::ArrayView stream_ids) = 0; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_RX_REASSEMBLY_STREAMS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc new file mode 100644 index 000000000..7cec1150d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc @@ -0,0 +1,290 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/rx/traditional_reassembly_streams.h" + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "rtc_base/logging.h" + +namespace dcsctp { +namespace { + +// Given a map (`chunks`) and an iterator to within that map (`iter`), this +// function will return an iterator to the first chunk in that message, which +// has the `is_beginning` flag set. If there are any gaps, or if the beginning +// can't be found, `absl::nullopt` is returned. +absl::optional::iterator> FindBeginning( + const std::map& chunks, + std::map::iterator iter) { + UnwrappedTSN prev_tsn = iter->first; + for (;;) { + if (iter->second.is_beginning) { + return iter; + } + if (iter == chunks.begin()) { + return absl::nullopt; + } + --iter; + if (iter->first.next_value() != prev_tsn) { + return absl::nullopt; + } + prev_tsn = iter->first; + } +} + +// Given a map (`chunks`) and an iterator to within that map (`iter`), this +// function will return an iterator to the chunk after the last chunk in that +// message, which has the `is_end` flag set. If there are any gaps, or if the +// end can't be found, `absl::nullopt` is returned. 
+absl::optional::iterator> FindEnd( + std::map& chunks, + std::map::iterator iter) { + UnwrappedTSN prev_tsn = iter->first; + for (;;) { + if (iter->second.is_end) { + return ++iter; + } + ++iter; + if (iter == chunks.end()) { + return absl::nullopt; + } + if (iter->first != prev_tsn.next_value()) { + return absl::nullopt; + } + prev_tsn = iter->first; + } +} +} // namespace + +int TraditionalReassemblyStreams::UnorderedStream::Add(UnwrappedTSN tsn, + Data data) { + int queued_bytes = data.size(); + auto p = chunks_.emplace(tsn, std::move(data)); + if (!p.second /* !inserted */) { + return 0; + } + + queued_bytes -= TryToAssembleMessage(p.first); + + return queued_bytes; +} + +size_t TraditionalReassemblyStreams::UnorderedStream::TryToAssembleMessage( + ChunkMap::iterator iter) { + // TODO(boivie): This method is O(N) with the number of fragments in a + // message, which can be inefficient for very large values of N. This could be + // optimized by e.g. only trying to assemble a message once _any_ beginning + // and _any_ end has been found. + absl::optional start = FindBeginning(chunks_, iter); + if (!start.has_value()) { + return 0; + } + absl::optional end = FindEnd(chunks_, iter); + if (!end.has_value()) { + return 0; + } + + size_t bytes_assembled = AssembleMessage(*start, *end); + chunks_.erase(*start, *end); + return bytes_assembled; +} + +size_t TraditionalReassemblyStreams::StreamBase::AssembleMessage( + const ChunkMap::iterator start, + const ChunkMap::iterator end) { + size_t count = std::distance(start, end); + + if (count == 1) { + // Fast path - zero-copy + const Data& data = start->second; + size_t payload_size = start->second.size(); + UnwrappedTSN tsns[1] = {start->first}; + DcSctpMessage message(data.stream_id, data.ppid, std::move(data.payload)); + parent_.on_assembled_message_(tsns, std::move(message)); + return payload_size; + } + + // Slow path - will need to concatenate the payload. 
+ std::vector tsns; + std::vector payload; + + size_t payload_size = std::accumulate( + start, end, 0, + [](size_t v, const auto& p) { return v + p.second.size(); }); + + tsns.reserve(count); + payload.reserve(payload_size); + for (auto it = start; it != end; ++it) { + const Data& data = it->second; + tsns.push_back(it->first); + payload.insert(payload.end(), data.payload.begin(), data.payload.end()); + } + + DcSctpMessage message(start->second.stream_id, start->second.ppid, + std::move(payload)); + parent_.on_assembled_message_(tsns, std::move(message)); + + return payload_size; +} + +size_t TraditionalReassemblyStreams::UnorderedStream::EraseTo( + UnwrappedTSN tsn) { + auto end_iter = chunks_.upper_bound(tsn); + size_t removed_bytes = std::accumulate( + chunks_.begin(), end_iter, 0, + [](size_t r, const auto& p) { return r + p.second.size(); }); + + chunks_.erase(chunks_.begin(), end_iter); + return removed_bytes; +} + +size_t TraditionalReassemblyStreams::OrderedStream::TryToAssembleMessage() { + if (chunks_by_ssn_.empty() || chunks_by_ssn_.begin()->first != next_ssn_) { + return 0; + } + + ChunkMap& chunks = chunks_by_ssn_.begin()->second; + + if (!chunks.begin()->second.is_beginning || !chunks.rbegin()->second.is_end) { + return 0; + } + + uint32_t tsn_diff = + UnwrappedTSN::Difference(chunks.rbegin()->first, chunks.begin()->first); + if (tsn_diff != chunks.size() - 1) { + return 0; + } + + size_t assembled_bytes = AssembleMessage(chunks.begin(), chunks.end()); + chunks_by_ssn_.erase(chunks_by_ssn_.begin()); + next_ssn_.Increment(); + return assembled_bytes; +} + +size_t TraditionalReassemblyStreams::OrderedStream::TryToAssembleMessages() { + size_t assembled_bytes = 0; + + for (;;) { + size_t assembled_bytes_this_iter = TryToAssembleMessage(); + if (assembled_bytes_this_iter == 0) { + break; + } + assembled_bytes += assembled_bytes_this_iter; + } + return assembled_bytes; +} + +int TraditionalReassemblyStreams::OrderedStream::Add(UnwrappedTSN tsn, + Data 
data) { + int queued_bytes = data.size(); + + UnwrappedSSN ssn = ssn_unwrapper_.Unwrap(data.ssn); + auto p = chunks_by_ssn_[ssn].emplace(tsn, std::move(data)); + if (!p.second /* !inserted */) { + return 0; + } + + if (ssn == next_ssn_) { + queued_bytes -= TryToAssembleMessages(); + } + + return queued_bytes; +} + +size_t TraditionalReassemblyStreams::OrderedStream::EraseTo(SSN ssn) { + UnwrappedSSN unwrapped_ssn = ssn_unwrapper_.Unwrap(ssn); + + auto end_iter = chunks_by_ssn_.upper_bound(unwrapped_ssn); + size_t removed_bytes = std::accumulate( + chunks_by_ssn_.begin(), end_iter, 0, [](size_t r1, const auto& p) { + return r1 + + absl::c_accumulate(p.second, 0, [](size_t r2, const auto& q) { + return r2 + q.second.size(); + }); + }); + chunks_by_ssn_.erase(chunks_by_ssn_.begin(), end_iter); + + if (unwrapped_ssn >= next_ssn_) { + unwrapped_ssn.Increment(); + next_ssn_ = unwrapped_ssn; + } + + removed_bytes += TryToAssembleMessages(); + return removed_bytes; +} + +int TraditionalReassemblyStreams::Add(UnwrappedTSN tsn, Data data) { + if (data.is_unordered) { + auto it = unordered_streams_.emplace(data.stream_id, this).first; + return it->second.Add(tsn, std::move(data)); + } + + auto it = ordered_streams_.emplace(data.stream_id, this).first; + return it->second.Add(tsn, std::move(data)); +} + +size_t TraditionalReassemblyStreams::HandleForwardTsn( + UnwrappedTSN new_cumulative_ack_tsn, + rtc::ArrayView skipped_streams) { + size_t bytes_removed = 0; + // The `skipped_streams` only over ordered messages - need to + // iterate all unordered streams manually to remove those chunks. 
+ for (auto& entry : unordered_streams_) { + bytes_removed += entry.second.EraseTo(new_cumulative_ack_tsn); + } + + for (const auto& skipped_stream : skipped_streams) { + auto it = ordered_streams_.find(skipped_stream.stream_id); + if (it != ordered_streams_.end()) { + bytes_removed += it->second.EraseTo(skipped_stream.ssn); + } + } + + return bytes_removed; +} + +void TraditionalReassemblyStreams::ResetStreams( + rtc::ArrayView stream_ids) { + if (stream_ids.empty()) { + for (auto& entry : ordered_streams_) { + const StreamID& stream_id = entry.first; + OrderedStream& stream = entry.second; + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "Resetting implicit stream_id=" << *stream_id; + stream.Reset(); + } + } else { + for (StreamID stream_id : stream_ids) { + auto it = ordered_streams_.find(stream_id); + if (it != ordered_streams_.end()) { + RTC_DLOG(LS_VERBOSE) + << log_prefix_ << "Resetting explicit stream_id=" << *stream_id; + it->second.Reset(); + } + } + } +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h new file mode 100644 index 000000000..12d1d933a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_RX_TRADITIONAL_REASSEMBLY_STREAMS_H_ +#define NET_DCSCTP_RX_TRADITIONAL_REASSEMBLY_STREAMS_H_ +#include +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/rx/reassembly_streams.h" + +namespace dcsctp { + +// Handles reassembly of incoming data when interleaved message sending +// is not enabled on the association, i.e. when RFC8260 is not in use and +// RFC4960 is to be followed. +class TraditionalReassemblyStreams : public ReassemblyStreams { + public: + TraditionalReassemblyStreams(absl::string_view log_prefix, + OnAssembledMessage on_assembled_message) + : log_prefix_(log_prefix), on_assembled_message_(on_assembled_message) {} + + int Add(UnwrappedTSN tsn, Data data) override; + + size_t HandleForwardTsn( + UnwrappedTSN new_cumulative_ack_tsn, + rtc::ArrayView skipped_streams) + override; + + void ResetStreams(rtc::ArrayView stream_ids) override; + + private: + using ChunkMap = std::map; + + // Base class for `UnorderedStream` and `OrderedStream`. + class StreamBase { + protected: + explicit StreamBase(TraditionalReassemblyStreams* parent) + : parent_(*parent) {} + + size_t AssembleMessage(const ChunkMap::iterator start, + const ChunkMap::iterator end); + TraditionalReassemblyStreams& parent_; + }; + + // Manages all received data for a specific unordered stream, and assembles + // messages when possible. + class UnorderedStream : StreamBase { + public: + explicit UnorderedStream(TraditionalReassemblyStreams* parent) + : StreamBase(parent) {} + int Add(UnwrappedTSN tsn, Data data); + // Returns the number of bytes removed from the queue. 
+ size_t EraseTo(UnwrappedTSN tsn); + + private: + // Given an iterator to any chunk within the map, try to assemble a message + // into `reassembled_messages` containing it and - if successful - erase + // those chunks from the stream chunks map. + // + // Returns the number of bytes that were assembled. + size_t TryToAssembleMessage(ChunkMap::iterator iter); + + ChunkMap chunks_; + }; + + // Manages all received data for a specific ordered stream, and assembles + // messages when possible. + class OrderedStream : StreamBase { + public: + explicit OrderedStream(TraditionalReassemblyStreams* parent) + : StreamBase(parent), next_ssn_(ssn_unwrapper_.Unwrap(SSN(0))) {} + int Add(UnwrappedTSN tsn, Data data); + size_t EraseTo(SSN ssn); + void Reset() { + ssn_unwrapper_.Reset(); + next_ssn_ = ssn_unwrapper_.Unwrap(SSN(0)); + } + + private: + // Try to assemble one or several messages in order from the stream. + // Returns the number of bytes assembled if a message was assembled. + size_t TryToAssembleMessage(); + size_t TryToAssembleMessages(); + // This must be an ordered container to be able to iterate in SSN order. + std::map chunks_by_ssn_; + UnwrappedSSN::Unwrapper ssn_unwrapper_; + UnwrappedSSN next_ssn_; + }; + + const std::string log_prefix_; + + // Callback for when a message has been assembled. + const OnAssembledMessage on_assembled_message_; + + // All unordered and ordered streams, managing not-yet-assembled data. + std::unordered_map + unordered_streams_; + std::unordered_map + ordered_streams_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_RX_TRADITIONAL_REASSEMBLY_STREAMS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h new file mode 100644 index 000000000..79f3f36d1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h @@ -0,0 +1,178 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_SOCKET_CALLBACK_DEFERRER_H_ +#define NET_DCSCTP_SOCKET_CALLBACK_DEFERRER_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/ref_counted_base.h" +#include "api/scoped_refptr.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "rtc_base/ref_counted_object.h" + +namespace dcsctp { + +// Defers callbacks until they can be safely triggered. +// +// There are a lot of callbacks from the dcSCTP library to the client, +// such as when messages are received or streams are closed. When the client +// receives these callbacks, the client is expected to be able to call into the +// library - from within the callback. For example, sending a reply message when +// a certain SCTP message has been received, or to reconnect when the connection +// was closed for any reason. This means that the dcSCTP library must always be +// in a consistent and stable state when these callbacks are delivered, and to +// ensure that's the case, callbacks are not immediately delivered from where +// they originate, but instead queued (deferred) by this class. At the end of +// any public API method that may result in callbacks, they are triggered and +// then delivered. +// +// There are a number of exceptions, which is clearly annotated in the API. +class CallbackDeferrer : public DcSctpSocketCallbacks { + public: + explicit CallbackDeferrer(DcSctpSocketCallbacks& underlying) + : underlying_(underlying) {} + + void TriggerDeferred() { + // Need to swap here. 
The client may call into the library from within a + // callback, and that might result in adding new callbacks to this instance, + // and the vector can't be modified while iterated on. + std::vector> deferred; + deferred.swap(deferred_); + + for (auto& cb : deferred) { + cb(underlying_); + } + } + + void SendPacket(rtc::ArrayView data) override { + // Will not be deferred - call directly. + underlying_.SendPacket(data); + } + + std::unique_ptr CreateTimeout() override { + // Will not be deferred - call directly. + return underlying_.CreateTimeout(); + } + + TimeMs TimeMillis() override { + // Will not be deferred - call directly. + return underlying_.TimeMillis(); + } + + uint32_t GetRandomInt(uint32_t low, uint32_t high) override { + // Will not be deferred - call directly. + return underlying_.GetRandomInt(low, high); + } + + void NotifyOutgoingMessageBufferEmpty() override { + // Will not be deferred - call directly. + underlying_.NotifyOutgoingMessageBufferEmpty(); + } + + void OnMessageReceived(DcSctpMessage message) override { + deferred_.emplace_back( + [deliverer = MessageDeliverer(std::move(message))]( + DcSctpSocketCallbacks& cb) mutable { deliverer.Deliver(cb); }); + } + + void OnError(ErrorKind error, absl::string_view message) override { + deferred_.emplace_back( + [error, message = std::string(message)](DcSctpSocketCallbacks& cb) { + cb.OnError(error, message); + }); + } + + void OnAborted(ErrorKind error, absl::string_view message) override { + deferred_.emplace_back( + [error, message = std::string(message)](DcSctpSocketCallbacks& cb) { + cb.OnAborted(error, message); + }); + } + + void OnConnected() override { + deferred_.emplace_back([](DcSctpSocketCallbacks& cb) { cb.OnConnected(); }); + } + + void OnClosed() override { + deferred_.emplace_back([](DcSctpSocketCallbacks& cb) { cb.OnClosed(); }); + } + + void OnConnectionRestarted() override { + deferred_.emplace_back( + [](DcSctpSocketCallbacks& cb) { cb.OnConnectionRestarted(); }); + } + + void 
OnStreamsResetFailed(rtc::ArrayView outgoing_streams, + absl::string_view reason) override { + deferred_.emplace_back( + [streams = std::vector(outgoing_streams.begin(), + outgoing_streams.end()), + reason = std::string(reason)](DcSctpSocketCallbacks& cb) { + cb.OnStreamsResetFailed(streams, reason); + }); + } + + void OnStreamsResetPerformed( + rtc::ArrayView outgoing_streams) override { + deferred_.emplace_back( + [streams = std::vector(outgoing_streams.begin(), + outgoing_streams.end())]( + DcSctpSocketCallbacks& cb) { + cb.OnStreamsResetPerformed(streams); + }); + } + + void OnIncomingStreamsReset( + rtc::ArrayView incoming_streams) override { + deferred_.emplace_back( + [streams = std::vector(incoming_streams.begin(), + incoming_streams.end())]( + DcSctpSocketCallbacks& cb) { cb.OnIncomingStreamsReset(streams); }); + } + + private: + // A wrapper around the move-only DcSctpMessage, to let it be captured in a + // lambda. + class MessageDeliverer { + public: + explicit MessageDeliverer(DcSctpMessage&& message) + : state_(rtc::make_ref_counted(std::move(message))) {} + + void Deliver(DcSctpSocketCallbacks& c) { + // Really ensure that it's only called once. 
+ RTC_DCHECK(!state_->has_delivered); + state_->has_delivered = true; + c.OnMessageReceived(std::move(state_->message)); + } + + private: + struct State : public rtc::RefCountInterface { + explicit State(DcSctpMessage&& m) + : has_delivered(false), message(std::move(m)) {} + bool has_delivered; + DcSctpMessage message; + }; + rtc::scoped_refptr state_; + }; + + DcSctpSocketCallbacks& underlying_; + std::vector> deferred_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_CALLBACK_DEFERRER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/capabilities.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/capabilities.h new file mode 100644 index 000000000..c6d3692b2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/capabilities.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_SOCKET_CAPABILITIES_H_ +#define NET_DCSCTP_SOCKET_CAPABILITIES_H_ + +namespace dcsctp { +// Indicates what the association supports, meaning that both parties +// support it and that feature can be used. 
+struct Capabilities { + // RFC3758 Partial Reliability Extension + bool partial_reliability = false; + // RFC8260 Stream Schedulers and User Message Interleaving + bool message_interleaving = false; + // RFC6525 Stream Reconfiguration + bool reconfig = false; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_CAPABILITIES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/context.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/context.h new file mode 100644 index 000000000..eca5b9e4f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/context.h @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_SOCKET_CONTEXT_H_ +#define NET_DCSCTP_SOCKET_CONTEXT_H_ + +#include + +#include "absl/strings/string_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +// A set of helper methods used by handlers to e.g. send packets. +// +// Implemented by the TransmissionControlBlock. +class Context { + public: + virtual ~Context() = default; + + // Indicates if a connection has been established. + virtual bool is_connection_established() const = 0; + + // Returns this side's initial TSN value. + virtual TSN my_initial_tsn() const = 0; + + // Returns the peer's initial TSN value. + virtual TSN peer_initial_tsn() const = 0; + + // Returns the socket callbacks. + virtual DcSctpSocketCallbacks& callbacks() const = 0; + + // Observes a measured RTT value, in milliseconds. 
+ virtual void ObserveRTT(DurationMs rtt_ms) = 0; + + // Returns the current Retransmission Timeout (rto) value, in milliseconds. + virtual DurationMs current_rto() const = 0; + + // Increments the transmission error counter, given a human readable reason. + virtual bool IncrementTxErrorCounter(absl::string_view reason) = 0; + + // Clears the transmission error counter. + virtual void ClearTxErrorCounter() = 0; + + // Returns true if there have been too many retransmission errors. + virtual bool HasTooManyTxErrors() const = 0; + + // Returns a PacketBuilder, filled in with the correct verification tag. + virtual SctpPacket::Builder PacketBuilder() const = 0; + + // Builds the packet from `builder` and sends it. + virtual void Send(SctpPacket::Builder& builder) = 0; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_CONTEXT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc new file mode 100644 index 000000000..174288eeb --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc @@ -0,0 +1,1542 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/socket/dcsctp_socket.h" + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/abort_chunk.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" +#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h" +#include "net/dcsctp/packet/chunk/data_chunk.h" +#include "net/dcsctp/packet/chunk/data_common.h" +#include "net/dcsctp/packet/chunk/error_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h" +#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h" +#include "net/dcsctp/packet/chunk/idata_chunk.h" +#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/init_ack_chunk.h" +#include "net/dcsctp/packet/chunk/init_chunk.h" +#include "net/dcsctp/packet/chunk/reconfig_chunk.h" +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_ack_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_complete_chunk.h" +#include "net/dcsctp/packet/chunk_validators.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h" +#include "net/dcsctp/packet/error_cause/error_cause.h" +#include "net/dcsctp/packet/error_cause/no_user_data_cause.h" +#include "net/dcsctp/packet/error_cause/out_of_resource_error_cause.h" +#include "net/dcsctp/packet/error_cause/protocol_violation_cause.h" +#include "net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h" +#include "net/dcsctp/packet/error_cause/user_initiated_abort_cause.h" +#include "net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h" +#include 
"net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/parameter/state_cookie_parameter.h" +#include "net/dcsctp/packet/parameter/supported_extensions_parameter.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/packet_observer.h" +#include "net/dcsctp/rx/data_tracker.h" +#include "net/dcsctp/rx/reassembly_queue.h" +#include "net/dcsctp/socket/callback_deferrer.h" +#include "net/dcsctp/socket/capabilities.h" +#include "net/dcsctp/socket/heartbeat_handler.h" +#include "net/dcsctp/socket/state_cookie.h" +#include "net/dcsctp/socket/stream_reset_handler.h" +#include "net/dcsctp/socket/transmission_control_block.h" +#include "net/dcsctp/timer/timer.h" +#include "net/dcsctp/tx/retransmission_queue.h" +#include "net/dcsctp/tx/send_queue.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/strings/string_format.h" + +namespace dcsctp { +namespace { + +// https://tools.ietf.org/html/rfc4960#section-5.1 +constexpr uint32_t kMinVerificationTag = 1; +constexpr uint32_t kMaxVerificationTag = std::numeric_limits::max(); + +// https://tools.ietf.org/html/rfc4960#section-3.3.2 +constexpr uint32_t kMinInitialTsn = 0; +constexpr uint32_t kMaxInitialTsn = std::numeric_limits::max(); + +Capabilities GetCapabilities(const DcSctpOptions& options, + const Parameters& parameters) { + Capabilities capabilities; + absl::optional supported_extensions = + parameters.get(); + + if (options.enable_partial_reliability) { + capabilities.partial_reliability = + parameters.get().has_value(); + if (supported_extensions.has_value()) { + capabilities.partial_reliability |= + supported_extensions->supports(ForwardTsnChunk::kType); + } + } + + if (options.enable_message_interleaving && 
supported_extensions.has_value()) { + capabilities.message_interleaving = + supported_extensions->supports(IDataChunk::kType) && + supported_extensions->supports(IForwardTsnChunk::kType); + } + if (supported_extensions.has_value() && + supported_extensions->supports(ReConfigChunk::kType)) { + capabilities.reconfig = true; + } + return capabilities; +} + +void AddCapabilityParameters(const DcSctpOptions& options, + Parameters::Builder& builder) { + std::vector chunk_types = {ReConfigChunk::kType}; + + if (options.enable_partial_reliability) { + builder.Add(ForwardTsnSupportedParameter()); + chunk_types.push_back(ForwardTsnChunk::kType); + } + if (options.enable_message_interleaving) { + chunk_types.push_back(IDataChunk::kType); + chunk_types.push_back(IForwardTsnChunk::kType); + } + builder.Add(SupportedExtensionsParameter(std::move(chunk_types))); +} + +TieTag MakeTieTag(DcSctpSocketCallbacks& cb) { + uint32_t tie_tag_upper = + cb.GetRandomInt(0, std::numeric_limits::max()); + uint32_t tie_tag_lower = + cb.GetRandomInt(1, std::numeric_limits::max()); + return TieTag(static_cast(tie_tag_upper) << 32 | + static_cast(tie_tag_lower)); +} + +} // namespace + +DcSctpSocket::DcSctpSocket(absl::string_view log_prefix, + DcSctpSocketCallbacks& callbacks, + std::unique_ptr packet_observer, + const DcSctpOptions& options) + : log_prefix_(std::string(log_prefix) + ": "), + packet_observer_(std::move(packet_observer)), + options_(options), + callbacks_(callbacks), + timer_manager_([this]() { return callbacks_.CreateTimeout(); }), + t1_init_(timer_manager_.CreateTimer( + "t1-init", + [this]() { return OnInitTimerExpiry(); }, + TimerOptions(options.t1_init_timeout, + TimerBackoffAlgorithm::kExponential, + options.max_init_retransmits))), + t1_cookie_(timer_manager_.CreateTimer( + "t1-cookie", + [this]() { return OnCookieTimerExpiry(); }, + TimerOptions(options.t1_cookie_timeout, + TimerBackoffAlgorithm::kExponential, + options.max_init_retransmits))), + 
t2_shutdown_(timer_manager_.CreateTimer( + "t2-shutdown", + [this]() { return OnShutdownTimerExpiry(); }, + TimerOptions(options.t2_shutdown_timeout, + TimerBackoffAlgorithm::kExponential, + options.max_retransmissions))), + send_queue_(log_prefix_, options_.max_send_buffer_size) {} + +std::string DcSctpSocket::log_prefix() const { + return log_prefix_ + "[" + std::string(ToString(state_)) + "] "; +} + +bool DcSctpSocket::IsConsistent() const { + switch (state_) { + case State::kClosed: + return (tcb_ == nullptr && !t1_init_->is_running() && + !t1_cookie_->is_running() && !t2_shutdown_->is_running()); + case State::kCookieWait: + return (tcb_ == nullptr && t1_init_->is_running() && + !t1_cookie_->is_running() && !t2_shutdown_->is_running()); + case State::kCookieEchoed: + return (tcb_ != nullptr && !t1_init_->is_running() && + t1_cookie_->is_running() && !t2_shutdown_->is_running() && + cookie_echo_chunk_.has_value()); + case State::kEstablished: + return (tcb_ != nullptr && !t1_init_->is_running() && + !t1_cookie_->is_running() && !t2_shutdown_->is_running()); + case State::kShutdownPending: + return (tcb_ != nullptr && !t1_init_->is_running() && + !t1_cookie_->is_running() && !t2_shutdown_->is_running()); + case State::kShutdownSent: + return (tcb_ != nullptr && !t1_init_->is_running() && + !t1_cookie_->is_running() && t2_shutdown_->is_running()); + case State::kShutdownReceived: + return (tcb_ != nullptr && !t1_init_->is_running() && + !t1_cookie_->is_running() && !t2_shutdown_->is_running()); + case State::kShutdownAckSent: + return (tcb_ != nullptr && !t1_init_->is_running() && + !t1_cookie_->is_running() && t2_shutdown_->is_running()); + } +} + +constexpr absl::string_view DcSctpSocket::ToString(DcSctpSocket::State state) { + switch (state) { + case DcSctpSocket::State::kClosed: + return "CLOSED"; + case DcSctpSocket::State::kCookieWait: + return "COOKIE_WAIT"; + case DcSctpSocket::State::kCookieEchoed: + return "COOKIE_ECHOED"; + case 
DcSctpSocket::State::kEstablished: + return "ESTABLISHED"; + case DcSctpSocket::State::kShutdownPending: + return "SHUTDOWN_PENDING"; + case DcSctpSocket::State::kShutdownSent: + return "SHUTDOWN_SENT"; + case DcSctpSocket::State::kShutdownReceived: + return "SHUTDOWN_RECEIVED"; + case DcSctpSocket::State::kShutdownAckSent: + return "SHUTDOWN_ACK_SENT"; + } +} + +void DcSctpSocket::SetState(State state, absl::string_view reason) { + if (state_ != state) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Socket state changed from " + << ToString(state_) << " to " << ToString(state) + << " due to " << reason; + state_ = state; + } +} + +void DcSctpSocket::SendInit() { + Parameters::Builder params_builder; + AddCapabilityParameters(options_, params_builder); + InitChunk init(/*initiate_tag=*/connect_params_.verification_tag, + /*a_rwnd=*/options_.max_receiver_window_buffer_size, + options_.announced_maximum_outgoing_streams, + options_.announced_maximum_incoming_streams, + connect_params_.initial_tsn, params_builder.Build()); + SctpPacket::Builder b(VerificationTag(0), options_); + b.Add(init); + SendPacket(b); +} + +void DcSctpSocket::MakeConnectionParameters() { + VerificationTag new_verification_tag( + callbacks_.GetRandomInt(kMinVerificationTag, kMaxVerificationTag)); + TSN initial_tsn(callbacks_.GetRandomInt(kMinInitialTsn, kMaxInitialTsn)); + connect_params_.initial_tsn = initial_tsn; + connect_params_.verification_tag = new_verification_tag; +} + +void DcSctpSocket::Connect() { + if (state_ == State::kClosed) { + MakeConnectionParameters(); + RTC_DLOG(LS_INFO) + << log_prefix() + << rtc::StringFormat( + "Connecting. 
my_verification_tag=%08x, my_initial_tsn=%u", + *connect_params_.verification_tag, *connect_params_.initial_tsn); + SendInit(); + t1_init_->Start(); + SetState(State::kCookieWait, "Connect called"); + } else { + RTC_DLOG(LS_WARNING) << log_prefix() + << "Called Connect on a socket that is not closed"; + } + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); +} + +void DcSctpSocket::Shutdown() { + if (tcb_ != nullptr) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "Upon receipt of the SHUTDOWN primitive from its upper layer, the + // endpoint enters the SHUTDOWN-PENDING state and remains there until all + // outstanding data has been acknowledged by its peer." + + // TODO(webrtc:12739): Remove this check, as it just hides the problem that + // the socket can transition from ShutdownSent to ShutdownPending, or + // ShutdownAckSent to ShutdownPending which is illegal. + if (state_ != State::kShutdownSent && state_ != State::kShutdownAckSent) { + SetState(State::kShutdownPending, "Shutdown called"); + t1_init_->Stop(); + t1_cookie_->Stop(); + MaybeSendShutdownOrAck(); + } + } else { + // Connection closed before even starting to connect, or during the initial + // connection phase. There is no outstanding data, so the socket can just + // be closed (stopping any connection timers, if any), as this is the + // client's intention, by calling Shutdown. 
+ InternalClose(ErrorKind::kNoError, ""); + } + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); +} + +void DcSctpSocket::Close() { + if (state_ != State::kClosed) { + if (tcb_ != nullptr) { + SctpPacket::Builder b = tcb_->PacketBuilder(); + b.Add(AbortChunk(/*filled_in_verification_tag=*/true, + Parameters::Builder() + .Add(UserInitiatedAbortCause("Close called")) + .Build())); + SendPacket(b); + } + InternalClose(ErrorKind::kNoError, ""); + } else { + RTC_DLOG(LS_INFO) << log_prefix() << "Called Close on a closed socket"; + } + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); +} + +void DcSctpSocket::CloseConnectionBecauseOfTooManyTransmissionErrors() { + SendPacket(tcb_->PacketBuilder().Add(AbortChunk( + true, Parameters::Builder() + .Add(UserInitiatedAbortCause("Too many retransmissions")) + .Build()))); + InternalClose(ErrorKind::kTooManyRetries, "Too many retransmissions"); +} + +void DcSctpSocket::InternalClose(ErrorKind error, absl::string_view message) { + if (state_ != State::kClosed) { + t1_init_->Stop(); + t1_cookie_->Stop(); + t2_shutdown_->Stop(); + tcb_ = nullptr; + cookie_echo_chunk_ = absl::nullopt; + + if (error == ErrorKind::kNoError) { + callbacks_.OnClosed(); + } else { + callbacks_.OnAborted(error, message); + } + SetState(State::kClosed, message); + } + // This method's purpose is to abort/close and make it consistent by ensuring + // that e.g. all timers really are stopped. 
+ RTC_DCHECK(IsConsistent()); +} + +SendStatus DcSctpSocket::Send(DcSctpMessage message, + const SendOptions& send_options) { + if (message.payload().empty()) { + callbacks_.OnError(ErrorKind::kProtocolViolation, + "Unable to send empty message"); + return SendStatus::kErrorMessageEmpty; + } + if (message.payload().size() > options_.max_message_size) { + callbacks_.OnError(ErrorKind::kProtocolViolation, + "Unable to send too large message"); + return SendStatus::kErrorMessageTooLarge; + } + if (state_ == State::kShutdownPending || state_ == State::kShutdownSent || + state_ == State::kShutdownReceived || state_ == State::kShutdownAckSent) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "An endpoint should reject any new data request from its upper layer + // if it is in the SHUTDOWN-PENDING, SHUTDOWN-SENT, SHUTDOWN-RECEIVED, or + // SHUTDOWN-ACK-SENT state." + callbacks_.OnError(ErrorKind::kWrongSequence, + "Unable to send message as the socket is shutting down"); + return SendStatus::kErrorShuttingDown; + } + if (send_queue_.IsFull()) { + callbacks_.OnError(ErrorKind::kResourceExhaustion, + "Unable to send message as the send queue is full"); + return SendStatus::kErrorResourceExhaustion; + } + + TimeMs now = callbacks_.TimeMillis(); + send_queue_.Add(now, std::move(message), send_options); + if (tcb_ != nullptr) { + tcb_->SendBufferedPackets(now); + } + + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); + return SendStatus::kSuccess; +} + +ResetStreamsStatus DcSctpSocket::ResetStreams( + rtc::ArrayView outgoing_streams) { + if (tcb_ == nullptr) { + callbacks_.OnError(ErrorKind::kWrongSequence, + "Can't reset streams as the socket is not connected"); + return ResetStreamsStatus::kNotConnected; + } + if (!tcb_->capabilities().reconfig) { + callbacks_.OnError(ErrorKind::kUnsupportedOperation, + "Can't reset streams as the peer doesn't support it"); + return ResetStreamsStatus::kNotSupported; + } + + 
tcb_->stream_reset_handler().ResetStreams(outgoing_streams); + absl::optional reconfig = + tcb_->stream_reset_handler().MakeStreamResetRequest(); + if (reconfig.has_value()) { + SctpPacket::Builder builder = tcb_->PacketBuilder(); + builder.Add(*reconfig); + SendPacket(builder); + } + + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); + return ResetStreamsStatus::kPerformed; +} + +SocketState DcSctpSocket::state() const { + switch (state_) { + case State::kClosed: + return SocketState::kClosed; + case State::kCookieWait: + ABSL_FALLTHROUGH_INTENDED; + case State::kCookieEchoed: + return SocketState::kConnecting; + case State::kEstablished: + return SocketState::kConnected; + case State::kShutdownPending: + ABSL_FALLTHROUGH_INTENDED; + case State::kShutdownSent: + ABSL_FALLTHROUGH_INTENDED; + case State::kShutdownReceived: + ABSL_FALLTHROUGH_INTENDED; + case State::kShutdownAckSent: + return SocketState::kShuttingDown; + } +} + +void DcSctpSocket::SetMaxMessageSize(size_t max_message_size) { + options_.max_message_size = max_message_size; +} + +void DcSctpSocket::MaybeSendShutdownOnPacketReceived(const SctpPacket& packet) { + if (state_ == State::kShutdownSent) { + bool has_data_chunk = + std::find_if(packet.descriptors().begin(), packet.descriptors().end(), + [](const SctpPacket::ChunkDescriptor& descriptor) { + return descriptor.type == DataChunk::kType; + }) != packet.descriptors().end(); + if (has_data_chunk) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "While in the SHUTDOWN-SENT state, the SHUTDOWN sender MUST immediately + // respond to each received packet containing one or more DATA chunks with + // a SHUTDOWN chunk and restart the T2-shutdown timer."" + SendShutdown(); + t2_shutdown_->set_duration(tcb_->current_rto()); + t2_shutdown_->Start(); + } + } +} + +bool DcSctpSocket::ValidatePacket(const SctpPacket& packet) { + const CommonHeader& header = packet.common_header(); + VerificationTag my_verification_tag = + tcb_ != nullptr ? 
tcb_->my_verification_tag() : VerificationTag(0); + + if (header.verification_tag == VerificationTag(0)) { + if (packet.descriptors().size() == 1 && + packet.descriptors()[0].type == InitChunk::kType) { + // https://tools.ietf.org/html/rfc4960#section-8.5.1 + // "When an endpoint receives an SCTP packet with the Verification Tag + // set to 0, it should verify that the packet contains only an INIT chunk. + // Otherwise, the receiver MUST silently discard the packet."" + return true; + } + callbacks_.OnError( + ErrorKind::kParseFailed, + "Only a single INIT chunk can be present in packets sent on " + "verification_tag = 0"); + return false; + } + + if (packet.descriptors().size() == 1 && + packet.descriptors()[0].type == AbortChunk::kType) { + // https://tools.ietf.org/html/rfc4960#section-8.5.1 + // "The receiver of an ABORT MUST accept the packet if the Verification + // Tag field of the packet matches its own tag and the T bit is not set OR + // if it is set to its peer's tag and the T bit is set in the Chunk Flags. + // Otherwise, the receiver MUST silently discard the packet and take no + // further action." + bool t_bit = (packet.descriptors()[0].flags & 0x01) != 0; + if (t_bit && tcb_ == nullptr) { + // Can't verify the tag - assume it's okey. 
+ return true; + } + if ((!t_bit && header.verification_tag == my_verification_tag) || + (t_bit && header.verification_tag == tcb_->peer_verification_tag())) { + return true; + } + callbacks_.OnError(ErrorKind::kParseFailed, + "ABORT chunk verification tag was wrong"); + return false; + } + + if (packet.descriptors()[0].type == InitAckChunk::kType) { + if (header.verification_tag == connect_params_.verification_tag) { + return true; + } + callbacks_.OnError( + ErrorKind::kParseFailed, + rtc::StringFormat( + "Packet has invalid verification tag: %08x, expected %08x", + *header.verification_tag, *connect_params_.verification_tag)); + return false; + } + + if (packet.descriptors()[0].type == CookieEchoChunk::kType) { + // Handled in chunk handler (due to RFC 4960, section 5.2.4). + return true; + } + + if (packet.descriptors().size() == 1 && + packet.descriptors()[0].type == ShutdownCompleteChunk::kType) { + // https://tools.ietf.org/html/rfc4960#section-8.5.1 + // "The receiver of a SHUTDOWN COMPLETE shall accept the packet if the + // Verification Tag field of the packet matches its own tag and the T bit is + // not set OR if it is set to its peer's tag and the T bit is set in the + // Chunk Flags. Otherwise, the receiver MUST silently discard the packet + // and take no further action." + bool t_bit = (packet.descriptors()[0].flags & 0x01) != 0; + if (t_bit && tcb_ == nullptr) { + // Can't verify the tag - assume it's okey. + return true; + } + if ((!t_bit && header.verification_tag == my_verification_tag) || + (t_bit && header.verification_tag == tcb_->peer_verification_tag())) { + return true; + } + callbacks_.OnError(ErrorKind::kParseFailed, + "SHUTDOWN_COMPLETE chunk verification tag was wrong"); + return false; + } + + // https://tools.ietf.org/html/rfc4960#section-8.5 + // "When receiving an SCTP packet, the endpoint MUST ensure that the value + // in the Verification Tag field of the received SCTP packet matches its own + // tag. 
If the received Verification Tag value does not match the receiver's + // own tag value, the receiver shall silently discard the packet and shall not + // process it any further..." + if (header.verification_tag == my_verification_tag) { + return true; + } + + callbacks_.OnError( + ErrorKind::kParseFailed, + rtc::StringFormat( + "Packet has invalid verification tag: %08x, expected %08x", + *header.verification_tag, *my_verification_tag)); + return false; +} + +void DcSctpSocket::HandleTimeout(TimeoutID timeout_id) { + timer_manager_.HandleTimeout(timeout_id); + + if (tcb_ != nullptr && tcb_->HasTooManyTxErrors()) { + // Tearing down the TCB has to be done outside the handlers. + CloseConnectionBecauseOfTooManyTransmissionErrors(); + } + + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); +} + +void DcSctpSocket::ReceivePacket(rtc::ArrayView data) { + if (packet_observer_ != nullptr) { + packet_observer_->OnReceivedPacket(callbacks_.TimeMillis(), data); + } + + absl::optional packet = + SctpPacket::Parse(data, options_.disable_checksum_verification); + if (!packet.has_value()) { + // https://tools.ietf.org/html/rfc4960#section-6.8 + // "The default procedure for handling invalid SCTP packets is to + // silently discard them." 
+ callbacks_.OnError(ErrorKind::kParseFailed, + "Failed to parse received SCTP packet"); + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); + return; + } + + if (RTC_DLOG_IS_ON) { + for (const auto& descriptor : packet->descriptors()) { + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received " + << DebugConvertChunkToString(descriptor.data); + } + } + + if (!ValidatePacket(*packet)) { + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Packet failed verification tag check - dropping"; + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); + return; + } + + MaybeSendShutdownOnPacketReceived(*packet); + + for (const auto& descriptor : packet->descriptors()) { + if (!Dispatch(packet->common_header(), descriptor)) { + break; + } + } + + if (tcb_ != nullptr) { + tcb_->data_tracker().ObservePacketEnd(); + tcb_->MaybeSendSack(); + } + + RTC_DCHECK(IsConsistent()); + callbacks_.TriggerDeferred(); +} + +void DcSctpSocket::DebugPrintOutgoing(rtc::ArrayView payload) { + auto packet = SctpPacket::Parse(payload); + RTC_DCHECK(packet.has_value()); + + for (const auto& desc : packet->descriptors()) { + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Sent " + << DebugConvertChunkToString(desc.data); + } +} + +bool DcSctpSocket::Dispatch(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + switch (descriptor.type) { + case DataChunk::kType: + HandleData(header, descriptor); + break; + case InitChunk::kType: + HandleInit(header, descriptor); + break; + case InitAckChunk::kType: + HandleInitAck(header, descriptor); + break; + case SackChunk::kType: + HandleSack(header, descriptor); + break; + case HeartbeatRequestChunk::kType: + HandleHeartbeatRequest(header, descriptor); + break; + case HeartbeatAckChunk::kType: + HandleHeartbeatAck(header, descriptor); + break; + case AbortChunk::kType: + HandleAbort(header, descriptor); + break; + case ErrorChunk::kType: + HandleError(header, descriptor); + break; + case CookieEchoChunk::kType: + 
HandleCookieEcho(header, descriptor); + break; + case CookieAckChunk::kType: + HandleCookieAck(header, descriptor); + break; + case ShutdownChunk::kType: + HandleShutdown(header, descriptor); + break; + case ShutdownAckChunk::kType: + HandleShutdownAck(header, descriptor); + break; + case ShutdownCompleteChunk::kType: + HandleShutdownComplete(header, descriptor); + break; + case ReConfigChunk::kType: + HandleReconfig(header, descriptor); + break; + case ForwardTsnChunk::kType: + HandleForwardTsn(header, descriptor); + break; + case IDataChunk::kType: + HandleIData(header, descriptor); + break; + case IForwardTsnChunk::kType: + HandleForwardTsn(header, descriptor); + break; + default: + return HandleUnrecognizedChunk(descriptor); + } + return true; +} + +bool DcSctpSocket::HandleUnrecognizedChunk( + const SctpPacket::ChunkDescriptor& descriptor) { + bool report_as_error = (descriptor.type & 0x40) != 0; + bool continue_processing = (descriptor.type & 0x80) != 0; + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received unknown chunk: " + << static_cast(descriptor.type); + if (report_as_error) { + rtc::StringBuilder sb; + sb << "Received unknown chunk of type: " + << static_cast(descriptor.type) << " with report-error bit set"; + callbacks_.OnError(ErrorKind::kParseFailed, sb.str()); + RTC_DLOG(LS_VERBOSE) + << log_prefix() + << "Unknown chunk, with type indicating it should be reported."; + + // https://tools.ietf.org/html/rfc4960#section-3.2 + // "... report in an ERROR chunk using the 'Unrecognized Chunk Type' + // cause." + if (tcb_ != nullptr) { + // Need TCB - this chunk must be sent with a correct verification tag. 
+ SendPacket(tcb_->PacketBuilder().Add( + ErrorChunk(Parameters::Builder() + .Add(UnrecognizedChunkTypeCause(std::vector( + descriptor.data.begin(), descriptor.data.end()))) + .Build()))); + } + } + if (!continue_processing) { + // https://tools.ietf.org/html/rfc4960#section-3.2 + // "Stop processing this SCTP packet and discard it, do not process any + // further chunks within it." + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Unknown chunk, with type indicating not to " + "process any further chunks"; + } + + return continue_processing; +} + +absl::optional DcSctpSocket::OnInitTimerExpiry() { + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Timer " << t1_init_->name() + << " has expired: " << t1_init_->expiration_count() + << "/" << t1_init_->options().max_restarts; + RTC_DCHECK(state_ == State::kCookieWait); + + if (t1_init_->is_running()) { + SendInit(); + } else { + InternalClose(ErrorKind::kTooManyRetries, "No INIT_ACK received"); + } + RTC_DCHECK(IsConsistent()); + return absl::nullopt; +} + +absl::optional DcSctpSocket::OnCookieTimerExpiry() { + // https://tools.ietf.org/html/rfc4960#section-4 + // "If the T1-cookie timer expires, the endpoint MUST retransmit COOKIE + // ECHO and restart the T1-cookie timer without changing state. This MUST + // be repeated up to 'Max.Init.Retransmits' times. After that, the endpoint + // MUST abort the initialization process and report the error to the SCTP + // user." 
+ RTC_DLOG(LS_VERBOSE) << log_prefix() << "Timer " << t1_cookie_->name() + << " has expired: " << t1_cookie_->expiration_count() + << "/" << t1_cookie_->options().max_restarts; + + RTC_DCHECK(state_ == State::kCookieEchoed); + + if (t1_cookie_->is_running()) { + SendCookieEcho(); + } else { + InternalClose(ErrorKind::kTooManyRetries, "No COOKIE_ACK received"); + } + + RTC_DCHECK(IsConsistent()); + return absl::nullopt; +} + +absl::optional DcSctpSocket::OnShutdownTimerExpiry() { + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Timer " << t2_shutdown_->name() + << " has expired: " << t2_shutdown_->expiration_count() + << "/" << t2_shutdown_->options().max_restarts; + + if (!t2_shutdown_->is_running()) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "An endpoint should limit the number of retransmissions of the SHUTDOWN + // chunk to the protocol parameter 'Association.Max.Retrans'. If this + // threshold is exceeded, the endpoint should destroy the TCB..." + + SendPacket(tcb_->PacketBuilder().Add( + AbortChunk(true, Parameters::Builder() + .Add(UserInitiatedAbortCause( + "Too many retransmissions of SHUTDOWN")) + .Build()))); + + InternalClose(ErrorKind::kTooManyRetries, "No SHUTDOWN_ACK received"); + RTC_DCHECK(IsConsistent()); + return absl::nullopt; + } + + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "If the timer expires, the endpoint must resend the SHUTDOWN with the + // updated last sequential TSN received from its peer." + SendShutdown(); + RTC_DCHECK(IsConsistent()); + return tcb_->current_rto(); +} + +void DcSctpSocket::SendPacket(SctpPacket::Builder& builder) { + if (builder.empty()) { + return; + } + + std::vector payload = builder.Build(); + + if (RTC_DLOG_IS_ON) { + DebugPrintOutgoing(payload); + } + + // The heartbeat interval timer is restarted for every sent packet, to + // fire when the outgoing channel is inactive. 
+ if (tcb_ != nullptr) { + tcb_->heartbeat_handler().RestartTimer(); + } + + if (packet_observer_ != nullptr) { + packet_observer_->OnSentPacket(callbacks_.TimeMillis(), payload); + } + callbacks_.SendPacket(payload); +} + +bool DcSctpSocket::ValidateHasTCB() { + if (tcb_ != nullptr) { + return true; + } + + callbacks_.OnError( + ErrorKind::kNotConnected, + "Received unexpected commands on socket that is not connected"); + return false; +} + +void DcSctpSocket::ReportFailedToParseChunk(int chunk_type) { + rtc::StringBuilder sb; + sb << "Failed to parse chunk of type: " << chunk_type; + callbacks_.OnError(ErrorKind::kParseFailed, sb.str()); +} + +void DcSctpSocket::HandleData(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = DataChunk::Parse(descriptor.data); + if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { + HandleDataCommon(*chunk); + } +} + +void DcSctpSocket::HandleIData(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = IDataChunk::Parse(descriptor.data); + if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { + HandleDataCommon(*chunk); + } +} + +void DcSctpSocket::HandleDataCommon(AnyDataChunk& chunk) { + TSN tsn = chunk.tsn(); + AnyDataChunk::ImmediateAckFlag immediate_ack = chunk.options().immediate_ack; + Data data = std::move(chunk).extract(); + + if (data.payload.empty()) { + // Empty DATA chunks are illegal. 
+ SendPacket(tcb_->PacketBuilder().Add( + ErrorChunk(Parameters::Builder().Add(NoUserDataCause(tsn)).Build()))); + callbacks_.OnError(ErrorKind::kProtocolViolation, + "Received DATA chunk with no user data"); + return; + } + + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Handle DATA, queue_size=" + << tcb_->reassembly_queue().queued_bytes() + << ", water_mark=" + << tcb_->reassembly_queue().watermark_bytes() + << ", full=" << tcb_->reassembly_queue().is_full() + << ", above=" + << tcb_->reassembly_queue().is_above_watermark(); + + if (tcb_->reassembly_queue().is_full()) { + // If the reassembly queue is full, there is nothing that can be done. The + // specification only allows dropping gap-ack-blocks, and that's not + // likely to help as the socket has been trying to fill gaps since the + // watermark was reached. + SendPacket(tcb_->PacketBuilder().Add(AbortChunk( + true, Parameters::Builder().Add(OutOfResourceErrorCause()).Build()))); + InternalClose(ErrorKind::kResourceExhaustion, + "Reassembly Queue is exhausted"); + return; + } + + if (tcb_->reassembly_queue().is_above_watermark()) { + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Is above high watermark"; + // If the reassembly queue is above its high watermark, only accept data + // chunks that increase its cumulative ack tsn in an attempt to fill gaps + // to deliver messages. 
+ if (!tcb_->data_tracker().will_increase_cum_ack_tsn(tsn)) { + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Rejected data because of exceeding watermark"; + tcb_->data_tracker().ForceImmediateSack(); + return; + } + } + + if (!tcb_->data_tracker().IsTSNValid(tsn)) { + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Rejected data because of failing TSN validity"; + return; + } + + tcb_->data_tracker().Observe(tsn, immediate_ack); + tcb_->reassembly_queue().MaybeResetStreamsDeferred( + tcb_->data_tracker().last_cumulative_acked_tsn()); + tcb_->reassembly_queue().Add(tsn, std::move(data)); + DeliverReassembledMessages(); +} + +void DcSctpSocket::HandleInit(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = InitChunk::Parse(descriptor.data); + if (!ValidateParseSuccess(chunk)) { + return; + } + + if (chunk->initiate_tag() == VerificationTag(0) || + chunk->nbr_outbound_streams() == 0 || chunk->nbr_inbound_streams() == 0) { + // https://tools.ietf.org/html/rfc4960#section-3.3.2 + // "If the value of the Initiate Tag in a received INIT chunk is found + // to be 0, the receiver MUST treat it as an error and close the + // association by transmitting an ABORT." + + // "A receiver of an INIT with the OS value set to 0 SHOULD abort the + // association." + + // "A receiver of an INIT with the MIS value of 0 SHOULD abort the + // association." 
+ + SendPacket(SctpPacket::Builder(VerificationTag(0), options_) + .Add(AbortChunk( + /*filled_in_verification_tag=*/false, + Parameters::Builder() + .Add(ProtocolViolationCause("INIT malformed")) + .Build()))); + InternalClose(ErrorKind::kProtocolViolation, "Received invalid INIT"); + return; + } + + if (state_ == State::kShutdownAckSent) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "If an endpoint is in the SHUTDOWN-ACK-SENT state and receives an + // INIT chunk (e.g., if the SHUTDOWN COMPLETE was lost) with source and + // destination transport addresses (either in the IP addresses or in the + // INIT chunk) that belong to this association, it should discard the INIT + // chunk and retransmit the SHUTDOWN ACK chunk." + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Received Init indicating lost ShutdownComplete"; + SendShutdownAck(); + return; + } + + TieTag tie_tag(0); + if (state_ == State::kClosed) { + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Received Init in closed state (normal)"; + + MakeConnectionParameters(); + } else if (state_ == State::kCookieWait || state_ == State::kCookieEchoed) { + // https://tools.ietf.org/html/rfc4960#section-5.2.1 + // "This usually indicates an initialization collision, i.e., each + // endpoint is attempting, at about the same time, to establish an + // association with the other endpoint. Upon receipt of an INIT in the + // COOKIE-WAIT state, an endpoint MUST respond with an INIT ACK using the + // same parameters it sent in its original INIT chunk (including its + // Initiate Tag, unchanged). When responding, the endpoint MUST send the + // INIT ACK back to the same address that the original INIT (sent by this + // endpoint) was sent." 
+ RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Received Init indicating simultaneous connections"; + } else { + RTC_DCHECK(tcb_ != nullptr); + // https://tools.ietf.org/html/rfc4960#section-5.2.2 + // "The outbound SCTP packet containing this INIT ACK MUST carry a + // Verification Tag value equal to the Initiate Tag found in the + // unexpected INIT. And the INIT ACK MUST contain a new Initiate Tag + // (randomly generated; see Section 5.3.1). Other parameters for the + // endpoint SHOULD be copied from the existing parameters of the + // association (e.g., number of outbound streams) into the INIT ACK and + // cookie." + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Received Init indicating restarted connection"; + // Create a new verification tag - different from the previous one. + for (int tries = 0; tries < 10; ++tries) { + connect_params_.verification_tag = VerificationTag( + callbacks_.GetRandomInt(kMinVerificationTag, kMaxVerificationTag)); + if (connect_params_.verification_tag != tcb_->my_verification_tag()) { + break; + } + } + + // Make the initial TSN make a large jump, so that there is no overlap + // with the old and new association. + connect_params_.initial_tsn = + TSN(*tcb_->retransmission_queue().next_tsn() + 1000000); + tie_tag = tcb_->tie_tag(); + } + + RTC_DLOG(LS_VERBOSE) + << log_prefix() + << rtc::StringFormat( + "Proceeding with connection. 
my_verification_tag=%08x, " + "my_initial_tsn=%u, peer_verification_tag=%08x, " + "peer_initial_tsn=%u", + *connect_params_.verification_tag, *connect_params_.initial_tsn, + *chunk->initiate_tag(), *chunk->initial_tsn()); + + Capabilities capabilities = GetCapabilities(options_, chunk->parameters()); + + SctpPacket::Builder b(chunk->initiate_tag(), options_); + Parameters::Builder params_builder = + Parameters::Builder().Add(StateCookieParameter( + StateCookie(chunk->initiate_tag(), chunk->initial_tsn(), + chunk->a_rwnd(), tie_tag, capabilities) + .Serialize())); + AddCapabilityParameters(options_, params_builder); + + InitAckChunk init_ack(/*initiate_tag=*/connect_params_.verification_tag, + options_.max_receiver_window_buffer_size, + options_.announced_maximum_outgoing_streams, + options_.announced_maximum_incoming_streams, + connect_params_.initial_tsn, params_builder.Build()); + b.Add(init_ack); + SendPacket(b); +} + +void DcSctpSocket::SendCookieEcho() { + RTC_DCHECK(tcb_ != nullptr); + TimeMs now = callbacks_.TimeMillis(); + SctpPacket::Builder b = tcb_->PacketBuilder(); + b.Add(*cookie_echo_chunk_); + + // https://tools.ietf.org/html/rfc4960#section-5.1 + // "The COOKIE ECHO chunk can be bundled with any pending outbound DATA + // chunks, but it MUST be the first chunk in the packet and until the COOKIE + // ACK is returned the sender MUST NOT send any other packets to the peer." + tcb_->SendBufferedPackets(b, now, /*only_one_packet=*/true); +} + +void DcSctpSocket::HandleInitAck( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = InitAckChunk::Parse(descriptor.data); + if (!ValidateParseSuccess(chunk)) { + return; + } + + if (state_ != State::kCookieWait) { + // https://tools.ietf.org/html/rfc4960#section-5.2.3 + // "If an INIT ACK is received by an endpoint in any state other than + // the COOKIE-WAIT state, the endpoint should discard the INIT ACK chunk." 
+ RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Received INIT_ACK in unexpected state"; + return; + } + + auto cookie = chunk->parameters().get(); + if (!cookie.has_value()) { + SendPacket(SctpPacket::Builder(connect_params_.verification_tag, options_) + .Add(AbortChunk( + /*filled_in_verification_tag=*/false, + Parameters::Builder() + .Add(ProtocolViolationCause("INIT-ACK malformed")) + .Build()))); + InternalClose(ErrorKind::kProtocolViolation, + "InitAck chunk doesn't contain a cookie"); + return; + } + Capabilities capabilities = GetCapabilities(options_, chunk->parameters()); + t1_init_->Stop(); + + tcb_ = std::make_unique( + timer_manager_, log_prefix_, options_, capabilities, callbacks_, + send_queue_, connect_params_.verification_tag, + connect_params_.initial_tsn, chunk->initiate_tag(), chunk->initial_tsn(), + chunk->a_rwnd(), MakeTieTag(callbacks_), + [this]() { return state_ == State::kEstablished; }, + [this](SctpPacket::Builder& builder) { return SendPacket(builder); }); + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Created peer TCB: " << tcb_->ToString(); + + SetState(State::kCookieEchoed, "INIT_ACK received"); + + // The connection isn't fully established just yet. 
+ cookie_echo_chunk_ = CookieEchoChunk(cookie->data()); + SendCookieEcho(); + t1_cookie_->Start(); +} + +void DcSctpSocket::HandleCookieEcho( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = + CookieEchoChunk::Parse(descriptor.data); + if (!ValidateParseSuccess(chunk)) { + return; + } + + absl::optional cookie = + StateCookie::Deserialize(chunk->cookie()); + if (!cookie.has_value()) { + callbacks_.OnError(ErrorKind::kParseFailed, "Failed to parse state cookie"); + return; + } + + if (tcb_ != nullptr) { + if (!HandleCookieEchoWithTCB(header, *cookie)) { + return; + } + } else { + if (header.verification_tag != connect_params_.verification_tag) { + callbacks_.OnError( + ErrorKind::kParseFailed, + rtc::StringFormat( + "Received CookieEcho with invalid verification tag: %08x, " + "expected %08x", + *header.verification_tag, *connect_params_.verification_tag)); + return; + } + } + + // The init timer can be running on simultaneous connections. 
+ t1_init_->Stop(); + t1_cookie_->Stop(); + if (state_ != State::kEstablished) { + cookie_echo_chunk_ = absl::nullopt; + SetState(State::kEstablished, "COOKIE_ECHO received"); + callbacks_.OnConnected(); + } + + if (tcb_ == nullptr) { + tcb_ = std::make_unique( + timer_manager_, log_prefix_, options_, cookie->capabilities(), + callbacks_, send_queue_, connect_params_.verification_tag, + connect_params_.initial_tsn, cookie->initiate_tag(), + cookie->initial_tsn(), cookie->a_rwnd(), MakeTieTag(callbacks_), + [this]() { return state_ == State::kEstablished; }, + [this](SctpPacket::Builder& builder) { return SendPacket(builder); }); + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Created peer TCB: " << tcb_->ToString(); + } + + SctpPacket::Builder b = tcb_->PacketBuilder(); + b.Add(CookieAckChunk()); + + // https://tools.ietf.org/html/rfc4960#section-5.1 + // "A COOKIE ACK chunk may be bundled with any pending DATA chunks (and/or + // SACK chunks), but the COOKIE ACK chunk MUST be the first chunk in the + // packet." + tcb_->SendBufferedPackets(b, callbacks_.TimeMillis()); +} + +bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, + const StateCookie& cookie) { + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Handling CookieEchoChunk with TCB. local_tag=" + << *tcb_->my_verification_tag() + << ", peer_tag=" << *header.verification_tag + << ", tcb_tag=" << *tcb_->peer_verification_tag() + << ", cookie_tag=" << *cookie.initiate_tag() + << ", local_tie_tag=" << *tcb_->tie_tag() + << ", peer_tie_tag=" << *cookie.tie_tag(); + // https://tools.ietf.org/html/rfc4960#section-5.2.4 + // "Handle a COOKIE ECHO when a TCB Exists" + if (header.verification_tag != tcb_->my_verification_tag() && + tcb_->peer_verification_tag() != cookie.initiate_tag() && + cookie.tie_tag() == tcb_->tie_tag()) { + // "A) In this case, the peer may have restarted." 
+ if (state_ == State::kShutdownAckSent) { + // "If the endpoint is in the SHUTDOWN-ACK-SENT state and recognizes + // that the peer has restarted ... it MUST NOT set up a new association + // but instead resend the SHUTDOWN ACK and send an ERROR chunk with a + // "Cookie Received While Shutting Down" error cause to its peer." + SctpPacket::Builder b(cookie.initiate_tag(), options_); + b.Add(ShutdownAckChunk()); + b.Add(ErrorChunk(Parameters::Builder() + .Add(CookieReceivedWhileShuttingDownCause()) + .Build())); + SendPacket(b); + callbacks_.OnError(ErrorKind::kWrongSequence, + "Received COOKIE-ECHO while shutting down"); + return false; + } + + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Received COOKIE-ECHO indicating a restarted peer"; + + // If a message was partly sent, and the peer restarted, resend it in + // full by resetting the send queue. + send_queue_.Reset(); + tcb_ = nullptr; + callbacks_.OnConnectionRestarted(); + } else if (header.verification_tag == tcb_->my_verification_tag() && + tcb_->peer_verification_tag() != cookie.initiate_tag()) { + // TODO(boivie): Handle the peer_tag == 0? + // "B) In this case, both sides may be attempting to start an + // association at about the same time, but the peer endpoint started its + // INIT after responding to the local endpoint's INIT." + RTC_DLOG(LS_VERBOSE) + << log_prefix() + << "Received COOKIE-ECHO indicating simultaneous connections"; + tcb_ = nullptr; + } else if (header.verification_tag != tcb_->my_verification_tag() && + tcb_->peer_verification_tag() == cookie.initiate_tag() && + cookie.tie_tag() == TieTag(0)) { + // "C) In this case, the local endpoint's cookie has arrived late. + // Before it arrived, the local endpoint sent an INIT and received an + // INIT ACK and finally sent a COOKIE ECHO with the peer's same tag but + // a new tag of its own. The cookie should be silently discarded. The + // endpoint SHOULD NOT change states and should leave any timers + // running." 
+ RTC_DLOG(LS_VERBOSE) + << log_prefix() + << "Received COOKIE-ECHO indicating a late COOKIE-ECHO. Discarding"; + return false; + } else if (header.verification_tag == tcb_->my_verification_tag() && + tcb_->peer_verification_tag() == cookie.initiate_tag()) { + // "D) When both local and remote tags match, the endpoint should enter + // the ESTABLISHED state, if it is in the COOKIE-ECHOED state. It + // should stop any cookie timer that may be running and send a COOKIE + // ACK." + RTC_DLOG(LS_VERBOSE) + << log_prefix() + << "Received duplicate COOKIE-ECHO, probably because of peer not " + "receiving COOKIE-ACK and retransmitting COOKIE-ECHO. Continuing."; + } + return true; +} + +void DcSctpSocket::HandleCookieAck( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = CookieAckChunk::Parse(descriptor.data); + if (!ValidateParseSuccess(chunk)) { + return; + } + + if (state_ != State::kCookieEchoed) { + // https://tools.ietf.org/html/rfc4960#section-5.2.5 + // "At any state other than COOKIE-ECHOED, an endpoint should silently + // discard a received COOKIE ACK chunk." 
+ RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Received COOKIE_ACK not in COOKIE_ECHOED state"; + return; + } + + // RFC 4960, Errata ID: 4400 + t1_cookie_->Stop(); + cookie_echo_chunk_ = absl::nullopt; + SetState(State::kEstablished, "COOKIE_ACK received"); + tcb_->SendBufferedPackets(callbacks_.TimeMillis()); + callbacks_.OnConnected(); +} + +void DcSctpSocket::DeliverReassembledMessages() { + if (tcb_->reassembly_queue().HasMessages()) { + for (auto& message : tcb_->reassembly_queue().FlushMessages()) { + callbacks_.OnMessageReceived(std::move(message)); + } + } +} + +void DcSctpSocket::HandleSack(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = SackChunk::Parse(descriptor.data); + + if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { + TimeMs now = callbacks_.TimeMillis(); + SackChunk sack = ChunkValidators::Clean(*std::move(chunk)); + + if (tcb_->retransmission_queue().HandleSack(now, sack)) { + MaybeSendShutdownOrAck(); + // Receiving an ACK will decrease outstanding bytes (maybe now below + // cwnd?) or indicate packet loss that may result in sending FORWARD-TSN. 
+ tcb_->SendBufferedPackets(now); + } else { + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Dropping out-of-order SACK with TSN " + << *sack.cumulative_tsn_ack(); + } + } +} + +void DcSctpSocket::HandleHeartbeatRequest( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = + HeartbeatRequestChunk::Parse(descriptor.data); + + if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { + tcb_->heartbeat_handler().HandleHeartbeatRequest(*std::move(chunk)); + } +} + +void DcSctpSocket::HandleHeartbeatAck( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = + HeartbeatAckChunk::Parse(descriptor.data); + + if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { + tcb_->heartbeat_handler().HandleHeartbeatAck(*std::move(chunk)); + } +} + +void DcSctpSocket::HandleAbort(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = AbortChunk::Parse(descriptor.data); + if (ValidateParseSuccess(chunk)) { + std::string error_string = ErrorCausesToString(chunk->error_causes()); + if (tcb_ == nullptr) { + // https://tools.ietf.org/html/rfc4960#section-3.3.7 + // "If an endpoint receives an ABORT with a format error or no TCB is + // found, it MUST silently discard it." + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received ABORT (" << error_string + << ") on a connection with no TCB. 
Ignoring"; + return; + } + + RTC_DLOG(LS_WARNING) << log_prefix() << "Received ABORT (" << error_string + << ") - closing connection."; + InternalClose(ErrorKind::kPeerReported, error_string); + } +} + +void DcSctpSocket::HandleError(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = ErrorChunk::Parse(descriptor.data); + if (ValidateParseSuccess(chunk)) { + std::string error_string = ErrorCausesToString(chunk->error_causes()); + if (tcb_ == nullptr) { + RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received ERROR (" << error_string + << ") on a connection with no TCB. Ignoring"; + return; + } + + RTC_DLOG(LS_WARNING) << log_prefix() << "Received ERROR: " << error_string; + callbacks_.OnError(ErrorKind::kPeerReported, + "Peer reported error: " + error_string); + } +} + +void DcSctpSocket::HandleReconfig( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = ReConfigChunk::Parse(descriptor.data); + if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { + tcb_->stream_reset_handler().HandleReConfig(*std::move(chunk)); + } +} + +void DcSctpSocket::HandleShutdown( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + if (!ValidateParseSuccess(ShutdownChunk::Parse(descriptor.data))) { + return; + } + + if (state_ == State::kClosed) { + return; + } else if (state_ == State::kCookieWait || state_ == State::kCookieEchoed) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "If a SHUTDOWN is received in the COOKIE-WAIT or COOKIE ECHOED state, + // the SHUTDOWN chunk SHOULD be silently discarded." 
+ } else if (state_ == State::kShutdownSent) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "If an endpoint is in the SHUTDOWN-SENT state and receives a + // SHUTDOWN chunk from its peer, the endpoint shall respond immediately + // with a SHUTDOWN ACK to its peer, and move into the SHUTDOWN-ACK-SENT + // state restarting its T2-shutdown timer." + SendShutdownAck(); + SetState(State::kShutdownAckSent, "SHUTDOWN received"); + } else if (state_ == State::kShutdownAckSent) { + // TODO(webrtc:12739): This condition should be removed and handled by the + // next (state_ != State::kShutdownReceived). + return; + } else if (state_ != State::kShutdownReceived) { + RTC_DLOG(LS_VERBOSE) << log_prefix() + << "Received SHUTDOWN - shutting down the socket"; + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "Upon reception of the SHUTDOWN, the peer endpoint shall enter the + // SHUTDOWN-RECEIVED state, stop accepting new data from its SCTP user, + // and verify, by checking the Cumulative TSN Ack field of the chunk, that + // all its outstanding DATA chunks have been received by the SHUTDOWN + // sender." + SetState(State::kShutdownReceived, "SHUTDOWN received"); + MaybeSendShutdownOrAck(); + } +} + +void DcSctpSocket::HandleShutdownAck( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + if (!ValidateParseSuccess(ShutdownAckChunk::Parse(descriptor.data))) { + return; + } + + if (state_ == State::kShutdownSent || state_ == State::kShutdownAckSent) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "Upon the receipt of the SHUTDOWN ACK, the SHUTDOWN sender shall stop + // the T2-shutdown timer, send a SHUTDOWN COMPLETE chunk to its peer, and + // remove all record of the association." + + // "If an endpoint is in the SHUTDOWN-ACK-SENT state and receives a + // SHUTDOWN ACK, it shall stop the T2-shutdown timer, send a SHUTDOWN + // COMPLETE chunk to its peer, and remove all record of the association." 
+ + SctpPacket::Builder b = tcb_->PacketBuilder(); + b.Add(ShutdownCompleteChunk(/*tag_reflected=*/false)); + SendPacket(b); + InternalClose(ErrorKind::kNoError, ""); + } else { + // https://tools.ietf.org/html/rfc4960#section-8.5.1 + // "If the receiver is in COOKIE-ECHOED or COOKIE-WAIT state + // the procedures in Section 8.4 SHOULD be followed; in other words, it + // should be treated as an Out Of The Blue packet." + + // https://tools.ietf.org/html/rfc4960#section-8.4 + // "If the packet contains a SHUTDOWN ACK chunk, the receiver + // should respond to the sender of the OOTB packet with a SHUTDOWN + // COMPLETE. When sending the SHUTDOWN COMPLETE, the receiver of the OOTB + // packet must fill in the Verification Tag field of the outbound packet + // with the Verification Tag received in the SHUTDOWN ACK and set the T + // bit in the Chunk Flags to indicate that the Verification Tag is + // reflected." + + SctpPacket::Builder b(header.verification_tag, options_); + b.Add(ShutdownCompleteChunk(/*tag_reflected=*/true)); + SendPacket(b); + } +} + +void DcSctpSocket::HandleShutdownComplete( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + if (!ValidateParseSuccess(ShutdownCompleteChunk::Parse(descriptor.data))) { + return; + } + + if (state_ == State::kShutdownAckSent) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "Upon reception of the SHUTDOWN COMPLETE chunk, the endpoint will + // verify that it is in the SHUTDOWN-ACK-SENT state; if it is not, the + // chunk should be discarded. If the endpoint is in the SHUTDOWN-ACK-SENT + // state, the endpoint should stop the T2-shutdown timer and remove all + // knowledge of the association (and thus the association enters the + // CLOSED state)." 
+ InternalClose(ErrorKind::kNoError, ""); + } +} + +void DcSctpSocket::HandleForwardTsn( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = + ForwardTsnChunk::Parse(descriptor.data); + if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { + HandleForwardTsnCommon(*chunk); + } +} + +void DcSctpSocket::HandleIForwardTsn( + const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor) { + absl::optional chunk = + IForwardTsnChunk::Parse(descriptor.data); + if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { + HandleForwardTsnCommon(*chunk); + } +} + +void DcSctpSocket::HandleForwardTsnCommon(const AnyForwardTsnChunk& chunk) { + if (!tcb_->capabilities().partial_reliability) { + SctpPacket::Builder b = tcb_->PacketBuilder(); + b.Add(AbortChunk(/*filled_in_verification_tag=*/true, + Parameters::Builder() + .Add(ProtocolViolationCause( + "I-FORWARD-TSN received, but not indicated " + "during connection establishment")) + .Build())); + SendPacket(b); + + callbacks_.OnError(ErrorKind::kProtocolViolation, + "Received a FORWARD_TSN without announced peer support"); + return; + } + tcb_->data_tracker().HandleForwardTsn(chunk.new_cumulative_tsn()); + tcb_->reassembly_queue().Handle(chunk); + // A forward TSN - for ordered streams - may allow messages to be + // delivered. + DeliverReassembledMessages(); + + // Processing a FORWARD_TSN might result in sending a SACK. + tcb_->MaybeSendSack(); +} + +void DcSctpSocket::MaybeSendShutdownOrAck() { + if (tcb_->retransmission_queue().outstanding_bytes() != 0) { + return; + } + + if (state_ == State::kShutdownPending) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "Once all its outstanding data has been acknowledged, the endpoint + // shall send a SHUTDOWN chunk to its peer including in the Cumulative TSN + // Ack field the last sequential TSN it has received from the peer. 
It + // shall then start the T2-shutdown timer and enter the SHUTDOWN-SENT + // state."" + + SendShutdown(); + t2_shutdown_->set_duration(tcb_->current_rto()); + t2_shutdown_->Start(); + SetState(State::kShutdownSent, "No more outstanding data"); + } else if (state_ == State::kShutdownReceived) { + // https://tools.ietf.org/html/rfc4960#section-9.2 + // "If the receiver of the SHUTDOWN has no more outstanding DATA + // chunks, the SHUTDOWN receiver MUST send a SHUTDOWN ACK and start a + // T2-shutdown timer of its own, entering the SHUTDOWN-ACK-SENT state. If + // the timer expires, the endpoint must resend the SHUTDOWN ACK." + + SendShutdownAck(); + SetState(State::kShutdownAckSent, "No more outstanding data"); + } +} + +void DcSctpSocket::SendShutdown() { + SctpPacket::Builder b = tcb_->PacketBuilder(); + b.Add(ShutdownChunk(tcb_->data_tracker().last_cumulative_acked_tsn())); + SendPacket(b); +} + +void DcSctpSocket::SendShutdownAck() { + SendPacket(tcb_->PacketBuilder().Add(ShutdownAckChunk())); + t2_shutdown_->set_duration(tcb_->current_rto()); + t2_shutdown_->Start(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.h new file mode 100644 index 000000000..24c0437b4 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.h @@ -0,0 +1,276 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_SOCKET_DCSCTP_SOCKET_H_ +#define NET_DCSCTP_SOCKET_DCSCTP_SOCKET_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/chunk/abort_chunk.h" +#include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" +#include "net/dcsctp/packet/chunk/cookie_echo_chunk.h" +#include "net/dcsctp/packet/chunk/data_chunk.h" +#include "net/dcsctp/packet/chunk/data_common.h" +#include "net/dcsctp/packet/chunk/error_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h" +#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h" +#include "net/dcsctp/packet/chunk/idata_chunk.h" +#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/init_ack_chunk.h" +#include "net/dcsctp/packet/chunk/init_chunk.h" +#include "net/dcsctp/packet/chunk/reconfig_chunk.h" +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_ack_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_chunk.h" +#include "net/dcsctp/packet/chunk/shutdown_complete_chunk.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/packet_observer.h" +#include "net/dcsctp/rx/data_tracker.h" +#include "net/dcsctp/rx/reassembly_queue.h" +#include "net/dcsctp/socket/callback_deferrer.h" +#include "net/dcsctp/socket/state_cookie.h" +#include "net/dcsctp/socket/transmission_control_block.h" +#include "net/dcsctp/timer/timer.h" +#include "net/dcsctp/tx/fcfs_send_queue.h" +#include "net/dcsctp/tx/retransmission_error_counter.h" +#include "net/dcsctp/tx/retransmission_queue.h" +#include 
"net/dcsctp/tx/retransmission_timeout.h" + +namespace dcsctp { + +// DcSctpSocket represents a single SCTP socket, to be used over DTLS. +// +// Every dcSCTP is completely isolated from any other socket. +// +// This class manages all packet and chunk dispatching and mainly handles the +// connection sequences (connect, close, shutdown, etc) as well as managing +// the Transmission Control Block (tcb). +// +// This class is thread-compatible. +class DcSctpSocket : public DcSctpSocketInterface { + public: + // Instantiates a DcSctpSocket, which interacts with the world through the + // `callbacks` interface and is configured using `options`. + // + // For debugging, `log_prefix` will prefix all debug logs, and a + // `packet_observer` can be attached to e.g. dump sent and received packets. + DcSctpSocket(absl::string_view log_prefix, + DcSctpSocketCallbacks& callbacks, + std::unique_ptr packet_observer, + const DcSctpOptions& options); + + DcSctpSocket(const DcSctpSocket&) = delete; + DcSctpSocket& operator=(const DcSctpSocket&) = delete; + + // Implementation of `DcSctpSocketInterface`. + void ReceivePacket(rtc::ArrayView data) override; + void HandleTimeout(TimeoutID timeout_id) override; + void Connect() override; + void Shutdown() override; + void Close() override; + SendStatus Send(DcSctpMessage message, + const SendOptions& send_options) override; + ResetStreamsStatus ResetStreams( + rtc::ArrayView outgoing_streams) override; + SocketState state() const override; + const DcSctpOptions& options() const override { return options_; } + void SetMaxMessageSize(size_t max_message_size) override; + + // Returns this socket's verification tag, or zero if not yet connected. + VerificationTag verification_tag() const { + return tcb_ != nullptr ? tcb_->my_verification_tag() : VerificationTag(0); + } + + private: + // Parameter proposals valid during the connect phase. 
+ struct ConnectParameters { + TSN initial_tsn = TSN(0); + VerificationTag verification_tag = VerificationTag(0); + }; + + // Detailed state (separate from SocketState, which is the public state). + enum class State { + kClosed, + kCookieWait, + // TCB valid in these: + kCookieEchoed, + kEstablished, + kShutdownPending, + kShutdownSent, + kShutdownReceived, + kShutdownAckSent, + }; + + // Returns the log prefix used for debug logging. + std::string log_prefix() const; + + bool IsConsistent() const; + static constexpr absl::string_view ToString(DcSctpSocket::State state); + + // Changes the socket state, given a `reason` (for debugging/logging). + void SetState(State state, absl::string_view reason); + // Fills in `connect_params` with random verification tag and initial TSN. + void MakeConnectionParameters(); + // Closes the association. Note that the TCB will not be valid past this call. + void InternalClose(ErrorKind error, absl::string_view message); + // Closes the association, because of too many retransmission errors. + void CloseConnectionBecauseOfTooManyTransmissionErrors(); + // Timer expiration handlers + absl::optional OnInitTimerExpiry(); + absl::optional OnCookieTimerExpiry(); + absl::optional OnShutdownTimerExpiry(); + // Builds the packet from `builder` and sends it (through callbacks). + void SendPacket(SctpPacket::Builder& builder); + // Sends SHUTDOWN or SHUTDOWN-ACK if the socket is shutting down and if all + // outstanding data has been acknowledged. + void MaybeSendShutdownOrAck(); + // If the socket is shutting down, responds SHUTDOWN to any incoming DATA. + void MaybeSendShutdownOnPacketReceived(const SctpPacket& packet); + // Sends a INIT chunk. + void SendInit(); + // Sends a CookieEcho chunk. + void SendCookieEcho(); + // Sends a SHUTDOWN chunk. + void SendShutdown(); + // Sends a SHUTDOWN-ACK chunk. 
+ void SendShutdownAck(); + // Validates the SCTP packet, as a whole - not the validity of individual + // chunks within it, as that's done in the different chunk handlers. + bool ValidatePacket(const SctpPacket& packet); + // Parses `payload`, which is a serialized packet that is just going to be + // sent and prints all chunks. + void DebugPrintOutgoing(rtc::ArrayView payload); + // Called whenever there may be reassembled messages, and delivers those. + void DeliverReassembledMessages(); + // Returns true if there is a TCB, and false otherwise (and reports an error). + bool ValidateHasTCB(); + + // Returns true if the parsing of a chunk of type `T` succeeded. If it didn't, + // it reports an error and returns false. + template + bool ValidateParseSuccess(const absl::optional& c) { + if (c.has_value()) { + return true; + } + + ReportFailedToParseChunk(T::kType); + return false; + } + + // Reports failing to have parsed a chunk with the provided `chunk_type`. + void ReportFailedToParseChunk(int chunk_type); + // Called when unknown chunks are received. May report an error. + bool HandleUnrecognizedChunk(const SctpPacket::ChunkDescriptor& descriptor); + + // Will dispatch more specific chunk handlers. + bool Dispatch(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming DATA chunks. + void HandleData(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming I-DATA chunks. + void HandleIData(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Common handler for DATA and I-DATA chunks. + void HandleDataCommon(AnyDataChunk& chunk); + // Handles incoming INIT chunks. + void HandleInit(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming INIT-ACK chunks. + void HandleInitAck(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming SACK chunks. 
+ void HandleSack(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming HEARTBEAT chunks. + void HandleHeartbeatRequest(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming HEARTBEAT-ACK chunks. + void HandleHeartbeatAck(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming ABORT chunks. + void HandleAbort(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming ERROR chunks. + void HandleError(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming COOKIE-ECHO chunks. + void HandleCookieEcho(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles receiving COOKIE-ECHO when there already is a TCB. The return value + // indicates if the processing should continue. + bool HandleCookieEchoWithTCB(const CommonHeader& header, + const StateCookie& cookie); + // Handles incoming COOKIE-ACK chunks. + void HandleCookieAck(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming SHUTDOWN chunks. + void HandleShutdown(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming SHUTDOWN-ACK chunks. + void HandleShutdownAck(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming FORWARD-TSN chunks. + void HandleForwardTsn(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming I-FORWARD-TSN chunks. + void HandleIForwardTsn(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Handles incoming RE-CONFIG chunks. + void HandleReconfig(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + // Common handled for FORWARD-TSN/I-FORWARD-TSN. 
+ void HandleForwardTsnCommon(const AnyForwardTsnChunk& chunk); + // Handles incoming SHUTDOWN-COMPLETE chunks + void HandleShutdownComplete(const CommonHeader& header, + const SctpPacket::ChunkDescriptor& descriptor); + + const std::string log_prefix_; + const std::unique_ptr packet_observer_; + DcSctpOptions options_; + + // Enqueues callbacks and dispatches them just before returning to the caller. + CallbackDeferrer callbacks_; + + TimerManager timer_manager_; + const std::unique_ptr t1_init_; + const std::unique_ptr t1_cookie_; + const std::unique_ptr t2_shutdown_; + + // The actual SendQueue implementation. As data can be sent on a socket before + // the connection is established, this component is not in the TCB. + FCFSSendQueue send_queue_; + + // Only valid when state == State::kCookieEchoed + // A cached Cookie Echo Chunk, to be re-sent on timer expiry. + absl::optional cookie_echo_chunk_ = absl::nullopt; + + // Contains verification tag and initial TSN between having sent the INIT + // until the connection is established (there is no TCB at this point). + ConnectParameters connect_params_; + // The socket state. + State state_ = State::kClosed; + // If the connection is established, contains a transmission control block. + std::unique_ptr tcb_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_DCSCTP_SOCKET_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/heartbeat_handler.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/heartbeat_handler.cc new file mode 100644 index 000000000..30a0001c6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/heartbeat_handler.cc @@ -0,0 +1,189 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/socket/heartbeat_handler.h" + +#include + +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h" +#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h" +#include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/socket/context.h" +#include "net/dcsctp/timer/timer.h" +#include "rtc_base/logging.h" + +namespace dcsctp { + +// This is stored (in serialized form) as HeartbeatInfoParameter sent in +// HeartbeatRequestChunk and received back in HeartbeatAckChunk. It should be +// well understood that this data may be modified by the peer, so it can't +// be trusted. +// +// It currently only stores a timestamp, in millisecond precision, to allow for +// RTT measurements. If that would be manipulated by the peer, it would just +// result in incorrect RTT measurements, which isn't an issue. 
+class HeartbeatInfo { + public: + static constexpr size_t kBufferSize = sizeof(uint64_t); + static_assert(kBufferSize == 8, "Unexpected buffer size"); + + explicit HeartbeatInfo(TimeMs created_at) : created_at_(created_at) {} + + std::vector Serialize() { + uint32_t high_bits = static_cast(*created_at_ >> 32); + uint32_t low_bits = static_cast(*created_at_); + + std::vector data(kBufferSize); + BoundedByteWriter writer(data); + writer.Store32<0>(high_bits); + writer.Store32<4>(low_bits); + return data; + } + + static absl::optional Deserialize( + rtc::ArrayView data) { + if (data.size() != kBufferSize) { + RTC_LOG(LS_WARNING) << "Invalid heartbeat info: " << data.size() + << " bytes"; + return absl::nullopt; + } + + BoundedByteReader reader(data); + uint32_t high_bits = reader.Load32<0>(); + uint32_t low_bits = reader.Load32<4>(); + + uint64_t created_at = static_cast(high_bits) << 32 | low_bits; + return HeartbeatInfo(TimeMs(created_at)); + } + + TimeMs created_at() const { return created_at_; } + + private: + const TimeMs created_at_; +}; + +HeartbeatHandler::HeartbeatHandler(absl::string_view log_prefix, + const DcSctpOptions& options, + Context* context, + TimerManager* timer_manager) + : log_prefix_(std::string(log_prefix) + "heartbeat: "), + ctx_(context), + timer_manager_(timer_manager), + interval_duration_(options.heartbeat_interval), + interval_duration_should_include_rtt_( + options.heartbeat_interval_include_rtt), + interval_timer_(timer_manager_->CreateTimer( + "heartbeat-interval", + [this]() { return OnIntervalTimerExpiry(); }, + TimerOptions(interval_duration_, TimerBackoffAlgorithm::kFixed))), + timeout_timer_(timer_manager_->CreateTimer( + "heartbeat-timeout", + [this]() { return OnTimeoutTimerExpiry(); }, + TimerOptions(options.rto_initial, + TimerBackoffAlgorithm::kExponential, + /*max_restarts=*/0))) { + // The interval timer must always be running as long as the association is up. 
+ interval_timer_->Start(); +} + +void HeartbeatHandler::RestartTimer() { + if (interval_duration_should_include_rtt_) { + // The RTT should be used, but it's not easy accessible. The RTO will + // suffice. + interval_timer_->set_duration(interval_duration_ + ctx_->current_rto()); + } else { + interval_timer_->set_duration(interval_duration_); + } + + interval_timer_->Start(); +} + +void HeartbeatHandler::HandleHeartbeatRequest(HeartbeatRequestChunk chunk) { + // https://tools.ietf.org/html/rfc4960#section-8.3 + // "The receiver of the HEARTBEAT should immediately respond with a + // HEARTBEAT ACK that contains the Heartbeat Information TLV, together with + // any other received TLVs, copied unchanged from the received HEARTBEAT + // chunk." + ctx_->Send(ctx_->PacketBuilder().Add( + HeartbeatAckChunk(std::move(chunk).extract_parameters()))); +} + +void HeartbeatHandler::HandleHeartbeatAck(HeartbeatAckChunk chunk) { + timeout_timer_->Stop(); + absl::optional info_param = chunk.info(); + if (!info_param.has_value()) { + ctx_->callbacks().OnError( + ErrorKind::kParseFailed, + "Failed to parse HEARTBEAT-ACK; No Heartbeat Info parameter"); + return; + } + absl::optional info = + HeartbeatInfo::Deserialize(info_param->info()); + if (!info.has_value()) { + ctx_->callbacks().OnError(ErrorKind::kParseFailed, + "Failed to parse HEARTBEAT-ACK; Failed to " + "deserialized Heartbeat info parameter"); + return; + } + + DurationMs duration(*ctx_->callbacks().TimeMillis() - *info->created_at()); + + ctx_->ObserveRTT(duration); + + // https://tools.ietf.org/html/rfc4960#section-8.1 + // "The counter shall be reset each time ... a HEARTBEAT ACK is received from + // the peer endpoint." 
+ ctx_->ClearTxErrorCounter(); +} + +absl::optional HeartbeatHandler::OnIntervalTimerExpiry() { + if (ctx_->is_connection_established()) { + HeartbeatInfo info(ctx_->callbacks().TimeMillis()); + timeout_timer_->set_duration(ctx_->current_rto()); + timeout_timer_->Start(); + RTC_DLOG(LS_INFO) << log_prefix_ << "Sending HEARTBEAT with timeout " + << *timeout_timer_->duration(); + + Parameters parameters = Parameters::Builder() + .Add(HeartbeatInfoParameter(info.Serialize())) + .Build(); + + ctx_->Send(ctx_->PacketBuilder().Add( + HeartbeatRequestChunk(std::move(parameters)))); + } else { + RTC_DLOG(LS_VERBOSE) + << log_prefix_ + << "Will not send HEARTBEAT when connection not established"; + } + return absl::nullopt; +} + +absl::optional HeartbeatHandler::OnTimeoutTimerExpiry() { + // Note that the timeout timer is not restarted. It will be started again when + // the interval timer expires. + RTC_DCHECK(!timeout_timer_->is_running()); + ctx_->IncrementTxErrorCounter("HEARTBEAT timeout"); + return absl::nullopt; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/heartbeat_handler.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/heartbeat_handler.h new file mode 100644 index 000000000..14c310953 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/heartbeat_handler.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_SOCKET_HEARTBEAT_HANDLER_H_ +#define NET_DCSCTP_SOCKET_HEARTBEAT_HANDLER_H_ + +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h" +#include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/socket/context.h" +#include "net/dcsctp/timer/timer.h" + +namespace dcsctp { + +// HeartbeatHandler handles all logic around sending heartbeats and receiving +// the responses, as well as receiving incoming heartbeat requests. +// +// Heartbeats are sent on idle connections to ensure that the connection is +// still healthy and to measure the RTT. If a number of heartbeats time out, +// the connection will eventually be closed. +class HeartbeatHandler { + public: + HeartbeatHandler(absl::string_view log_prefix, + const DcSctpOptions& options, + Context* context, + TimerManager* timer_manager); + + // Called when the heartbeat interval timer should be restarted. This is + // generally done every time data is sent, which makes the timer expire when + // the connection is idle. + void RestartTimer(); + + // Called on received HeartbeatRequestChunk chunks. + void HandleHeartbeatRequest(HeartbeatRequestChunk chunk); + + // Called on received HeartbeatRequestChunk chunks. + void HandleHeartbeatAck(HeartbeatAckChunk chunk); + + private: + absl::optional OnIntervalTimerExpiry(); + absl::optional OnTimeoutTimerExpiry(); + + const std::string log_prefix_; + Context* ctx_; + TimerManager* timer_manager_; + // The time for a connection to be idle before a heartbeat is sent. + const DurationMs interval_duration_; + // Adding RTT to the duration will add some jitter, which is good in + // production, but less good in unit tests, which is why it can be disabled. 
+ const bool interval_duration_should_include_rtt_; + const std::unique_ptr interval_timer_; + const std::unique_ptr timeout_timer_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_HEARTBEAT_HANDLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_context.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_context.h new file mode 100644 index 000000000..d86b99a20 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_context.h @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_SOCKET_MOCK_CONTEXT_H_ +#define NET_DCSCTP_SOCKET_MOCK_CONTEXT_H_ + +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/socket/context.h" +#include "net/dcsctp/socket/mock_dcsctp_socket_callbacks.h" +#include "test/gmock.h" + +namespace dcsctp { + +class MockContext : public Context { + public: + static constexpr TSN MyInitialTsn() { return TSN(990); } + static constexpr TSN PeerInitialTsn() { return TSN(10); } + static constexpr VerificationTag PeerVerificationTag() { + return VerificationTag(0x01234567); + } + + explicit MockContext(MockDcSctpSocketCallbacks* callbacks) + : callbacks_(*callbacks) { + ON_CALL(*this, is_connection_established) + .WillByDefault(testing::Return(true)); + ON_CALL(*this, my_initial_tsn) + .WillByDefault(testing::Return(MyInitialTsn())); + ON_CALL(*this, peer_initial_tsn) + .WillByDefault(testing::Return(PeerInitialTsn())); + 
ON_CALL(*this, callbacks).WillByDefault(testing::ReturnRef(callbacks_)); + ON_CALL(*this, current_rto).WillByDefault(testing::Return(DurationMs(123))); + ON_CALL(*this, Send).WillByDefault([this](SctpPacket::Builder& builder) { + callbacks_.SendPacket(builder.Build()); + }); + } + + MOCK_METHOD(bool, is_connection_established, (), (const, override)); + MOCK_METHOD(TSN, my_initial_tsn, (), (const, override)); + MOCK_METHOD(TSN, peer_initial_tsn, (), (const, override)); + MOCK_METHOD(DcSctpSocketCallbacks&, callbacks, (), (const, override)); + + MOCK_METHOD(void, ObserveRTT, (DurationMs rtt_ms), (override)); + MOCK_METHOD(DurationMs, current_rto, (), (const, override)); + MOCK_METHOD(bool, + IncrementTxErrorCounter, + (absl::string_view reason), + (override)); + MOCK_METHOD(void, ClearTxErrorCounter, (), (override)); + MOCK_METHOD(bool, HasTooManyTxErrors, (), (const, override)); + SctpPacket::Builder PacketBuilder() const override { + return SctpPacket::Builder(PeerVerificationTag(), options_); + } + MOCK_METHOD(void, Send, (SctpPacket::Builder & builder), (override)); + + DcSctpOptions options_; + MockDcSctpSocketCallbacks& callbacks_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_MOCK_CONTEXT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h new file mode 100644 index 000000000..799f85c27 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_SOCKET_MOCK_DCSCTP_SOCKET_CALLBACKS_H_ +#define NET_DCSCTP_SOCKET_MOCK_DCSCTP_SOCKET_CALLBACKS_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/timeout.h" +#include "net/dcsctp/public/types.h" +#include "net/dcsctp/timer/fake_timeout.h" +#include "rtc_base/logging.h" +#include "rtc_base/random.h" +#include "test/gmock.h" + +namespace dcsctp { + +namespace internal { +// It can be argued if a mocked random number generator should be deterministic +// or if it should be have as a "real" random number generator. In this +// implementation, each instantiation of `MockDcSctpSocketCallbacks` will have +// their `GetRandomInt` return different sequences, but each instantiation will +// always generate the same sequence of random numbers. This to make it easier +// to compare logs from tests, but still to let e.g. two different sockets (used +// in the same test) get different random numbers, so that they don't start e.g. +// on the same sequence number. While that isn't an issue in the protocol, it +// just makes debugging harder as the two sockets would look exactly the same. +// +// In a real implementation of `DcSctpSocketCallbacks` the random number +// generator backing `GetRandomInt` should be seeded externally and correctly. +inline int GetUniqueSeed() { + static int seed = 0; + return ++seed; +} +} // namespace internal + +class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { + public: + explicit MockDcSctpSocketCallbacks(absl::string_view name = "") + : log_prefix_(name.empty() ? 
"" : std::string(name) + ": "), + random_(internal::GetUniqueSeed()), + timeout_manager_([this]() { return now_; }) { + ON_CALL(*this, SendPacket) + .WillByDefault([this](rtc::ArrayView data) { + sent_packets_.emplace_back( + std::vector(data.begin(), data.end())); + }); + ON_CALL(*this, OnMessageReceived) + .WillByDefault([this](DcSctpMessage message) { + received_messages_.emplace_back(std::move(message)); + }); + + ON_CALL(*this, OnError) + .WillByDefault([this](ErrorKind error, absl::string_view message) { + RTC_LOG(LS_WARNING) + << log_prefix_ << "Socket error: " << ToString(error) << "; " + << message; + }); + ON_CALL(*this, OnAborted) + .WillByDefault([this](ErrorKind error, absl::string_view message) { + RTC_LOG(LS_WARNING) + << log_prefix_ << "Socket abort: " << ToString(error) << "; " + << message; + }); + ON_CALL(*this, TimeMillis).WillByDefault([this]() { return now_; }); + } + MOCK_METHOD(void, + SendPacket, + (rtc::ArrayView data), + (override)); + + std::unique_ptr CreateTimeout() override { + return timeout_manager_.CreateTimeout(); + } + + MOCK_METHOD(TimeMs, TimeMillis, (), (override)); + uint32_t GetRandomInt(uint32_t low, uint32_t high) override { + return random_.Rand(low, high); + } + MOCK_METHOD(void, NotifyOutgoingMessageBufferEmpty, (), (override)); + + MOCK_METHOD(void, OnMessageReceived, (DcSctpMessage message), (override)); + MOCK_METHOD(void, + OnError, + (ErrorKind error, absl::string_view message), + (override)); + MOCK_METHOD(void, + OnAborted, + (ErrorKind error, absl::string_view message), + (override)); + MOCK_METHOD(void, OnConnected, (), (override)); + MOCK_METHOD(void, OnClosed, (), (override)); + MOCK_METHOD(void, OnConnectionRestarted, (), (override)); + MOCK_METHOD(void, + OnStreamsResetFailed, + (rtc::ArrayView outgoing_streams, + absl::string_view reason), + (override)); + MOCK_METHOD(void, + OnStreamsResetPerformed, + (rtc::ArrayView outgoing_streams), + (override)); + MOCK_METHOD(void, + OnIncomingStreamsReset, + 
(rtc::ArrayView incoming_streams), + (override)); + + bool HasPacket() const { return !sent_packets_.empty(); } + + std::vector ConsumeSentPacket() { + if (sent_packets_.empty()) { + return {}; + } + std::vector ret = std::move(sent_packets_.front()); + sent_packets_.pop_front(); + return ret; + } + absl::optional ConsumeReceivedMessage() { + if (received_messages_.empty()) { + return absl::nullopt; + } + DcSctpMessage ret = std::move(received_messages_.front()); + received_messages_.pop_front(); + return ret; + } + + void AdvanceTime(DurationMs duration_ms) { now_ = now_ + duration_ms; } + void SetTime(TimeMs now) { now_ = now; } + + absl::optional GetNextExpiredTimeout() { + return timeout_manager_.GetNextExpiredTimeout(); + } + + private: + const std::string log_prefix_; + TimeMs now_ = TimeMs(0); + webrtc::Random random_; + FakeTimeoutManager timeout_manager_; + std::deque> sent_packets_; + std::deque received_messages_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_MOCK_DCSCTP_SOCKET_CALLBACKS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.cc new file mode 100644 index 000000000..7d04cbb0d --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.cc @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/socket/state_cookie.h" + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/bounded_byte_reader.h" +#include "net/dcsctp/packet/bounded_byte_writer.h" +#include "net/dcsctp/socket/capabilities.h" +#include "rtc_base/logging.h" + +namespace dcsctp { + +// Magic values, which the state cookie is prefixed with. +constexpr uint32_t kMagic1 = 1684230979; +constexpr uint32_t kMagic2 = 1414541360; +constexpr size_t StateCookie::kCookieSize; + +std::vector StateCookie::Serialize() { + std::vector cookie; + cookie.resize(kCookieSize); + BoundedByteWriter buffer(cookie); + buffer.Store32<0>(kMagic1); + buffer.Store32<4>(kMagic2); + buffer.Store32<8>(*initiate_tag_); + buffer.Store32<12>(*initial_tsn_); + buffer.Store32<16>(a_rwnd_); + buffer.Store32<20>(static_cast(*tie_tag_ >> 32)); + buffer.Store32<24>(static_cast(*tie_tag_)); + buffer.Store8<28>(capabilities_.partial_reliability); + buffer.Store8<29>(capabilities_.message_interleaving); + buffer.Store8<30>(capabilities_.reconfig); + return cookie; +} + +absl::optional StateCookie::Deserialize( + rtc::ArrayView cookie) { + if (cookie.size() != kCookieSize) { + RTC_DLOG(LS_WARNING) << "Invalid state cookie: " << cookie.size() + << " bytes"; + return absl::nullopt; + } + + BoundedByteReader buffer(cookie); + uint32_t magic1 = buffer.Load32<0>(); + uint32_t magic2 = buffer.Load32<4>(); + if (magic1 != kMagic1 || magic2 != kMagic2) { + RTC_DLOG(LS_WARNING) << "Invalid state cookie; wrong magic"; + return absl::nullopt; + } + + VerificationTag verification_tag(buffer.Load32<8>()); + TSN initial_tsn(buffer.Load32<12>()); + uint32_t a_rwnd = buffer.Load32<16>(); + uint32_t tie_tag_upper = buffer.Load32<20>(); + uint32_t tie_tag_lower = buffer.Load32<24>(); + TieTag tie_tag(static_cast(tie_tag_upper) << 32 | + static_cast(tie_tag_lower)); + Capabilities capabilities; + capabilities.partial_reliability = buffer.Load8<28>() != 0; + 
capabilities.message_interleaving = buffer.Load8<29>() != 0; + capabilities.reconfig = buffer.Load8<30>() != 0; + + return StateCookie(verification_tag, initial_tsn, a_rwnd, tie_tag, + capabilities); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.h new file mode 100644 index 000000000..df4b80139 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/state_cookie.h @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_SOCKET_STATE_COOKIE_H_ +#define NET_DCSCTP_SOCKET_STATE_COOKIE_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/socket/capabilities.h" + +namespace dcsctp { + +// This is serialized as a state cookie and put in INIT_ACK. The client then +// responds with this in COOKIE_ECHO. +// +// NOTE: Expect that the client will modify it to try to exploit the library. +// Do not trust anything in it; no pointers or anything like that. +class StateCookie { + public: + static constexpr size_t kCookieSize = 31; + + StateCookie(VerificationTag initiate_tag, + TSN initial_tsn, + uint32_t a_rwnd, + TieTag tie_tag, + Capabilities capabilities) + : initiate_tag_(initiate_tag), + initial_tsn_(initial_tsn), + a_rwnd_(a_rwnd), + tie_tag_(tie_tag), + capabilities_(capabilities) {} + + // Returns a serialized version of this cookie. + std::vector Serialize(); + + // Deserializes the cookie, and returns absl::nullopt if that failed. 
+ static absl::optional Deserialize( + rtc::ArrayView cookie); + + VerificationTag initiate_tag() const { return initiate_tag_; } + TSN initial_tsn() const { return initial_tsn_; } + uint32_t a_rwnd() const { return a_rwnd_; } + TieTag tie_tag() const { return tie_tag_; } + const Capabilities& capabilities() const { return capabilities_; } + + private: + const VerificationTag initiate_tag_; + const TSN initial_tsn_; + const uint32_t a_rwnd_; + const TieTag tie_tag_; + const Capabilities capabilities_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_STATE_COOKIE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.cc new file mode 100644 index 000000000..a1f57e6b2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.cc @@ -0,0 +1,347 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/socket/stream_reset_handler.h" + +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/common/str_join.h" +#include "net/dcsctp/packet/chunk/reconfig_chunk.h" +#include "net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h" +#include "net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h" +#include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" +#include "net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/packet/tlv_trait.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/rx/data_tracker.h" +#include "net/dcsctp/rx/reassembly_queue.h" +#include "net/dcsctp/socket/context.h" +#include "net/dcsctp/timer/timer.h" +#include "net/dcsctp/tx/retransmission_queue.h" +#include "rtc_base/logging.h" + +namespace dcsctp { +namespace { +using ResponseResult = ReconfigurationResponseParameter::Result; + +bool DescriptorsAre(const std::vector& c, + uint16_t e1, + uint16_t e2) { + return (c[0].type == e1 && c[1].type == e2) || + (c[0].type == e2 && c[1].type == e1); +} + +} // namespace + +bool StreamResetHandler::Validate(const ReConfigChunk& chunk) { + const Parameters& parameters = chunk.parameters(); + + // https://tools.ietf.org/html/rfc6525#section-3.1 + // "Note that each RE-CONFIG chunk holds at least one parameter + // and at most two parameters. 
Only the following combinations are allowed:" + std::vector descriptors = parameters.descriptors(); + if (descriptors.size() == 1) { + if ((descriptors[0].type == OutgoingSSNResetRequestParameter::kType) || + (descriptors[0].type == IncomingSSNResetRequestParameter::kType) || + (descriptors[0].type == SSNTSNResetRequestParameter::kType) || + (descriptors[0].type == AddOutgoingStreamsRequestParameter::kType) || + (descriptors[0].type == AddIncomingStreamsRequestParameter::kType) || + (descriptors[0].type == ReconfigurationResponseParameter::kType)) { + return true; + } + } else if (descriptors.size() == 2) { + if (DescriptorsAre(descriptors, OutgoingSSNResetRequestParameter::kType, + IncomingSSNResetRequestParameter::kType) || + DescriptorsAre(descriptors, AddOutgoingStreamsRequestParameter::kType, + AddIncomingStreamsRequestParameter::kType) || + DescriptorsAre(descriptors, ReconfigurationResponseParameter::kType, + OutgoingSSNResetRequestParameter::kType) || + DescriptorsAre(descriptors, ReconfigurationResponseParameter::kType, + ReconfigurationResponseParameter::kType)) { + return true; + } + } + + RTC_LOG(LS_WARNING) << "Invalid set of RE-CONFIG parameters"; + return false; +} + +absl::optional> +StreamResetHandler::Process(const ReConfigChunk& chunk) { + if (!Validate(chunk)) { + return absl::nullopt; + } + + std::vector responses; + + for (const ParameterDescriptor& desc : chunk.parameters().descriptors()) { + switch (desc.type) { + case OutgoingSSNResetRequestParameter::kType: + HandleResetOutgoing(desc, responses); + break; + + case IncomingSSNResetRequestParameter::kType: + HandleResetIncoming(desc, responses); + break; + + case ReconfigurationResponseParameter::kType: + HandleResponse(desc); + break; + } + } + + return responses; +} + +void StreamResetHandler::HandleReConfig(ReConfigChunk chunk) { + absl::optional> responses = + Process(chunk); + + if (!responses.has_value()) { + ctx_->callbacks().OnError(ErrorKind::kParseFailed, + "Failed to parse 
RE-CONFIG command"); + return; + } + + if (!responses->empty()) { + SctpPacket::Builder b = ctx_->PacketBuilder(); + Parameters::Builder params_builder; + for (const auto& response : *responses) { + params_builder.Add(response); + } + b.Add(ReConfigChunk(params_builder.Build())); + ctx_->Send(b); + } +} + +bool StreamResetHandler::ValidateReqSeqNbr( + ReconfigRequestSN req_seq_nbr, + std::vector& responses) { + if (req_seq_nbr == last_processed_req_seq_nbr_) { + // This has already been performed previously. + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "req=" << *req_seq_nbr + << " already processed"; + responses.push_back(ReconfigurationResponseParameter( + req_seq_nbr, ResponseResult::kSuccessNothingToDo)); + return false; + } + + if (req_seq_nbr != ReconfigRequestSN(*last_processed_req_seq_nbr_ + 1)) { + // Too old, too new, from wrong association etc. + // This is expected to happen when handing over a RTCPeerConnection from one + // server to another. The client will notice this and may decide to close + // old data channels, which may be sent to the wrong (or both) servers + // during a handover. 
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ << "req=" << *req_seq_nbr + << " bad seq_nbr"; + responses.push_back(ReconfigurationResponseParameter( + req_seq_nbr, ResponseResult::kErrorBadSequenceNumber)); + return false; + } + + return true; +} + +void StreamResetHandler::HandleResetOutgoing( + const ParameterDescriptor& descriptor, + std::vector& responses) { + absl::optional req = + OutgoingSSNResetRequestParameter::Parse(descriptor.data); + if (!req.has_value()) { + ctx_->callbacks().OnError(ErrorKind::kParseFailed, + "Failed to parse Outgoing Reset command"); + return; + } + + if (ValidateReqSeqNbr(req->request_sequence_number(), responses)) { + ResponseResult result; + + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "Reset outgoing streams with req_seq_nbr=" + << *req->request_sequence_number(); + + result = reassembly_queue_->ResetStreams( + *req, data_tracker_->last_cumulative_acked_tsn()); + if (result == ResponseResult::kSuccessPerformed) { + last_processed_req_seq_nbr_ = req->request_sequence_number(); + ctx_->callbacks().OnIncomingStreamsReset(req->stream_ids()); + } + responses.push_back(ReconfigurationResponseParameter( + req->request_sequence_number(), result)); + } +} + +void StreamResetHandler::HandleResetIncoming( + const ParameterDescriptor& descriptor, + std::vector& responses) { + absl::optional req = + IncomingSSNResetRequestParameter::Parse(descriptor.data); + if (!req.has_value()) { + ctx_->callbacks().OnError(ErrorKind::kParseFailed, + "Failed to parse Incoming Reset command"); + return; + } + if (ValidateReqSeqNbr(req->request_sequence_number(), responses)) { + responses.push_back(ReconfigurationResponseParameter( + req->request_sequence_number(), ResponseResult::kSuccessNothingToDo)); + last_processed_req_seq_nbr_ = req->request_sequence_number(); + } +} + +void StreamResetHandler::HandleResponse(const ParameterDescriptor& descriptor) { + absl::optional resp = + ReconfigurationResponseParameter::Parse(descriptor.data); + if (!resp.has_value()) { + 
ctx_->callbacks().OnError( + ErrorKind::kParseFailed, + "Failed to parse Reconfiguration Response command"); + return; + } + + if (current_request_.has_value() && current_request_->has_been_sent() && + resp->response_sequence_number() == current_request_->req_seq_nbr()) { + reconfig_timer_->Stop(); + + switch (resp->result()) { + case ResponseResult::kSuccessNothingToDo: + case ResponseResult::kSuccessPerformed: + RTC_DLOG(LS_VERBOSE) + << log_prefix_ << "Reset stream success, req_seq_nbr=" + << *current_request_->req_seq_nbr() << ", streams=" + << StrJoin(current_request_->streams(), ",", + [](rtc::StringBuilder& sb, StreamID stream_id) { + sb << *stream_id; + }); + ctx_->callbacks().OnStreamsResetPerformed(current_request_->streams()); + current_request_ = absl::nullopt; + retransmission_queue_->CommitResetStreams(); + break; + case ResponseResult::kInProgress: + RTC_DLOG(LS_VERBOSE) + << log_prefix_ << "Reset stream still pending, req_seq_nbr=" + << *current_request_->req_seq_nbr() << ", streams=" + << StrJoin(current_request_->streams(), ",", + [](rtc::StringBuilder& sb, StreamID stream_id) { + sb << *stream_id; + }); + // Force this request to be sent again, but with new req_seq_nbr. 
+ current_request_->PrepareRetransmission(); + reconfig_timer_->set_duration(ctx_->current_rto()); + reconfig_timer_->Start(); + break; + case ResponseResult::kErrorRequestAlreadyInProgress: + case ResponseResult::kDenied: + case ResponseResult::kErrorWrongSSN: + case ResponseResult::kErrorBadSequenceNumber: + RTC_DLOG(LS_WARNING) + << log_prefix_ << "Reset stream error=" << ToString(resp->result()) + << ", req_seq_nbr=" << *current_request_->req_seq_nbr() + << ", streams=" + << StrJoin(current_request_->streams(), ",", + [](rtc::StringBuilder& sb, StreamID stream_id) { + sb << *stream_id; + }); + ctx_->callbacks().OnStreamsResetFailed(current_request_->streams(), + ToString(resp->result())); + current_request_ = absl::nullopt; + retransmission_queue_->RollbackResetStreams(); + break; + } + } +} + +absl::optional StreamResetHandler::MakeStreamResetRequest() { + // Only send stream resets if there are streams to reset, and no current + // ongoing request (there can only be one at a time), and if the stream + // can be reset. + if (streams_to_reset_.empty() || current_request_.has_value() || + !retransmission_queue_->CanResetStreams()) { + return absl::nullopt; + } + + std::vector streams_to_reset(streams_to_reset_.begin(), + streams_to_reset_.end()); + current_request_.emplace(TSN(*retransmission_queue_->next_tsn() - 1), + std::move(streams_to_reset)); + streams_to_reset_.clear(); + reconfig_timer_->set_duration(ctx_->current_rto()); + reconfig_timer_->Start(); + return MakeReconfigChunk(); +} + +ReConfigChunk StreamResetHandler::MakeReconfigChunk() { + // The req_seq_nbr will be empty if the request has never been sent before, + // or if it was sent, but the sender responded "in progress", and then the + // req_seq_nbr will be cleared to re-send with a new number. But if the + // request is re-sent due to timeout (reconfig-timer expiring), the same + // req_seq_nbr will be used. 
+ RTC_DCHECK(current_request_.has_value()); + + if (!current_request_->has_been_sent()) { + current_request_->PrepareToSend(next_outgoing_req_seq_nbr_); + next_outgoing_req_seq_nbr_ = + ReconfigRequestSN(*next_outgoing_req_seq_nbr_ + 1); + } + + Parameters::Builder params_builder = + Parameters::Builder().Add(OutgoingSSNResetRequestParameter( + current_request_->req_seq_nbr(), current_request_->req_seq_nbr(), + current_request_->sender_last_assigned_tsn(), + current_request_->streams())); + + return ReConfigChunk(params_builder.Build()); +} + +void StreamResetHandler::ResetStreams( + rtc::ArrayView outgoing_streams) { + // Enqueue streams to be reset - as this may be called multiple times + // while a request is already in progress (and there can only be one). + for (StreamID stream_id : outgoing_streams) { + streams_to_reset_.insert(stream_id); + } + if (current_request_.has_value()) { + // Already an ongoing request - will need to wait for it to finish as + // there can only be one in-flight ReConfig chunk with requests at any + // time. + } else { + retransmission_queue_->PrepareResetStreams(std::vector( + streams_to_reset_.begin(), streams_to_reset_.end())); + } +} + +absl::optional StreamResetHandler::OnReconfigTimerExpiry() { + if (current_request_->has_been_sent()) { + // There is an outstanding request, which timed out while waiting for a + // response. + if (!ctx_->IncrementTxErrorCounter("RECONFIG timeout")) { + // Timed out. The connection will close after processing the timers. + return absl::nullopt; + } + } else { + // There is no outstanding request, but there is a prepared one. This means + // that the receiver has previously responded "in progress", which resulted + // in retrying the request (but with a new req_seq_nbr) after a while. 
+ } + + ctx_->Send(ctx_->PacketBuilder().Add(MakeReconfigChunk())); + return ctx_->current_rto(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.h new file mode 100644 index 000000000..dc0ee5e8c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/stream_reset_handler.h @@ -0,0 +1,222 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_SOCKET_STREAM_RESET_HANDLER_H_ +#define NET_DCSCTP_SOCKET_STREAM_RESET_HANDLER_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/chunk/reconfig_chunk.h" +#include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" +#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/rx/data_tracker.h" +#include "net/dcsctp/rx/reassembly_queue.h" +#include "net/dcsctp/socket/context.h" +#include "net/dcsctp/timer/timer.h" +#include "net/dcsctp/tx/retransmission_queue.h" + +namespace dcsctp { + +// StreamResetHandler handles sending outgoing stream reset requests (to close +// an SCTP stream, which translates to closing a data channel). 
+// +// It also handles incoming "outgoing stream reset requests", when the peer +// wants to close its data channel. +// +// Resetting streams is an asynchronous operation where the client will request +// a request a stream to be reset, but then it might not be performed exactly at +// this point. First, the sender might need to discard all messages that have +// been enqueued for this stream, or it may select to wait until all have been +// sent. At least, it must wait for the currently sending fragmented message to +// be fully sent, because a stream can't be reset while having received half a +// message. In the stream reset request, the "sender's last assigned TSN" is +// provided, which is simply the TSN for which the receiver should've received +// all messages before this value, before the stream can be reset. Since +// fragments can get lost or sent out-of-order, the receiver of a request may +// not have received all the data just yet, and then it will respond to the +// sender: "In progress". In other words, try again. The sender will then need +// to start a timer and try the very same request again (but with a new sequence +// number) until the receiver successfully performs the operation. +// +// All this can take some time, and may be driven by timers, so the client will +// ultimately be notified using callbacks. +// +// In this implementation, when a stream is reset, the queued but not-yet-sent +// messages will be discarded, but that may change in the future. RFC8831 allows +// both behaviors. 
+class StreamResetHandler { + public: + StreamResetHandler(absl::string_view log_prefix, + Context* context, + TimerManager* timer_manager, + DataTracker* data_tracker, + ReassemblyQueue* reassembly_queue, + RetransmissionQueue* retransmission_queue) + : log_prefix_(std::string(log_prefix) + "reset: "), + ctx_(context), + data_tracker_(data_tracker), + reassembly_queue_(reassembly_queue), + retransmission_queue_(retransmission_queue), + reconfig_timer_(timer_manager->CreateTimer( + "re-config", + [this]() { return OnReconfigTimerExpiry(); }, + TimerOptions(DurationMs(0)))), + next_outgoing_req_seq_nbr_(ReconfigRequestSN(*ctx_->my_initial_tsn())), + last_processed_req_seq_nbr_( + ReconfigRequestSN(*ctx_->peer_initial_tsn() - 1)) {} + + // Initiates reset of the provided streams. While there can only be one + // ongoing stream reset request at any time, this method can be called at any + // time and also multiple times. It will enqueue requests that can't be + // directly fulfilled, and will asynchronously process them when any ongoing + // request has completed. + void ResetStreams(rtc::ArrayView outgoing_streams); + + // Creates a Reset Streams request that must be sent if returned. Will start + // the reconfig timer. Will return absl::nullopt if there is no need to + // create a request (no streams to reset) or if there already is an ongoing + // stream reset request that hasn't completed yet. + absl::optional MakeStreamResetRequest(); + + // Called when handling and incoming RE-CONFIG chunk. + void HandleReConfig(ReConfigChunk chunk); + + private: + // Represents a stream request operation. There can only be one ongoing at + // any time, and a sent request may either succeed, fail or result in the + // receiver signaling that it can't process it right now, and then it will be + // retried. 
+ class CurrentRequest { + public: + CurrentRequest(TSN sender_last_assigned_tsn, std::vector streams) + : req_seq_nbr_(absl::nullopt), + sender_last_assigned_tsn_(sender_last_assigned_tsn), + streams_(std::move(streams)) {} + + // Returns the current request sequence number, if this request has been + // sent (check `has_been_sent` first). Will return 0 if the request is just + // prepared (or scheduled for retransmission) but not yet sent. + ReconfigRequestSN req_seq_nbr() const { + return req_seq_nbr_.value_or(ReconfigRequestSN(0)); + } + + // The sender's last assigned TSN, from the retransmission queue. The + // receiver uses this to know when all data up to this TSN has been + // received, to know when to safely reset the stream. + TSN sender_last_assigned_tsn() const { return sender_last_assigned_tsn_; } + + // The streams that are to be reset. + const std::vector& streams() const { return streams_; } + + // If this request has been sent yet. If not, then it's either because it + // has only been prepared and not yet sent, or because the received couldn't + // apply the request, and then the exact same request will be retried, but + // with a new sequence number. + bool has_been_sent() const { return req_seq_nbr_.has_value(); } + + // If the receiver can't apply the request yet (and answered "In Progress"), + // this will be called to prepare the request to be retransmitted at a later + // time. + void PrepareRetransmission() { req_seq_nbr_ = absl::nullopt; } + + // If the request hasn't been sent yet, this assigns it a request number. + void PrepareToSend(ReconfigRequestSN new_req_seq_nbr) { + req_seq_nbr_ = new_req_seq_nbr; + } + + private: + // If this is set, this request has been sent. If it's not set, the request + // has been prepared, but has not yet been sent. This is typically used when + // the peer responded "in progress" and the same request (but a different + // request number) must be sent again. 
+ absl::optional req_seq_nbr_; + // The sender's (that's us) last assigned TSN, from the retransmission + // queue. + TSN sender_last_assigned_tsn_; + // The streams that are to be reset in this request. + const std::vector streams_; + }; + + // Called to validate an incoming RE-CONFIG chunk. + bool Validate(const ReConfigChunk& chunk); + + // Processes a stream stream reconfiguration chunk and may either return + // absl::nullopt (on protocol errors), or a list of responses - either 0, 1 + // or 2. + absl::optional> Process( + const ReConfigChunk& chunk); + + // Creates the actual RE-CONFIG chunk. A request (which set `current_request`) + // must have been created prior. + ReConfigChunk MakeReconfigChunk(); + + // Called to validate the `req_seq_nbr`, that it's the next in sequence. If it + // fails to validate, and returns false, it will also add a response to + // `responses`. + bool ValidateReqSeqNbr( + ReconfigRequestSN req_seq_nbr, + std::vector& responses); + + // Called when this socket receives an outgoing stream reset request. It might + // either be performed straight away, or have to be deferred, and the result + // of that will be put in `responses`. + void HandleResetOutgoing( + const ParameterDescriptor& descriptor, + std::vector& responses); + + // Called when this socket receives an incoming stream reset request. This + // isn't really supported, but a successful response is put in `responses`. + void HandleResetIncoming( + const ParameterDescriptor& descriptor, + std::vector& responses); + + // Called when receiving a response to an outgoing stream reset request. It + // will either commit the stream resetting, if the operation was successful, + // or will schedule a retry if it was deferred. And if it failed, the + // operation will be rolled back. + void HandleResponse(const ParameterDescriptor& descriptor); + + // Expiration handler for the Reconfig timer. 
+ absl::optional OnReconfigTimerExpiry(); + + const std::string log_prefix_; + Context* ctx_; + DataTracker* data_tracker_; + ReassemblyQueue* reassembly_queue_; + RetransmissionQueue* retransmission_queue_; + const std::unique_ptr reconfig_timer_; + + // Outgoing streams that have been requested to be reset, but hasn't yet + // been included in an outgoing request. + std::unordered_set streams_to_reset_; + + // The next sequence number for outgoing stream requests. + ReconfigRequestSN next_outgoing_req_seq_nbr_; + + // The current stream request operation. + absl::optional current_request_; + + // For incoming requests - last processed request sequence number. + ReconfigRequestSN last_processed_req_seq_nbr_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_STREAM_RESET_HANDLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc new file mode 100644 index 000000000..6e0be6a31 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/socket/transmission_control_block.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "net/dcsctp/packet/chunk/data_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/idata_chunk.h" +#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/reconfig_chunk.h" +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/rx/data_tracker.h" +#include "net/dcsctp/rx/reassembly_queue.h" +#include "net/dcsctp/socket/capabilities.h" +#include "net/dcsctp/socket/stream_reset_handler.h" +#include "net/dcsctp/timer/timer.h" +#include "net/dcsctp/tx/retransmission_queue.h" +#include "net/dcsctp/tx/retransmission_timeout.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { + +void TransmissionControlBlock::ObserveRTT(DurationMs rtt) { + DurationMs prev_rto = rto_.rto(); + rto_.ObserveRTT(rtt); + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "new rtt=" << *rtt + << ", srtt=" << *rto_.srtt() << ", rto=" << *rto_.rto() + << " (" << *prev_rto << ")"; + t3_rtx_->set_duration(rto_.rto()); + + DurationMs delayed_ack_tmo = + std::min(rto_.rto() * 0.5, options_.delayed_ack_max_timeout); + delayed_ack_timer_->set_duration(delayed_ack_tmo); +} + +absl::optional TransmissionControlBlock::OnRtxTimerExpiry() { + TimeMs now = callbacks_.TimeMillis(); + RTC_DLOG(LS_INFO) << log_prefix_ << "Timer " << t3_rtx_->name() + << " has expired"; + if (IncrementTxErrorCounter("t3-rtx expired")) { + retransmission_queue_.HandleT3RtxTimerExpiry(); + SendBufferedPackets(now); + } + return absl::nullopt; +} + +absl::optional TransmissionControlBlock::OnDelayedAckTimerExpiry() { + data_tracker_.HandleDelayedAckTimerExpiry(); + MaybeSendSack(); + return absl::nullopt; +} + +void 
TransmissionControlBlock::MaybeSendSack() { + if (data_tracker_.ShouldSendAck(/*also_if_delayed=*/false)) { + SctpPacket::Builder builder = PacketBuilder(); + builder.Add( + data_tracker_.CreateSelectiveAck(reassembly_queue_.remaining_bytes())); + Send(builder); + } +} + +void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, + TimeMs now, + bool only_one_packet) { + for (int packet_idx = 0;; ++packet_idx) { + // Only add control chunks to the first packet that is sent, if sending + // multiple packets in one go (as allowed by the congestion window). + if (packet_idx == 0) { + // https://tools.ietf.org/html/rfc4960#section-6 + // "Before an endpoint transmits a DATA chunk, if any received DATA + // chunks have not been acknowledged (e.g., due to delayed ack), the + // sender should create a SACK and bundle it with the outbound DATA chunk, + // as long as the size of the final SCTP packet does not exceed the + // current MTU." + if (data_tracker_.ShouldSendAck(/*also_if_delayed=*/true)) { + builder.Add(data_tracker_.CreateSelectiveAck( + reassembly_queue_.remaining_bytes())); + } + if (retransmission_queue_.ShouldSendForwardTsn(now)) { + if (capabilities_.message_interleaving) { + builder.Add(retransmission_queue_.CreateIForwardTsn()); + } else { + builder.Add(retransmission_queue_.CreateForwardTsn()); + } + } + absl::optional reconfig = + stream_reset_handler_.MakeStreamResetRequest(); + if (reconfig.has_value()) { + builder.Add(*reconfig); + } + } + + auto chunks = + retransmission_queue_.GetChunksToSend(now, builder.bytes_remaining()); + for (auto& elem : chunks) { + TSN tsn = elem.first; + Data data = std::move(elem.second); + if (capabilities_.message_interleaving) { + builder.Add(IDataChunk(tsn, std::move(data), false)); + } else { + builder.Add(DataChunk(tsn, std::move(data), false)); + } + } + if (builder.empty()) { + break; + } + Send(builder); + if (only_one_packet) { + break; + } + } +} + +std::string 
TransmissionControlBlock::ToString() const { + rtc::StringBuilder sb; + + sb.AppendFormat( + "verification_tag=%08x, last_cumulative_ack=%u, capabilities=", + *peer_verification_tag_, *data_tracker_.last_cumulative_acked_tsn()); + + if (capabilities_.partial_reliability) { + sb << "PR,"; + } + if (capabilities_.message_interleaving) { + sb << "IL,"; + } + if (capabilities_.reconfig) { + sb << "Reconfig,"; + } + + return sb.Release(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h new file mode 100644 index 000000000..2f1c9ada6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h @@ -0,0 +1,202 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_SOCKET_TRANSMISSION_CONTROL_BLOCK_H_ +#define NET_DCSCTP_SOCKET_TRANSMISSION_CONTROL_BLOCK_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/sctp_packet.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/rx/data_tracker.h" +#include "net/dcsctp/rx/reassembly_queue.h" +#include "net/dcsctp/socket/capabilities.h" +#include "net/dcsctp/socket/context.h" +#include "net/dcsctp/socket/heartbeat_handler.h" +#include "net/dcsctp/socket/stream_reset_handler.h" +#include "net/dcsctp/timer/timer.h" +#include "net/dcsctp/tx/retransmission_error_counter.h" +#include "net/dcsctp/tx/retransmission_queue.h" +#include "net/dcsctp/tx/retransmission_timeout.h" +#include "net/dcsctp/tx/send_queue.h" + +namespace dcsctp { + +// The TransmissionControlBlock (TCB) represents an open connection to a peer, +// and holds all the resources for that. If the connection is e.g. shutdown, +// closed or restarted, this object will be deleted and/or replaced. 
+class TransmissionControlBlock : public Context { + public: + TransmissionControlBlock(TimerManager& timer_manager, + absl::string_view log_prefix, + const DcSctpOptions& options, + const Capabilities& capabilities, + DcSctpSocketCallbacks& callbacks, + SendQueue& send_queue, + VerificationTag my_verification_tag, + TSN my_initial_tsn, + VerificationTag peer_verification_tag, + TSN peer_initial_tsn, + size_t a_rwnd, + TieTag tie_tag, + std::function is_connection_established, + std::function send_fn) + : log_prefix_(log_prefix), + options_(options), + timer_manager_(timer_manager), + capabilities_(capabilities), + callbacks_(callbacks), + t3_rtx_(timer_manager_.CreateTimer( + "t3-rtx", + [this]() { return OnRtxTimerExpiry(); }, + TimerOptions(options.rto_initial))), + delayed_ack_timer_(timer_manager_.CreateTimer( + "delayed-ack", + [this]() { return OnDelayedAckTimerExpiry(); }, + TimerOptions(options.delayed_ack_max_timeout, + TimerBackoffAlgorithm::kExponential, + /*max_restarts=*/0))), + my_verification_tag_(my_verification_tag), + my_initial_tsn_(my_initial_tsn), + peer_verification_tag_(peer_verification_tag), + peer_initial_tsn_(peer_initial_tsn), + tie_tag_(tie_tag), + is_connection_established_(std::move(is_connection_established)), + send_fn_(std::move(send_fn)), + rto_(options), + tx_error_counter_(log_prefix, options), + data_tracker_(log_prefix, delayed_ack_timer_.get(), peer_initial_tsn), + reassembly_queue_(log_prefix, + peer_initial_tsn, + options.max_receiver_window_buffer_size), + retransmission_queue_( + log_prefix, + my_initial_tsn, + a_rwnd, + send_queue, + [this](DurationMs rtt) { return ObserveRTT(rtt); }, + [this]() { callbacks_.NotifyOutgoingMessageBufferEmpty(); }, + [this]() { tx_error_counter_.Clear(); }, + *t3_rtx_, + options, + capabilities.partial_reliability, + capabilities.message_interleaving), + stream_reset_handler_(log_prefix, + this, + &timer_manager, + &data_tracker_, + &reassembly_queue_, + &retransmission_queue_), + 
heartbeat_handler_(log_prefix, options, this, &timer_manager_) {} + + // Implementation of `Context`. + bool is_connection_established() const override { + return is_connection_established_(); + } + TSN my_initial_tsn() const override { return my_initial_tsn_; } + TSN peer_initial_tsn() const override { return peer_initial_tsn_; } + DcSctpSocketCallbacks& callbacks() const override { return callbacks_; } + void ObserveRTT(DurationMs rtt) override; + DurationMs current_rto() const override { return rto_.rto(); } + bool IncrementTxErrorCounter(absl::string_view reason) override { + return tx_error_counter_.Increment(reason); + } + void ClearTxErrorCounter() override { tx_error_counter_.Clear(); } + SctpPacket::Builder PacketBuilder() const override { + return SctpPacket::Builder(peer_verification_tag_, options_); + } + bool HasTooManyTxErrors() const override { + return tx_error_counter_.IsExhausted(); + } + void Send(SctpPacket::Builder& builder) override { send_fn_(builder); } + + // Other accessors + DataTracker& data_tracker() { return data_tracker_; } + ReassemblyQueue& reassembly_queue() { return reassembly_queue_; } + RetransmissionQueue& retransmission_queue() { return retransmission_queue_; } + StreamResetHandler& stream_reset_handler() { return stream_reset_handler_; } + HeartbeatHandler& heartbeat_handler() { return heartbeat_handler_; } + + // Returns this socket's verification tag, set in all packet headers. + VerificationTag my_verification_tag() const { return my_verification_tag_; } + // Returns the peer's verification tag, which should be in received packets. + VerificationTag peer_verification_tag() const { + return peer_verification_tag_; + } + // All negotiated supported capabilities. + const Capabilities& capabilities() const { return capabilities_; } + // A 64-bit tie-tag, used to e.g. detect reconnections. + TieTag tie_tag() const { return tie_tag_; } + + // Sends a SACK, if there is a need to. 
+ void MaybeSendSack(); + + // Fills `builder` (which may already be filled with control chunks) with + // with other control and data chunks, and sends packets as much as can be + // allowed by the congestion control algorithm. If `only_one_packet` is true, + // only a single packet will be sent. Otherwise, zero, one or multiple may be + // sent. + void SendBufferedPackets(SctpPacket::Builder& builder, + TimeMs now, + bool only_one_packet = false); + + // As above, but without passing in a builder and allowing sending many + // packets. + void SendBufferedPackets(TimeMs now) { + SctpPacket::Builder builder(peer_verification_tag_, options_); + SendBufferedPackets(builder, now, /*only_one_packet=*/false); + } + + // Returns a textual representation of this object, for logging. + std::string ToString() const; + + private: + // Will be called when the retransmission timer (t3-rtx) expires. + absl::optional OnRtxTimerExpiry(); + // Will be called when the delayed ack timer expires. + absl::optional OnDelayedAckTimerExpiry(); + + const std::string log_prefix_; + const DcSctpOptions options_; + TimerManager& timer_manager_; + // Negotiated capabilities that both peers support. + const Capabilities capabilities_; + DcSctpSocketCallbacks& callbacks_; + // The data retransmission timer, called t3-rtx in SCTP. + const std::unique_ptr t3_rtx_; + // Delayed ack timer, which triggers when acks should be sent (when delayed). + const std::unique_ptr delayed_ack_timer_; + const VerificationTag my_verification_tag_; + const TSN my_initial_tsn_; + const VerificationTag peer_verification_tag_; + const TSN peer_initial_tsn_; + // Nonce, used to detect reconnections. 
+ const TieTag tie_tag_; + const std::function is_connection_established_; + const std::function send_fn_; + + RetransmissionTimeout rto_; + RetransmissionErrorCounter tx_error_counter_; + DataTracker data_tracker_; + ReassemblyQueue reassembly_queue_; + RetransmissionQueue retransmission_queue_; + StreamResetHandler stream_reset_handler_; + HeartbeatHandler heartbeat_handler_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_SOCKET_TRANSMISSION_CONTROL_BLOCK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.cc new file mode 100644 index 000000000..e4f9f9138 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.cc @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/testing/data_generator.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { +constexpr PPID kPpid = PPID(53); + +Data DataGenerator::Ordered(std::vector payload, + absl::string_view flags, + const DataGeneratorOptions opts) { + Data::IsBeginning is_beginning(flags.find('B') != std::string::npos); + Data::IsEnd is_end(flags.find('E') != std::string::npos); + + if (is_beginning) { + fsn_ = FSN(0); + } else { + fsn_ = FSN(*fsn_ + 1); + } + MID message_id = opts.message_id.value_or(message_id_); + Data ret = Data(opts.stream_id, SSN(static_cast(*message_id)), + message_id, fsn_, opts.ppid, std::move(payload), is_beginning, + is_end, IsUnordered(false)); + + if (is_end) { + message_id_ = MID(*message_id + 1); + } + return ret; +} + +Data DataGenerator::Unordered(std::vector payload, + absl::string_view flags, + const DataGeneratorOptions opts) { + Data::IsBeginning is_beginning(flags.find('B') != std::string::npos); + Data::IsEnd is_end(flags.find('E') != std::string::npos); + + if (is_beginning) { + fsn_ = FSN(0); + } else { + fsn_ = FSN(*fsn_ + 1); + } + MID message_id = opts.message_id.value_or(message_id_); + Data ret = Data(opts.stream_id, SSN(0), message_id, fsn_, kPpid, + std::move(payload), is_beginning, is_end, IsUnordered(true)); + if (is_end) { + message_id_ = MID(*message_id + 1); + } + return ret; +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h new file mode 100644 index 000000000..859450b1c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_TESTING_DATA_GENERATOR_H_ +#define NET_DCSCTP_TESTING_DATA_GENERATOR_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/data.h" + +namespace dcsctp { + +struct DataGeneratorOptions { + StreamID stream_id = StreamID(1); + absl::optional message_id = absl::nullopt; + PPID ppid = PPID(53); +}; + +// Generates Data with correct sequence numbers, and used only in unit tests. +class DataGenerator { + public: + explicit DataGenerator(MID start_message_id = MID(0)) + : message_id_(start_message_id) {} + + // Generates ordered "data" with the provided `payload` and flags, which can + // contain "B" for setting the "is_beginning" flag, and/or "E" for setting the + // "is_end" flag. + Data Ordered(std::vector payload, + absl::string_view flags = "", + const DataGeneratorOptions opts = {}); + + // Generates unordered "data" with the provided `payload` and flags, which can + // contain "B" for setting the "is_beginning" flag, and/or "E" for setting the + // "is_end" flag. + Data Unordered(std::vector payload, + absl::string_view flags = "", + const DataGeneratorOptions opts = {}); + + // Resets the Message ID identifier - simulating a "stream reset". 
+ void ResetStream() { message_id_ = MID(0); } + + private: + MID message_id_; + FSN fsn_ = FSN(0); +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_TESTING_DATA_GENERATOR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/testing_macros.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/testing_macros.h new file mode 100644 index 000000000..5cbdfffdc --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/testing_macros.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_TESTING_TESTING_MACROS_H_ +#define NET_DCSCTP_TESTING_TESTING_MACROS_H_ + +#include + +namespace dcsctp { + +#define DCSCTP_CONCAT_INNER_(x, y) x##y +#define DCSCTP_CONCAT_(x, y) DCSCTP_CONCAT_INNER_(x, y) + +// Similar to ASSERT_OK_AND_ASSIGN, this works with an absl::optional<> instead +// of an absl::StatusOr<>. +#define ASSERT_HAS_VALUE_AND_ASSIGN(lhs, rexpr) \ + auto DCSCTP_CONCAT_(tmp_opt_val__, __LINE__) = rexpr; \ + ASSERT_TRUE(DCSCTP_CONCAT_(tmp_opt_val__, __LINE__).has_value()); \ + lhs = *std::move(DCSCTP_CONCAT_(tmp_opt_val__, __LINE__)); + +} // namespace dcsctp + +#endif // NET_DCSCTP_TESTING_TESTING_MACROS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h new file mode 100644 index 000000000..927e6b280 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_TIMER_FAKE_TIMEOUT_H_ +#define NET_DCSCTP_TIMER_FAKE_TIMEOUT_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "net/dcsctp/public/timeout.h" +#include "rtc_base/checks.h" + +namespace dcsctp { + +// A timeout used in tests. +class FakeTimeout : public Timeout { + public: + explicit FakeTimeout(std::function get_time, + std::function on_delete) + : get_time_(std::move(get_time)), on_delete_(std::move(on_delete)) {} + + ~FakeTimeout() override { on_delete_(this); } + + void Start(DurationMs duration_ms, TimeoutID timeout_id) override { + RTC_DCHECK(expiry_ == TimeMs::InfiniteFuture()); + timeout_id_ = timeout_id; + expiry_ = get_time_() + duration_ms; + } + void Stop() override { + RTC_DCHECK(expiry_ != TimeMs::InfiniteFuture()); + expiry_ = TimeMs::InfiniteFuture(); + } + + bool EvaluateHasExpired(TimeMs now) { + if (now >= expiry_) { + expiry_ = TimeMs::InfiniteFuture(); + return true; + } + return false; + } + + TimeoutID timeout_id() const { return timeout_id_; } + + private: + const std::function get_time_; + const std::function on_delete_; + + TimeoutID timeout_id_ = TimeoutID(0); + TimeMs expiry_ = TimeMs::InfiniteFuture(); +}; + +class FakeTimeoutManager { + public: + // The `get_time` function must return the current time, relative to any + // epoch. 
+ explicit FakeTimeoutManager(std::function get_time) + : get_time_(std::move(get_time)) {} + + std::unique_ptr CreateTimeout() { + auto timer = std::make_unique( + get_time_, [this](FakeTimeout* timer) { timers_.erase(timer); }); + timers_.insert(timer.get()); + return timer; + } + + // NOTE: This can't return a vector, as calling EvaluateHasExpired requires + // calling socket->HandleTimeout directly afterwards, as the owning Timer + // still believes it's running, and it needs to be updated to set + // Timer::is_running_ to false before you operate on the Timer or Timeout + // again. + absl::optional GetNextExpiredTimeout() { + TimeMs now = get_time_(); + std::vector expired_timers; + for (auto& timer : timers_) { + if (timer->EvaluateHasExpired(now)) { + return timer->timeout_id(); + } + } + return absl::nullopt; + } + + private: + const std::function get_time_; + std::unordered_set timers_; +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_TIMER_FAKE_TIMEOUT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc new file mode 100644 index 000000000..6d3054eeb --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/timer/task_queue_timeout.h" + +#include "rtc_base/logging.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/to_queued_task.h" + +namespace dcsctp { + +TaskQueueTimeoutFactory::TaskQueueTimeout::TaskQueueTimeout( + TaskQueueTimeoutFactory& parent) + : parent_(parent), + pending_task_safety_flag_(webrtc::PendingTaskSafetyFlag::Create()) {} + +TaskQueueTimeoutFactory::TaskQueueTimeout::~TaskQueueTimeout() { + RTC_DCHECK_RUN_ON(&parent_.thread_checker_); + pending_task_safety_flag_->SetNotAlive(); +} + +void TaskQueueTimeoutFactory::TaskQueueTimeout::Start(DurationMs duration_ms, + TimeoutID timeout_id) { + RTC_DCHECK_RUN_ON(&parent_.thread_checker_); + RTC_DCHECK(timeout_expiration_ == TimeMs::InfiniteFuture()); + timeout_expiration_ = parent_.get_time_() + duration_ms; + timeout_id_ = timeout_id; + + if (timeout_expiration_ >= posted_task_expiration_) { + // There is already a running task, and it's scheduled to expire sooner than + // the new expiration time. Don't do anything; The `timeout_expiration_` has + // already been updated and if the delayed task _does_ expire and the timer + // hasn't been stopped, that will be noticed in the timeout handler, and the + // task will be re-scheduled. Most timers are stopped before they expire. + return; + } + + if (posted_task_expiration_ != TimeMs::InfiniteFuture()) { + RTC_DLOG(LS_VERBOSE) << "New timeout duration is less than scheduled - " + "ghosting old delayed task."; + // There is already a scheduled delayed task, but its expiration time is + // further away than the new expiration, so it can't be used. It will be + // "killed" by replacing the safety flag. This is not expected to happen + // especially often; Mainly when a timer did exponential backoff and + // later recovered. 
+ pending_task_safety_flag_->SetNotAlive(); + pending_task_safety_flag_ = webrtc::PendingTaskSafetyFlag::Create(); + } + + posted_task_expiration_ = timeout_expiration_; + parent_.task_queue_.PostDelayedTask( + webrtc::ToQueuedTask( + pending_task_safety_flag_, + [timeout_id, this]() { + RTC_DLOG(LS_VERBOSE) << "Timout expired: " << timeout_id.value(); + RTC_DCHECK_RUN_ON(&parent_.thread_checker_); + RTC_DCHECK(posted_task_expiration_ != TimeMs::InfiniteFuture()); + posted_task_expiration_ = TimeMs::InfiniteFuture(); + + if (timeout_expiration_ == TimeMs::InfiniteFuture()) { + // The timeout was stopped before it expired. Very common. + } else { + // Note that the timeout might have been restarted, which updated + // `timeout_expiration_` but left the scheduled task running. So + // if it's not quite time to trigger the timeout yet, schedule a + // new delayed task with what's remaining and retry at that point + // in time. + DurationMs remaining = timeout_expiration_ - parent_.get_time_(); + timeout_expiration_ = TimeMs::InfiniteFuture(); + if (*remaining > 0) { + Start(remaining, timeout_id_); + } else { + // It has actually triggered. + RTC_DLOG(LS_VERBOSE) + << "Timout triggered: " << timeout_id.value(); + parent_.on_expired_(timeout_id_); + } + } + }), + duration_ms.value()); +} + +void TaskQueueTimeoutFactory::TaskQueueTimeout::Stop() { + // As the TaskQueue doesn't support deleting a posted task, just mark the + // timeout as not running. + RTC_DCHECK_RUN_ON(&parent_.thread_checker_); + timeout_expiration_ = TimeMs::InfiniteFuture(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h new file mode 100644 index 000000000..e8d12df59 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_TIMER_TASK_QUEUE_TIMEOUT_H_ +#define NET_DCSCTP_TIMER_TASK_QUEUE_TIMEOUT_H_ + +#include +#include + +#include "api/task_queue/task_queue_base.h" +#include "net/dcsctp/public/timeout.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" + +namespace dcsctp { + +// The TaskQueueTimeoutFactory creates `Timeout` instances, which schedules +// itself to be triggered on the provided `task_queue`, which may be a thread, +// an actual TaskQueue or something else which supports posting a delayed task. +// +// Note that each `DcSctpSocket` must have its own `TaskQueueTimeoutFactory`, +// as the `TimeoutID` are not unique among sockets. +// +// This class must outlive any created Timeout that it has created. Note that +// the `DcSctpSocket` will ensure that all Timeouts are deleted when the socket +// is destructed, so this means that this class must outlive the `DcSctpSocket`. +// +// This class, and the timeouts created it, are not thread safe. +class TaskQueueTimeoutFactory { + public: + // The `get_time` function must return the current time, relative to any + // epoch. Whenever a timeout expires, the `on_expired` callback will be + // triggered, and then the client should provided `timeout_id` to + // `DcSctpSocketInterface::HandleTimeout`. + TaskQueueTimeoutFactory(webrtc::TaskQueueBase& task_queue, + std::function get_time, + std::function on_expired) + : task_queue_(task_queue), + get_time_(std::move(get_time)), + on_expired_(std::move(on_expired)) {} + + // Creates an implementation of `Timeout`. 
+ std::unique_ptr CreateTimeout() { + return std::make_unique(*this); + } + + private: + class TaskQueueTimeout : public Timeout { + public: + explicit TaskQueueTimeout(TaskQueueTimeoutFactory& parent); + ~TaskQueueTimeout(); + + void Start(DurationMs duration_ms, TimeoutID timeout_id) override; + void Stop() override; + + private: + TaskQueueTimeoutFactory& parent_; + // A safety flag to ensure that posted tasks to the task queue don't + // reference these object when they go out of scope. Note that this safety + // flag will be re-created if the scheduled-but-not-yet-expired task is not + // to be run. This happens when there is a posted delayed task with an + // expiration time _further away_ than what is now the expected expiration + // time. In this scenario, a new delayed task has to be posted with a + // shorter duration and the old task has to be forgotten. + rtc::scoped_refptr pending_task_safety_flag_; + // The time when the posted delayed task is set to expire. Will be set to + // the infinite future if there is no such task running. + TimeMs posted_task_expiration_ = TimeMs::InfiniteFuture(); + // The time when the timeout expires. It will be set to the infinite future + // if the timeout is not running/not started. + TimeMs timeout_expiration_ = TimeMs::InfiniteFuture(); + // The current timeout ID that will be reported when expired. + TimeoutID timeout_id_ = TimeoutID(0); + }; + + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; + webrtc::TaskQueueBase& task_queue_; + const std::function get_time_; + const std::function on_expired_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_TIMER_TASK_QUEUE_TIMEOUT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc new file mode 100644 index 000000000..593d639fa --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/timer/timer.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "net/dcsctp/public/timeout.h" +#include "rtc_base/checks.h" + +namespace dcsctp { +namespace { +TimeoutID MakeTimeoutId(TimerID timer_id, TimerGeneration generation) { + return TimeoutID(static_cast(*timer_id) << 32 | *generation); +} + +DurationMs GetBackoffDuration(TimerBackoffAlgorithm algorithm, + DurationMs base_duration, + int expiration_count) { + switch (algorithm) { + case TimerBackoffAlgorithm::kFixed: + return base_duration; + case TimerBackoffAlgorithm::kExponential: { + int32_t duration_ms = *base_duration; + + while (expiration_count > 0 && duration_ms < *Timer::kMaxTimerDuration) { + duration_ms *= 2; + --expiration_count; + } + + return DurationMs(std::min(duration_ms, *Timer::kMaxTimerDuration)); + } + } +} +} // namespace + +constexpr DurationMs Timer::kMaxTimerDuration; + +Timer::Timer(TimerID id, + absl::string_view name, + OnExpired on_expired, + UnregisterHandler unregister_handler, + std::unique_ptr timeout, + const TimerOptions& options) + : id_(id), + name_(name), + options_(options), + on_expired_(std::move(on_expired)), + unregister_handler_(std::move(unregister_handler)), + timeout_(std::move(timeout)), + duration_(options.duration) {} + +Timer::~Timer() { + Stop(); + unregister_handler_(); +} + +void Timer::Start() { + expiration_count_ = 0; + if (!is_running()) { + is_running_ = true; + generation_ = TimerGeneration(*generation_ + 1); + timeout_->Start(duration_, MakeTimeoutId(id_, generation_)); + } 
else { + // Timer was running - stop and restart it, to make it expire in `duration_` + // from now. + generation_ = TimerGeneration(*generation_ + 1); + timeout_->Restart(duration_, MakeTimeoutId(id_, generation_)); + } +} + +void Timer::Stop() { + if (is_running()) { + timeout_->Stop(); + expiration_count_ = 0; + is_running_ = false; + } +} + +void Timer::Trigger(TimerGeneration generation) { + if (is_running_ && generation == generation_) { + ++expiration_count_; + is_running_ = false; + if (options_.max_restarts < 0 || + expiration_count_ <= options_.max_restarts) { + // The timer should still be running after this triggers. Start a new + // timer. Note that it might be very quickly restarted again, if the + // `on_expired_` callback returns a new duration. + is_running_ = true; + DurationMs duration = GetBackoffDuration(options_.backoff_algorithm, + duration_, expiration_count_); + generation_ = TimerGeneration(*generation_ + 1); + timeout_->Start(duration, MakeTimeoutId(id_, generation_)); + } + + absl::optional new_duration = on_expired_(); + if (new_duration.has_value() && new_duration != duration_) { + duration_ = new_duration.value(); + if (is_running_) { + // Restart it with new duration. 
+ timeout_->Stop(); + + DurationMs duration = GetBackoffDuration(options_.backoff_algorithm, + duration_, expiration_count_); + generation_ = TimerGeneration(*generation_ + 1); + timeout_->Start(duration, MakeTimeoutId(id_, generation_)); + } + } + } +} + +void TimerManager::HandleTimeout(TimeoutID timeout_id) { + TimerID timer_id(*timeout_id >> 32); + TimerGeneration generation(*timeout_id); + auto it = timers_.find(timer_id); + if (it != timers_.end()) { + it->second->Trigger(generation); + } +} + +std::unique_ptr TimerManager::CreateTimer(absl::string_view name, + Timer::OnExpired on_expired, + const TimerOptions& options) { + next_id_ = TimerID(*next_id_ + 1); + TimerID id = next_id_; + // This would overflow after 4 billion timers created, which in SCTP would be + // after 800 million reconnections on a single socket. Ensure this will never + // happen. + RTC_CHECK_NE(*id, std::numeric_limits::max()); + auto timer = absl::WrapUnique(new Timer( + id, name, std::move(on_expired), [this, id]() { timers_.erase(id); }, + create_timeout_(), options)); + timers_[id] = timer.get(); + return timer; +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h new file mode 100644 index 000000000..bf923ea4c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h @@ -0,0 +1,185 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_TIMER_TIMER_H_ +#define NET_DCSCTP_TIMER_TIMER_H_ + +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "net/dcsctp/public/strong_alias.h" +#include "net/dcsctp/public/timeout.h" + +namespace dcsctp { + +using TimerID = StrongAlias; +using TimerGeneration = StrongAlias; + +enum class TimerBackoffAlgorithm { + // The base duration will be used for any restart. + kFixed, + // An exponential backoff is used for restarts, with a 2x multiplier, meaning + // that every restart will use a duration that is twice as long as the + // previous. + kExponential, +}; + +struct TimerOptions { + explicit TimerOptions(DurationMs duration) + : TimerOptions(duration, TimerBackoffAlgorithm::kExponential) {} + TimerOptions(DurationMs duration, TimerBackoffAlgorithm backoff_algorithm) + : TimerOptions(duration, backoff_algorithm, -1) {} + TimerOptions(DurationMs duration, + TimerBackoffAlgorithm backoff_algorithm, + int max_restarts) + : duration(duration), + backoff_algorithm(backoff_algorithm), + max_restarts(max_restarts) {} + + // The initial timer duration. Can be overridden with `set_duration`. + const DurationMs duration; + // If the duration should be increased (using exponential backoff) when it is + // restarted. If not set, the same duration will be used. + const TimerBackoffAlgorithm backoff_algorithm; + // The maximum number of times that the timer will be automatically restarted. + const int max_restarts; +}; + +// A high-level timer (in contrast to the low-level `Timeout` class). +// +// Timers are started and can be stopped or restarted. When a timer expires, +// the provided `on_expired` callback will be triggered. A timer is +// automatically restarted, as long as the number of restarts is below the +// configurable `max_restarts` parameter. The `is_running` property can be +// queried to know if it's still running after having expired. 
+// +// When a timer is restarted, it will use a configurable `backoff_algorithm` to +// possibly adjust the duration of the next expiry. It is also possible to +// return a new base duration (which is the duration before it's adjusted by the +// backoff algorithm). +class Timer { + public: + // The maximum timer duration - one day. + static constexpr DurationMs kMaxTimerDuration = DurationMs(24 * 3600 * 1000); + + // When expired, the timer handler can optionally return a new duration which + // will be set as `duration` and used as base duration when the timer is + // restarted and as input to the backoff algorithm. + using OnExpired = std::function()>; + + // TimerManager will have pointers to these instances, so they must not move. + Timer(const Timer&) = delete; + Timer& operator=(const Timer&) = delete; + + ~Timer(); + + // Starts the timer if it's stopped or restarts the timer if it's already + // running. The `expiration_count` will be reset. + void Start(); + + // Stops the timer. This can also be called when the timer is already stopped. + // The `expiration_count` will be reset. + void Stop(); + + // Sets the base duration. The actual timer duration may be larger depending + // on the backoff algorithm. + void set_duration(DurationMs duration) { + duration_ = std::min(duration, kMaxTimerDuration); + } + + // Retrieves the base duration. The actual timer duration may be larger + // depending on the backoff algorithm. + DurationMs duration() const { return duration_; } + + // Returns the number of times the timer has expired. + int expiration_count() const { return expiration_count_; } + + // Returns the timer's options. + const TimerOptions& options() const { return options_; } + + // Returns the name of the timer. + absl::string_view name() const { return name_; } + + // Indicates if this timer is currently running. 
+ bool is_running() const { return is_running_; } + + private: + friend class TimerManager; + using UnregisterHandler = std::function; + Timer(TimerID id, + absl::string_view name, + OnExpired on_expired, + UnregisterHandler unregister, + std::unique_ptr timeout, + const TimerOptions& options); + + // Called by TimerManager. Will trigger the callback and increment + // `expiration_count`. The timer will automatically be restarted at the + // duration as decided by the backoff algorithm, unless the + // `TimerOptions::max_restarts` has been reached and then it will be stopped + // and `is_running()` will return false. + void Trigger(TimerGeneration generation); + + const TimerID id_; + const std::string name_; + const TimerOptions options_; + const OnExpired on_expired_; + const UnregisterHandler unregister_handler_; + const std::unique_ptr timeout_; + + DurationMs duration_; + + // Increased on each start, and is matched on Trigger, to avoid races. And by + // race, meaning that a timeout - which may be evaluated/expired on a + // different thread while this thread has stopped that timer already. Note + // that the entire socket is not thread-safe, so `TimerManager::HandleTimeout` + // is never executed concurrently with any timer starting/stopping. + // + // This will wrap around after 4 billion timer restarts, and if it wraps + // around, it would just trigger _this_ timer in advance (but it's hard to + // restart it 4 billion times within its duration). + TimerGeneration generation_ = TimerGeneration(0); + bool is_running_ = false; + // Incremented each time time has expired and reset when stopped or restarted. + int expiration_count_ = 0; +}; + +// Creates and manages timers. +class TimerManager { + public: + explicit TimerManager( + std::function()> create_timeout) + : create_timeout_(std::move(create_timeout)) {} + + // Creates a timer with name `name` that will expire (when started) after + // `options.duration` and call `on_expired`. 
There are more `options` that + // affects the behavior. Note that timers are created initially stopped. + std::unique_ptr CreateTimer(absl::string_view name, + Timer::OnExpired on_expired, + const TimerOptions& options); + + void HandleTimeout(TimeoutID timeout_id); + + private: + const std::function()> create_timeout_; + std::unordered_map timers_; + TimerID next_id_ = TimerID(0); +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_TIMER_TIMER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/fcfs_send_queue.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/fcfs_send_queue.cc new file mode 100644 index 000000000..f2dc5e40f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/fcfs_send_queue.cc @@ -0,0 +1,250 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/tx/fcfs_send_queue.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/tx/send_queue.h" +#include "rtc_base/logging.h" + +namespace dcsctp { +void FCFSSendQueue::Add(TimeMs now, + DcSctpMessage message, + const SendOptions& send_options) { + RTC_DCHECK(!message.payload().empty()); + std::deque& queue = + IsPaused(message.stream_id()) ? paused_items_ : items_; + // Any limited lifetime should start counting from now - when the message + // has been added to the queue. 
+ absl::optional expires_at = absl::nullopt; + if (send_options.lifetime.has_value()) { + // `expires_at` is the time when it expires. Which is slightly larger than + // the message's lifetime, as the message is alive during its entire + // lifetime (which may be zero). + expires_at = now + *send_options.lifetime + DurationMs(1); + } + queue.emplace_back(std::move(message), expires_at, send_options); +} + +size_t FCFSSendQueue::total_bytes() const { + // TODO(boivie): Have the current size as a member variable, so that's it not + // calculated for every operation. + return absl::c_accumulate(items_, 0, + [](size_t size, const Item& item) { + return size + item.remaining_size; + }) + + absl::c_accumulate(paused_items_, 0, + [](size_t size, const Item& item) { + return size + item.remaining_size; + }); +} + +bool FCFSSendQueue::IsFull() const { + return total_bytes() >= buffer_size_; +} + +bool FCFSSendQueue::IsEmpty() const { + return items_.empty(); +} + +FCFSSendQueue::Item* FCFSSendQueue::GetFirstNonExpiredMessage(TimeMs now) { + while (!items_.empty()) { + FCFSSendQueue::Item& item = items_.front(); + // An entire item can be discarded iff: + // 1) It hasn't been partially sent (has been allocated a message_id). + // 2) It has a non-negative expiry time. + // 3) And that expiry time has passed. + if (!item.message_id.has_value() && item.expires_at.has_value() && + *item.expires_at <= now) { + // TODO(boivie): This should be reported to the client. 
+ RTC_DLOG(LS_VERBOSE) + << log_prefix_ + << "Message is expired before even partially sent - discarding"; + items_.pop_front(); + continue; + } + + return &item; + } + return nullptr; +} + +absl::optional FCFSSendQueue::Produce(TimeMs now, + size_t max_size) { + Item* item = GetFirstNonExpiredMessage(now); + if (item == nullptr) { + return absl::nullopt; + } + + DcSctpMessage& message = item->message; + + // Don't make too small fragments as that can result in increased risk of + // failure to assemble a message if a small fragment is missing. + if (item->remaining_size > max_size && max_size < kMinimumFragmentedPayload) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "tx-msg: Will not fragment " + << item->remaining_size << " bytes into buffer of " + << max_size << " bytes"; + return absl::nullopt; + } + + // Allocate Message ID and SSN when the first fragment is sent. + if (!item->message_id.has_value()) { + MID& mid = + mid_by_stream_id_[{item->send_options.unordered, message.stream_id()}]; + item->message_id = mid; + mid = MID(*mid + 1); + } + if (!item->send_options.unordered && !item->ssn.has_value()) { + SSN& ssn = ssn_by_stream_id_[message.stream_id()]; + item->ssn = ssn; + ssn = SSN(*ssn + 1); + } + + // Grab the next `max_size` fragment from this message and calculate flags. + rtc::ArrayView chunk_payload = + item->message.payload().subview(item->remaining_offset, max_size); + rtc::ArrayView message_payload = message.payload(); + Data::IsBeginning is_beginning(chunk_payload.data() == + message_payload.data()); + Data::IsEnd is_end((chunk_payload.data() + chunk_payload.size()) == + (message_payload.data() + message_payload.size())); + + StreamID stream_id = message.stream_id(); + PPID ppid = message.ppid(); + + // Zero-copy the payload if the message fits in a single chunk. + std::vector payload = + is_beginning && is_end + ? 
std::move(message).ReleasePayload() + : std::vector(chunk_payload.begin(), chunk_payload.end()); + + FSN fsn(item->current_fsn); + item->current_fsn = FSN(*item->current_fsn + 1); + + SendQueue::DataToSend chunk(Data(stream_id, item->ssn.value_or(SSN(0)), + item->message_id.value(), fsn, ppid, + std::move(payload), is_beginning, is_end, + item->send_options.unordered)); + chunk.max_retransmissions = item->send_options.max_retransmissions; + chunk.expires_at = item->expires_at; + + if (is_end) { + // The entire message has been sent, and its last data copied to `chunk`, so + // it can safely be discarded. + items_.pop_front(); + } else { + item->remaining_offset += chunk_payload.size(); + item->remaining_size -= chunk_payload.size(); + RTC_DCHECK(item->remaining_offset + item->remaining_size == + item->message.payload().size()); + RTC_DCHECK(item->remaining_size > 0); + } + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "tx-msg: Producing chunk of " + << chunk.data.size() << " bytes (max: " << max_size + << ")"; + return chunk; +} + +void FCFSSendQueue::Discard(IsUnordered unordered, + StreamID stream_id, + MID message_id) { + // As this method will only discard partially sent messages, and as the queue + // is a FIFO queue, the only partially sent message would be the topmost + // message. + if (!items_.empty()) { + Item& item = items_.front(); + if (item.send_options.unordered == unordered && + item.message.stream_id() == stream_id && item.message_id.has_value() && + *item.message_id == message_id) { + items_.pop_front(); + } + } +} + +void FCFSSendQueue::PrepareResetStreams( + rtc::ArrayView streams) { + for (StreamID stream_id : streams) { + paused_streams_.insert(stream_id); + } + + // Will not discard partially sent messages - only whole messages. Partially + // delivered messages (at the time of receiving a Stream Reset command) will + // always deliver all the fragments before actually resetting the stream. 
+ for (auto it = items_.begin(); it != items_.end();) { + if (IsPaused(it->message.stream_id()) && it->remaining_offset == 0) { + it = items_.erase(it); + } else { + ++it; + } + } +} + +bool FCFSSendQueue::CanResetStreams() const { + for (auto& item : items_) { + if (IsPaused(item.message.stream_id())) { + return false; + } + } + return true; +} + +void FCFSSendQueue::CommitResetStreams() { + for (StreamID stream_id : paused_streams_) { + ssn_by_stream_id_[stream_id] = SSN(0); + // https://tools.ietf.org/html/rfc8260#section-2.3.2 + // "When an association resets the SSN using the SCTP extension defined + // in [RFC6525], the two counters (one for the ordered messages, one for + // the unordered messages) used for the MIDs MUST be reset to 0." + mid_by_stream_id_[{IsUnordered(false), stream_id}] = MID(0); + mid_by_stream_id_[{IsUnordered(true), stream_id}] = MID(0); + } + RollbackResetStreams(); +} + +void FCFSSendQueue::RollbackResetStreams() { + while (!paused_items_.empty()) { + items_.push_back(std::move(paused_items_.front())); + paused_items_.pop_front(); + } + paused_streams_.clear(); +} + +void FCFSSendQueue::Reset() { + if (!items_.empty()) { + // If this message has been partially sent, reset it so that it will be + // re-sent. 
+ auto& item = items_.front(); + item.remaining_offset = 0; + item.remaining_size = item.message.payload().size(); + item.message_id = absl::nullopt; + item.ssn = absl::nullopt; + item.current_fsn = FSN(0); + } + RollbackResetStreams(); + mid_by_stream_id_.clear(); + ssn_by_stream_id_.clear(); +} + +bool FCFSSendQueue::IsPaused(StreamID stream_id) const { + return paused_streams_.find(stream_id) != paused_streams_.end(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/fcfs_send_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/fcfs_send_queue.h new file mode 100644 index 000000000..63e7eab49 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/fcfs_send_queue.h @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_TX_FCFS_SEND_QUEUE_H_ +#define NET_DCSCTP_TX_FCFS_SEND_QUEUE_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/pair_hash.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_socket.h" +#include "net/dcsctp/public/types.h" +#include "net/dcsctp/tx/send_queue.h" + +namespace dcsctp { + +// The FCFSSendQueue (First-Come, First-Served Send Queue) holds all messages +// that the client wants to send, but that haven't yet been split into chunks +// and sent on the wire. 
+// +// First-Come, First Served means that it passes the data in the exact same +// order as they were delivered by the calling application, and is defined in +// https://tools.ietf.org/html/rfc8260#section-3.1. It's a FIFO queue, but that +// term isn't used in this RFC. +// +// As messages can be (requested to be) sent before +// the connection is properly established, this send queue is always present - +// even for closed connections. +class FCFSSendQueue : public SendQueue { + public: + // How small a data chunk's payload may be, if having to fragment a message. + static constexpr size_t kMinimumFragmentedPayload = 10; + + FCFSSendQueue(absl::string_view log_prefix, size_t buffer_size) + : log_prefix_(std::string(log_prefix) + "fcfs: "), + buffer_size_(buffer_size) {} + + // Indicates if the buffer is full. Note that it's up to the caller to ensure + // that the buffer is not full prior to adding new items to it. + bool IsFull() const; + // Indicates if the buffer is empty. + bool IsEmpty() const; + + // Adds the message to be sent using the `send_options` provided. The current + // time should be in `now`. Note that it's the responsibility of the caller to + // ensure that the buffer is not full (by calling `IsFull`) before adding + // messages to it. + void Add(TimeMs now, + DcSctpMessage message, + const SendOptions& send_options = {}); + + // Implementation of `SendQueue`. + absl::optional Produce(TimeMs now, size_t max_size) override; + void Discard(IsUnordered unordered, + StreamID stream_id, + MID message_id) override; + void PrepareResetStreams(rtc::ArrayView streams) override; + bool CanResetStreams() const override; + void CommitResetStreams() override; + void RollbackResetStreams() override; + void Reset() override; + + // The size of the buffer, in "payload bytes". + size_t total_bytes() const; + + private: + // An enqueued message and metadata. 
+ struct Item { + explicit Item(DcSctpMessage msg, + absl::optional expires_at, + const SendOptions& send_options) + : message(std::move(msg)), + expires_at(expires_at), + send_options(send_options), + remaining_offset(0), + remaining_size(message.payload().size()) {} + DcSctpMessage message; + absl::optional expires_at; + SendOptions send_options; + // The remaining payload (offset and size) to be sent, when it has been + // fragmented. + size_t remaining_offset; + size_t remaining_size; + // If set, an allocated Message ID and SSN. Will be allocated when the first + // fragment is sent. + absl::optional message_id = absl::nullopt; + absl::optional ssn = absl::nullopt; + // The current Fragment Sequence Number, incremented for each fragment. + FSN current_fsn = FSN(0); + }; + + Item* GetFirstNonExpiredMessage(TimeMs now); + bool IsPaused(StreamID stream_id) const; + + const std::string log_prefix_; + const size_t buffer_size_; + std::deque items_; + + std::unordered_set paused_streams_; + std::deque paused_items_; + + std::unordered_map, MID, UnorderedStreamHash> + mid_by_stream_id_; + std::unordered_map ssn_by_stream_id_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_TX_FCFS_SEND_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/mock_send_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/mock_send_queue.h new file mode 100644 index 000000000..54f5fd275 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/mock_send_queue.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_TX_MOCK_SEND_QUEUE_H_ +#define NET_DCSCTP_TX_MOCK_SEND_QUEUE_H_ + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/tx/send_queue.h" +#include "test/gmock.h" + +namespace dcsctp { + +class MockSendQueue : public SendQueue { + public: + MockSendQueue() { + ON_CALL(*this, Produce).WillByDefault([](TimeMs now, size_t max_size) { + return absl::nullopt; + }); + } + + MOCK_METHOD(absl::optional, + Produce, + (TimeMs now, size_t max_size), + (override)); + MOCK_METHOD(void, + Discard, + (IsUnordered unordered, StreamID stream_id, MID message_id), + (override)); + MOCK_METHOD(void, + PrepareResetStreams, + (rtc::ArrayView streams), + (override)); + MOCK_METHOD(bool, CanResetStreams, (), (const, override)); + MOCK_METHOD(void, CommitResetStreams, (), (override)); + MOCK_METHOD(void, RollbackResetStreams, (), (override)); + MOCK_METHOD(void, Reset, (), (override)); +}; + +} // namespace dcsctp + +#endif // NET_DCSCTP_TX_MOCK_SEND_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_error_counter.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_error_counter.cc new file mode 100644 index 000000000..111b6efe9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_error_counter.cc @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/tx/retransmission_error_counter.h" + +#include "absl/strings/string_view.h" +#include "rtc_base/logging.h" + +namespace dcsctp { +bool RetransmissionErrorCounter::Increment(absl::string_view reason) { + ++counter_; + if (counter_ > limit_) { + RTC_DLOG(LS_INFO) << log_prefix_ << reason + << ", too many retransmissions, counter=" << counter_; + return false; + } + + RTC_DLOG(LS_VERBOSE) << log_prefix_ << reason << ", new counter=" << counter_ + << ", max=" << limit_; + return true; +} + +void RetransmissionErrorCounter::Clear() { + if (counter_ > 0) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "recovered from counter=" << counter_; + counter_ = 0; + } +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_error_counter.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_error_counter.h new file mode 100644 index 000000000..bb8d1f754 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_error_counter.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_TX_RETRANSMISSION_ERROR_COUNTER_H_ +#define NET_DCSCTP_TX_RETRANSMISSION_ERROR_COUNTER_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "net/dcsctp/public/dcsctp_options.h" + +namespace dcsctp { + +// The RetransmissionErrorCounter is a simple counter with a limit, and when +// the limit is exceeded, the counter is exhausted and the connection will +// be closed. 
It's incremented on retransmission errors, such as the T3-RTX +// timer expiring, but also missing heartbeats and stream reset requests. +class RetransmissionErrorCounter { + public: + RetransmissionErrorCounter(absl::string_view log_prefix, + const DcSctpOptions& options) + : log_prefix_(std::string(log_prefix) + "rtx-errors: "), + limit_(options.max_retransmissions) {} + + // Increments the retransmission timer. If the maximum error count has been + // reached, `false` will be returned. + bool Increment(absl::string_view reason); + bool IsExhausted() const { return counter_ > limit_; } + + // Clears the retransmission errors. + void Clear(); + + // Returns its current value + int value() const { return counter_; } + + private: + const std::string log_prefix_; + const int limit_; + int counter_ = 0; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_TX_RETRANSMISSION_ERROR_COUNTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.cc new file mode 100644 index 000000000..704e6ab16 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.cc @@ -0,0 +1,798 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "net/dcsctp/tx/retransmission_queue.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/math.h" +#include "net/dcsctp/common/pair_hash.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/common/str_join.h" +#include "net/dcsctp/packet/chunk/data_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" +#include "net/dcsctp/packet/chunk/idata_chunk.h" +#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/types.h" +#include "net/dcsctp/timer/timer.h" +#include "net/dcsctp/tx/send_queue.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" + +namespace dcsctp { +namespace { + +// The number of times a packet must be NACKed before it's retransmitted. +// See https://tools.ietf.org/html/rfc4960#section-7.2.4 +constexpr size_t kNumberOfNacksForRetransmission = 3; +} // namespace + +RetransmissionQueue::RetransmissionQueue( + absl::string_view log_prefix, + TSN initial_tsn, + size_t a_rwnd, + SendQueue& send_queue, + std::function on_new_rtt, + std::function on_send_queue_empty, + std::function on_clear_retransmission_counter, + Timer& t3_rtx, + const DcSctpOptions& options, + bool supports_partial_reliability, + bool use_message_interleaving) + : options_(options), + partial_reliability_(supports_partial_reliability), + log_prefix_(std::string(log_prefix) + "tx: "), + data_chunk_header_size_(use_message_interleaving + ? 
IDataChunk::kHeaderSize + : DataChunk::kHeaderSize), + on_new_rtt_(std::move(on_new_rtt)), + on_send_queue_empty_(std::move(on_send_queue_empty)), + on_clear_retransmission_counter_( + std::move(on_clear_retransmission_counter)), + t3_rtx_(t3_rtx), + cwnd_(options_.cwnd_mtus_initial * options_.mtu), + rwnd_(a_rwnd), + // https://tools.ietf.org/html/rfc4960#section-7.2.1 + // "The initial value of ssthresh MAY be arbitrarily high (for + // example, implementations MAY use the size of the receiver advertised + // window)."" + ssthresh_(rwnd_), + next_tsn_(tsn_unwrapper_.Unwrap(initial_tsn)), + last_cumulative_tsn_ack_(tsn_unwrapper_.Unwrap(TSN(*initial_tsn - 1))), + send_queue_(send_queue) {} + +// Returns how large a chunk will be, serialized, carrying the data +size_t RetransmissionQueue::GetSerializedChunkSize(const Data& data) const { + return RoundUpTo4(data_chunk_header_size_ + data.size()); +} + +void RetransmissionQueue::RemoveAcked(UnwrappedTSN cumulative_tsn_ack, + AckInfo& ack_info) { + auto first_unacked = outstanding_data_.upper_bound(cumulative_tsn_ack); + + for (auto it = outstanding_data_.begin(); it != first_unacked; ++it) { + ack_info.bytes_acked_by_cumulative_tsn_ack += it->second.data().size(); + ack_info.acked_tsns.push_back(it->first.Wrap()); + } + + outstanding_data_.erase(outstanding_data_.begin(), first_unacked); +} + +void RetransmissionQueue::AckGapBlocks( + UnwrappedTSN cumulative_tsn_ack, + rtc::ArrayView gap_ack_blocks, + AckInfo& ack_info) { + // Mark all non-gaps as ACKED (but they can't be removed) as (from RFC) + // "SCTP considers the information carried in the Gap Ack Blocks in the + // SACK chunk as advisory.". Note that when NR-SACK is supported, this can be + // handled differently. 
+ + for (auto& block : gap_ack_blocks) { + auto start = outstanding_data_.lower_bound( + UnwrappedTSN::AddTo(cumulative_tsn_ack, block.start)); + auto end = outstanding_data_.upper_bound( + UnwrappedTSN::AddTo(cumulative_tsn_ack, block.end)); + for (auto iter = start; iter != end; ++iter) { + if (iter->second.state() != State::kAcked) { + ack_info.bytes_acked_by_new_gap_ack_blocks += + iter->second.data().size(); + iter->second.SetState(State::kAcked); + ack_info.highest_tsn_acked = + std::max(ack_info.highest_tsn_acked, iter->first); + ack_info.acked_tsns.push_back(iter->first.Wrap()); + } + } + } +} + +void RetransmissionQueue::NackBetweenAckBlocks( + UnwrappedTSN cumulative_tsn_ack, + rtc::ArrayView gap_ack_blocks, + AckInfo& ack_info) { + // Mark everything between the blocks as NACKED/TO_BE_RETRANSMITTED. + // https://tools.ietf.org/html/rfc4960#section-7.2.4 + // "Mark the DATA chunk(s) with three miss indications for retransmission." + // "For each incoming SACK, miss indications are incremented only for + // missing TSNs prior to the highest TSN newly acknowledged in the SACK." + // + // What this means is that only when there is a increasing stream of data + // received and there are new packets seen (since last time), packets that are + // in-flight and between gaps should be nacked. This means that SCTP relies on + // the T3-RTX-timer to re-send packets otherwise. + UnwrappedTSN max_tsn_to_nack = ack_info.highest_tsn_acked; + if (is_in_fast_recovery() && cumulative_tsn_ack > last_cumulative_tsn_ack_) { + // https://tools.ietf.org/html/rfc4960#section-7.2.4 + // "If an endpoint is in Fast Recovery and a SACK arrives that advances + // the Cumulative TSN Ack Point, the miss indications are incremented for + // all TSNs reported missing in the SACK." + max_tsn_to_nack = UnwrappedTSN::AddTo( + cumulative_tsn_ack, + gap_ack_blocks.empty() ? 
0 : gap_ack_blocks.rbegin()->end); + } + + UnwrappedTSN prev_block_last_acked = cumulative_tsn_ack; + for (auto& block : gap_ack_blocks) { + UnwrappedTSN cur_block_first_acked = + UnwrappedTSN::AddTo(cumulative_tsn_ack, block.start); + for (auto iter = outstanding_data_.upper_bound(prev_block_last_acked); + iter != outstanding_data_.lower_bound(cur_block_first_acked); ++iter) { + if (iter->first <= max_tsn_to_nack) { + iter->second.Nack(); + + if (iter->second.state() == State::kToBeRetransmitted) { + ack_info.has_packet_loss = true; + RTC_DLOG(LS_VERBOSE) << log_prefix_ << *iter->first.Wrap() + << " marked for retransmission"; + } + } + } + prev_block_last_acked = UnwrappedTSN::AddTo(cumulative_tsn_ack, block.end); + } + + // Note that packets are not NACKED which are above the highest gap-ack-block + // (or above the cumulative ack TSN if no gap-ack-blocks) as only packets + // up until the highest_tsn_acked (see above) should be considered when + // NACKing. +} + +void RetransmissionQueue::MaybeExitFastRecovery( + UnwrappedTSN cumulative_tsn_ack) { + // https://tools.ietf.org/html/rfc4960#section-7.2.4 + // "When a SACK acknowledges all TSNs up to and including this [fast + // recovery] exit point, Fast Recovery is exited." + if (fast_recovery_exit_tsn_.has_value() && + cumulative_tsn_ack >= *fast_recovery_exit_tsn_) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "exit_point=" << *fast_recovery_exit_tsn_->Wrap() + << " reached - exiting fast recovery"; + fast_recovery_exit_tsn_ = absl::nullopt; + } +} + +void RetransmissionQueue::HandleIncreasedCumulativeTsnAck( + size_t outstanding_bytes, + size_t total_bytes_acked) { + // Allow some margin for classifying as fully utilized, due to e.g. that too + // small packets (less than kMinimumFragmentedPayload) are not sent + + // overhead. 
+ bool is_fully_utilized = outstanding_bytes + options_.mtu >= cwnd_; + size_t old_cwnd = cwnd_; + if (phase() == CongestionAlgorithmPhase::kSlowStart) { + if (is_fully_utilized && !is_in_fast_recovery()) { + // https://tools.ietf.org/html/rfc4960#section-7.2.1 + // "Only when these three conditions are met can the cwnd be + // increased; otherwise, the cwnd MUST not be increased. If these + // conditions are met, then cwnd MUST be increased by, at most, the + // lesser of 1) the total size of the previously outstanding DATA + // chunk(s) acknowledged, and 2) the destination's path MTU." + if (options_.slow_start_tcp_style) { + cwnd_ += std::min(total_bytes_acked, cwnd_); + } else { + cwnd_ += std::min(total_bytes_acked, options_.mtu); + } + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "SS increase cwnd=" << cwnd_ + << " (" << old_cwnd << ")"; + } + } else if (phase() == CongestionAlgorithmPhase::kCongestionAvoidance) { + // https://tools.ietf.org/html/rfc4960#section-7.2.2 + // "Whenever cwnd is greater than ssthresh, upon each SACK arrival + // that advances the Cumulative TSN Ack Point, increase + // partial_bytes_acked by the total number of bytes of all new chunks + // acknowledged in that SACK including chunks acknowledged by the new + // Cumulative TSN Ack and by Gap Ack Blocks." + size_t old_pba = partial_bytes_acked_; + partial_bytes_acked_ += total_bytes_acked; + + if (partial_bytes_acked_ >= cwnd_ && is_fully_utilized) { + // https://tools.ietf.org/html/rfc4960#section-7.2.2 + // "When partial_bytes_acked is equal to or greater than cwnd and + // before the arrival of the SACK the sender had cwnd or more bytes of + // data outstanding (i.e., before arrival of the SACK, flightsize was + // greater than or equal to cwnd), increase cwnd by MTU, and reset + // partial_bytes_acked to (partial_bytes_acked - cwnd)." 
+ cwnd_ += options_.mtu; + partial_bytes_acked_ -= cwnd_; + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "CA increase cwnd=" << cwnd_ + << " (" << old_cwnd << ") ssthresh=" << ssthresh_ + << ", pba=" << partial_bytes_acked_ << " (" + << old_pba << ")"; + } else { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "CA unchanged cwnd=" << cwnd_ + << " (" << old_cwnd << ") ssthresh=" << ssthresh_ + << ", pba=" << partial_bytes_acked_ << " (" + << old_pba << ")"; + } + } +} + +void RetransmissionQueue::HandlePacketLoss(UnwrappedTSN highest_tsn_acked) { + if (!is_in_fast_recovery()) { + // https://tools.ietf.org/html/rfc4960#section-7.2.4 + // "If not in Fast Recovery, adjust the ssthresh and cwnd of the + // destination address(es) to which the missing DATA chunks were last + // sent, according to the formula described in Section 7.2.3." + size_t old_cwnd = cwnd_; + size_t old_pba = partial_bytes_acked_; + ssthresh_ = std::max(cwnd_ / 2, options_.cwnd_mtus_min * options_.mtu); + cwnd_ = ssthresh_; + partial_bytes_acked_ = 0; + + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "packet loss detected (not fast recovery). cwnd=" + << cwnd_ << " (" << old_cwnd + << "), ssthresh=" << ssthresh_ + << ", pba=" << partial_bytes_acked_ << " (" << old_pba + << ")"; + + // https://tools.ietf.org/html/rfc4960#section-7.2.4 + // "If not in Fast Recovery, enter Fast Recovery and mark the highest + // outstanding TSN as the Fast Recovery exit point." + fast_recovery_exit_tsn_ = outstanding_data_.empty() + ? last_cumulative_tsn_ack_ + : outstanding_data_.rbegin()->first; + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "fast recovery initiated with exit_point=" + << *fast_recovery_exit_tsn_->Wrap(); + } else { + // https://tools.ietf.org/html/rfc4960#section-7.2.4 + // "While in Fast Recovery, the ssthresh and cwnd SHOULD NOT change for + // any destinations due to a subsequent Fast Recovery event (i.e., one + // SHOULD NOT reduce the cwnd further due to a subsequent Fast Retransmit)." 
+ RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "packet loss detected (fast recovery). No changes."; + } +} + +void RetransmissionQueue::UpdateReceiverWindow(uint32_t a_rwnd) { + rwnd_ = outstanding_bytes_ >= a_rwnd ? 0 : a_rwnd - outstanding_bytes_; +} + +void RetransmissionQueue::StartT3RtxTimerIfOutstandingData() { + // Note: Can't use `outstanding_bytes()` as that one doesn't count chunks to + // be retransmitted. + if (outstanding_data_.empty()) { + // https://tools.ietf.org/html/rfc4960#section-6.3.2 + // "Whenever all outstanding data sent to an address have been + // acknowledged, turn off the T3-rtx timer of that address. + // Note: Already stopped in `StopT3RtxTimerOnIncreasedCumulativeTsnAck`." + } else { + // https://tools.ietf.org/html/rfc4960#section-6.3.2 + // "Whenever a SACK is received that acknowledges the DATA chunk + // with the earliest outstanding TSN for that address, restart the T3-rtx + // timer for that address with its current RTO (if there is still + // outstanding data on that address)." + // "Whenever a SACK is received missing a TSN that was previously + // acknowledged via a Gap Ack Block, start the T3-rtx for the destination + // address to which the DATA chunk was originally transmitted if it is not + // already running." + if (!t3_rtx_.is_running()) { + t3_rtx_.Start(); + } + } +} + +bool RetransmissionQueue::IsSackValid(const SackChunk& sack) const { + // https://tools.ietf.org/html/rfc4960#section-6.2.1 + // "If Cumulative TSN Ack is less than the Cumulative TSN Ack Point, + // then drop the SACK. Since Cumulative TSN Ack is monotonically increasing, + // a SACK whose Cumulative TSN Ack is less than the Cumulative TSN Ack Point + // indicates an out-of- order SACK." + // + // Note: Important not to drop SACKs with identical TSN to that previously + // received, as the gap ack blocks or dup tsn fields may have changed. 
+ UnwrappedTSN cumulative_tsn_ack = + tsn_unwrapper_.PeekUnwrap(sack.cumulative_tsn_ack()); + if (cumulative_tsn_ack < last_cumulative_tsn_ack_) { + // https://tools.ietf.org/html/rfc4960#section-6.2.1 + // "If Cumulative TSN Ack is less than the Cumulative TSN Ack Point, + // then drop the SACK. Since Cumulative TSN Ack is monotonically + // increasing, a SACK whose Cumulative TSN Ack is less than the Cumulative + // TSN Ack Point indicates an out-of- order SACK." + return false; + } else if (outstanding_data_.empty() && + cumulative_tsn_ack > last_cumulative_tsn_ack_) { + // No in-flight data and cum-tsn-ack above what was last ACKed - not valid. + return false; + } else if (!outstanding_data_.empty() && + cumulative_tsn_ack > outstanding_data_.rbegin()->first) { + // There is in-flight data, but the cum-tsn-ack is beyond that - not valid. + return false; + } + return true; +} + +bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) { + if (!IsSackValid(sack)) { + return false; + } + + size_t old_outstanding_bytes = outstanding_bytes_; + size_t old_rwnd = rwnd_; + UnwrappedTSN cumulative_tsn_ack = + tsn_unwrapper_.Unwrap(sack.cumulative_tsn_ack()); + + if (sack.gap_ack_blocks().empty()) { + UpdateRTT(now, cumulative_tsn_ack); + } + + AckInfo ack_info(cumulative_tsn_ack); + // Erase all items up to cumulative_tsn_ack. + RemoveAcked(cumulative_tsn_ack, ack_info); + + // ACK packets reported in the gap ack blocks + AckGapBlocks(cumulative_tsn_ack, sack.gap_ack_blocks(), ack_info); + + // NACK and possibly mark for retransmit chunks that weren't acked. + NackBetweenAckBlocks(cumulative_tsn_ack, sack.gap_ack_blocks(), ack_info); + + RecalculateOutstandingBytes(); + // Update of outstanding_data_ is now done. Congestion control remains. + UpdateReceiverWindow(sack.a_rwnd()); + + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Received SACK. 
Acked TSN: " + << StrJoin(ack_info.acked_tsns, ",", + [](rtc::StringBuilder& sb, TSN tsn) { + sb << *tsn; + }) + << ", cum_tsn_ack=" << *cumulative_tsn_ack.Wrap() << " (" + << *last_cumulative_tsn_ack_.Wrap() + << "), outstanding_bytes=" << outstanding_bytes_ << " (" + << old_outstanding_bytes << "), rwnd=" << rwnd_ << " (" + << old_rwnd << ")"; + + MaybeExitFastRecovery(cumulative_tsn_ack); + + if (cumulative_tsn_ack > last_cumulative_tsn_ack_) { + // https://tools.ietf.org/html/rfc4960#section-6.3.2 + // "Whenever a SACK is received that acknowledges the DATA chunk + // with the earliest outstanding TSN for that address, restart the T3-rtx + // timer for that address with its current RTO (if there is still + // outstanding data on that address)." + // Note: It may be started again in a bit further down. + t3_rtx_.Stop(); + + HandleIncreasedCumulativeTsnAck( + old_outstanding_bytes, ack_info.bytes_acked_by_cumulative_tsn_ack + + ack_info.bytes_acked_by_new_gap_ack_blocks); + } + + if (ack_info.has_packet_loss) { + is_in_fast_retransmit_ = true; + HandlePacketLoss(ack_info.highest_tsn_acked); + } + + // https://tools.ietf.org/html/rfc4960#section-8.2 + // "When an outstanding TSN is acknowledged [...] the endpoint shall clear + // the error counter ..." + if (ack_info.bytes_acked_by_cumulative_tsn_ack > 0 || + ack_info.bytes_acked_by_new_gap_ack_blocks > 0) { + on_clear_retransmission_counter_(); + } + + last_cumulative_tsn_ack_ = cumulative_tsn_ack; + StartT3RtxTimerIfOutstandingData(); + return true; +} + +void RetransmissionQueue::UpdateRTT(TimeMs now, + UnwrappedTSN cumulative_tsn_ack) { + // RTT updating is flawed in SCTP, as explained in e.g. Pedersen J, Griwodz C, + // Halvorsen P (2006) Considerations of SCTP retransmission delays for thin + // streams. + // Due to delayed acknowledgement, the SACK may be sent much later which + // increases the calculated RTT. 
+ // TODO(boivie): Consider occasionally sending DATA chunks with I-bit set and + // use only those packets for measurement. + + auto it = outstanding_data_.find(cumulative_tsn_ack); + if (it != outstanding_data_.end()) { + if (!it->second.has_been_retransmitted()) { + // https://tools.ietf.org/html/rfc4960#section-6.3.1 + // "Karn's algorithm: RTT measurements MUST NOT be made using + // packets that were retransmitted (and thus for which it is ambiguous + // whether the reply was for the first instance of the chunk or for a + // later instance)" + DurationMs rtt = now - it->second.time_sent(); + on_new_rtt_(rtt); + } + } +} + +void RetransmissionQueue::RecalculateOutstandingBytes() { + outstanding_bytes_ = absl::c_accumulate( + outstanding_data_, 0, + [&](size_t r, const std::pair& d) { + // Packets that have been ACKED or NACKED are not outstanding, as they + // are received. And packets that are marked for retransmission or + // abandoned are lost, and not outstanding. + return r + (d.second.state() == State::kInFlight + ? GetSerializedChunkSize(d.second.data()) + : 0); + }); +} + +void RetransmissionQueue::HandleT3RtxTimerExpiry() { + size_t old_cwnd = cwnd_; + size_t old_outstanding_bytes = outstanding_bytes_; + // https://tools.ietf.org/html/rfc4960#section-6.3.3 + // "For the destination address for which the timer expires, adjust + // its ssthresh with rules defined in Section 7.2.3 and set the cwnd <- MTU." + ssthresh_ = std::max(cwnd_ / 2, 4 * options_.mtu); + cwnd_ = 1 * options_.mtu; + + // https://tools.ietf.org/html/rfc4960#section-6.3.3 + // "For the destination address for which the timer expires, set RTO + // <- RTO * 2 ("back off the timer"). The maximum value discussed in rule C7 + // above (RTO.max) may be used to provide an upper bound to this doubling + // operation." + + // Already done by the Timer implementation. 
+ + // https://tools.ietf.org/html/rfc4960#section-6.3.3 + // "Determine how many of the earliest (i.e., lowest TSN) outstanding + // DATA chunks for the address for which the T3-rtx has expired will fit into + // a single packet" + + // https://tools.ietf.org/html/rfc4960#section-6.3.3 + // "Note: Any DATA chunks that were sent to the address for which the + // T3-rtx timer expired but did not fit in one MTU (rule E3 above) should be + // marked for retransmission and sent as soon as cwnd allows (normally, when a + // SACK arrives)." + int count = 0; + for (auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + TxData& item = elem.second; + if (item.state() == State::kInFlight || item.state() == State::kNacked) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Chunk " << *tsn.Wrap() + << " will be retransmitted due to T3-RTX"; + item.SetState(State::kToBeRetransmitted); + ++count; + } + } + + // Marking some packets as retransmitted changes outstanding bytes. + RecalculateOutstandingBytes(); + + // https://tools.ietf.org/html/rfc4960#section-6.3.3 + // "Start the retransmission timer T3-rtx on the destination address + // to which the retransmission is sent, if rule R1 above indicates to do so." + + // Already done by the Timer implementation. + + RTC_DLOG(LS_INFO) << log_prefix_ << "t3-rtx expired. 
new cwnd=" << cwnd_ + << " (" << old_cwnd << "), ssthresh=" << ssthresh_ + << ", rtx-packets=" << count << ", outstanding_bytes " + << outstanding_bytes_ << " (" << old_outstanding_bytes + << ")"; +} + +std::vector> +RetransmissionQueue::GetChunksToBeRetransmitted(size_t max_size) { + std::vector> result; + for (auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + TxData& item = elem.second; + + size_t serialized_size = GetSerializedChunkSize(item.data()); + if (item.state() == State::kToBeRetransmitted && + serialized_size <= max_size) { + item.Retransmit(); + result.emplace_back(tsn.Wrap(), item.data().Clone()); + max_size -= serialized_size; + } + // No point in continuing if the packet is full. + if (max_size <= data_chunk_header_size_) { + break; + } + } + // As some chunks may have switched state, that needs to be reflected here. + if (!result.empty()) { + RecalculateOutstandingBytes(); + } + return result; +} + +std::vector> RetransmissionQueue::GetChunksToSend( + TimeMs now, + size_t bytes_remaining_in_packet) { + // Chunks are always padded to even divisible by four. + RTC_DCHECK(IsDivisibleBy4(bytes_remaining_in_packet)); + + std::vector> to_be_sent; + size_t old_outstanding_bytes = outstanding_bytes_; + size_t old_rwnd = rwnd_; + if (is_in_fast_retransmit()) { + // https://tools.ietf.org/html/rfc4960#section-7.2.4 + // "Determine how many of the earliest (i.e., lowest TSN) DATA chunks + // marked for retransmission will fit into a single packet ... Retransmit + // those K DATA chunks in a single packet. When a Fast Retransmit is being + // performed, the sender SHOULD ignore the value of cwnd and SHOULD NOT + // delay retransmission for this single packet." 
+ is_in_fast_retransmit_ = false; + to_be_sent = GetChunksToBeRetransmitted(bytes_remaining_in_packet); + size_t to_be_sent_bytes = absl::c_accumulate( + to_be_sent, 0, [&](size_t r, const std::pair& d) { + return r + GetSerializedChunkSize(d.second); + }); + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "fast-retransmit: sending " + << to_be_sent.size() << " chunks, " << to_be_sent_bytes + << " bytes"; + } else { + // Normal sending. Calculate the bandwidth budget (how many bytes that is + // allowed to be sent), and fill that up first with chunks that are + // scheduled to be retransmitted. If there is still budget, send new chunks + // (which will have their TSN assigned here.) + size_t remaining_cwnd_bytes = + outstanding_bytes_ >= cwnd_ ? 0 : cwnd_ - outstanding_bytes_; + size_t max_bytes = RoundDownTo4(std::min( + std::min(bytes_remaining_in_packet, rwnd()), remaining_cwnd_bytes)); + + to_be_sent = GetChunksToBeRetransmitted(max_bytes); + max_bytes -= absl::c_accumulate( + to_be_sent, 0, [&](size_t r, const std::pair& d) { + return r + GetSerializedChunkSize(d.second); + }); + + while (max_bytes > data_chunk_header_size_) { + RTC_DCHECK(IsDivisibleBy4(max_bytes)); + absl::optional chunk_opt = + send_queue_.Produce(now, max_bytes - data_chunk_header_size_); + if (!chunk_opt.has_value()) { + on_send_queue_empty_(); + break; + } + + UnwrappedTSN tsn = next_tsn_; + next_tsn_.Increment(); + to_be_sent.emplace_back(tsn.Wrap(), chunk_opt->data.Clone()); + + // All chunks are always padded to be even divisible by 4. 
+ size_t chunk_size = GetSerializedChunkSize(chunk_opt->data); + max_bytes -= chunk_size; + outstanding_bytes_ += chunk_size; + rwnd_ -= chunk_size; + outstanding_data_.emplace( + tsn, RetransmissionQueue::TxData(std::move(chunk_opt->data), + chunk_opt->max_retransmissions, now, + chunk_opt->expires_at)); + } + } + + if (!to_be_sent.empty()) { + // https://tools.ietf.org/html/rfc4960#section-6.3.2 + // "Every time a DATA chunk is sent to any address (including a + // retransmission), if the T3-rtx timer of that address is not running, + // start it running so that it will expire after the RTO of that address." + if (!t3_rtx_.is_running()) { + t3_rtx_.Start(); + } + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Sending TSN " + << StrJoin(to_be_sent, ",", + [&](rtc::StringBuilder& sb, + const std::pair& c) { + sb << *c.first; + }) + << " - " + << absl::c_accumulate( + to_be_sent, 0, + [&](size_t r, const std::pair& d) { + return r + GetSerializedChunkSize(d.second); + }) + << " bytes. outstanding_bytes=" << outstanding_bytes_ + << " (" << old_outstanding_bytes << "), cwnd=" << cwnd_ + << ", rwnd=" << rwnd_ << " (" << old_rwnd << ")"; + } + return to_be_sent; +} + +std::vector> +RetransmissionQueue::GetChunkStatesForTesting() const { + std::vector> states; + states.emplace_back(last_cumulative_tsn_ack_.Wrap(), State::kAcked); + for (const auto& elem : outstanding_data_) { + states.emplace_back(elem.first.Wrap(), elem.second.state()); + } + return states; +} + +bool RetransmissionQueue::ShouldSendForwardTsn(TimeMs now) { + if (!partial_reliability_) { + return false; + } + ExpireChunks(now); + if (!outstanding_data_.empty()) { + auto it = outstanding_data_.begin(); + return it->first == last_cumulative_tsn_ack_.next_value() && + it->second.state() == State::kAbandoned; + } + return false; +} + +void RetransmissionQueue::TxData::Nack() { + ++nack_count_; + if (nack_count_ >= kNumberOfNacksForRetransmission) { + state_ = State::kToBeRetransmitted; + } else { + state_ = 
State::kNacked; + } +} + +void RetransmissionQueue::TxData::Retransmit() { + state_ = State::kInFlight; + nack_count_ = 0; + ++num_retransmissions_; +} + +bool RetransmissionQueue::TxData::has_expired(TimeMs now) const { + if (state_ != State::kAcked && state_ != State::kAbandoned) { + if (max_retransmissions_.has_value() && + num_retransmissions_ >= *max_retransmissions_) { + return true; + } else if (expires_at_.has_value() && *expires_at_ <= now) { + return true; + } + } + return false; +} + +void RetransmissionQueue::ExpireChunks(TimeMs now) { + for (const auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + const TxData& item = elem.second; + + // Chunks that are in-flight (possibly lost?), nacked or to be retransmitted + // can be expired easily. There is always a risk that a message is expired + // that was already received by the peer, but for which there haven't been + // a SACK received. But that's acceptable, and handled. + if (item.has_expired(now)) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Marking chunk " << *tsn.Wrap() + << " and message " << *item.data().message_id + << " as expired"; + ExpireAllFor(item); + } + } +} + +void RetransmissionQueue::ExpireAllFor( + const RetransmissionQueue::TxData& item) { + // Erase all remaining chunks from the producer, if any. 
+ send_queue_.Discard(item.data().is_unordered, item.data().stream_id, + item.data().message_id); + for (auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + TxData& other = elem.second; + + if (other.state() != State::kAbandoned && + other.data().stream_id == item.data().stream_id && + other.data().is_unordered == item.data().is_unordered && + other.data().message_id == item.data().message_id) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Marking chunk " << *tsn.Wrap() + << " as abandoned"; + other.SetState(State::kAbandoned); + } + } +} + +ForwardTsnChunk RetransmissionQueue::CreateForwardTsn() const { + std::unordered_map + skipped_per_ordered_stream; + UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_; + + for (const auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + const TxData& item = elem.second; + + if ((tsn != new_cumulative_ack.next_value()) || + item.state() != State::kAbandoned) { + break; + } + new_cumulative_ack = tsn; + if (!item.data().is_unordered && + item.data().ssn > skipped_per_ordered_stream[item.data().stream_id]) { + skipped_per_ordered_stream[item.data().stream_id] = item.data().ssn; + } + } + + std::vector skipped_streams; + skipped_streams.reserve(skipped_per_ordered_stream.size()); + for (const auto& elem : skipped_per_ordered_stream) { + skipped_streams.emplace_back(elem.first, elem.second); + } + return ForwardTsnChunk(new_cumulative_ack.Wrap(), std::move(skipped_streams)); +} + +IForwardTsnChunk RetransmissionQueue::CreateIForwardTsn() const { + std::unordered_map, MID, UnorderedStreamHash> + skipped_per_stream; + UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_; + + for (const auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + const TxData& item = elem.second; + + if ((tsn != new_cumulative_ack.next_value()) || + item.state() != State::kAbandoned) { + break; + } + new_cumulative_ack = tsn; + std::pair stream_id = + std::make_pair(item.data().is_unordered, 
item.data().stream_id); + + if (item.data().message_id > skipped_per_stream[stream_id]) { + skipped_per_stream[stream_id] = item.data().message_id; + } + } + + std::vector skipped_streams; + skipped_streams.reserve(skipped_per_stream.size()); + for (const auto& elem : skipped_per_stream) { + const std::pair& stream = elem.first; + MID message_id = elem.second; + skipped_streams.emplace_back(stream.first, stream.second, message_id); + } + + return IForwardTsnChunk(new_cumulative_ack.Wrap(), + std::move(skipped_streams)); +} + +void RetransmissionQueue::PrepareResetStreams( + rtc::ArrayView streams) { + // TODO(boivie): These calls are now only affecting the send queue. The + // packet buffer can also change behavior - for example draining the chunk + // producer and eagerly assign TSNs so that an "Outgoing SSN Reset Request" + // can be sent quickly, with a known `sender_last_assigned_tsn`. + send_queue_.PrepareResetStreams(streams); +} +bool RetransmissionQueue::CanResetStreams() const { + return send_queue_.CanResetStreams(); +} +void RetransmissionQueue::CommitResetStreams() { + send_queue_.CommitResetStreams(); +} +void RetransmissionQueue::RollbackResetStreams() { + send_queue_.RollbackResetStreams(); +} + +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.h new file mode 100644 index 000000000..c2599a438 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_queue.h @@ -0,0 +1,345 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_TX_RETRANSMISSION_QUEUE_H_ +#define NET_DCSCTP_TX_RETRANSMISSION_QUEUE_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/sequence_numbers.h" +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" +#include "net/dcsctp/packet/chunk/sack_chunk.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/timer/timer.h" +#include "net/dcsctp/tx/retransmission_timeout.h" +#include "net/dcsctp/tx/send_queue.h" + +namespace dcsctp { + +// The RetransmissionQueue manages all DATA/I-DATA chunks that are in-flight and +// schedules them to be retransmitted if necessary. Chunks are retransmitted +// when they have been lost for a number of consecutive SACKs, or when the +// retransmission timer, `t3_rtx` expires. +// +// As congestion control is tightly connected with the state of transmitted +// packets, that's also managed here to limit the amount of data that is +// in-flight (sent, but not yet acknowledged). +class RetransmissionQueue { + public: + static constexpr size_t kMinimumFragmentedPayload = 10; + // State for DATA chunks (message fragments) in the queue. + enum class State { + // The chunk has been sent but not received yet (from the sender's point of + // view, as no SACK has been received yet that reference this chunk). + kInFlight, + // A SACK has been received which explicitly marked this chunk as missing - + // it's now NACKED and may be retransmitted if NACKED enough times. + kNacked, + // A chunk that will be retransmitted when possible. + kToBeRetransmitted, + // A SACK has been received which explicitly marked this chunk as received. + kAcked, + // A chunk whose message has expired or has been retransmitted too many + // times (RFC3758). It will not be retransmitted anymore. 
+ kAbandoned, + }; + + // Creates a RetransmissionQueue which will send data using `initial_tsn` as + // the first TSN to use for sent fragments. It will poll data from + // `send_queue` and call `on_send_queue_empty` when it is empty. When + // SACKs are received, it will estimate the RTT, and call `on_new_rtt`. When + // an outstanding chunk has been ACKed, it will call + // `on_clear_retransmission_counter` and will also use `t3_rtx`, which is the + // SCTP retransmission timer to manage retransmissions. + RetransmissionQueue(absl::string_view log_prefix, + TSN initial_tsn, + size_t a_rwnd, + SendQueue& send_queue, + std::function on_new_rtt, + std::function on_send_queue_empty, + std::function on_clear_retransmission_counter, + Timer& t3_rtx, + const DcSctpOptions& options, + bool supports_partial_reliability = true, + bool use_message_interleaving = false); + + // Handles a received SACK. Returns true if the `sack` was processed and + // false if it was discarded due to received out-of-order and not relevant. + bool HandleSack(TimeMs now, const SackChunk& sack); + + // Handles an expired retransmission timer. + void HandleT3RtxTimerExpiry(); + + // Returns a list of chunks to send that would fit in one SCTP packet with + // `bytes_remaining_in_packet` bytes available. This may be further limited by + // the congestion control windows. Note that `ShouldSendForwardTSN` must be + // called prior to this method, to abandon expired chunks, as this method will + // not expire any chunks. + std::vector> GetChunksToSend( + TimeMs now, + size_t bytes_remaining_in_packet); + + // Returns the internal state of all queued chunks. This is only used in + // unit-tests. + std::vector> GetChunkStatesForTesting() const; + + // Returns the next TSN that will be allocated for sent DATA chunks. + TSN next_tsn() const { return next_tsn_.Wrap(); } + + // Returns the size of the congestion window, in bytes. This is the number of + // bytes that may be in-flight. 
+ size_t cwnd() const { return cwnd_; } + + // Overrides the current congestion window size. + void set_cwnd(size_t cwnd) { cwnd_ = cwnd; } + + // Returns the current receiver window size. + size_t rwnd() const { return rwnd_; } + + // Returns the number of bytes of packets that are in-flight. + size_t outstanding_bytes() const { return outstanding_bytes_; } + + // Given the current time `now`, it will evaluate if there are chunks that + // have expired and that need to be discarded. It returns true if a + // FORWARD-TSN should be sent. + bool ShouldSendForwardTsn(TimeMs now); + + // Creates a FORWARD-TSN chunk. + ForwardTsnChunk CreateForwardTsn() const; + + // Creates an I-FORWARD-TSN chunk. + IForwardTsnChunk CreateIForwardTsn() const; + + // See the SendQueue for a longer description of these methods related + // to stream resetting. + void PrepareResetStreams(rtc::ArrayView streams); + bool CanResetStreams() const; + void CommitResetStreams(); + void RollbackResetStreams(); + + private: + enum class CongestionAlgorithmPhase { + kSlowStart, + kCongestionAvoidance, + }; + + // A fragmented message's DATA chunk while in the retransmission queue, and + // its associated metadata. + class TxData { + public: + explicit TxData(Data data, + absl::optional max_retransmissions, + TimeMs time_sent, + absl::optional expires_at) + : max_retransmissions_(max_retransmissions), + time_sent_(time_sent), + expires_at_(expires_at), + data_(std::move(data)) {} + + TimeMs time_sent() const { return time_sent_; } + + State state() const { return state_; } + void SetState(State state) { state_ = state; } + + const Data& data() const { return data_; } + + // Nacks an item. If it has been nacked enough times, it will be marked for + // retransmission. 
+ void Nack(); + void Retransmit(); + + bool has_been_retransmitted() { return num_retransmissions_ > 0; } + + // Given the current time, and the current state of this DATA chunk, it will + // indicate if it has expired (SCTP Partial Reliability Extension). + bool has_expired(TimeMs now) const; + + private: + State state_ = State::kInFlight; + // The number of times the DATA chunk has been nacked (by having received a + // SACK which doesn't include it). Will be cleared on retransmissions. + size_t nack_count_ = 0; + // The number of times the DATA chunk has been retransmitted. + size_t num_retransmissions_ = 0; + // If the message was sent with a maximum number of retransmissions, this is + // set to that number. The value zero (0) means that it will never be + // retransmitted. + const absl::optional max_retransmissions_; + // When the packet was sent, and placed in this queue. + const TimeMs time_sent_; + // If the message was sent with an expiration time, this is set. + const absl::optional expires_at_; + // The actual data to send/retransmit. + Data data_; + }; + + // Contains variables scoped to a processing of an incoming SACK. + struct AckInfo { + explicit AckInfo(UnwrappedTSN cumulative_tsn_ack) + : highest_tsn_acked(cumulative_tsn_ack) {} + + // All TSNs that have been acked (for the first time) in this SACK. + std::vector acked_tsns; + + // Bytes acked by increasing cumulative_tsn_ack in this SACK + size_t bytes_acked_by_cumulative_tsn_ack = 0; + + // Bytes acked by gap blocks in this SACK. + size_t bytes_acked_by_new_gap_ack_blocks = 0; + + // Indicates if this SACK indicates that packet loss has occurred. Just + // because a packet is missing in the SACK doesn't necessarily mean that + // there is packet loss as that packet might be in-flight and received + // out-of-order. But when it has been reported missing consecutive times, it + // will eventually be considered "lost" and this will be set. 
+ bool has_packet_loss = false; + + // Highest TSN Newly Acknowledged, an SCTP variable. + UnwrappedTSN highest_tsn_acked; + }; + + // Returns how large a chunk will be, serialized, carrying the data + size_t GetSerializedChunkSize(const Data& data) const; + + // Indicates if the congestion control algorithm is in "fast recovery". + bool is_in_fast_recovery() const { + return fast_recovery_exit_tsn_.has_value(); + } + + // Indicates if the congestion control algorithm is in "fast retransmit". + bool is_in_fast_retransmit() const { return is_in_fast_retransmit_; } + + // Indicates if the provided SACK is valid given what has previously been + // received. If it returns false, the SACK is most likely a duplicate of + // something already seen, so this returning false doesn't necessarily mean + // that the SACK is illegal. + bool IsSackValid(const SackChunk& sack) const; + + // Given a `cumulative_tsn_ack` from an incoming SACK, will remove those items + // in the retransmission queue up until this value and will update `ack_info` + // by setting `bytes_acked_by_cumulative_tsn_ack` and `acked_tsns`. + void RemoveAcked(UnwrappedTSN cumulative_tsn_ack, AckInfo& ack_info); + + // Will mark the chunks covered by the `gap_ack_blocks` from an incoming SACK + // as "acked" and update `ack_info` by adding new TSNs to `added_tsns`. + void AckGapBlocks(UnwrappedTSN cumulative_tsn_ack, + rtc::ArrayView gap_ack_blocks, + AckInfo& ack_info); + + // Mark chunks reported as "missing", as "nacked" or "to be retransmitted" + // depending how many times this has happened. Only packets up until + // `ack_info.highest_tsn_acked` (highest TSN newly acknowledged) are + // nacked/retransmitted. The method will set `ack_info.has_packet_loss`. 
+ void NackBetweenAckBlocks( + UnwrappedTSN cumulative_tsn_ack, + rtc::ArrayView gap_ack_blocks, + AckInfo& ack_info); + + // When a SACK chunk is received, this method will be called which _may_ call + // into the `RetransmissionTimeout` to update the RTO. + void UpdateRTT(TimeMs now, UnwrappedTSN cumulative_tsn_ack); + + // If the congestion control is in "fast recovery mode", this may be exited + // now. + void MaybeExitFastRecovery(UnwrappedTSN cumulative_tsn_ack); + + // If chunks have been ACKed, stop the retransmission timer. + void StopT3RtxTimerOnIncreasedCumulativeTsnAck( + UnwrappedTSN cumulative_tsn_ack); + + // Update the congestion control algorithm given as the cumulative ack TSN + // value has increased, as reported in an incoming SACK chunk. + void HandleIncreasedCumulativeTsnAck(size_t outstanding_bytes, + size_t total_bytes_acked); + // Update the congestion control algorithm, given as packet loss has been + // detected, as reported in an incoming SACK chunk. + void HandlePacketLoss(UnwrappedTSN highest_tsn_acked); + // Recalculate the number of in-flight payload bytes. + void RecalculateOutstandingBytes(); + // Update the view of the receiver window size. + void UpdateReceiverWindow(uint32_t a_rwnd); + // Given `max_size` of space left in a packet, which chunks can be added to + // it? + std::vector> GetChunksToBeRetransmitted(size_t max_size); + // If there is data sent and not ACKED, ensure that the retransmission timer + // is running. + void StartT3RtxTimerIfOutstandingData(); + + // Given the current time `now_ms`, expire chunks that have a limited + // lifetime. + void ExpireChunks(TimeMs now); + // Given that a message fragment, `item` has expired, expire all other + // fragments that share the same message - even never-before-sent fragments + // that are still in the SendQueue. + void ExpireAllFor(const RetransmissionQueue::TxData& item); + + // Returns the current congestion control algorithm phase. 
+ CongestionAlgorithmPhase phase() const { + return (cwnd_ <= ssthresh_) + ? CongestionAlgorithmPhase::kSlowStart + : CongestionAlgorithmPhase::kCongestionAvoidance; + } + + const DcSctpOptions options_; + // If the peer supports RFC3758 - SCTP Partial Reliability Extension. + const bool partial_reliability_; + const std::string log_prefix_; + // The size of the data chunk (DATA/I-DATA) header that is used. + const size_t data_chunk_header_size_; + // Called when a new RTT measurement has been done + const std::function on_new_rtt_; + // Called when the send queue is empty. + const std::function on_send_queue_empty_; + // Called when a SACK has been seen that cleared the retransmission counter. + const std::function on_clear_retransmission_counter_; + // The retransmission counter. + Timer& t3_rtx_; + // Unwraps TSNs + UnwrappedTSN::Unwrapper tsn_unwrapper_; + + // Congestion Window. Number of bytes that may be in-flight (sent, not acked). + size_t cwnd_; + // Receive Window. Number of bytes available in the receiver's RX buffer. + size_t rwnd_; + // Slow Start Threshold. See RFC4960. + size_t ssthresh_; + // Partial Bytes Acked. See RFC4960. + size_t partial_bytes_acked_ = 0; + // If set, fast recovery is enabled until this TSN has been cumulative + // acked. + absl::optional fast_recovery_exit_tsn_ = absl::nullopt; + // Indicates if the congestion algorithm is in fast retransmit. + bool is_in_fast_retransmit_ = false; + + // Next TSN to used. + UnwrappedTSN next_tsn_; + // The last cumulative TSN ack number + UnwrappedTSN last_cumulative_tsn_ack_; + // The send queue. + SendQueue& send_queue_; + // All the outstanding data chunks that are in-flight and that have not been + // cumulative acked. Note that it also contains chunks that have been acked in + // gap ack blocks. 
+ std::map outstanding_data_; + // The sum of the message bytes of the send_queue_ + size_t outstanding_bytes_ = 0; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_TX_RETRANSMISSION_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.cc new file mode 100644 index 000000000..f38b94d32 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.cc @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "net/dcsctp/tx/retransmission_timeout.h" + +#include +#include + +#include "net/dcsctp/public/dcsctp_options.h" + +namespace dcsctp { +namespace { +// https://tools.ietf.org/html/rfc4960#section-15 +constexpr double kRtoAlpha = 0.125; +constexpr double kRtoBeta = 0.25; +} // namespace + +RetransmissionTimeout::RetransmissionTimeout(const DcSctpOptions& options) + : min_rto_(*options.rto_min), + max_rto_(*options.rto_max), + max_rtt_(*options.rtt_max), + rto_(*options.rto_initial) {} + +void RetransmissionTimeout::ObserveRTT(DurationMs measured_rtt) { + double rtt = *measured_rtt; + + // Unrealistic values will be skipped. If a wrongly measured (or otherwise + // corrupt) value was processed, it could change the state in a way that would + // take a very long time to recover. + if (rtt < 0.0 || rtt > max_rtt_) { + return; + } + + if (first_measurement_) { + // https://tools.ietf.org/html/rfc4960#section-6.3.1 + // "When the first RTT measurement R is made, set + // SRTT <- R, + // RTTVAR <- R/2, and + // RTO <- SRTT + 4 * RTTVAR." 
+ srtt_ = rtt; + rttvar_ = rtt * 0.5; + rto_ = srtt_ + 4 * rttvar_; + first_measurement_ = false; + } else { + // https://tools.ietf.org/html/rfc4960#section-6.3.1 + // "When a new RTT measurement R' is made, set + // RTTVAR <- (1 - RTO.Beta) * RTTVAR + RTO.Beta * |SRTT - R'| + // SRTT <- (1 - RTO.Alpha) * SRTT + RTO.Alpha * R' + // RTO <- SRTT + 4 * RTTVAR." + rttvar_ = (1 - kRtoBeta) * rttvar_ + kRtoBeta * std::abs(srtt_ - rtt); + srtt_ = (1 - kRtoAlpha) * srtt_ + kRtoAlpha * rtt; + rto_ = srtt_ + 4 * rttvar_; + } + + // Clamp RTO between min and max. + rto_ = std::fmin(std::fmax(rto_, min_rto_), max_rto_); +} +} // namespace dcsctp diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.h new file mode 100644 index 000000000..0fac33e59 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/retransmission_timeout.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef NET_DCSCTP_TX_RETRANSMISSION_TIMEOUT_H_ +#define NET_DCSCTP_TX_RETRANSMISSION_TIMEOUT_H_ + +#include +#include + +#include "net/dcsctp/public/dcsctp_options.h" + +namespace dcsctp { + +// Manages updating of the Retransmission Timeout (RTO) SCTP variable, which is +// used directly as the base timeout for T3-RTX and for other timers, such as +// delayed ack. +// +// When a round-trip-time (RTT) is calculated (outside this class), `Observe` +// is called, which calculates the retransmission timeout (RTO) value. 
The RTO +// value will become larger if the RTT is high and/or the RTT values are varying +// a lot, which is an indicator of a bad connection. +class RetransmissionTimeout { + public: + explicit RetransmissionTimeout(const DcSctpOptions& options); + + // To be called when a RTT has been measured, to update the RTO value. + void ObserveRTT(DurationMs measured_rtt); + + // Returns the Retransmission Timeout (RTO) value, in milliseconds. + DurationMs rto() const { return DurationMs(rto_); } + + // Returns the smoothed RTT value, in milliseconds. + DurationMs srtt() const { return DurationMs(srtt_); } + + private: + // Note that all intermediate state calculation is done in the floating point + // domain, to maintain precision. + const double min_rto_; + const double max_rto_; + const double max_rtt_; + // If this is the first measurement + bool first_measurement_ = true; + // Smoothed Round-Trip Time + double srtt_ = 0.0; + // Round-Trip Time Variation + double rttvar_ = 0.0; + // Retransmission Timeout + double rto_; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_TX_RETRANSMISSION_TIMEOUT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/send_queue.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/send_queue.h new file mode 100644 index 000000000..bb5aab2df --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/send_queue.h @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef NET_DCSCTP_TX_SEND_QUEUE_H_ +#define NET_DCSCTP_TX_SEND_QUEUE_H_ + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/data.h" +#include "net/dcsctp/public/types.h" + +namespace dcsctp { + +class SendQueue { + public: + // Container for a data chunk that is produced by the SendQueue + struct DataToSend { + explicit DataToSend(Data data) : data(std::move(data)) {} + // The data to send, including all parameters. + Data data; + + // Partial reliability - RFC3758 + absl::optional max_retransmissions; + absl::optional expires_at; + }; + + virtual ~SendQueue() = default; + + // TODO(boivie): This interface is obviously missing an "Add" function, but + // that is postponed a bit until the story around how to model message + // prioritization, which is important for any advanced stream scheduler, is + // further clarified. + + // Produce a chunk to be sent. + // + // `max_size` refers to how many payload bytes that may be produced, not + // including any headers. + virtual absl::optional Produce(TimeMs now, size_t max_size) = 0; + + // Discards a partially sent message identified by the parameters `unordered`, + // `stream_id` and `message_id`. The `message_id` comes from the returned + // information when having called `Produce`. A partially sent message means + // that it has had at least one fragment of it returned when `Produce` was + // called prior to calling this method). + // + // This is used when a message has been found to be expired (by the partial + // reliability extension), and the retransmission queue will signal the + // receiver that any partially received message fragments should be skipped. + // This means that any remaining fragments in the Send Queue must be removed + // as well so that they are not sent. 
+ virtual void Discard(IsUnordered unordered, + StreamID stream_id, + MID message_id) = 0; + + // Prepares the streams to be reset. This is used to close a WebRTC data + // channel and will be signaled to the other side. + // + // Concretely, it discards all whole (not partly sent) messages in the given + // streams and pauses those streams so that future added messages aren't + // produced until `ResumeStreams` is called. + // + // TODO(boivie): Investigate if it really should discard any message at all. + // RFC8831 only mentions that "[RFC6525] also guarantees that all the messages + // are delivered (or abandoned) before the stream is reset." + // + // This method can be called multiple times to add more streams to be + // reset, and paused while they are resetting. This is the first part of the + // two-phase commit protocol to reset streams, where the caller completes the + // procedure by either calling `CommitResetStreams` or `RollbackResetStreams`. + virtual void PrepareResetStreams(rtc::ArrayView streams) = 0; + + // Returns true if all non-discarded messages during `PrepareResetStreams` + // (which are those that was partially sent before that method was called) + // have been sent. + virtual bool CanResetStreams() const = 0; + + // Called to commit to reset the streams provided to `PrepareResetStreams`. + // It will reset the stream sequence numbers (SSNs) and message identifiers + // (MIDs) and resume the paused streams. + virtual void CommitResetStreams() = 0; + + // Called to abort the resetting of streams provided to `PrepareResetStreams`. + // Will resume the paused streams without resetting the stream sequence + // numbers (SSNs) or message identifiers (MIDs). Note that the non-partial + // messages that were discarded when calling `PrepareResetStreams` will not be + // recovered, to better match the intention from the sender to "close the + // channel". 
+ virtual void RollbackResetStreams() = 0; + + // Resets all message identifier counters (MID, SSN) and makes all partially + // messages be ready to be re-sent in full. This is used when the peer has + // been detected to have restarted and is used to try to minimize the amount + // of data loss. However, data loss cannot be completely guaranteed when a + // peer restarts. + virtual void Reset() = 0; +}; +} // namespace dcsctp + +#endif // NET_DCSCTP_TX_SEND_QUEUE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.cc index 9d8266eaf..7f26a981e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.cc @@ -10,7 +10,13 @@ #include "p2p/base/basic_async_resolver_factory.h" -#include "rtc_base/net_helpers.h" +#include +#include + +#include "absl/memory/memory.h" +#include "api/async_dns_resolver.h" +#include "rtc_base/async_resolver.h" +#include "rtc_base/logging.h" namespace webrtc { @@ -18,4 +24,113 @@ rtc::AsyncResolverInterface* BasicAsyncResolverFactory::Create() { return new rtc::AsyncResolver(); } +class WrappingAsyncDnsResolver; + +class WrappingAsyncDnsResolverResult : public AsyncDnsResolverResult { + public: + explicit WrappingAsyncDnsResolverResult(WrappingAsyncDnsResolver* owner) + : owner_(owner) {} + ~WrappingAsyncDnsResolverResult() {} + + // Note: Inline declaration not possible, since it refers to + // WrappingAsyncDnsResolver. 
+ bool GetResolvedAddress(int family, rtc::SocketAddress* addr) const override; + int GetError() const override; + + private: + WrappingAsyncDnsResolver* const owner_; +}; + +class WrappingAsyncDnsResolver : public AsyncDnsResolverInterface, + public sigslot::has_slots<> { + public: + explicit WrappingAsyncDnsResolver(rtc::AsyncResolverInterface* wrapped) + : wrapped_(absl::WrapUnique(wrapped)), result_(this) {} + + ~WrappingAsyncDnsResolver() override { + // Workaround to get around the fact that sigslot-using objects can't be + // destroyed from within their callback: Alert class users early. + // TODO(bugs.webrtc.org/12651): Delete this class once the sigslot users are + // gone. + RTC_CHECK(!within_resolve_result_); + wrapped_.release()->Destroy(false); + } + + void Start(const rtc::SocketAddress& addr, + std::function callback) override { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK_EQ(State::kNotStarted, state_); + state_ = State::kStarted; + callback_ = callback; + wrapped_->SignalDone.connect(this, + &WrappingAsyncDnsResolver::OnResolveResult); + wrapped_->Start(addr); + } + + const AsyncDnsResolverResult& result() const override { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK_EQ(State::kResolved, state_); + return result_; + } + + private: + enum class State { kNotStarted, kStarted, kResolved }; + + friend class WrappingAsyncDnsResolverResult; + // For use by WrappingAsyncDnsResolverResult + rtc::AsyncResolverInterface* wrapped() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return wrapped_.get(); + } + + void OnResolveResult(rtc::AsyncResolverInterface* ref) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(state_ == State::kStarted); + RTC_DCHECK_EQ(ref, wrapped_.get()); + state_ = State::kResolved; + within_resolve_result_ = true; + callback_(); + within_resolve_result_ = false; + } + + // The class variables need to be accessed on a single thread. 
+ SequenceChecker sequence_checker_; + std::function callback_ RTC_GUARDED_BY(sequence_checker_); + std::unique_ptr wrapped_ + RTC_GUARDED_BY(sequence_checker_); + State state_ RTC_GUARDED_BY(sequence_checker_) = State::kNotStarted; + WrappingAsyncDnsResolverResult result_ RTC_GUARDED_BY(sequence_checker_); + bool within_resolve_result_ RTC_GUARDED_BY(sequence_checker_) = false; +}; + +bool WrappingAsyncDnsResolverResult::GetResolvedAddress( + int family, + rtc::SocketAddress* addr) const { + if (!owner_->wrapped()) { + return false; + } + return owner_->wrapped()->GetResolvedAddress(family, addr); +} + +int WrappingAsyncDnsResolverResult::GetError() const { + if (!owner_->wrapped()) { + return -1; // FIXME: Find a code that makes sense. + } + return owner_->wrapped()->GetError(); +} + +std::unique_ptr +WrappingAsyncDnsResolverFactory::Create() { + return std::make_unique(wrapped_factory_->Create()); +} + +std::unique_ptr +WrappingAsyncDnsResolverFactory::CreateAndResolve( + const rtc::SocketAddress& addr, + std::function callback) { + std::unique_ptr resolver = Create(); + resolver->Start(addr, callback); + return resolver; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.h b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.h index c4661b448..c98891306 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_async_resolver_factory.h @@ -11,16 +11,47 @@ #ifndef P2P_BASE_BASIC_ASYNC_RESOLVER_FACTORY_H_ #define P2P_BASE_BASIC_ASYNC_RESOLVER_FACTORY_H_ +#include +#include +#include + +#include "api/async_dns_resolver.h" #include "api/async_resolver_factory.h" #include "rtc_base/async_resolver_interface.h" namespace webrtc { -class BasicAsyncResolverFactory : public AsyncResolverFactory { +class BasicAsyncResolverFactory final : public AsyncResolverFactory { public: rtc::AsyncResolverInterface* Create() override; }; 
+// This class wraps a factory using the older webrtc::AsyncResolverFactory API, +// and produces webrtc::AsyncDnsResolver objects that contain an +// rtc::AsyncResolver object. +class WrappingAsyncDnsResolverFactory final + : public AsyncDnsResolverFactoryInterface { + public: + explicit WrappingAsyncDnsResolverFactory( + std::unique_ptr wrapped_factory) + : owned_factory_(std::move(wrapped_factory)), + wrapped_factory_(owned_factory_.get()) {} + + explicit WrappingAsyncDnsResolverFactory( + AsyncResolverFactory* non_owned_factory) + : wrapped_factory_(non_owned_factory) {} + + std::unique_ptr CreateAndResolve( + const rtc::SocketAddress& addr, + std::function callback) override; + + std::unique_ptr Create() override; + + private: + const std::unique_ptr owned_factory_; + AsyncResolverFactory* const wrapped_factory_; +}; + } // namespace webrtc #endif // P2P_BASE_BASIC_ASYNC_RESOLVER_FACTORY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_packet_socket_factory.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_packet_socket_factory.cc index 8be907933..232e58b54 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_packet_socket_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_packet_socket_factory.cc @@ -15,6 +15,7 @@ #include #include "p2p/base/async_stun_tcp_socket.h" +#include "rtc_base/async_resolver.h" #include "rtc_base/async_tcp_socket.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/checks.h" @@ -81,16 +82,20 @@ AsyncPacketSocket* BasicPacketSocketFactory::CreateServerTcpSocket( return NULL; } + // Set TCP_NODELAY (via OPT_NODELAY) for improved performance; this causes + // small media packets to be sent immediately rather than being buffered up, + // reducing latency. + if (socket->SetOption(Socket::OPT_NODELAY, 1) != 0) { + RTC_LOG(LS_ERROR) << "Setting TCP_NODELAY option failed with error " + << socket->GetError(); + } + // If using fake TLS, wrap the TCP socket in a pseudo-SSL socket. 
if (opts & PacketSocketFactory::OPT_TLS_FAKE) { RTC_DCHECK(!(opts & PacketSocketFactory::OPT_TLS)); socket = new AsyncSSLSocket(socket); } - // Set TCP_NODELAY (via OPT_NODELAY) for improved performance. - // See http://go/gtalktcpnodelayexperiment - socket->SetOption(Socket::OPT_NODELAY, 1); - if (opts & PacketSocketFactory::OPT_STUN) return new cricket::AsyncStunTCPSocket(socket, true); @@ -123,6 +128,16 @@ AsyncPacketSocket* BasicPacketSocketFactory::CreateClientTcpSocket( } } + // Set TCP_NODELAY (via OPT_NODELAY) for improved performance; this causes + // small media packets to be sent immediately rather than being buffered up, + // reducing latency. + // + // Must be done before calling Connect, otherwise it may fail. + if (socket->SetOption(Socket::OPT_NODELAY, 1) != 0) { + RTC_LOG(LS_ERROR) << "Setting TCP_NODELAY option failed with error " + << socket->GetError(); + } + // If using a proxy, wrap the socket in a proxy socket. if (proxy_info.type == PROXY_SOCKS5) { socket = new AsyncSocksProxySocket( @@ -181,10 +196,6 @@ AsyncPacketSocket* BasicPacketSocketFactory::CreateClientTcpSocket( tcp_socket = new AsyncTCPSocket(socket, false); } - // Set TCP_NODELAY (via OPT_NODELAY) for improved performance. - // See http://go/gtalktcpnodelayexperiment - tcp_socket->SetOption(Socket::OPT_NODELAY, 1); - return tcp_socket; } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc index fe6042102..0aa2bcbef 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc @@ -480,6 +480,7 @@ void Connection::OnReadPacket(const char* data, // If this is a STUN response, then update the writable bit. // Log at LS_INFO if we receive a ping on an unwritable connection. rtc::LoggingSeverity sev = (!writable() ? 
rtc::LS_INFO : rtc::LS_VERBOSE); + msg->ValidateMessageIntegrity(remote_candidate().password()); switch (msg->type()) { case STUN_BINDING_REQUEST: RTC_LOG_V(sev) << ToString() << ": Received " @@ -505,8 +506,7 @@ void Connection::OnReadPacket(const char* data, // id's match. case STUN_BINDING_RESPONSE: case STUN_BINDING_ERROR_RESPONSE: - if (msg->ValidateMessageIntegrity(data, size, - remote_candidate().password())) { + if (msg->IntegrityOk()) { requests_.CheckResponse(msg.get()); } // Otherwise silently discard the response message. @@ -523,8 +523,7 @@ void Connection::OnReadPacket(const char* data, break; case GOOG_PING_RESPONSE: case GOOG_PING_ERROR_RESPONSE: - if (msg->ValidateMessageIntegrity32(data, size, - remote_candidate().password())) { + if (msg->IntegrityOk()) { requests_.CheckResponse(msg.get()); } break; @@ -1372,13 +1371,15 @@ int ProxyConnection::Send(const void* data, stats_.sent_total_packets++; int sent = port_->SendTo(data, size, remote_candidate_.address(), options, true); + int64_t now = rtc::TimeMillis(); if (sent <= 0) { RTC_DCHECK(sent < 0); error_ = port_->GetError(); stats_.sent_discarded_packets++; } else { - send_rate_tracker_.AddSamples(sent); + send_rate_tracker_.AddSamplesAtTime(now, sent); } + last_send_data_ = now; return sent; } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h index 88e930c21..d48137d01 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h @@ -237,6 +237,8 @@ class Connection : public CandidatePairInterface, // that the remote peer has received, if it is indicated in the incoming // connectivity check from the peer. void HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg); + // Timestamp when data was last sent (or attempted to be sent). 
+ int64_t last_send_data() const { return last_send_data_; } int64_t last_data_received() const { return last_data_received_; } // Debugging description of this connection @@ -378,6 +380,7 @@ class Connection : public CandidatePairInterface, ConnectionInfo stats_; rtc::RateTracker recv_rate_tracker_; rtc::RateTracker send_rate_tracker_; + int64_t last_send_data_ = 0; private: // Update the local candidate based on the mapped address attribute. diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.cc index f4b182efd..0a7175cfd 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.cc @@ -44,10 +44,10 @@ DefaultIceTransportFactory::CreateIceTransport( int component, IceTransportInit init) { BasicIceControllerFactory factory; - return new rtc::RefCountedObject( - std::make_unique( + return rtc::make_ref_counted( + cricket::P2PTransportChannel::Create( transport_name, component, init.port_allocator(), - init.async_resolver_factory(), init.event_log(), &factory)); + init.async_dns_resolver_factory(), init.event_log(), &factory)); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.h b/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.h index 4834c9ada..e46680d48 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/default_ice_transport_factory.h @@ -36,7 +36,7 @@ class DefaultIceTransport : public IceTransportInterface { } private: - const rtc::ThreadChecker thread_checker_{}; + const SequenceChecker thread_checker_{}; std::unique_ptr internal_ RTC_GUARDED_BY(thread_checker_); }; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc index 
52fe5c65a..99ee0f1a1 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.cc @@ -134,14 +134,13 @@ void StreamInterfaceChannel::Close() { DtlsTransport::DtlsTransport(IceTransportInternal* ice_transport, const webrtc::CryptoOptions& crypto_options, - webrtc::RtcEventLog* event_log) - : transport_name_(ice_transport->transport_name()), - component_(ice_transport->component()), + webrtc::RtcEventLog* event_log, + rtc::SSLProtocolVersion max_version) + : component_(ice_transport->component()), ice_transport_(ice_transport), downward_(NULL), srtp_ciphers_(crypto_options.GetSupportedDtlsSrtpCryptoSuites()), - ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_12), - crypto_options_(crypto_options), + ssl_max_version_(max_version), event_log_(event_log) { RTC_DCHECK(ice_transport_); ConnectToIceTransport(); @@ -149,16 +148,12 @@ DtlsTransport::DtlsTransport(IceTransportInternal* ice_transport, DtlsTransport::~DtlsTransport() = default; -const webrtc::CryptoOptions& DtlsTransport::crypto_options() const { - return crypto_options_; -} - DtlsTransportState DtlsTransport::dtls_state() const { return dtls_state_; } const std::string& DtlsTransport::transport_name() const { - return transport_name_; + return ice_transport_->transport_name(); } int DtlsTransport::component() const { @@ -199,17 +194,6 @@ rtc::scoped_refptr DtlsTransport::GetLocalCertificate() return local_certificate_; } -bool DtlsTransport::SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) { - if (dtls_active_) { - RTC_LOG(LS_ERROR) << "Not changing max. 
protocol version " - "while DTLS is negotiating"; - return false; - } - - ssl_max_version_ = version; - return true; -} - bool DtlsTransport::SetDtlsRole(rtc::SSLRole role) { if (dtls_) { RTC_DCHECK(dtls_role_); @@ -816,11 +800,11 @@ void DtlsTransport::set_dtls_state(DtlsTransportState state) { RTC_LOG(LS_VERBOSE) << ToString() << ": set_dtls_state from:" << dtls_state_ << " to " << state; dtls_state_ = state; - SignalDtlsState(this, state); + SendDtlsState(this, state); } void DtlsTransport::OnDtlsHandshakeError(rtc::SSLHandshakeError error) { - SignalDtlsHandshakeError(error); + SendDtlsHandshakeError(error); } void DtlsTransport::ConfigureHandshakeTimeout() { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h index 430c91233..f37e46857 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h @@ -16,6 +16,7 @@ #include #include "api/crypto/crypto_options.h" +#include "api/sequence_checker.h" #include "p2p/base/dtls_transport_internal.h" #include "p2p/base/ice_transport_internal.h" #include "rtc_base/buffer.h" @@ -24,8 +25,7 @@ #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/sequence_checker.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace rtc { class PacketTransportInternal; @@ -55,7 +55,7 @@ class StreamInterfaceChannel : public rtc::StreamInterface { int* error) override; private: - webrtc::SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; IceTransportInternal* const ice_transport_; // owned by DtlsTransport rtc::StreamState state_ RTC_GUARDED_BY(sequence_checker_); rtc::BufferQueue packets_ RTC_GUARDED_BY(sequence_checker_); @@ -101,13 +101,14 @@ class DtlsTransport : public DtlsTransportInternal { // // 
|event_log| is an optional RtcEventLog for logging state changes. It should // outlive the DtlsTransport. - explicit DtlsTransport(IceTransportInternal* ice_transport, - const webrtc::CryptoOptions& crypto_options, - webrtc::RtcEventLog* event_log); + DtlsTransport( + IceTransportInternal* ice_transport, + const webrtc::CryptoOptions& crypto_options, + webrtc::RtcEventLog* event_log, + rtc::SSLProtocolVersion max_version = rtc::SSL_PROTOCOL_DTLS_12); ~DtlsTransport() override; - const webrtc::CryptoOptions& crypto_options() const override; DtlsTransportState dtls_state() const override; const std::string& transport_name() const override; int component() const override; @@ -142,8 +143,6 @@ class DtlsTransport : public DtlsTransportInternal { bool GetOption(rtc::Socket::Option opt, int* value) override; - bool SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) override; - // Find out which TLS version was negotiated bool GetSslVersionBytes(int* version) const override; // Find out which DTLS-SRTP cipher was negotiated @@ -191,7 +190,7 @@ class DtlsTransport : public DtlsTransportInternal { const absl::string_view RECEIVING_ABBREV[2] = {"_", "R"}; const absl::string_view WRITABLE_ABBREV[2] = {"_", "W"}; rtc::StringBuilder sb; - sb << "DtlsTransport[" << transport_name_ << "|" << component_ << "|" + sb << "DtlsTransport[" << transport_name() << "|" << component_ << "|" << RECEIVING_ABBREV[receiving()] << WRITABLE_ABBREV[writable()] << "]"; return sb.Release(); } @@ -222,22 +221,20 @@ class DtlsTransport : public DtlsTransportInternal { // Sets the DTLS state, signaling if necessary. void set_dtls_state(DtlsTransportState state); - rtc::ThreadChecker thread_checker_; + webrtc::SequenceChecker thread_checker_; - std::string transport_name_; - int component_; + const int component_; DtlsTransportState dtls_state_ = DTLS_TRANSPORT_NEW; // Underlying ice_transport, not owned by this class. 
- IceTransportInternal* ice_transport_; + IceTransportInternal* const ice_transport_; std::unique_ptr dtls_; // The DTLS stream StreamInterfaceChannel* downward_; // Wrapper for ice_transport_, owned by dtls_. - std::vector srtp_ciphers_; // SRTP ciphers to use with DTLS. + const std::vector srtp_ciphers_; // SRTP ciphers to use with DTLS. bool dtls_active_ = false; rtc::scoped_refptr local_certificate_; absl::optional dtls_role_; - rtc::SSLProtocolVersion ssl_max_version_; - webrtc::CryptoOptions crypto_options_; + const rtc::SSLProtocolVersion ssl_max_version_; rtc::Buffer remote_fingerprint_value_; std::string remote_fingerprint_algorithm_; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_factory.h b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_factory.h index 9ad78a7cc..7c4a24adc 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_factory.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_factory.h @@ -31,7 +31,8 @@ class DtlsTransportFactory { virtual std::unique_ptr CreateDtlsTransport( IceTransportInternal* ice, - const webrtc::CryptoOptions& crypto_options) = 0; + const webrtc::CryptoOptions& crypto_options, + rtc::SSLProtocolVersion max_version) = 0; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h index 4c35d7371..ff71196f3 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h @@ -16,31 +16,35 @@ #include #include +#include +#include "absl/base/attributes.h" #include "api/crypto/crypto_options.h" #include "api/dtls_transport_interface.h" #include "api/scoped_refptr.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/packet_transport_internal.h" +#include "rtc_base/callback_list.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_certificate.h" #include 
"rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/third_party/sigslot/sigslot.h" namespace cricket { enum DtlsTransportState { // Haven't started negotiating. - DTLS_TRANSPORT_NEW = 0, + DTLS_TRANSPORT_NEW = static_cast(webrtc::DtlsTransportState::kNew), // Have started negotiating. - DTLS_TRANSPORT_CONNECTING, + DTLS_TRANSPORT_CONNECTING = + static_cast(webrtc::DtlsTransportState::kConnecting), // Negotiated, and has a secure connection. - DTLS_TRANSPORT_CONNECTED, + DTLS_TRANSPORT_CONNECTED = + static_cast(webrtc::DtlsTransportState::kConnected), // Transport is closed. - DTLS_TRANSPORT_CLOSED, + DTLS_TRANSPORT_CLOSED = static_cast(webrtc::DtlsTransportState::kClosed), // Failed due to some error in the handshake process. - DTLS_TRANSPORT_FAILED, + DTLS_TRANSPORT_FAILED = static_cast(webrtc::DtlsTransportState::kFailed), }; webrtc::DtlsTransportState ConvertDtlsTransportState( @@ -62,8 +66,6 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal { public: ~DtlsTransportInternal() override; - virtual const webrtc::CryptoOptions& crypto_options() const = 0; - virtual DtlsTransportState dtls_state() const = 0; virtual int component() const = 0; @@ -107,21 +109,54 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal { const uint8_t* digest, size_t digest_len) = 0; - virtual bool SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) = 0; + ABSL_DEPRECATED("Set the max version via construction.") + bool SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) { + return true; + } // Expose the underneath IceTransport. 
virtual IceTransportInternal* ice_transport() = 0; - sigslot::signal2 SignalDtlsState; + // F: void(DtlsTransportInternal*, const DtlsTransportState) + template + void SubscribeDtlsState(F&& callback) { + dtls_state_callback_list_.AddReceiver(std::forward(callback)); + } + + template + void SubscribeDtlsState(const void* id, F&& callback) { + dtls_state_callback_list_.AddReceiver(id, std::forward(callback)); + } + // Unsubscribe the subscription with given id. + void UnsubscribeDtlsState(const void* id) { + dtls_state_callback_list_.RemoveReceivers(id); + } + + void SendDtlsState(DtlsTransportInternal* transport, + DtlsTransportState state) { + dtls_state_callback_list_.Send(transport, state); + } // Emitted whenever the Dtls handshake failed on some transport channel. - sigslot::signal1 SignalDtlsHandshakeError; + // F: void(rtc::SSLHandshakeError) + template + void SubscribeDtlsHandshakeError(F&& callback) { + dtls_handshake_error_callback_list_.AddReceiver(std::forward(callback)); + } + + void SendDtlsHandshakeError(rtc::SSLHandshakeError error) { + dtls_handshake_error_callback_list_.Send(error); + } protected: DtlsTransportInternal(); private: RTC_DISALLOW_COPY_AND_ASSIGN(DtlsTransportInternal); + webrtc::CallbackList + dtls_handshake_error_callback_list_; + webrtc::CallbackList + dtls_state_callback_list_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_dtls_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_dtls_transport.h index 7061ea4b3..0628c4ce0 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_dtls_transport.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_dtls_transport.h @@ -55,9 +55,15 @@ class FakeDtlsTransport : public DtlsTransportInternal { // If this constructor is called, a new fake ICE transport will be created, // and this FakeDtlsTransport will take the ownership. 
- explicit FakeDtlsTransport(const std::string& name, int component) + FakeDtlsTransport(const std::string& name, int component) : FakeDtlsTransport(std::make_unique(name, component)) { } + FakeDtlsTransport(const std::string& name, + int component, + rtc::Thread* network_thread) + : FakeDtlsTransport(std::make_unique(name, + component, + network_thread)) {} ~FakeDtlsTransport() override { if (dest_ && dest_->dest_ == this) { @@ -85,7 +91,7 @@ class FakeDtlsTransport : public DtlsTransportInternal { } void SetDtlsState(DtlsTransportState state) { dtls_state_ = state; - SignalDtlsState(this, dtls_state_); + SendDtlsState(this, dtls_state_); } // Simulates the two DTLS transports connecting to each other. @@ -140,9 +146,6 @@ class FakeDtlsTransport : public DtlsTransportInternal { rtc::SSLFingerprint(alg, rtc::MakeArrayView(digest, digest_len)); return true; } - bool SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) override { - return true; - } bool SetDtlsRole(rtc::SSLRole role) override { dtls_role_ = std::move(role); return true; @@ -154,12 +157,6 @@ class FakeDtlsTransport : public DtlsTransportInternal { *role = *dtls_role_; return true; } - const webrtc::CryptoOptions& crypto_options() const override { - return crypto_options_; - } - void SetCryptoOptions(const webrtc::CryptoOptions& crypto_options) { - crypto_options_ = crypto_options; - } bool SetLocalCertificate( const rtc::scoped_refptr& certificate) override { do_dtls_ = true; @@ -297,7 +294,6 @@ class FakeDtlsTransport : public DtlsTransportInternal { absl::optional dtls_role_; int crypto_suite_ = rtc::SRTP_AES128_CM_SHA1_80; absl::optional ssl_cipher_suite_; - webrtc::CryptoOptions crypto_options_; DtlsTransportState dtls_state_ = DTLS_TRANSPORT_NEW; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_ice_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_ice_transport.h index edc573044..f8be8a983 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_ice_transport.h +++ 
b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_ice_transport.h @@ -20,11 +20,15 @@ #include "absl/types/optional.h" #include "api/ice_transport_interface.h" #include "p2p/base/ice_transport_internal.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace cricket { +// All methods must be called on the network thread (which is either the thread +// calling the constructor, or the separate thread explicitly passed to the +// constructor). class FakeIceTransport : public IceTransportInternal { public: explicit FakeIceTransport(const std::string& name, @@ -34,6 +38,8 @@ class FakeIceTransport : public IceTransportInternal { component_(component), network_thread_(network_thread ? network_thread : rtc::Thread::Current()) {} + // Must be called either on the network thread, or after the network thread + // has been shut down. ~FakeIceTransport() override { if (dest_ && dest_->dest_ == this) { dest_->dest_ = nullptr; @@ -42,18 +48,31 @@ class FakeIceTransport : public IceTransportInternal { // If async, will send packets by "Post"-ing to message queue instead of // synchronously "Send"-ing. - void SetAsync(bool async) { async_ = async; } - void SetAsyncDelay(int delay_ms) { async_delay_ms_ = delay_ms; } + void SetAsync(bool async) { + RTC_DCHECK_RUN_ON(network_thread_); + async_ = async; + } + void SetAsyncDelay(int delay_ms) { + RTC_DCHECK_RUN_ON(network_thread_); + async_delay_ms_ = delay_ms; + } // SetWritable, SetReceiving and SetDestination are the main methods that can // be used for testing, to simulate connectivity or lack thereof. 
- void SetWritable(bool writable) { set_writable(writable); } - void SetReceiving(bool receiving) { set_receiving(receiving); } + void SetWritable(bool writable) { + RTC_DCHECK_RUN_ON(network_thread_); + set_writable(writable); + } + void SetReceiving(bool receiving) { + RTC_DCHECK_RUN_ON(network_thread_); + set_receiving(receiving); + } // Simulates the two transports connecting to each other. // If |asymmetric| is true this method only affects this FakeIceTransport. // If false, it affects |dest| as well. void SetDestination(FakeIceTransport* dest, bool asymmetric = false) { + RTC_DCHECK_RUN_ON(network_thread_); if (dest == dest_) { return; } @@ -75,12 +94,14 @@ class FakeIceTransport : public IceTransportInternal { void SetTransportState(webrtc::IceTransportState state, IceTransportState legacy_state) { + RTC_DCHECK_RUN_ON(network_thread_); transport_state_ = state; legacy_transport_state_ = legacy_state; SignalIceTransportStateChanged(this); } void SetConnectionCount(size_t connection_count) { + RTC_DCHECK_RUN_ON(network_thread_); size_t old_connection_count = connection_count_; connection_count_ = connection_count; if (connection_count) { @@ -94,6 +115,7 @@ class FakeIceTransport : public IceTransportInternal { } void SetCandidatesGatheringComplete() { + RTC_DCHECK_RUN_ON(network_thread_); if (gathering_state_ != kIceGatheringComplete) { gathering_state_ = kIceGatheringComplete; SignalGatheringState(this); @@ -102,16 +124,29 @@ class FakeIceTransport : public IceTransportInternal { // Convenience functions for accessing ICE config and other things. 
int receiving_timeout() const { + RTC_DCHECK_RUN_ON(network_thread_); return ice_config_.receiving_timeout_or_default(); } - bool gather_continually() const { return ice_config_.gather_continually(); } - const Candidates& remote_candidates() const { return remote_candidates_; } + bool gather_continually() const { + RTC_DCHECK_RUN_ON(network_thread_); + return ice_config_.gather_continually(); + } + const Candidates& remote_candidates() const { + RTC_DCHECK_RUN_ON(network_thread_); + return remote_candidates_; + } // Fake IceTransportInternal implementation. const std::string& transport_name() const override { return name_; } int component() const override { return component_; } - uint64_t IceTiebreaker() const { return tiebreaker_; } - IceMode remote_ice_mode() const { return remote_ice_mode_; } + uint64_t IceTiebreaker() const { + RTC_DCHECK_RUN_ON(network_thread_); + return tiebreaker_; + } + IceMode remote_ice_mode() const { + RTC_DCHECK_RUN_ON(network_thread_); + return remote_ice_mode_; + } const std::string& ice_ufrag() const { return ice_parameters_.ufrag; } const std::string& ice_pwd() const { return ice_parameters_.pwd; } const std::string& remote_ice_ufrag() const { @@ -126,6 +161,7 @@ class FakeIceTransport : public IceTransportInternal { } IceTransportState GetState() const override { + RTC_DCHECK_RUN_ON(network_thread_); if (legacy_transport_state_) { return *legacy_transport_state_; } @@ -143,6 +179,7 @@ class FakeIceTransport : public IceTransportInternal { } webrtc::IceTransportState GetIceTransportState() const override { + RTC_DCHECK_RUN_ON(network_thread_); if (transport_state_) { return *transport_state_; } @@ -159,21 +196,34 @@ class FakeIceTransport : public IceTransportInternal { return webrtc::IceTransportState::kConnected; } - void SetIceRole(IceRole role) override { role_ = role; } - IceRole GetIceRole() const override { return role_; } + void SetIceRole(IceRole role) override { + RTC_DCHECK_RUN_ON(network_thread_); + role_ = role; + } + 
IceRole GetIceRole() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return role_; + } void SetIceTiebreaker(uint64_t tiebreaker) override { + RTC_DCHECK_RUN_ON(network_thread_); tiebreaker_ = tiebreaker; } void SetIceParameters(const IceParameters& ice_params) override { + RTC_DCHECK_RUN_ON(network_thread_); ice_parameters_ = ice_params; } void SetRemoteIceParameters(const IceParameters& params) override { + RTC_DCHECK_RUN_ON(network_thread_); remote_ice_parameters_ = params; } - void SetRemoteIceMode(IceMode mode) override { remote_ice_mode_ = mode; } + void SetRemoteIceMode(IceMode mode) override { + RTC_DCHECK_RUN_ON(network_thread_); + remote_ice_mode_ = mode; + } void MaybeStartGathering() override { + RTC_DCHECK_RUN_ON(network_thread_); if (gathering_state_ == kIceGatheringNew) { gathering_state_ = kIceGatheringGathering; SignalGatheringState(this); @@ -181,15 +231,21 @@ class FakeIceTransport : public IceTransportInternal { } IceGatheringState gathering_state() const override { + RTC_DCHECK_RUN_ON(network_thread_); return gathering_state_; } - void SetIceConfig(const IceConfig& config) override { ice_config_ = config; } + void SetIceConfig(const IceConfig& config) override { + RTC_DCHECK_RUN_ON(network_thread_); + ice_config_ = config; + } void AddRemoteCandidate(const Candidate& candidate) override { + RTC_DCHECK_RUN_ON(network_thread_); remote_candidates_.push_back(candidate); } void RemoveRemoteCandidate(const Candidate& candidate) override { + RTC_DCHECK_RUN_ON(network_thread_); auto it = absl::c_find(remote_candidates_, candidate); if (it == remote_candidates_.end()) { RTC_LOG(LS_INFO) << "Trying to remove a candidate which doesn't exist."; @@ -199,7 +255,10 @@ class FakeIceTransport : public IceTransportInternal { remote_candidates_.erase(it); } - void RemoveAllRemoteCandidates() override { remote_candidates_.clear(); } + void RemoveAllRemoteCandidates() override { + RTC_DCHECK_RUN_ON(network_thread_); + remote_candidates_.clear(); + } bool 
GetStats(IceTransportStats* ice_transport_stats) override { CandidateStats candidate_stats; @@ -220,17 +279,25 @@ class FakeIceTransport : public IceTransportInternal { } // Fake PacketTransportInternal implementation. - bool writable() const override { return writable_; } - bool receiving() const override { return receiving_; } + bool writable() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return writable_; + } + bool receiving() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return receiving_; + } // If combine is enabled, every two consecutive packets to be sent with // "SendPacket" will be combined into one outgoing packet. void combine_outgoing_packets(bool combine) { + RTC_DCHECK_RUN_ON(network_thread_); combine_outgoing_packets_ = combine; } int SendPacket(const char* data, size_t len, const rtc::PacketOptions& options, int flags) override { + RTC_DCHECK_RUN_ON(network_thread_); if (!dest_) { return -1; } @@ -239,9 +306,12 @@ class FakeIceTransport : public IceTransportInternal { if (!combine_outgoing_packets_ || send_packet_.size() > len) { rtc::CopyOnWriteBuffer packet(std::move(send_packet_)); if (async_) { - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, rtc::Thread::Current(), - rtc::Bind(&FakeIceTransport::SendPacketInternal, this, packet), + network_thread_->PostDelayedTask( + ToQueuedTask(task_safety_.flag(), + [this, packet] { + RTC_DCHECK_RUN_ON(network_thread_); + FakeIceTransport::SendPacketInternal(packet); + }), async_delay_ms_); } else { SendPacketInternal(packet); @@ -253,10 +323,12 @@ class FakeIceTransport : public IceTransportInternal { } int SetOption(rtc::Socket::Option opt, int value) override { + RTC_DCHECK_RUN_ON(network_thread_); socket_options_[opt] = value; return true; } bool GetOption(rtc::Socket::Option opt, int* value) override { + RTC_DCHECK_RUN_ON(network_thread_); auto it = socket_options_.find(opt); if (it != socket_options_.end()) { *value = it->second; @@ -268,19 +340,27 @@ class FakeIceTransport : 
public IceTransportInternal { int GetError() override { return 0; } - rtc::CopyOnWriteBuffer last_sent_packet() { return last_sent_packet_; } + rtc::CopyOnWriteBuffer last_sent_packet() { + RTC_DCHECK_RUN_ON(network_thread_); + return last_sent_packet_; + } absl::optional network_route() const override { + RTC_DCHECK_RUN_ON(network_thread_); return network_route_; } void SetNetworkRoute(absl::optional network_route) { + RTC_DCHECK_RUN_ON(network_thread_); network_route_ = network_route; - network_thread_->Invoke( - RTC_FROM_HERE, [this] { SignalNetworkRouteChanged(network_route_); }); + network_thread_->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(network_thread_); + SignalNetworkRouteChanged(network_route_); + }); } private: - void set_writable(bool writable) { + void set_writable(bool writable) + RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { if (writable_ == writable) { return; } @@ -292,7 +372,8 @@ class FakeIceTransport : public IceTransportInternal { SignalWritableState(this); } - void set_receiving(bool receiving) { + void set_receiving(bool receiving) + RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { if (receiving_ == receiving) { return; } @@ -300,7 +381,8 @@ class FakeIceTransport : public IceTransportInternal { SignalReceivingState(this); } - void SendPacketInternal(const rtc::CopyOnWriteBuffer& packet) { + void SendPacketInternal(const rtc::CopyOnWriteBuffer& packet) + RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { if (dest_) { last_sent_packet_ = packet; dest_->SignalReadPacket(dest_, packet.data(), packet.size(), @@ -308,32 +390,37 @@ class FakeIceTransport : public IceTransportInternal { } } - rtc::AsyncInvoker invoker_; - std::string name_; - int component_; - FakeIceTransport* dest_ = nullptr; - bool async_ = false; - int async_delay_ms_ = 0; - Candidates remote_candidates_; - IceConfig ice_config_; - IceRole role_ = ICEROLE_UNKNOWN; - uint64_t tiebreaker_ = 0; - IceParameters ice_parameters_; - IceParameters remote_ice_parameters_; - 
IceMode remote_ice_mode_ = ICEMODE_FULL; - size_t connection_count_ = 0; - absl::optional transport_state_; - absl::optional legacy_transport_state_; - IceGatheringState gathering_state_ = kIceGatheringNew; - bool had_connection_ = false; - bool writable_ = false; - bool receiving_ = false; - bool combine_outgoing_packets_ = false; - rtc::CopyOnWriteBuffer send_packet_; - absl::optional network_route_; - std::map socket_options_; - rtc::CopyOnWriteBuffer last_sent_packet_; + const std::string name_; + const int component_; + FakeIceTransport* dest_ RTC_GUARDED_BY(network_thread_) = nullptr; + bool async_ RTC_GUARDED_BY(network_thread_) = false; + int async_delay_ms_ RTC_GUARDED_BY(network_thread_) = 0; + Candidates remote_candidates_ RTC_GUARDED_BY(network_thread_); + IceConfig ice_config_ RTC_GUARDED_BY(network_thread_); + IceRole role_ RTC_GUARDED_BY(network_thread_) = ICEROLE_UNKNOWN; + uint64_t tiebreaker_ RTC_GUARDED_BY(network_thread_) = 0; + IceParameters ice_parameters_ RTC_GUARDED_BY(network_thread_); + IceParameters remote_ice_parameters_ RTC_GUARDED_BY(network_thread_); + IceMode remote_ice_mode_ RTC_GUARDED_BY(network_thread_) = ICEMODE_FULL; + size_t connection_count_ RTC_GUARDED_BY(network_thread_) = 0; + absl::optional transport_state_ + RTC_GUARDED_BY(network_thread_); + absl::optional legacy_transport_state_ + RTC_GUARDED_BY(network_thread_); + IceGatheringState gathering_state_ RTC_GUARDED_BY(network_thread_) = + kIceGatheringNew; + bool had_connection_ RTC_GUARDED_BY(network_thread_) = false; + bool writable_ RTC_GUARDED_BY(network_thread_) = false; + bool receiving_ RTC_GUARDED_BY(network_thread_) = false; + bool combine_outgoing_packets_ RTC_GUARDED_BY(network_thread_) = false; + rtc::CopyOnWriteBuffer send_packet_ RTC_GUARDED_BY(network_thread_); + absl::optional network_route_ + RTC_GUARDED_BY(network_thread_); + std::map socket_options_ + RTC_GUARDED_BY(network_thread_); + rtc::CopyOnWriteBuffer last_sent_packet_ 
RTC_GUARDED_BY(network_thread_); rtc::Thread* const network_thread_; + webrtc::ScopedTaskSafetyDetached task_safety_; }; class FakeIceTransportWrapper : public webrtc::IceTransportInterface { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_packet_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_packet_transport.h index a5e2abb7d..b69c9b520 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_packet_transport.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_packet_transport.h @@ -15,7 +15,6 @@ #include #include "p2p/base/packet_transport_internal.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/copy_on_write_buffer.h" namespace rtc { @@ -31,11 +30,6 @@ class FakePacketTransport : public PacketTransportInternal { } } - // If async, will send packets by "Post"-ing to message queue instead of - // synchronously "Send"-ing. - void SetAsync(bool async) { async_ = async; } - void SetAsyncDelay(int delay_ms) { async_delay_ms_ = delay_ms; } - // SetWritable, SetReceiving and SetDestination are the main methods that can // be used for testing, to simulate connectivity or lack thereof. 
void SetWritable(bool writable) { set_writable(writable); } @@ -70,14 +64,8 @@ class FakePacketTransport : public PacketTransportInternal { return -1; } CopyOnWriteBuffer packet(data, len); - if (async_) { - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, Thread::Current(), - Bind(&FakePacketTransport::SendPacketInternal, this, packet), - async_delay_ms_); - } else { - SendPacketInternal(packet); - } + SendPacketInternal(packet); + SentPacket sent_packet(options.packet_id, TimeMillis()); SignalSentPacket(this, sent_packet); return static_cast(len); @@ -139,11 +127,8 @@ class FakePacketTransport : public PacketTransportInternal { } CopyOnWriteBuffer last_sent_packet_; - AsyncInvoker invoker_; std::string transport_name_; FakePacketTransport* dest_ = nullptr; - bool async_ = false; - int async_delay_ms_ = 0; bool writable_ = false; bool receiving_ = false; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_port_allocator.h b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_port_allocator.h index 266bb7956..9e0e33304 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/fake_port_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/fake_port_allocator.h @@ -18,7 +18,6 @@ #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/port_allocator.h" #include "p2p/base/udp_port.h" -#include "rtc_base/bind.h" #include "rtc_base/net_helpers.h" #include "rtc_base/thread.h" @@ -119,8 +118,8 @@ class FakePortAllocatorSession : public PortAllocatorSession { username(), password(), std::string(), false)); RTC_DCHECK(port_); - port_->SignalDestroyed.connect( - this, &FakePortAllocatorSession::OnPortDestroyed); + port_->SubscribePortDestroyed( + [this](PortInterface* port) { OnPortDestroyed(port); }); AddPort(port_.get()); } ++port_config_count_; @@ -222,9 +221,7 @@ class FakePortAllocator : public cricket::PortAllocator { Initialize(); return; } - network_thread_->Invoke(RTC_FROM_HERE, - rtc::Bind(&PortAllocator::Initialize, - static_cast(this))); + 
network_thread_->Invoke(RTC_FROM_HERE, [this] { Initialize(); }); } void SetNetworkIgnoreMask(int network_ignore_mask) override {} diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.h b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.h index d5dc29e78..0e77d1dd0 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_controller_interface.h @@ -87,7 +87,9 @@ class IceControllerInterface { // This represents the result of a call to SelectConnectionToPing. struct PingResult { PingResult(const Connection* conn, int _recheck_delay_ms) - : connection(conn), recheck_delay_ms(_recheck_delay_ms) {} + : connection(conn ? absl::optional(conn) + : absl::nullopt), + recheck_delay_ms(_recheck_delay_ms) {} // Connection that we should (optionally) ping. const absl::optional connection; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.cc index 1d5b6e740..104a95b5a 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.cc @@ -14,6 +14,50 @@ namespace cricket { +using webrtc::RTCError; +using webrtc::RTCErrorType; + +RTCError VerifyCandidate(const Candidate& cand) { + // No address zero. + if (cand.address().IsNil() || cand.address().IsAnyIP()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "candidate has address of zero"); + } + + // Disallow all ports below 1024, except for 80 and 443 on public addresses. + int port = cand.address().port(); + if (cand.protocol() == cricket::TCP_PROTOCOL_NAME && + (cand.tcptype() == cricket::TCPTYPE_ACTIVE_STR || port == 0)) { + // Expected for active-only candidates per + // http://tools.ietf.org/html/rfc6544#section-4.5 so no error. + // Libjingle clients emit port 0, in "active" mode. 
+ return RTCError::OK(); + } + if (port < 1024) { + if ((port != 80) && (port != 443)) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "candidate has port below 1024, but not 80 or 443"); + } + + if (cand.address().IsPrivateIP()) { + return RTCError( + RTCErrorType::INVALID_PARAMETER, + "candidate has port of 80 or 443 with private IP address"); + } + } + + return RTCError::OK(); +} + +RTCError VerifyCandidates(const Candidates& candidates) { + for (const Candidate& candidate : candidates) { + RTCError error = VerifyCandidate(candidate); + if (!error.ok()) + return error; + } + return RTCError::OK(); +} + IceConfig::IceConfig() = default; IceConfig::IceConfig(int receiving_timeout_ms, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.h b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.h index b735a1a74..b3eb2dc9e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/ice_transport_internal.h @@ -18,6 +18,7 @@ #include "absl/types/optional.h" #include "api/candidate.h" +#include "api/rtc_error.h" #include "api/transport/enums.h" #include "p2p/base/connection.h" #include "p2p/base/packet_transport_internal.h" @@ -74,6 +75,17 @@ enum class NominationMode { // The details are described in P2PTransportChannel. }; +// Utility method that checks if various required Candidate fields are filled in +// and contain valid values. If conditions are not met, an RTCError with the +// appropriated error number and description is returned. If the configuration +// is valid RTCError::OK() is returned. +webrtc::RTCError VerifyCandidate(const Candidate& cand); + +// Runs through a list of cricket::Candidate instances and calls VerifyCandidate +// for each one, stopping on the first error encounted and returning that error +// value if so. On success returns RTCError::OK(). 
+webrtc::RTCError VerifyCandidates(const Candidates& candidates); + // Information about ICE configuration. // TODO(deadbeef): Use absl::optional to represent unset values, instead of // -1. diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/mdns_message.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/mdns_message.cc deleted file mode 100644 index 1aa996c4a..000000000 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/mdns_message.cc +++ /dev/null @@ -1,396 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "p2p/base/mdns_message.h" - -#include "rtc_base/logging.h" -#include "rtc_base/net_helpers.h" -#include "rtc_base/string_encode.h" - -namespace webrtc { - -namespace { -// RFC 1035, Section 4.1.1. -// -// QR bit. -constexpr uint16_t kMdnsFlagMaskQueryOrResponse = 0x8000; -// AA bit. -constexpr uint16_t kMdnsFlagMaskAuthoritative = 0x0400; -// RFC 1035, Section 4.1.2, QCLASS and RFC 6762, Section 18.12, repurposing of -// top bit of QCLASS as the unicast response bit. -constexpr uint16_t kMdnsQClassMaskUnicastResponse = 0x8000; -constexpr size_t kMdnsHeaderSizeBytes = 12; - -bool ReadDomainName(MessageBufferReader* buf, std::string* name) { - size_t name_start_pos = buf->CurrentOffset(); - uint8_t label_length; - if (!buf->ReadUInt8(&label_length)) { - return false; - } - // RFC 1035, Section 4.1.4. - // - // If the first two bits of the length octet are ones, the name is compressed - // and the rest six bits with the next octet denotes its position in the - // message by the offset from the start of the message. 
- auto is_pointer = [](uint8_t octet) { - return (octet & 0x80) && (octet & 0x40); - }; - while (label_length && !is_pointer(label_length)) { - // RFC 1035, Section 2.3.1, labels are restricted to 63 octets or less. - if (label_length > 63) { - return false; - } - std::string label; - if (!buf->ReadString(&label, label_length)) { - return false; - } - (*name) += label + "."; - if (!buf->ReadUInt8(&label_length)) { - return false; - } - } - if (is_pointer(label_length)) { - uint8_t next_octet; - if (!buf->ReadUInt8(&next_octet)) { - return false; - } - size_t pos_jump_to = ((label_length & 0x3f) << 8) | next_octet; - // A legitimate pointer only refers to a prior occurrence of the same name, - // and we should only move strictly backward to a prior name field after the - // header. - if (pos_jump_to >= name_start_pos || pos_jump_to < kMdnsHeaderSizeBytes) { - return false; - } - MessageBufferReader new_buf(buf->MessageData(), buf->MessageLength()); - if (!new_buf.Consume(pos_jump_to)) { - return false; - } - return ReadDomainName(&new_buf, name); - } - return true; -} - -void WriteDomainName(rtc::ByteBufferWriter* buf, const std::string& name) { - std::vector labels; - rtc::tokenize(name, '.', &labels); - for (const auto& label : labels) { - buf->WriteUInt8(label.length()); - buf->WriteString(label); - } - buf->WriteUInt8(0); -} - -} // namespace - -void MdnsHeader::SetQueryOrResponse(bool is_query) { - if (is_query) { - flags &= ~kMdnsFlagMaskQueryOrResponse; - } else { - flags |= kMdnsFlagMaskQueryOrResponse; - } -} - -void MdnsHeader::SetAuthoritative(bool is_authoritative) { - if (is_authoritative) { - flags |= kMdnsFlagMaskAuthoritative; - } else { - flags &= ~kMdnsFlagMaskAuthoritative; - } -} - -bool MdnsHeader::IsAuthoritative() const { - return flags & kMdnsFlagMaskAuthoritative; -} - -bool MdnsHeader::Read(MessageBufferReader* buf) { - if (!buf->ReadUInt16(&id) || !buf->ReadUInt16(&flags) || - !buf->ReadUInt16(&qdcount) || !buf->ReadUInt16(&ancount) || - 
!buf->ReadUInt16(&nscount) || !buf->ReadUInt16(&arcount)) { - RTC_LOG(LS_ERROR) << "Invalid mDNS header."; - return false; - } - return true; -} - -void MdnsHeader::Write(rtc::ByteBufferWriter* buf) const { - buf->WriteUInt16(id); - buf->WriteUInt16(flags); - buf->WriteUInt16(qdcount); - buf->WriteUInt16(ancount); - buf->WriteUInt16(nscount); - buf->WriteUInt16(arcount); -} - -bool MdnsHeader::IsQuery() const { - return !(flags & kMdnsFlagMaskQueryOrResponse); -} - -MdnsSectionEntry::MdnsSectionEntry() = default; -MdnsSectionEntry::~MdnsSectionEntry() = default; -MdnsSectionEntry::MdnsSectionEntry(const MdnsSectionEntry& other) = default; - -void MdnsSectionEntry::SetType(SectionEntryType type) { - switch (type) { - case SectionEntryType::kA: - type_ = 1; - return; - case SectionEntryType::kAAAA: - type_ = 28; - return; - default: - RTC_NOTREACHED(); - } -} - -SectionEntryType MdnsSectionEntry::GetType() const { - switch (type_) { - case 1: - return SectionEntryType::kA; - case 28: - return SectionEntryType::kAAAA; - default: - return SectionEntryType::kUnsupported; - } -} - -void MdnsSectionEntry::SetClass(SectionEntryClass cls) { - switch (cls) { - case SectionEntryClass::kIN: - class_ = 1; - return; - default: - RTC_NOTREACHED(); - } -} - -SectionEntryClass MdnsSectionEntry::GetClass() const { - switch (class_) { - case 1: - return SectionEntryClass::kIN; - default: - return SectionEntryClass::kUnsupported; - } -} - -MdnsQuestion::MdnsQuestion() = default; -MdnsQuestion::MdnsQuestion(const MdnsQuestion& other) = default; -MdnsQuestion::~MdnsQuestion() = default; - -bool MdnsQuestion::Read(MessageBufferReader* buf) { - if (!ReadDomainName(buf, &name_)) { - RTC_LOG(LS_ERROR) << "Invalid name."; - return false; - } - if (!buf->ReadUInt16(&type_) || !buf->ReadUInt16(&class_)) { - RTC_LOG(LS_ERROR) << "Invalid type and class."; - return false; - } - return true; -} - -bool MdnsQuestion::Write(rtc::ByteBufferWriter* buf) const { - WriteDomainName(buf, name_); - 
buf->WriteUInt16(type_); - buf->WriteUInt16(class_); - return true; -} - -void MdnsQuestion::SetUnicastResponse(bool should_unicast) { - if (should_unicast) { - class_ |= kMdnsQClassMaskUnicastResponse; - } else { - class_ &= ~kMdnsQClassMaskUnicastResponse; - } -} - -bool MdnsQuestion::ShouldUnicastResponse() const { - return class_ & kMdnsQClassMaskUnicastResponse; -} - -MdnsResourceRecord::MdnsResourceRecord() = default; -MdnsResourceRecord::MdnsResourceRecord(const MdnsResourceRecord& other) = - default; -MdnsResourceRecord::~MdnsResourceRecord() = default; - -bool MdnsResourceRecord::Read(MessageBufferReader* buf) { - if (!ReadDomainName(buf, &name_)) { - return false; - } - if (!buf->ReadUInt16(&type_) || !buf->ReadUInt16(&class_) || - !buf->ReadUInt32(&ttl_seconds_) || !buf->ReadUInt16(&rdlength_)) { - return false; - } - - switch (GetType()) { - case SectionEntryType::kA: - return ReadARData(buf); - case SectionEntryType::kAAAA: - return ReadQuadARData(buf); - case SectionEntryType::kUnsupported: - return false; - default: - RTC_NOTREACHED(); - } - return false; -} -bool MdnsResourceRecord::ReadARData(MessageBufferReader* buf) { - // A RDATA contains a 32-bit IPv4 address. - return buf->ReadString(&rdata_, 4); -} - -bool MdnsResourceRecord::ReadQuadARData(MessageBufferReader* buf) { - // AAAA RDATA contains a 128-bit IPv6 address. 
- return buf->ReadString(&rdata_, 16); -} - -bool MdnsResourceRecord::Write(rtc::ByteBufferWriter* buf) const { - WriteDomainName(buf, name_); - buf->WriteUInt16(type_); - buf->WriteUInt16(class_); - buf->WriteUInt32(ttl_seconds_); - buf->WriteUInt16(rdlength_); - switch (GetType()) { - case SectionEntryType::kA: - WriteARData(buf); - return true; - case SectionEntryType::kAAAA: - WriteQuadARData(buf); - return true; - case SectionEntryType::kUnsupported: - return false; - default: - RTC_NOTREACHED(); - } - return true; -} - -void MdnsResourceRecord::WriteARData(rtc::ByteBufferWriter* buf) const { - buf->WriteString(rdata_); -} - -void MdnsResourceRecord::WriteQuadARData(rtc::ByteBufferWriter* buf) const { - buf->WriteString(rdata_); -} - -bool MdnsResourceRecord::SetIPAddressInRecordData( - const rtc::IPAddress& address) { - int af = address.family(); - if (af != AF_INET && af != AF_INET6) { - return false; - } - char out[16] = {0}; - if (!rtc::inet_pton(af, address.ToString().c_str(), out)) { - return false; - } - rdlength_ = (af == AF_INET) ? 4 : 16; - rdata_ = std::string(out, rdlength_); - return true; -} - -bool MdnsResourceRecord::GetIPAddressFromRecordData( - rtc::IPAddress* address) const { - if (GetType() != SectionEntryType::kA && - GetType() != SectionEntryType::kAAAA) { - return false; - } - if (rdata_.size() != 4 && rdata_.size() != 16) { - return false; - } - char out[INET6_ADDRSTRLEN] = {0}; - int af = (GetType() == SectionEntryType::kA) ? 
AF_INET : AF_INET6; - if (!rtc::inet_ntop(af, rdata_.data(), out, sizeof(out))) { - return false; - } - return rtc::IPFromString(std::string(out), address); -} - -MdnsMessage::MdnsMessage() = default; -MdnsMessage::~MdnsMessage() = default; - -bool MdnsMessage::Read(MessageBufferReader* buf) { - RTC_DCHECK_EQ(0u, buf->CurrentOffset()); - if (!header_.Read(buf)) { - return false; - } - - auto read_question = [&buf](std::vector* section, - uint16_t count) { - section->resize(count); - for (auto& question : (*section)) { - if (!question.Read(buf)) { - return false; - } - } - return true; - }; - auto read_rr = [&buf](std::vector* section, - uint16_t count) { - section->resize(count); - for (auto& rr : (*section)) { - if (!rr.Read(buf)) { - return false; - } - } - return true; - }; - - if (!read_question(&question_section_, header_.qdcount) || - !read_rr(&answer_section_, header_.ancount) || - !read_rr(&authority_section_, header_.nscount) || - !read_rr(&additional_section_, header_.arcount)) { - return false; - } - return true; -} - -bool MdnsMessage::Write(rtc::ByteBufferWriter* buf) const { - header_.Write(buf); - - auto write_rr = [&buf](const std::vector& section) { - for (const auto& rr : section) { - if (!rr.Write(buf)) { - return false; - } - } - return true; - }; - - for (const auto& question : question_section_) { - if (!question.Write(buf)) { - return false; - } - } - if (!write_rr(answer_section_) || !write_rr(authority_section_) || - !write_rr(additional_section_)) { - return false; - } - - return true; -} - -bool MdnsMessage::ShouldUnicastResponse() const { - bool should_unicast = false; - for (const auto& question : question_section_) { - should_unicast |= question.ShouldUnicastResponse(); - } - return should_unicast; -} - -void MdnsMessage::AddQuestion(const MdnsQuestion& question) { - question_section_.push_back(question); - header_.qdcount = question_section_.size(); -} - -void MdnsMessage::AddAnswerRecord(const MdnsResourceRecord& answer) { - 
answer_section_.push_back(answer); - header_.ancount = answer_section_.size(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/mdns_message.h b/TMessagesProj/jni/voip/webrtc/p2p/base/mdns_message.h deleted file mode 100644 index 79be5219e..000000000 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/mdns_message.h +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef P2P_BASE_MDNS_MESSAGE_H_ -#define P2P_BASE_MDNS_MESSAGE_H_ - -// This file contains classes to read and write mDNSs message defined in RFC -// 6762 and RFC 1025 (DNS messages). Note that it is recommended by RFC 6762 to -// use the name compression scheme defined in RFC 1035 whenever possible. We -// currently only implement the capability of reading compressed names in mDNS -// messages in MdnsMessage::Read(); however, the MdnsMessage::Write() does not -// support name compression yet. -// -// Fuzzer tests (test/fuzzers/mdns_parser_fuzzer.cc) MUST always be performed -// after changes made to this file. - -#include - -#include -#include - -#include "rtc_base/byte_buffer.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/message_buffer_reader.h" - -namespace webrtc { - -// We use "section entry" to denote either a question or a resource record. -// -// RFC 1035 Section 3.2.2. -enum class SectionEntryType { - kA, - kAAAA, - // Only the above types are processed in the current implementation. - kUnsupported, -}; - -// RFC 1035 Section 3.2.4. -enum class SectionEntryClass { - kIN, - kUnsupported, -}; - -// RFC 1035, Section 4.1.1. 
-class MdnsHeader final { - public: - bool Read(MessageBufferReader* buf); - void Write(rtc::ByteBufferWriter* buf) const; - - void SetQueryOrResponse(bool is_query); - bool IsQuery() const; - void SetAuthoritative(bool is_authoritative); - bool IsAuthoritative() const; - - uint16_t id = 0; - uint16_t flags = 0; - // Number of entries in the question section. - uint16_t qdcount = 0; - // Number of resource records in the answer section. - uint16_t ancount = 0; - // Number of name server resource records in the authority records section. - uint16_t nscount = 0; - // Number of resource records in the additional records section. - uint16_t arcount = 0; -}; - -// Entries in each section after the header share a common structure. Note that -// this is not a concept defined in RFC 1035. -class MdnsSectionEntry { - public: - MdnsSectionEntry(); - MdnsSectionEntry(const MdnsSectionEntry& other); - virtual ~MdnsSectionEntry(); - virtual bool Read(MessageBufferReader* buf) = 0; - virtual bool Write(rtc::ByteBufferWriter* buf) const = 0; - - void SetName(const std::string& name) { name_ = name; } - // Returns the fully qualified domain name in the section entry, i.e., QNAME - // in a question or NAME in a resource record. - std::string GetName() const { return name_; } - - void SetType(SectionEntryType type); - SectionEntryType GetType() const; - void SetClass(SectionEntryClass cls); - SectionEntryClass GetClass() const; - - protected: - std::string name_; // Fully qualified domain name. - uint16_t type_ = 0; - uint16_t class_ = 0; -}; - -// RFC 1035, Section 4.1.2. -class MdnsQuestion final : public MdnsSectionEntry { - public: - MdnsQuestion(); - MdnsQuestion(const MdnsQuestion& other); - ~MdnsQuestion() override; - - bool Read(MessageBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; - - void SetUnicastResponse(bool should_unicast); - bool ShouldUnicastResponse() const; -}; - -// RFC 1035, Section 4.1.3. 
-class MdnsResourceRecord final : public MdnsSectionEntry { - public: - MdnsResourceRecord(); - MdnsResourceRecord(const MdnsResourceRecord& other); - ~MdnsResourceRecord() override; - - bool Read(MessageBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; - - void SetTtlSeconds(uint32_t ttl_seconds) { ttl_seconds_ = ttl_seconds; } - uint32_t GetTtlSeconds() const { return ttl_seconds_; } - // Returns true if |address| is in the address family AF_INET or AF_INET6 and - // |address| has a valid IPv4 or IPv6 address; false otherwise. - bool SetIPAddressInRecordData(const rtc::IPAddress& address); - // Returns true if the record is of type A or AAAA and the record has a valid - // IPv4 or IPv6 address; false otherwise. Stores the valid IP in |address|. - bool GetIPAddressFromRecordData(rtc::IPAddress* address) const; - - private: - // The list of methods reading and writing rdata can grow as we support more - // types of rdata. - bool ReadARData(MessageBufferReader* buf); - void WriteARData(rtc::ByteBufferWriter* buf) const; - - bool ReadQuadARData(MessageBufferReader* buf); - void WriteQuadARData(rtc::ByteBufferWriter* buf) const; - - uint32_t ttl_seconds_ = 0; - uint16_t rdlength_ = 0; - std::string rdata_; -}; - -class MdnsMessage final { - public: - // RFC 1035, Section 4.1. - enum class Section { kQuestion, kAnswer, kAuthority, kAdditional }; - - MdnsMessage(); - ~MdnsMessage(); - // Reads the mDNS message in |buf| and populates the corresponding fields in - // MdnsMessage. - bool Read(MessageBufferReader* buf); - // Write an mDNS message to |buf| based on the fields in MdnsMessage. - // - // TODO(qingsi): Implement name compression when writing mDNS messages. 
- bool Write(rtc::ByteBufferWriter* buf) const; - - void SetId(uint16_t id) { header_.id = id; } - uint16_t GetId() const { return header_.id; } - - void SetQueryOrResponse(bool is_query) { - header_.SetQueryOrResponse(is_query); - } - bool IsQuery() const { return header_.IsQuery(); } - - void SetAuthoritative(bool is_authoritative) { - header_.SetAuthoritative(is_authoritative); - } - bool IsAuthoritative() const { return header_.IsAuthoritative(); } - - // Returns true if the message is a query and the unicast response is - // preferred. False otherwise. - bool ShouldUnicastResponse() const; - - void AddQuestion(const MdnsQuestion& question); - // TODO(qingsi): Implement AddXRecord for name server and additional records. - void AddAnswerRecord(const MdnsResourceRecord& answer); - - const std::vector& question_section() const { - return question_section_; - } - const std::vector& answer_section() const { - return answer_section_; - } - const std::vector& authority_section() const { - return authority_section_; - } - const std::vector& additional_section() const { - return additional_section_; - } - - private: - MdnsHeader header_; - std::vector question_section_; - std::vector answer_section_; - std::vector authority_section_; - std::vector additional_section_; -}; - -} // namespace webrtc - -#endif // P2P_BASE_MDNS_MESSAGE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc index 9bf0b23db..eff79ab9b 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc @@ -10,27 +10,38 @@ #include "p2p/base/p2p_transport_channel.h" -#include +#include +#include + +#include +#include #include #include #include #include "absl/algorithm/container.h" +#include "absl/memory/memory.h" #include "absl/strings/match.h" +#include "api/async_dns_resolver.h" #include "api/candidate.h" +#include 
"api/task_queue/queued_task.h" #include "logging/rtc_event_log/ice_logger.h" +#include "p2p/base/basic_async_resolver_factory.h" #include "p2p/base/basic_ice_controller.h" -#include "p2p/base/candidate_pair_interface.h" #include "p2p/base/connection.h" +#include "p2p/base/connection_info.h" #include "p2p/base/port.h" #include "rtc_base/checks.h" #include "rtc_base/crc32.h" #include "rtc_base/experiments/struct_parameters_parser.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" -#include "rtc_base/net_helpers.h" +#include "rtc_base/network.h" +#include "rtc_base/network_constants.h" #include "rtc_base/string_encode.h" #include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" @@ -109,6 +120,7 @@ namespace cricket { using webrtc::RTCError; using webrtc::RTCErrorType; +using webrtc::ToQueuedTask; bool IceCredentialsChanged(const std::string& old_ufrag, const std::string& old_pwd, @@ -121,26 +133,50 @@ bool IceCredentialsChanged(const std::string& old_ufrag, return (old_ufrag != new_ufrag) || (old_pwd != new_pwd); } +// static +std::unique_ptr P2PTransportChannel::Create( + const std::string& transport_name, + int component, + PortAllocator* allocator, + webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + webrtc::RtcEventLog* event_log, + IceControllerFactoryInterface* ice_controller_factory) { + return absl::WrapUnique(new P2PTransportChannel( + transport_name, component, allocator, async_dns_resolver_factory, + /* owned_dns_resolver_factory= */ nullptr, event_log, + ice_controller_factory)); +} + P2PTransportChannel::P2PTransportChannel(const std::string& transport_name, int component, PortAllocator* allocator) : P2PTransportChannel(transport_name, component, allocator, - nullptr, - nullptr) {} + /* async_dns_resolver_factory= */ 
nullptr, + /* owned_dns_resolver_factory= */ nullptr, + /* event_log= */ nullptr, + /* ice_controller_factory= */ nullptr) {} +// Private constructor, called from Create() P2PTransportChannel::P2PTransportChannel( const std::string& transport_name, int component, PortAllocator* allocator, - webrtc::AsyncResolverFactory* async_resolver_factory, + webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + std::unique_ptr + owned_dns_resolver_factory, webrtc::RtcEventLog* event_log, IceControllerFactoryInterface* ice_controller_factory) : transport_name_(transport_name), component_(component), allocator_(allocator), - async_resolver_factory_(async_resolver_factory), + // If owned_dns_resolver_factory is given, async_dns_resolver_factory is + // ignored. + async_dns_resolver_factory_(owned_dns_resolver_factory + ? owned_dns_resolver_factory.get() + : async_dns_resolver_factory), + owned_dns_resolver_factory_(std::move(owned_dns_resolver_factory)), network_thread_(rtc::Thread::Current()), incoming_only_(false), error_(0), @@ -191,16 +227,32 @@ P2PTransportChannel::P2PTransportChannel( } } +// Public constructor, exposed for backwards compatibility. +// Deprecated. 
+P2PTransportChannel::P2PTransportChannel( + const std::string& transport_name, + int component, + PortAllocator* allocator, + webrtc::AsyncResolverFactory* async_resolver_factory, + webrtc::RtcEventLog* event_log, + IceControllerFactoryInterface* ice_controller_factory) + : P2PTransportChannel( + transport_name, + component, + allocator, + nullptr, + std::make_unique( + async_resolver_factory), + event_log, + ice_controller_factory) {} + P2PTransportChannel::~P2PTransportChannel() { + RTC_DCHECK_RUN_ON(network_thread_); std::vector copy(connections().begin(), connections().end()); for (Connection* con : copy) { con->Destroy(); } - for (auto& p : resolvers_) { - p.resolver_->Destroy(false); - } resolvers_.clear(); - RTC_DCHECK_RUN_ON(network_thread_); } // Add the allocator session to our list so that we know which sessions @@ -283,10 +335,11 @@ bool P2PTransportChannel::MaybeSwitchSelectedConnection( // threshold, the new connection is in a better receiving state than the // currently selected connection. So we need to re-check whether it needs // to be switched at a later time. - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, thread(), - rtc::Bind(&P2PTransportChannel::SortConnectionsAndUpdateState, this, - *result.recheck_event), + network_thread_->PostDelayedTask( + ToQueuedTask(task_safety_, + [this, recheck = *result.recheck_event]() { + SortConnectionsAndUpdateState(recheck); + }), result.recheck_event->recheck_delay_ms); } @@ -703,7 +756,10 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { "send_ping_on_nomination_ice_controlled", &field_trials_.send_ping_on_nomination_ice_controlled, // Allow connections to live untouched longer that 30s. - "dead_connection_timeout_ms", &field_trials_.dead_connection_timeout_ms) + "dead_connection_timeout_ms", &field_trials_.dead_connection_timeout_ms, + // Stop gathering on strongly connected. 
+ "stop_gather_on_strongly_connected", + &field_trials_.stop_gather_on_strongly_connected) ->Parse(webrtc::field_trial::FindFullName("WebRTC-IceFieldTrials")); if (field_trials_.dead_connection_timeout_ms < 30000) { @@ -838,6 +894,13 @@ void P2PTransportChannel::MaybeStartGathering() { static_cast(IceRestartState::MAX_VALUE)); } + for (const auto& session : allocator_sessions_) { + if (session->IsStopped()) { + continue; + } + session->StopGettingPorts(); + } + // Time for a new allocator. std::unique_ptr pooled_session = allocator_->TakePooledSession(transport_name(), component(), @@ -891,7 +954,8 @@ void P2PTransportChannel::OnPortReady(PortAllocatorSession* session, ports_.push_back(port); port->SignalUnknownAddress.connect(this, &P2PTransportChannel::OnUnknownAddress); - port->SignalDestroyed.connect(this, &P2PTransportChannel::OnPortDestroyed); + port->SubscribePortDestroyed( + [this](PortInterface* port) { OnPortDestroyed(port); }); port->SignalRoleConflict.connect(this, &P2PTransportChannel::OnRoleConflict); port->SignalSentPacket.connect(this, &P2PTransportChannel::OnSentPacket); @@ -1151,16 +1215,17 @@ void P2PTransportChannel::OnNominated(Connection* conn) { void P2PTransportChannel::ResolveHostnameCandidate(const Candidate& candidate) { RTC_DCHECK_RUN_ON(network_thread_); - if (!async_resolver_factory_) { + if (!async_dns_resolver_factory_) { RTC_LOG(LS_WARNING) << "Dropping ICE candidate with hostname address " "(no AsyncResolverFactory)"; return; } - rtc::AsyncResolverInterface* resolver = async_resolver_factory_->Create(); - resolvers_.emplace_back(candidate, resolver); - resolver->SignalDone.connect(this, &P2PTransportChannel::OnCandidateResolved); - resolver->Start(candidate.address()); + auto resolver = async_dns_resolver_factory_->Create(); + auto resptr = resolver.get(); + resolvers_.emplace_back(candidate, std::move(resolver)); + resptr->Start(candidate.address(), + [this, resptr]() { OnCandidateResolved(resptr); }); RTC_LOG(LS_INFO) << 
"Asynchronously resolving ICE candidate hostname " << candidate.address().HostAsSensitiveURIString(); } @@ -1215,38 +1280,44 @@ void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) { P2PTransportChannel::CandidateAndResolver::CandidateAndResolver( const Candidate& candidate, - rtc::AsyncResolverInterface* resolver) - : candidate_(candidate), resolver_(resolver) {} + std::unique_ptr&& resolver) + : candidate_(candidate), resolver_(std::move(resolver)) {} P2PTransportChannel::CandidateAndResolver::~CandidateAndResolver() {} void P2PTransportChannel::OnCandidateResolved( - rtc::AsyncResolverInterface* resolver) { + webrtc::AsyncDnsResolverInterface* resolver) { RTC_DCHECK_RUN_ON(network_thread_); auto p = absl::c_find_if(resolvers_, [resolver](const CandidateAndResolver& cr) { - return cr.resolver_ == resolver; + return cr.resolver_.get() == resolver; }); if (p == resolvers_.end()) { - RTC_LOG(LS_ERROR) << "Unexpected AsyncResolver signal"; + RTC_LOG(LS_ERROR) << "Unexpected AsyncDnsResolver return"; RTC_NOTREACHED(); return; } Candidate candidate = p->candidate_; + AddRemoteCandidateWithResult(candidate, resolver->result()); + // Now we can delete the resolver. + // TODO(bugs.webrtc.org/12651): Replace the stuff below with + // resolvers_.erase(p); + std::unique_ptr to_delete = + std::move(p->resolver_); + // Delay the actual deletion of the resolver until the lambda executes. 
+ network_thread_->PostTask( + ToQueuedTask([delete_this = std::move(to_delete)] {})); resolvers_.erase(p); - AddRemoteCandidateWithResolver(candidate, resolver); - thread()->PostTask( - webrtc::ToQueuedTask([] {}, [resolver] { resolver->Destroy(false); })); } -void P2PTransportChannel::AddRemoteCandidateWithResolver( +void P2PTransportChannel::AddRemoteCandidateWithResult( Candidate candidate, - rtc::AsyncResolverInterface* resolver) { + const webrtc::AsyncDnsResolverResult& result) { RTC_DCHECK_RUN_ON(network_thread_); - if (resolver->GetError()) { + if (result.GetError()) { RTC_LOG(LS_WARNING) << "Failed to resolve ICE candidate hostname " << candidate.address().HostAsSensitiveURIString() - << " with error " << resolver->GetError(); + << " with error " << result.GetError(); return; } @@ -1254,9 +1325,8 @@ void P2PTransportChannel::AddRemoteCandidateWithResolver( // Prefer IPv6 to IPv4 if we have it (see RFC 5245 Section 15.1). // TODO(zstein): This won't work if we only have IPv4 locally but receive an // AAAA DNS record. 
- bool have_address = - resolver->GetResolvedAddress(AF_INET6, &resolved_address) || - resolver->GetResolvedAddress(AF_INET, &resolved_address); + bool have_address = result.GetResolvedAddress(AF_INET6, &resolved_address) || + result.GetResolvedAddress(AF_INET, &resolved_address); if (!have_address) { RTC_LOG(LS_INFO) << "ICE candidate hostname " << candidate.address().HostAsSensitiveURIString() @@ -1609,10 +1679,10 @@ void P2PTransportChannel::RequestSortAndStateUpdate( IceControllerEvent reason_to_sort) { RTC_DCHECK_RUN_ON(network_thread_); if (!sort_dirty_) { - invoker_.AsyncInvoke( - RTC_FROM_HERE, thread(), - rtc::Bind(&P2PTransportChannel::SortConnectionsAndUpdateState, this, - reason_to_sort)); + network_thread_->PostTask( + ToQueuedTask(task_safety_, [this, reason_to_sort]() { + SortConnectionsAndUpdateState(reason_to_sort); + })); sort_dirty_ = true; } } @@ -1627,9 +1697,8 @@ void P2PTransportChannel::MaybeStartPinging() { RTC_LOG(LS_INFO) << ToString() << ": Have a pingable connection for the first time; " "starting to ping."; - invoker_.AsyncInvoke( - RTC_FROM_HERE, thread(), - rtc::Bind(&P2PTransportChannel::CheckAndPing, this)); + network_thread_->PostTask( + ToQueuedTask(task_safety_, [this]() { CheckAndPing(); })); regathering_controller_->Start(); started_pinging_ = true; } @@ -1946,9 +2015,8 @@ void P2PTransportChannel::CheckAndPing() { MarkConnectionPinged(conn); } - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, thread(), - rtc::Bind(&P2PTransportChannel::CheckAndPing, this), delay); + network_thread_->PostDelayedTask( + ToQueuedTask(task_safety_, [this]() { CheckAndPing(); }), delay); } // This method is only for unit testing. @@ -2015,11 +2083,13 @@ void P2PTransportChannel::OnConnectionStateChange(Connection* connection) { // the connection is at the latest generation. It is not enough to check // that the connection becomes weakly connected because the connection may be // changing from (writable, receiving) to (writable, not receiving). 
- bool strongly_connected = !connection->weak(); - bool latest_generation = connection->local_candidate().generation() >= - allocator_session()->generation(); - if (strongly_connected && latest_generation) { - MaybeStopPortAllocatorSessions(); + if (field_trials_.stop_gather_on_strongly_connected) { + bool strongly_connected = !connection->weak(); + bool latest_generation = connection->local_candidate().generation() >= + allocator_session()->generation(); + if (strongly_connected && latest_generation) { + MaybeStopPortAllocatorSessions(); + } } // We have to unroll the stack before doing this because we may be changing // the state of connections while sorting. diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h index 69a32e462..462aa105b 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h @@ -20,6 +20,9 @@ #ifndef P2P_BASE_P2P_TRANSPORT_CHANNEL_H_ #define P2P_BASE_P2P_TRANSPORT_CHANNEL_H_ +#include +#include + #include #include #include @@ -27,26 +30,43 @@ #include #include +#include "absl/base/attributes.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/async_dns_resolver.h" #include "api/async_resolver_factory.h" #include "api/candidate.h" #include "api/rtc_error.h" +#include "api/sequence_checker.h" +#include "api/transport/enums.h" +#include "api/transport/stun.h" #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" #include "logging/rtc_event_log/ice_logger.h" #include "p2p/base/candidate_pair_interface.h" +#include "p2p/base/connection.h" #include "p2p/base/ice_controller_factory_interface.h" #include "p2p/base/ice_controller_interface.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel_ice_field_trials.h" +#include "p2p/base/port.h" #include 
"p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" #include "p2p/base/regathering_controller.h" -#include "rtc_base/async_invoker.h" +#include "p2p/base/transport_description.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" +#include "rtc_base/dscp.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -82,11 +102,19 @@ class RemoteCandidate : public Candidate { // two P2P clients connected to each other. class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { public: + static std::unique_ptr Create( + const std::string& transport_name, + int component, + PortAllocator* allocator, + webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + webrtc::RtcEventLog* event_log = nullptr, + IceControllerFactoryInterface* ice_controller_factory = nullptr); // For testing only. - // TODO(zstein): Remove once AsyncResolverFactory is required. + // TODO(zstein): Remove once AsyncDnsResolverFactory is required. 
P2PTransportChannel(const std::string& transport_name, int component, PortAllocator* allocator); + ABSL_DEPRECATED("bugs.webrtc.org/12598") P2PTransportChannel( const std::string& transport_name, int component, @@ -209,8 +237,18 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { } private: - rtc::Thread* thread() const { return network_thread_; } - + P2PTransportChannel( + const std::string& transport_name, + int component, + PortAllocator* allocator, + // DNS resolver factory + webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + // If the P2PTransportChannel has to delete the DNS resolver factory + // on release, this pointer is set. + std::unique_ptr + owned_dns_resolver_factory, + webrtc::RtcEventLog* event_log = nullptr, + IceControllerFactoryInterface* ice_controller_factory = nullptr); bool IsGettingPorts() { RTC_DCHECK_RUN_ON(network_thread_); return allocator_session()->IsGettingPorts(); @@ -361,12 +399,15 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { int64_t ComputeEstimatedDisconnectedTimeMs(int64_t now, Connection* old_connection); + webrtc::ScopedTaskSafety task_safety_; std::string transport_name_ RTC_GUARDED_BY(network_thread_); int component_ RTC_GUARDED_BY(network_thread_); PortAllocator* allocator_ RTC_GUARDED_BY(network_thread_); - webrtc::AsyncResolverFactory* async_resolver_factory_ + webrtc::AsyncDnsResolverFactoryInterface* const async_dns_resolver_factory_ RTC_GUARDED_BY(network_thread_); - rtc::Thread* network_thread_; + const std::unique_ptr + owned_dns_resolver_factory_; + rtc::Thread* const network_thread_; bool incoming_only_ RTC_GUARDED_BY(network_thread_); int error_ RTC_GUARDED_BY(network_thread_); std::vector> allocator_sessions_ @@ -419,7 +460,6 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { bool has_been_writable_ RTC_GUARDED_BY(network_thread_) = false; // if writable_ has ever been true - rtc::AsyncInvoker invoker_ 
RTC_GUARDED_BY(network_thread_); absl::optional network_route_ RTC_GUARDED_BY(network_thread_); webrtc::IceEventLog ice_event_log_ RTC_GUARDED_BY(network_thread_); @@ -428,17 +468,23 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { RTC_GUARDED_BY(network_thread_); struct CandidateAndResolver final { - CandidateAndResolver(const Candidate& candidate, - rtc::AsyncResolverInterface* resolver); + CandidateAndResolver( + const Candidate& candidate, + std::unique_ptr&& resolver); ~CandidateAndResolver(); + // Moveable, but not copyable. + CandidateAndResolver(CandidateAndResolver&&) = default; + CandidateAndResolver& operator=(CandidateAndResolver&&) = default; + Candidate candidate_; - rtc::AsyncResolverInterface* resolver_; + std::unique_ptr resolver_; }; std::vector resolvers_ RTC_GUARDED_BY(network_thread_); void FinishAddingRemoteCandidate(const Candidate& new_remote_candidate); - void OnCandidateResolved(rtc::AsyncResolverInterface* resolver); - void AddRemoteCandidateWithResolver(Candidate candidate, - rtc::AsyncResolverInterface* resolver); + void OnCandidateResolved(webrtc::AsyncDnsResolverInterface* resolver); + void AddRemoteCandidateWithResult( + Candidate candidate, + const webrtc::AsyncDnsResolverResult& result); // Number of times the selected_connection_ has been modified. uint32_t selected_candidate_pair_changes_ = 0; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h index 00e1151ba..82dc580c1 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h @@ -58,6 +58,9 @@ struct IceFieldTrials { // The timeout after which the connection will be considered dead if no // traffic is received. int dead_connection_timeout_ms = 30000; + + // Stop gathering when having a strong connection. 
+ bool stop_gather_on_strongly_connected = true; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc index 035d3d4bb..d24d40f95 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc @@ -137,6 +137,7 @@ Port::Port(rtc::Thread* thread, tiebreaker_(0), shared_socket_(true), weak_factory_(this) { + RTC_DCHECK(factory_ != NULL); Construct(); } @@ -188,6 +189,9 @@ void Port::Construct() { } Port::~Port() { + RTC_DCHECK_RUN_ON(thread_); + CancelPendingTasks(); + // Delete all of the remaining connections. We copy the list up front // because each deletion will cause it to be modified. @@ -490,7 +494,8 @@ bool Port::GetStunMessage(const char* data, } // If ICE, and the MESSAGE-INTEGRITY is bad, fail with a 401 Unauthorized - if (!stun_msg->ValidateMessageIntegrity(data, size, password_)) { + if (stun_msg->ValidateMessageIntegrity(password_) != + StunMessage::IntegrityStatus::kIntegrityOk) { RTC_LOG(LS_ERROR) << ToString() << ": Received " << StunMethodToString(stun_msg->type()) << " with bad M-I from " << addr.ToSensitiveString() @@ -556,7 +561,8 @@ bool Port::GetStunMessage(const char* data, // No stun attributes will be verified, if it's stun indication message. // Returning from end of the this method. 
} else if (stun_msg->type() == GOOG_PING_REQUEST) { - if (!stun_msg->ValidateMessageIntegrity32(data, size, password_)) { + if (stun_msg->ValidateMessageIntegrity(password_) != + StunMessage::IntegrityStatus::kIntegrityOk) { RTC_LOG(LS_ERROR) << ToString() << ": Received " << StunMethodToString(stun_msg->type()) << " with bad M-I from " << addr.ToSensitiveString() @@ -609,6 +615,16 @@ rtc::DiffServCodePoint Port::StunDscpValue() const { return rtc::DSCP_NO_CHANGE; } +void Port::set_timeout_delay(int delay) { + RTC_DCHECK_RUN_ON(thread_); + // Although this method is meant to only be used by tests, some downstream + // projects have started using it. Ideally we should update our tests to not + // require to modify this state and instead use a testing harness that allows + // adjusting the clock and then just use the kPortTimeoutDelay constant + // directly. + timeout_delay_ = delay; +} + bool Port::ParseStunUsername(const StunMessage* stun_msg, std::string* local_ufrag, std::string* remote_ufrag) const { @@ -818,7 +834,14 @@ void Port::Prune() { thread_->Post(RTC_FROM_HERE, this, MSG_DESTROY_IF_DEAD); } +// Call to stop any currently pending operations from running. 
+void Port::CancelPendingTasks() { + RTC_DCHECK_RUN_ON(thread_); + thread_->Clear(this); +} + void Port::OnMessage(rtc::Message* pmsg) { + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(pmsg->message_id == MSG_DESTROY_IF_DEAD); bool dead = (state_ == State::INIT || state_ == State::PRUNED) && @@ -829,6 +852,14 @@ void Port::OnMessage(rtc::Message* pmsg) { } } +void Port::SubscribePortDestroyed( + std::function callback) { + port_destroyed_callback_list_.AddReceiver(callback); +} + +void Port::SendPortDestroyed(Port* port) { + port_destroyed_callback_list_.Send(port); +} void Port::OnNetworkTypeChanged(const rtc::Network* network) { RTC_DCHECK(network == network_); @@ -893,7 +924,7 @@ void Port::OnConnectionDestroyed(Connection* conn) { void Port::Destroy() { RTC_DCHECK(connections_.empty()); RTC_LOG(LS_INFO) << ToString() << ": Port deleted"; - SignalDestroyed(this); + SendPortDestroyed(this); delete this; } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/port.h index 1e20d1346..7759ade33 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port.h @@ -33,6 +33,7 @@ #include "p2p/base/port_interface.h" #include "p2p/base/stun_request.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/callback_list.h" #include "rtc_base/checks.h" #include "rtc_base/net_helper.h" #include "rtc_base/network.h" @@ -160,7 +161,7 @@ typedef std::set ServerAddresses; // connections to similar mechanisms of the other client. Subclasses of this // one add support for specific mechanisms like local UDP ports. class Port : public PortInterface, - public rtc::MessageHandlerAutoCleanup, + public rtc::MessageHandler, public sigslot::has_slots<> { public: // INIT: The state when a port is just created. @@ -209,14 +210,14 @@ class Port : public PortInterface, // Allows a port to be destroyed if no connection is using it. 
void Prune(); + // Call to stop any currently pending operations from running. + void CancelPendingTasks(); + // The thread on which this port performs its I/O. rtc::Thread* thread() { return thread_; } // The factory used to create the sockets of this port. rtc::PacketSocketFactory* socket_factory() const { return factory_; } - void set_socket_factory(rtc::PacketSocketFactory* factory) { - factory_ = factory; - } // For debugging purposes. const std::string& content_name() const { return content_name_; } @@ -266,6 +267,9 @@ class Port : public PortInterface, // connection. sigslot::signal1 SignalPortError; + void SubscribePortDestroyed( + std::function callback) override; + void SendPortDestroyed(Port* port); // Returns a map containing all of the connections of this port, keyed by the // remote address. typedef std::map AddressMap; @@ -322,7 +326,7 @@ class Port : public PortInterface, uint16_t max_port() { return max_port_; } // Timeout shortening function to speed up unit tests. - void set_timeout_delay(int delay) { timeout_delay_ = delay; } + void set_timeout_delay(int delay); // This method will return local and remote username fragements from the // stun username attribute if present. 
@@ -437,8 +441,8 @@ class Port : public PortInterface, void OnNetworkTypeChanged(const rtc::Network* network); - rtc::Thread* thread_; - rtc::PacketSocketFactory* factory_; + rtc::Thread* const thread_; + rtc::PacketSocketFactory* const factory_; std::string type_; bool send_retransmit_count_attribute_; rtc::Network* network_; @@ -484,6 +488,7 @@ class Port : public PortInterface, bool is_final); friend class Connection; + webrtc::CallbackList port_destroyed_callback_list_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.h b/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.h index 4bbe56c0b..33a23484f 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port_allocator.h @@ -16,6 +16,7 @@ #include #include +#include "api/sequence_checker.h" #include "api/transport/enums.h" #include "p2p/base/port.h" #include "p2p/base/port_interface.h" @@ -25,7 +26,6 @@ #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" namespace webrtc { class TurnCustomizer; @@ -638,7 +638,7 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { bool allow_tcp_listen_; uint32_t candidate_filter_; std::string origin_; - rtc::ThreadChecker thread_checker_; + webrtc::SequenceChecker thread_checker_; private: ServerAddresses stun_servers_; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.h b/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.h index 39eae18a0..73c8e36c7 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port_interface.h @@ -12,12 +12,14 @@ #define P2P_BASE_PORT_INTERFACE_H_ #include +#include #include #include "absl/types/optional.h" #include "api/candidate.h" #include "p2p/base/transport_description.h" #include "rtc_base/async_packet_socket.h" +#include 
"rtc_base/callback_list.h" #include "rtc_base/socket_address.h" namespace rtc { @@ -112,7 +114,8 @@ class PortInterface { // Signaled when this port decides to delete itself because it no longer has // any usefulness. - sigslot::signal1 SignalDestroyed; + virtual void SubscribePortDestroyed( + std::function callback) = 0; // Signaled when Port discovers ice role conflict with the peer. sigslot::signal1 SignalRoleConflict; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.cc index fe38a3e4d..293e9dbcf 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.cc @@ -9,6 +9,7 @@ */ #include "p2p/base/regathering_controller.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { @@ -17,8 +18,8 @@ BasicRegatheringController::BasicRegatheringController( cricket::IceTransportInternal* ice_transport, rtc::Thread* thread) : config_(config), ice_transport_(ice_transport), thread_(thread) { + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(ice_transport_); - RTC_DCHECK(thread_); ice_transport_->SignalStateChanged.connect( this, &BasicRegatheringController::OnIceTransportStateChanged); ice_transport->SignalWritableState.connect( @@ -29,51 +30,49 @@ BasicRegatheringController::BasicRegatheringController( this, &BasicRegatheringController::OnIceTransportNetworkRouteChanged); } -BasicRegatheringController::~BasicRegatheringController() = default; +BasicRegatheringController::~BasicRegatheringController() { + RTC_DCHECK_RUN_ON(thread_); +} void BasicRegatheringController::Start() { + RTC_DCHECK_RUN_ON(thread_); ScheduleRecurringRegatheringOnFailedNetworks(); } void BasicRegatheringController::SetConfig(const Config& config) { - bool need_cancel_and_reschedule_on_failed_networks = - has_recurring_schedule_on_failed_networks_ && - (config_.regather_on_failed_networks_interval != - 
config.regather_on_failed_networks_interval); + RTC_DCHECK_RUN_ON(thread_); + bool need_reschedule_on_failed_networks = + pending_regathering_ && (config_.regather_on_failed_networks_interval != + config.regather_on_failed_networks_interval); config_ = config; - if (need_cancel_and_reschedule_on_failed_networks) { - CancelScheduledRecurringRegatheringOnFailedNetworks(); + if (need_reschedule_on_failed_networks) { ScheduleRecurringRegatheringOnFailedNetworks(); } } void BasicRegatheringController:: ScheduleRecurringRegatheringOnFailedNetworks() { + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(config_.regather_on_failed_networks_interval >= 0); - CancelScheduledRecurringRegatheringOnFailedNetworks(); - has_recurring_schedule_on_failed_networks_ = true; - invoker_for_failed_networks_.AsyncInvokeDelayed( - RTC_FROM_HERE, thread_, - rtc::Bind( - &BasicRegatheringController::RegatherOnFailedNetworksIfDoneGathering, - this), + // Reset pending_regathering_ to cancel any potentially pending tasks. + pending_regathering_.reset(new ScopedTaskSafety()); + + thread_->PostDelayedTask( + ToQueuedTask(*pending_regathering_.get(), + [this]() { + RTC_DCHECK_RUN_ON(thread_); + // Only regather when the current session is in the CLEARED + // state (i.e., not running or stopped). It is only + // possible to enter this state when we gather continually, + // so there is an implicit check on continual gathering + // here. + if (allocator_session_ && + allocator_session_->IsCleared()) { + allocator_session_->RegatherOnFailedNetworks(); + } + ScheduleRecurringRegatheringOnFailedNetworks(); + }), config_.regather_on_failed_networks_interval); } -void BasicRegatheringController::RegatherOnFailedNetworksIfDoneGathering() { - // Only regather when the current session is in the CLEARED state (i.e., not - // running or stopped). It is only possible to enter this state when we gather - // continually, so there is an implicit check on continual gathering here. 
- if (allocator_session_ && allocator_session_->IsCleared()) { - allocator_session_->RegatherOnFailedNetworks(); - } - ScheduleRecurringRegatheringOnFailedNetworks(); -} - -void BasicRegatheringController:: - CancelScheduledRecurringRegatheringOnFailedNetworks() { - invoker_for_failed_networks_.Clear(); - has_recurring_schedule_on_failed_networks_ = false; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.h b/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.h index 54a76dc3e..116d820a8 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/regathering_controller.h @@ -11,9 +11,11 @@ #ifndef P2P_BASE_REGATHERING_CONTROLLER_H_ #define P2P_BASE_REGATHERING_CONTROLLER_H_ +#include + #include "p2p/base/ice_transport_internal.h" #include "p2p/base/port_allocator.h" -#include "rtc_base/async_invoker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" namespace webrtc { @@ -80,20 +82,14 @@ class BasicRegatheringController : public sigslot::has_slots<> { void ScheduleRecurringRegatheringOnFailedNetworks(); // Cancels regathering scheduled by ScheduleRecurringRegatheringOnAllNetworks. void CancelScheduledRecurringRegatheringOnAllNetworks(); - // Cancels regathering scheduled by - // ScheduleRecurringRegatheringOnFailedNetworks. - void CancelScheduledRecurringRegatheringOnFailedNetworks(); - - // The following method perform the actual regathering, if the recent port - // allocator session has done the initial gathering. - void RegatherOnFailedNetworksIfDoneGathering(); + // We use a flag to be able to cancel pending regathering operations when + // the object goes out of scope or the config changes. 
+ std::unique_ptr pending_regathering_; Config config_; cricket::IceTransportInternal* ice_transport_; cricket::PortAllocatorSession* allocator_session_ = nullptr; - bool has_recurring_schedule_on_failed_networks_ = false; - rtc::Thread* thread_; - rtc::AsyncInvoker invoker_for_failed_networks_; + rtc::Thread* const thread_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc index 4e1a1f6a9..7b1a2a83a 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc @@ -17,11 +17,11 @@ #include "p2p/base/connection.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port_allocator.h" +#include "rtc_base/async_resolver_interface.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/net_helpers.h" #include "rtc_base/strings/string_builder.h" namespace cricket { @@ -306,7 +306,9 @@ int UDPPort::SendTo(const void* data, if (send_error_count_ < kSendErrorLogLimit) { ++send_error_count_; RTC_LOG(LS_ERROR) << ToString() << ": UDP send of " << size - << " bytes failed with error " << error_; + << " bytes to host " << addr.ToSensitiveString() << " (" + << addr.ToResolvedSensitiveString() + << ") failed with error " << error_; } } else { send_error_count_ = 0; @@ -593,7 +595,11 @@ void UDPPort::OnSendPacket(const void* data, size_t size, StunRequest* req) { options.info_signaled_after_sent.packet_type = rtc::PacketType::kStunMessage; CopyPortInformationToPacketInfo(&options.info_signaled_after_sent); if (socket_->SendTo(data, size, sreq->server_addr(), options) < 0) { - RTC_LOG_ERR_EX(LERROR, socket_->GetError()) << "sendto"; + RTC_LOG_ERR_EX(LERROR, socket_->GetError()) + << "UDP send of " << size << " bytes to host " + << sreq->server_addr().ToSensitiveString() << " (" + << sreq->server_addr().ToResolvedSensitiveString() + << ") 
failed with error " << error_; } stats_.stun_binding_requests_sent++; } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.cc index 44376ced9..2870dcdfc 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_request.cc @@ -120,6 +120,18 @@ bool StunRequestManager::CheckResponse(StunMessage* msg) { } StunRequest* request = iter->second; + + // Now that we know the request, we can see if the response is + // integrity-protected or not. + // For some tests, the message integrity is not set in the request. + // Complain, and then don't check. + bool skip_integrity_checking = false; + if (request->msg()->integrity() == StunMessage::IntegrityStatus::kNotSet) { + skip_integrity_checking = true; + } else { + msg->ValidateMessageIntegrity(request->msg()->password()); + } + if (!msg->GetNonComprehendedAttributes().empty()) { // If a response contains unknown comprehension-required attributes, it's // simply discarded and the transaction is considered failed. 
See RFC5389 @@ -129,6 +141,9 @@ bool StunRequestManager::CheckResponse(StunMessage* msg) { delete request; return false; } else if (msg->type() == GetStunSuccessResponseType(request->type())) { + if (!msg->IntegrityOk() && !skip_integrity_checking) { + return false; + } request->OnResponse(msg); } else if (msg->type() == GetStunErrorResponseType(request->type())) { request->OnErrorResponse(msg); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc index efbf62e49..d4266bf0b 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc @@ -403,12 +403,14 @@ int TCPConnection::Send(const void* data, static_cast(port_)->CopyPortInformationToPacketInfo( &modified_options.info_signaled_after_sent); int sent = socket_->Send(data, size, modified_options); + int64_t now = rtc::TimeMillis(); if (sent < 0) { stats_.sent_discarded_packets++; error_ = socket_->GetError(); } else { - send_rate_tracker_.AddSamples(sent); + send_rate_tracker_.AddSamplesAtTime(now, sent); } + last_send_data_ = now; return sent; } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.cc index 9330a0007..54bdfb379 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.cc @@ -15,10 +15,9 @@ namespace cricket { -TestStunServer* TestStunServer::Create(rtc::Thread* thread, +TestStunServer* TestStunServer::Create(rtc::SocketServer* ss, const rtc::SocketAddress& addr) { - rtc::AsyncSocket* socket = - thread->socketserver()->CreateAsyncSocket(addr.family(), SOCK_DGRAM); + rtc::AsyncSocket* socket = ss->CreateAsyncSocket(addr.family(), SOCK_DGRAM); rtc::AsyncUDPSocket* udp_socket = rtc::AsyncUDPSocket::Create(socket, addr); return new TestStunServer(udp_socket); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.h 
b/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.h index e44e7dbcd..11ac620bb 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/test_stun_server.h @@ -15,14 +15,14 @@ #include "p2p/base/stun_server.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/socket_address.h" -#include "rtc_base/thread.h" +#include "rtc_base/socket_server.h" namespace cricket { // A test STUN server. Useful for unit tests. class TestStunServer : StunServer { public: - static TestStunServer* Create(rtc::Thread* thread, + static TestStunServer* Create(rtc::SocketServer* ss, const rtc::SocketAddress& addr); // Set a fake STUN address to return to the client. diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/test_turn_server.h b/TMessagesProj/jni/voip/webrtc/p2p/base/test_turn_server.h index d438a8330..ecd934861 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/test_turn_server.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/test_turn_server.h @@ -14,6 +14,7 @@ #include #include +#include "api/sequence_checker.h" #include "api/transport/stun.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/turn_server.h" @@ -21,7 +22,6 @@ #include "rtc_base/ssl_adapter.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" namespace cricket { @@ -147,7 +147,7 @@ class TestTurnServer : public TurnAuthInterface { TurnServer server_; rtc::Thread* thread_; - rtc::ThreadChecker thread_checker_; + webrtc::SequenceChecker thread_checker_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc index 4d39f207b..33925d43e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc @@ -28,6 +28,7 @@ #include "rtc_base/net_helpers.h" #include "rtc_base/socket_address.h" #include "rtc_base/strings/string_builder.h" 
+#include "rtc_base/task_utils/to_queued_task.h" #include "system_wrappers/include/field_trial.h" namespace cricket { @@ -346,6 +347,15 @@ void TurnPort::PrepareAddress() { server_address_.address.SetPort(TURN_DEFAULT_PORT); } + if (!AllowedTurnPort(server_address_.address.port())) { + // This can only happen after a 300 ALTERNATE SERVER, since the port can't + // be created with a disallowed port number. + RTC_LOG(LS_ERROR) << "Attempt to start allocation with disallowed port# " + << server_address_.address.port(); + OnAllocateError(STUN_ERROR_SERVER_ERROR, + "Attempt to start allocation to a disallowed port"); + return; + } if (server_address_.address.IsUnresolvedIP()) { ResolveTurnAddress(server_address_.address); } else { @@ -715,16 +725,6 @@ bool TurnPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, return false; } - // This must be a response for one of our requests. - // Check success responses, but not errors, for MESSAGE-INTEGRITY. - if (IsStunSuccessResponseType(msg_type) && - !StunMessage::ValidateMessageIntegrity(data, size, hash())) { - RTC_LOG(LS_WARNING) << ToString() - << ": Received TURN message with invalid " - "message integrity, msg_type: " - << msg_type; - return true; - } request_manager_.CheckResponse(data, size); return true; @@ -943,6 +943,21 @@ rtc::DiffServCodePoint TurnPort::StunDscpValue() const { return stun_dscp_value_; } +// static +bool TurnPort::AllowedTurnPort(int port) { + // Port 53, 80 and 443 are used for existing deployments. + // Ports above 1024 are assumed to be OK to use. + if (port == 53 || port == 80 || port == 443 || port >= 1024) { + return true; + } + // Allow any port if relevant field trial is set. This allows disabling the + // check. 
+ if (webrtc::field_trial::IsEnabled("WebRTC-Turn-AllowSystemPorts")) { + return true; + } + return false; +} + void TurnPort::OnMessage(rtc::Message* message) { switch (message->message_id) { case MSG_ALLOCATE_ERROR: @@ -1274,10 +1289,12 @@ void TurnPort::ScheduleEntryDestruction(TurnEntry* entry) { RTC_DCHECK(!entry->destruction_timestamp().has_value()); int64_t timestamp = rtc::TimeMillis(); entry->set_destruction_timestamp(timestamp); - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, thread(), - rtc::Bind(&TurnPort::DestroyEntryIfNotCancelled, this, entry, timestamp), - TURN_PERMISSION_TIMEOUT); + thread()->PostDelayedTask(ToQueuedTask(task_safety_.flag(), + [this, entry, timestamp] { + DestroyEntryIfNotCancelled( + entry, timestamp); + }), + TURN_PERMISSION_TIMEOUT); } bool TurnPort::SetEntryChannelId(const rtc::SocketAddress& address, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h index a9ec43419..55dbda5ec 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h @@ -23,9 +23,10 @@ #include "absl/memory/memory.h" #include "p2p/base/port.h" #include "p2p/client/basic_port_allocator.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/async_resolver_interface.h" #include "rtc_base/ssl_certificate.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace webrtc { class TurnCustomizer; @@ -65,6 +66,14 @@ class TurnPort : public Port { webrtc::TurnCustomizer* customizer) { // Do basic parameter validation. if (credentials.username.size() > kMaxTurnUsernameLength) { + RTC_LOG(LS_ERROR) << "Attempt to use TURN with a too long username " + << "of length " << credentials.username.size(); + return nullptr; + } + // Do not connect to low-numbered ports. The default STUN port is 3478. 
+ if (!AllowedTurnPort(server_address.address.port())) { + RTC_LOG(LS_ERROR) << "Attempt to use TURN to connect to port " + << server_address.address.port(); return nullptr; } // Using `new` to access a non-public constructor. @@ -110,6 +119,14 @@ class TurnPort : public Port { rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr) { // Do basic parameter validation. if (credentials.username.size() > kMaxTurnUsernameLength) { + RTC_LOG(LS_ERROR) << "Attempt to use TURN with a too long username " + << "of length " << credentials.username.size(); + return nullptr; + } + // Do not connect to low-numbered ports. The default STUN port is 3478. + if (!AllowedTurnPort(server_address.address.port())) { + RTC_LOG(LS_ERROR) << "Attempt to use TURN to connect to port " + << server_address.address.port(); return nullptr; } // Using `new` to access a non-public constructor. @@ -210,9 +227,6 @@ class TurnPort : public Port { rtc::AsyncPacketSocket* socket() const { return socket_; } - // For testing only. - rtc::AsyncInvoker* invoker() { return &invoker_; } - // Signal with resolved server address. // Parameters are port, server address and resolved server address. // This signal will be sent only if server address is resolved successfully. @@ -295,6 +309,7 @@ class TurnPort : public Port { typedef std::map SocketOptionsMap; typedef std::set AttemptedServerSet; + static bool AllowedTurnPort(int port); void OnMessage(rtc::Message* pmsg) override; bool CreateTurnClientSocket(); @@ -397,8 +412,6 @@ class TurnPort : public Port { // The number of retries made due to allocate mismatch error. size_t allocate_mismatch_retries_; - rtc::AsyncInvoker invoker_; - // Optional TurnCustomizer that can modify outgoing messages. Once set, this // must outlive the TurnPort's lifetime. webrtc::TurnCustomizer* turn_customizer_ = nullptr; @@ -411,6 +424,8 @@ class TurnPort : public Port { // to be more easy to work with. 
std::string turn_logging_id_; + webrtc::ScopedTaskSafety task_safety_; + friend class TurnEntry; friend class TurnAllocateRequest; friend class TurnRefreshRequest; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc index 17a49e403..53f283bc9 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.cc @@ -15,10 +15,10 @@ #include #include "absl/algorithm/container.h" +#include "absl/memory/memory.h" #include "api/packet_socket_factory.h" #include "api/transport/stun.h" #include "p2p/base/async_stun_tcp_socket.h" -#include "rtc_base/bind.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" @@ -26,6 +26,7 @@ #include "rtc_base/message_digest.h" #include "rtc_base/socket_adapters.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace cricket { @@ -129,7 +130,7 @@ TurnServer::TurnServer(rtc::Thread* thread) enable_otu_nonce_(false) {} TurnServer::~TurnServer() { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); for (InternalSocketMap::iterator it = server_sockets_.begin(); it != server_sockets_.end(); ++it) { rtc::AsyncPacketSocket* socket = it->first; @@ -145,7 +146,7 @@ TurnServer::~TurnServer() { void TurnServer::AddInternalSocket(rtc::AsyncPacketSocket* socket, ProtocolType proto) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(server_sockets_.end() == server_sockets_.find(socket)); server_sockets_[socket] = proto; socket->SignalReadPacket.connect(this, &TurnServer::OnInternalPacket); @@ -153,7 +154,7 @@ void TurnServer::AddInternalSocket(rtc::AsyncPacketSocket* socket, void TurnServer::AddInternalServerSocket(rtc::AsyncSocket* socket, ProtocolType proto) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); 
RTC_DCHECK(server_listen_sockets_.end() == server_listen_sockets_.find(socket)); server_listen_sockets_[socket] = proto; @@ -163,20 +164,19 @@ void TurnServer::AddInternalServerSocket(rtc::AsyncSocket* socket, void TurnServer::SetExternalSocketFactory( rtc::PacketSocketFactory* factory, const rtc::SocketAddress& external_addr) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); external_socket_factory_.reset(factory); external_addr_ = external_addr; } void TurnServer::OnNewInternalConnection(rtc::AsyncSocket* socket) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(server_listen_sockets_.find(socket) != server_listen_sockets_.end()); AcceptConnection(socket); } void TurnServer::AcceptConnection(rtc::AsyncSocket* server_socket) { - RTC_DCHECK(thread_checker_.IsCurrent()); // Check if someone is trying to connect to us. rtc::SocketAddress accept_addr; rtc::AsyncSocket* accepted_socket = server_socket->Accept(&accept_addr); @@ -193,7 +193,7 @@ void TurnServer::AcceptConnection(rtc::AsyncSocket* server_socket) { void TurnServer::OnInternalSocketClose(rtc::AsyncPacketSocket* socket, int err) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); DestroyInternalSocket(socket); } @@ -202,7 +202,7 @@ void TurnServer::OnInternalPacket(rtc::AsyncPacketSocket* socket, size_t size, const rtc::SocketAddress& addr, const int64_t& /* packet_time_us */) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); // Fail if the packet is too small to even contain a channel header. 
if (size < TURN_CHANNEL_HEADER_SIZE) { return; @@ -229,7 +229,6 @@ void TurnServer::OnInternalPacket(rtc::AsyncPacketSocket* socket, void TurnServer::HandleStunMessage(TurnServerConnection* conn, const char* data, size_t size) { - RTC_DCHECK(thread_checker_.IsCurrent()); TurnMessage msg; rtc::ByteBufferReader buf(data, size); if (!msg.Read(&buf) || (buf.Length() > 0)) { @@ -295,7 +294,6 @@ void TurnServer::HandleStunMessage(TurnServerConnection* conn, } bool TurnServer::GetKey(const StunMessage* msg, std::string* key) { - RTC_DCHECK(thread_checker_.IsCurrent()); const StunByteStringAttribute* username_attr = msg->GetByteString(STUN_ATTR_USERNAME); if (!username_attr) { @@ -307,11 +305,10 @@ bool TurnServer::GetKey(const StunMessage* msg, std::string* key) { } bool TurnServer::CheckAuthorization(TurnServerConnection* conn, - const StunMessage* msg, + StunMessage* msg, const char* data, size_t size, const std::string& key) { - RTC_DCHECK(thread_checker_.IsCurrent()); // RFC 5389, 10.2.2. RTC_DCHECK(IsStunRequestType(msg->type())); const StunByteStringAttribute* mi_attr = @@ -323,14 +320,14 @@ bool TurnServer::CheckAuthorization(TurnServerConnection* conn, const StunByteStringAttribute* nonce_attr = msg->GetByteString(STUN_ATTR_NONCE); - // Fail if no M-I. + // Fail if no MESSAGE_INTEGRITY. if (!mi_attr) { SendErrorResponseWithRealmAndNonce(conn, msg, STUN_ERROR_UNAUTHORIZED, STUN_ERROR_REASON_UNAUTHORIZED); return false; } - // Fail if there is M-I but no username, nonce, or realm. + // Fail if there is MESSAGE_INTEGRITY but no username, nonce, or realm. if (!username_attr || !realm_attr || !nonce_attr) { SendErrorResponse(conn, msg, STUN_ERROR_BAD_REQUEST, STUN_ERROR_REASON_BAD_REQUEST); @@ -344,9 +341,9 @@ bool TurnServer::CheckAuthorization(TurnServerConnection* conn, return false; } - // Fail if bad username or M-I. - // We need |data| and |size| for the call to ValidateMessageIntegrity. 
- if (key.empty() || !StunMessage::ValidateMessageIntegrity(data, size, key)) { + // Fail if bad MESSAGE_INTEGRITY. + if (key.empty() || msg->ValidateMessageIntegrity(key) != + StunMessage::IntegrityStatus::kIntegrityOk) { SendErrorResponseWithRealmAndNonce(conn, msg, STUN_ERROR_UNAUTHORIZED, STUN_ERROR_REASON_UNAUTHORIZED); return false; @@ -370,7 +367,6 @@ bool TurnServer::CheckAuthorization(TurnServerConnection* conn, void TurnServer::HandleBindingRequest(TurnServerConnection* conn, const StunMessage* req) { - RTC_DCHECK(thread_checker_.IsCurrent()); StunMessage response; InitResponse(req, &response); @@ -385,7 +381,6 @@ void TurnServer::HandleBindingRequest(TurnServerConnection* conn, void TurnServer::HandleAllocateRequest(TurnServerConnection* conn, const TurnMessage* msg, const std::string& key) { - RTC_DCHECK(thread_checker_.IsCurrent()); // Check the parameters in the request. const StunUInt32Attribute* transport_attr = msg->GetUInt32(STUN_ATTR_REQUESTED_TRANSPORT); @@ -415,7 +410,6 @@ void TurnServer::HandleAllocateRequest(TurnServerConnection* conn, } std::string TurnServer::GenerateNonce(int64_t now) const { - RTC_DCHECK(thread_checker_.IsCurrent()); // Generate a nonce of the form hex(now + HMAC-MD5(nonce_key_, now)) std::string input(reinterpret_cast(&now), sizeof(now)); std::string nonce = rtc::hex_encode(input.c_str(), input.size()); @@ -426,7 +420,6 @@ std::string TurnServer::GenerateNonce(int64_t now) const { } bool TurnServer::ValidateNonce(const std::string& nonce) const { - RTC_DCHECK(thread_checker_.IsCurrent()); // Check the size. if (nonce.size() != kNonceSize) { return false; @@ -453,7 +446,6 @@ bool TurnServer::ValidateNonce(const std::string& nonce) const { } TurnServerAllocation* TurnServer::FindAllocation(TurnServerConnection* conn) { - RTC_DCHECK(thread_checker_.IsCurrent()); AllocationMap::const_iterator it = allocations_.find(*conn); return (it != allocations_.end()) ? 
it->second.get() : nullptr; } @@ -461,7 +453,6 @@ TurnServerAllocation* TurnServer::FindAllocation(TurnServerConnection* conn) { TurnServerAllocation* TurnServer::CreateAllocation(TurnServerConnection* conn, int proto, const std::string& key) { - RTC_DCHECK(thread_checker_.IsCurrent()); rtc::AsyncPacketSocket* external_socket = (external_socket_factory_) ? external_socket_factory_->CreateUdpSocket(external_addr_, 0, 0) @@ -482,7 +473,7 @@ void TurnServer::SendErrorResponse(TurnServerConnection* conn, const StunMessage* req, int code, const std::string& reason) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); TurnMessage resp; InitErrorResponse(req, code, reason, &resp); RTC_LOG(LS_INFO) << "Sending error response, type=" << resp.type() @@ -494,7 +485,6 @@ void TurnServer::SendErrorResponseWithRealmAndNonce(TurnServerConnection* conn, const StunMessage* msg, int code, const std::string& reason) { - RTC_DCHECK(thread_checker_.IsCurrent()); TurnMessage resp; InitErrorResponse(msg, code, reason, &resp); @@ -514,7 +504,6 @@ void TurnServer::SendErrorResponseWithAlternateServer( TurnServerConnection* conn, const StunMessage* msg, const rtc::SocketAddress& addr) { - RTC_DCHECK(thread_checker_.IsCurrent()); TurnMessage resp; InitErrorResponse(msg, STUN_ERROR_TRY_ALTERNATE, STUN_ERROR_REASON_TRY_ALTERNATE_SERVER, &resp); @@ -524,7 +513,7 @@ void TurnServer::SendErrorResponseWithAlternateServer( } void TurnServer::SendStun(TurnServerConnection* conn, StunMessage* msg) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); rtc::ByteBufferWriter buf; // Add a SOFTWARE attribute if one is set. 
if (!software_.empty()) { @@ -537,13 +526,12 @@ void TurnServer::SendStun(TurnServerConnection* conn, StunMessage* msg) { void TurnServer::Send(TurnServerConnection* conn, const rtc::ByteBufferWriter& buf) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); rtc::PacketOptions options; conn->socket()->SendTo(buf.Data(), buf.Length(), conn->src(), options); } void TurnServer::OnAllocationDestroyed(TurnServerAllocation* allocation) { - RTC_DCHECK(thread_checker_.IsCurrent()); // Removing the internal socket if the connection is not udp. rtc::AsyncPacketSocket* socket = allocation->conn()->socket(); InternalSocketMap::iterator iter = server_sockets_.find(socket); @@ -563,27 +551,21 @@ void TurnServer::OnAllocationDestroyed(TurnServerAllocation* allocation) { } void TurnServer::DestroyInternalSocket(rtc::AsyncPacketSocket* socket) { - RTC_DCHECK(thread_checker_.IsCurrent()); InternalSocketMap::iterator iter = server_sockets_.find(socket); if (iter != server_sockets_.end()) { rtc::AsyncPacketSocket* socket = iter->first; socket->SignalReadPacket.disconnect(this); server_sockets_.erase(iter); + std::unique_ptr socket_to_delete = + absl::WrapUnique(socket); // We must destroy the socket async to avoid invalidating the sigslot // callback list iterator inside a sigslot callback. (In other words, // deleting an object from within a callback from that object). 
- sockets_to_delete_.push_back( - std::unique_ptr(socket)); - invoker_.AsyncInvoke(RTC_FROM_HERE, rtc::Thread::Current(), - rtc::Bind(&TurnServer::FreeSockets, this)); + thread_->PostTask(webrtc::ToQueuedTask( + [socket_to_delete = std::move(socket_to_delete)] {})); } } -void TurnServer::FreeSockets() { - RTC_DCHECK(thread_checker_.IsCurrent()); - sockets_to_delete_.clear(); -} - TurnServerConnection::TurnServerConnection(const rtc::SocketAddress& src, ProtocolType proto, rtc::AsyncPacketSocket* socket) diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h index ca856448b..f90c3dac0 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_server.h @@ -19,13 +19,12 @@ #include #include +#include "api/sequence_checker.h" #include "p2p/base/port_interface.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" namespace rtc { class ByteBufferWriter; @@ -129,8 +128,8 @@ class TurnServerAllocation : public rtc::MessageHandlerAutoCleanup, void OnChannelDestroyed(Channel* channel); void OnMessage(rtc::Message* msg) override; - TurnServer* server_; - rtc::Thread* thread_; + TurnServer* const server_; + rtc::Thread* const thread_; TurnServerConnection conn_; std::unique_ptr external_socket_; std::string key_; @@ -183,53 +182,53 @@ class TurnServer : public sigslot::has_slots<> { // Gets/sets the realm value to use for the server. const std::string& realm() const { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); return realm_; } void set_realm(const std::string& realm) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); realm_ = realm; } // Gets/sets the value for the SOFTWARE attribute for TURN messages. 
const std::string& software() const { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); return software_; } void set_software(const std::string& software) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); software_ = software; } const AllocationMap& allocations() const { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); return allocations_; } // Sets the authentication callback; does not take ownership. void set_auth_hook(TurnAuthInterface* auth_hook) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); auth_hook_ = auth_hook; } void set_redirect_hook(TurnRedirectInterface* redirect_hook) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); redirect_hook_ = redirect_hook; } void set_enable_otu_nonce(bool enable) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); enable_otu_nonce_ = enable; } // If set to true, reject CreatePermission requests to RFC1918 addresses. void set_reject_private_addresses(bool filter) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); reject_private_addresses_ = filter; } void set_enable_permission_checks(bool enable) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); enable_permission_checks_ = enable; } @@ -244,18 +243,22 @@ class TurnServer : public sigslot::has_slots<> { const rtc::SocketAddress& address); // For testing only. std::string SetTimestampForNextNonce(int64_t timestamp) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); ts_for_next_nonce_ = timestamp; return GenerateNonce(timestamp); } void SetStunMessageObserver(std::unique_ptr observer) { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); stun_message_observer_ = std::move(observer); } private: - std::string GenerateNonce(int64_t now) const; + // All private member functions and variables should have access restricted to + // thread_. 
But compile-time annotations are missing for members access from + // TurnServerAllocation (via friend declaration), and the On* methods, which + // are called via sigslot. + std::string GenerateNonce(int64_t now) const RTC_RUN_ON(thread_); void OnInternalPacket(rtc::AsyncPacketSocket* socket, const char* data, size_t size, @@ -265,29 +268,32 @@ class TurnServer : public sigslot::has_slots<> { void OnNewInternalConnection(rtc::AsyncSocket* socket); // Accept connections on this server socket. - void AcceptConnection(rtc::AsyncSocket* server_socket); + void AcceptConnection(rtc::AsyncSocket* server_socket) RTC_RUN_ON(thread_); void OnInternalSocketClose(rtc::AsyncPacketSocket* socket, int err); void HandleStunMessage(TurnServerConnection* conn, const char* data, - size_t size); - void HandleBindingRequest(TurnServerConnection* conn, const StunMessage* msg); + size_t size) RTC_RUN_ON(thread_); + void HandleBindingRequest(TurnServerConnection* conn, const StunMessage* msg) + RTC_RUN_ON(thread_); void HandleAllocateRequest(TurnServerConnection* conn, const TurnMessage* msg, - const std::string& key); + const std::string& key) RTC_RUN_ON(thread_); - bool GetKey(const StunMessage* msg, std::string* key); + bool GetKey(const StunMessage* msg, std::string* key) RTC_RUN_ON(thread_); bool CheckAuthorization(TurnServerConnection* conn, - const StunMessage* msg, + StunMessage* msg, const char* data, size_t size, - const std::string& key); - bool ValidateNonce(const std::string& nonce) const; + const std::string& key) RTC_RUN_ON(thread_); + bool ValidateNonce(const std::string& nonce) const RTC_RUN_ON(thread_); - TurnServerAllocation* FindAllocation(TurnServerConnection* conn); + TurnServerAllocation* FindAllocation(TurnServerConnection* conn) + RTC_RUN_ON(thread_); TurnServerAllocation* CreateAllocation(TurnServerConnection* conn, int proto, - const std::string& key); + const std::string& key) + RTC_RUN_ON(thread_); void SendErrorResponse(TurnServerConnection* conn, const 
StunMessage* req, @@ -297,55 +303,53 @@ class TurnServer : public sigslot::has_slots<> { void SendErrorResponseWithRealmAndNonce(TurnServerConnection* conn, const StunMessage* req, int code, - const std::string& reason); + const std::string& reason) + RTC_RUN_ON(thread_); void SendErrorResponseWithAlternateServer(TurnServerConnection* conn, const StunMessage* req, - const rtc::SocketAddress& addr); + const rtc::SocketAddress& addr) + RTC_RUN_ON(thread_); void SendStun(TurnServerConnection* conn, StunMessage* msg); void Send(TurnServerConnection* conn, const rtc::ByteBufferWriter& buf); - void OnAllocationDestroyed(TurnServerAllocation* allocation); - void DestroyInternalSocket(rtc::AsyncPacketSocket* socket); - - // Just clears |sockets_to_delete_|; called asynchronously. - void FreeSockets(); + void OnAllocationDestroyed(TurnServerAllocation* allocation) + RTC_RUN_ON(thread_); + void DestroyInternalSocket(rtc::AsyncPacketSocket* socket) + RTC_RUN_ON(thread_); typedef std::map InternalSocketMap; typedef std::map ServerSocketMap; - rtc::Thread* thread_; - rtc::ThreadChecker thread_checker_; - std::string nonce_key_; - std::string realm_; - std::string software_; - TurnAuthInterface* auth_hook_; - TurnRedirectInterface* redirect_hook_; + rtc::Thread* const thread_; + const std::string nonce_key_; + std::string realm_ RTC_GUARDED_BY(thread_); + std::string software_ RTC_GUARDED_BY(thread_); + TurnAuthInterface* auth_hook_ RTC_GUARDED_BY(thread_); + TurnRedirectInterface* redirect_hook_ RTC_GUARDED_BY(thread_); // otu - one-time-use. Server will respond with 438 if it's // sees the same nonce in next transaction. - bool enable_otu_nonce_; + bool enable_otu_nonce_ RTC_GUARDED_BY(thread_); bool reject_private_addresses_ = false; // Check for permission when receiving an external packet. bool enable_permission_checks_ = true; - InternalSocketMap server_sockets_; - ServerSocketMap server_listen_sockets_; - // Used when we need to delete a socket asynchronously. 
- std::vector> sockets_to_delete_; - std::unique_ptr external_socket_factory_; - rtc::SocketAddress external_addr_; + InternalSocketMap server_sockets_ RTC_GUARDED_BY(thread_); + ServerSocketMap server_listen_sockets_ RTC_GUARDED_BY(thread_); + std::unique_ptr external_socket_factory_ + RTC_GUARDED_BY(thread_); + rtc::SocketAddress external_addr_ RTC_GUARDED_BY(thread_); - AllocationMap allocations_; - - rtc::AsyncInvoker invoker_; + AllocationMap allocations_ RTC_GUARDED_BY(thread_); // For testing only. If this is non-zero, the next NONCE will be generated // from this value, and it will be reset to 0 after generating the NONCE. - int64_t ts_for_next_nonce_ = 0; + int64_t ts_for_next_nonce_ RTC_GUARDED_BY(thread_) = 0; // For testing only. Used to observe STUN messages received. - std::unique_ptr stun_message_observer_; + std::unique_ptr stun_message_observer_ + RTC_GUARDED_BY(thread_); friend class TurnServerAllocation; }; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc index bb640d949..7e1f970fa 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc @@ -900,8 +900,9 @@ void BasicPortAllocatorSession::AddAllocatedPort(Port* port, this, &BasicPortAllocatorSession::OnCandidateError); port->SignalPortComplete.connect(this, &BasicPortAllocatorSession::OnPortComplete); - port->SignalDestroyed.connect(this, - &BasicPortAllocatorSession::OnPortDestroyed); + port->SubscribePortDestroyed( + [this](PortInterface* port) { OnPortDestroyed(port); }); + port->SignalPortError.connect(this, &BasicPortAllocatorSession::OnPortError); RTC_LOG(LS_INFO) << port->ToString() << ": Added port to allocator"; @@ -1423,7 +1424,8 @@ void AllocationSequence::CreateUDPPorts() { // UDPPort. 
if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { udp_port_ = port.get(); - port->SignalDestroyed.connect(this, &AllocationSequence::OnPortDestroyed); + port->SubscribePortDestroyed( + [this](PortInterface* port) { OnPortDestroyed(port); }); // If STUN is not disabled, setting stun server address to port. if (!IsFlagSet(PORTALLOCATOR_DISABLE_STUN)) { @@ -1561,8 +1563,10 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config) { relay_ports_.push_back(port.get()); // Listen to the port destroyed signal, to allow AllocationSequence to - // remove entrt from it's map. - port->SignalDestroyed.connect(this, &AllocationSequence::OnPortDestroyed); + // remove the entry from it's map. + port->SubscribePortDestroyed( + [this](PortInterface* port) { OnPortDestroyed(port); }); + } else { port = session_->allocator()->relay_port_factory()->Create( args, session_->allocator()->min_port(), diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h index 26b181807..b27016a1d 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h @@ -106,9 +106,8 @@ enum class SessionState { // process will be started. }; -class RTC_EXPORT BasicPortAllocatorSession - : public PortAllocatorSession, - public rtc::MessageHandlerAutoCleanup { +class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession, + public rtc::MessageHandler { public: BasicPortAllocatorSession(BasicPortAllocator* allocator, const std::string& content_name, @@ -324,7 +323,7 @@ class TurnPort; // Performs the allocation of ports, in a sequenced (timed) manner, for a given // network and IP address. 
-class AllocationSequence : public rtc::MessageHandlerAutoCleanup, +class AllocationSequence : public rtc::MessageHandler, public sigslot::has_slots<> { public: enum State { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc index f37f24994..d85d5f27e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc @@ -20,11 +20,11 @@ #include "api/transport/stun.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_resolver_interface.h" -#include "rtc_base/bind.h" #include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/helpers.h" #include "rtc_base/logging.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" @@ -104,7 +104,7 @@ class StunProber::Requester : public sigslot::has_slots<> { int16_t num_request_sent_ = 0; int16_t num_response_received_ = 0; - rtc::ThreadChecker& thread_checker_; + webrtc::SequenceChecker& thread_checker_; RTC_DISALLOW_COPY_AND_ASSIGN(Requester); }; @@ -262,6 +262,7 @@ StunProber::StunProber(rtc::PacketSocketFactory* socket_factory, networks_(networks) {} StunProber::~StunProber() { + RTC_DCHECK(thread_checker_.IsCurrent()); for (auto* req : requesters_) { if (req) { delete req; @@ -358,9 +359,8 @@ void StunProber::OnServerResolved(rtc::AsyncResolverInterface* resolver) { // Deletion of AsyncResolverInterface can't be done in OnResolveResult which // handles SignalDone. 
- invoker_.AsyncInvoke( - RTC_FROM_HERE, thread_, - rtc::Bind(&rtc::AsyncResolverInterface::Destroy, resolver, false)); + thread_->PostTask( + webrtc::ToQueuedTask([resolver] { resolver->Destroy(false); })); servers_.pop_back(); if (servers_.size()) { @@ -453,13 +453,14 @@ int StunProber::get_wake_up_interval_ms() { } void StunProber::MaybeScheduleStunRequests() { - RTC_DCHECK(thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(thread_); int64_t now = rtc::TimeMillis(); if (Done()) { - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, thread_, - rtc::Bind(&StunProber::ReportOnFinished, this, SUCCESS), timeout_ms_); + thread_->PostDelayedTask( + webrtc::ToQueuedTask(task_safety_.flag(), + [this] { ReportOnFinished(SUCCESS); }), + timeout_ms_); return; } if (should_send_next_request(now)) { @@ -469,9 +470,9 @@ void StunProber::MaybeScheduleStunRequests() { } next_request_time_ms_ = now + interval_ms_; } - invoker_.AsyncInvokeDelayed( - RTC_FROM_HERE, thread_, - rtc::Bind(&StunProber::MaybeScheduleStunRequests, this), + thread_->PostDelayedTask( + webrtc::ToQueuedTask(task_safety_.flag(), + [this] { MaybeScheduleStunRequests(); }), get_wake_up_interval_ms()); } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h index a739a6c98..43d84ff80 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h @@ -15,16 +15,15 @@ #include #include -#include "rtc_base/async_invoker.h" +#include "api/sequence_checker.h" #include "rtc_base/byte_buffer.h" -#include "rtc_base/callback.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/ip_address.h" #include "rtc_base/network.h" #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" namespace rtc { class AsyncPacketSocket; @@ -40,7 
+39,7 @@ class StunProber; static const int kMaxUdpBufferSize = 1200; -typedef rtc::Callback2 AsyncCallback; +typedef std::function AsyncCallback; enum NatType { NATTYPE_INVALID, @@ -227,15 +226,13 @@ class RTC_EXPORT StunProber : public sigslot::has_slots<> { // The set of STUN probe sockets and their state. std::vector requesters_; - rtc::ThreadChecker thread_checker_; + webrtc::SequenceChecker thread_checker_; // Temporary storage for created sockets. std::vector sockets_; // This tracks how many of the sockets are ready. size_t total_ready_sockets_ = 0; - rtc::AsyncInvoker invoker_; - Observer* observer_ = nullptr; // TODO(guoweis): Remove this once all dependencies move away from // AsyncCallback. @@ -243,6 +240,8 @@ class RTC_EXPORT StunProber : public sigslot::has_slots<> { rtc::NetworkManager::NetworkList networks_; + webrtc::ScopedTaskSafety task_safety_; + RTC_DISALLOW_COPY_AND_ASSIGN(StunProber); }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc index 8ff685d8e..5f815c589 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc @@ -15,42 +15,43 @@ #include #include -#include "api/media_stream_proxy.h" #include "api/media_stream_track_proxy.h" +#include "api/sequence_checker.h" #include "pc/audio_track.h" -#include "pc/jitter_buffer_delay.h" -#include "pc/jitter_buffer_delay_proxy.h" -#include "pc/media_stream.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { AudioRtpReceiver::AudioRtpReceiver(rtc::Thread* worker_thread, std::string receiver_id, - std::vector stream_ids) + std::vector stream_ids, + bool is_unified_plan) : AudioRtpReceiver(worker_thread, receiver_id, - CreateStreamsFromIds(std::move(stream_ids))) {} + CreateStreamsFromIds(std::move(stream_ids)), + 
is_unified_plan) {} AudioRtpReceiver::AudioRtpReceiver( rtc::Thread* worker_thread, const std::string& receiver_id, - const std::vector>& streams) + const std::vector>& streams, + bool is_unified_plan) : worker_thread_(worker_thread), id_(receiver_id), - source_(new rtc::RefCountedObject(worker_thread)), + source_(rtc::make_ref_counted( + worker_thread, + is_unified_plan + ? RemoteAudioSource::OnAudioChannelGoneAction::kSurvive + : RemoteAudioSource::OnAudioChannelGoneAction::kEnd)), track_(AudioTrackProxyWithInternal::Create( rtc::Thread::Current(), AudioTrack::Create(receiver_id, source_))), cached_track_enabled_(track_->enabled()), attachment_id_(GenerateUniqueId()), - delay_(JitterBufferDelayProxy::Create( - rtc::Thread::Current(), - worker_thread_, - new rtc::RefCountedObject(worker_thread))) { + worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) { RTC_DCHECK(worker_thread_); RTC_DCHECK(track_->GetSource()->remote()); track_->RegisterObserver(this); @@ -59,139 +60,188 @@ AudioRtpReceiver::AudioRtpReceiver( } AudioRtpReceiver::~AudioRtpReceiver() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK(stopped_); + RTC_DCHECK(!media_channel_); + track_->GetSource()->UnregisterAudioObserver(this); track_->UnregisterObserver(this); - Stop(); } void AudioRtpReceiver::OnChanged() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); if (cached_track_enabled_ != track_->enabled()) { cached_track_enabled_ = track_->enabled(); - Reconfigure(); + worker_thread_->PostTask(ToQueuedTask( + worker_thread_safety_, + [this, enabled = cached_track_enabled_, volume = cached_volume_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + Reconfigure(enabled, volume); + })); } } -bool AudioRtpReceiver::SetOutputVolume(double volume) { +// RTC_RUN_ON(worker_thread_) +void AudioRtpReceiver::SetOutputVolume_w(double volume) { RTC_DCHECK_GE(volume, 0.0); RTC_DCHECK_LE(volume, 10.0); - RTC_DCHECK(media_channel_); - RTC_DCHECK(!stopped_); - return 
worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return ssrc_ ? media_channel_->SetOutputVolume(*ssrc_, volume) - : media_channel_->SetDefaultOutputVolume(volume); - }); + ssrc_ ? media_channel_->SetOutputVolume(*ssrc_, volume) + : media_channel_->SetDefaultOutputVolume(volume); } void AudioRtpReceiver::OnSetVolume(double volume) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK_GE(volume, 0); RTC_DCHECK_LE(volume, 10); - cached_volume_ = volume; - if (!media_channel_ || stopped_) { - RTC_LOG(LS_ERROR) - << "AudioRtpReceiver::OnSetVolume: No audio channel exists."; + if (stopped_) return; - } + + cached_volume_ = volume; + // When the track is disabled, the volume of the source, which is the // corresponding WebRtc Voice Engine channel will be 0. So we do not allow // setting the volume to the source when the track is disabled. - if (!stopped_ && track_->enabled()) { - if (!SetOutputVolume(cached_volume_)) { - RTC_NOTREACHED(); - } + if (track_->enabled()) { + worker_thread_->PostTask( + ToQueuedTask(worker_thread_safety_, [this, volume = cached_volume_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + SetOutputVolume_w(volume); + })); } } +rtc::scoped_refptr AudioRtpReceiver::dtls_transport() + const { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + return dtls_transport_; +} + std::vector AudioRtpReceiver::stream_ids() const { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); std::vector stream_ids(streams_.size()); for (size_t i = 0; i < streams_.size(); ++i) stream_ids[i] = streams_[i]->id(); return stream_ids; } +std::vector> +AudioRtpReceiver::streams() const { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + return streams_; +} + RtpParameters AudioRtpReceiver::GetParameters() const { - if (!media_channel_ || stopped_) { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) return RtpParameters(); - } - return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return ssrc_ ? 
media_channel_->GetRtpReceiveParameters(*ssrc_) - : media_channel_->GetDefaultRtpReceiveParameters(); - }); + return ssrc_ ? media_channel_->GetRtpReceiveParameters(*ssrc_) + : media_channel_->GetDefaultRtpReceiveParameters(); } void AudioRtpReceiver::SetFrameDecryptor( rtc::scoped_refptr frame_decryptor) { + RTC_DCHECK_RUN_ON(worker_thread_); frame_decryptor_ = std::move(frame_decryptor); // Special Case: Set the frame decryptor to any value on any existing channel. - if (media_channel_ && ssrc_.has_value() && !stopped_) { - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); - }); + if (media_channel_ && ssrc_) { + media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); } } rtc::scoped_refptr AudioRtpReceiver::GetFrameDecryptor() const { + RTC_DCHECK_RUN_ON(worker_thread_); return frame_decryptor_; } void AudioRtpReceiver::Stop() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); // TODO(deadbeef): Need to do more here to fully stop receiving packets. - if (stopped_) { - return; + if (!stopped_) { + source_->SetState(MediaSourceInterface::kEnded); + stopped_ = true; } - if (media_channel_) { - // Allow that SetOutputVolume fail. This is the normal case when the - // underlying media channel has already been deleted. 
- SetOutputVolume(0.0); - } - stopped_ = true; + + worker_thread_->Invoke(RTC_FROM_HERE, [&]() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (media_channel_) + SetOutputVolume_w(0.0); + SetMediaChannel_w(nullptr); + }); } void AudioRtpReceiver::StopAndEndTrack() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); Stop(); track_->internal()->set_ended(); } void AudioRtpReceiver::RestartMediaChannel(absl::optional ssrc) { - RTC_DCHECK(media_channel_); - if (!stopped_ && ssrc_ == ssrc) { - return; - } + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + bool ok = worker_thread_->Invoke( + RTC_FROM_HERE, [&, enabled = cached_track_enabled_, + volume = cached_volume_, was_stopped = stopped_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + RTC_DCHECK(was_stopped); + return false; // Can't restart. + } + + if (!was_stopped && ssrc_ == ssrc) { + // Already running with that ssrc. + RTC_DCHECK(worker_thread_safety_->alive()); + return true; + } + + if (!was_stopped) { + source_->Stop(media_channel_, ssrc_); + } + + ssrc_ = std::move(ssrc); + source_->Start(media_channel_, ssrc_); + if (ssrc_) { + media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); + } + + Reconfigure(enabled, volume); + return true; + }); + + if (!ok) + return; - if (!stopped_) { - source_->Stop(media_channel_, ssrc_); - delay_->OnStop(); - } - ssrc_ = ssrc; stopped_ = false; - source_->Start(media_channel_, ssrc); - delay_->OnStart(media_channel_, ssrc.value_or(0)); - Reconfigure(); } void AudioRtpReceiver::SetupMediaChannel(uint32_t ssrc) { - if (!media_channel_) { - RTC_LOG(LS_ERROR) - << "AudioRtpReceiver::SetupMediaChannel: No audio channel exists."; - return; - } + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RestartMediaChannel(ssrc); } void AudioRtpReceiver::SetupUnsignaledMediaChannel() { - if (!media_channel_) { - RTC_LOG(LS_ERROR) << "AudioRtpReceiver::SetupUnsignaledMediaChannel: No " - "audio channel exists."; - } + 
RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RestartMediaChannel(absl::nullopt); } +uint32_t AudioRtpReceiver::ssrc() const { + RTC_DCHECK_RUN_ON(worker_thread_); + return ssrc_.value_or(0); +} + void AudioRtpReceiver::set_stream_ids(std::vector stream_ids) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); SetStreams(CreateStreamsFromIds(std::move(stream_ids))); } +void AudioRtpReceiver::set_transport( + rtc::scoped_refptr dtls_transport) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + dtls_transport_ = std::move(dtls_transport); +} + void AudioRtpReceiver::SetStreams( const std::vector>& streams) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); // Remove remote track from any streams that are going away. for (const auto& existing_stream : streams_) { bool removed = true; @@ -224,51 +274,42 @@ void AudioRtpReceiver::SetStreams( } std::vector AudioRtpReceiver::GetSources() const { - if (!media_channel_ || !ssrc_ || stopped_) { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_ || !ssrc_) { return {}; } - return worker_thread_->Invoke>( - RTC_FROM_HERE, [&] { return media_channel_->GetSources(*ssrc_); }); + return media_channel_->GetSources(*ssrc_); } void AudioRtpReceiver::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { - worker_thread_->Invoke( - RTC_FROM_HERE, [this, frame_transformer = std::move(frame_transformer)] { - RTC_DCHECK_RUN_ON(worker_thread_); - frame_transformer_ = frame_transformer; - if (media_channel_ && ssrc_.has_value() && !stopped_) { - media_channel_->SetDepacketizerToDecoderFrameTransformer( - *ssrc_, frame_transformer); - } - }); + RTC_DCHECK_RUN_ON(worker_thread_); + if (media_channel_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer(ssrc_.value_or(0), + frame_transformer); + } + frame_transformer_ = std::move(frame_transformer); } -void AudioRtpReceiver::Reconfigure() { - if (!media_channel_ || stopped_) { - RTC_LOG(LS_ERROR) - << "AudioRtpReceiver::Reconfigure: No audio 
channel exists."; - return; - } - if (!SetOutputVolume(track_->enabled() ? cached_volume_ : 0)) { - RTC_NOTREACHED(); - } - // Reattach the frame decryptor if we were reconfigured. - MaybeAttachFrameDecryptorToMediaChannel( - ssrc_, worker_thread_, frame_decryptor_, media_channel_, stopped_); +// RTC_RUN_ON(worker_thread_) +void AudioRtpReceiver::Reconfigure(bool track_enabled, double volume) { + RTC_DCHECK(media_channel_); - if (media_channel_ && ssrc_.has_value() && !stopped_) { - worker_thread_->Invoke(RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(worker_thread_); - if (!frame_transformer_) - return; - media_channel_->SetDepacketizerToDecoderFrameTransformer( - *ssrc_, frame_transformer_); - }); + SetOutputVolume_w(track_enabled ? volume : 0); + + if (ssrc_ && frame_decryptor_) { + // Reattach the frame decryptor if we were reconfigured. + media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); + } + + if (frame_transformer_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + ssrc_.value_or(0), frame_transformer_); } } void AudioRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); observer_ = observer; // Deliver any notifications the observer may have missed by being set late. 
if (received_first_packet_ && observer_) { @@ -278,16 +319,35 @@ void AudioRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) { void AudioRtpReceiver::SetJitterBufferMinimumDelay( absl::optional delay_seconds) { - delay_->Set(delay_seconds); + RTC_DCHECK_RUN_ON(worker_thread_); + delay_.Set(delay_seconds); + if (media_channel_ && ssrc_) + media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); } void AudioRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); + + if (stopped_ && !media_channel) + return; + + worker_thread_->Invoke(RTC_FROM_HERE, [&] { + RTC_DCHECK_RUN_ON(worker_thread_); + SetMediaChannel_w(media_channel); + }); +} + +// RTC_RUN_ON(worker_thread_) +void AudioRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { + media_channel ? worker_thread_safety_->SetAlive() + : worker_thread_safety_->SetNotAlive(); media_channel_ = static_cast(media_channel); } void AudioRtpReceiver::NotifyFirstPacketReceived() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); if (observer_) { observer_->OnFirstPacketReceived(media_type()); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h index f4b821068..7f2e55712 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h @@ -18,33 +18,43 @@ #include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" #include "api/media_stream_track_proxy.h" #include "api/media_types.h" #include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include 
"api/transport/rtp/rtp_source.h" #include "media/base/media_channel.h" #include "pc/audio_track.h" -#include "pc/jitter_buffer_delay_interface.h" +#include "pc/jitter_buffer_delay.h" #include "pc/remote_audio_source.h" #include "pc/rtp_receiver.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { class AudioRtpReceiver : public ObserverInterface, public AudioSourceInterface::AudioObserver, - public rtc::RefCountedObject { + public RtpReceiverInternal { public: AudioRtpReceiver(rtc::Thread* worker_thread, std::string receiver_id, - std::vector stream_ids); + std::vector stream_ids, + bool is_unified_plan); // TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed. AudioRtpReceiver( rtc::Thread* worker_thread, const std::string& receiver_id, - const std::vector>& streams); + const std::vector>& streams, + bool is_unified_plan); virtual ~AudioRtpReceiver(); // ObserverInterface implementation @@ -53,22 +63,16 @@ class AudioRtpReceiver : public ObserverInterface, // AudioSourceInterface::AudioObserver implementation void OnSetVolume(double volume) override; - rtc::scoped_refptr audio_track() const { - return track_.get(); - } + rtc::scoped_refptr audio_track() const { return track_; } // RtpReceiverInterface implementation rtc::scoped_refptr track() const override { - return track_.get(); - } - rtc::scoped_refptr dtls_transport() const override { - return dtls_transport_; + return track_; } + rtc::scoped_refptr dtls_transport() const override; std::vector stream_ids() const override; std::vector> streams() - const override { - return streams_; - } + const override; cricket::MediaType media_type() const override { return cricket::MEDIA_TYPE_AUDIO; @@ -89,13 +93,11 @@ class AudioRtpReceiver : public ObserverInterface, void StopAndEndTrack() override; void 
SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; - uint32_t ssrc() const override { return ssrc_.value_or(0); } + uint32_t ssrc() const override; void NotifyFirstPacketReceived() override; void set_stream_ids(std::vector stream_ids) override; void set_transport( - rtc::scoped_refptr dtls_transport) override { - dtls_transport_ = dtls_transport; - } + rtc::scoped_refptr dtls_transport) override; void SetStreams(const std::vector>& streams) override; void SetObserver(RtpReceiverObserverInterface* observer) override; @@ -113,29 +115,40 @@ class AudioRtpReceiver : public ObserverInterface, private: void RestartMediaChannel(absl::optional ssrc); - void Reconfigure(); - bool SetOutputVolume(double volume); + void Reconfigure(bool track_enabled, double volume) + RTC_RUN_ON(worker_thread_); + void SetOutputVolume_w(double volume) RTC_RUN_ON(worker_thread_); + void SetMediaChannel_w(cricket::MediaChannel* media_channel) + RTC_RUN_ON(worker_thread_); + RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; rtc::Thread* const worker_thread_; const std::string id_; const rtc::scoped_refptr source_; const rtc::scoped_refptr> track_; - cricket::VoiceMediaChannel* media_channel_ = nullptr; - absl::optional ssrc_; - std::vector> streams_; - bool cached_track_enabled_; - double cached_volume_ = 1; - bool stopped_ = true; - RtpReceiverObserverInterface* observer_ = nullptr; - bool received_first_packet_ = false; - int attachment_id_ = 0; - rtc::scoped_refptr frame_decryptor_; - rtc::scoped_refptr dtls_transport_; - // Allows to thread safely change playout delay. 
Handles caching cases if + cricket::VoiceMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = + nullptr; + absl::optional ssrc_ RTC_GUARDED_BY(worker_thread_); + std::vector> streams_ + RTC_GUARDED_BY(&signaling_thread_checker_); + bool cached_track_enabled_ RTC_GUARDED_BY(&signaling_thread_checker_); + double cached_volume_ RTC_GUARDED_BY(&signaling_thread_checker_) = 1.0; + bool stopped_ RTC_GUARDED_BY(&signaling_thread_checker_) = true; + RtpReceiverObserverInterface* observer_ + RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; + bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = + false; + const int attachment_id_; + rtc::scoped_refptr frame_decryptor_ + RTC_GUARDED_BY(worker_thread_); + rtc::scoped_refptr dtls_transport_ + RTC_GUARDED_BY(&signaling_thread_checker_); + // Stores and updates the playout delay. Handles caching cases if // |SetJitterBufferMinimumDelay| is called before start. - rtc::scoped_refptr delay_; + JitterBufferDelay delay_ RTC_GUARDED_BY(worker_thread_); rtc::scoped_refptr frame_transformer_ RTC_GUARDED_BY(worker_thread_); + const rtc::scoped_refptr worker_thread_safety_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc b/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc index 4f4c6b475..191d4efbc 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc @@ -19,7 +19,7 @@ namespace webrtc { rtc::scoped_refptr AudioTrack::Create( const std::string& id, const rtc::scoped_refptr& source) { - return new rtc::RefCountedObject(id, source); + return rtc::make_ref_counted(id, source); } AudioTrack::AudioTrack(const std::string& label, diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_track.h b/TMessagesProj/jni/voip/webrtc/pc/audio_track.h index 8cff79e8b..07511a5c9 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_track.h +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_track.h @@ -16,7 +16,7 @@ #include 
"api/media_stream_interface.h" #include "api/media_stream_track.h" #include "api/scoped_refptr.h" -#include "rtc_base/thread_checker.h" +#include "api/sequence_checker.h" namespace webrtc { @@ -53,7 +53,7 @@ class AudioTrack : public MediaStreamTrack, private: const rtc::scoped_refptr audio_source_; - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel.cc b/TMessagesProj/jni/voip/webrtc/pc/channel.cc index 02ee9d249..db8d0e33d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/channel.cc @@ -10,40 +10,39 @@ #include "pc/channel.h" +#include +#include #include +#include #include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" -#include "api/call/audio_sink.h" -#include "media/base/media_constants.h" +#include "absl/strings/string_view.h" +#include "api/rtp_parameters.h" +#include "api/sequence_checker.h" +#include "api/task_queue/queued_task.h" +#include "media/base/codec.h" +#include "media/base/rid_description.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "p2p/base/packet_transport_internal.h" -#include "pc/channel_manager.h" #include "pc/rtp_media_utils.h" -#include "rtc_base/bind.h" -#include "rtc_base/byte_order.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/dscp.h" #include "rtc_base/logging.h" #include "rtc_base/network_route.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/trace_event.h" namespace cricket { -using rtc::Bind; -using rtc::UniqueRandomIdGenerator; -using webrtc::SdpType; - namespace { -struct SendPacketMessageData : public rtc::MessageData { - 
rtc::CopyOnWriteBuffer packet; - rtc::PacketOptions options; -}; +using ::rtc::UniqueRandomIdGenerator; +using ::webrtc::PendingTaskSafetyFlag; +using ::webrtc::SdpType; +using ::webrtc::ToQueuedTask; // Finds a stream based on target's Primary SSRC or RIDs. // This struct is used in BaseChannel::UpdateLocalStreams_w. @@ -80,14 +79,6 @@ struct StreamFinder { } // namespace -enum { - MSG_SEND_RTP_PACKET = 1, - MSG_SEND_RTCP_PACKET, - MSG_READYTOSENDDATA, - MSG_DATARECEIVED, - MSG_FIRSTPACKETRECEIVED, -}; - static void SafeSetError(const std::string& message, std::string* error_desc) { if (error_desc) { *error_desc = message; @@ -134,6 +125,7 @@ BaseChannel::BaseChannel(rtc::Thread* worker_thread, : worker_thread_(worker_thread), network_thread_(network_thread), signaling_thread_(signaling_thread), + alive_(PendingTaskSafetyFlag::Create()), content_name_(content_name), srtp_required_(srtp_required), crypto_options_(crypto_options), @@ -150,13 +142,10 @@ BaseChannel::~BaseChannel() { RTC_DCHECK_RUN_ON(worker_thread_); // Eats any outstanding messages or packets. - worker_thread_->Clear(&invoker_); - worker_thread_->Clear(this); - // We must destroy the media channel before the transport channel, otherwise - // the media channel may try to send on the dead transport channel. NULLing - // is not an effective strategy since the sends will come on another thread. - media_channel_.reset(); - RTC_LOG(LS_INFO) << "Destroyed channel: " << ToString(); + alive_->SetNotAlive(); + // The media channel is destroyed at the end of the destructor, since it + // is a std::unique_ptr. The transport channel (rtp_transport) must outlive + // the media channel. 
} std::string BaseChannel::ToString() const { @@ -171,7 +160,15 @@ std::string BaseChannel::ToString() const { bool BaseChannel::ConnectToRtpTransport() { RTC_DCHECK(rtp_transport_); - if (!RegisterRtpDemuxerSink()) { + RTC_DCHECK(media_channel()); + + // We don't need to call OnDemuxerCriteriaUpdatePending/Complete because + // there's no previous criteria to worry about. + bool result = rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this); + if (result) { + previous_demuxer_criteria_ = demuxer_criteria_; + } else { + previous_demuxer_criteria_ = {}; RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); return false; } @@ -188,6 +185,7 @@ bool BaseChannel::ConnectToRtpTransport() { void BaseChannel::DisconnectFromRtpTransport() { RTC_DCHECK(rtp_transport_); + RTC_DCHECK(media_channel()); rtp_transport_->UnregisterRtpDemuxerSink(this); rtp_transport_->SignalReadyToSend.disconnect(this); rtp_transport_->SignalNetworkRouteChanged.disconnect(this); @@ -196,45 +194,37 @@ void BaseChannel::DisconnectFromRtpTransport() { } void BaseChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(worker_thread()); - network_thread_->Invoke( - RTC_FROM_HERE, [this, rtp_transport] { SetRtpTransport(rtp_transport); }); - - // Both RTP and RTCP channels should be set, we can call SetInterface on - // the media channel and it can set network options. - media_channel_->SetInterface(this); + network_thread_->Invoke(RTC_FROM_HERE, [this, rtp_transport] { + SetRtpTransport(rtp_transport); + // Both RTP and RTCP channels should be set, we can call SetInterface on + // the media channel and it can set network options. 
+ media_channel_->SetInterface(this); + }); } void BaseChannel::Deinit() { RTC_DCHECK_RUN_ON(worker_thread()); - media_channel_->SetInterface(/*iface=*/nullptr); // Packets arrive on the network thread, processing packets calls virtual // functions, so need to stop this process in Deinit that is called in // derived classes destructor. network_thread_->Invoke(RTC_FROM_HERE, [&] { - FlushRtcpMessages_n(); + RTC_DCHECK_RUN_ON(network_thread()); + media_channel_->SetInterface(/*iface=*/nullptr); if (rtp_transport_) { DisconnectFromRtpTransport(); } - // Clear pending read packets/messages. - network_thread_->Clear(&invoker_); - network_thread_->Clear(this); }); } bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { + RTC_DCHECK_RUN_ON(network_thread()); if (rtp_transport == rtp_transport_) { return true; } - if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke(RTC_FROM_HERE, [this, rtp_transport] { - return SetRtpTransport(rtp_transport); - }); - } - if (rtp_transport_) { DisconnectFromRtpTransport(); } @@ -264,55 +254,59 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { return true; } -bool BaseChannel::Enable(bool enable) { - worker_thread_->Invoke( - RTC_FROM_HERE, - Bind(enable ? &BaseChannel::EnableMedia_w : &BaseChannel::DisableMedia_w, - this)); - return true; +void BaseChannel::Enable(bool enable) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + if (enable == enabled_s_) + return; + + enabled_s_ = enable; + + worker_thread_->PostTask(ToQueuedTask(alive_, [this, enable] { + RTC_DCHECK_RUN_ON(worker_thread()); + // Sanity check to make sure that enabled_ and enabled_s_ + // stay in sync. 
+ RTC_DCHECK_NE(enabled_, enable); + if (enable) { + EnableMedia_w(); + } else { + DisableMedia_w(); + } + })); } bool BaseChannel::SetLocalContent(const MediaContentDescription* content, SdpType type, std::string* error_desc) { + RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetLocalContent"); - return InvokeOnWorker( - RTC_FROM_HERE, - Bind(&BaseChannel::SetLocalContent_w, this, content, type, error_desc)); + return SetLocalContent_w(content, type, error_desc); } bool BaseChannel::SetRemoteContent(const MediaContentDescription* content, SdpType type, std::string* error_desc) { + RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetRemoteContent"); - return InvokeOnWorker( - RTC_FROM_HERE, - Bind(&BaseChannel::SetRemoteContent_w, this, content, type, error_desc)); + return SetRemoteContent_w(content, type, error_desc); } bool BaseChannel::SetPayloadTypeDemuxingEnabled(bool enabled) { + RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetPayloadTypeDemuxingEnabled"); - return InvokeOnWorker( - RTC_FROM_HERE, - Bind(&BaseChannel::SetPayloadTypeDemuxingEnabled_w, this, enabled)); + return SetPayloadTypeDemuxingEnabled_w(enabled); } bool BaseChannel::IsReadyToReceiveMedia_w() const { // Receive data if we are enabled and have local content, - return enabled() && + return enabled_ && webrtc::RtpTransceiverDirectionHasRecv(local_content_direction_); } bool BaseChannel::IsReadyToSendMedia_w() const { - // Need to access some state updated on the network thread. - return network_thread_->Invoke( - RTC_FROM_HERE, Bind(&BaseChannel::IsReadyToSendMedia_n, this)); -} - -bool BaseChannel::IsReadyToSendMedia_n() const { // Send outgoing data if we are enabled, have local and remote content, // and we have had some form of connectivity. 
- return enabled() && + return enabled_ && webrtc::RtpTransceiverDirectionHasRecv(remote_content_direction_) && webrtc::RtpTransceiverDirectionHasSend(local_content_direction_) && was_ever_writable(); @@ -331,13 +325,6 @@ bool BaseChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet, int BaseChannel::SetOption(SocketType type, rtc::Socket::Option opt, int value) { - return network_thread_->Invoke( - RTC_FROM_HERE, Bind(&BaseChannel::SetOption_n, this, type, opt, value)); -} - -int BaseChannel::SetOption_n(SocketType type, - rtc::Socket::Option opt, - int value) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(rtp_transport_); switch (type) { @@ -364,7 +351,7 @@ void BaseChannel::OnWritableState(bool writable) { void BaseChannel::OnNetworkRouteChanged( absl::optional network_route) { - RTC_LOG(LS_INFO) << "Network route for " << ToString() << " was changed."; + RTC_LOG(LS_INFO) << "Network route changed for " << ToString(); RTC_DCHECK_RUN_ON(network_thread()); rtc::NetworkRoute new_route; @@ -375,31 +362,25 @@ void BaseChannel::OnNetworkRouteChanged( // use the same transport name and MediaChannel::OnNetworkRouteChanged cannot // work correctly. Intentionally leave it broken to simplify the code and // encourage the users to stop using non-muxing RTCP. - invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, [=] { - media_channel_->OnNetworkRouteChanged(transport_name_, new_route); - }); + media_channel_->OnNetworkRouteChanged(transport_name_, new_route); } -sigslot::signal1& BaseChannel::SignalFirstPacketReceived() { - RTC_DCHECK_RUN_ON(signaling_thread_); - return SignalFirstPacketReceived_; -} - -sigslot::signal1& BaseChannel::SignalSentPacket() { - // TODO(bugs.webrtc.org/11994): Uncomment this check once callers have been - // fixed to access this variable from the correct thread. 
- // RTC_DCHECK_RUN_ON(worker_thread_); - return SignalSentPacket_; +void BaseChannel::SetFirstPacketReceivedCallback( + std::function callback) { + RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(!on_first_packet_received_ || !callback); + on_first_packet_received_ = std::move(callback); } void BaseChannel::OnTransportReadyToSend(bool ready) { - invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, - [=] { media_channel_->OnReadyToSend(ready); }); + RTC_DCHECK_RUN_ON(network_thread()); + media_channel_->OnReadyToSend(ready); } bool BaseChannel::SendPacket(bool rtcp, rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) { + RTC_DCHECK_RUN_ON(network_thread()); // Until all the code is migrated to use RtpPacketType instead of bool. RtpPacketType packet_type = rtcp ? RtpPacketType::kRtcp : RtpPacketType::kRtp; // SendPacket gets called from MediaEngine, on a pacer or an encoder thread. @@ -409,15 +390,6 @@ bool BaseChannel::SendPacket(bool rtcp, // SRTP and the inner workings of the transport channels. // The only downside is that we can't return a proper failure code if // needed. Since UDP is unreliable anyway, this should be a non-issue. - if (!network_thread_->IsCurrent()) { - // Avoid a copy by transferring the ownership of the packet data. - int message_id = rtcp ? MSG_SEND_RTCP_PACKET : MSG_SEND_RTP_PACKET; - SendPacketMessageData* data = new SendPacketMessageData; - data->packet = std::move(*packet); - data->options = options; - network_thread_->Post(RTC_FROM_HERE, this, message_id, data); - return true; - } TRACE_EVENT0("webrtc", "BaseChannel::SendPacket"); @@ -465,16 +437,11 @@ bool BaseChannel::SendPacket(bool rtcp, } void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { - // Take packet time from the |parsed_packet|. 
- // RtpPacketReceived.arrival_time_ms = (timestamp_us + 500) / 1000; - int64_t packet_time_us = -1; - if (parsed_packet.arrival_time_ms() > 0) { - packet_time_us = parsed_packet.arrival_time_ms() * 1000; - } + RTC_DCHECK_RUN_ON(network_thread()); - if (!has_received_packet_) { - has_received_packet_ = true; - signaling_thread()->Post(RTC_FROM_HERE, this, MSG_FIRSTPACKETRECEIVED); + if (on_first_packet_received_) { + on_first_packet_received_(); + on_first_packet_received_ = nullptr; } if (!srtp_active() && srtp_required_) { @@ -495,40 +462,50 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { return; } - auto packet_buffer = parsed_packet.Buffer(); - - invoker_.AsyncInvoke( - RTC_FROM_HERE, worker_thread_, [this, packet_buffer, packet_time_us] { - RTC_DCHECK_RUN_ON(worker_thread()); - media_channel_->OnPacketReceived(packet_buffer, packet_time_us); - }); + webrtc::Timestamp packet_time = parsed_packet.arrival_time(); + media_channel_->OnPacketReceived( + parsed_packet.Buffer(), + packet_time.IsMinusInfinity() ? -1 : packet_time.us()); } void BaseChannel::UpdateRtpHeaderExtensionMap( const RtpHeaderExtensions& header_extensions) { - RTC_DCHECK(rtp_transport_); // Update the header extension map on network thread in case there is data // race. - // TODO(zhihuang): Add an rtc::ThreadChecker make sure to RtpTransport won't - // be accessed from different threads. // // NOTE: This doesn't take the BUNDLE case in account meaning the RTP header // extension maps are not merged when BUNDLE is enabled. This is fine because // the ID for MID should be consistent among all the RTP transports. 
network_thread_->Invoke(RTC_FROM_HERE, [this, &header_extensions] { + RTC_DCHECK_RUN_ON(network_thread()); rtp_transport_->UpdateRtpHeaderExtensionMap(header_extensions); }); } -bool BaseChannel::RegisterRtpDemuxerSink() { - RTC_DCHECK(rtp_transport_); - return network_thread_->Invoke(RTC_FROM_HERE, [this] { - return rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this); - }); +bool BaseChannel::RegisterRtpDemuxerSink_w() { + if (demuxer_criteria_ == previous_demuxer_criteria_) { + return true; + } + media_channel_->OnDemuxerCriteriaUpdatePending(); + // Copy demuxer criteria, since they're a worker-thread variable + // and we want to pass them to the network thread + return network_thread_->Invoke( + RTC_FROM_HERE, [this, demuxer_criteria = demuxer_criteria_] { + RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(rtp_transport_); + bool result = + rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria, this); + if (result) { + previous_demuxer_criteria_ = demuxer_criteria; + } else { + previous_demuxer_criteria_ = {}; + } + media_channel_->OnDemuxerCriteriaUpdateComplete(); + return result; + }); } void BaseChannel::EnableMedia_w() { - RTC_DCHECK(worker_thread_ == rtc::Thread::Current()); if (enabled_) return; @@ -538,7 +515,6 @@ void BaseChannel::EnableMedia_w() { } void BaseChannel::DisableMedia_w() { - RTC_DCHECK(worker_thread_ == rtc::Thread::Current()); if (!enabled_) return; @@ -557,46 +533,45 @@ void BaseChannel::UpdateWritableState_n() { } void BaseChannel::ChannelWritable_n() { - RTC_DCHECK_RUN_ON(network_thread()); if (writable_) { return; } - - RTC_LOG(LS_INFO) << "Channel writable (" << ToString() << ")" - << (was_ever_writable_ ? "" : " for the first time"); - - was_ever_writable_ = true; writable_ = true; - UpdateMediaSendRecvState(); + RTC_LOG(LS_INFO) << "Channel writable (" << ToString() << ")" + << (was_ever_writable_n_ ? "" : " for the first time"); + // We only have to do this PostTask once, when first transitioning to + // writable. 
+ if (!was_ever_writable_n_) { + worker_thread_->PostTask(ToQueuedTask(alive_, [this] { + RTC_DCHECK_RUN_ON(worker_thread()); + was_ever_writable_ = true; + UpdateMediaSendRecvState_w(); + })); + } + was_ever_writable_n_ = true; } void BaseChannel::ChannelNotWritable_n() { - RTC_DCHECK_RUN_ON(network_thread()); - if (!writable_) + if (!writable_) { return; - - RTC_LOG(LS_INFO) << "Channel not writable (" << ToString() << ")"; + } writable_ = false; - UpdateMediaSendRecvState(); + RTC_LOG(LS_INFO) << "Channel not writable (" << ToString() << ")"; } bool BaseChannel::AddRecvStream_w(const StreamParams& sp) { - RTC_DCHECK(worker_thread() == rtc::Thread::Current()); return media_channel()->AddRecvStream(sp); } bool BaseChannel::RemoveRecvStream_w(uint32_t ssrc) { - RTC_DCHECK(worker_thread() == rtc::Thread::Current()); return media_channel()->RemoveRecvStream(ssrc); } void BaseChannel::ResetUnsignaledRecvStream_w() { - RTC_DCHECK(worker_thread() == rtc::Thread::Current()); media_channel()->ResetUnsignaledRecvStream(); } bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { - RTC_DCHECK_RUN_ON(worker_thread()); if (enabled == payload_type_demuxing_enabled_) { return true; } @@ -609,7 +584,7 @@ bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { // there is no straightforward way to identify those streams. 
media_channel()->ResetUnsignaledRecvStream(); demuxer_criteria_.payload_types.clear(); - if (!RegisterRtpDemuxerSink()) { + if (!RegisterRtpDemuxerSink_w()) { RTC_LOG(LS_ERROR) << "Failed to disable payload type demuxing for " << ToString(); return false; @@ -617,7 +592,7 @@ bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { } else if (!payload_types_.empty()) { demuxer_criteria_.payload_types.insert(payload_types_.begin(), payload_types_.end()); - if (!RegisterRtpDemuxerSink()) { + if (!RegisterRtpDemuxerSink_w()) { RTC_LOG(LS_ERROR) << "Failed to enable payload type demuxing for " << ToString(); return false; @@ -765,7 +740,7 @@ bool BaseChannel::UpdateRemoteStreams_w( new_stream.ssrcs.end()); } // Re-register the sink to update the receiving ssrcs. - if (!RegisterRtpDemuxerSink()) { + if (!RegisterRtpDemuxerSink_w()) { RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); ret = false; } @@ -775,7 +750,6 @@ bool BaseChannel::UpdateRemoteStreams_w( RtpHeaderExtensions BaseChannel::GetFilteredRtpHeaderExtensions( const RtpHeaderExtensions& extensions) { - RTC_DCHECK(rtp_transport_); if (crypto_options_.srtp.enable_encrypted_rtp_header_extensions) { RtpHeaderExtensions filtered; absl::c_copy_if(extensions, std::back_inserter(filtered), @@ -788,27 +762,6 @@ RtpHeaderExtensions BaseChannel::GetFilteredRtpHeaderExtensions( return webrtc::RtpExtension::FilterDuplicateNonEncrypted(extensions); } -void BaseChannel::OnMessage(rtc::Message* pmsg) { - TRACE_EVENT0("webrtc", "BaseChannel::OnMessage"); - switch (pmsg->message_id) { - case MSG_SEND_RTP_PACKET: - case MSG_SEND_RTCP_PACKET: { - RTC_DCHECK_RUN_ON(network_thread()); - SendPacketMessageData* data = - static_cast(pmsg->pdata); - bool rtcp = pmsg->message_id == MSG_SEND_RTCP_PACKET; - SendPacket(rtcp, &data->packet, data->options); - delete data; - break; - } - case MSG_FIRSTPACKETRECEIVED: { - RTC_DCHECK_RUN_ON(signaling_thread_); - SignalFirstPacketReceived_(this); - break; - } - } -} - 
void BaseChannel::MaybeAddHandledPayloadType(int payload_type) { if (payload_type_demuxing_enabled_) { demuxer_criteria_.payload_types.insert(static_cast(payload_type)); @@ -823,25 +776,9 @@ void BaseChannel::ClearHandledPayloadTypes() { payload_types_.clear(); } -void BaseChannel::FlushRtcpMessages_n() { - // Flush all remaining RTCP messages. This should only be called in - // destructor. - RTC_DCHECK_RUN_ON(network_thread()); - rtc::MessageList rtcp_messages; - network_thread_->Clear(this, MSG_SEND_RTCP_PACKET, &rtcp_messages); - for (const auto& message : rtcp_messages) { - network_thread_->Send(RTC_FROM_HERE, this, MSG_SEND_RTCP_PACKET, - message.pdata); - } -} - void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) { RTC_DCHECK_RUN_ON(network_thread()); - invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, - [this, sent_packet] { - RTC_DCHECK_RUN_ON(worker_thread()); - SignalSentPacket()(sent_packet); - }); + media_channel()->OnPacketSent(sent_packet); } VoiceChannel::VoiceChannel(rtc::Thread* worker_thread, @@ -868,19 +805,10 @@ VoiceChannel::~VoiceChannel() { Deinit(); } -void BaseChannel::UpdateMediaSendRecvState() { - RTC_DCHECK_RUN_ON(network_thread()); - invoker_.AsyncInvoke(RTC_FROM_HERE, worker_thread_, - [this] { UpdateMediaSendRecvState_w(); }); -} - -void VoiceChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { - BaseChannel::Init_w(rtp_transport); -} - void VoiceChannel::UpdateMediaSendRecvState_w() { // Render incoming data if we're the active call, and we have the local // content. We receive data on the default channel and multiplexed streams. 
+ RTC_DCHECK_RUN_ON(worker_thread()); bool recv = IsReadyToReceiveMedia_w(); media_channel()->SetPlayout(recv); @@ -900,23 +828,19 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, RTC_DCHECK_RUN_ON(worker_thread()); RTC_LOG(LS_INFO) << "Setting local voice description for " << ToString(); - RTC_DCHECK(content); - if (!content) { - SafeSetError("Can't find audio content in local description.", error_desc); - return false; - } - - const AudioContentDescription* audio = content->as_audio(); - RtpHeaderExtensions rtp_header_extensions = - GetFilteredRtpHeaderExtensions(audio->rtp_header_extensions()); + GetFilteredRtpHeaderExtensions(content->rtp_header_extensions()); + // TODO(tommi): There's a hop to the network thread here. + // some of the below is also network thread related. UpdateRtpHeaderExtensionMap(rtp_header_extensions); - media_channel()->SetExtmapAllowMixed(audio->extmap_allow_mixed()); + media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); AudioRecvParameters recv_params = last_recv_params_; RtpParametersFromMediaDescription( - audio, rtp_header_extensions, - webrtc::RtpTransceiverDirectionHasRecv(audio->direction()), &recv_params); + content->as_audio(), rtp_header_extensions, + webrtc::RtpTransceiverDirectionHasRecv(content->direction()), + &recv_params); + if (!media_channel()->SetRecvParameters(recv_params)) { SafeSetError( "Failed to set local audio description recv parameters for m-section " @@ -926,12 +850,12 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, return false; } - if (webrtc::RtpTransceiverDirectionHasRecv(audio->direction())) { - for (const AudioCodec& codec : audio->codecs()) { + if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) { + for (const AudioCodec& codec : content->as_audio()->codecs()) { MaybeAddHandledPayloadType(codec.id); } // Need to re-register the sink to update the handled payload. 
- if (!RegisterRtpDemuxerSink()) { + if (!RegisterRtpDemuxerSink_w()) { RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing for " << ToString(); return false; } @@ -943,7 +867,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, // only give it to the media channel once we have a remote // description too (without a remote description, we won't be able // to send them anyway). - if (!UpdateLocalStreams_w(audio->streams(), type, error_desc)) { + if (!UpdateLocalStreams_w(content->as_audio()->streams(), type, error_desc)) { SafeSetError( "Failed to set local audio description streams for m-section with " "mid='" + @@ -964,12 +888,6 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, RTC_DCHECK_RUN_ON(worker_thread()); RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString(); - RTC_DCHECK(content); - if (!content) { - SafeSetError("Can't find audio content in remote description.", error_desc); - return false; - } - const AudioContentDescription* audio = content->as_audio(); RtpHeaderExtensions rtp_header_extensions = @@ -997,7 +915,7 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, "disable payload type demuxing for " << ToString(); ClearHandledPayloadTypes(); - if (!RegisterRtpDemuxerSink()) { + if (!RegisterRtpDemuxerSink_w()) { RTC_LOG(LS_ERROR) << "Failed to update audio demuxing for " << ToString(); return false; } @@ -1048,6 +966,7 @@ VideoChannel::~VideoChannel() { void VideoChannel::UpdateMediaSendRecvState_w() { // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. 
+ RTC_DCHECK_RUN_ON(worker_thread()); bool send = IsReadyToSendMedia_w(); if (!media_channel()->SetSend(send)) { RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel: " + ToString(); @@ -1059,8 +978,9 @@ void VideoChannel::UpdateMediaSendRecvState_w() { } void VideoChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) { - InvokeOnWorker(RTC_FROM_HERE, Bind(&VideoMediaChannel::FillBitrateInfo, - media_channel(), bwe_info)); + RTC_DCHECK_RUN_ON(worker_thread()); + VideoMediaChannel* mc = media_channel(); + mc->FillBitrateInfo(bwe_info); } bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, @@ -1070,23 +990,17 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, RTC_DCHECK_RUN_ON(worker_thread()); RTC_LOG(LS_INFO) << "Setting local video description for " << ToString(); - RTC_DCHECK(content); - if (!content) { - SafeSetError("Can't find video content in local description.", error_desc); - return false; - } - - const VideoContentDescription* video = content->as_video(); - RtpHeaderExtensions rtp_header_extensions = - GetFilteredRtpHeaderExtensions(video->rtp_header_extensions()); + GetFilteredRtpHeaderExtensions(content->rtp_header_extensions()); UpdateRtpHeaderExtensionMap(rtp_header_extensions); - media_channel()->SetExtmapAllowMixed(video->extmap_allow_mixed()); + media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); VideoRecvParameters recv_params = last_recv_params_; + RtpParametersFromMediaDescription( - video, rtp_header_extensions, - webrtc::RtpTransceiverDirectionHasRecv(video->direction()), &recv_params); + content->as_video(), rtp_header_extensions, + webrtc::RtpTransceiverDirectionHasRecv(content->direction()), + &recv_params); VideoSendParameters send_params = last_send_params_; @@ -1119,12 +1033,12 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, return false; } - if (webrtc::RtpTransceiverDirectionHasRecv(video->direction())) { - for (const 
VideoCodec& codec : video->codecs()) { + if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) { + for (const VideoCodec& codec : content->as_video()->codecs()) { MaybeAddHandledPayloadType(codec.id); } // Need to re-register the sink to update the handled payload. - if (!RegisterRtpDemuxerSink()) { + if (!RegisterRtpDemuxerSink_w()) { RTC_LOG(LS_ERROR) << "Failed to set up video demuxing for " << ToString(); return false; } @@ -1146,7 +1060,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, // only give it to the media channel once we have a remote // description too (without a remote description, we won't be able // to send them anyway). - if (!UpdateLocalStreams_w(video->streams(), type, error_desc)) { + if (!UpdateLocalStreams_w(content->as_video()->streams(), type, error_desc)) { SafeSetError( "Failed to set local video description streams for m-section with " "mid='" + @@ -1167,12 +1081,6 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, RTC_DCHECK_RUN_ON(worker_thread()); RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString(); - RTC_DCHECK(content); - if (!content) { - SafeSetError("Can't find video content in remote description.", error_desc); - return false; - } - const VideoContentDescription* video = content->as_video(); RtpHeaderExtensions rtp_header_extensions = @@ -1234,7 +1142,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, "disable payload type demuxing for " << ToString(); ClearHandledPayloadTypes(); - if (!RegisterRtpDemuxerSink()) { + if (!RegisterRtpDemuxerSink_w()) { RTC_LOG(LS_ERROR) << "Failed to update video demuxing for " << ToString(); return false; } @@ -1257,241 +1165,4 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, return true; } -RtpDataChannel::RtpDataChannel(rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr 
media_channel, - const std::string& content_name, - bool srtp_required, - webrtc::CryptoOptions crypto_options, - UniqueRandomIdGenerator* ssrc_generator) - : BaseChannel(worker_thread, - network_thread, - signaling_thread, - std::move(media_channel), - content_name, - srtp_required, - crypto_options, - ssrc_generator) {} - -RtpDataChannel::~RtpDataChannel() { - TRACE_EVENT0("webrtc", "RtpDataChannel::~RtpDataChannel"); - // this can't be done in the base class, since it calls a virtual - DisableMedia_w(); - Deinit(); -} - -void RtpDataChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { - BaseChannel::Init_w(rtp_transport); - media_channel()->SignalDataReceived.connect(this, - &RtpDataChannel::OnDataReceived); - media_channel()->SignalReadyToSend.connect( - this, &RtpDataChannel::OnDataChannelReadyToSend); -} - -bool RtpDataChannel::SendData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result) { - return InvokeOnWorker( - RTC_FROM_HERE, Bind(&DataMediaChannel::SendData, media_channel(), params, - payload, result)); -} - -bool RtpDataChannel::CheckDataChannelTypeFromContent( - const MediaContentDescription* content, - std::string* error_desc) { - if (!content->as_rtp_data()) { - if (content->as_sctp()) { - SafeSetError("Data channel type mismatch. 
Expected RTP, got SCTP.", - error_desc); - } else { - SafeSetError("Data channel is not RTP or SCTP.", error_desc); - } - return false; - } - return true; -} - -bool RtpDataChannel::SetLocalContent_w(const MediaContentDescription* content, - SdpType type, - std::string* error_desc) { - TRACE_EVENT0("webrtc", "RtpDataChannel::SetLocalContent_w"); - RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting local data description for " << ToString(); - - RTC_DCHECK(content); - if (!content) { - SafeSetError("Can't find data content in local description.", error_desc); - return false; - } - - if (!CheckDataChannelTypeFromContent(content, error_desc)) { - return false; - } - const RtpDataContentDescription* data = content->as_rtp_data(); - - RtpHeaderExtensions rtp_header_extensions = - GetFilteredRtpHeaderExtensions(data->rtp_header_extensions()); - - DataRecvParameters recv_params = last_recv_params_; - RtpParametersFromMediaDescription( - data, rtp_header_extensions, - webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &recv_params); - if (!media_channel()->SetRecvParameters(recv_params)) { - SafeSetError( - "Failed to set remote data description recv parameters for m-section " - "with mid='" + - content_name() + "'.", - error_desc); - return false; - } - for (const DataCodec& codec : data->codecs()) { - MaybeAddHandledPayloadType(codec.id); - } - // Need to re-register the sink to update the handled payload. - if (!RegisterRtpDemuxerSink()) { - RTC_LOG(LS_ERROR) << "Failed to set up data demuxing for " << ToString(); - return false; - } - - last_recv_params_ = recv_params; - - // TODO(pthatcher): Move local streams into DataSendParameters, and - // only give it to the media channel once we have a remote - // description too (without a remote description, we won't be able - // to send them anyway). 
- if (!UpdateLocalStreams_w(data->streams(), type, error_desc)) { - SafeSetError( - "Failed to set local data description streams for m-section with " - "mid='" + - content_name() + "'.", - error_desc); - return false; - } - - set_local_content_direction(content->direction()); - UpdateMediaSendRecvState_w(); - return true; -} - -bool RtpDataChannel::SetRemoteContent_w(const MediaContentDescription* content, - SdpType type, - std::string* error_desc) { - TRACE_EVENT0("webrtc", "RtpDataChannel::SetRemoteContent_w"); - RTC_DCHECK_RUN_ON(worker_thread()); - RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString(); - - RTC_DCHECK(content); - if (!content) { - SafeSetError("Can't find data content in remote description.", error_desc); - return false; - } - - if (!CheckDataChannelTypeFromContent(content, error_desc)) { - return false; - } - - const RtpDataContentDescription* data = content->as_rtp_data(); - - // If the remote data doesn't have codecs, it must be empty, so ignore it. - if (!data->has_codecs()) { - return true; - } - - RtpHeaderExtensions rtp_header_extensions = - GetFilteredRtpHeaderExtensions(data->rtp_header_extensions()); - - RTC_LOG(LS_INFO) << "Setting remote data description for " << ToString(); - DataSendParameters send_params = last_send_params_; - RtpSendParametersFromMediaDescription( - data, rtp_header_extensions, - webrtc::RtpTransceiverDirectionHasRecv(data->direction()), &send_params); - if (!media_channel()->SetSendParameters(send_params)) { - SafeSetError( - "Failed to set remote data description send parameters for m-section " - "with mid='" + - content_name() + "'.", - error_desc); - return false; - } - last_send_params_ = send_params; - - // TODO(pthatcher): Move remote streams into DataRecvParameters, - // and only give it to the media channel once we have a local - // description too (without a local description, we won't be able to - // recv them anyway). 
- if (!UpdateRemoteStreams_w(data->streams(), type, error_desc)) { - SafeSetError( - "Failed to set remote data description streams for m-section with " - "mid='" + - content_name() + "'.", - error_desc); - return false; - } - - set_remote_content_direction(content->direction()); - UpdateMediaSendRecvState_w(); - return true; -} - -void RtpDataChannel::UpdateMediaSendRecvState_w() { - // Render incoming data if we're the active call, and we have the local - // content. We receive data on the default channel and multiplexed streams. - bool recv = IsReadyToReceiveMedia_w(); - if (!media_channel()->SetReceive(recv)) { - RTC_LOG(LS_ERROR) << "Failed to SetReceive on data channel: " << ToString(); - } - - // Send outgoing data if we're the active call, we have the remote content, - // and we have had some form of connectivity. - bool send = IsReadyToSendMedia_w(); - if (!media_channel()->SetSend(send)) { - RTC_LOG(LS_ERROR) << "Failed to SetSend on data channel: " << ToString(); - } - - // Trigger SignalReadyToSendData asynchronously. 
- OnDataChannelReadyToSend(send); - - RTC_LOG(LS_INFO) << "Changing data state, recv=" << recv << " send=" << send - << " for " << ToString(); -} - -void RtpDataChannel::OnMessage(rtc::Message* pmsg) { - switch (pmsg->message_id) { - case MSG_READYTOSENDDATA: { - DataChannelReadyToSendMessageData* data = - static_cast(pmsg->pdata); - ready_to_send_data_ = data->data(); - SignalReadyToSendData(ready_to_send_data_); - delete data; - break; - } - case MSG_DATARECEIVED: { - DataReceivedMessageData* data = - static_cast(pmsg->pdata); - SignalDataReceived(data->params, data->payload); - delete data; - break; - } - default: - BaseChannel::OnMessage(pmsg); - break; - } -} - -void RtpDataChannel::OnDataReceived(const ReceiveDataParams& params, - const char* data, - size_t len) { - DataReceivedMessageData* msg = new DataReceivedMessageData(params, data, len); - signaling_thread()->Post(RTC_FROM_HERE, this, MSG_DATARECEIVED, msg); -} - -void RtpDataChannel::OnDataChannelReadyToSend(bool writable) { - // This is usded for congestion control to indicate that the stream is ready - // to send by the MediaChannel, as opposed to OnReadyToSend, which indicates - // that the transport channel is ready. 
- signaling_thread()->Post(RTC_FROM_HERE, this, MSG_READYTOSENDDATA, - new DataChannelReadyToSendMessageData(writable)); -} - } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel.h b/TMessagesProj/jni/voip/webrtc/pc/channel.h index 51cc40fc5..76f65cc82 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel.h @@ -11,6 +11,9 @@ #ifndef PC_CHANNEL_H_ #define PC_CHANNEL_H_ +#include +#include + #include #include #include @@ -18,30 +21,48 @@ #include #include +#include "absl/types/optional.h" #include "api/call/audio_sink.h" +#include "api/crypto/crypto_options.h" #include "api/function_view.h" #include "api/jsep.h" +#include "api/media_types.h" #include "api/rtp_receiver_interface.h" +#include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" +#include "call/rtp_demuxer.h" #include "call/rtp_packet_sink_interface.h" #include "media/base/media_channel.h" #include "media/base/media_engine.h" #include "media/base/stream_params.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "p2p/base/dtls_transport_internal.h" #include "p2p/base/packet_transport_internal.h" #include "pc/channel_interface.h" #include "pc/dtls_srtp_transport.h" #include "pc/media_session.h" #include "pc/rtp_transport.h" +#include "pc/rtp_transport_internal.h" +#include "pc/session_description.h" #include "pc/srtp_filter.h" #include "pc/srtp_transport.h" -#include "rtc_base/async_invoker.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/async_udp_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/location.h" #include "rtc_base/network.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" +#include 
"rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" +#include "rtc_base/thread_message.h" #include "rtc_base/unique_id_generator.h" namespace webrtc { @@ -71,8 +92,10 @@ struct CryptoParams; // NetworkInterface. class BaseChannel : public ChannelInterface, - public rtc::MessageHandlerAutoCleanup, + // TODO(tommi): Remove has_slots inheritance. public sigslot::has_slots<>, + // TODO(tommi): Consider implementing these interfaces + // via composition. public MediaChannel::NetworkInterface, public webrtc::RtpPacketSinkInterface { public: @@ -101,23 +124,30 @@ class BaseChannel : public ChannelInterface, rtc::Thread* network_thread() const { return network_thread_; } const std::string& content_name() const override { return content_name_; } // TODO(deadbeef): This is redundant; remove this. - const std::string& transport_name() const override { return transport_name_; } - bool enabled() const override { return enabled_; } + const std::string& transport_name() const override { + RTC_DCHECK_RUN_ON(network_thread()); + if (rtp_transport_) + return rtp_transport_->transport_name(); + // TODO(tommi): Delete this variable. + return transport_name_; + } // This function returns true if using SRTP (DTLS-based keying or SDES). bool srtp_active() const { + RTC_DCHECK_RUN_ON(network_thread()); return rtp_transport_ && rtp_transport_->IsSrtpActive(); } - bool writable() const { return writable_; } - // Set an RTP level transport which could be an RtpTransport without // encryption, an SrtpTransport for SDES or a DtlsSrtpTransport for DTLS-SRTP. // This can be called from any thread and it hops to the network thread // internally. It would replace the |SetTransports| and its variants. 
bool SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) override; - webrtc::RtpTransportInternal* rtp_transport() const { return rtp_transport_; } + webrtc::RtpTransportInternal* rtp_transport() const { + RTC_DCHECK_RUN_ON(network_thread()); + return rtp_transport_; + } // Channel control bool SetLocalContent(const MediaContentDescription* content, @@ -136,7 +166,7 @@ class BaseChannel : public ChannelInterface, // actually belong to a new channel. See: crbug.com/webrtc/11477 bool SetPayloadTypeDemuxingEnabled(bool enabled) override; - bool Enable(bool enable) override; + void Enable(bool enable) override; const std::vector& local_streams() const override { return local_streams_; @@ -146,35 +176,32 @@ class BaseChannel : public ChannelInterface, } // Used for latency measurements. - sigslot::signal1& SignalFirstPacketReceived() override; - - // Forward SignalSentPacket to worker thread. - sigslot::signal1& SignalSentPacket(); + void SetFirstPacketReceivedCallback(std::function callback) override; // From RtpTransport - public for testing only void OnTransportReadyToSend(bool ready); // Only public for unit tests. Otherwise, consider protected. int SetOption(SocketType type, rtc::Socket::Option o, int val) override; - int SetOption_n(SocketType type, rtc::Socket::Option o, int val); // RtpPacketSinkInterface overrides. void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override; - // Used by the RTCStatsCollector tests to set the transport name without - // creating RtpTransports. 
- void set_transport_name_for_testing(const std::string& transport_name) { - transport_name_ = transport_name; + MediaChannel* media_channel() const override { + return media_channel_.get(); } - MediaChannel* media_channel() const override { return media_channel_.get(); } - protected: - bool was_ever_writable() const { return was_ever_writable_; } + bool was_ever_writable() const { + RTC_DCHECK_RUN_ON(worker_thread()); + return was_ever_writable_; + } void set_local_content_direction(webrtc::RtpTransceiverDirection direction) { + RTC_DCHECK_RUN_ON(worker_thread()); local_content_direction_ = direction; } void set_remote_content_direction(webrtc::RtpTransceiverDirection direction) { + RTC_DCHECK_RUN_ON(worker_thread()); remote_content_direction_ = direction; } // These methods verify that: @@ -187,11 +214,9 @@ class BaseChannel : public ChannelInterface, // // When any of these properties change, UpdateMediaSendRecvState_w should be // called. - bool IsReadyToReceiveMedia_w() const; - bool IsReadyToSendMedia_w() const; - rtc::Thread* signaling_thread() { return signaling_thread_; } - - void FlushRtcpMessages_n(); + bool IsReadyToReceiveMedia_w() const RTC_RUN_ON(worker_thread()); + bool IsReadyToSendMedia_w() const RTC_RUN_ON(worker_thread()); + rtc::Thread* signaling_thread() const { return signaling_thread_; } // NetworkInterface implementation, called by MediaEngine bool SendPacket(rtc::CopyOnWriteBuffer* packet, @@ -204,64 +229,55 @@ class BaseChannel : public ChannelInterface, void OnNetworkRouteChanged(absl::optional network_route); - bool PacketIsRtcp(const rtc::PacketTransportInternal* transport, - const char* data, - size_t len); bool SendPacket(bool rtcp, rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options); - void EnableMedia_w(); - void DisableMedia_w(); + void EnableMedia_w() RTC_RUN_ON(worker_thread()); + void DisableMedia_w() RTC_RUN_ON(worker_thread()); // Performs actions if the RTP/RTCP writable state changed. 
This should // be called whenever a channel's writable state changes or when RTCP muxing // becomes active/inactive. - void UpdateWritableState_n(); - void ChannelWritable_n(); - void ChannelNotWritable_n(); + void UpdateWritableState_n() RTC_RUN_ON(network_thread()); + void ChannelWritable_n() RTC_RUN_ON(network_thread()); + void ChannelNotWritable_n() RTC_RUN_ON(network_thread()); - bool AddRecvStream_w(const StreamParams& sp); - bool RemoveRecvStream_w(uint32_t ssrc); - void ResetUnsignaledRecvStream_w(); - bool SetPayloadTypeDemuxingEnabled_w(bool enabled); - bool AddSendStream_w(const StreamParams& sp); - bool RemoveSendStream_w(uint32_t ssrc); + bool AddRecvStream_w(const StreamParams& sp) RTC_RUN_ON(worker_thread()); + bool RemoveRecvStream_w(uint32_t ssrc) RTC_RUN_ON(worker_thread()); + void ResetUnsignaledRecvStream_w() RTC_RUN_ON(worker_thread()); + bool SetPayloadTypeDemuxingEnabled_w(bool enabled) + RTC_RUN_ON(worker_thread()); + bool AddSendStream_w(const StreamParams& sp) RTC_RUN_ON(worker_thread()); + bool RemoveSendStream_w(uint32_t ssrc) RTC_RUN_ON(worker_thread()); // Should be called whenever the conditions for // IsReadyToReceiveMedia/IsReadyToSendMedia are satisfied (or unsatisfied). // Updates the send/recv state of the media channel. 
- void UpdateMediaSendRecvState(); - virtual void UpdateMediaSendRecvState_w() = 0; + virtual void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) = 0; bool UpdateLocalStreams_w(const std::vector& streams, webrtc::SdpType type, - std::string* error_desc); + std::string* error_desc) + RTC_RUN_ON(worker_thread()); bool UpdateRemoteStreams_w(const std::vector& streams, webrtc::SdpType type, - std::string* error_desc); + std::string* error_desc) + RTC_RUN_ON(worker_thread()); virtual bool SetLocalContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) = 0; + std::string* error_desc) + RTC_RUN_ON(worker_thread()) = 0; virtual bool SetRemoteContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string* error_desc) = 0; + std::string* error_desc) + RTC_RUN_ON(worker_thread()) = 0; // Return a list of RTP header extensions with the non-encrypted extensions // removed depending on the current crypto_options_ and only if both the // non-encrypted and encrypted extension is present for the same URI. RtpHeaderExtensions GetFilteredRtpHeaderExtensions( const RtpHeaderExtensions& extensions); - // From MessageHandler - void OnMessage(rtc::Message* pmsg) override; - - // Helper function template for invoking methods on the worker thread. - template - T InvokeOnWorker(const rtc::Location& posted_from, - rtc::FunctionView functor) { - return worker_thread_->Invoke(posted_from, functor); - } - // Add |payload_type| to |demuxer_criteria_| if payload type demuxing is // enabled. 
void MaybeAddHandledPayloadType(int payload_type) RTC_RUN_ON(worker_thread()); @@ -271,52 +287,79 @@ class BaseChannel : public ChannelInterface, void UpdateRtpHeaderExtensionMap( const RtpHeaderExtensions& header_extensions); - bool RegisterRtpDemuxerSink(); + bool RegisterRtpDemuxerSink_w() RTC_RUN_ON(worker_thread()); // Return description of media channel to facilitate logging std::string ToString() const; - bool has_received_packet_ = false; - private: - bool ConnectToRtpTransport(); - void DisconnectFromRtpTransport(); + bool ConnectToRtpTransport() RTC_RUN_ON(network_thread()); + void DisconnectFromRtpTransport() RTC_RUN_ON(network_thread()); void SignalSentPacket_n(const rtc::SentPacket& sent_packet); - bool IsReadyToSendMedia_n() const; rtc::Thread* const worker_thread_; rtc::Thread* const network_thread_; rtc::Thread* const signaling_thread_; - rtc::AsyncInvoker invoker_; - sigslot::signal1 SignalFirstPacketReceived_ - RTC_GUARDED_BY(signaling_thread_); - sigslot::signal1 SignalSentPacket_ - RTC_GUARDED_BY(worker_thread_); + rtc::scoped_refptr alive_; const std::string content_name_; + std::function on_first_packet_received_ + RTC_GUARDED_BY(network_thread()); + // Won't be set when using raw packet transports. SDP-specific thing. + // TODO(bugs.webrtc.org/12230): Written on network thread, read on + // worker thread (at least). + // TODO(tommi): Remove this variable and instead use rtp_transport_ to + // return the transport name. This variable is currently required for + // "for_test" methods. 
std::string transport_name_; - webrtc::RtpTransportInternal* rtp_transport_ = nullptr; + webrtc::RtpTransportInternal* rtp_transport_ + RTC_GUARDED_BY(network_thread()) = nullptr; - std::vector > socket_options_; - std::vector > rtcp_socket_options_; - bool writable_ = false; - bool was_ever_writable_ = false; + std::vector > socket_options_ + RTC_GUARDED_BY(network_thread()); + std::vector > rtcp_socket_options_ + RTC_GUARDED_BY(network_thread()); + bool writable_ RTC_GUARDED_BY(network_thread()) = false; + bool was_ever_writable_n_ RTC_GUARDED_BY(network_thread()) = false; + bool was_ever_writable_ RTC_GUARDED_BY(worker_thread()) = false; const bool srtp_required_ = true; - webrtc::CryptoOptions crypto_options_; + + // TODO(tommi): This field shouldn't be necessary. It's a copy of + // PeerConnection::GetCryptoOptions(), which is const state. It's also only + // used to filter header extensions when calling + // `rtp_transport_->UpdateRtpHeaderExtensionMap()` when the local/remote + // content description is updated. Since the transport is actually owned + // by the transport controller that also gets updated whenever the content + // description changes, it seems we have two paths into the transports, along + // with several thread hops via various classes (such as the Channel classes) + // that only serve as additional layers and store duplicate state. The Jsep* + // family of classes already apply session description updates on the network + // thread every time it changes. + // For the Channel classes, we should be able to get rid of: + // * crypto_options (and fewer construction parameters)_ + // * UpdateRtpHeaderExtensionMap + // * GetFilteredRtpHeaderExtensions + // * Blocking thread hop to the network thread for every call to set + // local/remote content is updated. + const webrtc::CryptoOptions crypto_options_; // MediaChannel related members that should be accessed from the worker // thread. 
- std::unique_ptr media_channel_; + const std::unique_ptr media_channel_; // Currently the |enabled_| flag is accessed from the signaling thread as // well, but it can be changed only when signaling thread does a synchronous // call to the worker thread, so it should be safe. - bool enabled_ = false; + bool enabled_ RTC_GUARDED_BY(worker_thread()) = false; + bool enabled_s_ RTC_GUARDED_BY(signaling_thread()) = false; bool payload_type_demuxing_enabled_ RTC_GUARDED_BY(worker_thread()) = true; - std::vector local_streams_; - std::vector remote_streams_; + std::vector local_streams_ RTC_GUARDED_BY(worker_thread()); + std::vector remote_streams_ RTC_GUARDED_BY(worker_thread()); + // TODO(bugs.webrtc.org/12230): local_content_direction and + // remote_content_direction are set on the worker thread, but accessed on the + // network thread. webrtc::RtpTransceiverDirection local_content_direction_ = webrtc::RtpTransceiverDirection::kInactive; webrtc::RtpTransceiverDirection remote_content_direction_ = @@ -324,7 +367,12 @@ class BaseChannel : public ChannelInterface, // Cached list of payload types, used if payload type demuxing is re-enabled. std::set payload_types_ RTC_GUARDED_BY(worker_thread()); + // TODO(bugs.webrtc.org/12239): Modified on worker thread, accessed + // on network thread in RegisterRtpDemuxerSink_n (called from Init_w) webrtc::RtpDemuxerCriteria demuxer_criteria_; + // Accessed on the worker thread, modified on the network thread from + // RegisterRtpDemuxerSink_w's Invoke. + webrtc::RtpDemuxerCriteria previous_demuxer_criteria_; // This generator is used to generate SSRCs for local streams. // This is needed in cases where SSRCs are not negotiated or set explicitly // like in Simulcast. 
@@ -354,7 +402,6 @@ class VoiceChannel : public BaseChannel { cricket::MediaType media_type() const override { return cricket::MEDIA_TYPE_AUDIO; } - void Init_w(webrtc::RtpTransportInternal* rtp_transport) override; private: // overrides from BaseChannel @@ -416,104 +463,6 @@ class VideoChannel : public BaseChannel { VideoRecvParameters last_recv_params_; }; -// RtpDataChannel is a specialization for data. -class RtpDataChannel : public BaseChannel { - public: - RtpDataChannel(rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr channel, - const std::string& content_name, - bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator); - ~RtpDataChannel(); - // TODO(zhihuang): Remove this once the RtpTransport can be shared between - // BaseChannels. - void Init_w(DtlsTransportInternal* rtp_dtls_transport, - DtlsTransportInternal* rtcp_dtls_transport, - rtc::PacketTransportInternal* rtp_packet_transport, - rtc::PacketTransportInternal* rtcp_packet_transport); - void Init_w(webrtc::RtpTransportInternal* rtp_transport) override; - - virtual bool SendData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - SendDataResult* result); - - // Should be called on the signaling thread only. - bool ready_to_send_data() const { return ready_to_send_data_; } - - sigslot::signal2 - SignalDataReceived; - // Signal for notifying when the channel becomes ready to send data. - // That occurs when the channel is enabled, the transport is writable, - // both local and remote descriptions are set, and the channel is unblocked. - sigslot::signal1 SignalReadyToSendData; - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_DATA; - } - - protected: - // downcasts a MediaChannel. 
- DataMediaChannel* media_channel() const override { - return static_cast(BaseChannel::media_channel()); - } - - private: - struct SendDataMessageData : public rtc::MessageData { - SendDataMessageData(const SendDataParams& params, - const rtc::CopyOnWriteBuffer* payload, - SendDataResult* result) - : params(params), payload(payload), result(result), succeeded(false) {} - - const SendDataParams& params; - const rtc::CopyOnWriteBuffer* payload; - SendDataResult* result; - bool succeeded; - }; - - struct DataReceivedMessageData : public rtc::MessageData { - // We copy the data because the data will become invalid after we - // handle DataMediaChannel::SignalDataReceived but before we fire - // SignalDataReceived. - DataReceivedMessageData(const ReceiveDataParams& params, - const char* data, - size_t len) - : params(params), payload(data, len) {} - const ReceiveDataParams params; - const rtc::CopyOnWriteBuffer payload; - }; - - typedef rtc::TypedMessageData DataChannelReadyToSendMessageData; - - // overrides from BaseChannel - // Checks that data channel type is RTP. - bool CheckDataChannelTypeFromContent(const MediaContentDescription* content, - std::string* error_desc); - bool SetLocalContent_w(const MediaContentDescription* content, - webrtc::SdpType type, - std::string* error_desc) override; - bool SetRemoteContent_w(const MediaContentDescription* content, - webrtc::SdpType type, - std::string* error_desc) override; - void UpdateMediaSendRecvState_w() override; - - void OnMessage(rtc::Message* pmsg) override; - void OnDataReceived(const ReceiveDataParams& params, - const char* data, - size_t len); - void OnDataChannelReadyToSend(bool writable); - - bool ready_to_send_data_ = false; - - // Last DataSendParameters sent down to the media_channel() via - // SetSendParameters. - DataSendParameters last_send_params_; - // Last DataRecvParameters sent down to the media_channel() via - // SetRecvParameters. 
- DataRecvParameters last_recv_params_; -}; - } // namespace cricket #endif // PC_CHANNEL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h b/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h index 68b648630..3b71f0f8b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h @@ -37,13 +37,12 @@ class ChannelInterface { virtual const std::string& content_name() const = 0; - virtual bool enabled() const = 0; - // Enables or disables this channel - virtual bool Enable(bool enable) = 0; + virtual void Enable(bool enable) = 0; // Used for latency measurements. - virtual sigslot::signal1& SignalFirstPacketReceived() = 0; + virtual void SetFirstPacketReceivedCallback( + std::function callback) = 0; // Channel control virtual bool SetLocalContent(const MediaContentDescription* content, diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc index 9d5adcad4..b58830b21 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc @@ -10,57 +10,54 @@ #include "pc/channel_manager.h" +#include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" +#include "api/sequence_checker.h" #include "media/base/media_constants.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/thread_checker.h" #include "rtc_base/trace_event.h" namespace cricket { +// static +std::unique_ptr ChannelManager::Create( + std::unique_ptr media_engine, + bool enable_rtx, + rtc::Thread* worker_thread, + rtc::Thread* network_thread) { + RTC_DCHECK_RUN_ON(worker_thread); + RTC_DCHECK(network_thread); + RTC_DCHECK(worker_thread); + + if (media_engine) + media_engine->Init(); + + return absl::WrapUnique(new ChannelManager( + std::move(media_engine), enable_rtx, worker_thread, 
network_thread)); +} + ChannelManager::ChannelManager( std::unique_ptr media_engine, - std::unique_ptr data_engine, + bool enable_rtx, rtc::Thread* worker_thread, rtc::Thread* network_thread) : media_engine_(std::move(media_engine)), - data_engine_(std::move(data_engine)), - main_thread_(rtc::Thread::Current()), worker_thread_(worker_thread), - network_thread_(network_thread) { - RTC_DCHECK(data_engine_); + network_thread_(network_thread), + enable_rtx_(enable_rtx) { RTC_DCHECK(worker_thread_); RTC_DCHECK(network_thread_); + RTC_DCHECK_RUN_ON(worker_thread_); } ChannelManager::~ChannelManager() { - if (initialized_) { - Terminate(); - } - // The media engine needs to be deleted on the worker thread for thread safe - // destruction, - worker_thread_->Invoke(RTC_FROM_HERE, [&] { media_engine_.reset(); }); -} - -bool ChannelManager::SetVideoRtxEnabled(bool enable) { - // To be safe, this call is only allowed before initialization. Apps like - // Flute only have a singleton ChannelManager and we don't want this flag to - // be toggled between calls or when there's concurrent calls. We expect apps - // to enable this at startup and retain that setting for the lifetime of the - // app. - if (!initialized_) { - enable_rtx_ = enable; - return true; - } else { - RTC_LOG(LS_WARNING) << "Cannot toggle rtx after initialization!"; - return false; - } + RTC_DCHECK_RUN_ON(worker_thread_); } void ChannelManager::GetSupportedAudioSendCodecs( @@ -113,34 +110,6 @@ void ChannelManager::GetSupportedVideoReceiveCodecs( } } -void ChannelManager::GetSupportedDataCodecs( - std::vector* codecs) const { - *codecs = data_engine_->data_codecs(); -} - -bool ChannelManager::Init() { - RTC_DCHECK(!initialized_); - if (initialized_) { - return false; - } - RTC_DCHECK(network_thread_); - RTC_DCHECK(worker_thread_); - if (!network_thread_->IsCurrent()) { - // Do not allow invoking calls to other threads on the network thread. 
- network_thread_->Invoke( - RTC_FROM_HERE, [&] { network_thread_->DisallowBlockingCalls(); }); - } - - if (media_engine_) { - initialized_ = worker_thread_->Invoke( - RTC_FROM_HERE, [&] { return media_engine_->Init(); }); - RTC_DCHECK(initialized_); - } else { - initialized_ = true; - } - return initialized_; -} - RtpHeaderExtensions ChannelManager::GetDefaultEnabledAudioRtpHeaderExtensions() const { if (!media_engine_) @@ -169,23 +138,9 @@ ChannelManager::GetSupportedVideoRtpHeaderExtensions() const { return media_engine_->video().GetRtpHeaderExtensions(); } -void ChannelManager::Terminate() { - RTC_DCHECK(initialized_); - if (!initialized_) { - return; - } - // Need to destroy the channels on the worker thread. - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - video_channels_.clear(); - voice_channels_.clear(); - data_channels_.clear(); - }); - initialized_ = false; -} - VoiceChannel* ChannelManager::CreateVoiceChannel( webrtc::Call* call, - const cricket::MediaConfig& media_config, + const MediaConfig& media_config, webrtc::RtpTransportInternal* rtp_transport, rtc::Thread* signaling_thread, const std::string& content_name, @@ -193,6 +148,8 @@ VoiceChannel* ChannelManager::CreateVoiceChannel( const webrtc::CryptoOptions& crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator, const AudioOptions& options) { + RTC_DCHECK(call); + RTC_DCHECK(media_engine_); // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in // PeerConnection and add the expectation that we're already on the right // thread. 
@@ -205,11 +162,6 @@ VoiceChannel* ChannelManager::CreateVoiceChannel( } RTC_DCHECK_RUN_ON(worker_thread_); - RTC_DCHECK(initialized_); - RTC_DCHECK(call); - if (!media_engine_) { - return nullptr; - } VoiceMediaChannel* media_channel = media_engine_->voice().CreateMediaChannel( call, media_config, options, crypto_options); @@ -231,32 +183,25 @@ VoiceChannel* ChannelManager::CreateVoiceChannel( void ChannelManager::DestroyVoiceChannel(VoiceChannel* voice_channel) { TRACE_EVENT0("webrtc", "ChannelManager::DestroyVoiceChannel"); - if (!voice_channel) { - return; - } + RTC_DCHECK(voice_channel); + if (!worker_thread_->IsCurrent()) { worker_thread_->Invoke(RTC_FROM_HERE, [&] { DestroyVoiceChannel(voice_channel); }); return; } - RTC_DCHECK(initialized_); + RTC_DCHECK_RUN_ON(worker_thread_); - auto it = absl::c_find_if(voice_channels_, - [&](const std::unique_ptr& p) { - return p.get() == voice_channel; - }); - RTC_DCHECK(it != voice_channels_.end()); - if (it == voice_channels_.end()) { - return; - } - - voice_channels_.erase(it); + voice_channels_.erase(absl::c_find_if( + voice_channels_, [&](const std::unique_ptr& p) { + return p.get() == voice_channel; + })); } VideoChannel* ChannelManager::CreateVideoChannel( webrtc::Call* call, - const cricket::MediaConfig& media_config, + const MediaConfig& media_config, webrtc::RtpTransportInternal* rtp_transport, rtc::Thread* signaling_thread, const std::string& content_name, @@ -265,6 +210,8 @@ VideoChannel* ChannelManager::CreateVideoChannel( rtc::UniqueRandomIdGenerator* ssrc_generator, const VideoOptions& options, webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { + RTC_DCHECK(call); + RTC_DCHECK(media_engine_); // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in // PeerConnection and add the expectation that we're already on the right // thread. 
@@ -278,11 +225,6 @@ VideoChannel* ChannelManager::CreateVideoChannel( } RTC_DCHECK_RUN_ON(worker_thread_); - RTC_DCHECK(initialized_); - RTC_DCHECK(call); - if (!media_engine_) { - return nullptr; - } VideoMediaChannel* media_channel = media_engine_->video().CreateMediaChannel( call, media_config, options, crypto_options, @@ -305,101 +247,30 @@ VideoChannel* ChannelManager::CreateVideoChannel( void ChannelManager::DestroyVideoChannel(VideoChannel* video_channel) { TRACE_EVENT0("webrtc", "ChannelManager::DestroyVideoChannel"); - if (!video_channel) { - return; - } + RTC_DCHECK(video_channel); + if (!worker_thread_->IsCurrent()) { worker_thread_->Invoke(RTC_FROM_HERE, [&] { DestroyVideoChannel(video_channel); }); return; } + RTC_DCHECK_RUN_ON(worker_thread_); - RTC_DCHECK(initialized_); - - auto it = absl::c_find_if(video_channels_, - [&](const std::unique_ptr& p) { - return p.get() == video_channel; - }); - RTC_DCHECK(it != video_channels_.end()); - if (it == video_channels_.end()) { - return; - } - - video_channels_.erase(it); -} - -RtpDataChannel* ChannelManager::CreateRtpDataChannel( - const cricket::MediaConfig& media_config, - webrtc::RtpTransportInternal* rtp_transport, - rtc::Thread* signaling_thread, - const std::string& content_name, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator) { - if (!worker_thread_->IsCurrent()) { - return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return CreateRtpDataChannel(media_config, rtp_transport, signaling_thread, - content_name, srtp_required, crypto_options, - ssrc_generator); - }); - } - - // This is ok to alloc from a thread other than the worker thread. 
- RTC_DCHECK(initialized_); - DataMediaChannel* media_channel = data_engine_->CreateChannel(media_config); - if (!media_channel) { - RTC_LOG(LS_WARNING) << "Failed to create RTP data channel."; - return nullptr; - } - - auto data_channel = std::make_unique( - worker_thread_, network_thread_, signaling_thread, - absl::WrapUnique(media_channel), content_name, srtp_required, - crypto_options, ssrc_generator); - - // Media Transports are not supported with Rtp Data Channel. - data_channel->Init_w(rtp_transport); - - RtpDataChannel* data_channel_ptr = data_channel.get(); - data_channels_.push_back(std::move(data_channel)); - return data_channel_ptr; -} - -void ChannelManager::DestroyRtpDataChannel(RtpDataChannel* data_channel) { - TRACE_EVENT0("webrtc", "ChannelManager::DestroyRtpDataChannel"); - if (!data_channel) { - return; - } - if (!worker_thread_->IsCurrent()) { - worker_thread_->Invoke( - RTC_FROM_HERE, [&] { return DestroyRtpDataChannel(data_channel); }); - return; - } - - RTC_DCHECK(initialized_); - - auto it = absl::c_find_if(data_channels_, - [&](const std::unique_ptr& p) { - return p.get() == data_channel; - }); - RTC_DCHECK(it != data_channels_.end()); - if (it == data_channels_.end()) { - return; - } - - data_channels_.erase(it); + video_channels_.erase(absl::c_find_if( + video_channels_, [&](const std::unique_ptr& p) { + return p.get() == video_channel; + })); } bool ChannelManager::StartAecDump(webrtc::FileWrapper file, int64_t max_size_bytes) { - return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return media_engine_->voice().StartAecDump(std::move(file), max_size_bytes); - }); + RTC_DCHECK_RUN_ON(worker_thread_); + return media_engine_->voice().StartAecDump(std::move(file), max_size_bytes); } void ChannelManager::StopAecDump() { - worker_thread_->Invoke(RTC_FROM_HERE, - [&] { media_engine_->voice().StopAecDump(); }); + RTC_DCHECK_RUN_ON(worker_thread_); + media_engine_->voice().StopAecDump(); } } // namespace cricket diff --git 
a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h index ba2c26009..43fa27935 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h @@ -19,6 +19,8 @@ #include "api/audio_options.h" #include "api/crypto/crypto_options.h" +#include "api/rtp_parameters.h" +#include "api/video/video_bitrate_allocator_factory.h" #include "call/call.h" #include "media/base/codec.h" #include "media/base/media_channel.h" @@ -29,6 +31,7 @@ #include "pc/session_description.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/thread.h" +#include "rtc_base/unique_id_generator.h" namespace cricket { @@ -42,32 +45,20 @@ namespace cricket { // using device manager. class ChannelManager final { public: - // Construct a ChannelManager with the specified media engine and data engine. - ChannelManager(std::unique_ptr media_engine, - std::unique_ptr data_engine, - rtc::Thread* worker_thread, - rtc::Thread* network_thread); + // Returns an initialized instance of ChannelManager. + // If media_engine is non-nullptr, then the returned ChannelManager instance + // will own that reference and media engine initialization + static std::unique_ptr Create( + std::unique_ptr media_engine, + bool enable_rtx, + rtc::Thread* worker_thread, + rtc::Thread* network_thread); + + ChannelManager() = delete; ~ChannelManager(); - // Accessors for the worker thread, allowing it to be set after construction, - // but before Init. set_worker_thread will return false if called after Init. 
rtc::Thread* worker_thread() const { return worker_thread_; } - bool set_worker_thread(rtc::Thread* thread) { - if (initialized_) { - return false; - } - worker_thread_ = thread; - return true; - } rtc::Thread* network_thread() const { return network_thread_; } - bool set_network_thread(rtc::Thread* thread) { - if (initialized_) { - return false; - } - network_thread_ = thread; - return true; - } - MediaEngineInterface* media_engine() { return media_engine_.get(); } // Retrieves the list of supported audio & video codec types. @@ -76,7 +67,6 @@ class ChannelManager final { void GetSupportedAudioReceiveCodecs(std::vector* codecs) const; void GetSupportedVideoSendCodecs(std::vector* codecs) const; void GetSupportedVideoReceiveCodecs(std::vector* codecs) const; - void GetSupportedDataCodecs(std::vector* codecs) const; RtpHeaderExtensions GetDefaultEnabledAudioRtpHeaderExtensions() const; std::vector GetSupportedAudioRtpHeaderExtensions() const; @@ -84,20 +74,13 @@ class ChannelManager final { std::vector GetSupportedVideoRtpHeaderExtensions() const; - // Indicates whether the media engine is started. - bool initialized() const { return initialized_; } - // Starts up the media engine. - bool Init(); - // Shuts down the media engine. - void Terminate(); - // The operations below all occur on the worker thread. // ChannelManager retains ownership of the created channels, so clients should // call the appropriate Destroy*Channel method when done. // Creates a voice channel, to be associated with the specified session. VoiceChannel* CreateVoiceChannel(webrtc::Call* call, - const cricket::MediaConfig& media_config, + const MediaConfig& media_config, webrtc::RtpTransportInternal* rtp_transport, rtc::Thread* signaling_thread, const std::string& content_name, @@ -113,7 +96,7 @@ class ChannelManager final { // Version of the above that takes PacketTransportInternal. 
VideoChannel* CreateVideoChannel( webrtc::Call* call, - const cricket::MediaConfig& media_config, + const MediaConfig& media_config, webrtc::RtpTransportInternal* rtp_transport, rtc::Thread* signaling_thread, const std::string& content_name, @@ -125,32 +108,6 @@ class ChannelManager final { // Destroys a video channel created by CreateVideoChannel. void DestroyVideoChannel(VideoChannel* video_channel); - RtpDataChannel* CreateRtpDataChannel( - const cricket::MediaConfig& media_config, - webrtc::RtpTransportInternal* rtp_transport, - rtc::Thread* signaling_thread, - const std::string& content_name, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator); - // Destroys a data channel created by CreateRtpDataChannel. - void DestroyRtpDataChannel(RtpDataChannel* data_channel); - - // Indicates whether any channels exist. - bool has_channels() const { - return (!voice_channels_.empty() || !video_channels_.empty() || - !data_channels_.empty()); - } - - // RTX will be enabled/disabled in engines that support it. The supporting - // engines will start offering an RTX codec. Must be called before Init(). - bool SetVideoRtxEnabled(bool enable); - - // Starts/stops the local microphone and enables polling of the input level. - bool capturing() const { return capturing_; } - - // The operations below occur on the main thread. - // Starts AEC dump using existing file, with a specified maximum file size in // bytes. When the limit is reached, logging will stop and the file will be // closed. If max_size_bytes is set to <= 0, no limit will be used. @@ -160,20 +117,22 @@ class ChannelManager final { void StopAecDump(); private: - std::unique_ptr media_engine_; // Nullable. - std::unique_ptr data_engine_; // Non-null. 
- bool initialized_ = false; - rtc::Thread* main_thread_; - rtc::Thread* worker_thread_; - rtc::Thread* network_thread_; + ChannelManager(std::unique_ptr media_engine, + bool enable_rtx, + rtc::Thread* worker_thread, + rtc::Thread* network_thread); + + const std::unique_ptr media_engine_; // Nullable. + rtc::Thread* const worker_thread_; + rtc::Thread* const network_thread_; // Vector contents are non-null. - std::vector> voice_channels_; - std::vector> video_channels_; - std::vector> data_channels_; + std::vector> voice_channels_ + RTC_GUARDED_BY(worker_thread_); + std::vector> video_channels_ + RTC_GUARDED_BY(worker_thread_); - bool enable_rtx_ = false; - bool capturing_ = false; + const bool enable_rtx_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/composite_rtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/composite_rtp_transport.cc deleted file mode 100644 index 641d1d0fa..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/composite_rtp_transport.cc +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/composite_rtp_transport.h" - -#include -#include - -#include "absl/memory/memory.h" -#include "p2p/base/packet_transport_internal.h" - -namespace webrtc { - -CompositeRtpTransport::CompositeRtpTransport( - std::vector transports) - : transports_(std::move(transports)) { - RTC_DCHECK(!transports_.empty()) << "Cannot have an empty composite"; - std::vector rtp_transports; - std::vector rtcp_transports; - for (RtpTransportInternal* transport : transports_) { - RTC_DCHECK_EQ(transport->rtcp_mux_enabled(), rtcp_mux_enabled()) - << "Either all or none of the transports in a composite must enable " - "rtcp mux"; - RTC_DCHECK_EQ(transport->transport_name(), transport_name()) - << "All transports in a composite must have the same transport name"; - - transport->SignalNetworkRouteChanged.connect( - this, &CompositeRtpTransport::OnNetworkRouteChanged); - transport->SignalRtcpPacketReceived.connect( - this, &CompositeRtpTransport::OnRtcpPacketReceived); - } -} - -void CompositeRtpTransport::SetSendTransport( - RtpTransportInternal* send_transport) { - if (send_transport_ == send_transport) { - return; - } - - RTC_DCHECK(absl::c_linear_search(transports_, send_transport)) - << "Cannot set a send transport that isn't part of the composite"; - - if (send_transport_) { - send_transport_->SignalReadyToSend.disconnect(this); - send_transport_->SignalWritableState.disconnect(this); - send_transport_->SignalSentPacket.disconnect(this); - } - - send_transport_ = send_transport; - send_transport_->SignalReadyToSend.connect( - this, &CompositeRtpTransport::OnReadyToSend); - send_transport_->SignalWritableState.connect( - this, &CompositeRtpTransport::OnWritableState); - send_transport_->SignalSentPacket.connect( - this, &CompositeRtpTransport::OnSentPacket); - - SignalWritableState(send_transport_->IsWritable(/*rtcp=*/true) && - send_transport_->IsWritable(/*rtcp=*/false)); - if (send_transport_->IsReadyToSend()) { - SignalReadyToSend(true); - } -} - -void 
CompositeRtpTransport::RemoveTransport(RtpTransportInternal* transport) { - RTC_DCHECK(transport != send_transport_) << "Cannot remove send transport"; - - auto it = absl::c_find(transports_, transport); - if (it == transports_.end()) { - return; - } - - transport->SignalNetworkRouteChanged.disconnect(this); - transport->SignalRtcpPacketReceived.disconnect(this); - for (auto sink : rtp_demuxer_sinks_) { - transport->UnregisterRtpDemuxerSink(sink); - } - - transports_.erase(it); -} - -const std::string& CompositeRtpTransport::transport_name() const { - return transports_.front()->transport_name(); -} - -int CompositeRtpTransport::SetRtpOption(rtc::Socket::Option opt, int value) { - int result = 0; - for (auto transport : transports_) { - result |= transport->SetRtpOption(opt, value); - } - return result; -} - -int CompositeRtpTransport::SetRtcpOption(rtc::Socket::Option opt, int value) { - int result = 0; - for (auto transport : transports_) { - result |= transport->SetRtcpOption(opt, value); - } - return result; -} - -bool CompositeRtpTransport::rtcp_mux_enabled() const { - return transports_.front()->rtcp_mux_enabled(); -} - -void CompositeRtpTransport::SetRtcpMuxEnabled(bool enabled) { - for (auto transport : transports_) { - transport->SetRtcpMuxEnabled(enabled); - } -} - -bool CompositeRtpTransport::IsReadyToSend() const { - return send_transport_ && send_transport_->IsReadyToSend(); -} - -bool CompositeRtpTransport::IsWritable(bool rtcp) const { - return send_transport_ && send_transport_->IsWritable(rtcp); -} - -bool CompositeRtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, - int flags) { - if (!send_transport_) { - return false; - } - return send_transport_->SendRtpPacket(packet, options, flags); -} - -bool CompositeRtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, - int flags) { - if (!send_transport_) { - return false; - } - return 
send_transport_->SendRtcpPacket(packet, options, flags); -} - -void CompositeRtpTransport::UpdateRtpHeaderExtensionMap( - const cricket::RtpHeaderExtensions& header_extensions) { - for (RtpTransportInternal* transport : transports_) { - transport->UpdateRtpHeaderExtensionMap(header_extensions); - } -} - -bool CompositeRtpTransport::IsSrtpActive() const { - bool active = true; - for (RtpTransportInternal* transport : transports_) { - active &= transport->IsSrtpActive(); - } - return active; -} - -bool CompositeRtpTransport::RegisterRtpDemuxerSink( - const RtpDemuxerCriteria& criteria, - RtpPacketSinkInterface* sink) { - for (RtpTransportInternal* transport : transports_) { - transport->RegisterRtpDemuxerSink(criteria, sink); - } - rtp_demuxer_sinks_.insert(sink); - return true; -} - -bool CompositeRtpTransport::UnregisterRtpDemuxerSink( - RtpPacketSinkInterface* sink) { - for (RtpTransportInternal* transport : transports_) { - transport->UnregisterRtpDemuxerSink(sink); - } - rtp_demuxer_sinks_.erase(sink); - return true; -} - -void CompositeRtpTransport::OnNetworkRouteChanged( - absl::optional route) { - SignalNetworkRouteChanged(route); -} - -void CompositeRtpTransport::OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet, - int64_t packet_time_us) { - SignalRtcpPacketReceived(packet, packet_time_us); -} - -void CompositeRtpTransport::OnWritableState(bool writable) { - SignalWritableState(writable); -} - -void CompositeRtpTransport::OnReadyToSend(bool ready_to_send) { - SignalReadyToSend(ready_to_send); -} - -void CompositeRtpTransport::OnSentPacket(const rtc::SentPacket& packet) { - SignalSentPacket(packet); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/composite_rtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/composite_rtp_transport.h deleted file mode 100644 index 35f938257..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/composite_rtp_transport.h +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef PC_COMPOSITE_RTP_TRANSPORT_H_ -#define PC_COMPOSITE_RTP_TRANSPORT_H_ - -#include -#include -#include -#include - -#include "call/rtp_demuxer.h" -#include "call/rtp_packet_sink_interface.h" -#include "pc/rtp_transport_internal.h" -#include "pc/session_description.h" -#include "rtc_base/async_packet_socket.h" -#include "rtc_base/copy_on_write_buffer.h" - -namespace webrtc { - -// Composite RTP transport capable of receiving from multiple sub-transports. -// -// CompositeRtpTransport is receive-only until the caller explicitly chooses -// which transport will be used to send and calls |SetSendTransport|. This -// choice must be made as part of the SDP negotiation process, based on receipt -// of a provisional answer. |CompositeRtpTransport| does not become writable or -// ready to send until |SetSendTransport| is called. -// -// When a full answer is received, the user should replace the composite -// transport with the single, chosen RTP transport, then delete the composite -// and all non-chosen transports. -class CompositeRtpTransport : public RtpTransportInternal { - public: - // Constructs a composite out of the given |transports|. |transports| must - // not be empty. All |transports| must outlive the composite. - explicit CompositeRtpTransport(std::vector transports); - - // Sets which transport will be used for sending packets. Once called, - // |IsReadyToSend|, |IsWritable|, and the associated signals will reflect the - // state of |send_tranpsort|. - void SetSendTransport(RtpTransportInternal* send_transport); - - // Removes |transport| from the composite. 
No-op if |transport| is null or - // not found in the composite. Removing a transport disconnects all signals - // and RTP demux sinks from that transport. The send transport may not be - // removed. - void RemoveTransport(RtpTransportInternal* transport); - - // All transports within a composite must have the same name. - const std::string& transport_name() const override; - - int SetRtpOption(rtc::Socket::Option opt, int value) override; - int SetRtcpOption(rtc::Socket::Option opt, int value) override; - - // All transports within a composite must either enable or disable RTCP mux. - bool rtcp_mux_enabled() const override; - - // Enables or disables RTCP mux for all component transports. - void SetRtcpMuxEnabled(bool enabled) override; - - // The composite is ready to send if |send_transport_| is set and ready to - // send. - bool IsReadyToSend() const override; - - // The composite is writable if |send_transport_| is set and writable. - bool IsWritable(bool rtcp) const override; - - // Sends an RTP packet. May only be called after |send_transport_| is set. - bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, - int flags) override; - - // Sends an RTCP packet. May only be called after |send_transport_| is set. - bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, - int flags) override; - - // Updates the mapping of RTP header extensions for all component transports. - void UpdateRtpHeaderExtensionMap( - const cricket::RtpHeaderExtensions& header_extensions) override; - - // SRTP is only active for a composite if it is active for all component - // transports. - bool IsSrtpActive() const override; - - // Registers an RTP demux sink with all component transports. - bool RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria, - RtpPacketSinkInterface* sink) override; - bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override; - - private: - // Receive-side signals. 
- void OnNetworkRouteChanged(absl::optional route); - void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet, - int64_t packet_time_us); - - // Send-side signals. - void OnWritableState(bool writable); - void OnReadyToSend(bool ready_to_send); - void OnSentPacket(const rtc::SentPacket& packet); - - std::vector transports_; - RtpTransportInternal* send_transport_ = nullptr; - - // Record of registered RTP demuxer sinks. Used to unregister sinks when a - // transport is removed. - std::set rtp_demuxer_sinks_; -}; - -} // namespace webrtc - -#endif // PC_COMPOSITE_RTP_TRANSPORT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc b/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc index 727fbd654..8d6ee636f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc @@ -15,9 +15,9 @@ #include #include "api/transport/field_trial_based_config.h" -#include "media/base/rtp_data_engine.h" +#include "media/sctp/sctp_transport_factory.h" #include "rtc_base/helpers.h" -#include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -63,7 +63,7 @@ std::unique_ptr MaybeCreateSctpFactory( if (factory) { return factory; } -#ifdef HAVE_SCTP +#ifdef WEBRTC_HAVE_SCTP return std::make_unique(network_thread); #else return nullptr; @@ -75,11 +75,7 @@ std::unique_ptr MaybeCreateSctpFactory( // Static rtc::scoped_refptr ConnectionContext::Create( PeerConnectionFactoryDependencies* dependencies) { - auto context = new rtc::RefCountedObject(dependencies); - if (!context->channel_manager_->Init()) { - return nullptr; - } - return context; + return new ConnectionContext(dependencies); } ConnectionContext::ConnectionContext( @@ -97,7 +93,6 @@ ConnectionContext::ConnectionContext( network_monitor_factory_( std::move(dependencies->network_monitor_factory)), call_factory_(std::move(dependencies->call_factory)), - 
media_engine_(std::move(dependencies->media_engine)), sctp_factory_( MaybeCreateSctpFactory(std::move(dependencies->sctp_factory), network_thread())), @@ -107,7 +102,14 @@ ConnectionContext::ConnectionContext( signaling_thread_->AllowInvokesToThread(worker_thread_); signaling_thread_->AllowInvokesToThread(network_thread_); worker_thread_->AllowInvokesToThread(network_thread_); - network_thread_->DisallowAllInvokes(); + if (network_thread_->IsCurrent()) { + network_thread_->DisallowAllInvokes(); + } else { + network_thread_->PostTask(ToQueuedTask([thread = network_thread_] { + thread->DisallowBlockingCalls(); + thread->DisallowAllInvokes(); + })); + } RTC_DCHECK_RUN_ON(signaling_thread_); rtc::InitRandom(rtc::Time32()); @@ -120,16 +122,26 @@ ConnectionContext::ConnectionContext( default_socket_factory_ = std::make_unique(network_thread()); - channel_manager_ = std::make_unique( - std::move(media_engine_), std::make_unique(), - worker_thread(), network_thread()); + worker_thread_->Invoke(RTC_FROM_HERE, [&]() { + channel_manager_ = cricket::ChannelManager::Create( + std::move(dependencies->media_engine), + /*enable_rtx=*/true, worker_thread(), network_thread()); + }); - channel_manager_->SetVideoRtxEnabled(true); + // Set warning levels on the threads, to give warnings when response + // may be slower than is expected of the thread. + // Since some of the threads may be the same, start with the least + // restrictive limits and end with the least permissive ones. + // This will give warnings for all cases. + signaling_thread_->SetDispatchWarningMs(100); + worker_thread_->SetDispatchWarningMs(30); + network_thread_->SetDispatchWarningMs(10); } ConnectionContext::~ConnectionContext() { RTC_DCHECK_RUN_ON(signaling_thread_); - channel_manager_.reset(nullptr); + worker_thread_->Invoke(RTC_FROM_HERE, + [&]() { channel_manager_.reset(nullptr); }); // Make sure |worker_thread()| and |signaling_thread()| outlive // |default_socket_factory_| and |default_network_manager_|. 
diff --git a/TMessagesProj/jni/voip/webrtc/pc/connection_context.h b/TMessagesProj/jni/voip/webrtc/pc/connection_context.h index 02d08a191..8fad13c10 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/connection_context.h +++ b/TMessagesProj/jni/voip/webrtc/pc/connection_context.h @@ -17,19 +17,18 @@ #include "api/call/call_factory_interface.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" +#include "api/ref_counted_base.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/transport/sctp_transport_factory_interface.h" #include "api/transport/webrtc_key_value_config.h" #include "media/base/media_engine.h" -#include "media/sctp/sctp_transport_internal.h" #include "p2p/base/basic_packet_socket_factory.h" #include "pc/channel_manager.h" #include "rtc_base/checks.h" #include "rtc_base/network.h" #include "rtc_base/network_monitor_factory.h" -#include "rtc_base/ref_count.h" #include "rtc_base/rtc_certificate_generator.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -48,7 +47,8 @@ class RtcEventLog; // interferes with the operation of other PeerConnections. // // This class must be created and destroyed on the signaling thread. -class ConnectionContext : public rtc::RefCountInterface { +class ConnectionContext final + : public rtc::RefCountedNonVirtual { public: // Creates a ConnectionContext. May return null if initialization fails. 
// The Dependencies class allows simple management of all new dependencies @@ -62,7 +62,6 @@ class ConnectionContext : public rtc::RefCountInterface { // Functions called from PeerConnection and friends SctpTransportFactoryInterface* sctp_transport_factory() const { - RTC_DCHECK_RUN_ON(signaling_thread_); return sctp_factory_.get(); } @@ -94,7 +93,8 @@ class ConnectionContext : public rtc::RefCountInterface { protected: explicit ConnectionContext(PeerConnectionFactoryDependencies* dependencies); - virtual ~ConnectionContext(); + friend class rtc::RefCountedNonVirtual; + ~ConnectionContext(); private: // The following three variables are used to communicate between the @@ -121,10 +121,7 @@ class ConnectionContext : public rtc::RefCountInterface { std::unique_ptr default_socket_factory_ RTC_GUARDED_BY(signaling_thread_); - std::unique_ptr media_engine_ - RTC_GUARDED_BY(signaling_thread_); - std::unique_ptr const sctp_factory_ - RTC_GUARDED_BY(signaling_thread_); + std::unique_ptr const sctp_factory_; // Accessed both on signaling thread and worker thread. 
std::unique_ptr const trials_; }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc index 9fabe13cc..d8e6b3989 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc @@ -10,57 +10,37 @@ #include "pc/data_channel_controller.h" +#include #include +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "pc/peer_connection.h" #include "pc/sctp_utils.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { bool DataChannelController::HasDataChannels() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return !rtp_data_channels_.empty() || !sctp_data_channels_.empty(); + return !sctp_data_channels_.empty(); } -bool DataChannelController::SendData(const cricket::SendDataParams& params, +bool DataChannelController::SendData(int sid, + const SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) { if (data_channel_transport()) - return DataChannelSendData(params, payload, result); - if (rtp_data_channel()) - return rtp_data_channel()->SendData(params, payload, result); + return DataChannelSendData(sid, params, payload, result); RTC_LOG(LS_ERROR) << "SendData called before transport is ready"; return false; } -bool DataChannelController::ConnectDataChannel( - RtpDataChannel* webrtc_data_channel) { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (!rtp_data_channel()) { - // Don't log an error here, because DataChannels are expected to call - // ConnectDataChannel in this state. It's the only way to initially tell - // whether or not the underlying transport is ready. 
- return false; - } - rtp_data_channel()->SignalReadyToSendData.connect( - webrtc_data_channel, &RtpDataChannel::OnChannelReady); - rtp_data_channel()->SignalDataReceived.connect( - webrtc_data_channel, &RtpDataChannel::OnDataReceived); - return true; -} - -void DataChannelController::DisconnectDataChannel( - RtpDataChannel* webrtc_data_channel) { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (!rtp_data_channel()) { - RTC_LOG(LS_ERROR) - << "DisconnectDataChannel called when rtp_data_channel_ is NULL."; - return; - } - rtp_data_channel()->SignalReadyToSendData.disconnect(webrtc_data_channel); - rtp_data_channel()->SignalDataReceived.disconnect(webrtc_data_channel); -} - bool DataChannelController::ConnectDataChannel( SctpDataChannel* webrtc_data_channel) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -117,8 +97,7 @@ void DataChannelController::RemoveSctpDataStream(int sid) { bool DataChannelController::ReadyToSendData() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return (rtp_data_channel() && rtp_data_channel()->ready_to_send_data()) || - (data_channel_transport() && data_channel_transport_ready_to_send_); + return (data_channel_transport() && data_channel_transport_ready_to_send_); } void DataChannelController::OnDataReceived( @@ -128,60 +107,70 @@ void DataChannelController::OnDataReceived( RTC_DCHECK_RUN_ON(network_thread()); cricket::ReceiveDataParams params; params.sid = channel_id; - params.type = ToCricketDataMessageType(type); - data_channel_transport_invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread(), [this, params, buffer] { - RTC_DCHECK_RUN_ON(signaling_thread()); - // TODO(bugs.webrtc.org/11547): The data being received should be - // delivered on the network thread. The way HandleOpenMessage_s works - // right now is that it's called for all types of buffers and operates - // as a selector function. Change this so that it's only called for - // buffers that it should be able to handle. 
Once we do that, we can - // deliver all other buffers on the network thread (change - // SignalDataChannelTransportReceivedData_s to - // SignalDataChannelTransportReceivedData_n). - if (!HandleOpenMessage_s(params, buffer)) { - SignalDataChannelTransportReceivedData_s(params, buffer); + params.type = type; + signaling_thread()->PostTask( + ToQueuedTask([self = weak_factory_.GetWeakPtr(), params, buffer] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + // TODO(bugs.webrtc.org/11547): The data being received should be + // delivered on the network thread. The way HandleOpenMessage_s works + // right now is that it's called for all types of buffers and operates + // as a selector function. Change this so that it's only called for + // buffers that it should be able to handle. Once we do that, we can + // deliver all other buffers on the network thread (change + // SignalDataChannelTransportReceivedData_s to + // SignalDataChannelTransportReceivedData_n). + if (!self->HandleOpenMessage_s(params, buffer)) { + self->SignalDataChannelTransportReceivedData_s(params, buffer); + } } - }); + })); } void DataChannelController::OnChannelClosing(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread(), [this, channel_id] { - RTC_DCHECK_RUN_ON(signaling_thread()); - SignalDataChannelTransportChannelClosing_s(channel_id); - }); + signaling_thread()->PostTask( + ToQueuedTask([self = weak_factory_.GetWeakPtr(), channel_id] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + self->SignalDataChannelTransportChannelClosing_s(channel_id); + } + })); } void DataChannelController::OnChannelClosed(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread(), [this, channel_id] { - RTC_DCHECK_RUN_ON(signaling_thread()); - SignalDataChannelTransportChannelClosed_s(channel_id); - }); + 
signaling_thread()->PostTask( + ToQueuedTask([self = weak_factory_.GetWeakPtr(), channel_id] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + self->SignalDataChannelTransportChannelClosed_s(channel_id); + } + })); } void DataChannelController::OnReadyToSend() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread(), [this] { - RTC_DCHECK_RUN_ON(signaling_thread()); - data_channel_transport_ready_to_send_ = true; - SignalDataChannelTransportWritable_s( - data_channel_transport_ready_to_send_); - }); + signaling_thread()->PostTask( + ToQueuedTask([self = weak_factory_.GetWeakPtr()] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + self->data_channel_transport_ready_to_send_ = true; + self->SignalDataChannelTransportWritable_s( + self->data_channel_transport_ready_to_send_); + } + })); } void DataChannelController::OnTransportClosed() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread(), [this] { - RTC_DCHECK_RUN_ON(signaling_thread()); - OnTransportChannelClosed(); - }); + signaling_thread()->PostTask( + ToQueuedTask([self = weak_factory_.GetWeakPtr()] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + self->OnTransportChannelClosed(); + } + })); } void DataChannelController::SetupDataChannelTransport_n() { @@ -234,15 +223,15 @@ std::vector DataChannelController::GetDataChannelStats() bool DataChannelController::HandleOpenMessage_s( const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) { - if (params.type == cricket::DMT_CONTROL && IsOpenMessage(buffer)) { + if (params.type == DataMessageType::kControl && IsOpenMessage(buffer)) { // Received OPEN message; parse and signal that a new data channel should // be created. 
std::string label; InternalDataChannelInit config; - config.id = params.ssrc; + config.id = params.sid; if (!ParseDataChannelOpenMessage(buffer, &label, &config)) { - RTC_LOG(LS_WARNING) << "Failed to parse the OPEN message for ssrc " - << params.ssrc; + RTC_LOG(LS_WARNING) << "Failed to parse the OPEN message for sid " + << params.sid; return true; } config.open_handshake_role = InternalDataChannelInit::kAcker; @@ -274,49 +263,16 @@ DataChannelController::InternalCreateDataChannelWithProxy( if (pc_->IsClosed()) { return nullptr; } - if (data_channel_type_ == cricket::DCT_NONE) { - RTC_LOG(LS_ERROR) - << "InternalCreateDataChannel: Data is not supported in this call."; - return nullptr; - } - if (IsSctpLike(data_channel_type())) { - rtc::scoped_refptr channel = - InternalCreateSctpDataChannel(label, config); - if (channel) { - return SctpDataChannel::CreateProxy(channel); - } - } else if (data_channel_type() == cricket::DCT_RTP) { - rtc::scoped_refptr channel = - InternalCreateRtpDataChannel(label, config); - if (channel) { - return RtpDataChannel::CreateProxy(channel); - } + + rtc::scoped_refptr channel = + InternalCreateSctpDataChannel(label, config); + if (channel) { + return SctpDataChannel::CreateProxy(channel); } return nullptr; } -rtc::scoped_refptr -DataChannelController::InternalCreateRtpDataChannel( - const std::string& label, - const DataChannelInit* config) { - RTC_DCHECK_RUN_ON(signaling_thread()); - DataChannelInit new_config = config ? 
(*config) : DataChannelInit(); - rtc::scoped_refptr channel( - RtpDataChannel::Create(this, label, new_config, signaling_thread())); - if (!channel) { - return nullptr; - } - if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) { - RTC_LOG(LS_ERROR) << "DataChannel with label " << channel->label() - << " already exists."; - return nullptr; - } - rtp_data_channels_[channel->label()] = channel; - SignalRtpDataChannelCreated_(channel.get()); - return channel; -} - rtc::scoped_refptr DataChannelController::InternalCreateSctpDataChannel( const std::string& label, @@ -384,12 +340,12 @@ void DataChannelController::OnSctpDataChannelClosed(SctpDataChannel* channel) { sctp_data_channels_to_free_.push_back(*it); sctp_data_channels_.erase(it); signaling_thread()->PostTask( - RTC_FROM_HERE, [self = weak_factory_.GetWeakPtr()] { + ToQueuedTask([self = weak_factory_.GetWeakPtr()] { if (self) { RTC_DCHECK_RUN_ON(self->signaling_thread()); self->sctp_data_channels_to_free_.clear(); } - }); + })); return; } } @@ -397,14 +353,8 @@ void DataChannelController::OnSctpDataChannelClosed(SctpDataChannel* channel) { void DataChannelController::OnTransportChannelClosed() { RTC_DCHECK_RUN_ON(signaling_thread()); - // Use a temporary copy of the RTP/SCTP DataChannel list because the + // Use a temporary copy of the SCTP DataChannel list because the // DataChannel may callback to us and try to modify the list. 
- std::map> temp_rtp_dcs; - temp_rtp_dcs.swap(rtp_data_channels_); - for (const auto& kv : temp_rtp_dcs) { - kv.second->OnTransportChannelClosed(); - } - std::vector> temp_sctp_dcs; temp_sctp_dcs.swap(sctp_data_channels_); for (const auto& channel : temp_sctp_dcs) { @@ -422,70 +372,6 @@ SctpDataChannel* DataChannelController::FindDataChannelBySid(int sid) const { return nullptr; } -void DataChannelController::UpdateLocalRtpDataChannels( - const cricket::StreamParamsVec& streams) { - std::vector existing_channels; - - RTC_DCHECK_RUN_ON(signaling_thread()); - // Find new and active data channels. - for (const cricket::StreamParams& params : streams) { - // |it->sync_label| is actually the data channel label. The reason is that - // we use the same naming of data channels as we do for - // MediaStreams and Tracks. - // For MediaStreams, the sync_label is the MediaStream label and the - // track label is the same as |streamid|. - const std::string& channel_label = params.first_stream_id(); - auto data_channel_it = rtp_data_channels()->find(channel_label); - if (data_channel_it == rtp_data_channels()->end()) { - RTC_LOG(LS_ERROR) << "channel label not found"; - continue; - } - // Set the SSRC the data channel should use for sending. - data_channel_it->second->SetSendSsrc(params.first_ssrc()); - existing_channels.push_back(data_channel_it->first); - } - - UpdateClosingRtpDataChannels(existing_channels, true); -} - -void DataChannelController::UpdateRemoteRtpDataChannels( - const cricket::StreamParamsVec& streams) { - RTC_DCHECK_RUN_ON(signaling_thread()); - - std::vector existing_channels; - - // Find new and active data channels. - for (const cricket::StreamParams& params : streams) { - // The data channel label is either the mslabel or the SSRC if the mslabel - // does not exist. Ex a=ssrc:444330170 mslabel:test1. - std::string label = params.first_stream_id().empty() - ? 
rtc::ToString(params.first_ssrc()) - : params.first_stream_id(); - auto data_channel_it = rtp_data_channels()->find(label); - if (data_channel_it == rtp_data_channels()->end()) { - // This is a new data channel. - CreateRemoteRtpDataChannel(label, params.first_ssrc()); - } else { - data_channel_it->second->SetReceiveSsrc(params.first_ssrc()); - } - existing_channels.push_back(label); - } - - UpdateClosingRtpDataChannels(existing_channels, false); -} - -cricket::DataChannelType DataChannelController::data_channel_type() const { - // TODO(bugs.webrtc.org/9987): Should be restricted to the signaling thread. - // RTC_DCHECK_RUN_ON(signaling_thread()); - return data_channel_type_; -} - -void DataChannelController::set_data_channel_type( - cricket::DataChannelType type) { - RTC_DCHECK_RUN_ON(signaling_thread()); - data_channel_type_ = type; -} - DataChannelTransportInterface* DataChannelController::data_channel_transport() const { // TODO(bugs.webrtc.org/11547): Only allow this accessor to be called on the @@ -500,58 +386,9 @@ void DataChannelController::set_data_channel_transport( data_channel_transport_ = transport; } -const std::map>* -DataChannelController::rtp_data_channels() const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return &rtp_data_channels_; -} - -void DataChannelController::UpdateClosingRtpDataChannels( - const std::vector& active_channels, - bool is_local_update) { - auto it = rtp_data_channels_.begin(); - while (it != rtp_data_channels_.end()) { - RtpDataChannel* data_channel = it->second; - if (absl::c_linear_search(active_channels, data_channel->label())) { - ++it; - continue; - } - - if (is_local_update) { - data_channel->SetSendSsrc(0); - } else { - data_channel->RemotePeerRequestClose(); - } - - if (data_channel->state() == RtpDataChannel::kClosed) { - rtp_data_channels_.erase(it); - it = rtp_data_channels_.begin(); - } else { - ++it; - } - } -} - -void DataChannelController::CreateRemoteRtpDataChannel(const std::string& label, - uint32_t 
remote_ssrc) { - if (data_channel_type() != cricket::DCT_RTP) { - return; - } - rtc::scoped_refptr channel( - InternalCreateRtpDataChannel(label, nullptr)); - if (!channel.get()) { - RTC_LOG(LS_WARNING) << "Remote peer requested a DataChannel but" - "CreateDataChannel failed."; - return; - } - channel->SetReceiveSsrc(remote_ssrc); - rtc::scoped_refptr proxy_channel = - RtpDataChannel::CreateProxy(std::move(channel)); - pc_->Observer()->OnDataChannel(std::move(proxy_channel)); -} - bool DataChannelController::DataChannelSendData( - const cricket::SendDataParams& params, + int sid, + const SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) { // TODO(bugs.webrtc.org/11547): Expect method to be called on the network @@ -560,19 +397,9 @@ bool DataChannelController::DataChannelSendData( RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(data_channel_transport()); - SendDataParams send_params; - send_params.type = ToWebrtcDataMessageType(params.type); - send_params.ordered = params.ordered; - if (params.max_rtx_count >= 0) { - send_params.max_rtx_count = params.max_rtx_count; - } else if (params.max_rtx_ms >= 0) { - send_params.max_rtx_ms = params.max_rtx_ms; - } - RTCError error = network_thread()->Invoke( - RTC_FROM_HERE, [this, params, send_params, payload] { - return data_channel_transport()->SendData(params.sid, send_params, - payload); + RTC_FROM_HERE, [this, sid, params, payload] { + return data_channel_transport()->SendData(sid, params, payload); }); if (error.ok()) { @@ -590,13 +417,15 @@ bool DataChannelController::DataChannelSendData( void DataChannelController::NotifyDataChannelsOfTransportCreated() { RTC_DCHECK_RUN_ON(network_thread()); - data_channel_transport_invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread(), [this] { - RTC_DCHECK_RUN_ON(signaling_thread()); - for (const auto& channel : sctp_data_channels_) { - channel->OnTransportChannelCreated(); + signaling_thread()->PostTask( + ToQueuedTask([self = 
weak_factory_.GetWeakPtr()] { + if (self) { + RTC_DCHECK_RUN_ON(self->signaling_thread()); + for (const auto& channel : self->sctp_data_channels_) { + channel->OnTransportChannelCreated(); + } } - }); + })); } rtc::Thread* DataChannelController::network_thread() const { diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h index 675928882..05fcff0e0 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h @@ -11,22 +11,36 @@ #ifndef PC_DATA_CHANNEL_CONTROLLER_H_ #define PC_DATA_CHANNEL_CONTROLLER_H_ +#include + #include #include #include #include +#include "api/data_channel_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/transport/data_channel_transport_interface.h" +#include "media/base/media_channel.h" +#include "media/base/media_engine.h" +#include "media/base/stream_params.h" #include "pc/channel.h" -#include "pc/rtp_data_channel.h" +#include "pc/data_channel_utils.h" #include "pc/sctp_data_channel.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" namespace webrtc { class PeerConnection; -class DataChannelController : public RtpDataChannelProviderInterface, - public SctpDataChannelProviderInterface, +class DataChannelController : public SctpDataChannelProviderInterface, public DataChannelSink { public: explicit DataChannelController(PeerConnection* pc) : pc_(pc) {} @@ -37,13 +51,12 @@ class DataChannelController : public RtpDataChannelProviderInterface, DataChannelController(DataChannelController&&) = delete; DataChannelController& operator=(DataChannelController&& other) = delete; - // Implements RtpDataChannelProviderInterface/ + // 
Implements // SctpDataChannelProviderInterface. - bool SendData(const cricket::SendDataParams& params, + bool SendData(int sid, + const SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) override; - bool ConnectDataChannel(RtpDataChannel* webrtc_data_channel) override; - void DisconnectDataChannel(RtpDataChannel* webrtc_data_channel) override; bool ConnectDataChannel(SctpDataChannel* webrtc_data_channel) override; void DisconnectDataChannel(SctpDataChannel* webrtc_data_channel) override; void AddSctpDataStream(int sid) override; @@ -88,32 +101,11 @@ class DataChannelController : public RtpDataChannelProviderInterface, RTC_DCHECK_RUN_ON(signaling_thread()); return !sctp_data_channels_.empty(); } - bool HasRtpDataChannels() const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return !rtp_data_channels_.empty(); - } - - void UpdateLocalRtpDataChannels(const cricket::StreamParamsVec& streams); - void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams); // Accessors - cricket::DataChannelType data_channel_type() const; - void set_data_channel_type(cricket::DataChannelType type); - cricket::RtpDataChannel* rtp_data_channel() const { - return rtp_data_channel_; - } - void set_rtp_data_channel(cricket::RtpDataChannel* channel) { - rtp_data_channel_ = channel; - } DataChannelTransportInterface* data_channel_transport() const; void set_data_channel_transport(DataChannelTransportInterface* transport); - const std::map>* - rtp_data_channels() const; - sigslot::signal1& SignalRtpDataChannelCreated() { - RTC_DCHECK_RUN_ON(signaling_thread()); - return SignalRtpDataChannelCreated_; - } sigslot::signal1& SignalSctpDataChannelCreated() { RTC_DCHECK_RUN_ON(signaling_thread()); return SignalSctpDataChannelCreated_; @@ -124,10 +116,6 @@ class DataChannelController : public RtpDataChannelProviderInterface, void OnSctpDataChannelClosed(SctpDataChannel* channel); private: - rtc::scoped_refptr InternalCreateRtpDataChannel( - 
const std::string& label, - const DataChannelInit* config) /* RTC_RUN_ON(signaling_thread()) */; - rtc::scoped_refptr InternalCreateSctpDataChannel( const std::string& label, const InternalDataChannelInit* @@ -143,16 +131,9 @@ class DataChannelController : public RtpDataChannelProviderInterface, const InternalDataChannelInit& config) RTC_RUN_ON(signaling_thread()); - void CreateRemoteRtpDataChannel(const std::string& label, - uint32_t remote_ssrc) - RTC_RUN_ON(signaling_thread()); - - void UpdateClosingRtpDataChannels( - const std::vector& active_channels, - bool is_local_update) RTC_RUN_ON(signaling_thread()); - // Called from SendData when data_channel_transport() is true. - bool DataChannelSendData(const cricket::SendDataParams& params, + bool DataChannelSendData(int sid, + const SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result); @@ -163,17 +144,6 @@ class DataChannelController : public RtpDataChannelProviderInterface, rtc::Thread* network_thread() const; rtc::Thread* signaling_thread() const; - // Specifies which kind of data channel is allowed. This is controlled - // by the chrome command-line flag and constraints: - // 1. If chrome command-line switch 'enable-sctp-data-channels' is enabled, - // constraint kEnableDtlsSrtp is true, and constaint kEnableRtpDataChannels is - // not set or false, SCTP is allowed (DCT_SCTP); - // 2. If constraint kEnableRtpDataChannels is true, RTP is allowed (DCT_RTP); - // 3. If both 1&2 are false, data channel is not allowed (DCT_NONE). - cricket::DataChannelType data_channel_type_ = - cricket::DCT_NONE; // TODO(bugs.webrtc.org/9987): Accessed on both - // signaling and network thread. - // Plugin transport used for data channels. Pointer may be accessed and // checked from any thread, but the object may only be touched on the // network thread. 
@@ -185,22 +155,12 @@ class DataChannelController : public RtpDataChannelProviderInterface, bool data_channel_transport_ready_to_send_ RTC_GUARDED_BY(signaling_thread()) = false; - // |rtp_data_channel_| is used if in RTP data channel mode, - // |data_channel_transport_| when using SCTP. - cricket::RtpDataChannel* rtp_data_channel_ = nullptr; - // TODO(bugs.webrtc.org/9987): Accessed on both - // signaling and some other thread. - SctpSidAllocator sid_allocator_ /* RTC_GUARDED_BY(signaling_thread()) */; std::vector> sctp_data_channels_ RTC_GUARDED_BY(signaling_thread()); std::vector> sctp_data_channels_to_free_ RTC_GUARDED_BY(signaling_thread()); - // Map of label -> DataChannel - std::map> rtp_data_channels_ - RTC_GUARDED_BY(signaling_thread()); - // Signals from |data_channel_transport_|. These are invoked on the // signaling thread. // TODO(bugs.webrtc.org/11547): These '_s' signals likely all belong on the @@ -216,18 +176,13 @@ class DataChannelController : public RtpDataChannelProviderInterface, sigslot::signal1 SignalDataChannelTransportChannelClosed_s RTC_GUARDED_BY(signaling_thread()); - sigslot::signal1 SignalRtpDataChannelCreated_ - RTC_GUARDED_BY(signaling_thread()); sigslot::signal1 SignalSctpDataChannelCreated_ RTC_GUARDED_BY(signaling_thread()); - // Used from the network thread to invoke data channel transport signals on - // the signaling thread. - rtc::AsyncInvoker data_channel_transport_invoker_ - RTC_GUARDED_BY(network_thread()); - // Owning PeerConnection. PeerConnection* const pc_; + // The weak pointers must be dereferenced and invalidated on the signalling + // thread only. 
rtc::WeakPtrFactory weak_factory_{this}; }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_utils.cc b/TMessagesProj/jni/voip/webrtc/pc/data_channel_utils.cc index 51d6af941..a772241c3 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_utils.cc @@ -10,6 +10,10 @@ #include "pc/data_channel_utils.h" +#include + +#include "rtc_base/checks.h" + namespace webrtc { bool PacketQueue::Empty() const { @@ -47,8 +51,4 @@ void PacketQueue::Swap(PacketQueue* other) { other->packets_.swap(packets_); } -bool IsSctpLike(cricket::DataChannelType type) { - return type == cricket::DCT_SCTP; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_utils.h b/TMessagesProj/jni/voip/webrtc/pc/data_channel_utils.h index 13c6620cd..85cacdb56 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_utils.h +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_utils.h @@ -11,6 +11,8 @@ #ifndef PC_DATA_CHANNEL_UTILS_H_ #define PC_DATA_CHANNEL_UTILS_H_ +#include +#include #include #include #include @@ -55,8 +57,6 @@ struct DataChannelStats { uint64_t bytes_received; }; -bool IsSctpLike(cricket::DataChannelType type); - } // namespace webrtc #endif // PC_DATA_CHANNEL_UTILS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.cc index dacbcb411..f272ab79c 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.cc @@ -166,7 +166,6 @@ void DtlsSrtpTransport::SetupRtpDtlsSrtp() { static_cast(send_key.size()), send_extension_ids, selected_crypto_suite, &recv_key[0], static_cast(recv_key.size()), recv_extension_ids)) { - SignalDtlsSrtpSetupFailure(this, /*rtcp=*/false); RTC_LOG(LS_WARNING) << "DTLS-SRTP key installation for RTP failed"; } } @@ -198,7 +197,6 @@ void DtlsSrtpTransport::SetupRtcpDtlsSrtp() { selected_crypto_suite, &rtcp_recv_key[0], 
static_cast(rtcp_recv_key.size()), recv_extension_ids)) { - SignalDtlsSrtpSetupFailure(this, /*rtcp=*/true); RTC_LOG(LS_WARNING) << "DTLS-SRTP key installation for RTCP failed"; } } @@ -277,14 +275,17 @@ void DtlsSrtpTransport::SetDtlsTransport( } if (*old_dtls_transport) { - (*old_dtls_transport)->SignalDtlsState.disconnect(this); + (*old_dtls_transport)->UnsubscribeDtlsState(this); } *old_dtls_transport = new_dtls_transport; if (new_dtls_transport) { - new_dtls_transport->SignalDtlsState.connect( - this, &DtlsSrtpTransport::OnDtlsState); + new_dtls_transport->SubscribeDtlsState( + this, [this](cricket::DtlsTransportInternal* transport, + cricket::DtlsTransportState state) { + OnDtlsState(transport, state); + }); } } @@ -303,7 +304,9 @@ void DtlsSrtpTransport::OnDtlsState(cricket::DtlsTransportInternal* transport, RTC_DCHECK(transport == rtp_dtls_transport_ || transport == rtcp_dtls_transport_); - SignalDtlsStateChange(); + if (on_dtls_state_change_) { + on_dtls_state_change_(); + } if (state != cricket::DTLS_TRANSPORT_CONNECTED) { ResetParams(); @@ -318,4 +321,8 @@ void DtlsSrtpTransport::OnWritableState( MaybeSetupDtlsSrtp(); } +void DtlsSrtpTransport::SetOnDtlsStateChange( + std::function callback) { + on_dtls_state_change_ = std::move(callback); +} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h index f50928fc8..bc82fd5a9 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h @@ -11,6 +11,7 @@ #ifndef PC_DTLS_SRTP_TRANSPORT_H_ #define PC_DTLS_SRTP_TRANSPORT_H_ +#include #include #include "absl/types/optional.h" @@ -45,8 +46,7 @@ class DtlsSrtpTransport : public SrtpTransport { void UpdateRecvEncryptedHeaderExtensionIds( const std::vector& recv_extension_ids); - sigslot::signal SignalDtlsSrtpSetupFailure; - sigslot::signal<> SignalDtlsStateChange; + void SetOnDtlsStateChange(std::function 
callback); RTCError SetSrtpSendKey(const cricket::CryptoParams& params) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, @@ -63,16 +63,6 @@ class DtlsSrtpTransport : public SrtpTransport { active_reset_srtp_params_ = active_reset_srtp_params; } - virtual void OnErrorDemuxingPacket(uint32_t ssrc) override { - if (SignalOnErrorDemuxingPacket_) { - SignalOnErrorDemuxingPacket_(ssrc); - } - } - - void SetOnErrorDemuxingPacket(std::function f) { - SignalOnErrorDemuxingPacket_ = std::move(f); - } - private: bool IsDtlsActive(); bool IsDtlsConnected(); @@ -106,8 +96,7 @@ class DtlsSrtpTransport : public SrtpTransport { absl::optional> recv_extension_ids_; bool active_reset_srtp_params_ = false; - - std::function SignalOnErrorDemuxingPacket_ = nullptr; + std::function on_dtls_state_change_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc index 550ede790..1369db1f5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc @@ -12,7 +12,13 @@ #include +#include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "pc/ice_transport.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/ssl_certificate.h" namespace webrtc { @@ -42,11 +48,14 @@ DtlsTransport::DtlsTransport( : owner_thread_(rtc::Thread::Current()), info_(DtlsTransportState::kNew), internal_dtls_transport_(std::move(internal)), - ice_transport_(new rtc::RefCountedObject( + ice_transport_(rtc::make_ref_counted( internal_dtls_transport_->ice_transport())) { RTC_DCHECK(internal_dtls_transport_.get()); - internal_dtls_transport_->SignalDtlsState.connect( - this, &DtlsTransport::OnInternalDtlsState); + internal_dtls_transport_->SubscribeDtlsState( + [this](cricket::DtlsTransportInternal* transport, + cricket::DtlsTransportState state) { + 
OnInternalDtlsState(transport, state); + }); UpdateInformation(); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.h b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.h index ff8108ca9..893b1263a 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.h @@ -17,7 +17,11 @@ #include "api/ice_transport_interface.h" #include "api/scoped_refptr.h" #include "p2p/base/dtls_transport.h" +#include "p2p/base/dtls_transport_internal.h" +#include "pc/ice_transport.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -25,8 +29,7 @@ class IceTransportWithPointer; // This implementation wraps a cricket::DtlsTransport, and takes // ownership of it. -class DtlsTransport : public DtlsTransportInterface, - public sigslot::has_slots<> { +class DtlsTransport : public DtlsTransportInterface { public: // This object must be constructed and updated on a consistent thread, // the same thread as the one the cricket::DtlsTransportInternal object diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc index 10378028c..67c3fac13 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc @@ -18,6 +18,7 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace webrtc { @@ -64,9 +65,7 @@ rtc::scoped_refptr DtmfSender::Create( if (!signaling_thread) { return nullptr; } - rtc::scoped_refptr dtmf_sender( - new rtc::RefCountedObject(signaling_thread, provider)); - return dtmf_sender; + return rtc::make_ref_counted(signaling_thread, provider); } DtmfSender::DtmfSender(rtc::Thread* signaling_thread, @@ -86,19 +85,22 @@ DtmfSender::DtmfSender(rtc::Thread* signaling_thread, } DtmfSender::~DtmfSender() { + 
RTC_DCHECK_RUN_ON(signaling_thread_); StopSending(); } void DtmfSender::RegisterObserver(DtmfSenderObserverInterface* observer) { + RTC_DCHECK_RUN_ON(signaling_thread_); observer_ = observer; } void DtmfSender::UnregisterObserver() { + RTC_DCHECK_RUN_ON(signaling_thread_); observer_ = nullptr; } bool DtmfSender::CanInsertDtmf() { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); if (!provider_) { return false; } @@ -109,7 +111,7 @@ bool DtmfSender::InsertDtmf(const std::string& tones, int duration, int inter_tone_gap, int comma_delay) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); if (duration > kDtmfMaxDurationMs || duration < kDtmfMinDurationMs || inter_tone_gap < kDtmfMinGapMs || comma_delay < kDtmfMinGapMs) { @@ -132,38 +134,49 @@ bool DtmfSender::InsertDtmf(const std::string& tones, duration_ = duration; inter_tone_gap_ = inter_tone_gap; comma_delay_ = comma_delay; - // Clear the previous queue. - dtmf_driver_.Clear(); - // Kick off a new DTMF task queue. + + // Cancel any remaining tasks for previous tones. + if (safety_flag_) { + safety_flag_->SetNotAlive(); + } + safety_flag_ = PendingTaskSafetyFlag::Create(); + // Kick off a new DTMF task. 
QueueInsertDtmf(RTC_FROM_HERE, 1 /*ms*/); return true; } std::string DtmfSender::tones() const { + RTC_DCHECK_RUN_ON(signaling_thread_); return tones_; } int DtmfSender::duration() const { + RTC_DCHECK_RUN_ON(signaling_thread_); return duration_; } int DtmfSender::inter_tone_gap() const { + RTC_DCHECK_RUN_ON(signaling_thread_); return inter_tone_gap_; } int DtmfSender::comma_delay() const { + RTC_DCHECK_RUN_ON(signaling_thread_); return comma_delay_; } void DtmfSender::QueueInsertDtmf(const rtc::Location& posted_from, uint32_t delay_ms) { - dtmf_driver_.AsyncInvokeDelayed( - posted_from, signaling_thread_, [this] { DoInsertDtmf(); }, delay_ms); + signaling_thread_->PostDelayedTask( + ToQueuedTask(safety_flag_, + [this] { + RTC_DCHECK_RUN_ON(signaling_thread_); + DoInsertDtmf(); + }), + delay_ms); } void DtmfSender::DoInsertDtmf() { - RTC_DCHECK(signaling_thread_->IsCurrent()); - // Get the first DTMF tone from the tone buffer. Unrecognized characters will // be ignored and skipped. size_t first_tone_pos = tones_.find_first_of(kDtmfValidTones); @@ -222,13 +235,17 @@ void DtmfSender::DoInsertDtmf() { } void DtmfSender::OnProviderDestroyed() { + RTC_DCHECK_RUN_ON(signaling_thread_); + RTC_LOG(LS_INFO) << "The Dtmf provider is deleted. 
Clear the sending queue."; StopSending(); provider_ = nullptr; } void DtmfSender::StopSending() { - dtmf_driver_.Clear(); + if (safety_flag_) { + safety_flag_->SetNotAlive(); + } } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h index e332a7ef5..5cf7b2eba 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h +++ b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h @@ -11,13 +11,18 @@ #ifndef PC_DTMF_SENDER_H_ #define PC_DTMF_SENDER_H_ +#include + #include #include "api/dtmf_sender_interface.h" #include "api/proxy.h" -#include "rtc_base/async_invoker.h" +#include "api/scoped_refptr.h" #include "rtc_base/constructor_magic.h" +#include "rtc_base/location.h" #include "rtc_base/ref_count.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" // DtmfSender is the native implementation of the RTCDTMFSender defined by @@ -70,32 +75,35 @@ class DtmfSender : public DtmfSenderInterface, public sigslot::has_slots<> { private: DtmfSender(); - void QueueInsertDtmf(const rtc::Location& posted_from, uint32_t delay_ms); + void QueueInsertDtmf(const rtc::Location& posted_from, uint32_t delay_ms) + RTC_RUN_ON(signaling_thread_); // The DTMF sending task. - void DoInsertDtmf(); + void DoInsertDtmf() RTC_RUN_ON(signaling_thread_); void OnProviderDestroyed(); - void StopSending(); + void StopSending() RTC_RUN_ON(signaling_thread_); - DtmfSenderObserverInterface* observer_; + DtmfSenderObserverInterface* observer_ RTC_GUARDED_BY(signaling_thread_); rtc::Thread* signaling_thread_; - DtmfProviderInterface* provider_; - std::string tones_; - int duration_; - int inter_tone_gap_; - int comma_delay_; - // Invoker for running delayed tasks which feed the DTMF provider one tone at - // a time. 
- rtc::AsyncInvoker dtmf_driver_; + DtmfProviderInterface* provider_ RTC_GUARDED_BY(signaling_thread_); + std::string tones_ RTC_GUARDED_BY(signaling_thread_); + int duration_ RTC_GUARDED_BY(signaling_thread_); + int inter_tone_gap_ RTC_GUARDED_BY(signaling_thread_); + int comma_delay_ RTC_GUARDED_BY(signaling_thread_); + + // For cancelling the tasks which feed the DTMF provider one tone at a time. + rtc::scoped_refptr safety_flag_ RTC_GUARDED_BY( + signaling_thread_) RTC_PT_GUARDED_BY(signaling_thread_) = nullptr; RTC_DISALLOW_COPY_AND_ASSIGN(DtmfSender); }; // Define proxy for DtmfSenderInterface. -BEGIN_SIGNALING_PROXY_MAP(DtmfSender) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +BEGIN_PRIMARY_PROXY_MAP(DtmfSender) + +PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*) PROXY_METHOD0(void, UnregisterObserver) PROXY_METHOD0(bool, CanInsertDtmf) diff --git a/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc b/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc index 2400fd516..0daf8e445 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc @@ -12,7 +12,9 @@ #include +#include #include // For std::isdigit. +#include #include #include "p2p/base/port_interface.h" @@ -21,6 +23,7 @@ #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/socket_address.h" +#include "rtc_base/string_encode.h" namespace webrtc { @@ -31,6 +34,15 @@ static const int kDefaultStunPort = 3478; static const int kDefaultStunTlsPort = 5349; static const char kTransport[] = "transport"; +// Allowed characters in hostname per RFC 3986 Appendix A "reg-name" +static const char kRegNameCharacters[] = + "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789" + "-._~" // unreserved + "%" // pct-encoded + "!$&'()*+,;="; // sub-delims + // NOTE: Must be in the same order as the ServiceType enum. 
static const char* kValidIceServiceTypes[] = {"stun", "stuns", "turn", "turns"}; @@ -99,6 +111,7 @@ static bool ParseHostnameAndPortFromString(const std::string& in_str, int* port) { RTC_DCHECK(host->empty()); if (in_str.at(0) == '[') { + // IP_literal syntax std::string::size_type closebracket = in_str.rfind(']'); if (closebracket != std::string::npos) { std::string::size_type colonpos = in_str.find(':', closebracket); @@ -113,6 +126,7 @@ static bool ParseHostnameAndPortFromString(const std::string& in_str, return false; } } else { + // IPv4address or reg-name syntax std::string::size_type colonpos = in_str.find(':'); if (std::string::npos != colonpos) { if (!ParsePort(in_str.substr(colonpos + 1, std::string::npos), port)) { @@ -122,6 +136,10 @@ static bool ParseHostnameAndPortFromString(const std::string& in_str, } else { *host = in_str; } + // RFC 3986 section 3.2.2 and Appendix A - "reg-name" syntax + if (host->find_first_not_of(kRegNameCharacters) != std::string::npos) { + return false; + } } return !host->empty(); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/ice_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/ice_transport.cc index ccc5ecd7f..205846755 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/ice_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/ice_transport.cc @@ -10,8 +10,7 @@ #include "pc/ice_transport.h" -#include -#include +#include "api/sequence_checker.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h b/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h index c1529de6b..11f3de5d2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/ice_transport.h @@ -12,8 +12,10 @@ #define PC_ICE_TRANSPORT_H_ #include "api/ice_transport_interface.h" +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { diff --git 
a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.cc b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.cc index c9506b3c5..801cef721 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.cc @@ -10,13 +10,10 @@ #include "pc/jitter_buffer_delay.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" namespace { constexpr int kDefaultDelay = 0; @@ -25,43 +22,21 @@ constexpr int kMaximumDelayMs = 10000; namespace webrtc { -JitterBufferDelay::JitterBufferDelay(rtc::Thread* worker_thread) - : signaling_thread_(rtc::Thread::Current()), worker_thread_(worker_thread) { - RTC_DCHECK(worker_thread_); -} - -void JitterBufferDelay::OnStart(cricket::Delayable* media_channel, - uint32_t ssrc) { - RTC_DCHECK_RUN_ON(signaling_thread_); - - media_channel_ = media_channel; - ssrc_ = ssrc; - - // Trying to apply cached delay for the audio stream. - if (cached_delay_seconds_) { - Set(cached_delay_seconds_.value()); - } -} - -void JitterBufferDelay::OnStop() { - RTC_DCHECK_RUN_ON(signaling_thread_); - // Assume that audio stream is no longer present. - media_channel_ = nullptr; - ssrc_ = absl::nullopt; +JitterBufferDelay::JitterBufferDelay() { + worker_thread_checker_.Detach(); } void JitterBufferDelay::Set(absl::optional delay_seconds) { - RTC_DCHECK_RUN_ON(worker_thread_); - - // TODO(kuddai) propagate absl::optional deeper down as default preference. 
- int delay_ms = - rtc::saturated_cast(delay_seconds.value_or(kDefaultDelay) * 1000); - delay_ms = rtc::SafeClamp(delay_ms, 0, kMaximumDelayMs); - + RTC_DCHECK_RUN_ON(&worker_thread_checker_); cached_delay_seconds_ = delay_seconds; - if (media_channel_ && ssrc_) { - media_channel_->SetBaseMinimumPlayoutDelayMs(ssrc_.value(), delay_ms); - } +} + +int JitterBufferDelay::GetMs() const { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return rtc::SafeClamp( + rtc::saturated_cast(cached_delay_seconds_.value_or(kDefaultDelay) * + 1000), + 0, kMaximumDelayMs); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h index 8edfc6ce2..dc10e3d2b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h @@ -14,36 +14,25 @@ #include #include "absl/types/optional.h" -#include "media/base/delayable.h" -#include "pc/jitter_buffer_delay_interface.h" -#include "rtc_base/thread.h" +#include "api/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { // JitterBufferDelay converts delay from seconds to milliseconds for the // underlying media channel. It also handles cases when user sets delay before -// the start of media_channel by caching its request. Note, this class is not -// thread safe. Its thread safe version is defined in -// pc/jitter_buffer_delay_proxy.h -class JitterBufferDelay : public JitterBufferDelayInterface { +// the start of media_channel by caching its request. +class JitterBufferDelay { public: - // Must be called on signaling thread. 
- explicit JitterBufferDelay(rtc::Thread* worker_thread); + JitterBufferDelay(); - void OnStart(cricket::Delayable* media_channel, uint32_t ssrc) override; - - void OnStop() override; - - void Set(absl::optional delay_seconds) override; + void Set(absl::optional delay_seconds); + int GetMs() const; private: - // Throughout webrtc source, sometimes it is also called as |main_thread_|. - rtc::Thread* const signaling_thread_; - rtc::Thread* const worker_thread_; - // Media channel and ssrc together uniqely identify audio stream. - cricket::Delayable* media_channel_ = nullptr; - absl::optional ssrc_; - absl::optional cached_delay_seconds_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_; + absl::optional cached_delay_seconds_ + RTC_GUARDED_BY(&worker_thread_checker_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay_interface.h b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay_interface.h deleted file mode 100644 index f2132d318..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay_interface.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef PC_JITTER_BUFFER_DELAY_INTERFACE_H_ -#define PC_JITTER_BUFFER_DELAY_INTERFACE_H_ - -#include - -#include "absl/types/optional.h" -#include "media/base/delayable.h" -#include "rtc_base/ref_count.h" - -namespace webrtc { - -// JitterBufferDelay delivers user's queries to the underlying media channel. It -// can describe either video or audio delay for receiving stream. 
"Interface" -// suffix in the interface name is required to be compatible with api/proxy.cc -class JitterBufferDelayInterface : public rtc::RefCountInterface { - public: - // OnStart allows to uniqely identify to which receiving stream playout - // delay must correpond through |media_channel| and |ssrc| pair. - virtual void OnStart(cricket::Delayable* media_channel, uint32_t ssrc) = 0; - - // Indicates that underlying receiving stream is stopped. - virtual void OnStop() = 0; - - virtual void Set(absl::optional delay_seconds) = 0; -}; - -} // namespace webrtc - -#endif // PC_JITTER_BUFFER_DELAY_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay_proxy.h deleted file mode 100644 index b3380fd25..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay_proxy.h +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef PC_JITTER_BUFFER_DELAY_PROXY_H_ -#define PC_JITTER_BUFFER_DELAY_PROXY_H_ - -#include - -#include "api/proxy.h" -#include "media/base/delayable.h" -#include "pc/jitter_buffer_delay_interface.h" - -namespace webrtc { - -BEGIN_PROXY_MAP(JitterBufferDelay) -PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_METHOD2(void, OnStart, cricket::Delayable*, uint32_t) -PROXY_METHOD0(void, OnStop) -PROXY_WORKER_METHOD1(void, Set, absl::optional) -END_PROXY_MAP() - -} // namespace webrtc - -#endif // PC_JITTER_BUFFER_DELAY_PROXY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc index 4e4542182..6dacde629 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc @@ -14,6 +14,11 @@ #include "pc/webrtc_sdp.h" +// This file contains JsepIceCandidate-related functions that are not +// included in api/jsep_ice_candidate.cc. Some of these link to SDP +// parsing/serializing functions, which some users may not want. +// TODO(bugs.webrtc.org/12330): Merge the two .cc files somehow. 
+ namespace webrtc { IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid, @@ -49,6 +54,16 @@ JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid, JsepIceCandidate::~JsepIceCandidate() {} +JsepCandidateCollection JsepCandidateCollection::Clone() const { + JsepCandidateCollection new_collection; + for (const auto& candidate : candidates_) { + new_collection.candidates_.push_back(std::make_unique( + candidate->sdp_mid(), candidate->sdp_mline_index(), + candidate->candidate())); + } + return new_collection; +} + bool JsepIceCandidate::Initialize(const std::string& sdp, SdpParseError* err) { return SdpDeserializeCandidate(sdp, this, err); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc index 7f30b50d9..9de81947d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc @@ -215,6 +215,18 @@ bool JsepSessionDescription::Initialize( return true; } +std::unique_ptr JsepSessionDescription::Clone() + const { + auto new_description = std::make_unique(type_); + new_description->session_id_ = session_id_; + new_description->session_version_ = session_version_; + new_description->description_ = description_->Clone(); + for (const auto& collection : candidate_collection_) { + new_description->candidate_collection_.push_back(collection.Clone()); + } + return new_description; +} + bool JsepSessionDescription::AddCandidate( const IceCandidateInterface* candidate) { if (!candidate) diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc index 2f7615ab3..dc4649bf1 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc @@ -14,7 +14,6 @@ #include #include -#include #include // for std::pair #include "api/array_view.h" @@ -25,6 +24,7 @@ #include "rtc_base/checks.h" 
#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" #include "rtc_base/strings/string_builder.h" using webrtc::SdpType; @@ -77,7 +77,6 @@ JsepTransport::JsepTransport( std::unique_ptr unencrypted_rtp_transport, std::unique_ptr sdes_transport, std::unique_ptr dtls_srtp_transport, - std::unique_ptr datagram_rtp_transport, std::unique_ptr rtp_dtls_transport, std::unique_ptr rtcp_dtls_transport, std::unique_ptr sctp_transport) @@ -89,21 +88,20 @@ JsepTransport::JsepTransport( unencrypted_rtp_transport_(std::move(unencrypted_rtp_transport)), sdes_transport_(std::move(sdes_transport)), dtls_srtp_transport_(std::move(dtls_srtp_transport)), - rtp_dtls_transport_( - rtp_dtls_transport ? new rtc::RefCountedObject( - std::move(rtp_dtls_transport)) - : nullptr), - rtcp_dtls_transport_( - rtcp_dtls_transport - ? new rtc::RefCountedObject( - std::move(rtcp_dtls_transport)) - : nullptr), + rtp_dtls_transport_(rtp_dtls_transport + ? rtc::make_ref_counted( + std::move(rtp_dtls_transport)) + : nullptr), + rtcp_dtls_transport_(rtcp_dtls_transport + ? rtc::make_ref_counted( + std::move(rtcp_dtls_transport)) + : nullptr), sctp_data_channel_transport_( sctp_transport ? std::make_unique( sctp_transport.get()) : nullptr), sctp_transport_(sctp_transport - ? new rtc::RefCountedObject( + ? rtc::make_ref_counted( std::move(sctp_transport)) : nullptr) { RTC_DCHECK(ice_transport_); @@ -128,12 +126,6 @@ JsepTransport::JsepTransport( if (sctp_transport_) { sctp_transport_->SetDtlsTransport(rtp_dtls_transport_); } - - if (datagram_rtp_transport_ && default_rtp_transport()) { - composite_rtp_transport_ = std::make_unique( - std::vector{ - datagram_rtp_transport_.get(), default_rtp_transport()}); - } } JsepTransport::~JsepTransport() { @@ -175,23 +167,20 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( } // If doing SDES, setup the SDES crypto parameters. 
- { - rtc::CritScope scope(&accessor_lock_); - if (sdes_transport_) { - RTC_DCHECK(!unencrypted_rtp_transport_); - RTC_DCHECK(!dtls_srtp_transport_); - if (!SetSdes(jsep_description.cryptos, - jsep_description.encrypted_header_extension_ids, type, - ContentSource::CS_LOCAL)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to setup SDES crypto parameters."); - } - } else if (dtls_srtp_transport_) { - RTC_DCHECK(!unencrypted_rtp_transport_); - RTC_DCHECK(!sdes_transport_); - dtls_srtp_transport_->UpdateRecvEncryptedHeaderExtensionIds( - jsep_description.encrypted_header_extension_ids); + if (sdes_transport_) { + RTC_DCHECK(!unencrypted_rtp_transport_); + RTC_DCHECK(!dtls_srtp_transport_); + if (!SetSdes(jsep_description.cryptos, + jsep_description.encrypted_header_extension_ids, type, + ContentSource::CS_LOCAL)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to setup SDES crypto parameters."); } + } else if (dtls_srtp_transport_) { + RTC_DCHECK(!unencrypted_rtp_transport_); + RTC_DCHECK(!sdes_transport_); + dtls_srtp_transport_->UpdateRecvEncryptedHeaderExtensionIds( + jsep_description.encrypted_header_extension_ids); } bool ice_restarting = local_description_ != nullptr && @@ -212,18 +201,17 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( return error; } } - { - rtc::CritScope scope(&accessor_lock_); RTC_DCHECK(rtp_dtls_transport_->internal()); rtp_dtls_transport_->internal()->ice_transport()->SetIceParameters( ice_parameters); - if (rtcp_dtls_transport_) { - RTC_DCHECK(rtcp_dtls_transport_->internal()); - rtcp_dtls_transport_->internal()->ice_transport()->SetIceParameters( - ice_parameters); + { + if (rtcp_dtls_transport_) { + RTC_DCHECK(rtcp_dtls_transport_->internal()); + rtcp_dtls_transport_->internal()->ice_transport()->SetIceParameters( + ice_parameters); + } } - } // If PRANSWER/ANSWER is set, we should decide transport protocol type. 
if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { error = NegotiateAndSetDtlsParameters(type); @@ -232,13 +220,11 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( local_description_.reset(); return error; } - { - rtc::CritScope scope(&accessor_lock_); - if (needs_ice_restart_ && ice_restarting) { - needs_ice_restart_ = false; - RTC_LOG(LS_VERBOSE) << "needs-ice-restart flag cleared for transport " - << mid(); - } + + if (needs_ice_restart_ && ice_restarting) { + needs_ice_restart_ = false; + RTC_LOG(LS_VERBOSE) << "needs-ice-restart flag cleared for transport " + << mid(); } return webrtc::RTCError::OK(); @@ -269,27 +255,24 @@ webrtc::RTCError JsepTransport::SetRemoteJsepTransportDescription( } // If doing SDES, setup the SDES crypto parameters. - { - rtc::CritScope lock(&accessor_lock_); - if (sdes_transport_) { - RTC_DCHECK(!unencrypted_rtp_transport_); - RTC_DCHECK(!dtls_srtp_transport_); - if (!SetSdes(jsep_description.cryptos, - jsep_description.encrypted_header_extension_ids, type, - ContentSource::CS_REMOTE)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to setup SDES crypto parameters."); - } - sdes_transport_->CacheRtpAbsSendTimeHeaderExtension( - jsep_description.rtp_abs_sendtime_extn_id); - } else if (dtls_srtp_transport_) { - RTC_DCHECK(!unencrypted_rtp_transport_); - RTC_DCHECK(!sdes_transport_); - dtls_srtp_transport_->UpdateSendEncryptedHeaderExtensionIds( - jsep_description.encrypted_header_extension_ids); - dtls_srtp_transport_->CacheRtpAbsSendTimeHeaderExtension( - jsep_description.rtp_abs_sendtime_extn_id); + if (sdes_transport_) { + RTC_DCHECK(!unencrypted_rtp_transport_); + RTC_DCHECK(!dtls_srtp_transport_); + if (!SetSdes(jsep_description.cryptos, + jsep_description.encrypted_header_extension_ids, type, + ContentSource::CS_REMOTE)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to setup SDES crypto parameters."); } + 
sdes_transport_->CacheRtpAbsSendTimeHeaderExtension( + jsep_description.rtp_abs_sendtime_extn_id); + } else if (dtls_srtp_transport_) { + RTC_DCHECK(!unencrypted_rtp_transport_); + RTC_DCHECK(!sdes_transport_); + dtls_srtp_transport_->UpdateSendEncryptedHeaderExtensionIds( + jsep_description.encrypted_header_extension_ids); + dtls_srtp_transport_->CacheRtpAbsSendTimeHeaderExtension( + jsep_description.rtp_abs_sendtime_extn_id); } remote_description_.reset(new JsepTransportDescription(jsep_description)); @@ -341,7 +324,7 @@ webrtc::RTCError JsepTransport::AddRemoteCandidates( } void JsepTransport::SetNeedsIceRestartFlag() { - rtc::CritScope scope(&accessor_lock_); + RTC_DCHECK_RUN_ON(network_thread_); if (!needs_ice_restart_) { needs_ice_restart_ = true; RTC_LOG(LS_VERBOSE) << "needs-ice-restart flag set for transport " << mid(); @@ -350,7 +333,6 @@ void JsepTransport::SetNeedsIceRestartFlag() { absl::optional JsepTransport::GetDtlsRole() const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::CritScope scope(&accessor_lock_); RTC_DCHECK(rtp_dtls_transport_); RTC_DCHECK(rtp_dtls_transport_->internal()); rtc::SSLRole dtls_role; @@ -363,14 +345,16 @@ absl::optional JsepTransport::GetDtlsRole() const { bool JsepTransport::GetStats(TransportStats* stats) { RTC_DCHECK_RUN_ON(network_thread_); - rtc::CritScope scope(&accessor_lock_); stats->transport_name = mid(); stats->channel_stats.clear(); RTC_DCHECK(rtp_dtls_transport_->internal()); - bool ret = GetTransportStats(rtp_dtls_transport_->internal(), stats); + bool ret = GetTransportStats(rtp_dtls_transport_->internal(), + ICE_CANDIDATE_COMPONENT_RTP, stats); + if (rtcp_dtls_transport_) { RTC_DCHECK(rtcp_dtls_transport_->internal()); - ret &= GetTransportStats(rtcp_dtls_transport_->internal(), stats); + ret &= GetTransportStats(rtcp_dtls_transport_->internal(), + ICE_CANDIDATE_COMPONENT_RTCP, stats); } return ret; } @@ -405,7 +389,6 @@ webrtc::RTCError JsepTransport::VerifyCertificateFingerprint( void 
JsepTransport::SetActiveResetSrtpParams(bool active_reset_srtp_params) { RTC_DCHECK_RUN_ON(network_thread_); - rtc::CritScope scope(&accessor_lock_); if (dtls_srtp_transport_) { RTC_LOG(INFO) << "Setting active_reset_srtp_params of DtlsSrtpTransport to: " @@ -480,31 +463,22 @@ bool JsepTransport::SetRtcpMux(bool enable, } void JsepTransport::ActivateRtcpMux() { - { - // Don't hold the network_thread_ lock while calling other functions, - // since they might call other functions that call RTC_DCHECK_RUN_ON. - // TODO(https://crbug.com/webrtc/10318): Simplify when possible. - RTC_DCHECK_RUN_ON(network_thread_); - } - { - rtc::CritScope scope(&accessor_lock_); - if (unencrypted_rtp_transport_) { - RTC_DCHECK(!sdes_transport_); - RTC_DCHECK(!dtls_srtp_transport_); - unencrypted_rtp_transport_->SetRtcpPacketTransport(nullptr); - } else if (sdes_transport_) { - RTC_DCHECK(!unencrypted_rtp_transport_); - RTC_DCHECK(!dtls_srtp_transport_); - sdes_transport_->SetRtcpPacketTransport(nullptr); - } else if (dtls_srtp_transport_) { - RTC_DCHECK(dtls_srtp_transport_); - RTC_DCHECK(!unencrypted_rtp_transport_); - RTC_DCHECK(!sdes_transport_); - dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport_locked(), - /*rtcp_dtls_transport=*/nullptr); - } - rtcp_dtls_transport_ = nullptr; // Destroy this reference. + if (unencrypted_rtp_transport_) { + RTC_DCHECK(!sdes_transport_); + RTC_DCHECK(!dtls_srtp_transport_); + unencrypted_rtp_transport_->SetRtcpPacketTransport(nullptr); + } else if (sdes_transport_) { + RTC_DCHECK(!unencrypted_rtp_transport_); + RTC_DCHECK(!dtls_srtp_transport_); + sdes_transport_->SetRtcpPacketTransport(nullptr); + } else if (dtls_srtp_transport_) { + RTC_DCHECK(dtls_srtp_transport_); + RTC_DCHECK(!unencrypted_rtp_transport_); + RTC_DCHECK(!sdes_transport_); + dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport(), + /*rtcp_dtls_transport=*/nullptr); } + rtcp_dtls_transport_ = nullptr; // Destroy this reference. 
// Notify the JsepTransportController to update the aggregate states. SignalRtcpMuxActive(); } @@ -696,17 +670,12 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole( } bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport, + int component, TransportStats* stats) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(dtls_transport); TransportChannelStats substats; - if (rtcp_dtls_transport_) { - substats.component = dtls_transport == rtcp_dtls_transport_->internal() - ? ICE_CANDIDATE_COMPONENT_RTCP - : ICE_CANDIDATE_COMPONENT_RTP; - } else { - substats.component = ICE_CANDIDATE_COMPONENT_RTP; - } + substats.component = component; dtls_transport->GetSslVersionBytes(&substats.ssl_version_bytes); dtls_transport->GetSrtpCryptoSuite(&substats.srtp_crypto_suite); dtls_transport->GetSslCipherSuite(&substats.ssl_cipher_suite); diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h index 11c8168d9..5e8cae0ec 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h @@ -18,28 +18,38 @@ #include "absl/types/optional.h" #include "api/candidate.h" +#include "api/crypto_params.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/transport/data_channel_transport_interface.h" #include "media/sctp/sctp_transport_internal.h" #include "p2p/base/dtls_transport.h" +#include "p2p/base/dtls_transport_internal.h" +#include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" -#include "pc/composite_rtp_transport.h" #include "pc/dtls_srtp_transport.h" #include "pc/dtls_transport.h" #include "pc/rtcp_mux_filter.h" #include "pc/rtp_transport.h" +#include "pc/rtp_transport_internal.h" #include "pc/sctp_transport.h" #include 
"pc/session_description.h" #include "pc/srtp_filter.h" #include "pc/srtp_transport.h" #include "pc/transport_stats.h" +#include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace cricket { @@ -89,7 +99,6 @@ class JsepTransport : public sigslot::has_slots<> { std::unique_ptr unencrypted_rtp_transport, std::unique_ptr sdes_transport, std::unique_ptr dtls_srtp_transport, - std::unique_ptr datagram_rtp_transport, std::unique_ptr rtp_dtls_transport, std::unique_ptr rtcp_dtls_transport, std::unique_ptr sctp_transport); @@ -115,38 +124,36 @@ class JsepTransport : public sigslot::has_slots<> { webrtc::RTCError SetLocalJsepTransportDescription( const JsepTransportDescription& jsep_description, - webrtc::SdpType type) RTC_LOCKS_EXCLUDED(accessor_lock_); + webrtc::SdpType type); // Set the remote TransportDescription to be used by DTLS and ICE channels // that are part of this Transport. webrtc::RTCError SetRemoteJsepTransportDescription( const JsepTransportDescription& jsep_description, - webrtc::SdpType type) RTC_LOCKS_EXCLUDED(accessor_lock_); - webrtc::RTCError AddRemoteCandidates(const Candidates& candidates) - RTC_LOCKS_EXCLUDED(accessor_lock_); + webrtc::SdpType type); + webrtc::RTCError AddRemoteCandidates(const Candidates& candidates); // Set the "needs-ice-restart" flag as described in JSEP. After the flag is // set, offers should generate new ufrags/passwords until an ICE restart // occurs. // - // This and the below method can be called safely from any thread as long as - // SetXTransportDescription is not in progress. - void SetNeedsIceRestartFlag() RTC_LOCKS_EXCLUDED(accessor_lock_); + // This and |needs_ice_restart()| must be called on the network thread. 
+ void SetNeedsIceRestartFlag(); + // Returns true if the ICE restart flag above was set, and no ICE restart has // occurred yet for this transport (by applying a local description with // changed ufrag/password). - bool needs_ice_restart() const RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); + bool needs_ice_restart() const { + RTC_DCHECK_RUN_ON(network_thread_); return needs_ice_restart_; } // Returns role if negotiated, or empty absl::optional if it hasn't been // negotiated yet. - absl::optional GetDtlsRole() const - RTC_LOCKS_EXCLUDED(accessor_lock_); + absl::optional GetDtlsRole() const; // TODO(deadbeef): Make this const. See comment in transportcontroller.h. - bool GetStats(TransportStats* stats) RTC_LOCKS_EXCLUDED(accessor_lock_); + bool GetStats(TransportStats* stats); const JsepTransportDescription* local_description() const { RTC_DCHECK_RUN_ON(network_thread_); @@ -158,71 +165,61 @@ class JsepTransport : public sigslot::has_slots<> { return remote_description_.get(); } - webrtc::RtpTransportInternal* rtp_transport() const - RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); - if (composite_rtp_transport_) { - return composite_rtp_transport_.get(); - } else if (datagram_rtp_transport_) { - return datagram_rtp_transport_.get(); - } else { - return default_rtp_transport(); + // Returns the rtp transport, if any. 
+ webrtc::RtpTransportInternal* rtp_transport() const { + if (dtls_srtp_transport_) { + return dtls_srtp_transport_.get(); } + if (sdes_transport_) { + return sdes_transport_.get(); + } + if (unencrypted_rtp_transport_) { + return unencrypted_rtp_transport_.get(); + } + return nullptr; } - const DtlsTransportInternal* rtp_dtls_transport() const - RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); + const DtlsTransportInternal* rtp_dtls_transport() const { if (rtp_dtls_transport_) { return rtp_dtls_transport_->internal(); - } else { - return nullptr; } + return nullptr; } - DtlsTransportInternal* rtp_dtls_transport() - RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); - return rtp_dtls_transport_locked(); + DtlsTransportInternal* rtp_dtls_transport() { + if (rtp_dtls_transport_) { + return rtp_dtls_transport_->internal(); + } + return nullptr; } - const DtlsTransportInternal* rtcp_dtls_transport() const - RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); + const DtlsTransportInternal* rtcp_dtls_transport() const { + RTC_DCHECK_RUN_ON(network_thread_); if (rtcp_dtls_transport_) { return rtcp_dtls_transport_->internal(); - } else { - return nullptr; } + return nullptr; } - DtlsTransportInternal* rtcp_dtls_transport() - RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); + DtlsTransportInternal* rtcp_dtls_transport() { + RTC_DCHECK_RUN_ON(network_thread_); if (rtcp_dtls_transport_) { return rtcp_dtls_transport_->internal(); - } else { - return nullptr; } + return nullptr; } - rtc::scoped_refptr RtpDtlsTransport() - RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); + rtc::scoped_refptr RtpDtlsTransport() { return rtp_dtls_transport_; } - rtc::scoped_refptr SctpTransport() const - RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); + rtc::scoped_refptr SctpTransport() const { return sctp_transport_; } // 
TODO(bugs.webrtc.org/9719): Delete method, update callers to use // SctpTransport() instead. - webrtc::DataChannelTransportInterface* data_channel_transport() const - RTC_LOCKS_EXCLUDED(accessor_lock_) { - rtc::CritScope scope(&accessor_lock_); + webrtc::DataChannelTransportInterface* data_channel_transport() const { if (sctp_data_channel_transport_) { return sctp_data_channel_transport_.get(); } @@ -247,24 +244,14 @@ class JsepTransport : public sigslot::has_slots<> { void SetActiveResetSrtpParams(bool active_reset_srtp_params); private: - DtlsTransportInternal* rtp_dtls_transport_locked() - RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_) { - if (rtp_dtls_transport_) { - return rtp_dtls_transport_->internal(); - } else { - return nullptr; - } - } - bool SetRtcpMux(bool enable, webrtc::SdpType type, ContentSource source); - void ActivateRtcpMux(); + void ActivateRtcpMux() RTC_RUN_ON(network_thread_); bool SetSdes(const std::vector& cryptos, const std::vector& encrypted_extension_ids, webrtc::SdpType type, - ContentSource source) - RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_); + ContentSource source); // Negotiates and sets the DTLS parameters based on the current local and // remote transport description, such as the DTLS role to use, and whether @@ -281,8 +268,7 @@ class JsepTransport : public sigslot::has_slots<> { webrtc::SdpType local_description_type, ConnectionRole local_connection_role, ConnectionRole remote_connection_role, - absl::optional* negotiated_dtls_role) - RTC_LOCKS_EXCLUDED(accessor_lock_); + absl::optional* negotiated_dtls_role); // Pushes down the ICE parameters from the remote description. 
void SetRemoteIceParameters(const IceParameters& ice_parameters, @@ -295,31 +281,14 @@ class JsepTransport : public sigslot::has_slots<> { rtc::SSLFingerprint* remote_fingerprint); bool GetTransportStats(DtlsTransportInternal* dtls_transport, - TransportStats* stats) - RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_); - - // Returns the default (non-datagram) rtp transport, if any. - webrtc::RtpTransportInternal* default_rtp_transport() const - RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_) { - if (dtls_srtp_transport_) { - return dtls_srtp_transport_.get(); - } else if (sdes_transport_) { - return sdes_transport_.get(); - } else if (unencrypted_rtp_transport_) { - return unencrypted_rtp_transport_.get(); - } else { - return nullptr; - } - } + int component, + TransportStats* stats); // Owning thread, for safety checks const rtc::Thread* const network_thread_; - // Critical scope for fields accessed off-thread - // TODO(https://bugs.webrtc.org/10300): Stop doing this. - rtc::RecursiveCriticalSection accessor_lock_; const std::string mid_; // needs-ice-restart bit as described in JSEP. - bool needs_ice_restart_ RTC_GUARDED_BY(accessor_lock_) = false; + bool needs_ice_restart_ RTC_GUARDED_BY(network_thread_) = false; rtc::scoped_refptr local_certificate_ RTC_GUARDED_BY(network_thread_); std::unique_ptr local_description_ @@ -334,31 +303,19 @@ class JsepTransport : public sigslot::has_slots<> { // To avoid downcasting and make it type safe, keep three unique pointers for // different SRTP mode and only one of these is non-nullptr. 
- std::unique_ptr unencrypted_rtp_transport_ - RTC_GUARDED_BY(accessor_lock_); - std::unique_ptr sdes_transport_ - RTC_GUARDED_BY(accessor_lock_); - std::unique_ptr dtls_srtp_transport_ - RTC_GUARDED_BY(accessor_lock_); + const std::unique_ptr unencrypted_rtp_transport_; + const std::unique_ptr sdes_transport_; + const std::unique_ptr dtls_srtp_transport_; - // If multiple RTP transports are in use, |composite_rtp_transport_| will be - // passed to callers. This is only valid for offer-only, receive-only - // scenarios, as it is not possible for the composite to correctly choose - // which transport to use for sending. - std::unique_ptr composite_rtp_transport_ - RTC_GUARDED_BY(accessor_lock_); - - rtc::scoped_refptr rtp_dtls_transport_ - RTC_GUARDED_BY(accessor_lock_); + const rtc::scoped_refptr rtp_dtls_transport_; + // The RTCP transport is const for all usages, except that it is cleared + // when RTCP multiplexing is turned on; this happens on the network thread. rtc::scoped_refptr rtcp_dtls_transport_ - RTC_GUARDED_BY(accessor_lock_); - rtc::scoped_refptr datagram_dtls_transport_ - RTC_GUARDED_BY(accessor_lock_); + RTC_GUARDED_BY(network_thread_); - std::unique_ptr - sctp_data_channel_transport_ RTC_GUARDED_BY(accessor_lock_); - rtc::scoped_refptr sctp_transport_ - RTC_GUARDED_BY(accessor_lock_); + const std::unique_ptr + sctp_data_channel_transport_; + const rtc::scoped_refptr sctp_transport_; SrtpFilter sdes_negotiator_ RTC_GUARDED_BY(network_thread_); RtcpMuxFilter rtcp_mux_negotiator_ RTC_GUARDED_BY(network_thread_); @@ -369,9 +326,6 @@ class JsepTransport : public sigslot::has_slots<> { absl::optional> recv_extension_ids_ RTC_GUARDED_BY(network_thread_); - std::unique_ptr datagram_rtp_transport_ - RTC_GUARDED_BY(accessor_lock_); - RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransport); }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc index 0b0532002..372f4f69a 100644 --- 
a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc @@ -10,92 +10,67 @@ #include "pc/jsep_transport_controller.h" +#include + +#include #include #include #include "absl/algorithm/container.h" -#include "api/ice_transport_factory.h" +#include "api/rtp_parameters.h" +#include "api/sequence_checker.h" +#include "api/transport/enums.h" +#include "media/sctp/sctp_transport_internal.h" +#include "p2p/base/dtls_transport.h" #include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" -#include "pc/srtp_filter.h" -#include "rtc_base/bind.h" #include "rtc_base/checks.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/socket_address.h" #include "rtc_base/thread.h" using webrtc::SdpType; +namespace webrtc { + namespace { -webrtc::RTCError VerifyCandidate(const cricket::Candidate& cand) { - // No address zero. - if (cand.address().IsNil() || cand.address().IsAnyIP()) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "candidate has address of zero"); - } - - // Disallow all ports below 1024, except for 80 and 443 on public addresses. - int port = cand.address().port(); - if (cand.protocol() == cricket::TCP_PROTOCOL_NAME && - (cand.tcptype() == cricket::TCPTYPE_ACTIVE_STR || port == 0)) { - // Expected for active-only candidates per - // http://tools.ietf.org/html/rfc6544#section-4.5 so no error. - // Libjingle clients emit port 0, in "active" mode. 
- return webrtc::RTCError::OK(); - } - if (port < 1024) { - if ((port != 80) && (port != 443)) { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, - "candidate has port below 1024, but not 80 or 443"); - } - - if (cand.address().IsPrivateIP()) { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, - "candidate has port of 80 or 443 with private IP address"); - } - } - - return webrtc::RTCError::OK(); -} - -webrtc::RTCError VerifyCandidates(const cricket::Candidates& candidates) { - for (const cricket::Candidate& candidate : candidates) { - webrtc::RTCError error = VerifyCandidate(candidate); - if (!error.ok()) { - return error; - } - } - return webrtc::RTCError::OK(); +bool IsBundledButNotFirstMid( + const std::map& bundle_groups_by_mid, + const std::string& mid) { + auto it = bundle_groups_by_mid.find(mid); + if (it == bundle_groups_by_mid.end()) + return false; + return mid != *it->second->FirstContentName(); } } // namespace -namespace webrtc { - JsepTransportController::JsepTransportController( - rtc::Thread* signaling_thread, rtc::Thread* network_thread, cricket::PortAllocator* port_allocator, - AsyncResolverFactory* async_resolver_factory, + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, Config config) - : signaling_thread_(signaling_thread), - network_thread_(network_thread), + : network_thread_(network_thread), port_allocator_(port_allocator), - async_resolver_factory_(async_resolver_factory), - config_(config) { + async_dns_resolver_factory_(async_dns_resolver_factory), + config_(config), + active_reset_srtp_params_(config.active_reset_srtp_params) { // The |transport_observer| is assumed to be non-null. 
RTC_DCHECK(config_.transport_observer); RTC_DCHECK(config_.rtcp_handler); RTC_DCHECK(config_.ice_transport_factory); + RTC_DCHECK(config_.on_dtls_handshake_error_); } JsepTransportController::~JsepTransportController() { // Channel destructors may try to send packets, so this needs to happen on // the network thread. - network_thread_->Invoke( - RTC_FROM_HERE, - rtc::Bind(&JsepTransportController::DestroyAllJsepTransports_n, this)); + RTC_DCHECK_RUN_ON(network_thread_); + DestroyAllJsepTransports_n(); } RTCError JsepTransportController::SetLocalDescription( @@ -106,6 +81,7 @@ RTCError JsepTransportController::SetLocalDescription( RTC_FROM_HERE, [=] { return SetLocalDescription(type, description); }); } + RTC_DCHECK_RUN_ON(network_thread_); if (!initial_offerer_.has_value()) { initial_offerer_.emplace(type == SdpType::kOffer); if (*initial_offerer_) { @@ -125,11 +101,13 @@ RTCError JsepTransportController::SetRemoteDescription( RTC_FROM_HERE, [=] { return SetRemoteDescription(type, description); }); } + RTC_DCHECK_RUN_ON(network_thread_); return ApplyDescription_n(/*local=*/false, type, description); } RtpTransportInternal* JsepTransportController::GetRtpTransport( const std::string& mid) const { + RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { return nullptr; @@ -139,6 +117,7 @@ RtpTransportInternal* JsepTransportController::GetRtpTransport( DataChannelTransportInterface* JsepTransportController::GetDataChannelTransport( const std::string& mid) const { + RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { return nullptr; @@ -148,6 +127,7 @@ DataChannelTransportInterface* JsepTransportController::GetDataChannelTransport( cricket::DtlsTransportInternal* JsepTransportController::GetDtlsTransport( const std::string& mid) { + RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { return nullptr; 
@@ -157,6 +137,7 @@ cricket::DtlsTransportInternal* JsepTransportController::GetDtlsTransport( const cricket::DtlsTransportInternal* JsepTransportController::GetRtcpDtlsTransport(const std::string& mid) const { + RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { return nullptr; @@ -166,6 +147,7 @@ JsepTransportController::GetRtcpDtlsTransport(const std::string& mid) const { rtc::scoped_refptr JsepTransportController::LookupDtlsTransportByMid(const std::string& mid) { + RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { return nullptr; @@ -175,6 +157,7 @@ JsepTransportController::LookupDtlsTransportByMid(const std::string& mid) { rtc::scoped_refptr JsepTransportController::GetSctpTransport( const std::string& mid) const { + RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { return nullptr; @@ -183,11 +166,7 @@ rtc::scoped_refptr JsepTransportController::GetSctpTransport( } void JsepTransportController::SetIceConfig(const cricket::IceConfig& config) { - if (!network_thread_->IsCurrent()) { - network_thread_->Invoke(RTC_FROM_HERE, [&] { SetIceConfig(config); }); - return; - } - + RTC_DCHECK_RUN_ON(network_thread_); ice_config_ = config; for (auto& dtls : GetDtlsTransports()) { dtls->ice_transport()->SetIceConfig(ice_config_); @@ -195,6 +174,7 @@ void JsepTransportController::SetIceConfig(const cricket::IceConfig& config) { } void JsepTransportController::SetNeedsIceRestartFlag() { + RTC_DCHECK_RUN_ON(network_thread_); for (auto& kv : jsep_transports_by_name_) { kv.second->SetNeedsIceRestartFlag(); } @@ -202,6 +182,8 @@ void JsepTransportController::SetNeedsIceRestartFlag() { bool JsepTransportController::NeedsIceRestart( const std::string& transport_name) const { + RTC_DCHECK_RUN_ON(network_thread_); + const cricket::JsepTransport* transport = GetJsepTransportByName(transport_name); if 
(!transport) { @@ -212,11 +194,16 @@ bool JsepTransportController::NeedsIceRestart( absl::optional JsepTransportController::GetDtlsRole( const std::string& mid) const { + // TODO(tommi): Remove this hop. Currently it's called from the signaling + // thread during negotiations, potentially multiple times. + // WebRtcSessionDescriptionFactory::InternalCreateAnswer is one example. if (!network_thread_->IsCurrent()) { return network_thread_->Invoke>( RTC_FROM_HERE, [&] { return GetDtlsRole(mid); }); } + RTC_DCHECK_RUN_ON(network_thread_); + const cricket::JsepTransport* t = GetJsepTransportForMid(mid); if (!t) { return absl::optional(); @@ -231,6 +218,8 @@ bool JsepTransportController::SetLocalCertificate( RTC_FROM_HERE, [&] { return SetLocalCertificate(certificate); }); } + RTC_DCHECK_RUN_ON(network_thread_); + // Can't change a certificate, or set a null certificate. if (certificate_ || !certificate) { return false; @@ -253,10 +242,7 @@ bool JsepTransportController::SetLocalCertificate( rtc::scoped_refptr JsepTransportController::GetLocalCertificate( const std::string& transport_name) const { - if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke>( - RTC_FROM_HERE, [&] { return GetLocalCertificate(transport_name); }); - } + RTC_DCHECK_RUN_ON(network_thread_); const cricket::JsepTransport* t = GetJsepTransportByName(transport_name); if (!t) { @@ -268,10 +254,7 @@ JsepTransportController::GetLocalCertificate( std::unique_ptr JsepTransportController::GetRemoteSSLCertChain( const std::string& transport_name) const { - if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke>( - RTC_FROM_HERE, [&] { return GetRemoteSSLCertChain(transport_name); }); - } + RTC_DCHECK_RUN_ON(network_thread_); // Get the certificate from the RTP transport's DTLS handshake. 
Should be // identical to the RTCP transport's, since they were given the same remote @@ -303,17 +286,8 @@ void JsepTransportController::MaybeStartGathering() { RTCError JsepTransportController::AddRemoteCandidates( const std::string& transport_name, const cricket::Candidates& candidates) { - if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke(RTC_FROM_HERE, [&] { - return AddRemoteCandidates(transport_name, candidates); - }); - } - - // Verify each candidate before passing down to the transport layer. - RTCError error = VerifyCandidates(candidates); - if (!error.ok()) { - return error; - } + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(VerifyCandidates(candidates).ok()); auto jsep_transport = GetJsepTransportByName(transport_name); if (!jsep_transport) { RTC_LOG(LS_WARNING) << "Not adding candidate because the JsepTransport " @@ -330,6 +304,8 @@ RTCError JsepTransportController::RemoveRemoteCandidates( RTC_FROM_HERE, [&] { return RemoveRemoteCandidates(candidates); }); } + RTC_DCHECK_RUN_ON(network_thread_); + // Verify each candidate before passing down to the transport layer. 
RTCError error = VerifyCandidates(candidates); if (!error.ok()) { @@ -372,10 +348,7 @@ RTCError JsepTransportController::RemoveRemoteCandidates( bool JsepTransportController::GetStats(const std::string& transport_name, cricket::TransportStats* stats) { - if (!network_thread_->IsCurrent()) { - return network_thread_->Invoke( - RTC_FROM_HERE, [=] { return GetStats(transport_name, stats); }); - } + RTC_DCHECK_RUN_ON(network_thread_); cricket::JsepTransport* transport = GetJsepTransportByName(transport_name); if (!transport) { @@ -392,11 +365,11 @@ void JsepTransportController::SetActiveResetSrtpParams( }); return; } - + RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(INFO) << "Updating the active_reset_srtp_params for JsepTransportController: " << active_reset_srtp_params; - config_.active_reset_srtp_params = active_reset_srtp_params; + active_reset_srtp_params_ = active_reset_srtp_params; for (auto& kv : jsep_transports_by_name_) { kv.second->SetActiveResetSrtpParams(active_reset_srtp_params); } @@ -425,7 +398,7 @@ JsepTransportController::CreateIceTransport(const std::string& transport_name, IceTransportInit init; init.set_port_allocator(port_allocator_); - init.set_async_resolver_factory(async_resolver_factory_); + init.set_async_dns_resolver_factory(async_dns_resolver_factory_); init.set_event_log(config_.event_log); return config_.ice_transport_factory->CreateIceTransport( transport_name, component, std::move(init)); @@ -435,20 +408,20 @@ std::unique_ptr JsepTransportController::CreateDtlsTransport( const cricket::ContentInfo& content_info, cricket::IceTransportInternal* ice) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); std::unique_ptr dtls; if (config_.dtls_transport_factory) { dtls = config_.dtls_transport_factory->CreateDtlsTransport( - ice, config_.crypto_options); + ice, config_.crypto_options, config_.ssl_max_version); } else { dtls = std::make_unique(ice, config_.crypto_options, - config_.event_log); + config_.event_log, 
+ config_.ssl_max_version); } RTC_DCHECK(dtls); - dtls->SetSslMaxProtocolVersion(config_.ssl_max_version); dtls->ice_transport()->SetIceRole(ice_role_); dtls->ice_transport()->SetIceTiebreaker(ice_tiebreaker_); dtls->ice_transport()->SetIceConfig(ice_config_); @@ -462,8 +435,6 @@ JsepTransportController::CreateDtlsTransport( this, &JsepTransportController::OnTransportWritableState_n); dtls->SignalReceivingState.connect( this, &JsepTransportController::OnTransportReceivingState_n); - dtls->SignalDtlsHandshakeError.connect( - this, &JsepTransportController::OnDtlsHandshakeError); dtls->ice_transport()->SignalGatheringState.connect( this, &JsepTransportController::OnTransportGatheringState_n); dtls->ice_transport()->SignalCandidateGathered.connect( @@ -480,6 +451,9 @@ JsepTransportController::CreateDtlsTransport( this, &JsepTransportController::OnTransportStateChanged_n); dtls->ice_transport()->SignalCandidatePairChanged.connect( this, &JsepTransportController::OnTransportCandidatePairChanged_n); + + dtls->SubscribeDtlsHandshakeError( + [this](rtc::SSLHandshakeError error) { OnDtlsHandshakeError(error); }); return dtls; } @@ -488,7 +462,7 @@ JsepTransportController::CreateUnencryptedRtpTransport( const std::string& transport_name, rtc::PacketTransportInternal* rtp_packet_transport, rtc::PacketTransportInternal* rtcp_packet_transport) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); auto unencrypted_rtp_transport = std::make_unique(rtcp_packet_transport == nullptr); unencrypted_rtp_transport->SetRtpPacketTransport(rtp_packet_transport); @@ -503,7 +477,7 @@ JsepTransportController::CreateSdesTransport( const std::string& transport_name, cricket::DtlsTransportInternal* rtp_dtls_transport, cricket::DtlsTransportInternal* rtcp_dtls_transport) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); auto srtp_transport = std::make_unique(rtcp_dtls_transport == nullptr); RTC_DCHECK(rtp_dtls_transport); @@ -522,7 
+496,7 @@ JsepTransportController::CreateDtlsSrtpTransport( const std::string& transport_name, cricket::DtlsTransportInternal* rtp_dtls_transport, cricket::DtlsTransportInternal* rtcp_dtls_transport) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); auto dtls_srtp_transport = std::make_unique( rtcp_dtls_transport == nullptr); if (config_.enable_external_auth) { @@ -531,18 +505,19 @@ JsepTransportController::CreateDtlsSrtpTransport( dtls_srtp_transport->SetDtlsTransports(rtp_dtls_transport, rtcp_dtls_transport); - dtls_srtp_transport->SetActiveResetSrtpParams( - config_.active_reset_srtp_params); - dtls_srtp_transport->SignalDtlsStateChange.connect( - this, &JsepTransportController::UpdateAggregateStates_n); - dtls_srtp_transport->SetOnErrorDemuxingPacket([this](uint32_t ssrc) { - this->JsepTransportController::ErrorDemuxingPacket_n(ssrc); + dtls_srtp_transport->SetActiveResetSrtpParams(active_reset_srtp_params_); + // Capturing this in the callback because JsepTransportController will always + // outlive the DtlsSrtpTransport. 
+ dtls_srtp_transport->SetOnDtlsStateChange([this]() { + RTC_DCHECK_RUN_ON(this->network_thread_); + this->UpdateAggregateStates_n(); }); return dtls_srtp_transport; } std::vector JsepTransportController::GetDtlsTransports() { + RTC_DCHECK_RUN_ON(network_thread_); std::vector dtls_transports; for (auto it = jsep_transports_by_name_.begin(); it != jsep_transports_by_name_.end(); ++it) { @@ -563,7 +538,6 @@ RTCError JsepTransportController::ApplyDescription_n( bool local, SdpType type, const cricket::SessionDescription* description) { - RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(description); if (local) { @@ -573,21 +547,32 @@ RTCError JsepTransportController::ApplyDescription_n( } RTCError error; - error = ValidateAndMaybeUpdateBundleGroup(local, type, description); + error = ValidateAndMaybeUpdateBundleGroups(local, type, description); if (!error.ok()) { return error; } + // Established BUNDLE groups by MID. + std::map + established_bundle_groups_by_mid; + for (const auto& bundle_group : bundle_groups_) { + for (const std::string& content_name : bundle_group->content_names()) { + established_bundle_groups_by_mid[content_name] = bundle_group.get(); + } + } - std::vector merged_encrypted_extension_ids; - if (bundle_group_) { - merged_encrypted_extension_ids = - MergeEncryptedHeaderExtensionIdsForBundle(description); + std::map> + merged_encrypted_extension_ids_by_bundle; + if (!bundle_groups_.empty()) { + merged_encrypted_extension_ids_by_bundle = + MergeEncryptedHeaderExtensionIdsForBundles( + established_bundle_groups_by_mid, description); } for (const cricket::ContentInfo& content_info : description->contents()) { - // Don't create transports for rejected m-lines and bundled m-lines." + // Don't create transports for rejected m-lines and bundled m-lines. 
if (content_info.rejected || - (IsBundled(content_info.name) && content_info.name != *bundled_mid())) { + IsBundledButNotFirstMid(established_bundle_groups_by_mid, + content_info.name)) { continue; } error = MaybeCreateJsepTransport(local, content_info, *description); @@ -603,14 +588,24 @@ RTCError JsepTransportController::ApplyDescription_n( const cricket::TransportInfo& transport_info = description->transport_infos()[i]; if (content_info.rejected) { - HandleRejectedContent(content_info, description); + // This may cause groups to be removed from |bundle_groups_| and + // |established_bundle_groups_by_mid|. + HandleRejectedContent(content_info, established_bundle_groups_by_mid); continue; } - if (IsBundled(content_info.name) && content_info.name != *bundled_mid()) { - if (!HandleBundledContent(content_info)) { + auto it = established_bundle_groups_by_mid.find(content_info.name); + const cricket::ContentGroup* established_bundle_group = + it != established_bundle_groups_by_mid.end() ? it->second : nullptr; + + // For bundle members that are not BUNDLE-tagged (not first in the group), + // configure their transport to be the same as the BUNDLE-tagged transport. + if (established_bundle_group && + content_info.name != *established_bundle_group->FirstContentName()) { + if (!HandleBundledContent(content_info, *established_bundle_group)) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "Failed to process the bundled m= section with mid='" + + "Failed to process the bundled m= section with " + "mid='" + content_info.name + "'."); } continue; @@ -622,8 +617,13 @@ RTCError JsepTransportController::ApplyDescription_n( } std::vector extension_ids; - if (bundled_mid() && content_info.name == *bundled_mid()) { - extension_ids = merged_encrypted_extension_ids; + // Is BUNDLE-tagged (first in the group)? 
+ if (established_bundle_group && + content_info.name == *established_bundle_group->FirstContentName()) { + auto it = merged_encrypted_extension_ids_by_bundle.find( + established_bundle_group); + RTC_DCHECK(it != merged_encrypted_extension_ids_by_bundle.end()); + extension_ids = it->second; } else { extension_ids = GetEncryptedHeaderExtensionIds(content_info); } @@ -661,51 +661,98 @@ RTCError JsepTransportController::ApplyDescription_n( return RTCError::OK(); } -RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup( +RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroups( bool local, SdpType type, const cricket::SessionDescription* description) { RTC_DCHECK(description); - const cricket::ContentGroup* new_bundle_group = - description->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - // The BUNDLE group containing a MID that no m= section has is invalid. - if (new_bundle_group) { + std::vector new_bundle_groups = + description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + // Verify |new_bundle_groups|. + std::map new_bundle_groups_by_mid; + for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) { for (const std::string& content_name : new_bundle_group->content_names()) { + // The BUNDLE group must not contain a MID that is a member of a different + // BUNDLE group, or that contains the same MID multiple times. + if (new_bundle_groups_by_mid.find(content_name) != + new_bundle_groups_by_mid.end()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group contains a MID='" + content_name + + "' that is already in a BUNDLE group."); + } + new_bundle_groups_by_mid.insert( + std::make_pair(content_name, new_bundle_group)); + // The BUNDLE group must not contain a MID that no m= section has. 
if (!description->GetContentByName(content_name)) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "The BUNDLE group contains MID='" + content_name + + "A BUNDLE group contains a MID='" + content_name + "' matching no m= section."); } } } if (type == SdpType::kAnswer) { - const cricket::ContentGroup* offered_bundle_group = - local ? remote_desc_->GetGroupByName(cricket::GROUP_TYPE_BUNDLE) - : local_desc_->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + std::vector offered_bundle_groups = + local ? remote_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE) + : local_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); - if (new_bundle_group) { - // The BUNDLE group in answer should be a subset of offered group. + std::map + offered_bundle_groups_by_mid; + for (const cricket::ContentGroup* offered_bundle_group : + offered_bundle_groups) { + for (const std::string& content_name : + offered_bundle_group->content_names()) { + offered_bundle_groups_by_mid[content_name] = offered_bundle_group; + } + } + + std::map + new_bundle_groups_by_offered_bundle_groups; + for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) { + if (!new_bundle_group->FirstContentName()) { + // Empty groups could be a subset of any group. + continue; + } + // The group in the answer (new_bundle_group) must have a corresponding + // group in the offer (original_group), because the answer groups may only + // be subsets of the offer groups. 
+ auto it = offered_bundle_groups_by_mid.find( + *new_bundle_group->FirstContentName()); + if (it == offered_bundle_groups_by_mid.end()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group was added in the answer that did not " + "exist in the offer."); + } + const cricket::ContentGroup* offered_bundle_group = it->second; + if (new_bundle_groups_by_offered_bundle_groups.find( + offered_bundle_group) != + new_bundle_groups_by_offered_bundle_groups.end()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "A MID in the answer has changed group."); + } + new_bundle_groups_by_offered_bundle_groups.insert( + std::make_pair(offered_bundle_group, new_bundle_group)); for (const std::string& content_name : new_bundle_group->content_names()) { - if (!offered_bundle_group || - !offered_bundle_group->HasContentName(content_name)) { + it = offered_bundle_groups_by_mid.find(content_name); + // The BUNDLE group in answer should be a subset of offered group. + if (it == offered_bundle_groups_by_mid.end() || + it->second != offered_bundle_group) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "The BUNDLE group in answer contains a MID='" + + "A BUNDLE group in answer contains a MID='" + content_name + - "' that was " - "not in the offered group."); + "' that was not in the offered group."); } } } - if (bundle_group_) { - for (const std::string& content_name : bundle_group_->content_names()) { + for (const auto& bundle_group : bundle_groups_) { + for (const std::string& content_name : bundle_group->content_names()) { // An answer that removes m= sections from pre-negotiated BUNDLE group // without rejecting it, is invalid. 
- if (!new_bundle_group || - !new_bundle_group->HasContentName(content_name)) { + auto it = new_bundle_groups_by_mid.find(content_name); + if (it == new_bundle_groups_by_mid.end()) { auto* content_info = description->GetContentByName(content_name); if (!content_info || !content_info->rejected) { return RTCError(RTCErrorType::INVALID_PARAMETER, @@ -726,33 +773,39 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroup( } if (ShouldUpdateBundleGroup(type, description)) { - bundle_group_ = *new_bundle_group; - } - - if (!bundled_mid()) { - return RTCError::OK(); - } - - auto bundled_content = description->GetContentByName(*bundled_mid()); - if (!bundled_content) { - return RTCError( - RTCErrorType::INVALID_PARAMETER, - "An m= section associated with the BUNDLE-tag doesn't exist."); - } - - // If the |bundled_content| is rejected, other contents in the bundle group - // should be rejected. - if (bundled_content->rejected) { - for (const auto& content_name : bundle_group_->content_names()) { - auto other_content = description->GetContentByName(content_name); - if (!other_content->rejected) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "The m= section with mid='" + content_name + - "' should be rejected."); - } + bundle_groups_.clear(); + for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) { + bundle_groups_.push_back( + std::make_unique(*new_bundle_group)); } } + for (const auto& bundle_group : bundle_groups_) { + if (!bundle_group->FirstContentName()) + continue; + + // The first MID in a BUNDLE group is BUNDLE-tagged. + auto bundled_content = + description->GetContentByName(*bundle_group->FirstContentName()); + if (!bundled_content) { + return RTCError( + RTCErrorType::INVALID_PARAMETER, + "An m= section associated with the BUNDLE-tag doesn't exist."); + } + + // If the |bundled_content| is rejected, other contents in the bundle group + // must also be rejected. 
+ if (bundled_content->rejected) { + for (const auto& content_name : bundle_group->content_names()) { + auto other_content = description->GetContentByName(content_name); + if (!other_content->rejected) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "The m= section with mid='" + content_name + + "' should be rejected."); + } + } + } + } return RTCError::OK(); } @@ -772,30 +825,49 @@ RTCError JsepTransportController::ValidateContent( void JsepTransportController::HandleRejectedContent( const cricket::ContentInfo& content_info, - const cricket::SessionDescription* description) { + std::map& + established_bundle_groups_by_mid) { // If the content is rejected, let the // BaseChannel/SctpTransport change the RtpTransport/DtlsTransport first, // then destroy the cricket::JsepTransport. - RemoveTransportForMid(content_info.name); - if (content_info.name == bundled_mid()) { - for (const auto& content_name : bundle_group_->content_names()) { + auto it = established_bundle_groups_by_mid.find(content_info.name); + cricket::ContentGroup* bundle_group = + it != established_bundle_groups_by_mid.end() ? it->second : nullptr; + if (bundle_group && !bundle_group->content_names().empty() && + content_info.name == *bundle_group->FirstContentName()) { + // Rejecting a BUNDLE group's first mid means we are rejecting the entire + // group. + for (const auto& content_name : bundle_group->content_names()) { RemoveTransportForMid(content_name); + // We are about to delete this BUNDLE group, erase all mappings to it. + it = established_bundle_groups_by_mid.find(content_name); + RTC_DCHECK(it != established_bundle_groups_by_mid.end()); + established_bundle_groups_by_mid.erase(it); } - bundle_group_.reset(); - } else if (IsBundled(content_info.name)) { - // Remove the rejected content from the |bundle_group_|. - bundle_group_->RemoveContentName(content_info.name); - // Reset the bundle group if nothing left. 
- if (!bundle_group_->FirstContentName()) { - bundle_group_.reset(); + // Delete the BUNDLE group. + auto bundle_group_it = std::find_if( + bundle_groups_.begin(), bundle_groups_.end(), + [bundle_group](std::unique_ptr& group) { + return bundle_group == group.get(); + }); + RTC_DCHECK(bundle_group_it != bundle_groups_.end()); + bundle_groups_.erase(bundle_group_it); + } else { + RemoveTransportForMid(content_info.name); + if (bundle_group) { + // Remove the rejected content from the |bundle_group|. + bundle_group->RemoveContentName(content_info.name); } } MaybeDestroyJsepTransport(content_info.name); } bool JsepTransportController::HandleBundledContent( - const cricket::ContentInfo& content_info) { - auto jsep_transport = GetJsepTransportByName(*bundled_mid()); + const cricket::ContentInfo& content_info, + const cricket::ContentGroup& bundle_group) { + RTC_DCHECK(bundle_group.FirstContentName()); + auto jsep_transport = + GetJsepTransportByName(*bundle_group.FirstContentName()); RTC_DCHECK(jsep_transport); // If the content is bundled, let the // BaseChannel/SctpTransport change the RtpTransport/DtlsTransport first, @@ -814,24 +886,34 @@ bool JsepTransportController::HandleBundledContent( bool JsepTransportController::SetTransportForMid( const std::string& mid, cricket::JsepTransport* jsep_transport) { - RTC_DCHECK(jsep_transport); - if (mid_to_transport_[mid] == jsep_transport) { - return true; - } RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(jsep_transport); + + auto it = mid_to_transport_.find(mid); + if (it != mid_to_transport_.end() && it->second == jsep_transport) + return true; + pending_mids_.push_back(mid); - mid_to_transport_[mid] = jsep_transport; + + if (it == mid_to_transport_.end()) { + mid_to_transport_.insert(std::make_pair(mid, jsep_transport)); + } else { + it->second = jsep_transport; + } + return config_.transport_observer->OnTransportChanged( mid, jsep_transport->rtp_transport(), jsep_transport->RtpDtlsTransport(), 
jsep_transport->data_channel_transport()); } void JsepTransportController::RemoveTransportForMid(const std::string& mid) { + RTC_DCHECK_RUN_ON(network_thread_); bool ret = config_.transport_observer->OnTransportChanged(mid, nullptr, nullptr, nullptr); // Calling OnTransportChanged with nullptr should always succeed, since it is // only expected to fail when adding media to a transport (not removing). RTC_DCHECK(ret); + mid_to_transport_.erase(mid); } @@ -866,11 +948,11 @@ bool JsepTransportController::ShouldUpdateBundleGroup( } RTC_DCHECK(local_desc_ && remote_desc_); - const cricket::ContentGroup* local_bundle = - local_desc_->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - const cricket::ContentGroup* remote_bundle = - remote_desc_->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - return local_bundle && remote_bundle; + std::vector local_bundles = + local_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + std::vector remote_bundles = + remote_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + return !local_bundles.empty() && !remote_bundles.empty(); } std::vector JsepTransportController::GetEncryptedHeaderExtensionIds( @@ -894,26 +976,32 @@ std::vector JsepTransportController::GetEncryptedHeaderExtensionIds( return encrypted_header_extension_ids; } -std::vector -JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundle( +std::map> +JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundles( + const std::map& bundle_groups_by_mid, const cricket::SessionDescription* description) { RTC_DCHECK(description); - RTC_DCHECK(bundle_group_); - - std::vector merged_ids; + RTC_DCHECK(!bundle_groups_.empty()); + std::map> + merged_encrypted_extension_ids_by_bundle; // Union the encrypted header IDs in the group when bundle is enabled. 
for (const cricket::ContentInfo& content_info : description->contents()) { - if (bundle_group_->HasContentName(content_info.name)) { - std::vector extension_ids = - GetEncryptedHeaderExtensionIds(content_info); - for (int id : extension_ids) { - if (!absl::c_linear_search(merged_ids, id)) { - merged_ids.push_back(id); - } + auto it = bundle_groups_by_mid.find(content_info.name); + if (it == bundle_groups_by_mid.end()) + continue; + // Get or create list of IDs for the BUNDLE group. + std::vector& merged_ids = + merged_encrypted_extension_ids_by_bundle[it->second]; + // Add IDs not already in the list. + std::vector extension_ids = + GetEncryptedHeaderExtensionIds(content_info); + for (int id : extension_ids) { + if (!absl::c_linear_search(merged_ids, id)) { + merged_ids.push_back(id); } } } - return merged_ids; + return merged_encrypted_extension_ids_by_bundle; } int JsepTransportController::GetRtpAbsSendTimeHeaderExtensionId( @@ -960,7 +1048,6 @@ RTCError JsepTransportController::MaybeCreateJsepTransport( bool local, const cricket::ContentInfo& content_info, const cricket::SessionDescription& description) { - RTC_DCHECK(network_thread_->IsCurrent()); cricket::JsepTransport* transport = GetJsepTransportByName(content_info.name); if (transport) { return RTCError::OK(); @@ -984,7 +1071,6 @@ RTCError JsepTransportController::MaybeCreateJsepTransport( std::unique_ptr unencrypted_rtp_transport; std::unique_ptr sdes_transport; std::unique_ptr dtls_srtp_transport; - std::unique_ptr datagram_rtp_transport; rtc::scoped_refptr rtcp_ice; if (config_.rtcp_mux_policy != @@ -1020,9 +1106,8 @@ RTCError JsepTransportController::MaybeCreateJsepTransport( std::make_unique( content_info.name, certificate_, std::move(ice), std::move(rtcp_ice), std::move(unencrypted_rtp_transport), std::move(sdes_transport), - std::move(dtls_srtp_transport), std::move(datagram_rtp_transport), - std::move(rtp_dtls_transport), std::move(rtcp_dtls_transport), - std::move(sctp_transport)); + 
std::move(dtls_srtp_transport), std::move(rtp_dtls_transport), + std::move(rtcp_dtls_transport), std::move(sctp_transport)); jsep_transport->rtp_transport()->SignalRtcpPacketReceived.connect( this, &JsepTransportController::OnRtcpPacketReceived_n); @@ -1056,8 +1141,6 @@ void JsepTransportController::MaybeDestroyJsepTransport( } void JsepTransportController::DestroyAllJsepTransports_n() { - RTC_DCHECK(network_thread_->IsCurrent()); - for (const auto& jsep_transport : jsep_transports_by_name_) { config_.transport_observer->OnTransportChanged(jsep_transport.first, nullptr, nullptr, nullptr); @@ -1067,10 +1150,9 @@ void JsepTransportController::DestroyAllJsepTransports_n() { } void JsepTransportController::SetIceRole_n(cricket::IceRole ice_role) { - RTC_DCHECK(network_thread_->IsCurrent()); - ice_role_ = ice_role; - for (auto& dtls : GetDtlsTransports()) { + auto dtls_transports = GetDtlsTransports(); + for (auto& dtls : dtls_transports) { dtls->ice_transport()->SetIceRole(ice_role_); } } @@ -1125,7 +1207,6 @@ cricket::IceRole JsepTransportController::DetermineIceRole( void JsepTransportController::OnTransportWritableState_n( rtc::PacketTransportInternal* transport) { - RTC_DCHECK(network_thread_->IsCurrent()); RTC_LOG(LS_INFO) << " Transport " << transport->transport_name() << " writability changed to " << transport->writable() << "."; @@ -1134,58 +1215,44 @@ void JsepTransportController::OnTransportWritableState_n( void JsepTransportController::OnTransportReceivingState_n( rtc::PacketTransportInternal* transport) { - RTC_DCHECK(network_thread_->IsCurrent()); UpdateAggregateStates_n(); } void JsepTransportController::OnTransportGatheringState_n( cricket::IceTransportInternal* transport) { - RTC_DCHECK(network_thread_->IsCurrent()); UpdateAggregateStates_n(); } void JsepTransportController::OnTransportCandidateGathered_n( cricket::IceTransportInternal* transport, const cricket::Candidate& candidate) { - RTC_DCHECK(network_thread_->IsCurrent()); - // We should never 
signal peer-reflexive candidates. if (candidate.type() == cricket::PRFLX_PORT_TYPE) { RTC_NOTREACHED(); return; } - std::string transport_name = transport->transport_name(); - invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread_, [this, transport_name, candidate] { - SignalIceCandidatesGathered(transport_name, {candidate}); - }); + + signal_ice_candidates_gathered_.Send( + transport->transport_name(), std::vector{candidate}); } void JsepTransportController::OnTransportCandidateError_n( cricket::IceTransportInternal* transport, const cricket::IceCandidateErrorEvent& event) { - RTC_DCHECK(network_thread_->IsCurrent()); - - invoker_.AsyncInvoke(RTC_FROM_HERE, signaling_thread_, - [this, event] { SignalIceCandidateError(event); }); + signal_ice_candidate_error_.Send(event); } void JsepTransportController::OnTransportCandidatesRemoved_n( cricket::IceTransportInternal* transport, const cricket::Candidates& candidates) { - invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread_, - [this, candidates] { SignalIceCandidatesRemoved(candidates); }); + signal_ice_candidates_removed_.Send(candidates); } void JsepTransportController::OnTransportCandidatePairChanged_n( const cricket::CandidatePairChangeEvent& event) { - invoker_.AsyncInvoke(RTC_FROM_HERE, signaling_thread_, [this, event] { - SignalIceCandidatePairChanged(event); - }); + signal_ice_candidate_pair_changed_.Send(event); } void JsepTransportController::OnTransportRoleConflict_n( cricket::IceTransportInternal* transport) { - RTC_DCHECK(network_thread_->IsCurrent()); // Note: since the role conflict is handled entirely on the network thread, // we don't need to worry about role conflicts occurring on two ports at // once. The first one encountered should immediately reverse the role. 
@@ -1202,7 +1269,6 @@ void JsepTransportController::OnTransportRoleConflict_n( void JsepTransportController::OnTransportStateChanged_n( cricket::IceTransportInternal* transport) { - RTC_DCHECK(network_thread_->IsCurrent()); RTC_LOG(LS_INFO) << transport->transport_name() << " Transport " << transport->component() << " state changed. Check if state is complete."; @@ -1210,8 +1276,6 @@ void JsepTransportController::OnTransportStateChanged_n( } void JsepTransportController::UpdateAggregateStates_n() { - RTC_DCHECK(network_thread_->IsCurrent()); - auto dtls_transports = GetDtlsTransports(); cricket::IceConnectionState new_connection_state = cricket::kIceConnectionConnecting; @@ -1260,10 +1324,7 @@ void JsepTransportController::UpdateAggregateStates_n() { if (ice_connection_state_ != new_connection_state) { ice_connection_state_ = new_connection_state; - invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread_, [this, new_connection_state] { - SignalIceConnectionState.Send(new_connection_state); - }); + signal_ice_connection_state_.Send(new_connection_state); } // Compute the current RTCIceConnectionState as described in @@ -1319,16 +1380,11 @@ void JsepTransportController::UpdateAggregateStates_n() { new_ice_connection_state == PeerConnectionInterface::kIceConnectionCompleted) { // Ensure that we never skip over the "connected" state. 
- invoker_.AsyncInvoke(RTC_FROM_HERE, signaling_thread_, [this] { - SignalStandardizedIceConnectionState( - PeerConnectionInterface::kIceConnectionConnected); - }); + signal_standardized_ice_connection_state_.Send( + PeerConnectionInterface::kIceConnectionConnected); } standardized_ice_connection_state_ = new_ice_connection_state; - invoker_.AsyncInvoke( - RTC_FROM_HERE, signaling_thread_, [this, new_ice_connection_state] { - SignalStandardizedIceConnectionState(new_ice_connection_state); - }); + signal_standardized_ice_connection_state_.Send(new_ice_connection_state); } // Compute the current RTCPeerConnectionState as described in @@ -1379,10 +1435,7 @@ void JsepTransportController::UpdateAggregateStates_n() { if (combined_connection_state_ != new_combined_state) { combined_connection_state_ = new_combined_state; - invoker_.AsyncInvoke(RTC_FROM_HERE, signaling_thread_, - [this, new_combined_state] { - SignalConnectionState(new_combined_state); - }); + signal_connection_state_.Send(new_combined_state); } // Compute the gathering state. 
@@ -1395,17 +1448,10 @@ void JsepTransportController::UpdateAggregateStates_n() { } if (ice_gathering_state_ != new_gathering_state) { ice_gathering_state_ = new_gathering_state; - invoker_.AsyncInvoke(RTC_FROM_HERE, signaling_thread_, - [this, new_gathering_state] { - SignalIceGatheringState(new_gathering_state); - }); + signal_ice_gathering_state_.Send(new_gathering_state); } } -void JsepTransportController::ErrorDemuxingPacket_n(uint32_t ssrc) { - SignalErrorDemuxingPacket.emit(ssrc); -} - void JsepTransportController::OnRtcpPacketReceived_n( rtc::CopyOnWriteBuffer* packet, int64_t packet_time_us) { @@ -1415,7 +1461,7 @@ void JsepTransportController::OnRtcpPacketReceived_n( void JsepTransportController::OnDtlsHandshakeError( rtc::SSLHandshakeError error) { - SignalDtlsHandshakeError(error); + config_.on_dtls_handshake_error_(error); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h index 025a7a1fc..e3c1187fb 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h @@ -11,32 +11,60 @@ #ifndef PC_JSEP_TRANSPORT_CONTROLLER_H_ #define PC_JSEP_TRANSPORT_CONTROLLER_H_ +#include + +#include #include #include #include #include #include +#include "absl/types/optional.h" +#include "api/async_dns_resolver.h" #include "api/candidate.h" #include "api/crypto/crypto_options.h" #include "api/ice_transport_factory.h" +#include "api/ice_transport_interface.h" +#include "api/jsep.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "api/transport/data_channel_transport_interface.h" +#include "api/transport/sctp_transport_factory_interface.h" #include "media/sctp/sctp_transport_internal.h" #include "p2p/base/dtls_transport.h" #include "p2p/base/dtls_transport_factory.h" +#include 
"p2p/base/dtls_transport_internal.h" +#include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/packet_transport_internal.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_info.h" #include "pc/channel.h" #include "pc/dtls_srtp_transport.h" #include "pc/dtls_transport.h" #include "pc/jsep_transport.h" #include "pc/rtp_transport.h" +#include "pc/rtp_transport_internal.h" +#include "pc/sctp_transport.h" +#include "pc/session_description.h" #include "pc/srtp_transport.h" -#include "rtc_base/async_invoker.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/ref_counted_object.h" +#include "pc/transport_stats.h" #include "rtc_base/callback_list.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/helpers.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace rtc { class Thread; @@ -98,20 +126,25 @@ class JsepTransportController : public sigslot::has_slots<> { std::function rtcp_handler; + // Initial value for whether DtlsTransport reset causes a reset + // of SRTP parameters. bool active_reset_srtp_params = false; RtcEventLog* event_log = nullptr; // Factory for SCTP transports. SctpTransportFactoryInterface* sctp_factory = nullptr; + std::function on_dtls_handshake_error_; }; - // The ICE related events are signaled on the |signaling_thread|. - // All the transport related methods are called on the |network_thread|. 
- JsepTransportController(rtc::Thread* signaling_thread, - rtc::Thread* network_thread, - cricket::PortAllocator* port_allocator, - AsyncResolverFactory* async_resolver_factory, - Config config); + // The ICE related events are fired on the |network_thread|. + // All the transport related methods are called on the |network_thread| + // and destruction of the JsepTransportController must occur on the + // |network_thread|. + JsepTransportController( + rtc::Thread* network_thread, + cricket::PortAllocator* port_allocator, + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + Config config); virtual ~JsepTransportController(); // The main method to be called; applies a description at the transport @@ -192,55 +225,115 @@ class JsepTransportController : public sigslot::has_slots<> { // and deletes unused transports, but doesn't consider anything more complex. void RollbackTransports(); - // All of these signals are fired on the signaling thread. + // F: void(const std::string&, const std::vector&) + template + void SubscribeIceCandidateGathered(F&& callback) { + RTC_DCHECK_RUN_ON(network_thread_); + signal_ice_candidates_gathered_.AddReceiver(std::forward(callback)); + } + + // F: void(cricket::IceConnectionState) + template + void SubscribeIceConnectionState(F&& callback) { + RTC_DCHECK_RUN_ON(network_thread_); + signal_ice_connection_state_.AddReceiver(std::forward(callback)); + } + + // F: void(PeerConnectionInterface::PeerConnectionState) + template + void SubscribeConnectionState(F&& callback) { + RTC_DCHECK_RUN_ON(network_thread_); + signal_connection_state_.AddReceiver(std::forward(callback)); + } + + // F: void(PeerConnectionInterface::IceConnectionState) + template + void SubscribeStandardizedIceConnectionState(F&& callback) { + RTC_DCHECK_RUN_ON(network_thread_); + signal_standardized_ice_connection_state_.AddReceiver( + std::forward(callback)); + } + + // F: void(cricket::IceGatheringState) + template + void SubscribeIceGatheringState(F&& 
callback) { + RTC_DCHECK_RUN_ON(network_thread_); + signal_ice_gathering_state_.AddReceiver(std::forward(callback)); + } + + // F: void(const cricket::IceCandidateErrorEvent&) + template + void SubscribeIceCandidateError(F&& callback) { + RTC_DCHECK_RUN_ON(network_thread_); + signal_ice_candidate_error_.AddReceiver(std::forward(callback)); + } + + // F: void(const std::vector&) + template + void SubscribeIceCandidatesRemoved(F&& callback) { + RTC_DCHECK_RUN_ON(network_thread_); + signal_ice_candidates_removed_.AddReceiver(std::forward(callback)); + } + + // F: void(const cricket::CandidatePairChangeEvent&) + template + void SubscribeIceCandidatePairChanged(F&& callback) { + RTC_DCHECK_RUN_ON(network_thread_); + signal_ice_candidate_pair_changed_.AddReceiver(std::forward(callback)); + } + + private: + // All of these callbacks are fired on the network thread. // If any transport failed => failed, // Else if all completed => completed, // Else if all connected => connected, // Else => connecting - CallbackList SignalIceConnectionState; + CallbackList signal_ice_connection_state_ + RTC_GUARDED_BY(network_thread_); - sigslot::signal1 - SignalConnectionState; + CallbackList + signal_connection_state_ RTC_GUARDED_BY(network_thread_); - sigslot::signal1 - SignalStandardizedIceConnectionState; + CallbackList + signal_standardized_ice_connection_state_ RTC_GUARDED_BY(network_thread_); // If all transports done gathering => complete, // Else if any are gathering => gathering, // Else => new - sigslot::signal1 SignalIceGatheringState; + CallbackList signal_ice_gathering_state_ + RTC_GUARDED_BY(network_thread_); - // (mid, candidates) - sigslot::signal2&> - SignalIceCandidatesGathered; + // [mid, candidates] + CallbackList&> + signal_ice_candidates_gathered_ RTC_GUARDED_BY(network_thread_); - sigslot::signal1 - SignalIceCandidateError; + CallbackList + signal_ice_candidate_error_ RTC_GUARDED_BY(network_thread_); - sigslot::signal1&> - SignalIceCandidatesRemoved; + 
CallbackList&> + signal_ice_candidates_removed_ RTC_GUARDED_BY(network_thread_); - sigslot::signal1 - SignalIceCandidatePairChanged; + CallbackList + signal_ice_candidate_pair_changed_ RTC_GUARDED_BY(network_thread_); - sigslot::signal1 SignalDtlsHandshakeError; - - sigslot::signal1 SignalErrorDemuxingPacket; - - private: RTCError ApplyDescription_n(bool local, SdpType type, - const cricket::SessionDescription* description); - RTCError ValidateAndMaybeUpdateBundleGroup( + const cricket::SessionDescription* description) + RTC_RUN_ON(network_thread_); + RTCError ValidateAndMaybeUpdateBundleGroups( bool local, SdpType type, const cricket::SessionDescription* description); RTCError ValidateContent(const cricket::ContentInfo& content_info); void HandleRejectedContent(const cricket::ContentInfo& content_info, - const cricket::SessionDescription* description); - bool HandleBundledContent(const cricket::ContentInfo& content_info); + std::map& + established_bundle_groups_by_mid) + RTC_RUN_ON(network_thread_); + bool HandleBundledContent(const cricket::ContentInfo& content_info, + const cricket::ContentGroup& bundle_group) + RTC_RUN_ON(network_thread_); bool SetTransportForMid(const std::string& mid, cricket::JsepTransport* jsep_transport); @@ -252,22 +345,12 @@ class JsepTransportController : public sigslot::has_slots<> { const std::vector& encrypted_extension_ids, int rtp_abs_sendtime_extn_id); - absl::optional bundled_mid() const { - absl::optional bundled_mid; - if (bundle_group_ && bundle_group_->FirstContentName()) { - bundled_mid = *(bundle_group_->FirstContentName()); - } - return bundled_mid; - } - - bool IsBundled(const std::string& mid) const { - return bundle_group_ && bundle_group_->HasContentName(mid); - } - bool ShouldUpdateBundleGroup(SdpType type, const cricket::SessionDescription* description); - std::vector MergeEncryptedHeaderExtensionIdsForBundle( + std::map> + MergeEncryptedHeaderExtensionIdsForBundles( + const std::map& bundle_groups_by_mid, const 
cricket::SessionDescription* description); std::vector GetEncryptedHeaderExtensionIds( const cricket::ContentInfo& content_info); @@ -280,15 +363,16 @@ class JsepTransportController : public sigslot::has_slots<> { // transports are bundled on (In current implementation, it is the first // content in the BUNDLE group). const cricket::JsepTransport* GetJsepTransportForMid( - const std::string& mid) const; - cricket::JsepTransport* GetJsepTransportForMid(const std::string& mid); + const std::string& mid) const RTC_RUN_ON(network_thread_); + cricket::JsepTransport* GetJsepTransportForMid(const std::string& mid) + RTC_RUN_ON(network_thread_); // Get the JsepTransport without considering the BUNDLE group. Return nullptr // if the JsepTransport is destroyed. const cricket::JsepTransport* GetJsepTransportByName( - const std::string& transport_name) const; + const std::string& transport_name) const RTC_RUN_ON(network_thread_); cricket::JsepTransport* GetJsepTransportByName( - const std::string& transport_name); + const std::string& transport_name) RTC_RUN_ON(network_thread_); // Creates jsep transport. Noop if transport is already created. 
// Transport is created either during SetLocalDescription (|local| == true) or @@ -297,12 +381,14 @@ class JsepTransportController : public sigslot::has_slots<> { RTCError MaybeCreateJsepTransport( bool local, const cricket::ContentInfo& content_info, - const cricket::SessionDescription& description); + const cricket::SessionDescription& description) + RTC_RUN_ON(network_thread_); - void MaybeDestroyJsepTransport(const std::string& mid); - void DestroyAllJsepTransports_n(); + void MaybeDestroyJsepTransport(const std::string& mid) + RTC_RUN_ON(network_thread_); + void DestroyAllJsepTransports_n() RTC_RUN_ON(network_thread_); - void SetIceRole_n(cricket::IceRole ice_role); + void SetIceRole_n(cricket::IceRole ice_role) RTC_RUN_ON(network_thread_); cricket::IceRole DetermineIceRole( cricket::JsepTransport* jsep_transport, @@ -336,38 +422,46 @@ class JsepTransportController : public sigslot::has_slots<> { std::vector GetDtlsTransports(); // Handlers for signals from Transport. - void OnTransportWritableState_n(rtc::PacketTransportInternal* transport); - void OnTransportReceivingState_n(rtc::PacketTransportInternal* transport); - void OnTransportGatheringState_n(cricket::IceTransportInternal* transport); + void OnTransportWritableState_n(rtc::PacketTransportInternal* transport) + RTC_RUN_ON(network_thread_); + void OnTransportReceivingState_n(rtc::PacketTransportInternal* transport) + RTC_RUN_ON(network_thread_); + void OnTransportGatheringState_n(cricket::IceTransportInternal* transport) + RTC_RUN_ON(network_thread_); void OnTransportCandidateGathered_n(cricket::IceTransportInternal* transport, - const cricket::Candidate& candidate); - void OnTransportCandidateError_n( - cricket::IceTransportInternal* transport, - const cricket::IceCandidateErrorEvent& event); + const cricket::Candidate& candidate) + RTC_RUN_ON(network_thread_); + void OnTransportCandidateError_n(cricket::IceTransportInternal* transport, + const cricket::IceCandidateErrorEvent& event) + 
RTC_RUN_ON(network_thread_); void OnTransportCandidatesRemoved_n(cricket::IceTransportInternal* transport, - const cricket::Candidates& candidates); - void OnTransportRoleConflict_n(cricket::IceTransportInternal* transport); - void OnTransportStateChanged_n(cricket::IceTransportInternal* transport); + const cricket::Candidates& candidates) + RTC_RUN_ON(network_thread_); + void OnTransportRoleConflict_n(cricket::IceTransportInternal* transport) + RTC_RUN_ON(network_thread_); + void OnTransportStateChanged_n(cricket::IceTransportInternal* transport) + RTC_RUN_ON(network_thread_); void OnTransportCandidatePairChanged_n( - const cricket::CandidatePairChangeEvent& event); - void UpdateAggregateStates_n(); - void ErrorDemuxingPacket_n(uint32_t ssrc); + const cricket::CandidatePairChangeEvent& event) + RTC_RUN_ON(network_thread_); + void UpdateAggregateStates_n() RTC_RUN_ON(network_thread_); void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer* packet, - int64_t packet_time_us); + int64_t packet_time_us) + RTC_RUN_ON(network_thread_); void OnDtlsHandshakeError(rtc::SSLHandshakeError error); - rtc::Thread* const signaling_thread_ = nullptr; rtc::Thread* const network_thread_ = nullptr; cricket::PortAllocator* const port_allocator_ = nullptr; - AsyncResolverFactory* const async_resolver_factory_ = nullptr; + AsyncDnsResolverFactoryInterface* const async_dns_resolver_factory_ = nullptr; std::map> - jsep_transports_by_name_; + jsep_transports_by_name_ RTC_GUARDED_BY(network_thread_); // This keeps track of the mapping between media section // (BaseChannel/SctpTransport) and the JsepTransport underneath. - std::map mid_to_transport_; + std::map mid_to_transport_ + RTC_GUARDED_BY(network_thread_); // Keep track of mids that have been mapped to transports. Used for rollback. std::vector pending_mids_ RTC_GUARDED_BY(network_thread_); // Aggregate states for Transports. 
@@ -382,19 +476,20 @@ class JsepTransportController : public sigslot::has_slots<> { PeerConnectionInterface::PeerConnectionState::kNew; cricket::IceGatheringState ice_gathering_state_ = cricket::kIceGatheringNew; - Config config_; + const Config config_; + bool active_reset_srtp_params_ RTC_GUARDED_BY(network_thread_); const cricket::SessionDescription* local_desc_ = nullptr; const cricket::SessionDescription* remote_desc_ = nullptr; absl::optional initial_offerer_; - absl::optional bundle_group_; + // Use unique_ptr<> to get a stable address. + std::vector> bundle_groups_; cricket::IceConfig ice_config_; cricket::IceRole ice_role_ = cricket::ICEROLE_CONTROLLING; uint64_t ice_tiebreaker_ = rtc::CreateRandomId64(); rtc::scoped_refptr certificate_; - rtc::AsyncInvoker invoker_; RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransportController); }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/local_audio_source.cc b/TMessagesProj/jni/voip/webrtc/pc/local_audio_source.cc index 22ab1c39c..3fcad50a1 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/local_audio_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/local_audio_source.cc @@ -18,8 +18,7 @@ namespace webrtc { rtc::scoped_refptr LocalAudioSource::Create( const cricket::AudioOptions* audio_options) { - rtc::scoped_refptr source( - new rtc::RefCountedObject()); + auto source = rtc::make_ref_counted(); source->Initialize(audio_options); return source; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_protocol_names.cc b/TMessagesProj/jni/voip/webrtc/pc/media_protocol_names.cc index 3def3f0f2..ae4fcf339 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_protocol_names.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_protocol_names.cc @@ -10,6 +10,9 @@ #include "pc/media_protocol_names.h" +#include +#include + namespace cricket { // There are multiple variants of the RTP protocol stack, including diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_session.cc b/TMessagesProj/jni/voip/webrtc/pc/media_session.cc index 
6d8a9a489..b4fc63439 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_session.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_session.cc @@ -10,8 +10,9 @@ #include "pc/media_session.h" +#include + #include -#include #include #include #include @@ -20,20 +21,24 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/crypto_params.h" -#include "media/base/h264_profile_level_id.h" +#include "api/video_codecs/h264_profile_level_id.h" +#include "media/base/codec.h" #include "media/base/media_constants.h" +#include "media/base/sdp_video_format_utils.h" #include "media/sctp/sctp_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "pc/channel_manager.h" #include "pc/media_protocol_names.h" #include "pc/rtp_media_utils.h" -#include "pc/srtp_filter.h" #include "pc/used_ids.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" #include "rtc_base/logging.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_encode.h" #include "rtc_base/third_party/base64/base64.h" #include "rtc_base/unique_id_generator.h" #include "system_wrappers/include/field_trial.h" @@ -789,10 +794,16 @@ static void NegotiateCodecs(const std::vector& local_codecs, // FindMatchingCodec shouldn't return something with no apt value. RTC_DCHECK(apt_it != theirs.params.end()); negotiated.SetParam(kCodecParamAssociatedPayloadType, apt_it->second); + + // We support parsing the declarative rtx-time parameter. 
+ const auto rtx_time_it = theirs.params.find(kCodecParamRtxTime); + if (rtx_time_it != theirs.params.end()) { + negotiated.SetParam(kCodecParamRtxTime, rtx_time_it->second); + } } if (absl::EqualsIgnoreCase(ours.name, kH264CodecName)) { - webrtc::H264::GenerateProfileLevelIdForAnswer( - ours.params, theirs.params, &negotiated.params); + webrtc::H264GenerateProfileLevelIdForAnswer(ours.params, theirs.params, + &negotiated.params); } negotiated.id = theirs.id; negotiated.name = theirs.name; @@ -1370,14 +1381,6 @@ void MediaDescriptionOptions::AddVideoSender( num_sim_layers); } -void MediaDescriptionOptions::AddRtpDataChannel(const std::string& track_id, - const std::string& stream_id) { - RTC_DCHECK(type == MEDIA_TYPE_DATA); - // TODO(steveanton): Is it the case that RtpDataChannel will never have more - // than one stream? - AddSenderInternal(track_id, {stream_id}, {}, SimulcastLayerList(), 1); -} - void MediaDescriptionOptions::AddSenderInternal( const std::string& track_id, const std::vector& stream_ids, @@ -1418,7 +1421,6 @@ MediaSessionDescriptionFactory::MediaSessionDescriptionFactory( channel_manager->GetSupportedAudioReceiveCodecs(&audio_recv_codecs_); channel_manager->GetSupportedVideoSendCodecs(&video_send_codecs_); channel_manager->GetSupportedVideoReceiveCodecs(&video_recv_codecs_); - channel_manager->GetSupportedDataCodecs(&rtp_data_codecs_); ComputeAudioCodecsIntersectionAndUnion(); ComputeVideoCodecsIntersectionAndUnion(); } @@ -1511,13 +1513,8 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( AudioCodecs offer_audio_codecs; VideoCodecs offer_video_codecs; - RtpDataCodecs offer_rtp_data_codecs; GetCodecsForOffer(current_active_contents, &offer_audio_codecs, - &offer_video_codecs, &offer_rtp_data_codecs); - if (!session_options.vad_enabled) { - // If application doesn't want CN codecs in offer. 
- StripCNCodecs(&offer_audio_codecs); - } + &offer_video_codecs); AudioVideoRtpHeaderExtensions extensions_with_ids = GetOfferedRtpHeaderExtensionsWithIds( current_active_contents, session_options.offer_extmap_allow_mixed, @@ -1561,8 +1558,8 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( case MEDIA_TYPE_DATA: if (!AddDataContentForOffer(media_description_options, session_options, current_content, current_description, - offer_rtp_data_codecs, ¤t_streams, - offer.get(), &ice_credentials)) { + ¤t_streams, offer.get(), + &ice_credentials)) { return nullptr; } break; @@ -1660,23 +1657,26 @@ MediaSessionDescriptionFactory::CreateAnswer( // sections. AudioCodecs answer_audio_codecs; VideoCodecs answer_video_codecs; - RtpDataCodecs answer_rtp_data_codecs; GetCodecsForAnswer(current_active_contents, *offer, &answer_audio_codecs, - &answer_video_codecs, &answer_rtp_data_codecs); - - if (!session_options.vad_enabled) { - // If application doesn't want CN codecs in answer. - StripCNCodecs(&answer_audio_codecs); - } + &answer_video_codecs); auto answer = std::make_unique(); // If the offer supports BUNDLE, and we want to use it too, create a BUNDLE // group in the answer with the appropriate content names. - const ContentGroup* offer_bundle = offer->GetGroupByName(GROUP_TYPE_BUNDLE); - ContentGroup answer_bundle(GROUP_TYPE_BUNDLE); - // Transport info shared by the bundle group. - std::unique_ptr bundle_transport; + std::vector offer_bundles = + offer->GetGroupsByName(GROUP_TYPE_BUNDLE); + // There are as many answer BUNDLE groups as offer BUNDLE groups (even if + // rejected, we respond with an empty group). |offer_bundles|, + // |answer_bundles| and |bundle_transports| share the same size and indices. 
+ std::vector answer_bundles; + std::vector> bundle_transports; + answer_bundles.reserve(offer_bundles.size()); + bundle_transports.reserve(offer_bundles.size()); + for (size_t i = 0; i < offer_bundles.size(); ++i) { + answer_bundles.emplace_back(GROUP_TYPE_BUNDLE); + bundle_transports.emplace_back(nullptr); + } answer->set_extmap_allow_mixed(offer->extmap_allow_mixed()); @@ -1691,6 +1691,18 @@ MediaSessionDescriptionFactory::CreateAnswer( RTC_DCHECK( IsMediaContentOfType(offer_content, media_description_options.type)); RTC_DCHECK(media_description_options.mid == offer_content->name); + // Get the index of the BUNDLE group that this MID belongs to, if any. + absl::optional bundle_index; + for (size_t i = 0; i < offer_bundles.size(); ++i) { + if (offer_bundles[i]->HasContentName(media_description_options.mid)) { + bundle_index = i; + break; + } + } + TransportInfo* bundle_transport = + bundle_index.has_value() ? bundle_transports[bundle_index.value()].get() + : nullptr; + const ContentInfo* current_content = nullptr; if (current_description && msection_index < current_description->contents().size()) { @@ -1703,26 +1715,25 @@ MediaSessionDescriptionFactory::CreateAnswer( case MEDIA_TYPE_AUDIO: if (!AddAudioContentForAnswer( media_description_options, session_options, offer_content, - offer, current_content, current_description, - bundle_transport.get(), answer_audio_codecs, header_extensions, - ¤t_streams, answer.get(), &ice_credentials)) { + offer, current_content, current_description, bundle_transport, + answer_audio_codecs, header_extensions, ¤t_streams, + answer.get(), &ice_credentials)) { return nullptr; } break; case MEDIA_TYPE_VIDEO: if (!AddVideoContentForAnswer( media_description_options, session_options, offer_content, - offer, current_content, current_description, - bundle_transport.get(), answer_video_codecs, header_extensions, - ¤t_streams, answer.get(), &ice_credentials)) { + offer, current_content, current_description, bundle_transport, + 
answer_video_codecs, header_extensions, ¤t_streams, + answer.get(), &ice_credentials)) { return nullptr; } break; case MEDIA_TYPE_DATA: if (!AddDataContentForAnswer( media_description_options, session_options, offer_content, - offer, current_content, current_description, - bundle_transport.get(), answer_rtp_data_codecs, + offer, current_content, current_description, bundle_transport, ¤t_streams, answer.get(), &ice_credentials)) { return nullptr; } @@ -1730,8 +1741,8 @@ MediaSessionDescriptionFactory::CreateAnswer( case MEDIA_TYPE_UNSUPPORTED: if (!AddUnsupportedContentForAnswer( media_description_options, session_options, offer_content, - offer, current_content, current_description, - bundle_transport.get(), answer.get(), &ice_credentials)) { + offer, current_content, current_description, bundle_transport, + answer.get(), &ice_credentials)) { return nullptr; } break; @@ -1742,37 +1753,41 @@ MediaSessionDescriptionFactory::CreateAnswer( // See if we can add the newly generated m= section to the BUNDLE group in // the answer. ContentInfo& added = answer->contents().back(); - if (!added.rejected && session_options.bundle_enabled && offer_bundle && - offer_bundle->HasContentName(added.name)) { - answer_bundle.AddContentName(added.name); - bundle_transport.reset( + if (!added.rejected && session_options.bundle_enabled && + bundle_index.has_value()) { + // The |bundle_index| is for |media_description_options.mid|. + RTC_DCHECK_EQ(media_description_options.mid, added.name); + answer_bundles[bundle_index.value()].AddContentName(added.name); + bundle_transports[bundle_index.value()].reset( new TransportInfo(*answer->GetTransportInfoByName(added.name))); } } - // If a BUNDLE group was offered, put a BUNDLE group in the answer even if - // it's empty. RFC5888 says: + // If BUNDLE group(s) were offered, put the same number of BUNDLE groups in + // the answer even if they're empty. 
RFC5888 says: // // A SIP entity that receives an offer that contains an "a=group" line // with semantics that are understood MUST return an answer that // contains an "a=group" line with the same semantics. - if (offer_bundle) { - answer->AddGroup(answer_bundle); - } + if (!offer_bundles.empty()) { + for (const ContentGroup& answer_bundle : answer_bundles) { + answer->AddGroup(answer_bundle); - if (answer_bundle.FirstContentName()) { - // Share the same ICE credentials and crypto params across all contents, - // as BUNDLE requires. - if (!UpdateTransportInfoForBundle(answer_bundle, answer.get())) { - RTC_LOG(LS_ERROR) - << "CreateAnswer failed to UpdateTransportInfoForBundle."; - return NULL; - } + if (answer_bundle.FirstContentName()) { + // Share the same ICE credentials and crypto params across all contents, + // as BUNDLE requires. + if (!UpdateTransportInfoForBundle(answer_bundle, answer.get())) { + RTC_LOG(LS_ERROR) + << "CreateAnswer failed to UpdateTransportInfoForBundle."; + return NULL; + } - if (!UpdateCryptoParamsForBundle(answer_bundle, answer.get())) { - RTC_LOG(LS_ERROR) - << "CreateAnswer failed to UpdateCryptoParamsForBundle."; - return NULL; + if (!UpdateCryptoParamsForBundle(answer_bundle, answer.get())) { + RTC_LOG(LS_ERROR) + << "CreateAnswer failed to UpdateCryptoParamsForBundle."; + return NULL; + } + } } } @@ -1886,7 +1901,6 @@ void MergeCodecsFromDescription( const std::vector& current_active_contents, AudioCodecs* audio_codecs, VideoCodecs* video_codecs, - RtpDataCodecs* rtp_data_codecs, UsedPayloadTypes* used_pltypes) { for (const ContentInfo* content : current_active_contents) { if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) { @@ -1897,14 +1911,6 @@ void MergeCodecsFromDescription( const VideoContentDescription* video = content->media_description()->as_video(); MergeCodecs(video->codecs(), video_codecs, used_pltypes); - } else if (IsMediaContentOfType(content, MEDIA_TYPE_DATA)) { - const RtpDataContentDescription* data = - 
content->media_description()->as_rtp_data(); - if (data) { - // Only relevant for RTP datachannels - MergeCodecs(data->codecs(), rtp_data_codecs, - used_pltypes); - } } } } @@ -1918,19 +1924,17 @@ void MergeCodecsFromDescription( void MediaSessionDescriptionFactory::GetCodecsForOffer( const std::vector& current_active_contents, AudioCodecs* audio_codecs, - VideoCodecs* video_codecs, - RtpDataCodecs* rtp_data_codecs) const { + VideoCodecs* video_codecs) const { // First - get all codecs from the current description if the media type // is used. Add them to |used_pltypes| so the payload type is not reused if a // new media type is added. UsedPayloadTypes used_pltypes; MergeCodecsFromDescription(current_active_contents, audio_codecs, - video_codecs, rtp_data_codecs, &used_pltypes); + video_codecs, &used_pltypes); // Add our codecs that are not in the current description. MergeCodecs(all_audio_codecs_, audio_codecs, &used_pltypes); MergeCodecs(all_video_codecs_, video_codecs, &used_pltypes); - MergeCodecs(rtp_data_codecs_, rtp_data_codecs, &used_pltypes); } // Getting codecs for an answer involves these steps: @@ -1944,19 +1948,17 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer( const std::vector& current_active_contents, const SessionDescription& remote_offer, AudioCodecs* audio_codecs, - VideoCodecs* video_codecs, - RtpDataCodecs* rtp_data_codecs) const { + VideoCodecs* video_codecs) const { // First - get all codecs from the current description if the media type // is used. Add them to |used_pltypes| so the payload type is not reused if a // new media type is added. UsedPayloadTypes used_pltypes; MergeCodecsFromDescription(current_active_contents, audio_codecs, - video_codecs, rtp_data_codecs, &used_pltypes); + video_codecs, &used_pltypes); // Second - filter out codecs that we don't support at all and should ignore. 
AudioCodecs filtered_offered_audio_codecs; VideoCodecs filtered_offered_video_codecs; - RtpDataCodecs filtered_offered_rtp_data_codecs; for (const ContentInfo& content : remote_offer.contents()) { if (IsMediaContentOfType(&content, MEDIA_TYPE_AUDIO)) { const AudioContentDescription* audio = @@ -1982,22 +1984,6 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer( filtered_offered_video_codecs.push_back(offered_video_codec); } } - } else if (IsMediaContentOfType(&content, MEDIA_TYPE_DATA)) { - const RtpDataContentDescription* data = - content.media_description()->as_rtp_data(); - if (data) { - // RTP data. This part is inactive for SCTP data. - for (const RtpDataCodec& offered_rtp_data_codec : data->codecs()) { - if (!FindMatchingCodec( - data->codecs(), filtered_offered_rtp_data_codecs, - offered_rtp_data_codec, nullptr) && - FindMatchingCodec(data->codecs(), rtp_data_codecs_, - offered_rtp_data_codec, - nullptr)) { - filtered_offered_rtp_data_codecs.push_back(offered_rtp_data_codec); - } - } - } } } @@ -2007,8 +1993,6 @@ void MediaSessionDescriptionFactory::GetCodecsForAnswer( &used_pltypes); MergeCodecs(filtered_offered_video_codecs, video_codecs, &used_pltypes); - MergeCodecs(filtered_offered_rtp_data_codecs, rtp_data_codecs, - &used_pltypes); } MediaSessionDescriptionFactory::AudioVideoRtpHeaderExtensions @@ -2190,6 +2174,10 @@ bool MediaSessionDescriptionFactory::AddAudioContentForOffer( } } } + if (!session_options.vad_enabled) { + // If application doesn't want CN codecs in offer. + StripCNCodecs(&filtered_codecs); + } cricket::SecurePolicy sdes_policy = IsDtlsActive(current_content, current_description) ? 
cricket::SEC_DISABLED @@ -2317,7 +2305,7 @@ bool MediaSessionDescriptionFactory::AddVideoContentForOffer( return true; } -bool MediaSessionDescriptionFactory::AddSctpDataContentForOffer( +bool MediaSessionDescriptionFactory::AddDataContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* current_content, @@ -2362,73 +2350,6 @@ bool MediaSessionDescriptionFactory::AddSctpDataContentForOffer( return true; } -bool MediaSessionDescriptionFactory::AddRtpDataContentForOffer( - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - const ContentInfo* current_content, - const SessionDescription* current_description, - const RtpDataCodecs& rtp_data_codecs, - StreamParamsVec* current_streams, - SessionDescription* desc, - IceCredentialsIterator* ice_credentials) const { - auto data = std::make_unique(); - bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED); - - cricket::SecurePolicy sdes_policy = - IsDtlsActive(current_content, current_description) ? 
cricket::SEC_DISABLED - : secure(); - std::vector crypto_suites; - GetSupportedDataSdesCryptoSuiteNames(session_options.crypto_options, - &crypto_suites); - if (!CreateMediaContentOffer(media_description_options, session_options, - rtp_data_codecs, sdes_policy, - GetCryptos(current_content), crypto_suites, - RtpHeaderExtensions(), ssrc_generator_, - current_streams, data.get())) { - return false; - } - - data->set_bandwidth(kRtpDataMaxBandwidth); - SetMediaProtocol(secure_transport, data.get()); - desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp, - media_description_options.stopped, std::move(data)); - if (!AddTransportOffer(media_description_options.mid, - media_description_options.transport_options, - current_description, desc, ice_credentials)) { - return false; - } - return true; -} - -bool MediaSessionDescriptionFactory::AddDataContentForOffer( - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - const ContentInfo* current_content, - const SessionDescription* current_description, - const RtpDataCodecs& rtp_data_codecs, - StreamParamsVec* current_streams, - SessionDescription* desc, - IceCredentialsIterator* ice_credentials) const { - bool is_sctp = (session_options.data_channel_type == DCT_SCTP); - // If the DataChannel type is not specified, use the DataChannel type in - // the current description. 
- if (session_options.data_channel_type == DCT_NONE && current_content) { - RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_DATA)); - is_sctp = (current_content->media_description()->protocol() == - kMediaProtocolSctp); - } - if (is_sctp) { - return AddSctpDataContentForOffer( - media_description_options, session_options, current_content, - current_description, current_streams, desc, ice_credentials); - } else { - return AddRtpDataContentForOffer(media_description_options, session_options, - current_content, current_description, - rtp_data_codecs, current_streams, desc, - ice_credentials); - } -} - bool MediaSessionDescriptionFactory::AddUnsupportedContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, @@ -2532,6 +2453,10 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( } } } + if (!session_options.vad_enabled) { + // If application doesn't want CN codecs in answer. + StripCNCodecs(&filtered_codecs); + } bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) && session_options.bundle_enabled; @@ -2702,7 +2627,6 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( const ContentInfo* current_content, const SessionDescription* current_description, const TransportInfo* bundle_transport, - const RtpDataCodecs& rtp_data_codecs, StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const { @@ -2750,32 +2674,13 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( bool offer_uses_sctpmap = offer_data_description->use_sctpmap(); data_answer->as_sctp()->set_use_sctpmap(offer_uses_sctpmap); } else { - // RTP offer - data_answer = std::make_unique(); - - const RtpDataContentDescription* offer_data_description = - offer_content->media_description()->as_rtp_data(); - RTC_CHECK(offer_data_description); - if (!SetCodecsInAnswer(offer_data_description, rtp_data_codecs, - media_description_options, 
session_options, - ssrc_generator_, current_streams, - data_answer->as_rtp_data())) { - return false; - } - if (!CreateMediaContentAnswer( - offer_data_description, media_description_options, session_options, - sdes_policy, GetCryptos(current_content), RtpHeaderExtensions(), - ssrc_generator_, enable_encrypted_rtp_header_extensions_, - current_streams, bundle_enabled, data_answer.get())) { - return false; // Fails the session setup. - } + RTC_NOTREACHED() << "Non-SCTP data content found"; } bool secure = bundle_transport ? bundle_transport->description.secure() : data_transport->secure(); - bool rejected = session_options.data_channel_type == DCT_NONE || - media_description_options.stopped || + bool rejected = media_description_options.stopped || offer_content->rejected || !IsMediaProtocolSupported(MEDIA_TYPE_DATA, data_answer->protocol(), secure); @@ -2784,13 +2689,6 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( return false; } - if (!rejected && session_options.data_channel_type == DCT_RTP) { - data_answer->set_bandwidth(kRtpDataMaxBandwidth); - } else { - // RFC 3264 - // The answer MUST contain the same number of m-lines as the offer. - RTC_LOG(LS_INFO) << "Data is not supported in the answer."; - } answer->AddContent(media_description_options.mid, offer_content->type, rejected, std::move(data_answer)); return true; @@ -2975,12 +2873,6 @@ const VideoContentDescription* GetFirstVideoContentDescription( return desc ? desc->as_video() : nullptr; } -const RtpDataContentDescription* GetFirstRtpDataContentDescription( - const SessionDescription* sdesc) { - auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA); - return desc ? desc->as_rtp_data() : nullptr; -} - const SctpDataContentDescription* GetFirstSctpDataContentDescription( const SessionDescription* sdesc) { auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA); @@ -3053,12 +2945,6 @@ VideoContentDescription* GetFirstVideoContentDescription( return desc ? 
desc->as_video() : nullptr; } -RtpDataContentDescription* GetFirstRtpDataContentDescription( - SessionDescription* sdesc) { - auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA); - return desc ? desc->as_rtp_data() : nullptr; -} - SctpDataContentDescription* GetFirstSctpDataContentDescription( SessionDescription* sdesc) { auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA); diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_session.h b/TMessagesProj/jni/voip/webrtc/pc/media_session.h index 58a31a2ab..d4c8025bc 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_session.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_session.h @@ -18,14 +18,21 @@ #include #include +#include "api/crypto/crypto_options.h" #include "api/media_types.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" #include "media/base/media_constants.h" -#include "media/base/media_engine.h" // For DataChannelType +#include "media/base/rid_description.h" +#include "media/base/stream_params.h" #include "p2p/base/ice_credentials_iterator.h" +#include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" +#include "p2p/base/transport_info.h" #include "pc/jsep_transport.h" #include "pc/media_protocol_names.h" #include "pc/session_description.h" +#include "pc/simulcast_description.h" #include "rtc_base/unique_id_generator.h" namespace cricket { @@ -65,10 +72,6 @@ struct MediaDescriptionOptions { const SimulcastLayerList& simulcast_layers, int num_sim_layers); - // Internally just uses sender_options. - void AddRtpDataChannel(const std::string& track_id, - const std::string& stream_id); - MediaType type; std::string mid; webrtc::RtpTransceiverDirection direction; @@ -102,7 +105,6 @@ struct MediaSessionOptions { bool HasMediaDescription(MediaType type) const; - DataChannelType data_channel_type = DCT_NONE; bool vad_enabled = true; // When disabled, removes all CN codecs from SDP. 
bool rtcp_mux_enabled = true; bool bundle_enabled = false; @@ -154,10 +156,6 @@ class MediaSessionDescriptionFactory { const VideoCodecs& recv_codecs); RtpHeaderExtensions filtered_rtp_header_extensions( RtpHeaderExtensions extensions) const; - const RtpDataCodecs& rtp_data_codecs() const { return rtp_data_codecs_; } - void set_rtp_data_codecs(const RtpDataCodecs& codecs) { - rtp_data_codecs_ = codecs; - } SecurePolicy secure() const { return secure_; } void set_secure(SecurePolicy s) { secure_ = s; } @@ -196,14 +194,12 @@ class MediaSessionDescriptionFactory { void GetCodecsForOffer( const std::vector& current_active_contents, AudioCodecs* audio_codecs, - VideoCodecs* video_codecs, - RtpDataCodecs* rtp_data_codecs) const; + VideoCodecs* video_codecs) const; void GetCodecsForAnswer( const std::vector& current_active_contents, const SessionDescription& remote_offer, AudioCodecs* audio_codecs, - VideoCodecs* video_codecs, - RtpDataCodecs* rtp_data_codecs) const; + VideoCodecs* video_codecs) const; AudioVideoRtpHeaderExtensions GetOfferedRtpHeaderExtensionsWithIds( const std::vector& current_active_contents, bool extmap_allow_mixed, @@ -253,32 +249,11 @@ class MediaSessionDescriptionFactory { SessionDescription* desc, IceCredentialsIterator* ice_credentials) const; - bool AddSctpDataContentForOffer( - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - const ContentInfo* current_content, - const SessionDescription* current_description, - StreamParamsVec* current_streams, - SessionDescription* desc, - IceCredentialsIterator* ice_credentials) const; - bool AddRtpDataContentForOffer( - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - const ContentInfo* current_content, - const SessionDescription* current_description, - const RtpDataCodecs& rtp_data_codecs, - StreamParamsVec* current_streams, - SessionDescription* desc, - IceCredentialsIterator* ice_credentials) 
const; - // This function calls either AddRtpDataContentForOffer or - // AddSctpDataContentForOffer depending on protocol. - // The codecs argument is ignored for SCTP. bool AddDataContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* current_content, const SessionDescription* current_description, - const RtpDataCodecs& rtp_data_codecs, StreamParamsVec* current_streams, SessionDescription* desc, IceCredentialsIterator* ice_credentials) const; @@ -327,7 +302,6 @@ class MediaSessionDescriptionFactory { const ContentInfo* current_content, const SessionDescription* current_description, const TransportInfo* bundle_transport, - const RtpDataCodecs& rtp_data_codecs, StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const; @@ -360,7 +334,6 @@ class MediaSessionDescriptionFactory { VideoCodecs video_sendrecv_codecs_; // Union of send and recv. VideoCodecs all_video_codecs_; - RtpDataCodecs rtp_data_codecs_; // This object is not owned by the channel so it must outlive it. rtc::UniqueRandomIdGenerator* const ssrc_generator_; bool enable_encrypted_rtp_header_extensions_ = false; @@ -390,8 +363,6 @@ const AudioContentDescription* GetFirstAudioContentDescription( const SessionDescription* sdesc); const VideoContentDescription* GetFirstVideoContentDescription( const SessionDescription* sdesc); -const RtpDataContentDescription* GetFirstRtpDataContentDescription( - const SessionDescription* sdesc); const SctpDataContentDescription* GetFirstSctpDataContentDescription( const SessionDescription* sdesc); // Non-const versions of the above functions. 
@@ -409,8 +380,6 @@ AudioContentDescription* GetFirstAudioContentDescription( SessionDescription* sdesc); VideoContentDescription* GetFirstVideoContentDescription( SessionDescription* sdesc); -RtpDataContentDescription* GetFirstRtpDataContentDescription( - SessionDescription* sdesc); SctpDataContentDescription* GetFirstSctpDataContentDescription( SessionDescription* sdesc); diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc b/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc index 00f491b3c..08a2a723d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc @@ -31,9 +31,7 @@ static typename V::iterator FindTrack(V* vector, const std::string& track_id) { } rtc::scoped_refptr MediaStream::Create(const std::string& id) { - rtc::RefCountedObject* stream = - new rtc::RefCountedObject(id); - return stream; + return rtc::make_ref_counted(id); } MediaStream::MediaStream(const std::string& id) : id_(id) {} diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc index dd8f7d0ff..e2b3b6105 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc @@ -12,6 +12,7 @@ #include #include + #include #include #include @@ -33,26 +34,27 @@ #include "media/base/rid_description.h" #include "media/base/stream_params.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "p2p/base/basic_async_resolver_factory.h" #include "p2p/base/connection.h" #include "p2p/base/connection_info.h" #include "p2p/base/dtls_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/transport_info.h" +#include "pc/channel.h" #include "pc/ice_server_parsing.h" #include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" #include "pc/sctp_transport.h" #include "pc/simulcast_description.h" #include "pc/webrtc_session_description_factory.h" -#include 
"rtc_base/bind.h" #include "rtc_base/helpers.h" #include "rtc_base/ip_address.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" #include "rtc_base/network_constants.h" -#include "rtc_base/callback_list.h" +#include "rtc_base/ref_counted_object.h" #include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" #include "rtc_base/task_utils/to_queued_task.h" @@ -88,7 +90,6 @@ const char kSimulcastNumberOfEncodings[] = static const int REPORT_USAGE_PATTERN_DELAY_MS = 60000; - uint32_t ConvertIceTransportTypeToCandidateFilter( PeerConnectionInterface::IceTransportsType type) { switch (type) { @@ -179,7 +180,6 @@ IceCandidatePairType GetIceCandidatePairCounter( return kIceCandidatePairMax; } - absl::optional RTCConfigurationToIceConfigOptionalInt( int rtc_configuration_parameter) { if (rtc_configuration_parameter == @@ -247,6 +247,8 @@ cricket::IceConfig ParseIceConfig( ice_config.ice_inactive_timeout = config.ice_inactive_timeout; ice_config.stun_keepalive_interval = config.stun_candidate_keepalive_interval; ice_config.network_preference = config.network_preference; + ice_config.stable_writable_connection_ping_interval = + config.stable_writable_connection_ping_interval_ms; return ice_config; } @@ -264,6 +266,20 @@ bool HasRtcpMuxEnabled(const cricket::ContentInfo* content) { return content->media_description()->rtcp_mux(); } +bool DtlsEnabled(const PeerConnectionInterface::RTCConfiguration& configuration, + const PeerConnectionFactoryInterface::Options& options, + const PeerConnectionDependencies& dependencies) { + if (options.disable_encryption) + return false; + + // Enable DTLS by default if we have an identity store or a certificate. + bool default_enabled = + (dependencies.cert_generator || !configuration.certificates.empty()); + + // The |configuration| can override the default value. 
+ return configuration.enable_dtls_srtp.value_or(default_enabled); +} + } // namespace bool PeerConnectionInterface::RTCConfiguration::operator==( @@ -319,6 +335,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( bool enable_implicit_rollback; absl::optional allow_codec_switching; absl::optional report_usage_pattern_delay_ms; + absl::optional stable_writable_connection_ping_interval_ms; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -347,7 +364,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( disable_ipv6_on_wifi == o.disable_ipv6_on_wifi && max_ipv6_networks == o.max_ipv6_networks && disable_link_local_networks == o.disable_link_local_networks && - enable_rtp_data_channel == o.enable_rtp_data_channel && screencast_min_bitrate == o.screencast_min_bitrate && combined_audio_video_bwe == o.combined_audio_video_bwe && enable_dtls_srtp == o.enable_dtls_srtp && @@ -379,7 +395,9 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( turn_logging_id == o.turn_logging_id && enable_implicit_rollback == o.enable_implicit_rollback && allow_codec_switching == o.allow_codec_switching && - report_usage_pattern_delay_ms == o.report_usage_pattern_delay_ms; + report_usage_pattern_delay_ms == o.report_usage_pattern_delay_ms && + stable_writable_connection_ping_interval_ms == + o.stable_writable_connection_ping_interval_ms; } bool PeerConnectionInterface::RTCConfiguration::operator!=( @@ -387,7 +405,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator!=( return !(*this == o); } -rtc::scoped_refptr PeerConnection::Create( +RTCErrorOr> PeerConnection::Create( rtc::scoped_refptr context, const PeerConnectionFactoryInterface::Options& options, std::unique_ptr event_log, @@ -397,33 +415,63 @@ rtc::scoped_refptr PeerConnection::Create( RTCError config_error = cricket::P2PTransportChannel::ValidateIceConfig( ParseIceConfig(configuration)); if 
(!config_error.ok()) { - RTC_LOG(LS_ERROR) << "Invalid configuration: " << config_error.message(); - return nullptr; + RTC_LOG(LS_ERROR) << "Invalid ICE configuration: " + << config_error.message(); + return config_error; } if (!dependencies.allocator) { RTC_LOG(LS_ERROR) << "PeerConnection initialized without a PortAllocator? " "This shouldn't happen if using PeerConnectionFactory."; - return nullptr; + return RTCError( + RTCErrorType::INVALID_PARAMETER, + "Attempt to create a PeerConnection without a PortAllocatorFactory"); } if (!dependencies.observer) { // TODO(deadbeef): Why do we do this? RTC_LOG(LS_ERROR) << "PeerConnection initialized without a " "PeerConnectionObserver"; - return nullptr; + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Attempt to create a PeerConnection without an observer"); } bool is_unified_plan = configuration.sdp_semantics == SdpSemantics::kUnifiedPlan; + bool dtls_enabled = DtlsEnabled(configuration, options, dependencies); + + // Interim code: If an AsyncResolverFactory is given, but not an + // AsyncDnsResolverFactory, wrap it in a WrappingAsyncDnsResolverFactory + // If neither is given, create a WrappingAsyncDnsResolverFactory wrapping + // a BasicAsyncResolver. + // TODO(bugs.webrtc.org/12598): Remove code once all callers pass a + // AsyncDnsResolverFactory. + if (dependencies.async_dns_resolver_factory && + dependencies.async_resolver_factory) { + RTC_LOG(LS_ERROR) + << "Attempt to set both old and new type of DNS resolver factory"; + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Both old and new type of DNS resolver given"); + } + if (dependencies.async_resolver_factory) { + dependencies.async_dns_resolver_factory = + std::make_unique( + std::move(dependencies.async_resolver_factory)); + } else { + dependencies.async_dns_resolver_factory = + std::make_unique( + std::make_unique()); + } + // The PeerConnection constructor consumes some, but not all, dependencies. 
- rtc::scoped_refptr pc( - new rtc::RefCountedObject( - context, options, is_unified_plan, std::move(event_log), - std::move(call), dependencies)); - if (!pc->Initialize(configuration, std::move(dependencies))) { - return nullptr; + auto pc = rtc::make_ref_counted( + context, options, is_unified_plan, std::move(event_log), std::move(call), + dependencies, dtls_enabled); + RTCError init_error = pc->Initialize(configuration, std::move(dependencies)); + if (!init_error.ok()) { + RTC_LOG(LS_ERROR) << "PeerConnection initialization failed"; + return init_error; } return pc; } @@ -434,21 +482,37 @@ PeerConnection::PeerConnection( bool is_unified_plan, std::unique_ptr event_log, std::unique_ptr call, - PeerConnectionDependencies& dependencies) + PeerConnectionDependencies& dependencies, + bool dtls_enabled) : context_(context), options_(options), observer_(dependencies.observer), is_unified_plan_(is_unified_plan), event_log_(std::move(event_log)), event_log_ptr_(event_log_.get()), - async_resolver_factory_(std::move(dependencies.async_resolver_factory)), + async_dns_resolver_factory_( + std::move(dependencies.async_dns_resolver_factory)), port_allocator_(std::move(dependencies.allocator)), ice_transport_factory_(std::move(dependencies.ice_transport_factory)), tls_cert_verifier_(std::move(dependencies.tls_cert_verifier)), call_(std::move(call)), call_ptr_(call_.get()), + // RFC 3264: The numeric value of the session id and version in the + // o line MUST be representable with a "64 bit signed integer". + // Due to this constraint session id |session_id_| is max limited to + // LLONG_MAX. 
+ session_id_(rtc::ToString(rtc::CreateRandomId64() & LLONG_MAX)), + dtls_enabled_(dtls_enabled), data_channel_controller_(this), - message_handler_(signaling_thread()) {} + message_handler_(signaling_thread()), + weak_factory_(this) { + worker_thread()->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(worker_thread()); + worker_thread_safety_ = PendingTaskSafetyFlag::Create(); + if (!call_) + worker_thread_safety_->SetNotAlive(); + }); +} PeerConnection::~PeerConnection() { TRACE_EVENT0("webrtc", "PeerConnection::~PeerConnection"); @@ -482,24 +546,29 @@ PeerConnection::~PeerConnection() { sdp_handler_->ResetSessionDescFactory(); } - transport_controller_.reset(); - // port_allocator_ lives on the network thread and should be destroyed there. + // port_allocator_ and transport_controller_ live on the network thread and + // should be destroyed there. network_thread()->Invoke(RTC_FROM_HERE, [this] { RTC_DCHECK_RUN_ON(network_thread()); + TeardownDataChannelTransport_n(); + transport_controller_.reset(); port_allocator_.reset(); + if (network_thread_safety_) + network_thread_safety_->SetNotAlive(); }); + // call_ and event_log_ must be destroyed on the worker thread. worker_thread()->Invoke(RTC_FROM_HERE, [this] { RTC_DCHECK_RUN_ON(worker_thread()); - call_safety_.reset(); + worker_thread_safety_->SetNotAlive(); call_.reset(); // The event log must outlive call (and any other object that uses it). 
event_log_.reset(); }); } -bool PeerConnection::Initialize( +RTCError PeerConnection::Initialize( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -511,7 +580,7 @@ bool PeerConnection::Initialize( RTCErrorType parse_error = ParseIceServers(configuration.servers, &stun_servers, &turn_servers); if (parse_error != RTCErrorType::NONE) { - return false; + return RTCError(parse_error, "ICE server parse failed"); } // Add the turn logging id to all turn servers @@ -519,14 +588,6 @@ bool PeerConnection::Initialize( turn_server.turn_logging_id = configuration.turn_logging_id; } - // The port allocator lives on the network thread and should be initialized - // there. - const auto pa_result = - network_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::InitializePortAllocator_n, this, - stun_servers, turn_servers, configuration)); - // Note if STUN or TURN servers were supplied. if (!stun_servers.empty()) { NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED); @@ -535,21 +596,65 @@ bool PeerConnection::Initialize( NoteUsageEvent(UsageEvent::TURN_SERVER_ADDED); } - // Send information about IPv4/IPv6 status. - PeerConnectionAddressFamilyCounter address_family; - if (pa_result.enable_ipv6) { - address_family = kPeerConnection_IPv6; - } else { - address_family = kPeerConnection_IPv4; - } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", address_family, - kPeerConnectionAddressFamilyCounter_Max); + // Network thread initialization. + network_thread()->Invoke(RTC_FROM_HERE, [this, &stun_servers, + &turn_servers, &configuration, + &dependencies] { + RTC_DCHECK_RUN_ON(network_thread()); + network_thread_safety_ = PendingTaskSafetyFlag::Create(); + InitializePortAllocatorResult pa_result = + InitializePortAllocator_n(stun_servers, turn_servers, configuration); + // Send information about IPv4/IPv6 status. 
+ PeerConnectionAddressFamilyCounter address_family = + pa_result.enable_ipv6 ? kPeerConnection_IPv6 : kPeerConnection_IPv4; + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", address_family, + kPeerConnectionAddressFamilyCounter_Max); + InitializeTransportController_n(configuration, dependencies); + }); - // RFC 3264: The numeric value of the session id and version in the - // o line MUST be representable with a "64 bit signed integer". - // Due to this constraint session id |session_id_| is max limited to - // LLONG_MAX. - session_id_ = rtc::ToString(rtc::CreateRandomId64() & LLONG_MAX); + configuration_ = configuration; + + stats_ = std::make_unique(this); + stats_collector_ = RTCStatsCollector::Create(this); + + sdp_handler_ = + SdpOfferAnswerHandler::Create(this, configuration, dependencies); + + rtp_manager_ = std::make_unique( + IsUnifiedPlan(), signaling_thread(), worker_thread(), channel_manager(), + &usage_pattern_, observer_, stats_.get(), [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + sdp_handler_->UpdateNegotiationNeeded(); + }); + + // Add default audio/video transceivers for Plan B SDP. + if (!IsUnifiedPlan()) { + rtp_manager()->transceivers()->Add( + RtpTransceiverProxyWithInternal::Create( + signaling_thread(), + new RtpTransceiver(cricket::MEDIA_TYPE_AUDIO, channel_manager()))); + rtp_manager()->transceivers()->Add( + RtpTransceiverProxyWithInternal::Create( + signaling_thread(), + new RtpTransceiver(cricket::MEDIA_TYPE_VIDEO, channel_manager()))); + } + + int delay_ms = configuration.report_usage_pattern_delay_ms + ? 
*configuration.report_usage_pattern_delay_ms + : REPORT_USAGE_PATTERN_DELAY_MS; + message_handler_.RequestUsagePatternReport( + [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + ReportUsagePattern(); + }, + delay_ms); + + return RTCError::OK(); +} + +void PeerConnection::InitializeTransportController_n( + const RTCConfiguration& configuration, + const PeerConnectionDependencies& dependencies) { JsepTransportController::Config config; config.redetermine_role_on_ice_restart = configuration.redetermine_role_on_ice_restart; @@ -570,101 +675,102 @@ bool PeerConnection::Initialize( #endif config.active_reset_srtp_params = configuration.active_reset_srtp_params; - if (options_.disable_encryption) { - dtls_enabled_ = false; - } else { - // Enable DTLS by default if we have an identity store or a certificate. - dtls_enabled_ = - (dependencies.cert_generator || !configuration.certificates.empty()); - // |configuration| can override the default |dtls_enabled_| value. - if (configuration.enable_dtls_srtp) { - dtls_enabled_ = *(configuration.enable_dtls_srtp); - } - } - - if (configuration.enable_rtp_data_channel) { - // Enable creation of RTP data channels if the kEnableRtpDataChannels is - // set. It takes precendence over the disable_sctp_data_channels - // PeerConnectionFactoryInterface::Options. - data_channel_controller_.set_data_channel_type(cricket::DCT_RTP); - } else { - // DTLS has to be enabled to use SCTP. - if (!options_.disable_sctp_data_channels && dtls_enabled_) { - data_channel_controller_.set_data_channel_type(cricket::DCT_SCTP); - config.sctp_factory = context_->sctp_transport_factory(); - } + // DTLS has to be enabled to use SCTP. 
+ if (dtls_enabled_) { + config.sctp_factory = context_->sctp_transport_factory(); } config.ice_transport_factory = ice_transport_factory_.get(); + config.on_dtls_handshake_error_ = + [weak_ptr = weak_factory_.GetWeakPtr()](rtc::SSLHandshakeError s) { + if (weak_ptr) { + weak_ptr->OnTransportControllerDtlsHandshakeError(s); + } + }; - transport_controller_.reset(new JsepTransportController( - signaling_thread(), network_thread(), port_allocator_.get(), - async_resolver_factory_.get(), config)); - transport_controller_->SignalStandardizedIceConnectionState.connect( - this, &PeerConnection::SetStandardizedIceConnectionState); - transport_controller_->SignalConnectionState.connect( - this, &PeerConnection::SetConnectionState); - transport_controller_->SignalIceGatheringState.connect( - this, &PeerConnection::OnTransportControllerGatheringState); - transport_controller_->SignalIceCandidatesGathered.connect( - this, &PeerConnection::OnTransportControllerCandidatesGathered); - transport_controller_->SignalIceCandidateError.connect( - this, &PeerConnection::OnTransportControllerCandidateError); - transport_controller_->SignalIceCandidatesRemoved.connect( - this, &PeerConnection::OnTransportControllerCandidatesRemoved); - transport_controller_->SignalDtlsHandshakeError.connect( - this, &PeerConnection::OnTransportControllerDtlsHandshakeError); - transport_controller_->SignalIceCandidatePairChanged.connect( - this, &PeerConnection::OnTransportControllerCandidateChanged); - transport_controller_->SignalErrorDemuxingPacket.connect( - this, &PeerConnection::OnErrorDemuxingPacket); + transport_controller_.reset( + new JsepTransportController(network_thread(), port_allocator_.get(), + async_dns_resolver_factory_.get(), config)); - transport_controller_->SignalIceConnectionState.AddReceiver( + transport_controller_->SubscribeIceConnectionState( [this](cricket::IceConnectionState s) { - RTC_DCHECK_RUN_ON(signaling_thread()); - OnTransportControllerConnectionState(s); + 
RTC_DCHECK_RUN_ON(network_thread()); + if (s == cricket::kIceConnectionConnected) { + ReportTransportStats(); + } + signaling_thread()->PostTask( + ToQueuedTask(signaling_thread_safety_.flag(), [this, s]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + OnTransportControllerConnectionState(s); + })); + }); + transport_controller_->SubscribeConnectionState( + [this](PeerConnectionInterface::PeerConnectionState s) { + RTC_DCHECK_RUN_ON(network_thread()); + signaling_thread()->PostTask( + ToQueuedTask(signaling_thread_safety_.flag(), [this, s]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + SetConnectionState(s); + })); + }); + transport_controller_->SubscribeStandardizedIceConnectionState( + [this](PeerConnectionInterface::IceConnectionState s) { + RTC_DCHECK_RUN_ON(network_thread()); + signaling_thread()->PostTask( + ToQueuedTask(signaling_thread_safety_.flag(), [this, s]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + SetStandardizedIceConnectionState(s); + })); + }); + transport_controller_->SubscribeIceGatheringState( + [this](cricket::IceGatheringState s) { + RTC_DCHECK_RUN_ON(network_thread()); + signaling_thread()->PostTask( + ToQueuedTask(signaling_thread_safety_.flag(), [this, s]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + OnTransportControllerGatheringState(s); + })); + }); + transport_controller_->SubscribeIceCandidateGathered( + [this](const std::string& transport, + const std::vector& candidates) { + RTC_DCHECK_RUN_ON(network_thread()); + signaling_thread()->PostTask( + ToQueuedTask(signaling_thread_safety_.flag(), + [this, t = transport, c = candidates]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + OnTransportControllerCandidatesGathered(t, c); + })); + }); + transport_controller_->SubscribeIceCandidateError( + [this](const cricket::IceCandidateErrorEvent& event) { + RTC_DCHECK_RUN_ON(network_thread()); + signaling_thread()->PostTask(ToQueuedTask( + signaling_thread_safety_.flag(), [this, event = event]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + 
OnTransportControllerCandidateError(event); + })); + }); + transport_controller_->SubscribeIceCandidatesRemoved( + [this](const std::vector& c) { + RTC_DCHECK_RUN_ON(network_thread()); + signaling_thread()->PostTask( + ToQueuedTask(signaling_thread_safety_.flag(), [this, c = c]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + OnTransportControllerCandidatesRemoved(c); + })); + }); + transport_controller_->SubscribeIceCandidatePairChanged( + [this](const cricket::CandidatePairChangeEvent& event) { + RTC_DCHECK_RUN_ON(network_thread()); + signaling_thread()->PostTask(ToQueuedTask( + signaling_thread_safety_.flag(), [this, event = event]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + OnTransportControllerCandidateChanged(event); + })); }); - - configuration_ = configuration; transport_controller_->SetIceConfig(ParseIceConfig(configuration)); - - stats_ = std::make_unique(this); - stats_collector_ = RTCStatsCollector::Create(this); - - demuxing_observer_ = new rtc::RefCountedObject(observer_); - - sdp_handler_ = - SdpOfferAnswerHandler::Create(this, configuration, dependencies); - - rtp_manager_ = std::make_unique( - IsUnifiedPlan(), signaling_thread(), worker_thread(), channel_manager(), - &usage_pattern_, observer_, stats_.get(), [this]() { - RTC_DCHECK_RUN_ON(signaling_thread()); - sdp_handler_->UpdateNegotiationNeeded(); - }); - - // Add default audio/video transceivers for Plan B SDP. - if (!IsUnifiedPlan()) { - rtp_manager()->transceivers()->Add( - RtpTransceiverProxyWithInternal::Create( - signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_AUDIO))); - rtp_manager()->transceivers()->Add( - RtpTransceiverProxyWithInternal::Create( - signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_VIDEO))); - } - - int delay_ms = configuration.report_usage_pattern_delay_ms - ? 
*configuration.report_usage_pattern_delay_ms - : REPORT_USAGE_PATTERN_DELAY_MS; - message_handler_.RequestUsagePatternReport( - [this]() { - RTC_DCHECK_RUN_ON(signaling_thread()); - ReportUsagePattern(); - }, - delay_ms); - - return true; } rtc::scoped_refptr PeerConnection::local_streams() { @@ -790,6 +896,16 @@ PeerConnection::AddTransceiver( return AddTransceiver(track, RtpTransceiverInit()); } +RtpTransportInternal* PeerConnection::GetRtpTransport(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + return network_thread()->Invoke( + RTC_FROM_HERE, [this, &mid] { + auto rtp_transport = transport_controller_->GetRtpTransport(mid); + RTC_DCHECK(rtp_transport); + return rtp_transport; + }); +} + RTCErrorOr> PeerConnection::AddTransceiver( rtc::scoped_refptr track, @@ -881,9 +997,11 @@ PeerConnection::AddTransceiver( parameters.encodings = init.send_encodings; // Encodings are dropped from the tail if too many are provided. - if (parameters.encodings.size() > kMaxSimulcastStreams) { + size_t max_simulcast_streams = + media_type == cricket::MEDIA_TYPE_VIDEO ? kMaxSimulcastStreams : 1u; + if (parameters.encodings.size() > max_simulcast_streams) { parameters.encodings.erase( - parameters.encodings.begin() + kMaxSimulcastStreams, + parameters.encodings.begin() + max_simulcast_streams, parameters.encodings.end()); } @@ -1031,6 +1149,8 @@ bool PeerConnection::GetStats(StatsObserver* observer, return false; } + RTC_LOG_THREAD_BLOCK_COUNT(); + stats_->UpdateStats(level); // The StatsCollector is used to tell if a track is valid because it may // remember tracks that the PeerConnection previously removed. 
@@ -1040,6 +1160,7 @@ bool PeerConnection::GetStats(StatsObserver* observer, return false; } message_handler_.PostGetStats(observer, stats_.get(), track); + return true; } @@ -1048,6 +1169,7 @@ void PeerConnection::GetStats(RTCStatsCollectorCallback* callback) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(stats_collector_); RTC_DCHECK(callback); + RTC_LOG_THREAD_BLOCK_COUNT(); stats_collector_->GetStatsReport(callback); } @@ -1176,9 +1298,9 @@ rtc::scoped_refptr PeerConnection::CreateDataChannel( return nullptr; } - // Trigger the onRenegotiationNeeded event for every new RTP DataChannel, or + // Trigger the onRenegotiationNeeded event for // the first SCTP DataChannel. - if (data_channel_type() == cricket::DCT_RTP || first_datachannel) { + if (first_datachannel) { sdp_handler_->UpdateNegotiationNeeded(); } NoteUsageEvent(UsageEvent::DATA_ADDED); @@ -1303,6 +1425,8 @@ RTCError PeerConnection::SetConfiguration( configuration.active_reset_srtp_params; modified_config.turn_logging_id = configuration.turn_logging_id; modified_config.allow_codec_switching = configuration.allow_codec_switching; + modified_config.stable_writable_connection_ping_interval_ms = + configuration.stable_writable_connection_ping_interval_ms; if (configuration != modified_config) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, "Modifying the configuration in an unsupported way."); @@ -1342,36 +1466,46 @@ RTCError PeerConnection::SetConfiguration( NoteUsageEvent(UsageEvent::TURN_SERVER_ADDED); } - // In theory this shouldn't fail. 
- if (!network_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::ReconfigurePortAllocator_n, this, - stun_servers, turn_servers, modified_config.type, - modified_config.ice_candidate_pool_size, - modified_config.GetTurnPortPrunePolicy(), - modified_config.turn_customizer, - modified_config.stun_candidate_keepalive_interval, - static_cast(local_description())))) { - LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, - "Failed to apply configuration to PortAllocator."); - } + const bool has_local_description = local_description() != nullptr; - // As described in JSEP, calling setConfiguration with new ICE servers or - // candidate policy must set a "needs-ice-restart" bit so that the next offer - // triggers an ICE restart which will pick up the changes. - if (modified_config.servers != configuration_.servers || + const bool needs_ice_restart = + modified_config.servers != configuration_.servers || NeedIceRestart( configuration_.surface_ice_candidates_on_ice_transport_type_changed, configuration_.type, modified_config.type) || modified_config.GetTurnPortPrunePolicy() != - configuration_.GetTurnPortPrunePolicy()) { - transport_controller_->SetNeedsIceRestartFlag(); - } + configuration_.GetTurnPortPrunePolicy(); + cricket::IceConfig ice_config = ParseIceConfig(modified_config); - transport_controller_->SetIceConfig(ParseIceConfig(modified_config)); + // Apply part of the configuration on the network thread. In theory this + // shouldn't fail. + if (!network_thread()->Invoke( + RTC_FROM_HERE, + [this, needs_ice_restart, &ice_config, &stun_servers, &turn_servers, + &modified_config, has_local_description] { + // As described in JSEP, calling setConfiguration with new ICE + // servers or candidate policy must set a "needs-ice-restart" bit so + // that the next offer triggers an ICE restart which will pick up + // the changes. 
+ if (needs_ice_restart) + transport_controller_->SetNeedsIceRestartFlag(); + + transport_controller_->SetIceConfig(ice_config); + return ReconfigurePortAllocator_n( + stun_servers, turn_servers, modified_config.type, + modified_config.ice_candidate_pool_size, + modified_config.GetTurnPortPrunePolicy(), + modified_config.turn_customizer, + modified_config.stun_candidate_keepalive_interval, + has_local_description); + })) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to apply configuration to PortAllocator."); + } if (configuration_.active_reset_srtp_params != modified_config.active_reset_srtp_params) { + // TODO(tommi): move to the network thread - this hides an invoke. transport_controller_->SetActiveResetSrtpParams( modified_config.active_reset_srtp_params); } @@ -1466,8 +1600,7 @@ RTCError PeerConnection::SetBitrate(const BitrateSettings& bitrate) { void PeerConnection::SetAudioPlayout(bool playout) { if (!worker_thread()->IsCurrent()) { worker_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::SetAudioPlayout, this, playout)); + RTC_FROM_HERE, [this, playout] { SetAudioPlayout(playout); }); return; } auto audio_state = @@ -1478,8 +1611,7 @@ void PeerConnection::SetAudioPlayout(bool playout) { void PeerConnection::SetAudioRecording(bool recording) { if (!worker_thread()->IsCurrent()) { worker_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::SetAudioRecording, this, recording)); + RTC_FROM_HERE, [this, recording] { SetAudioRecording(recording); }); return; } auto audio_state = @@ -1522,13 +1654,12 @@ bool PeerConnection::StartRtcEventLog( } void PeerConnection::StopRtcEventLog() { - worker_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&PeerConnection::StopRtcEventLog_w, this)); + worker_thread()->Invoke(RTC_FROM_HERE, [this] { StopRtcEventLog_w(); }); } rtc::scoped_refptr PeerConnection::LookupDtlsTransportByMid(const std::string& mid) { - RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK_RUN_ON(network_thread()); 
return transport_controller_->LookupDtlsTransportByMid(mid); } @@ -1540,11 +1671,11 @@ PeerConnection::LookupDtlsTransportByMidInternal(const std::string& mid) { rtc::scoped_refptr PeerConnection::GetSctpTransport() const { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (!sctp_mid_s_) { + RTC_DCHECK_RUN_ON(network_thread()); + if (!sctp_mid_n_) return nullptr; - } - return transport_controller_->GetSctpTransport(*sctp_mid_s_); + + return transport_controller_->GetSctpTransport(*sctp_mid_n_); } const SessionDescriptionInterface* PeerConnection::local_description() const { @@ -1585,6 +1716,8 @@ void PeerConnection::Close() { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "PeerConnection::Close"); + RTC_LOG_THREAD_BLOCK_COUNT(); + if (IsClosed()) { return; } @@ -1625,16 +1758,24 @@ void PeerConnection::Close() { // WebRTC session description factory, the session description factory would // call the transport controller. sdp_handler_->ResetSessionDescFactory(); - transport_controller_.reset(); rtp_manager_->Close(); - network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, - port_allocator_.get())); + network_thread()->Invoke(RTC_FROM_HERE, [this] { + // Data channels will already have been unset via the DestroyAllChannels() + // call above, which triggers a call to TeardownDataChannelTransport_n(). + // TODO(tommi): ^^ That's not exactly optimal since this is yet another + // blocking hop to the network thread during Close(). Further still, the + // voice/video/data channels will be cleared on the worker thread. + transport_controller_.reset(); + port_allocator_->DiscardCandidatePool(); + if (network_thread_safety_) { + network_thread_safety_->SetNotAlive(); + } + }); worker_thread()->Invoke(RTC_FROM_HERE, [this] { RTC_DCHECK_RUN_ON(worker_thread()); - call_safety_.reset(); + worker_thread_safety_->SetNotAlive(); call_.reset(); // The event log must outlive call (and any other object that uses it). 
event_log_.reset(); @@ -1643,6 +1784,10 @@ void PeerConnection::Close() { // The .h file says that observer can be discarded after close() returns. // Make sure this is true. observer_ = nullptr; + + // Signal shutdown to the sdp handler. This invalidates weak pointers for + // internal pending callbacks. + sdp_handler_->PrepareForShutdown(); } void PeerConnection::SetIceConnectionState(IceConnectionState new_state) { @@ -1691,6 +1836,52 @@ void PeerConnection::SetConnectionState( return; connection_state_ = new_state; Observer()->OnConnectionChange(new_state); + + if (new_state == PeerConnectionState::kConnected && !was_ever_connected_) { + was_ever_connected_ = true; + + // The first connection state change to connected happens once per + // connection which makes it a good point to report metrics. + // Record bundle-policy from configuration. Done here from + // connectionStateChange to limit to actually established connections. + BundlePolicyUsage policy = kBundlePolicyUsageMax; + switch (configuration_.bundle_policy) { + case kBundlePolicyBalanced: + policy = kBundlePolicyUsageBalanced; + break; + case kBundlePolicyMaxBundle: + policy = kBundlePolicyUsageMaxBundle; + break; + case kBundlePolicyMaxCompat: + policy = kBundlePolicyUsageMaxCompat; + break; + } + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.BundlePolicy", policy, + kBundlePolicyUsageMax); + + // Record configured ice candidate pool size depending on the + // BUNDLE policy. See + // https://w3c.github.io/webrtc-pc/#dom-rtcconfiguration-icecandidatepoolsize + // The ICE candidate pool size is an optimization and it may be desirable + // to restrict the maximum size of the pre-gathered candidates. 
+ switch (configuration_.bundle_policy) { + case kBundlePolicyBalanced: + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.PeerConnection.CandidatePoolUsage.Balanced", + configuration_.ice_candidate_pool_size, 0, 255, 256); + break; + case kBundlePolicyMaxBundle: + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.PeerConnection.CandidatePoolUsage.MaxBundle", + configuration_.ice_candidate_pool_size, 0, 255, 256); + break; + case kBundlePolicyMaxCompat: + RTC_HISTOGRAM_COUNTS_LINEAR( + "WebRTC.PeerConnection.CandidatePoolUsage.MaxCompat", + configuration_.ice_candidate_pool_size, 0, 255, 256); + break; + } + } } void PeerConnection::OnIceGatheringChange( @@ -1750,17 +1941,18 @@ void PeerConnection::OnSelectedCandidatePairChanged( absl::optional PeerConnection::GetDataMid() const { RTC_DCHECK_RUN_ON(signaling_thread()); - switch (data_channel_type()) { - case cricket::DCT_RTP: - if (!data_channel_controller_.rtp_data_channel()) { - return absl::nullopt; - } - return data_channel_controller_.rtp_data_channel()->content_name(); - case cricket::DCT_SCTP: - return sctp_mid_s_; - default: - return absl::nullopt; - } + return sctp_mid_s_; +} + +void PeerConnection::SetSctpDataMid(const std::string& mid) { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_mid_s_ = mid; +} + +void PeerConnection::ResetSctpDataMid() { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_mid_s_.reset(); + sctp_transport_name_s_.clear(); } void PeerConnection::OnSctpDataChannelClosed(DataChannelInterface* channel) { @@ -1895,16 +2087,12 @@ void PeerConnection::StopRtcEventLog_w() { cricket::ChannelInterface* PeerConnection::GetChannel( const std::string& content_name) { - for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + for (const auto& transceiver : rtp_manager()->transceivers()->UnsafeList()) { cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (channel && channel->content_name() == content_name) { return channel; } } - if (rtp_data_channel() && - 
rtp_data_channel()->content_name() == content_name) { - return rtp_data_channel(); - } return nullptr; } @@ -1976,59 +2164,33 @@ std::vector PeerConnection::GetDataChannelStats() const { absl::optional PeerConnection::sctp_transport_name() const { RTC_DCHECK_RUN_ON(signaling_thread()); - if (sctp_mid_s_ && transport_controller_) { - auto dtls_transport = transport_controller_->GetDtlsTransport(*sctp_mid_s_); - if (dtls_transport) { - return dtls_transport->transport_name(); - } - return absl::optional(); - } + if (sctp_mid_s_ && transport_controller_) + return sctp_transport_name_s_; return absl::optional(); } -cricket::CandidateStatsList PeerConnection::GetPooledCandidateStats() const { - cricket::CandidateStatsList candidate_states_list; - network_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&cricket::PortAllocator::GetCandidateStatsFromPooledSessions, - port_allocator_.get(), &candidate_states_list)); - return candidate_states_list; +absl::optional PeerConnection::sctp_mid() const { + RTC_DCHECK_RUN_ON(signaling_thread()); + return sctp_mid_s_; } -std::map PeerConnection::GetTransportNamesByMid() - const { - RTC_DCHECK_RUN_ON(signaling_thread()); - std::map transport_names_by_mid; - for (const auto& transceiver : rtp_manager()->transceivers()->List()) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (channel) { - transport_names_by_mid[channel->content_name()] = - channel->transport_name(); - } - } - if (data_channel_controller_.rtp_data_channel()) { - transport_names_by_mid[data_channel_controller_.rtp_data_channel() - ->content_name()] = - data_channel_controller_.rtp_data_channel()->transport_name(); - } - if (data_channel_controller_.data_channel_transport()) { - absl::optional transport_name = sctp_transport_name(); - RTC_DCHECK(transport_name); - transport_names_by_mid[*sctp_mid_s_] = *transport_name; - } - return transport_names_by_mid; +cricket::CandidateStatsList PeerConnection::GetPooledCandidateStats() const { + 
RTC_DCHECK_RUN_ON(network_thread()); + if (!network_thread_safety_->alive()) + return {}; + cricket::CandidateStatsList candidate_states_list; + port_allocator_->GetCandidateStatsFromPooledSessions(&candidate_states_list); + return candidate_states_list; } std::map PeerConnection::GetTransportStatsByNames( const std::set& transport_names) { - if (!network_thread()->IsCurrent()) { - return network_thread() - ->Invoke>( - RTC_FROM_HERE, - [&] { return GetTransportStatsByNames(transport_names); }); - } RTC_DCHECK_RUN_ON(network_thread()); + if (!network_thread_safety_->alive()) + return {}; + + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map transport_stats_by_name; for (const std::string& transport_name : transport_names) { cricket::TransportStats transport_stats; @@ -2047,7 +2209,8 @@ PeerConnection::GetTransportStatsByNames( bool PeerConnection::GetLocalCertificate( const std::string& transport_name, rtc::scoped_refptr* certificate) { - if (!certificate) { + RTC_DCHECK_RUN_ON(network_thread()); + if (!network_thread_safety_->alive() || !certificate) { return false; } *certificate = transport_controller_->GetLocalCertificate(transport_name); @@ -2056,20 +2219,20 @@ bool PeerConnection::GetLocalCertificate( std::unique_ptr PeerConnection::GetRemoteSSLCertChain( const std::string& transport_name) { + RTC_DCHECK_RUN_ON(network_thread()); return transport_controller_->GetRemoteSSLCertChain(transport_name); } -cricket::DataChannelType PeerConnection::data_channel_type() const { - return data_channel_controller_.data_channel_type(); -} - bool PeerConnection::IceRestartPending(const std::string& content_name) const { RTC_DCHECK_RUN_ON(signaling_thread()); return sdp_handler_->IceRestartPending(content_name); } bool PeerConnection::NeedsIceRestart(const std::string& content_name) const { - return transport_controller_->NeedsIceRestart(content_name); + return network_thread()->Invoke(RTC_FROM_HERE, [this, &content_name] { + 
RTC_DCHECK_RUN_ON(network_thread()); + return transport_controller_->NeedsIceRestart(content_name); + }); } void PeerConnection::OnTransportControllerConnectionState( @@ -2109,8 +2272,8 @@ void PeerConnection::OnTransportControllerConnectionState( SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected); } SetIceConnectionState(PeerConnectionInterface::kIceConnectionCompleted); + NoteUsageEvent(UsageEvent::ICE_STATE_CONNECTED); - ReportTransportStats(); break; default: RTC_NOTREACHED(); @@ -2120,6 +2283,8 @@ void PeerConnection::OnTransportControllerConnectionState( void PeerConnection::OnTransportControllerCandidatesGathered( const std::string& transport_name, const cricket::Candidates& candidates) { + // TODO(bugs.webrtc.org/12427): Expect this to come in on the network thread + // (not signaling as it currently does), handle appropriately. int sdp_mline_index; if (!GetLocalCandidateMediaIndex(transport_name, &sdp_mline_index)) { RTC_LOG(LS_ERROR) @@ -2164,10 +2329,6 @@ void PeerConnection::OnTransportControllerCandidateChanged( OnSelectedCandidatePairChanged(event); } -void PeerConnection::OnErrorDemuxingPacket(uint32_t ssrc) { - message_handler_.PostErrorDemuxingPacket(demuxing_observer_, ssrc); -} - void PeerConnection::OnTransportControllerDtlsHandshakeError( rtc::SSLHandshakeError error) { RTC_HISTOGRAM_ENUMERATION( @@ -2198,7 +2359,7 @@ bool PeerConnection::GetLocalCandidateMediaIndex( Call::Stats PeerConnection::GetCallStats() { if (!worker_thread()->IsCurrent()) { return worker_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&PeerConnection::GetCallStats, this)); + RTC_FROM_HERE, [this] { return GetCallStats(); }); } RTC_DCHECK_RUN_ON(worker_thread()); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; @@ -2223,6 +2384,16 @@ bool PeerConnection::SetupDataChannelTransport_n(const std::string& mid) { data_channel_controller_.set_data_channel_transport(transport); data_channel_controller_.SetupDataChannelTransport_n(); sctp_mid_n_ = mid; 
+ cricket::DtlsTransportInternal* dtls_transport = + transport_controller_->GetDtlsTransport(mid); + if (dtls_transport) { + signaling_thread()->PostTask( + ToQueuedTask(signaling_thread_safety_.flag(), + [this, name = dtls_transport->transport_name()] { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_transport_name_s_ = std::move(name); + })); + } // Note: setting the data sink and checking initial state must be done last, // after setting up the data channel. Setting the data sink may trigger @@ -2233,34 +2404,32 @@ bool PeerConnection::SetupDataChannelTransport_n(const std::string& mid) { } void PeerConnection::TeardownDataChannelTransport_n() { - if (!sctp_mid_n_ && !data_channel_controller_.data_channel_transport()) { - return; + if (sctp_mid_n_) { + // |sctp_mid_| may still be active through an SCTP transport. If not, unset + // it. + RTC_LOG(LS_INFO) << "Tearing down data channel transport for mid=" + << *sctp_mid_n_; + sctp_mid_n_.reset(); } - RTC_LOG(LS_INFO) << "Tearing down data channel transport for mid=" - << *sctp_mid_n_; - // |sctp_mid_| may still be active through an SCTP transport. If not, unset - // it. - sctp_mid_n_.reset(); data_channel_controller_.TeardownDataChannelTransport_n(); } // Returns false if bundle is enabled and rtcp_mux is disabled. 
-bool PeerConnection::ValidateBundleSettings(const SessionDescription* desc) { - bool bundle_enabled = desc->HasGroup(cricket::GROUP_TYPE_BUNDLE); - if (!bundle_enabled) +bool PeerConnection::ValidateBundleSettings( + const SessionDescription* desc, + const std::map& + bundle_groups_by_mid) { + if (bundle_groups_by_mid.empty()) return true; - const cricket::ContentGroup* bundle_group = - desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - RTC_DCHECK(bundle_group != NULL); - const cricket::ContentInfos& contents = desc->contents(); for (cricket::ContentInfos::const_iterator citer = contents.begin(); citer != contents.end(); ++citer) { const cricket::ContentInfo* content = (&*citer); RTC_DCHECK(content != NULL); - if (bundle_group->HasContentName(content->name) && !content->rejected && + auto it = bundle_groups_by_mid.find(content->name); + if (it != bundle_groups_by_mid.end() && !content->rejected && content->type == MediaProtocolType::kRtp) { if (!HasRtcpMuxEnabled(content)) return false; @@ -2271,12 +2440,13 @@ bool PeerConnection::ValidateBundleSettings(const SessionDescription* desc) { } void PeerConnection::ReportSdpFormatReceived( - const SessionDescriptionInterface& remote_offer) { + const SessionDescriptionInterface& remote_description) { int num_audio_mlines = 0; int num_video_mlines = 0; int num_audio_tracks = 0; int num_video_tracks = 0; - for (const ContentInfo& content : remote_offer.description()->contents()) { + for (const ContentInfo& content : + remote_description.description()->contents()) { cricket::MediaType media_type = content.media_description()->type(); int num_tracks = std::max( 1, static_cast(content.media_description()->streams().size())); @@ -2296,7 +2466,7 @@ void PeerConnection::ReportSdpFormatReceived( } else if (num_audio_tracks > 0 || num_video_tracks > 0) { format = kSdpFormatReceivedSimple; } - switch (remote_offer.GetType()) { + switch (remote_description.GetType()) { case SdpType::kOffer: // Historically only offers were counted. 
RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpFormatReceived", @@ -2308,11 +2478,57 @@ void PeerConnection::ReportSdpFormatReceived( break; default: RTC_LOG(LS_ERROR) << "Can not report SdpFormatReceived for " - << SdpTypeToString(remote_offer.GetType()); + << SdpTypeToString(remote_description.GetType()); break; } } +void PeerConnection::ReportSdpBundleUsage( + const SessionDescriptionInterface& remote_description) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + bool using_bundle = + remote_description.description()->HasGroup(cricket::GROUP_TYPE_BUNDLE); + int num_audio_mlines = 0; + int num_video_mlines = 0; + int num_data_mlines = 0; + for (const ContentInfo& content : + remote_description.description()->contents()) { + cricket::MediaType media_type = content.media_description()->type(); + if (media_type == cricket::MEDIA_TYPE_AUDIO) { + num_audio_mlines += 1; + } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + num_video_mlines += 1; + } else if (media_type == cricket::MEDIA_TYPE_DATA) { + num_data_mlines += 1; + } + } + bool simple = num_audio_mlines <= 1 && num_video_mlines <= 1; + BundleUsage usage = kBundleUsageMax; + if (num_audio_mlines == 0 && num_video_mlines == 0) { + if (num_data_mlines > 0) { + usage = using_bundle ? kBundleUsageBundleDatachannelOnly + : kBundleUsageNoBundleDatachannelOnly; + } else { + usage = kBundleUsageEmpty; + } + } else if (configuration_.sdp_semantics == SdpSemantics::kPlanB) { + // In plan-b, simple/complex usage will not show up in the number of + // m-lines or BUNDLE. + usage = using_bundle ? kBundleUsageBundlePlanB : kBundleUsageNoBundlePlanB; + } else { + if (simple) { + usage = + using_bundle ? kBundleUsageBundleSimple : kBundleUsageNoBundleSimple; + } else { + usage = using_bundle ? 
kBundleUsageBundleComplex + : kBundleUsageNoBundleComplex; + } + } + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.BundleUsage", usage, + kBundleUsageMax); +} + void PeerConnection::ReportIceCandidateCollected( const cricket::Candidate& candidate) { NoteUsageEvent(UsageEvent::CANDIDATE_COLLECTED); @@ -2332,11 +2548,70 @@ void PeerConnection::NoteUsageEvent(UsageEvent event) { usage_pattern_.NoteUsageEvent(event); } +// Asynchronously adds remote candidates on the network thread. +void PeerConnection::AddRemoteCandidate(const std::string& mid, + const cricket::Candidate& candidate) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + network_thread()->PostTask(ToQueuedTask( + network_thread_safety_, [this, mid = mid, candidate = candidate] { + RTC_DCHECK_RUN_ON(network_thread()); + std::vector candidates = {candidate}; + RTCError error = + transport_controller_->AddRemoteCandidates(mid, candidates); + if (error.ok()) { + signaling_thread()->PostTask(ToQueuedTask( + signaling_thread_safety_.flag(), + [this, candidate = std::move(candidate)] { + ReportRemoteIceCandidateAdded(candidate); + // Candidates successfully submitted for checking. + if (ice_connection_state() == + PeerConnectionInterface::kIceConnectionNew || + ice_connection_state() == + PeerConnectionInterface::kIceConnectionDisconnected) { + // If state is New, then the session has just gotten its first + // remote ICE candidates, so go to Checking. If state is + // Disconnected, the session is re-using old candidates or + // receiving additional ones, so go to Checking. If state is + // Connected, stay Connected. + // TODO(bemasc): If state is Connected, and the new candidates + // are for a newly added transport, then the state actually + // _should_ move to checking. Add a way to distinguish that + // case. + SetIceConnectionState( + PeerConnectionInterface::kIceConnectionChecking); + } + // TODO(bemasc): If state is Completed, go back to Connected. 
+ })); + } else { + RTC_LOG(LS_WARNING) << error.message(); + } + })); +} + void PeerConnection::ReportUsagePattern() const { usage_pattern_.ReportUsagePattern(observer_); } +void PeerConnection::ReportRemoteIceCandidateAdded( + const cricket::Candidate& candidate) { + RTC_DCHECK_RUN_ON(signaling_thread()); + + NoteUsageEvent(UsageEvent::REMOTE_CANDIDATE_ADDED); + + if (candidate.address().IsPrivateIP()) { + NoteUsageEvent(UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED); + } + if (candidate.address().IsUnresolvedIP()) { + NoteUsageEvent(UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED); + } + if (candidate.address().family() == AF_INET6) { + NoteUsageEvent(UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED); + } +} + bool PeerConnection::SrtpRequired() const { + RTC_DCHECK_RUN_ON(signaling_thread()); return (dtls_enabled_ || sdp_handler_->webrtc_session_desc_factory()->SdesPolicy() == cricket::SEC_REQUIRED); @@ -2357,10 +2632,12 @@ void PeerConnection::OnTransportControllerGatheringState( } } +// Runs on network_thread(). 
void PeerConnection::ReportTransportStats() { + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map> media_types_by_transport_name; - for (const auto& transceiver : rtp_manager()->transceivers()->List()) { + for (const auto& transceiver : rtp_manager()->transceivers()->UnsafeList()) { if (transceiver->internal()->channel()) { const std::string& transport_name = transceiver->internal()->channel()->transport_name(); @@ -2368,15 +2645,14 @@ void PeerConnection::ReportTransportStats() { transceiver->media_type()); } } - if (rtp_data_channel()) { - media_types_by_transport_name[rtp_data_channel()->transport_name()].insert( - cricket::MEDIA_TYPE_DATA); - } - absl::optional transport_name = sctp_transport_name(); - if (transport_name) { - media_types_by_transport_name[*transport_name].insert( - cricket::MEDIA_TYPE_DATA); + if (sctp_mid_n_) { + cricket::DtlsTransportInternal* dtls_transport = + transport_controller_->GetDtlsTransport(*sctp_mid_n_); + if (dtls_transport) { + media_types_by_transport_name[dtls_transport->transport_name()].insert( + cricket::MEDIA_TYPE_DATA); + } } for (const auto& entry : media_types_by_transport_name) { @@ -2385,12 +2661,14 @@ void PeerConnection::ReportTransportStats() { cricket::TransportStats stats; if (transport_controller_->GetStats(transport_name, &stats)) { ReportBestConnectionState(stats); - ReportNegotiatedCiphers(stats, media_types); + ReportNegotiatedCiphers(dtls_enabled_, stats, media_types); } } } + // Walk through the ConnectionInfos to gather best connection usage // for IPv4 and IPv6. 
+// static (no member state required) void PeerConnection::ReportBestConnectionState( const cricket::TransportStats& stats) { for (const cricket::TransportChannelStats& channel_stats : @@ -2438,10 +2716,12 @@ void PeerConnection::ReportBestConnectionState( } } +// static void PeerConnection::ReportNegotiatedCiphers( + bool dtls_enabled, const cricket::TransportStats& stats, const std::set& media_types) { - if (!dtls_enabled_ || stats.channel_stats.empty()) { + if (!dtls_enabled || stats.channel_stats.empty()) { return; } @@ -2503,12 +2783,6 @@ void PeerConnection::ReportNegotiatedCiphers( } } -void PeerConnection::OnSentPacket_w(const rtc::SentPacket& sent_packet) { - RTC_DCHECK_RUN_ON(worker_thread()); - RTC_DCHECK(call_); - call_->OnSentPacket(sent_packet); -} - bool PeerConnection::OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, @@ -2520,9 +2794,19 @@ bool PeerConnection::OnTransportChanged( if (base_channel) { ret = base_channel->SetRtpTransport(rtp_transport); } + if (mid == sctp_mid_n_) { data_channel_controller_.OnTransportChanged(data_channel_transport); + if (dtls_transport) { + signaling_thread()->PostTask(ToQueuedTask( + signaling_thread_safety_.flag(), + [this, name = dtls_transport->internal()->transport_name()] { + RTC_DCHECK_RUN_ON(signaling_thread()); + sctp_transport_name_s_ = std::move(name); + })); + } } + return ret; } @@ -2532,6 +2816,23 @@ PeerConnectionObserver* PeerConnection::Observer() const { return observer_; } +void PeerConnection::StartSctpTransport(int local_port, + int remote_port, + int max_message_size) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (!sctp_mid_s_) + return; + + network_thread()->PostTask(ToQueuedTask( + network_thread_safety_, + [this, mid = *sctp_mid_s_, local_port, remote_port, max_message_size] { + rtc::scoped_refptr sctp_transport = + transport_controller()->GetSctpTransport(mid); + if (sctp_transport) + sctp_transport->Start(local_port, remote_port, max_message_size); + })); 
+} + CryptoOptions PeerConnection::GetCryptoOptions() { RTC_DCHECK_RUN_ON(signaling_thread()); // TODO(bugs.webrtc.org/9891) - Remove PeerConnectionFactory::CryptoOptions @@ -2565,24 +2866,9 @@ void PeerConnection::RequestUsagePatternReportForTesting() { std::function PeerConnection::InitializeRtcpCallback() { - RTC_DCHECK_RUN_ON(signaling_thread()); - - auto flag = - worker_thread()->Invoke>( - RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(worker_thread()); - if (!call_) - return rtc::scoped_refptr(); - if (!call_safety_) - call_safety_.reset(new ScopedTaskSafety()); - return call_safety_->flag(); - }); - - if (!flag) - return [](const rtc::CopyOnWriteBuffer&, int64_t) {}; - - return [this, flag = std::move(flag)](const rtc::CopyOnWriteBuffer& packet, - int64_t packet_time_us) { + RTC_DCHECK_RUN_ON(network_thread()); + return [this, flag = worker_thread_safety_]( + const rtc::CopyOnWriteBuffer& packet, int64_t packet_time_us) { RTC_DCHECK_RUN_ON(network_thread()); // TODO(bugs.webrtc.org/11993): We should actually be delivering this call // directly to the Call class somehow directly on the network thread and not diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h index 9c0541cc4..7be137a6a 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h @@ -12,6 +12,7 @@ #define PC_PEER_CONNECTION_H_ #include + #include #include #include @@ -22,6 +23,7 @@ #include "absl/types/optional.h" #include "api/adaptation/resource.h" +#include "api/async_dns_resolver.h" #include "api/async_resolver_factory.h" #include "api/audio_options.h" #include "api/candidate.h" @@ -43,6 +45,7 @@ #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" +#include "api/sequence_checker.h" #include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" #include 
"api/stats/rtc_stats_collector_callback.h" @@ -69,7 +72,6 @@ #include "pc/peer_connection_internal.h" #include "pc/peer_connection_message_handler.h" #include "pc/rtc_stats_collector.h" -#include "pc/rtp_data_channel.h" #include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" #include "pc/rtp_transceiver.h" @@ -86,17 +88,16 @@ #include "pc/usage_pattern.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/deprecation.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/unique_id_generator.h" +#include "rtc_base/weak_ptr.h" namespace webrtc { @@ -124,7 +125,7 @@ class PeerConnection : public PeerConnectionInternal, // // Note that the function takes ownership of dependencies, and will // either use them or release them, whether it succeeds or fails. - static rtc::scoped_refptr Create( + static RTCErrorOr> Create( rtc::scoped_refptr context, const PeerConnectionFactoryInterface::Options& options, std::unique_ptr event_log, @@ -169,7 +170,6 @@ class PeerConnection : public PeerConnectionInternal, rtc::scoped_refptr CreateDataChannel( const std::string& label, const DataChannelInit* config) override; - // WARNING: LEGACY. 
See peerconnectioninterface.h bool GetStats(StatsObserver* observer, webrtc::MediaStreamTrackInterface* track, @@ -272,7 +272,6 @@ class PeerConnection : public PeerConnectionInternal, rtc::Thread* worker_thread() const final { return context_->worker_thread(); } std::string session_id() const override { - RTC_DCHECK_RUN_ON(signaling_thread()); return session_id_; } @@ -288,24 +287,16 @@ class PeerConnection : public PeerConnectionInternal, return rtp_manager()->transceivers()->List(); } - sigslot::signal1& SignalRtpDataChannelCreated() override { - return data_channel_controller_.SignalRtpDataChannelCreated(); - } - sigslot::signal1& SignalSctpDataChannelCreated() override { return data_channel_controller_.SignalSctpDataChannelCreated(); } - cricket::RtpDataChannel* rtp_data_channel() const override { - return data_channel_controller_.rtp_data_channel(); - } - std::vector GetDataChannelStats() const override; absl::optional sctp_transport_name() const override; + absl::optional sctp_mid() const override; cricket::CandidateStatsList GetPooledCandidateStats() const override; - std::map GetTransportNamesByMid() const override; std::map GetTransportStatsByNames( const std::set& transport_names) override; Call::Stats GetCallStats() override; @@ -325,7 +316,8 @@ class PeerConnection : public PeerConnectionInternal, PeerConnectionObserver* Observer() const; bool IsClosed() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return sdp_handler_->signaling_state() == PeerConnectionInterface::kClosed; + return !sdp_handler_ || + sdp_handler_->signaling_state() == PeerConnectionInterface::kClosed; } // Get current SSL role used by SCTP's underlying transport. 
bool GetSctpSslRole(rtc::SSLRole* role); @@ -351,10 +343,6 @@ class PeerConnection : public PeerConnectionInternal, RTC_DCHECK_RUN_ON(signaling_thread()); return &configuration_; } - absl::optional sctp_mid() { - RTC_DCHECK_RUN_ON(signaling_thread()); - return sctp_mid_s_; - } PeerConnectionMessageHandler* message_handler() { RTC_DCHECK_RUN_ON(signaling_thread()); return &message_handler_; @@ -376,12 +364,20 @@ class PeerConnection : public PeerConnectionInternal, const PeerConnectionFactoryInterface::Options* options() const { return &options_; } - cricket::DataChannelType data_channel_type() const; void SetIceConnectionState(IceConnectionState new_state); void NoteUsageEvent(UsageEvent event); - // Report the UMA metric SdpFormatReceived for the given remote offer. - void ReportSdpFormatReceived(const SessionDescriptionInterface& remote_offer); + // Asynchronously adds a remote candidate on the network thread. + void AddRemoteCandidate(const std::string& mid, + const cricket::Candidate& candidate); + + // Report the UMA metric SdpFormatReceived for the given remote description. + void ReportSdpFormatReceived( + const SessionDescriptionInterface& remote_description); + + // Report the UMA metric BundleUsage for the given remote description. + void ReportSdpBundleUsage( + const SessionDescriptionInterface& remote_description); // Returns true if the PeerConnection is configured to use Unified Plan // semantics for creating offers/answers and setting local/remote @@ -393,21 +389,25 @@ class PeerConnection : public PeerConnectionInternal, RTC_DCHECK_RUN_ON(signaling_thread()); return is_unified_plan_; } - bool ValidateBundleSettings(const cricket::SessionDescription* desc); + bool ValidateBundleSettings( + const cricket::SessionDescription* desc, + const std::map& + bundle_groups_by_mid); - // Returns the MID for the data section associated with either the - // RtpDataChannel or SCTP data channel, if it has been set. 
If no data + // Returns the MID for the data section associated with the + // SCTP data channel, if it has been set. If no data // channels are configured this will return nullopt. absl::optional GetDataMid() const; - void SetSctpDataMid(const std::string& mid) { - RTC_DCHECK_RUN_ON(signaling_thread()); - sctp_mid_s_ = mid; - } - void ResetSctpDataMid() { - RTC_DCHECK_RUN_ON(signaling_thread()); - sctp_mid_s_.reset(); - } + void SetSctpDataMid(const std::string& mid); + + void ResetSctpDataMid(); + + // Asynchronously calls SctpTransport::Start() on the network thread for + // |sctp_mid()| if set. Called as part of setting the local description. + void StartSctpTransport(int local_port, + int remote_port, + int max_message_size); // Returns the CryptoOptions for this PeerConnection. This will always // return the RTCConfiguration.crypto_options if set and will only default @@ -423,23 +423,17 @@ class PeerConnection : public PeerConnectionInternal, bool fire_callback = true); // Returns rtp transport, result can not be nullptr. - RtpTransportInternal* GetRtpTransport(const std::string& mid) { - RTC_DCHECK_RUN_ON(signaling_thread()); - auto rtp_transport = transport_controller_->GetRtpTransport(mid); - RTC_DCHECK(rtp_transport); - return rtp_transport; - } + RtpTransportInternal* GetRtpTransport(const std::string& mid); // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by // this session. - bool SrtpRequired() const RTC_RUN_ON(signaling_thread()); - - void OnSentPacket_w(const rtc::SentPacket& sent_packet); + bool SrtpRequired() const; bool SetupDataChannelTransport_n(const std::string& mid) RTC_RUN_ON(network_thread()); void TeardownDataChannelTransport_n() RTC_RUN_ON(network_thread()); - cricket::ChannelInterface* GetChannel(const std::string& content_name); + cricket::ChannelInterface* GetChannel(const std::string& content_name) + RTC_RUN_ON(network_thread()); // Functions made public for testing. 
void ReturnHistogramVeryQuicklyForTesting() { @@ -455,14 +449,19 @@ class PeerConnection : public PeerConnectionInternal, bool is_unified_plan, std::unique_ptr event_log, std::unique_ptr call, - PeerConnectionDependencies& dependencies); + PeerConnectionDependencies& dependencies, + bool dtls_enabled); ~PeerConnection() override; private: - bool Initialize( + RTCError Initialize( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies); + void InitializeTransportController_n( + const RTCConfiguration& configuration, + const PeerConnectionDependencies& dependencies) + RTC_RUN_ON(network_thread()); rtc::scoped_refptr> FindTransceiverBySender(rtc::scoped_refptr sender) @@ -496,10 +495,8 @@ class PeerConnection : public PeerConnectionInternal, const cricket::CandidatePairChangeEvent& event) RTC_RUN_ON(signaling_thread()); - void OnNegotiationNeeded(); - // Returns the specified SCTP DataChannel in sctp_data_channels_, // or nullptr if not found. SctpDataChannel* FindDataChannelBySid(int sid) const @@ -568,23 +565,24 @@ class PeerConnection : public PeerConnectionInternal, RTC_RUN_ON(signaling_thread()); void OnTransportControllerDtlsHandshakeError(rtc::SSLHandshakeError error); - void OnErrorDemuxingPacket(uint32_t ssrc); - // Invoked when TransportController connection completion is signaled. // Reports stats for all transports in use. - void ReportTransportStats() RTC_RUN_ON(signaling_thread()); + void ReportTransportStats() RTC_RUN_ON(network_thread()); // Gather the usage of IPv4/IPv6 as best connection. 
- void ReportBestConnectionState(const cricket::TransportStats& stats); + static void ReportBestConnectionState(const cricket::TransportStats& stats); - void ReportNegotiatedCiphers(const cricket::TransportStats& stats, - const std::set& media_types) - RTC_RUN_ON(signaling_thread()); + static void ReportNegotiatedCiphers( + bool dtls_enabled, + const cricket::TransportStats& stats, + const std::set& media_types); void ReportIceCandidateCollected(const cricket::Candidate& candidate) RTC_RUN_ON(signaling_thread()); void ReportUsagePattern() const RTC_RUN_ON(signaling_thread()); + void ReportRemoteIceCandidateAdded(const cricket::Candidate& candidate); + // JsepTransportController::Observer override. // // Called by |transport_controller_| when processing transport information @@ -627,10 +625,8 @@ class PeerConnection : public PeerConnectionInternal, PeerConnectionInterface::RTCConfiguration configuration_ RTC_GUARDED_BY(signaling_thread()); - // TODO(zstein): |async_resolver_factory_| can currently be nullptr if it - // is not injected. It should be required once chromium supplies it. - const std::unique_ptr async_resolver_factory_ - RTC_GUARDED_BY(signaling_thread()); + const std::unique_ptr + async_dns_resolver_factory_; std::unique_ptr port_allocator_; // TODO(bugs.webrtc.org/9987): Accessed on both // signaling and network thread. @@ -646,8 +642,9 @@ class PeerConnection : public PeerConnectionInternal, // The unique_ptr belongs to the worker thread, but the Call object manages // its own thread safety. std::unique_ptr call_ RTC_GUARDED_BY(worker_thread()); - std::unique_ptr call_safety_ - RTC_GUARDED_BY(worker_thread()); + ScopedTaskSafety signaling_thread_safety_; + rtc::scoped_refptr network_thread_safety_; + rtc::scoped_refptr worker_thread_safety_; // Points to the same thing as `call_`. Since it's const, we may read the // pointer from any thread. 
@@ -660,10 +657,7 @@ class PeerConnection : public PeerConnectionInternal, rtc::scoped_refptr stats_collector_ RTC_GUARDED_BY(signaling_thread()); - rtc::scoped_refptr demuxing_observer_ - RTC_GUARDED_BY(signaling_thread()); - - std::string session_id_ RTC_GUARDED_BY(signaling_thread()); + const std::string session_id_; std::unique_ptr transport_controller_; // TODO(bugs.webrtc.org/9987): Accessed on both @@ -678,12 +672,13 @@ class PeerConnection : public PeerConnectionInternal, // thread, but applied first on the networking thread via an invoke(). absl::optional sctp_mid_s_ RTC_GUARDED_BY(signaling_thread()); absl::optional sctp_mid_n_ RTC_GUARDED_BY(network_thread()); + std::string sctp_transport_name_s_ RTC_GUARDED_BY(signaling_thread()); // The machinery for handling offers and answers. Const after initialization. std::unique_ptr sdp_handler_ RTC_GUARDED_BY(signaling_thread()); - bool dtls_enabled_ RTC_GUARDED_BY(signaling_thread()) = false; + const bool dtls_enabled_; UsagePattern usage_pattern_ RTC_GUARDED_BY(signaling_thread()); bool return_histogram_very_quickly_ RTC_GUARDED_BY(signaling_thread()) = @@ -697,6 +692,12 @@ class PeerConnection : public PeerConnectionInternal, // Administration of senders, receivers and transceivers // Accessed on both signaling and network thread. Const after Initialize(). std::unique_ptr rtp_manager_; + + rtc::WeakPtrFactory weak_factory_; + + // Did the connectionState ever change to `connected`? + // Used to gather metrics only the first such state change. 
+ bool was_ever_connected_ RTC_GUARDED_BY(signaling_thread()) = false; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc index 71d054eb9..dfb12971b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc @@ -50,7 +50,7 @@ TrackWithPeriodicSource CreateTrackWithPeriodicSource( periodic_track_source_config.frame_interval_ms = 100; periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis(); rtc::scoped_refptr periodic_track_source = - new rtc::RefCountedObject( + rtc::make_ref_counted( periodic_track_source_config, /* remote */ false); TrackWithPeriodicSource track_with_source; track_with_source.track = @@ -83,7 +83,7 @@ class PeerConnectionAdaptationIntegrationTest : public ::testing::Test { rtc::scoped_refptr CreatePcWrapper( const char* name) { rtc::scoped_refptr pc_wrapper = - new rtc::RefCountedObject( + rtc::make_ref_counted( name, network_thread_.get(), worker_thread_.get()); PeerConnectionInterface::RTCConfiguration config; config.sdp_semantics = SdpSemantics::kUnifiedPlan; diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc index da42e5a09..81a4cd845 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc @@ -10,9 +10,7 @@ #include "pc/peer_connection_factory.h" -#include #include -#include #include #include "absl/strings/match.h" @@ -27,6 +25,7 @@ #include "api/peer_connection_factory_proxy.h" #include "api/peer_connection_proxy.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/sequence_checker.h" #include "api/transport/bitrate_settings.h" #include "api/units/data_rate.h" #include "call/audio_state.h" @@ 
-34,6 +33,7 @@ #include "p2p/base/basic_async_resolver_factory.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/default_ice_transport_factory.h" +#include "p2p/base/port_allocator.h" #include "p2p/client/basic_port_allocator.h" #include "pc/audio_track.h" #include "pc/local_audio_source.h" @@ -42,7 +42,6 @@ #include "pc/rtp_parameters_conversion.h" #include "pc/session_description.h" #include "pc/video_track.h" -#include "rtc_base/bind.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" @@ -50,7 +49,7 @@ #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/ref_counted_object.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/system/file_wrapper.h" namespace webrtc { @@ -76,8 +75,8 @@ CreateModularPeerConnectionFactory( // Verify that the invocation and the initialization ended up agreeing on the // thread. 
RTC_DCHECK_RUN_ON(pc_factory->signaling_thread()); - return PeerConnectionFactoryProxy::Create(pc_factory->signaling_thread(), - pc_factory); + return PeerConnectionFactoryProxy::Create( + pc_factory->signaling_thread(), pc_factory->worker_thread(), pc_factory); } // Static @@ -87,8 +86,7 @@ rtc::scoped_refptr PeerConnectionFactory::Create( if (!context) { return nullptr; } - return new rtc::RefCountedObject(context, - &dependencies); + return rtc::make_ref_counted(context, &dependencies); } PeerConnectionFactory::PeerConnectionFactory( @@ -141,6 +139,7 @@ RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities( case cricket::MEDIA_TYPE_UNSUPPORTED: return RtpCapabilities(); } + RTC_DLOG(LS_ERROR) << "Got unexpected MediaType " << kind; RTC_CHECK_NOTREACHED(); } @@ -167,6 +166,7 @@ RtpCapabilities PeerConnectionFactory::GetRtpReceiverCapabilities( case cricket::MEDIA_TYPE_UNSUPPORTED: return RtpCapabilities(); } + RTC_DLOG(LS_ERROR) << "Got unexpected MediaType " << kind; RTC_CHECK_NOTREACHED(); } @@ -179,31 +179,17 @@ PeerConnectionFactory::CreateAudioSource(const cricket::AudioOptions& options) { } bool PeerConnectionFactory::StartAecDump(FILE* file, int64_t max_size_bytes) { - RTC_DCHECK(signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread()); return channel_manager()->StartAecDump(FileWrapper(file), max_size_bytes); } void PeerConnectionFactory::StopAecDump() { - RTC_DCHECK(signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(worker_thread()); channel_manager()->StopAecDump(); } -rtc::scoped_refptr -PeerConnectionFactory::CreatePeerConnection( - const PeerConnectionInterface::RTCConfiguration& configuration, - std::unique_ptr allocator, - std::unique_ptr cert_generator, - PeerConnectionObserver* observer) { - // Convert the legacy API into the new dependency structure. 
- PeerConnectionDependencies dependencies(observer); - dependencies.allocator = std::move(allocator); - dependencies.cert_generator = std::move(cert_generator); - // Pass that into the new API. - return CreatePeerConnection(configuration, std::move(dependencies)); -} - -rtc::scoped_refptr -PeerConnectionFactory::CreatePeerConnection( +RTCErrorOr> +PeerConnectionFactory::CreatePeerConnectionOrError( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -243,20 +229,28 @@ PeerConnectionFactory::CreatePeerConnection( std::unique_ptr event_log = worker_thread()->Invoke>( - RTC_FROM_HERE, - rtc::Bind(&PeerConnectionFactory::CreateRtcEventLog_w, this)); + RTC_FROM_HERE, [this] { return CreateRtcEventLog_w(); }); std::unique_ptr call = worker_thread()->Invoke>( RTC_FROM_HERE, - rtc::Bind(&PeerConnectionFactory::CreateCall_w, this, event_log.get())); + [this, &event_log] { return CreateCall_w(event_log.get()); }); - rtc::scoped_refptr pc = PeerConnection::Create( - context_, options_, std::move(event_log), std::move(call), configuration, - std::move(dependencies)); - if (!pc) { - return nullptr; + auto result = PeerConnection::Create(context_, options_, std::move(event_log), + std::move(call), configuration, + std::move(dependencies)); + if (!result.ok()) { + return result.MoveError(); } - return PeerConnectionProxy::Create(signaling_thread(), pc); + // We configure the proxy with a pointer to the network thread for methods + // that need to be invoked there rather than on the signaling thread. + // Internally, the proxy object has a member variable named |worker_thread_| + // which will point to the network thread (and not the factory's + // worker_thread()). All such methods have thread checks though, so the code + // should still be clear (outside of macro expansion). 
+ rtc::scoped_refptr result_proxy = + PeerConnectionProxy::Create(signaling_thread(), network_thread(), + result.MoveValue()); + return result_proxy; } rtc::scoped_refptr @@ -302,7 +296,7 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( RtcEventLog* event_log) { RTC_DCHECK_RUN_ON(worker_thread()); - webrtc::Call::Config call_config(event_log); + webrtc::Call::Config call_config(event_log, network_thread()); if (!channel_manager()->media_engine() || !context_->call_factory()) { return nullptr; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h index 427207f9c..bd2efe457 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h @@ -25,21 +25,24 @@ #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/network_control.h" #include "api/transport/sctp_transport_factory_interface.h" #include "api/transport/webrtc_key_value_config.h" #include "call/call.h" -#include "media/sctp/sctp_transport_internal.h" #include "p2p/base/port_allocator.h" #include "pc/channel_manager.h" #include "pc/connection_context.h" +#include "rtc_base/checks.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace rtc { class BasicNetworkManager; @@ -62,13 +65,8 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { void SetOptions(const Options& options) override; - rtc::scoped_refptr CreatePeerConnection( - const PeerConnectionInterface::RTCConfiguration& configuration, - 
std::unique_ptr allocator, - std::unique_ptr cert_generator, - PeerConnectionObserver* observer) override; - - rtc::scoped_refptr CreatePeerConnection( + RTCErrorOr> + CreatePeerConnectionOrError( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) override; @@ -107,6 +105,8 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { return context_->signaling_thread(); } + rtc::Thread* worker_thread() const { return context_->worker_thread(); } + const Options& options() const { RTC_DCHECK_RUN_ON(signaling_thread()); return options_; @@ -127,7 +127,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { virtual ~PeerConnectionFactory(); private: - rtc::Thread* worker_thread() const { return context_->worker_thread(); } rtc::Thread* network_thread() const { return context_->network_thread(); } bool IsTrialEnabled(absl::string_view key) const; diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h index 029febab2..6f9761291 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h @@ -19,7 +19,6 @@ #include "api/peer_connection_interface.h" #include "call/call.h" -#include "pc/rtp_data_channel.h" #include "pc/rtp_transceiver.h" #include "pc/sctp_data_channel.h" @@ -41,13 +40,9 @@ class PeerConnectionInternal : public PeerConnectionInterface { rtc::scoped_refptr>> GetTransceiversInternal() const = 0; - virtual sigslot::signal1& SignalRtpDataChannelCreated() = 0; virtual sigslot::signal1& SignalSctpDataChannelCreated() = 0; - // Only valid when using deprecated RTP data channels. - virtual cricket::RtpDataChannel* rtp_data_channel() const = 0; - // Call on the network thread to fetch stats for all the data channels. // TODO(tommi): Make pure virtual after downstream updates. 
virtual std::vector GetDataChannelStats() const { @@ -55,14 +50,13 @@ class PeerConnectionInternal : public PeerConnectionInterface { } virtual absl::optional sctp_transport_name() const = 0; + virtual absl::optional sctp_mid() const = 0; virtual cricket::CandidateStatsList GetPooledCandidateStats() const = 0; - // Returns a map from MID to transport name for all active media sections. - virtual std::map GetTransportNamesByMid() const = 0; - // Returns a map from transport name to transport stats for all given // transport names. + // Must be called on the network thread. virtual std::map GetTransportStatsByNames(const std::set& transport_names) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc index 7988339c5..4b7913d67 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc @@ -15,8 +15,12 @@ #include "api/jsep.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/stats_types.h" #include "pc/stats_collector_interface.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/checks.h" +#include "rtc_base/location.h" namespace webrtc { @@ -28,7 +32,6 @@ enum { MSG_CREATE_SESSIONDESCRIPTION_FAILED, MSG_GETSTATS, MSG_REPORT_USAGE_PATTERN, - MSG_ON_ERROR_DEMUXING_PACKET, }; struct SetSessionDescriptionMsg : public rtc::MessageData { @@ -65,15 +68,6 @@ struct RequestUsagePatternMsg : public rtc::MessageData { std::function function; }; -struct OnErrorDemuxingPacketMsg : public rtc::MessageData { - explicit OnErrorDemuxingPacketMsg(webrtc::ErrorDemuxingPacketObserver* observer, - uint32_t ssrc) - : observer(observer), ssrc(ssrc) {} - - rtc::scoped_refptr observer; - uint32_t ssrc; -}; - } // namespace 
PeerConnectionMessageHandler::~PeerConnectionMessageHandler() { @@ -134,12 +128,6 @@ void PeerConnectionMessageHandler::OnMessage(rtc::Message* msg) { delete param; break; } - case MSG_ON_ERROR_DEMUXING_PACKET: { - OnErrorDemuxingPacketMsg* param = static_cast(msg->pdata); - param->observer->OnErrorDemuxingPacket(param->ssrc); - delete param; - break; - } default: RTC_NOTREACHED() << "Not implemented"; break; @@ -189,12 +177,4 @@ void PeerConnectionMessageHandler::RequestUsagePatternReport( new RequestUsagePatternMsg(func)); } -void PeerConnectionMessageHandler::PostErrorDemuxingPacket( - ErrorDemuxingPacketObserver* observer, - uint32_t ssrc) { - signaling_thread()->Post(RTC_FROM_HERE, this, - MSG_ON_ERROR_DEMUXING_PACKET, - new OnErrorDemuxingPacketMsg(observer, ssrc)); -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h index 858426d6f..c19f5a4e5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.h @@ -11,11 +11,17 @@ #ifndef PC_PEER_CONNECTION_MESSAGE_HANDLER_H_ #define PC_PEER_CONNECTION_MESSAGE_HANDLER_H_ -#include +#include + +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/stats_types.h" +#include "pc/stats_collector_interface.h" #include "rtc_base/message_handler.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_message.h" namespace webrtc { @@ -24,7 +30,6 @@ class SetSessionDescriptionObserver; class StatsCollectorInterface; class StatsObserver; class MediaStreamTrackInterface; -class ErrorDemuxingPacketObserver; class PeerConnectionMessageHandler : public rtc::MessageHandler { public: @@ -45,8 +50,6 @@ class PeerConnectionMessageHandler : public rtc::MessageHandler { StatsCollectorInterface* stats, MediaStreamTrackInterface* 
track); void RequestUsagePatternReport(std::function, int delay_ms); - void PostErrorDemuxingPacket(ErrorDemuxingPacketObserver* observer, - uint32_t ssrc); private: rtc::Thread* signaling_thread() const { return signaling_thread_; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc index 328f5795e..6aed8f1de 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc @@ -48,7 +48,10 @@ PeerConnectionWrapper::PeerConnectionWrapper( observer_->SetPeerConnectionInterface(pc_.get()); } -PeerConnectionWrapper::~PeerConnectionWrapper() = default; +PeerConnectionWrapper::~PeerConnectionWrapper() { + if (pc_) + pc_->Close(); +} PeerConnectionFactoryInterface* PeerConnectionWrapper::pc_factory() { return pc_factory_.get(); @@ -133,8 +136,7 @@ PeerConnectionWrapper::CreateRollback() { std::unique_ptr PeerConnectionWrapper::CreateSdp( rtc::FunctionView fn, std::string* error_out) { - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); + auto observer = rtc::make_ref_counted(); fn(observer); EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); if (error_out && !observer->result()) { @@ -179,8 +181,7 @@ bool PeerConnectionWrapper::SetRemoteDescription( bool PeerConnectionWrapper::SetSdp( rtc::FunctionView fn, std::string* error_out) { - rtc::scoped_refptr observer( - new rtc::RefCountedObject()); + auto observer = rtc::make_ref_counted(); fn(observer); EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); if (error_out && !observer->result()) { @@ -323,8 +324,7 @@ bool PeerConnectionWrapper::IsIceConnected() { rtc::scoped_refptr PeerConnectionWrapper::GetStats() { - rtc::scoped_refptr callback( - new rtc::RefCountedObject()); + auto callback = rtc::make_ref_counted(); pc()->GetStats(callback); EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout); return callback->report(); diff --git 
a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc index 8ae061254..dc890e737 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc @@ -13,17 +13,15 @@ #include #include -#include #include "absl/algorithm/container.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_format.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -51,54 +49,63 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface { const rtc::scoped_refptr source_; }; -RemoteAudioSource::RemoteAudioSource(rtc::Thread* worker_thread) +RemoteAudioSource::RemoteAudioSource( + rtc::Thread* worker_thread, + OnAudioChannelGoneAction on_audio_channel_gone_action) : main_thread_(rtc::Thread::Current()), worker_thread_(worker_thread), + on_audio_channel_gone_action_(on_audio_channel_gone_action), state_(MediaSourceInterface::kLive) { RTC_DCHECK(main_thread_); RTC_DCHECK(worker_thread_); } RemoteAudioSource::~RemoteAudioSource() { - RTC_DCHECK(main_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(main_thread_); RTC_DCHECK(audio_observers_.empty()); - RTC_DCHECK(sinks_.empty()); + if (!sinks_.empty()) { + RTC_LOG(LS_WARNING) + << "RemoteAudioSource destroyed while sinks_ is non-empty."; + } } void RemoteAudioSource::Start(cricket::VoiceMediaChannel* media_channel, absl::optional ssrc) { - RTC_DCHECK_RUN_ON(main_thread_); - RTC_DCHECK(media_channel); + RTC_DCHECK_RUN_ON(worker_thread_); // Register for callbacks immediately before AddSink so that we always get // notified when a channel goes out of scope (signaled when "AudioDataProxy" // is destroyed). - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - ssrc ? 
media_channel->SetRawAudioSink( - *ssrc, std::make_unique(this)) - : media_channel->SetDefaultRawAudioSink( - std::make_unique(this)); - }); + RTC_DCHECK(media_channel); + ssrc ? media_channel->SetRawAudioSink(*ssrc, + std::make_unique(this)) + : media_channel->SetDefaultRawAudioSink( + std::make_unique(this)); } void RemoteAudioSource::Stop(cricket::VoiceMediaChannel* media_channel, absl::optional ssrc) { - RTC_DCHECK_RUN_ON(main_thread_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel); + ssrc ? media_channel->SetRawAudioSink(*ssrc, nullptr) + : media_channel->SetDefaultRawAudioSink(nullptr); +} - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - ssrc ? media_channel->SetRawAudioSink(*ssrc, nullptr) - : media_channel->SetDefaultRawAudioSink(nullptr); - }); +void RemoteAudioSource::SetState(SourceState new_state) { + RTC_DCHECK_RUN_ON(main_thread_); + if (state_ != new_state) { + state_ = new_state; + FireOnChanged(); + } } MediaSourceInterface::SourceState RemoteAudioSource::state() const { - RTC_DCHECK(main_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(main_thread_); return state_; } bool RemoteAudioSource::remote() const { - RTC_DCHECK(main_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(main_thread_); return true; } @@ -124,7 +131,7 @@ void RemoteAudioSource::UnregisterAudioObserver(AudioObserver* observer) { } void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) { - RTC_DCHECK(main_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(main_thread_); RTC_DCHECK(sink); if (state_ != MediaSourceInterface::kLive) { @@ -138,7 +145,7 @@ void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) { } void RemoteAudioSource::RemoveSink(AudioTrackSinkInterface* sink) { - RTC_DCHECK(main_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(main_thread_); RTC_DCHECK(sink); MutexLock lock(&sink_lock_); @@ -158,6 +165,9 @@ void RemoteAudioSource::OnData(const AudioSinkInterface::Data& audio) { } void RemoteAudioSource::OnAudioChannelGone() { + if (on_audio_channel_gone_action_ 
!= OnAudioChannelGoneAction::kEnd) { + return; + } // Called when the audio channel is deleted. It may be the worker thread // in libjingle or may be a different worker thread. // This object needs to live long enough for the cleanup logic in OnMessage to @@ -170,10 +180,9 @@ void RemoteAudioSource::OnAudioChannelGone() { } void RemoteAudioSource::OnMessage(rtc::Message* msg) { - RTC_DCHECK(main_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(main_thread_); sinks_.clear(); - state_ = MediaSourceInterface::kEnded; - FireOnChanged(); + SetState(MediaSourceInterface::kEnded); // Will possibly delete this RemoteAudioSource since it is reference counted // in the message. delete msg->pdata; diff --git a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.h b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.h index 9ec09165c..2eae07327 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.h +++ b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.h @@ -11,15 +11,21 @@ #ifndef PC_REMOTE_AUDIO_SOURCE_H_ #define PC_REMOTE_AUDIO_SOURCE_H_ +#include + #include #include #include "absl/types/optional.h" #include "api/call/audio_sink.h" +#include "api/media_stream_interface.h" #include "api/notifier.h" +#include "media/base/media_channel.h" #include "pc/channel.h" #include "rtc_base/message_handler.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_message.h" namespace rtc { struct Message; @@ -34,7 +40,21 @@ namespace webrtc { class RemoteAudioSource : public Notifier, rtc::MessageHandler { public: - explicit RemoteAudioSource(rtc::Thread* worker_thread); + // In Unified Plan, receivers map to m= sections and their tracks and sources + // survive SSRCs being reconfigured. The life cycle of the remote audio source + // is associated with the life cycle of the m= section, and thus even if an + // audio channel is destroyed the RemoteAudioSource should kSurvive. 
+ // + // In Plan B however, remote audio sources map 1:1 with an SSRCs and if an + // audio channel is destroyed, the RemoteAudioSource should kEnd. + enum class OnAudioChannelGoneAction { + kSurvive, + kEnd, + }; + + explicit RemoteAudioSource( + rtc::Thread* worker_thread, + OnAudioChannelGoneAction on_audio_channel_gone_action); // Register and unregister remote audio source with the underlying media // engine. @@ -42,6 +62,7 @@ class RemoteAudioSource : public Notifier, absl::optional ssrc); void Stop(cricket::VoiceMediaChannel* media_channel, absl::optional ssrc); + void SetState(SourceState new_state); // MediaSourceInterface implementation. MediaSourceInterface::SourceState state() const override; @@ -61,6 +82,7 @@ class RemoteAudioSource : public Notifier, private: // These are callbacks from the media engine. class AudioDataProxy; + void OnData(const AudioSinkInterface::Data& audio); void OnAudioChannelGone(); @@ -68,6 +90,7 @@ class RemoteAudioSource : public Notifier, rtc::Thread* const main_thread_; rtc::Thread* const worker_thread_; + const OnAudioChannelGoneAction on_audio_channel_gone_action_; std::list audio_observers_; Mutex sink_lock_; std::list sinks_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc index 529200894..7e781a724 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc @@ -10,23 +10,52 @@ #include "pc/rtc_stats_collector.h" +#include + +#include +#include #include #include #include #include #include +#include "api/array_view.h" #include "api/candidate.h" #include "api/media_stream_interface.h" -#include "api/peer_connection_interface.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/sequence_checker.h" +#include "api/stats/rtc_stats.h" +#include "api/stats/rtcstats_objects.h" +#include 
"api/task_queue/queued_task.h" #include "api/video/video_content_type.h" +#include "common_video/include/quality_limitation_reason.h" #include "media/base/media_channel.h" +#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/dtls_transport_internal.h" +#include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" -#include "pc/peer_connection.h" +#include "pc/channel.h" +#include "pc/channel_interface.h" +#include "pc/data_channel_utils.h" #include "pc/rtc_stats_traversal.h" #include "pc/webrtc_sdp.h" #include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -80,17 +109,23 @@ std::string RTCTransportStatsIDFromTransportChannel( return sb.str(); } -std::string RTCInboundRTPStreamStatsIDFromSSRC(bool audio, uint32_t ssrc) { +std::string RTCInboundRTPStreamStatsIDFromSSRC(cricket::MediaType media_type, + uint32_t ssrc) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCInboundRTP" << (audio ? "Audio" : "Video") << "Stream_" << ssrc; + sb << "RTCInboundRTP" + << (media_type == cricket::MEDIA_TYPE_AUDIO ? 
"Audio" : "Video") + << "Stream_" << ssrc; return sb.str(); } -std::string RTCOutboundRTPStreamStatsIDFromSSRC(bool audio, uint32_t ssrc) { +std::string RTCOutboundRTPStreamStatsIDFromSSRC(cricket::MediaType media_type, + uint32_t ssrc) { char buf[1024]; rtc::SimpleStringBuilder sb(buf); - sb << "RTCOutboundRTP" << (audio ? "Audio" : "Video") << "Stream_" << ssrc; + sb << "RTCOutboundRTP" + << (media_type == cricket::MEDIA_TYPE_AUDIO ? "Audio" : "Video") + << "Stream_" << ssrc; return sb.str(); } @@ -105,6 +140,17 @@ std::string RTCRemoteInboundRtpStreamStatsIdFromSourceSsrc( return sb.str(); } +std::string RTCRemoteOutboundRTPStreamStatsIDFromSSRC( + cricket::MediaType media_type, + uint32_t source_ssrc) { + char buf[1024]; + rtc::SimpleStringBuilder sb(buf); + sb << "RTCRemoteOutboundRTP" + << (media_type == cricket::MEDIA_TYPE_AUDIO ? "Audio" : "Video") + << "Stream_" << source_ssrc; + return sb.str(); +} + std::string RTCMediaSourceStatsIDFromKindAndAttachment( cricket::MediaType media_type, int attachment_id) { @@ -268,8 +314,6 @@ void SetInboundRTPStreamStatsFromMediaReceiverInfo( RTCInboundRTPStreamStats* inbound_stats) { RTC_DCHECK(inbound_stats); inbound_stats->ssrc = media_receiver_info.ssrc(); - // TODO(hbos): Support the remote case. 
https://crbug.com/657855 - inbound_stats->is_remote = false; inbound_stats->packets_received = static_cast(media_receiver_info.packets_rcvd); inbound_stats->bytes_received = @@ -280,17 +324,21 @@ void SetInboundRTPStreamStatsFromMediaReceiverInfo( static_cast(media_receiver_info.packets_lost); } -void SetInboundRTPStreamStatsFromVoiceReceiverInfo( - const std::string& mid, +std::unique_ptr CreateInboundAudioStreamStats( const cricket::VoiceReceiverInfo& voice_receiver_info, - RTCInboundRTPStreamStats* inbound_audio) { + const std::string& mid, + int64_t timestamp_us) { + auto inbound_audio = std::make_unique( + /*id=*/RTCInboundRTPStreamStatsIDFromSSRC(cricket::MEDIA_TYPE_AUDIO, + voice_receiver_info.ssrc()), + timestamp_us); SetInboundRTPStreamStatsFromMediaReceiverInfo(voice_receiver_info, - inbound_audio); + inbound_audio.get()); inbound_audio->media_type = "audio"; inbound_audio->kind = "audio"; if (voice_receiver_info.codec_payload_type) { inbound_audio->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, true, *voice_receiver_info.codec_payload_type); + mid, /*inbound=*/true, *voice_receiver_info.codec_payload_type); } inbound_audio->jitter = static_cast(voice_receiver_info.jitter_ms) / rtc::kNumMillisecsPerSec; @@ -318,12 +366,11 @@ void SetInboundRTPStreamStatsFromVoiceReceiverInfo( // |fir_count|, |pli_count| and |sli_count| are only valid for video and are // purposefully left undefined for audio. if (voice_receiver_info.last_packet_received_timestamp_ms) { - inbound_audio->last_packet_received_timestamp = - static_cast( - *voice_receiver_info.last_packet_received_timestamp_ms) / - rtc::kNumMillisecsPerSec; + inbound_audio->last_packet_received_timestamp = static_cast( + *voice_receiver_info.last_packet_received_timestamp_ms); } if (voice_receiver_info.estimated_playout_ntp_timestamp_ms) { + // TODO(bugs.webrtc.org/10529): Fix time origin. 
inbound_audio->estimated_playout_timestamp = static_cast( *voice_receiver_info.estimated_playout_ntp_timestamp_ms); } @@ -331,6 +378,51 @@ void SetInboundRTPStreamStatsFromVoiceReceiverInfo( voice_receiver_info.fec_packets_received; inbound_audio->fec_packets_discarded = voice_receiver_info.fec_packets_discarded; + return inbound_audio; +} + +std::unique_ptr +CreateRemoteOutboundAudioStreamStats( + const cricket::VoiceReceiverInfo& voice_receiver_info, + const std::string& mid, + const std::string& inbound_audio_id, + const std::string& transport_id) { + if (!voice_receiver_info.last_sender_report_timestamp_ms.has_value()) { + // Cannot create `RTCRemoteOutboundRtpStreamStats` when the RTCP SR arrival + // timestamp is not available - i.e., until the first sender report is + // received. + return nullptr; + } + RTC_DCHECK_GT(voice_receiver_info.sender_reports_reports_count, 0); + + // Create. + auto stats = std::make_unique( + /*id=*/RTCRemoteOutboundRTPStreamStatsIDFromSSRC( + cricket::MEDIA_TYPE_AUDIO, voice_receiver_info.ssrc()), + /*timestamp_us=*/rtc::kNumMicrosecsPerMillisec * + voice_receiver_info.last_sender_report_timestamp_ms.value()); + + // Populate. + // - RTCRtpStreamStats. + stats->ssrc = voice_receiver_info.ssrc(); + stats->kind = "audio"; + stats->transport_id = transport_id; + stats->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( + mid, + /*inbound=*/true, // Remote-outbound same as local-inbound. + *voice_receiver_info.codec_payload_type); + // - RTCSentRtpStreamStats. + stats->packets_sent = voice_receiver_info.sender_reports_packets_sent; + stats->bytes_sent = voice_receiver_info.sender_reports_bytes_sent; + // - RTCRemoteOutboundRtpStreamStats. 
+ stats->local_id = inbound_audio_id; + RTC_DCHECK( + voice_receiver_info.last_sender_report_remote_timestamp_ms.has_value()); + stats->remote_timestamp = static_cast( + voice_receiver_info.last_sender_report_remote_timestamp_ms.value()); + stats->reports_sent = voice_receiver_info.sender_reports_reports_count; + + return stats; } void SetInboundRTPStreamStatsFromVideoReceiverInfo( @@ -343,8 +435,10 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo( inbound_video->kind = "video"; if (video_receiver_info.codec_payload_type) { inbound_video->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, true, *video_receiver_info.codec_payload_type); + mid, /*inbound=*/true, *video_receiver_info.codec_payload_type); } + inbound_video->jitter = static_cast(video_receiver_info.jitter_ms) / + rtc::kNumMillisecsPerSec; inbound_video->fir_count = static_cast(video_receiver_info.firs_sent); inbound_video->pli_count = @@ -376,17 +470,16 @@ void SetInboundRTPStreamStatsFromVideoReceiverInfo( inbound_video->total_squared_inter_frame_delay = video_receiver_info.total_squared_inter_frame_delay; if (video_receiver_info.last_packet_received_timestamp_ms) { - inbound_video->last_packet_received_timestamp = - static_cast( - *video_receiver_info.last_packet_received_timestamp_ms) / - rtc::kNumMillisecsPerSec; + inbound_video->last_packet_received_timestamp = static_cast( + *video_receiver_info.last_packet_received_timestamp_ms); } if (video_receiver_info.estimated_playout_ntp_timestamp_ms) { + // TODO(bugs.webrtc.org/10529): Fix time origin if needed. inbound_video->estimated_playout_timestamp = static_cast( *video_receiver_info.estimated_playout_ntp_timestamp_ms); } - // TODO(https://crbug.com/webrtc/10529): When info's |content_info| is - // optional, support the "unspecified" value. + // TODO(bugs.webrtc.org/10529): When info's |content_info| is optional + // support the "unspecified" value. 
if (video_receiver_info.content_type == VideoContentType::SCREENSHARE) inbound_video->content_type = RTCContentType::kScreenshare; if (!video_receiver_info.decoder_implementation_name.empty()) { @@ -401,8 +494,6 @@ void SetOutboundRTPStreamStatsFromMediaSenderInfo( RTCOutboundRTPStreamStats* outbound_stats) { RTC_DCHECK(outbound_stats); outbound_stats->ssrc = media_sender_info.ssrc(); - // TODO(hbos): Support the remote case. https://crbug.com/657856 - outbound_stats->is_remote = false; outbound_stats->packets_sent = static_cast(media_sender_info.packets_sent); outbound_stats->retransmitted_packets_sent = @@ -425,7 +516,7 @@ void SetOutboundRTPStreamStatsFromVoiceSenderInfo( outbound_audio->kind = "audio"; if (voice_sender_info.codec_payload_type) { outbound_audio->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, false, *voice_sender_info.codec_payload_type); + mid, /*inbound=*/false, *voice_sender_info.codec_payload_type); } // |fir_count|, |pli_count| and |sli_count| are only valid for video and are // purposefully left undefined for audio. @@ -441,7 +532,7 @@ void SetOutboundRTPStreamStatsFromVideoSenderInfo( outbound_video->kind = "video"; if (video_sender_info.codec_payload_type) { outbound_video->codec_id = RTCCodecStatsIDFromMidDirectionAndPayload( - mid, false, *video_sender_info.codec_payload_type); + mid, /*inbound=*/false, *video_sender_info.codec_payload_type); } outbound_video->fir_count = static_cast(video_sender_info.firs_rcvd); @@ -510,12 +601,19 @@ ProduceRemoteInboundRtpStreamStatsFromReportBlockData( remote_inbound->kind = media_type == cricket::MEDIA_TYPE_AUDIO ? 
"audio" : "video"; remote_inbound->packets_lost = report_block.packets_lost; + remote_inbound->fraction_lost = + static_cast(report_block.fraction_lost) / (1 << 8); remote_inbound->round_trip_time = static_cast(report_block_data.last_rtt_ms()) / rtc::kNumMillisecsPerSec; + remote_inbound->total_round_trip_time = + static_cast(report_block_data.sum_rtt_ms()) / + rtc::kNumMillisecsPerSec; + remote_inbound->round_trip_time_measurements = + report_block_data.num_rtts(); - std::string local_id = RTCOutboundRTPStreamStatsIDFromSSRC( - media_type == cricket::MEDIA_TYPE_AUDIO, report_block.source_ssrc); + std::string local_id = + RTCOutboundRTPStreamStatsIDFromSSRC(media_type, report_block.source_ssrc); // Look up local stat from |outbound_rtps| where the pointers are non-const. auto local_id_it = outbound_rtps.find(local_id); if (local_id_it != outbound_rtps.end()) { @@ -616,6 +714,7 @@ const std::string& ProduceIceCandidateStats(int64_t timestamp_us, RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN, candidate.network_type()); } candidate_stats->ip = candidate.address().ipaddr().ToString(); + candidate_stats->address = candidate.address().ipaddr().ToString(); candidate_stats->port = static_cast(candidate.address().port()); candidate_stats->protocol = candidate.protocol(); candidate_stats->candidate_type = @@ -998,8 +1097,7 @@ RTCStatsCollector::RequestInfo::RequestInfo( rtc::scoped_refptr RTCStatsCollector::Create( PeerConnectionInternal* pc, int64_t cache_lifetime_us) { - return rtc::scoped_refptr( - new rtc::RefCountedObject(pc, cache_lifetime_us)); + return rtc::make_ref_counted(pc, cache_lifetime_us); } RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc, @@ -1019,8 +1117,6 @@ RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc, RTC_DCHECK(worker_thread_); RTC_DCHECK(network_thread_); RTC_DCHECK_GE(cache_lifetime_us_, 0); - pc_->SignalRtpDataChannelCreated().connect( - this, &RTCStatsCollector::OnRtpDataChannelCreated); 
pc_->SignalSctpDataChannelCreated().connect( this, &RTCStatsCollector::OnSctpDataChannelCreated); } @@ -1048,7 +1144,7 @@ void RTCStatsCollector::GetStatsReport( void RTCStatsCollector::GetStatsReportInternal( RTCStatsCollector::RequestInfo request) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); requests_.push_back(std::move(request)); // "Now" using a monotonically increasing timer. @@ -1060,9 +1156,30 @@ void RTCStatsCollector::GetStatsReportInternal( // reentrancy problems. std::vector requests; requests.swap(requests_); - signaling_thread_->PostTask( - RTC_FROM_HERE, rtc::Bind(&RTCStatsCollector::DeliverCachedReport, this, - cached_report_, std::move(requests))); + + // Task subclass to take ownership of the requests. + // TODO(nisse): Delete when we can use C++14, and do lambda capture with + // std::move. + class DeliveryTask : public QueuedTask { + public: + DeliveryTask(rtc::scoped_refptr collector, + rtc::scoped_refptr cached_report, + std::vector requests) + : collector_(collector), + cached_report_(cached_report), + requests_(std::move(requests)) {} + bool Run() override { + collector_->DeliverCachedReport(cached_report_, std::move(requests_)); + return true; + } + + private: + rtc::scoped_refptr collector_; + rtc::scoped_refptr cached_report_; + std::vector requests_; + }; + signaling_thread_->PostTask(std::make_unique( + this, cached_report_, std::move(requests))); } else if (!num_pending_partial_reports_) { // Only start gathering stats if we're not already gathering stats. In the // case of already gathering stats, |callback_| will be invoked when there @@ -1079,30 +1196,30 @@ void RTCStatsCollector::GetStatsReportInternal( // Prepare |transceiver_stats_infos_| and |call_stats_| for use in // |ProducePartialResultsOnNetworkThread| and // |ProducePartialResultsOnSignalingThread|. 
- PrepareTransceiverStatsInfosAndCallStats_s_w(); - // Prepare |transport_names_| for use in - // |ProducePartialResultsOnNetworkThread|. - transport_names_ = PrepareTransportNames_s(); - + PrepareTransceiverStatsInfosAndCallStats_s_w_n(); // Don't touch |network_report_| on the signaling thread until // ProducePartialResultsOnNetworkThread() has signaled the // |network_report_event_|. network_report_event_.Reset(); + rtc::scoped_refptr collector(this); network_thread_->PostTask( RTC_FROM_HERE, - rtc::Bind(&RTCStatsCollector::ProducePartialResultsOnNetworkThread, - this, timestamp_us)); + [collector, sctp_transport_name = pc_->sctp_transport_name(), + timestamp_us]() mutable { + collector->ProducePartialResultsOnNetworkThread( + timestamp_us, std::move(sctp_transport_name)); + }); ProducePartialResultsOnSignalingThread(timestamp_us); } } void RTCStatsCollector::ClearCachedStatsReport() { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); cached_report_ = nullptr; } void RTCStatsCollector::WaitForPendingRequest() { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); // If a request is pending, blocks until the |network_report_event_| is // signaled and then delivers the result. Otherwise this is a NO-OP. 
MergeNetworkReport_s(); @@ -1110,7 +1227,7 @@ void RTCStatsCollector::WaitForPendingRequest() { void RTCStatsCollector::ProducePartialResultsOnSignalingThread( int64_t timestamp_us) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; partial_report_ = RTCStatsReport::Create(timestamp_us); @@ -1129,7 +1246,7 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThread( void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl( int64_t timestamp_us, RTCStatsReport* partial_report) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; ProduceDataChannelStats_s(timestamp_us, partial_report); @@ -1140,16 +1257,27 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl( } void RTCStatsCollector::ProducePartialResultsOnNetworkThread( - int64_t timestamp_us) { - RTC_DCHECK(network_thread_->IsCurrent()); + int64_t timestamp_us, + absl::optional sctp_transport_name) { + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; // Touching |network_report_| on this thread is safe by this method because // |network_report_event_| is reset before this method is invoked. 
network_report_ = RTCStatsReport::Create(timestamp_us); + std::set transport_names; + if (sctp_transport_name) { + transport_names.emplace(std::move(*sctp_transport_name)); + } + + for (const auto& info : transceiver_stats_infos_) { + if (info.transport_name) + transport_names.insert(*info.transport_name); + } + std::map transport_stats_by_name = - pc_->GetTransportStatsByNames(transport_names_); + pc_->GetTransportStatsByNames(transport_names); std::map transport_cert_stats = PrepareTransportCertificateStats_n(transport_stats_by_name); @@ -1160,8 +1288,9 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThread( // Signal that it is now safe to touch |network_report_| on the signaling // thread, and post a task to merge it into the final results. network_report_event_.Set(); + rtc::scoped_refptr collector(this); signaling_thread_->PostTask( - RTC_FROM_HERE, rtc::Bind(&RTCStatsCollector::MergeNetworkReport_s, this)); + RTC_FROM_HERE, [collector] { collector->MergeNetworkReport_s(); }); } void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl( @@ -1170,7 +1299,7 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl( transport_stats_by_name, const std::map& transport_cert_stats, RTCStatsReport* partial_report) { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; ProduceCertificateStats_n(timestamp_us, transport_cert_stats, partial_report); @@ -1184,7 +1313,7 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl( } void RTCStatsCollector::MergeNetworkReport_s() { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); // The |network_report_event_| must be signaled for it to be safe to touch // |network_report_|. 
This is normally not blocking, but if // WaitForPendingRequest() is called while a request is pending, we might have @@ -1227,7 +1356,7 @@ void RTCStatsCollector::MergeNetworkReport_s() { void RTCStatsCollector::DeliverCachedReport( rtc::scoped_refptr cached_report, std::vector requests) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!requests.empty()); RTC_DCHECK(cached_report); @@ -1258,7 +1387,7 @@ void RTCStatsCollector::ProduceCertificateStats_n( int64_t timestamp_us, const std::map& transport_cert_stats, RTCStatsReport* report) const { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& transport_cert_stats_pair : transport_cert_stats) { @@ -1277,7 +1406,7 @@ void RTCStatsCollector::ProduceCodecStats_n( int64_t timestamp_us, const std::vector& transceiver_stats_infos, RTCStatsReport* report) const { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& stats : transceiver_stats_infos) { @@ -1349,7 +1478,7 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( transport_stats_by_name, const Call::Stats& call_stats, RTCStatsReport* report) const { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& entry : transport_stats_by_name) { @@ -1431,7 +1560,7 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( void RTCStatsCollector::ProduceMediaStreamStats_s( int64_t timestamp_us, RTCStatsReport* report) const { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map> track_ids; @@ -1468,7 +1597,7 @@ void RTCStatsCollector::ProduceMediaStreamStats_s( void 
RTCStatsCollector::ProduceMediaStreamTrackStats_s( int64_t timestamp_us, RTCStatsReport* report) const { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos_) { @@ -1491,7 +1620,7 @@ void RTCStatsCollector::ProduceMediaStreamTrackStats_s( void RTCStatsCollector::ProduceMediaSourceStats_s( int64_t timestamp_us, RTCStatsReport* report) const { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const RtpTransceiverStatsInfo& transceiver_stats_info : @@ -1560,6 +1689,7 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( if (video_sender_info) { video_source_stats->frames_per_second = video_sender_info->framerate_input; + video_source_stats->frames = video_sender_info->frames; } } media_source_stats = std::move(video_source_stats); @@ -1574,7 +1704,7 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( void RTCStatsCollector::ProducePeerConnectionStats_s( int64_t timestamp_us, RTCStatsReport* report) const { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::unique_ptr stats( @@ -1588,7 +1718,7 @@ void RTCStatsCollector::ProduceRTPStreamStats_n( int64_t timestamp_us, const std::vector& transceiver_stats_infos, RTCStatsReport* report) const { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos) { @@ -1606,7 +1736,7 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( int64_t timestamp_us, const RtpTransceiverStatsInfo& stats, RTCStatsReport* report) const { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); 
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; if (!stats.mid || !stats.transport_name) { @@ -1618,16 +1748,16 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( std::string mid = *stats.mid; std::string transport_id = RTCTransportStatsIDFromTransportChannel( *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); - // Inbound + // Inbound and remote-outbound. + // The remote-outbound stats are based on RTCP sender reports sent from the + // remote endpoint providing metrics about the remote outbound streams. for (const cricket::VoiceReceiverInfo& voice_receiver_info : track_media_info_map.voice_media_info()->receivers) { if (!voice_receiver_info.connected()) continue; - auto inbound_audio = std::make_unique( - RTCInboundRTPStreamStatsIDFromSSRC(true, voice_receiver_info.ssrc()), - timestamp_us); - SetInboundRTPStreamStatsFromVoiceReceiverInfo(mid, voice_receiver_info, - inbound_audio.get()); + // Inbound. + auto inbound_audio = + CreateInboundAudioStreamStats(voice_receiver_info, mid, timestamp_us); // TODO(hta): This lookup should look for the sender, not the track. rtc::scoped_refptr audio_track = track_media_info_map.GetAudioTrack(voice_receiver_info); @@ -1638,16 +1768,27 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( track_media_info_map.GetAttachmentIdByTrack(audio_track).value()); } inbound_audio->transport_id = transport_id; + // Remote-outbound. + auto remote_outbound_audio = CreateRemoteOutboundAudioStreamStats( + voice_receiver_info, mid, inbound_audio->id(), transport_id); + // Add stats. + if (remote_outbound_audio) { + // When the remote outbound stats are available, the remote ID for the + // local inbound stats is set. + inbound_audio->remote_id = remote_outbound_audio->id(); + report->AddStats(std::move(remote_outbound_audio)); + } report->AddStats(std::move(inbound_audio)); } - // Outbound + // Outbound. 
std::map audio_outbound_rtps; for (const cricket::VoiceSenderInfo& voice_sender_info : track_media_info_map.voice_media_info()->senders) { if (!voice_sender_info.connected()) continue; auto outbound_audio = std::make_unique( - RTCOutboundRTPStreamStatsIDFromSSRC(true, voice_sender_info.ssrc()), + RTCOutboundRTPStreamStatsIDFromSSRC(cricket::MEDIA_TYPE_AUDIO, + voice_sender_info.ssrc()), timestamp_us); SetOutboundRTPStreamStatsFromVoiceSenderInfo(mid, voice_sender_info, outbound_audio.get()); @@ -1668,7 +1809,7 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( std::make_pair(outbound_audio->id(), outbound_audio.get())); report->AddStats(std::move(outbound_audio)); } - // Remote-inbound + // Remote-inbound. // These are Report Block-based, information sent from the remote endpoint, // providing metrics about our Outbound streams. We take advantage of the fact // that RTCOutboundRtpStreamStats, RTCCodecStats and RTCTransport have already @@ -1687,7 +1828,7 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( int64_t timestamp_us, const RtpTransceiverStatsInfo& stats, RTCStatsReport* report) const { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; if (!stats.mid || !stats.transport_name) { @@ -1705,7 +1846,8 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( if (!video_receiver_info.connected()) continue; auto inbound_video = std::make_unique( - RTCInboundRTPStreamStatsIDFromSSRC(false, video_receiver_info.ssrc()), + RTCInboundRTPStreamStatsIDFromSSRC(cricket::MEDIA_TYPE_VIDEO, + video_receiver_info.ssrc()), timestamp_us); SetInboundRTPStreamStatsFromVideoReceiverInfo(mid, video_receiver_info, inbound_video.get()); @@ -1719,6 +1861,7 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( } inbound_video->transport_id = transport_id; report->AddStats(std::move(inbound_video)); + // TODO(crbug.com/webrtc/12529): Add remote-outbound stats. 
} // Outbound std::map video_outbound_rtps; @@ -1727,7 +1870,8 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( if (!video_sender_info.connected()) continue; auto outbound_video = std::make_unique( - RTCOutboundRTPStreamStatsIDFromSSRC(false, video_sender_info.ssrc()), + RTCOutboundRTPStreamStatsIDFromSSRC(cricket::MEDIA_TYPE_VIDEO, + video_sender_info.ssrc()), timestamp_us); SetOutboundRTPStreamStatsFromVideoSenderInfo(mid, video_sender_info, outbound_video.get()); @@ -1769,7 +1913,7 @@ void RTCStatsCollector::ProduceTransportStats_n( transport_stats_by_name, const std::map& transport_cert_stats, RTCStatsReport* report) const { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& entry : transport_stats_by_name) { @@ -1867,7 +2011,7 @@ std::map RTCStatsCollector::PrepareTransportCertificateStats_n( const std::map& transport_stats_by_name) const { - RTC_DCHECK(network_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(network_thread_); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map transport_cert_stats; @@ -1893,8 +2037,8 @@ RTCStatsCollector::PrepareTransportCertificateStats_n( return transport_cert_stats; } -void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w() { - RTC_DCHECK(signaling_thread_->IsCurrent()); +void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { + RTC_DCHECK_RUN_ON(signaling_thread_); transceiver_stats_infos_.clear(); // These are used to invoke GetStats for all the media channels together in @@ -1906,20 +2050,26 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w() { std::unique_ptr> video_stats; - { + auto transceivers = pc_->GetTransceiversInternal(); + + // TODO(tommi): See if we can avoid synchronously blocking the signaling + // thread while we do this (or avoid the Invoke at all). 
+ network_thread_->Invoke(RTC_FROM_HERE, [this, &transceivers, + &voice_stats, &video_stats] { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - for (const auto& transceiver : pc_->GetTransceiversInternal()) { + for (const auto& transceiver_proxy : transceivers) { + RtpTransceiver* transceiver = transceiver_proxy->internal(); cricket::MediaType media_type = transceiver->media_type(); // Prepare stats entry. The TrackMediaInfoMap will be filled in after the // stats have been fetched on the worker thread. transceiver_stats_infos_.emplace_back(); RtpTransceiverStatsInfo& stats = transceiver_stats_infos_.back(); - stats.transceiver = transceiver->internal(); + stats.transceiver = transceiver; stats.media_type = media_type; - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + cricket::ChannelInterface* channel = transceiver->channel(); if (!channel) { // The remaining fields require a BaseChannel. continue; @@ -1944,7 +2094,7 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w() { RTC_NOTREACHED(); } } - } + }); // We jump to the worker thread and call GetStats() on each media channel as // well as GetCallStats(). 
At the same time we construct the @@ -2003,38 +2153,13 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w() { }); } -std::set RTCStatsCollector::PrepareTransportNames_s() const { - RTC_DCHECK(signaling_thread_->IsCurrent()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - - std::set transport_names; - for (const auto& transceiver : pc_->GetTransceiversInternal()) { - if (transceiver->internal()->channel()) { - transport_names.insert( - transceiver->internal()->channel()->transport_name()); - } - } - if (pc_->rtp_data_channel()) { - transport_names.insert(pc_->rtp_data_channel()->transport_name()); - } - if (pc_->sctp_transport_name()) { - transport_names.insert(*pc_->sctp_transport_name()); - } - return transport_names; -} - -void RTCStatsCollector::OnRtpDataChannelCreated(RtpDataChannel* channel) { - channel->SignalOpened.connect(this, &RTCStatsCollector::OnDataChannelOpened); - channel->SignalClosed.connect(this, &RTCStatsCollector::OnDataChannelClosed); -} - void RTCStatsCollector::OnSctpDataChannelCreated(SctpDataChannel* channel) { channel->SignalOpened.connect(this, &RTCStatsCollector::OnDataChannelOpened); channel->SignalClosed.connect(this, &RTCStatsCollector::OnDataChannelClosed); } void RTCStatsCollector::OnDataChannelOpened(DataChannelInterface* channel) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); bool result = internal_record_.opened_data_channels .insert(reinterpret_cast(channel)) .second; @@ -2043,7 +2168,7 @@ void RTCStatsCollector::OnDataChannelOpened(DataChannelInterface* channel) { } void RTCStatsCollector::OnDataChannelClosed(DataChannelInterface* channel) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); // Only channels that have been fully opened (and have increased the // |data_channels_opened_| counter) increase the closed counter. 
if (internal_record_.opened_data_channels.erase( diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h index 35576e91d..5f13f54d2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h @@ -11,6 +11,7 @@ #ifndef PC_RTC_STATS_COLLECTOR_H_ #define PC_RTC_STATS_COLLECTOR_H_ +#include #include #include #include @@ -18,6 +19,8 @@ #include #include "absl/types/optional.h" +#include "api/data_channel_interface.h" +#include "api/media_types.h" #include "api/scoped_refptr.h" #include "api/stats/rtc_stats_collector_callback.h" #include "api/stats/rtc_stats_report.h" @@ -26,11 +29,19 @@ #include "media/base/media_channel.h" #include "pc/data_channel_utils.h" #include "pc/peer_connection_internal.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_transceiver.h" +#include "pc/sctp_data_channel.h" #include "pc/track_media_info_map.h" +#include "pc/transport_stats.h" +#include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/ref_count.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -42,7 +53,7 @@ class RtpReceiverInternal; // Stats are gathered on the signaling, worker and network threads // asynchronously. The callback is invoked on the signaling thread. Resulting // reports are cached for |cache_lifetime_| ms. -class RTCStatsCollector : public virtual rtc::RefCountInterface, +class RTCStatsCollector : public rtc::RefCountInterface, public sigslot::has_slots<> { public: static rtc::scoped_refptr Create( @@ -216,18 +227,18 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface, const std::map& transport_stats_by_name) const; // The results are stored in |transceiver_stats_infos_| and |call_stats_|. 
- void PrepareTransceiverStatsInfosAndCallStats_s_w(); - std::set PrepareTransportNames_s() const; + void PrepareTransceiverStatsInfosAndCallStats_s_w_n(); // Stats gathering on a particular thread. void ProducePartialResultsOnSignalingThread(int64_t timestamp_us); - void ProducePartialResultsOnNetworkThread(int64_t timestamp_us); + void ProducePartialResultsOnNetworkThread( + int64_t timestamp_us, + absl::optional sctp_transport_name); // Merges |network_report_| into |partial_report_| and completes the request. // This is a NO-OP if |network_report_| is null. void MergeNetworkReport_s(); // Slots for signals (sigslot) that are wired up to |pc_|. - void OnRtpDataChannelCreated(RtpDataChannel* channel); void OnSctpDataChannelCreated(SctpDataChannel* channel); // Slots for signals (sigslot) that are wired up to |channel|. void OnDataChannelOpened(DataChannelInterface* channel); @@ -256,12 +267,16 @@ class RTCStatsCollector : public virtual rtc::RefCountInterface, // has updated the value of |network_report_|. rtc::Event network_report_event_; - // Set in |GetStatsReport|, read in |ProducePartialResultsOnNetworkThread| and - // |ProducePartialResultsOnSignalingThread|, reset after work is complete. Not - // passed as arguments to avoid copies. This is thread safe - when we - // set/reset we know there are no pending stats requests in progress. + // Cleared and set in `PrepareTransceiverStatsInfosAndCallStats_s_w_n`, + // starting out on the signaling thread, then network. Later read on the + // network and signaling threads as part of collecting stats and finally + // reset when the work is done. Initially this variable was added and not + // passed around as an arguments to avoid copies. This is thread safe due to + // how operations are sequenced and we don't start the stats collection + // sequence if one is in progress. As a future improvement though, we could + // now get rid of the variable and keep the data scoped within a stats + // collection sequence. 
std::vector transceiver_stats_infos_; - std::set transport_names_; Call::Stats call_stats_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc index ee68ec9a0..d92e7ff29 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc @@ -114,9 +114,9 @@ class RTCStatsIntegrationTest : public ::testing::Test { RTC_CHECK(network_thread_->Start()); RTC_CHECK(worker_thread_->Start()); - caller_ = new rtc::RefCountedObject( + caller_ = rtc::make_ref_counted( "caller", network_thread_.get(), worker_thread_.get()); - callee_ = new rtc::RefCountedObject( + callee_ = rtc::make_ref_counted( "callee", network_thread_.get(), worker_thread_.get()); } @@ -399,6 +399,9 @@ class RTCStatsReportVerifier { } else if (stats.type() == RTCRemoteInboundRtpStreamStats::kType) { verify_successful &= VerifyRTCRemoteInboundRtpStreamStats( stats.cast_to()); + } else if (stats.type() == RTCRemoteOutboundRtpStreamStats::kType) { + verify_successful &= VerifyRTCRemoteOutboundRTPStreamStats( + stats.cast_to()); } else if (stats.type() == RTCAudioSourceStats::kType) { // RTCAudioSourceStats::kType and RTCVideoSourceStats::kType both have // the value "media-source", but they are distinguishable with pointer @@ -528,12 +531,12 @@ class RTCStatsReportVerifier { verifier.TestMemberIsDefined(candidate.network_type); } verifier.TestMemberIsDefined(candidate.ip); + verifier.TestMemberIsDefined(candidate.address); verifier.TestMemberIsNonNegative(candidate.port); verifier.TestMemberIsDefined(candidate.protocol); verifier.TestMemberIsDefined(candidate.candidate_type); verifier.TestMemberIsNonNegative(candidate.priority); verifier.TestMemberIsUndefined(candidate.url); - verifier.TestMemberIsDefined(candidate.deleted); verifier.TestMemberIsUndefined(candidate.relay_protocol); return verifier.ExpectAllMembersSuccessfullyTested(); } @@ -768,32 
+771,38 @@ class RTCStatsReportVerifier { } void VerifyRTCRTPStreamStats(const RTCRTPStreamStats& stream, - RTCStatsVerifier* verifier) { - verifier->TestMemberIsDefined(stream.ssrc); - verifier->TestMemberIsDefined(stream.is_remote); - verifier->TestMemberIsDefined(stream.media_type); - verifier->TestMemberIsDefined(stream.kind); - verifier->TestMemberIsIDReference(stream.track_id, - RTCMediaStreamTrackStats::kType); - verifier->TestMemberIsIDReference(stream.transport_id, - RTCTransportStats::kType); - verifier->TestMemberIsIDReference(stream.codec_id, RTCCodecStats::kType); - if (stream.media_type.is_defined() && *stream.media_type == "video") { - verifier->TestMemberIsNonNegative(stream.fir_count); - verifier->TestMemberIsNonNegative(stream.pli_count); - verifier->TestMemberIsNonNegative(stream.nack_count); + RTCStatsVerifier& verifier) { + verifier.TestMemberIsDefined(stream.ssrc); + verifier.TestMemberIsDefined(stream.kind); + // Some legacy metrics are only defined for some of the RTP types in the + // hierarcy. 
+ if (stream.type() == RTCInboundRTPStreamStats::kType || + stream.type() == RTCOutboundRTPStreamStats::kType) { + verifier.TestMemberIsDefined(stream.media_type); + verifier.TestMemberIsIDReference(stream.track_id, + RTCMediaStreamTrackStats::kType); } else { - verifier->TestMemberIsUndefined(stream.fir_count); - verifier->TestMemberIsUndefined(stream.pli_count); - verifier->TestMemberIsUndefined(stream.nack_count); + verifier.TestMemberIsUndefined(stream.media_type); + verifier.TestMemberIsUndefined(stream.track_id); } - verifier->TestMemberIsUndefined(stream.sli_count); + verifier.TestMemberIsIDReference(stream.transport_id, + RTCTransportStats::kType); + verifier.TestMemberIsIDReference(stream.codec_id, RTCCodecStats::kType); + } + + void VerifyRTCSentRTPStreamStats(const RTCSentRtpStreamStats& sent_stream, + RTCStatsVerifier& verifier) { + VerifyRTCRTPStreamStats(sent_stream, verifier); + verifier.TestMemberIsDefined(sent_stream.packets_sent); + verifier.TestMemberIsDefined(sent_stream.bytes_sent); } bool VerifyRTCInboundRTPStreamStats( const RTCInboundRTPStreamStats& inbound_stream) { RTCStatsVerifier verifier(report_, &inbound_stream); - VerifyRTCRTPStreamStats(inbound_stream, &verifier); + VerifyRTCReceivedRtpStreamStats(inbound_stream, verifier); + verifier.TestMemberIsOptionalIDReference( + inbound_stream.remote_id, RTCRemoteOutboundRtpStreamStats::kType); if (inbound_stream.media_type.is_defined() && *inbound_stream.media_type == "video") { verifier.TestMemberIsNonNegative(inbound_stream.qp_sum); @@ -816,9 +825,6 @@ class RTCStatsReportVerifier { verifier.TestMemberIsNonNegative(inbound_stream.bytes_received); verifier.TestMemberIsNonNegative( inbound_stream.header_bytes_received); - // packets_lost is defined as signed, but this should never happen in - // this test. See RFC 3550. 
- verifier.TestMemberIsNonNegative(inbound_stream.packets_lost); verifier.TestMemberIsDefined(inbound_stream.last_packet_received_timestamp); if (inbound_stream.frames_received.ValueOrDefault(0) > 0) { verifier.TestMemberIsNonNegative(inbound_stream.frame_width); @@ -836,7 +842,6 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(inbound_stream.frame_bit_depth); if (inbound_stream.media_type.is_defined() && *inbound_stream.media_type == "video") { - verifier.TestMemberIsUndefined(inbound_stream.jitter); verifier.TestMemberIsUndefined(inbound_stream.jitter_buffer_delay); verifier.TestMemberIsUndefined( inbound_stream.jitter_buffer_emitted_count); @@ -852,8 +857,13 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(inbound_stream.total_audio_energy); verifier.TestMemberIsUndefined(inbound_stream.total_samples_duration); verifier.TestMemberIsNonNegative(inbound_stream.frames_received); + verifier.TestMemberIsNonNegative(inbound_stream.fir_count); + verifier.TestMemberIsNonNegative(inbound_stream.pli_count); + verifier.TestMemberIsNonNegative(inbound_stream.nack_count); } else { - verifier.TestMemberIsNonNegative(inbound_stream.jitter); + verifier.TestMemberIsUndefined(inbound_stream.fir_count); + verifier.TestMemberIsUndefined(inbound_stream.pli_count); + verifier.TestMemberIsUndefined(inbound_stream.nack_count); verifier.TestMemberIsNonNegative( inbound_stream.jitter_buffer_delay); verifier.TestMemberIsNonNegative( @@ -920,17 +930,23 @@ class RTCStatsReportVerifier { bool VerifyRTCOutboundRTPStreamStats( const RTCOutboundRTPStreamStats& outbound_stream) { RTCStatsVerifier verifier(report_, &outbound_stream); - VerifyRTCRTPStreamStats(outbound_stream, &verifier); + VerifyRTCRTPStreamStats(outbound_stream, verifier); if (outbound_stream.media_type.is_defined() && *outbound_stream.media_type == "video") { verifier.TestMemberIsIDReference(outbound_stream.media_source_id, RTCVideoSourceStats::kType); + 
verifier.TestMemberIsNonNegative(outbound_stream.fir_count); + verifier.TestMemberIsNonNegative(outbound_stream.pli_count); + verifier.TestMemberIsNonNegative(outbound_stream.nack_count); if (*outbound_stream.frames_encoded > 0) { verifier.TestMemberIsNonNegative(outbound_stream.qp_sum); } else { verifier.TestMemberIsUndefined(outbound_stream.qp_sum); } } else { + verifier.TestMemberIsUndefined(outbound_stream.fir_count); + verifier.TestMemberIsUndefined(outbound_stream.pli_count); + verifier.TestMemberIsUndefined(outbound_stream.nack_count); verifier.TestMemberIsIDReference(outbound_stream.media_source_id, RTCAudioSourceStats::kType); verifier.TestMemberIsUndefined(outbound_stream.qp_sum); @@ -1004,23 +1020,40 @@ class RTCStatsReportVerifier { return verifier.ExpectAllMembersSuccessfullyTested(); } + void VerifyRTCReceivedRtpStreamStats( + const RTCReceivedRtpStreamStats& received_rtp, + RTCStatsVerifier& verifier) { + VerifyRTCRTPStreamStats(received_rtp, verifier); + verifier.TestMemberIsNonNegative(received_rtp.jitter); + verifier.TestMemberIsDefined(received_rtp.packets_lost); + } + bool VerifyRTCRemoteInboundRtpStreamStats( const RTCRemoteInboundRtpStreamStats& remote_inbound_stream) { RTCStatsVerifier verifier(report_, &remote_inbound_stream); - verifier.TestMemberIsDefined(remote_inbound_stream.ssrc); - verifier.TestMemberIsDefined(remote_inbound_stream.kind); - verifier.TestMemberIsIDReference(remote_inbound_stream.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsIDReference(remote_inbound_stream.codec_id, - RTCCodecStats::kType); - verifier.TestMemberIsDefined(remote_inbound_stream.packets_lost); - // Note that the existance of RTCCodecStats is needed for |codec_id| and - // |jitter| to be present. 
- verifier.TestMemberIsNonNegative(remote_inbound_stream.jitter); + VerifyRTCReceivedRtpStreamStats(remote_inbound_stream, verifier); + verifier.TestMemberIsDefined(remote_inbound_stream.fraction_lost); verifier.TestMemberIsIDReference(remote_inbound_stream.local_id, RTCOutboundRTPStreamStats::kType); verifier.TestMemberIsNonNegative( remote_inbound_stream.round_trip_time); + verifier.TestMemberIsNonNegative( + remote_inbound_stream.total_round_trip_time); + verifier.TestMemberIsNonNegative( + remote_inbound_stream.round_trip_time_measurements); + return verifier.ExpectAllMembersSuccessfullyTested(); + } + + bool VerifyRTCRemoteOutboundRTPStreamStats( + const RTCRemoteOutboundRtpStreamStats& remote_outbound_stream) { + RTCStatsVerifier verifier(report_, &remote_outbound_stream); + VerifyRTCRTPStreamStats(remote_outbound_stream, verifier); + VerifyRTCSentRTPStreamStats(remote_outbound_stream, verifier); + verifier.TestMemberIsIDReference(remote_outbound_stream.local_id, + RTCOutboundRTPStreamStats::kType); + verifier.TestMemberIsNonNegative( + remote_outbound_stream.remote_timestamp); + verifier.TestMemberIsDefined(remote_outbound_stream.reports_sent); return verifier.ExpectAllMembersSuccessfullyTested(); } @@ -1057,9 +1090,7 @@ class RTCStatsReportVerifier { // reflect real code. verifier.TestMemberIsUndefined(video_source.width); verifier.TestMemberIsUndefined(video_source.height); - // TODO(hbos): When |frames| is implemented test that this member should be - // expected to be non-negative. 
- verifier.TestMemberIsUndefined(video_source.frames); + verifier.TestMemberIsNonNegative(video_source.frames); verifier.TestMemberIsNonNegative(video_source.frames_per_second); return verifier.ExpectAllMembersSuccessfullyTested(); } @@ -1091,7 +1122,7 @@ class RTCStatsReportVerifier { rtc::scoped_refptr report_; }; -#ifdef HAVE_SCTP +#ifdef WEBRTC_HAVE_SCTP TEST_F(RTCStatsIntegrationTest, GetStatsFromCaller) { StartCall(); @@ -1254,7 +1285,21 @@ TEST_F(RTCStatsIntegrationTest, GetStatsReferencedIds) { } } } -#endif // HAVE_SCTP + +TEST_F(RTCStatsIntegrationTest, GetStatsContainsNoDuplicateMembers) { + StartCall(); + + rtc::scoped_refptr report = GetStatsFromCallee(); + for (const RTCStats& stats : *report) { + std::set member_names; + for (const auto* member : stats.Members()) { + EXPECT_TRUE(member_names.find(member->name()) == member_names.end()) + << member->name() << " is a duplicate!"; + member_names.insert(member->name()); + } + } +} +#endif // WEBRTC_HAVE_SCTP } // namespace diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc index aa53dde18..e579072ea 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_traversal.cc @@ -99,24 +99,36 @@ std::vector GetStatsReferencedIds(const RTCStats& stats) { AddIdIfDefined(track.media_source_id, &neighbor_ids); } else if (type == RTCPeerConnectionStats::kType) { // RTCPeerConnectionStats does not have any neighbor references. 
- } else if (type == RTCInboundRTPStreamStats::kType || - type == RTCOutboundRTPStreamStats::kType) { - const auto& rtp = static_cast(stats); - AddIdIfDefined(rtp.track_id, &neighbor_ids); - AddIdIfDefined(rtp.transport_id, &neighbor_ids); - AddIdIfDefined(rtp.codec_id, &neighbor_ids); - if (type == RTCOutboundRTPStreamStats::kType) { - const auto& outbound_rtp = - static_cast(stats); - AddIdIfDefined(outbound_rtp.media_source_id, &neighbor_ids); - AddIdIfDefined(outbound_rtp.remote_id, &neighbor_ids); - } + } else if (type == RTCInboundRTPStreamStats::kType) { + const auto& inbound_rtp = + static_cast(stats); + AddIdIfDefined(inbound_rtp.remote_id, &neighbor_ids); + AddIdIfDefined(inbound_rtp.track_id, &neighbor_ids); + AddIdIfDefined(inbound_rtp.transport_id, &neighbor_ids); + AddIdIfDefined(inbound_rtp.codec_id, &neighbor_ids); + } else if (type == RTCOutboundRTPStreamStats::kType) { + const auto& outbound_rtp = + static_cast(stats); + AddIdIfDefined(outbound_rtp.remote_id, &neighbor_ids); + AddIdIfDefined(outbound_rtp.track_id, &neighbor_ids); + AddIdIfDefined(outbound_rtp.transport_id, &neighbor_ids); + AddIdIfDefined(outbound_rtp.codec_id, &neighbor_ids); + AddIdIfDefined(outbound_rtp.media_source_id, &neighbor_ids); } else if (type == RTCRemoteInboundRtpStreamStats::kType) { const auto& remote_inbound_rtp = static_cast(stats); AddIdIfDefined(remote_inbound_rtp.transport_id, &neighbor_ids); AddIdIfDefined(remote_inbound_rtp.codec_id, &neighbor_ids); AddIdIfDefined(remote_inbound_rtp.local_id, &neighbor_ids); + } else if (type == RTCRemoteOutboundRtpStreamStats::kType) { + const auto& remote_outbound_rtp = + static_cast(stats); + // Inherited from `RTCRTPStreamStats`. + AddIdIfDefined(remote_outbound_rtp.track_id, &neighbor_ids); + AddIdIfDefined(remote_outbound_rtp.transport_id, &neighbor_ids); + AddIdIfDefined(remote_outbound_rtp.codec_id, &neighbor_ids); + // Direct members of `RTCRemoteOutboundRtpStreamStats`. 
+ AddIdIfDefined(remote_outbound_rtp.local_id, &neighbor_ids); } else if (type == RTCAudioSourceStats::kType || type == RTCVideoSourceStats::kType) { // RTC[Audio/Video]SourceStats does not have any neighbor references. diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.cc deleted file mode 100644 index b08b2b2ff..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.cc +++ /dev/null @@ -1,394 +0,0 @@ -/* - * Copyright 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "pc/rtp_data_channel.h" - -#include -#include -#include - -#include "api/proxy.h" -#include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/thread.h" - -namespace webrtc { - -namespace { - -static size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024; - -static std::atomic g_unique_id{0}; - -int GenerateUniqueId() { - return ++g_unique_id; -} - -// Define proxy for DataChannelInterface. 
-BEGIN_SIGNALING_PROXY_MAP(DataChannel) -PROXY_SIGNALING_THREAD_DESTRUCTOR() -PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*) -PROXY_METHOD0(void, UnregisterObserver) -BYPASS_PROXY_CONSTMETHOD0(std::string, label) -BYPASS_PROXY_CONSTMETHOD0(bool, reliable) -BYPASS_PROXY_CONSTMETHOD0(bool, ordered) -BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime) -BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits) -BYPASS_PROXY_CONSTMETHOD0(absl::optional, maxRetransmitsOpt) -BYPASS_PROXY_CONSTMETHOD0(absl::optional, maxPacketLifeTime) -BYPASS_PROXY_CONSTMETHOD0(std::string, protocol) -BYPASS_PROXY_CONSTMETHOD0(bool, negotiated) -// Can't bypass the proxy since the id may change. -PROXY_CONSTMETHOD0(int, id) -BYPASS_PROXY_CONSTMETHOD0(Priority, priority) -PROXY_CONSTMETHOD0(DataState, state) -PROXY_CONSTMETHOD0(RTCError, error) -PROXY_CONSTMETHOD0(uint32_t, messages_sent) -PROXY_CONSTMETHOD0(uint64_t, bytes_sent) -PROXY_CONSTMETHOD0(uint32_t, messages_received) -PROXY_CONSTMETHOD0(uint64_t, bytes_received) -PROXY_CONSTMETHOD0(uint64_t, buffered_amount) -PROXY_METHOD0(void, Close) -// TODO(bugs.webrtc.org/11547): Change to run on the network thread. 
-PROXY_METHOD1(bool, Send, const DataBuffer&) -END_PROXY_MAP() - -} // namespace - -rtc::scoped_refptr RtpDataChannel::Create( - RtpDataChannelProviderInterface* provider, - const std::string& label, - const DataChannelInit& config, - rtc::Thread* signaling_thread) { - rtc::scoped_refptr channel( - new rtc::RefCountedObject(config, provider, label, - signaling_thread)); - if (!channel->Init()) { - return nullptr; - } - return channel; -} - -// static -rtc::scoped_refptr RtpDataChannel::CreateProxy( - rtc::scoped_refptr channel) { - return DataChannelProxy::Create(channel->signaling_thread_, channel.get()); -} - -RtpDataChannel::RtpDataChannel(const DataChannelInit& config, - RtpDataChannelProviderInterface* provider, - const std::string& label, - rtc::Thread* signaling_thread) - : signaling_thread_(signaling_thread), - internal_id_(GenerateUniqueId()), - label_(label), - config_(config), - provider_(provider) { - RTC_DCHECK_RUN_ON(signaling_thread_); -} - -bool RtpDataChannel::Init() { - RTC_DCHECK_RUN_ON(signaling_thread_); - if (config_.reliable || config_.id != -1 || config_.maxRetransmits || - config_.maxRetransmitTime) { - RTC_LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to " - "invalid DataChannelInit."; - return false; - } - - return true; -} - -RtpDataChannel::~RtpDataChannel() { - RTC_DCHECK_RUN_ON(signaling_thread_); -} - -void RtpDataChannel::RegisterObserver(DataChannelObserver* observer) { - RTC_DCHECK_RUN_ON(signaling_thread_); - observer_ = observer; - DeliverQueuedReceivedData(); -} - -void RtpDataChannel::UnregisterObserver() { - RTC_DCHECK_RUN_ON(signaling_thread_); - observer_ = nullptr; -} - -void RtpDataChannel::Close() { - RTC_DCHECK_RUN_ON(signaling_thread_); - if (state_ == kClosed) - return; - send_ssrc_ = 0; - send_ssrc_set_ = false; - SetState(kClosing); - UpdateState(); -} - -RtpDataChannel::DataState RtpDataChannel::state() const { - RTC_DCHECK_RUN_ON(signaling_thread_); - return state_; -} - -RTCError 
RtpDataChannel::error() const { - RTC_DCHECK_RUN_ON(signaling_thread_); - return error_; -} - -uint32_t RtpDataChannel::messages_sent() const { - RTC_DCHECK_RUN_ON(signaling_thread_); - return messages_sent_; -} - -uint64_t RtpDataChannel::bytes_sent() const { - RTC_DCHECK_RUN_ON(signaling_thread_); - return bytes_sent_; -} - -uint32_t RtpDataChannel::messages_received() const { - RTC_DCHECK_RUN_ON(signaling_thread_); - return messages_received_; -} - -uint64_t RtpDataChannel::bytes_received() const { - RTC_DCHECK_RUN_ON(signaling_thread_); - return bytes_received_; -} - -bool RtpDataChannel::Send(const DataBuffer& buffer) { - RTC_DCHECK_RUN_ON(signaling_thread_); - - if (state_ != kOpen) { - return false; - } - - // TODO(jiayl): the spec is unclear about if the remote side should get the - // onmessage event. We need to figure out the expected behavior and change the - // code accordingly. - if (buffer.size() == 0) { - return true; - } - - return SendDataMessage(buffer); -} - -void RtpDataChannel::SetReceiveSsrc(uint32_t receive_ssrc) { - RTC_DCHECK_RUN_ON(signaling_thread_); - - if (receive_ssrc_set_) { - return; - } - receive_ssrc_ = receive_ssrc; - receive_ssrc_set_ = true; - UpdateState(); -} - -void RtpDataChannel::OnTransportChannelClosed() { - RTCError error = RTCError(RTCErrorType::OPERATION_ERROR_WITH_DATA, - "Transport channel closed"); - CloseAbruptlyWithError(std::move(error)); -} - -DataChannelStats RtpDataChannel::GetStats() const { - RTC_DCHECK_RUN_ON(signaling_thread_); - DataChannelStats stats{internal_id_, id(), label(), - protocol(), state(), messages_sent(), - messages_received(), bytes_sent(), bytes_received()}; - return stats; -} - -// The remote peer request that this channel shall be closed. -void RtpDataChannel::RemotePeerRequestClose() { - // Close with error code explicitly set to OK. 
- CloseAbruptlyWithError(RTCError()); -} - -void RtpDataChannel::SetSendSsrc(uint32_t send_ssrc) { - RTC_DCHECK_RUN_ON(signaling_thread_); - if (send_ssrc_set_) { - return; - } - send_ssrc_ = send_ssrc; - send_ssrc_set_ = true; - UpdateState(); -} - -void RtpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, - const rtc::CopyOnWriteBuffer& payload) { - RTC_DCHECK_RUN_ON(signaling_thread_); - if (params.ssrc != receive_ssrc_) { - return; - } - - RTC_DCHECK(params.type == cricket::DMT_BINARY || - params.type == cricket::DMT_TEXT); - - RTC_LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = " - << params.sid; - - bool binary = (params.type == cricket::DMT_BINARY); - auto buffer = std::make_unique(payload, binary); - if (state_ == kOpen && observer_) { - ++messages_received_; - bytes_received_ += buffer->size(); - observer_->OnMessage(*buffer.get()); - } else { - if (queued_received_data_.byte_count() + payload.size() > - kMaxQueuedReceivedDataBytes) { - RTC_LOG(LS_ERROR) << "Queued received data exceeds the max buffer size."; - - queued_received_data_.Clear(); - CloseAbruptlyWithError( - RTCError(RTCErrorType::RESOURCE_EXHAUSTED, - "Queued received data exceeds the max buffer size.")); - - return; - } - queued_received_data_.PushBack(std::move(buffer)); - } -} - -void RtpDataChannel::OnChannelReady(bool writable) { - RTC_DCHECK_RUN_ON(signaling_thread_); - - writable_ = writable; - if (!writable) { - return; - } - - UpdateState(); -} - -void RtpDataChannel::CloseAbruptlyWithError(RTCError error) { - RTC_DCHECK_RUN_ON(signaling_thread_); - - if (state_ == kClosed) { - return; - } - - if (connected_to_provider_) { - DisconnectFromProvider(); - } - - // Still go to "kClosing" before "kClosed", since observers may be expecting - // that. 
- SetState(kClosing); - error_ = std::move(error); - SetState(kClosed); -} - -void RtpDataChannel::UpdateState() { - RTC_DCHECK_RUN_ON(signaling_thread_); - // UpdateState determines what to do from a few state variables. Include - // all conditions required for each state transition here for - // clarity. - switch (state_) { - case kConnecting: { - if (send_ssrc_set_ == receive_ssrc_set_) { - if (!connected_to_provider_) { - connected_to_provider_ = provider_->ConnectDataChannel(this); - } - if (connected_to_provider_ && writable_) { - SetState(kOpen); - // If we have received buffers before the channel got writable. - // Deliver them now. - DeliverQueuedReceivedData(); - } - } - break; - } - case kOpen: { - break; - } - case kClosing: { - // For RTP data channels, we can go to "closed" after we finish - // sending data and the send/recv SSRCs are unset. - if (connected_to_provider_) { - DisconnectFromProvider(); - } - if (!send_ssrc_set_ && !receive_ssrc_set_) { - SetState(kClosed); - } - break; - } - case kClosed: - break; - } -} - -void RtpDataChannel::SetState(DataState state) { - RTC_DCHECK_RUN_ON(signaling_thread_); - if (state_ == state) { - return; - } - - state_ = state; - if (observer_) { - observer_->OnStateChange(); - } - if (state_ == kOpen) { - SignalOpened(this); - } else if (state_ == kClosed) { - SignalClosed(this); - } -} - -void RtpDataChannel::DisconnectFromProvider() { - RTC_DCHECK_RUN_ON(signaling_thread_); - if (!connected_to_provider_) - return; - - provider_->DisconnectDataChannel(this); - connected_to_provider_ = false; -} - -void RtpDataChannel::DeliverQueuedReceivedData() { - RTC_DCHECK_RUN_ON(signaling_thread_); - if (!observer_) { - return; - } - - while (!queued_received_data_.Empty()) { - std::unique_ptr buffer = queued_received_data_.PopFront(); - ++messages_received_; - bytes_received_ += buffer->size(); - observer_->OnMessage(*buffer); - } -} - -bool RtpDataChannel::SendDataMessage(const DataBuffer& buffer) { - 
RTC_DCHECK_RUN_ON(signaling_thread_); - cricket::SendDataParams send_params; - - send_params.ssrc = send_ssrc_; - send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT; - - cricket::SendDataResult send_result = cricket::SDR_SUCCESS; - bool success = provider_->SendData(send_params, buffer.data, &send_result); - - if (success) { - ++messages_sent_; - bytes_sent_ += buffer.size(); - if (observer_ && buffer.size() > 0) { - observer_->OnBufferedAmountChange(buffer.size()); - } - return true; - } - - return false; -} - -// static -void RtpDataChannel::ResetInternalIdAllocatorForTesting(int new_value) { - g_unique_id = new_value; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.h deleted file mode 100644 index ea2de49b5..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_data_channel.h +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef PC_RTP_DATA_CHANNEL_H_ -#define PC_RTP_DATA_CHANNEL_H_ - -#include -#include - -#include "api/data_channel_interface.h" -#include "api/priority.h" -#include "api/scoped_refptr.h" -#include "api/transport/data_channel_transport_interface.h" -#include "media/base/media_channel.h" -#include "pc/channel.h" -#include "pc/data_channel_utils.h" -#include "rtc_base/async_invoker.h" -#include "rtc_base/third_party/sigslot/sigslot.h" - -namespace webrtc { - -class RtpDataChannel; - -// TODO(deadbeef): Once RTP data channels go away, get rid of this and have -// DataChannel depend on SctpTransportInternal (pure virtual SctpTransport -// interface) instead. -class RtpDataChannelProviderInterface { - public: - // Sends the data to the transport. - virtual bool SendData(const cricket::SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload, - cricket::SendDataResult* result) = 0; - // Connects to the transport signals. - virtual bool ConnectDataChannel(RtpDataChannel* data_channel) = 0; - // Disconnects from the transport signals. - virtual void DisconnectDataChannel(RtpDataChannel* data_channel) = 0; - // Returns true if the transport channel is ready to send data. - virtual bool ReadyToSendData() const = 0; - - protected: - virtual ~RtpDataChannelProviderInterface() {} -}; - -// RtpDataChannel is an implementation of the DataChannelInterface based on -// libjingle's data engine. It provides an implementation of unreliable data -// channels. - -// DataChannel states: -// kConnecting: The channel has been created the transport might not yet be -// ready. -// kOpen: The channel have a local SSRC set by a call to UpdateSendSsrc -// and a remote SSRC set by call to UpdateReceiveSsrc and the transport -// has been writable once. -// kClosing: DataChannelInterface::Close has been called or UpdateReceiveSsrc -// has been called with SSRC==0 -// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc has been called with -// SSRC==0. 
-class RtpDataChannel : public DataChannelInterface, - public sigslot::has_slots<> { - public: - static rtc::scoped_refptr Create( - RtpDataChannelProviderInterface* provider, - const std::string& label, - const DataChannelInit& config, - rtc::Thread* signaling_thread); - - // Instantiates an API proxy for a DataChannel instance that will be handed - // out to external callers. - static rtc::scoped_refptr CreateProxy( - rtc::scoped_refptr channel); - - void RegisterObserver(DataChannelObserver* observer) override; - void UnregisterObserver() override; - - std::string label() const override { return label_; } - bool reliable() const override { return false; } - bool ordered() const override { return config_.ordered; } - // Backwards compatible accessors - uint16_t maxRetransmitTime() const override { - return config_.maxRetransmitTime ? *config_.maxRetransmitTime - : static_cast(-1); - } - uint16_t maxRetransmits() const override { - return config_.maxRetransmits ? *config_.maxRetransmits - : static_cast(-1); - } - absl::optional maxPacketLifeTime() const override { - return config_.maxRetransmitTime; - } - absl::optional maxRetransmitsOpt() const override { - return config_.maxRetransmits; - } - std::string protocol() const override { return config_.protocol; } - bool negotiated() const override { return config_.negotiated; } - int id() const override { return config_.id; } - Priority priority() const override { - return config_.priority ? 
*config_.priority : Priority::kLow; - } - - virtual int internal_id() const { return internal_id_; } - - uint64_t buffered_amount() const override { return 0; } - void Close() override; - DataState state() const override; - RTCError error() const override; - uint32_t messages_sent() const override; - uint64_t bytes_sent() const override; - uint32_t messages_received() const override; - uint64_t bytes_received() const override; - bool Send(const DataBuffer& buffer) override; - - // Close immediately, ignoring any queued data or closing procedure. - // This is called when SDP indicates a channel should be removed. - void CloseAbruptlyWithError(RTCError error); - - // Called when the channel's ready to use. That can happen when the - // underlying DataMediaChannel becomes ready, or when this channel is a new - // stream on an existing DataMediaChannel, and we've finished negotiation. - void OnChannelReady(bool writable); - - // Slots for provider to connect signals to. - void OnDataReceived(const cricket::ReceiveDataParams& params, - const rtc::CopyOnWriteBuffer& payload); - - // Called when the transport channel is unusable. - // This method makes sure the DataChannel is disconnected and changes state - // to kClosed. - void OnTransportChannelClosed(); - - DataChannelStats GetStats() const; - - // The remote peer requested that this channel should be closed. - void RemotePeerRequestClose(); - // Set the SSRC this channel should use to send data on the - // underlying data engine. |send_ssrc| == 0 means that the channel is no - // longer part of the session negotiation. - void SetSendSsrc(uint32_t send_ssrc); - // Set the SSRC this channel should use to receive data from the - // underlying data engine. - void SetReceiveSsrc(uint32_t receive_ssrc); - - // Emitted when state transitions to kOpen. - sigslot::signal1 SignalOpened; - // Emitted when state transitions to kClosed. 
- sigslot::signal1 SignalClosed; - - // Reset the allocator for internal ID values for testing, so that - // the internal IDs generated are predictable. Test only. - static void ResetInternalIdAllocatorForTesting(int new_value); - - protected: - RtpDataChannel(const DataChannelInit& config, - RtpDataChannelProviderInterface* client, - const std::string& label, - rtc::Thread* signaling_thread); - ~RtpDataChannel() override; - - private: - bool Init(); - void UpdateState(); - void SetState(DataState state); - void DisconnectFromProvider(); - - void DeliverQueuedReceivedData(); - - bool SendDataMessage(const DataBuffer& buffer); - - rtc::Thread* const signaling_thread_; - const int internal_id_; - const std::string label_; - const DataChannelInit config_; - DataChannelObserver* observer_ RTC_GUARDED_BY(signaling_thread_) = nullptr; - DataState state_ RTC_GUARDED_BY(signaling_thread_) = kConnecting; - RTCError error_ RTC_GUARDED_BY(signaling_thread_); - uint32_t messages_sent_ RTC_GUARDED_BY(signaling_thread_) = 0; - uint64_t bytes_sent_ RTC_GUARDED_BY(signaling_thread_) = 0; - uint32_t messages_received_ RTC_GUARDED_BY(signaling_thread_) = 0; - uint64_t bytes_received_ RTC_GUARDED_BY(signaling_thread_) = 0; - RtpDataChannelProviderInterface* const provider_; - bool connected_to_provider_ RTC_GUARDED_BY(signaling_thread_) = false; - bool send_ssrc_set_ RTC_GUARDED_BY(signaling_thread_) = false; - bool receive_ssrc_set_ RTC_GUARDED_BY(signaling_thread_) = false; - bool writable_ RTC_GUARDED_BY(signaling_thread_) = false; - uint32_t send_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0; - uint32_t receive_ssrc_ RTC_GUARDED_BY(signaling_thread_) = 0; - PacketQueue queued_received_data_ RTC_GUARDED_BY(signaling_thread_); -}; - -} // namespace webrtc - -#endif // PC_RTP_DATA_CHANNEL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h index f556fe397..d45cc744a 100644 --- 
a/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h @@ -11,6 +11,7 @@ #ifndef PC_RTP_MEDIA_UTILS_H_ #define PC_RTP_MEDIA_UTILS_H_ +#include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" namespace webrtc { @@ -49,13 +50,13 @@ RtpTransceiverDirection RtpTransceiverDirectionIntersection( RtpTransceiverDirection lhs, RtpTransceiverDirection rhs); -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& os, // no-presubmit-check TODO(webrtc:8982) RtpTransceiverDirection direction) { return os << RtpTransceiverDirectionToString(direction); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc index 68a948ea8..8d3064ed9 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc @@ -10,10 +10,10 @@ #include "pc/rtp_parameters_conversion.h" +#include #include #include #include -#include #include #include "api/array_view.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc index f65afd7dc..88f32d88e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc @@ -16,12 +16,8 @@ #include #include "api/media_stream_proxy.h" -#include "api/media_stream_track_proxy.h" #include "pc/media_stream.h" -#include "rtc_base/checks.h" #include "rtc_base/location.h" -#include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" namespace webrtc { @@ -43,20 +39,4 @@ RtpReceiverInternal::CreateStreamsFromIds(std::vector stream_ids) { return streams; } -// Attempt to attach the frame decryptor to the current media channel on the -// correct worker thread only if both the 
media channel exists and a ssrc has -// been allocated to the stream. -void RtpReceiverInternal::MaybeAttachFrameDecryptorToMediaChannel( - const absl::optional& ssrc, - rtc::Thread* worker_thread, - rtc::scoped_refptr frame_decryptor, - cricket::MediaChannel* media_channel, - bool stopped) { - if (media_channel && frame_decryptor && ssrc.has_value() && !stopped) { - worker_thread->Invoke(RTC_FROM_HERE, [&] { - media_channel->SetFrameDecryptor(*ssrc, frame_decryptor); - }); - } -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h index 2cfccd4e6..73fc5b985 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h @@ -22,6 +22,7 @@ #include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/rtp_parameters.h" @@ -91,13 +92,6 @@ class RtpReceiverInternal : public RtpReceiverInterface { static std::vector> CreateStreamsFromIds(std::vector stream_ids); - - static void MaybeAttachFrameDecryptorToMediaChannel( - const absl::optional& ssrc, - rtc::Thread* worker_thread, - rtc::scoped_refptr frame_decryptor, - cricket::MediaChannel* media_channel, - bool stopped); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc index 0da6dfca8..aa268cef4 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc @@ -10,18 +10,22 @@ #include "pc/rtp_sender.h" +#include #include #include #include +#include "absl/algorithm/container.h" #include "api/audio_options.h" #include "api/media_stream_interface.h" +#include "api/priority.h" #include "media/base/media_engine.h" #include "pc/stats_collector_interface.h" #include "rtc_base/checks.h" #include "rtc_base/helpers.h" 
#include "rtc_base/location.h" #include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" #include "rtc_base/trace_event.h" namespace webrtc { @@ -405,6 +409,7 @@ void LocalAudioSinkAdapter::OnData( if (sink_) { sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, number_of_frames, absolute_capture_timestamp_ms); + num_preferred_channels_ = sink_->NumPreferredChannels(); } } @@ -419,9 +424,8 @@ rtc::scoped_refptr AudioRtpSender::Create( const std::string& id, StatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer) { - return rtc::scoped_refptr( - new rtc::RefCountedObject(worker_thread, id, stats, - set_streams_observer)); + return rtc::make_ref_counted(worker_thread, id, stats, + set_streams_observer); } AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread, @@ -566,9 +570,8 @@ rtc::scoped_refptr VideoRtpSender::Create( rtc::Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer) { - return rtc::scoped_refptr( - new rtc::RefCountedObject(worker_thread, id, - set_streams_observer)); + return rtc::make_ref_counted(worker_thread, id, + set_streams_observer); } VideoRtpSender::VideoRtpSender(rtc::Thread* worker_thread, diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h index c2fe91f01..0b4c20490 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h @@ -15,16 +15,30 @@ #ifndef PC_RTP_SENDER_H_ #define PC_RTP_SENDER_H_ +#include +#include #include #include #include +#include "absl/types/optional.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/dtmf_sender_interface.h" +#include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" +#include 
"api/scoped_refptr.h" #include "media/base/audio_source.h" #include "media/base/media_channel.h" #include "pc/dtmf_sender.h" +#include "pc/stats_collector_interface.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" namespace webrtc { @@ -237,12 +251,16 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface, /*absolute_capture_timestamp_ms=*/absl::nullopt); } + // AudioSinkInterface implementation. + int NumPreferredChannels() const override { return num_preferred_channels_; } + // cricket::AudioSource implementation. void SetSink(cricket::AudioSource::Sink* sink) override; cricket::AudioSource::Sink* sink_; // Critical section protecting |sink_|. Mutex lock_; + int num_preferred_channels_ = -1; }; class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc index 6b3032e27..a78b9d6be 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc @@ -10,16 +10,23 @@ #include "pc/rtp_transceiver.h" +#include #include #include +#include #include "absl/algorithm/container.h" #include "api/rtp_parameters.h" +#include "api/sequence_checker.h" +#include "media/base/codec.h" +#include "media/base/media_constants.h" #include "pc/channel_manager.h" #include "pc/rtp_media_utils.h" -#include "pc/rtp_parameters_conversion.h" +#include "pc/session_description.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/thread.h" namespace webrtc { namespace { @@ -106,12 +113,16 @@ TaskQueueBase* GetCurrentTaskQueueOrThread() { } // namespace -RtpTransceiver::RtpTransceiver(cricket::MediaType media_type) +RtpTransceiver::RtpTransceiver( + cricket::MediaType media_type, + cricket::ChannelManager* channel_manager /* = nullptr*/) : 
thread_(GetCurrentTaskQueueOrThread()), unified_plan_(false), - media_type_(media_type) { + media_type_(media_type), + channel_manager_(channel_manager) { RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || media_type == cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK(channel_manager_); } RtpTransceiver::RtpTransceiver( @@ -130,52 +141,86 @@ RtpTransceiver::RtpTransceiver( RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO || media_type_ == cricket::MEDIA_TYPE_VIDEO); RTC_DCHECK_EQ(sender->media_type(), receiver->media_type()); + RTC_DCHECK(channel_manager_); senders_.push_back(sender); receivers_.push_back(receiver); } RtpTransceiver::~RtpTransceiver() { - StopInternal(); + // TODO(tommi): On Android, when running PeerConnectionClientTest (e.g. + // PeerConnectionClientTest#testCameraSwitch), the instance doesn't get + // deleted on `thread_`. See if we can fix that. + if (!stopped_) { + RTC_DCHECK_RUN_ON(thread_); + StopInternal(); + } } void RtpTransceiver::SetChannel(cricket::ChannelInterface* channel) { + RTC_DCHECK_RUN_ON(thread_); // Cannot set a non-null channel on a stopped transceiver. if (stopped_ && channel) { return; } + RTC_DCHECK(channel || channel_); + + RTC_LOG_THREAD_BLOCK_COUNT(); + + if (channel_) { + signaling_thread_safety_->SetNotAlive(); + signaling_thread_safety_ = nullptr; + } + if (channel) { RTC_DCHECK_EQ(media_type(), channel->media_type()); + signaling_thread_safety_ = PendingTaskSafetyFlag::Create(); } - if (channel_) { - channel_->SignalFirstPacketReceived().disconnect(this); - } + // An alternative to this, could be to require SetChannel to be called + // on the network thread. The channel object operates for the most part + // on the network thread, as part of its initialization being on the network + // thread is required, so setting a channel object as part of the construction + // (without thread hopping) might be the more efficient thing to do than + // how SetChannel works today. 
+ // Similarly, if the channel() accessor is limited to the network thread, that + // helps with keeping the channel implementation requirements being met and + // avoids synchronization for accessing the pointer or network related state. + channel_manager_->network_thread()->Invoke(RTC_FROM_HERE, [&]() { + if (channel_) { + channel_->SetFirstPacketReceivedCallback(nullptr); + } - channel_ = channel; + channel_ = channel; - if (channel_) { - channel_->SignalFirstPacketReceived().connect( - this, &RtpTransceiver::OnFirstPacketReceived); - } + if (channel_) { + channel_->SetFirstPacketReceivedCallback( + [thread = thread_, flag = signaling_thread_safety_, this]() mutable { + thread->PostTask(ToQueuedTask( + std::move(flag), [this]() { OnFirstPacketReceived(); })); + }); + } + }); for (const auto& sender : senders_) { sender->internal()->SetMediaChannel(channel_ ? channel_->media_channel() : nullptr); } + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); + for (const auto& receiver : receivers_) { if (!channel_) { receiver->internal()->Stop(); + } else { + receiver->internal()->SetMediaChannel(channel_->media_channel()); } - - receiver->internal()->SetMediaChannel(channel_ ? channel_->media_channel() - : nullptr); } } void RtpTransceiver::AddSender( rtc::scoped_refptr> sender) { + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(!stopped_); RTC_DCHECK(!unified_plan_); RTC_DCHECK(sender); @@ -201,6 +246,7 @@ bool RtpTransceiver::RemoveSender(RtpSenderInterface* sender) { void RtpTransceiver::AddReceiver( rtc::scoped_refptr> receiver) { + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(!stopped_); RTC_DCHECK(!unified_plan_); RTC_DCHECK(receiver); @@ -218,12 +264,8 @@ bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) { if (it == receivers_.end()) { return false; } + // `Stop()` will clear the internally cached pointer to the media channel. 
(*it)->internal()->Stop(); - // After the receiver has been removed, there's no guarantee that the - // contained media channel isn't deleted shortly after this. To make sure that - // the receiver doesn't spontaneously try to use it's (potentially stale) - // media channel reference, we clear it out. - (*it)->internal()->SetMediaChannel(nullptr); receivers_.erase(it); return true; } @@ -249,7 +291,7 @@ absl::optional RtpTransceiver::mid() const { return mid_; } -void RtpTransceiver::OnFirstPacketReceived(cricket::ChannelInterface*) { +void RtpTransceiver::OnFirstPacketReceived() { for (const auto& receiver : receivers_) { receiver->internal()->NotifyFirstPacketReceived(); } @@ -286,6 +328,7 @@ void RtpTransceiver::set_fired_direction(RtpTransceiverDirection direction) { } bool RtpTransceiver::stopped() const { + RTC_DCHECK_RUN_ON(thread_); return stopped_; } @@ -386,6 +429,7 @@ RTCError RtpTransceiver::StopStandard() { } void RtpTransceiver::StopInternal() { + RTC_DCHECK_RUN_ON(thread_); StopTransceiverProcedure(); } @@ -455,6 +499,16 @@ RtpTransceiver::HeaderExtensionsToOffer() const { return header_extensions_to_offer_; } +std::vector +RtpTransceiver::HeaderExtensionsNegotiated() const { + RTC_DCHECK_RUN_ON(thread_); + std::vector result; + for (const auto& ext : negotiated_header_extensions_) { + result.emplace_back(ext.uri, ext.id, RtpTransceiverDirection::kSendRecv); + } + return result; +} + RTCError RtpTransceiver::SetOfferedRtpHeaderExtensions( rtc::ArrayView header_extensions_to_offer) { @@ -472,7 +526,7 @@ RTCError RtpTransceiver::SetOfferedRtpHeaderExtensions( header_extensions_to_offer_.begin(), header_extensions_to_offer_.end(), [&entry](const auto& offered) { return entry.uri == offered.uri; }); if (it == header_extensions_to_offer_.end()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, + return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, "Attempted to modify an unoffered extension."); } @@ -499,6 +553,15 @@ RTCError 
RtpTransceiver::SetOfferedRtpHeaderExtensions( return RTCError::OK(); } +void RtpTransceiver::OnNegotiationUpdate( + SdpType sdp_type, + const cricket::MediaContentDescription* content) { + RTC_DCHECK_RUN_ON(thread_); + RTC_DCHECK(content); + if (sdp_type == SdpType::kAnswer) + negotiated_header_extensions_ = content->rtp_header_extensions(); +} + void RtpTransceiver::SetPeerConnectionClosed() { is_pc_closed_ = true; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h index 4d9716c89..35dea25a7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h @@ -11,14 +11,33 @@ #ifndef PC_RTP_TRANSCEIVER_H_ #define PC_RTP_TRANSCEIVER_H_ +#include + +#include +#include #include #include +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/media_types.h" +#include "api/proxy.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "pc/channel_interface.h" #include "pc/channel_manager.h" #include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -60,7 +79,8 @@ class RtpTransceiver final // channel set. // |media_type| specifies the type of RtpTransceiver (and, by transitivity, // the type of senders, receivers, and channel). Can either by audio or video. 
- explicit RtpTransceiver(cricket::MediaType media_type); + RtpTransceiver(cricket::MediaType media_type, + cricket::ChannelManager* channel_manager); // Construct a Unified Plan-style RtpTransceiver with the given sender and // receiver. The media type will be derived from the media types of the sender // and receiver. The sender and receiver should have the same media type. @@ -207,25 +227,38 @@ class RtpTransceiver final } std::vector HeaderExtensionsToOffer() const override; + std::vector HeaderExtensionsNegotiated() + const override; RTCError SetOfferedRtpHeaderExtensions( rtc::ArrayView header_extensions_to_offer) override; + // Called on the signaling thread when the local or remote content description + // is updated. Used to update the negotiated header extensions. + // TODO(tommi): The implementation of this method is currently very simple and + // only used for updating the negotiated headers. However, we're planning to + // move all the updates done on the channel from the transceiver into this + // method. This will happen with the ownership of the channel object being + // moved into the transceiver. + void OnNegotiationUpdate(SdpType sdp_type, + const cricket::MediaContentDescription* content); + private: - void OnFirstPacketReceived(cricket::ChannelInterface* channel); + void OnFirstPacketReceived(); void StopSendingAndReceiving(); // Enforce that this object is created, used and destroyed on one thread. 
- const TaskQueueBase* thread_; + TaskQueueBase* const thread_; const bool unified_plan_; const cricket::MediaType media_type_; + rtc::scoped_refptr signaling_thread_safety_; std::vector>> senders_; std::vector< rtc::scoped_refptr>> receivers_; - bool stopped_ = false; + bool stopped_ RTC_GUARDED_BY(thread_) = false; bool stopping_ RTC_GUARDED_BY(thread_) = false; bool is_pc_closed_ = false; RtpTransceiverDirection direction_ = RtpTransceiverDirection::kInactive; @@ -241,11 +274,19 @@ class RtpTransceiver final cricket::ChannelManager* channel_manager_ = nullptr; std::vector codec_preferences_; std::vector header_extensions_to_offer_; + + // |negotiated_header_extensions_| is read and written to on the signaling + // thread from the SdpOfferAnswerHandler class (e.g. + // PushdownMediaDescription(). + cricket::RtpHeaderExtensions negotiated_header_extensions_ + RTC_GUARDED_BY(thread_); + const std::function on_negotiation_needed_; }; -BEGIN_SIGNALING_PROXY_MAP(RtpTransceiver) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +BEGIN_PRIMARY_PROXY_MAP(RtpTransceiver) + +PROXY_PRIMARY_THREAD_DESTRUCTOR() BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) PROXY_CONSTMETHOD0(absl::optional, mid) PROXY_CONSTMETHOD0(rtc::scoped_refptr, sender) @@ -264,6 +305,8 @@ PROXY_METHOD1(webrtc::RTCError, PROXY_CONSTMETHOD0(std::vector, codec_preferences) PROXY_CONSTMETHOD0(std::vector, HeaderExtensionsToOffer) +PROXY_CONSTMETHOD0(std::vector, + HeaderExtensionsNegotiated) PROXY_METHOD1(webrtc::RTCError, SetOfferedRtpHeaderExtensions, rtc::ArrayView) diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc index e796f9b1b..9040a6969 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc @@ -11,6 +11,7 @@ #include "pc/rtp_transmission_manager.h" #include +#include #include "absl/types/optional.h" #include 
"api/peer_connection_interface.h" @@ -240,14 +241,17 @@ RtpTransmissionManager::CreateReceiver(cricket::MediaType media_type, receiver; if (media_type == cricket::MEDIA_TYPE_AUDIO) { receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), new AudioRtpReceiver(worker_thread(), receiver_id, - std::vector({}))); + signaling_thread(), worker_thread(), + rtc::make_ref_counted(worker_thread(), receiver_id, + std::vector({}), + IsUnifiedPlan())); NoteUsageEvent(UsageEvent::AUDIO_ADDED); } else { RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), new VideoRtpReceiver(worker_thread(), receiver_id, - std::vector({}))); + signaling_thread(), worker_thread(), + rtc::make_ref_counted(worker_thread(), receiver_id, + std::vector({}))); NoteUsageEvent(UsageEvent::VIDEO_ADDED); } return receiver; @@ -452,8 +456,8 @@ void RtpTransmissionManager::CreateAudioReceiver( streams.push_back(rtc::scoped_refptr(stream)); // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use // the constructor taking stream IDs instead. 
- auto* audio_receiver = new AudioRtpReceiver( - worker_thread(), remote_sender_info.sender_id, streams); + auto audio_receiver = rtc::make_ref_counted( + worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan()); audio_receiver->SetMediaChannel(voice_media_channel()); if (remote_sender_info.sender_id == kDefaultAudioSenderId) { audio_receiver->SetupUnsignaledMediaChannel(); @@ -461,7 +465,7 @@ void RtpTransmissionManager::CreateAudioReceiver( audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); } auto receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), audio_receiver); + signaling_thread(), worker_thread(), std::move(audio_receiver)); GetAudioTransceiver()->internal()->AddReceiver(receiver); Observer()->OnAddTrack(receiver, streams); NoteUsageEvent(UsageEvent::AUDIO_ADDED); @@ -475,7 +479,7 @@ void RtpTransmissionManager::CreateVideoReceiver( streams.push_back(rtc::scoped_refptr(stream)); // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use // the constructor taking stream IDs instead. 
- auto* video_receiver = new VideoRtpReceiver( + auto video_receiver = rtc::make_ref_counted( worker_thread(), remote_sender_info.sender_id, streams); video_receiver->SetMediaChannel(video_media_channel()); if (remote_sender_info.sender_id == kDefaultVideoSenderId) { @@ -484,7 +488,7 @@ void RtpTransmissionManager::CreateVideoReceiver( video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); } auto receiver = RtpReceiverProxyWithInternal::Create( - signaling_thread(), video_receiver); + signaling_thread(), worker_thread(), std::move(video_receiver)); GetVideoTransceiver()->internal()->AddReceiver(receiver); Observer()->OnAddTrack(receiver, streams); NoteUsageEvent(UsageEvent::VIDEO_ADDED); diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h index 731c3b74d..fe0e3abdd 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h @@ -12,6 +12,7 @@ #define PC_RTP_TRANSMISSION_MANAGER_H_ #include + #include #include #include @@ -24,6 +25,7 @@ #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "media/base/media_channel.h" #include "pc/channel_manager.h" #include "pc/rtp_receiver.h" @@ -32,10 +34,10 @@ #include "pc/stats_collector_interface.h" #include "pc/transceiver_list.h" #include "pc/usage_pattern.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" +#include "rtc_base/weak_ptr.h" namespace rtc { class Thread; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc index 1f577e67e..6b7906345 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc @@ -11,12 
+11,11 @@ #include "pc/rtp_transport.h" #include - #include #include -#include "api/rtp_headers.h" -#include "api/rtp_parameters.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" @@ -182,30 +181,25 @@ bool RtpTransport::UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) { void RtpTransport::DemuxPacket(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) { - webrtc::RtpPacketReceived parsed_packet(&header_extension_map_); + webrtc::RtpPacketReceived parsed_packet( + &header_extension_map_, packet_time_us == -1 + ? Timestamp::MinusInfinity() + : Timestamp::Micros(packet_time_us)); if (!parsed_packet.Parse(packet)) { RTC_LOG(LS_ERROR) << "Failed to parse the incoming RTP packet before demuxing. Drop it."; return; } - if (packet_time_us != -1) { - parsed_packet.set_arrival_time_ms((packet_time_us + 500) / 1000); - } if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) { SignalRtpPacketReceived.emit(&packet, packet_time_us, true); RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: " << RtpDemuxer::DescribePacket(parsed_packet); - uint32_t ssrc = parsed_packet.Ssrc(); - OnErrorDemuxingPacket(ssrc); } else { SignalRtpPacketReceived.emit(&packet, packet_time_us, false); } } -void RtpTransport::OnErrorDemuxingPacket(uint32_t ssrc) { -} - bool RtpTransport::IsTransportWritable() { auto rtcp_packet_transport = rtcp_mux_enabled_ ? 
nullptr : rtcp_packet_transport_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h index 3191e852b..893d91e73 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h @@ -11,11 +11,22 @@ #ifndef PC_RTP_TRANSPORT_H_ #define PC_RTP_TRANSPORT_H_ +#include +#include + #include +#include "absl/types/optional.h" #include "call/rtp_demuxer.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "p2p/base/packet_transport_internal.h" #include "pc/rtp_transport_internal.h" +#include "pc/session_description.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace rtc { @@ -76,8 +87,6 @@ class RtpTransport : public RtpTransportInternal { bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override; - virtual void OnErrorDemuxingPacket(uint32_t ssrc); - protected: // These methods will be used in the subclasses. void DemuxPacket(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us); diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport_internal.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport_internal.h index 64c66510a..0febda2e8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport_internal.h @@ -57,12 +57,12 @@ class RtpTransportInternal : public sigslot::has_slots<> { // the RtpDemuxer callback. sigslot::signal2 SignalRtcpPacketReceived; - sigslot::signal3 SignalRtpPacketReceived; - // Called whenever the network route of the P2P layer transport changes. // The argument is an optional network route. 
sigslot::signal1> SignalNetworkRouteChanged; + sigslot::signal3 SignalRtpPacketReceived; + // Called whenever a transport's writable state might change. The argument is // true if the transport is writable, otherwise it is false. sigslot::signal1 SignalWritableState; diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc index c4357a8da..682d76829 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.cc @@ -10,6 +10,7 @@ #include "pc/sctp_data_channel.h" +#include #include #include #include @@ -38,8 +39,8 @@ int GenerateUniqueId() { } // Define proxy for DataChannelInterface. -BEGIN_SIGNALING_PROXY_MAP(DataChannel) -PROXY_SIGNALING_THREAD_DESTRUCTOR() +BEGIN_PRIMARY_PROXY_MAP(DataChannel) +PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*) PROXY_METHOD0(void, UnregisterObserver) BYPASS_PROXY_CONSTMETHOD0(std::string, label) @@ -78,17 +79,27 @@ InternalDataChannelInit::InternalDataChannelInit(const DataChannelInit& base) // Specified in createDataChannel, WebRTC spec section 6.1 bullet 13. id = -1; } - // Backwards compatibility: If base.maxRetransmits or base.maxRetransmitTime - // have been set to -1, unset them. - if (maxRetransmits && *maxRetransmits == -1) { - RTC_LOG(LS_ERROR) - << "Accepting maxRetransmits = -1 for backwards compatibility"; - maxRetransmits = absl::nullopt; + // Backwards compatibility: If maxRetransmits or maxRetransmitTime + // are negative, the feature is not enabled. + // Values are clamped to a 16bit range. 
+ if (maxRetransmits) { + if (*maxRetransmits < 0) { + RTC_LOG(LS_ERROR) + << "Accepting maxRetransmits < 0 for backwards compatibility"; + maxRetransmits = absl::nullopt; + } else if (*maxRetransmits > std::numeric_limits::max()) { + maxRetransmits = std::numeric_limits::max(); + } } - if (maxRetransmitTime && *maxRetransmitTime == -1) { - RTC_LOG(LS_ERROR) - << "Accepting maxRetransmitTime = -1 for backwards compatibility"; - maxRetransmitTime = absl::nullopt; + + if (maxRetransmitTime) { + if (*maxRetransmitTime < 0) { + RTC_LOG(LS_ERROR) + << "Accepting maxRetransmitTime < 0 for backwards compatibility"; + maxRetransmitTime = absl::nullopt; + } else if (*maxRetransmitTime > std::numeric_limits::max()) { + maxRetransmitTime = std::numeric_limits::max(); + } } } @@ -135,9 +146,8 @@ rtc::scoped_refptr SctpDataChannel::Create( const InternalDataChannelInit& config, rtc::Thread* signaling_thread, rtc::Thread* network_thread) { - rtc::scoped_refptr channel( - new rtc::RefCountedObject( - config, provider, label, signaling_thread, network_thread)); + auto channel = rtc::make_ref_counted( + config, provider, label, signaling_thread, network_thread); if (!channel->Init()) { return nullptr; } @@ -294,13 +304,6 @@ bool SctpDataChannel::Send(const DataBuffer& buffer) { return false; } - // TODO(jiayl): the spec is unclear about if the remote side should get the - // onmessage event. We need to figure out the expected behavior and change the - // code accordingly. - if (buffer.size() == 0) { - return true; - } - buffered_amount_ += buffer.size(); // If the queue is non-empty, we're waiting for SignalReadyToSend, @@ -403,7 +406,7 @@ void SctpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, return; } - if (params.type == cricket::DMT_CONTROL) { + if (params.type == DataMessageType::kControl) { if (handshake_state_ != kHandshakeWaitingForAck) { // Ignore it if we are not expecting an ACK message. 
RTC_LOG(LS_WARNING) @@ -424,8 +427,8 @@ void SctpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, return; } - RTC_DCHECK(params.type == cricket::DMT_BINARY || - params.type == cricket::DMT_TEXT); + RTC_DCHECK(params.type == DataMessageType::kBinary || + params.type == DataMessageType::kText); RTC_LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = " << params.sid; @@ -436,7 +439,7 @@ void SctpDataChannel::OnDataReceived(const cricket::ReceiveDataParams& params, handshake_state_ = kHandshakeReady; } - bool binary = (params.type == cricket::DMT_BINARY); + bool binary = (params.type == webrtc::DataMessageType::kBinary); auto buffer = std::make_unique(payload, binary); if (state_ == kOpen && observer_) { ++messages_received_; @@ -617,7 +620,7 @@ void SctpDataChannel::SendQueuedDataMessages() { bool SctpDataChannel::SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked) { RTC_DCHECK_RUN_ON(signaling_thread_); - cricket::SendDataParams send_params; + SendDataParams send_params; send_params.ordered = config_.ordered; // Send as ordered if it is still going through OPEN/ACK signaling. @@ -628,15 +631,14 @@ bool SctpDataChannel::SendDataMessage(const DataBuffer& buffer, "because the OPEN_ACK message has not been received."; } - send_params.max_rtx_count = - config_.maxRetransmits ? *config_.maxRetransmits : -1; - send_params.max_rtx_ms = - config_.maxRetransmitTime ? *config_.maxRetransmitTime : -1; - send_params.sid = config_.id; - send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT; + send_params.max_rtx_count = config_.maxRetransmits; + send_params.max_rtx_ms = config_.maxRetransmitTime; + send_params.type = + buffer.binary ? 
DataMessageType::kBinary : DataMessageType::kText; cricket::SendDataResult send_result = cricket::SDR_SUCCESS; - bool success = provider_->SendData(send_params, buffer.data, &send_result); + bool success = + provider_->SendData(config_.id, send_params, buffer.data, &send_result); if (success) { ++messages_sent_; @@ -702,16 +704,16 @@ bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) { bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen; RTC_DCHECK(!is_open_message || !config_.negotiated); - cricket::SendDataParams send_params; - send_params.sid = config_.id; + SendDataParams send_params; // Send data as ordered before we receive any message from the remote peer to // make sure the remote peer will not receive any data before it receives the // OPEN message. send_params.ordered = config_.ordered || is_open_message; - send_params.type = cricket::DMT_CONTROL; + send_params.type = DataMessageType::kControl; cricket::SendDataResult send_result = cricket::SDR_SUCCESS; - bool retval = provider_->SendData(send_params, buffer, &send_result); + bool retval = + provider_->SendData(config_.id, send_params, buffer, &send_result); if (retval) { RTC_LOG(LS_VERBOSE) << "Sent CONTROL message on channel " << config_.id; diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h index 6d121e6f8..1d7a3c73f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel.h @@ -11,18 +11,25 @@ #ifndef PC_SCTP_DATA_CHANNEL_H_ #define PC_SCTP_DATA_CHANNEL_H_ +#include + #include #include #include +#include "absl/types/optional.h" #include "api/data_channel_interface.h" #include "api/priority.h" +#include "api/rtc_error.h" #include "api/scoped_refptr.h" #include "api/transport/data_channel_transport_interface.h" #include "media/base/media_channel.h" #include "pc/data_channel_utils.h" +#include "rtc_base/copy_on_write_buffer.h" 
#include "rtc_base/ssl_stream_adapter.h" // For SSLRole #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -33,7 +40,8 @@ class SctpDataChannel; class SctpDataChannelProviderInterface { public: // Sends the data to the transport. - virtual bool SendData(const cricket::SendDataParams& params, + virtual bool SendData(int sid, + const SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) = 0; // Connects to the transport signals. diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc index 497e11fcc..bb81156a2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc @@ -9,6 +9,8 @@ */ #include "pc/sctp_data_channel_transport.h" + +#include "absl/types/optional.h" #include "pc/sctp_utils.h" namespace webrtc { @@ -37,18 +39,8 @@ RTCError SctpDataChannelTransport::SendData( int channel_id, const SendDataParams& params, const rtc::CopyOnWriteBuffer& buffer) { - // Map webrtc::SendDataParams to cricket::SendDataParams. - // TODO(mellem): See about unifying these structs. - cricket::SendDataParams sd_params; - sd_params.sid = channel_id; - sd_params.type = ToCricketDataMessageType(params.type); - sd_params.ordered = params.ordered; - sd_params.reliable = !(params.max_rtx_count || params.max_rtx_ms); - sd_params.max_rtx_count = params.max_rtx_count.value_or(-1); - sd_params.max_rtx_ms = params.max_rtx_ms.value_or(-1); - cricket::SendDataResult result; - sctp_transport_->SendData(sd_params, buffer, &result); + sctp_transport_->SendData(channel_id, params, buffer, &result); // TODO(mellem): See about changing the interfaces to not require mapping // SendDataResult to RTCError and back again. 
@@ -93,8 +85,7 @@ void SctpDataChannelTransport::OnDataReceived( const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) { if (sink_) { - sink_->OnDataReceived(params.sid, ToWebrtcDataMessageType(params.type), - buffer); + sink_->OnDataReceived(params.sid, params.type, buffer); } } diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.h index 623a49005..30818abc4 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.h @@ -11,8 +11,11 @@ #ifndef PC_SCTP_DATA_CHANNEL_TRANSPORT_H_ #define PC_SCTP_DATA_CHANNEL_TRANSPORT_H_ +#include "api/rtc_error.h" #include "api/transport/data_channel_transport_interface.h" +#include "media/base/media_channel.h" #include "media/sctp/sctp_transport_internal.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.cc index ea1165f94..14a09d77e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.cc @@ -13,6 +13,12 @@ #include #include +#include "absl/types/optional.h" +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" + namespace webrtc { SctpTransport::SctpTransport( @@ -39,7 +45,15 @@ SctpTransport::~SctpTransport() { } SctpTransportInformation SctpTransport::Information() const { - MutexLock lock(&lock_); + // TODO(tommi): Update PeerConnection::GetSctpTransport to hand out a proxy + // to the transport so that we can be sure that methods get called on the + // expected thread. Chromium currently calls this method from + // TransceiverStateSurfacer. 
+ if (!owner_thread_->IsCurrent()) { + return owner_thread_->Invoke( + RTC_FROM_HERE, [this] { return Information(); }); + } + RTC_DCHECK_RUN_ON(owner_thread_); return info_; } @@ -65,102 +79,82 @@ rtc::scoped_refptr SctpTransport::dtls_transport() void SctpTransport::Clear() { RTC_DCHECK_RUN_ON(owner_thread_); RTC_DCHECK(internal()); - { - MutexLock lock(&lock_); - // Note that we delete internal_sctp_transport_, but - // only drop the reference to dtls_transport_. - dtls_transport_ = nullptr; - internal_sctp_transport_ = nullptr; - } + // Note that we delete internal_sctp_transport_, but + // only drop the reference to dtls_transport_. + dtls_transport_ = nullptr; + internal_sctp_transport_ = nullptr; UpdateInformation(SctpTransportState::kClosed); } void SctpTransport::SetDtlsTransport( rtc::scoped_refptr transport) { RTC_DCHECK_RUN_ON(owner_thread_); - SctpTransportState next_state; - { - MutexLock lock(&lock_); - next_state = info_.state(); - dtls_transport_ = transport; - if (internal_sctp_transport_) { - if (transport) { - internal_sctp_transport_->SetDtlsTransport(transport->internal()); - transport->internal()->SignalDtlsState.connect( - this, &SctpTransport::OnDtlsStateChange); - if (info_.state() == SctpTransportState::kNew) { - next_state = SctpTransportState::kConnecting; - } - } else { - internal_sctp_transport_->SetDtlsTransport(nullptr); + SctpTransportState next_state = info_.state(); + dtls_transport_ = transport; + if (internal_sctp_transport_) { + if (transport) { + internal_sctp_transport_->SetDtlsTransport(transport->internal()); + + transport->internal()->SubscribeDtlsState( + [this](cricket::DtlsTransportInternal* transport, + cricket::DtlsTransportState state) { + OnDtlsStateChange(transport, state); + }); + if (info_.state() == SctpTransportState::kNew) { + next_state = SctpTransportState::kConnecting; } + } else { + internal_sctp_transport_->SetDtlsTransport(nullptr); } } + UpdateInformation(next_state); } void SctpTransport::Start(int 
local_port, int remote_port, int max_message_size) { - { - MutexLock lock(&lock_); - // Record max message size on calling thread. - info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(), - max_message_size, info_.MaxChannels()); - } - if (owner_thread_->IsCurrent()) { - if (!internal()->Start(local_port, remote_port, max_message_size)) { - RTC_LOG(LS_ERROR) << "Failed to push down SCTP parameters, closing."; - UpdateInformation(SctpTransportState::kClosed); - } - } else { - owner_thread_->Invoke( - RTC_FROM_HERE, rtc::Bind(&SctpTransport::Start, this, local_port, - remote_port, max_message_size)); + RTC_DCHECK_RUN_ON(owner_thread_); + info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(), + max_message_size, info_.MaxChannels()); + + if (!internal()->Start(local_port, remote_port, max_message_size)) { + RTC_LOG(LS_ERROR) << "Failed to push down SCTP parameters, closing."; + UpdateInformation(SctpTransportState::kClosed); } } void SctpTransport::UpdateInformation(SctpTransportState state) { RTC_DCHECK_RUN_ON(owner_thread_); - bool must_send_update; - SctpTransportInformation info_copy(SctpTransportState::kNew); - { - MutexLock lock(&lock_); - must_send_update = (state != info_.state()); - // TODO(https://bugs.webrtc.org/10358): Update max channels from internal - // SCTP transport when available. - if (internal_sctp_transport_) { - info_ = SctpTransportInformation( - state, dtls_transport_, info_.MaxMessageSize(), info_.MaxChannels()); - } else { - info_ = SctpTransportInformation( - state, dtls_transport_, info_.MaxMessageSize(), info_.MaxChannels()); - } - if (observer_ && must_send_update) { - info_copy = info_; - } + bool must_send_update = (state != info_.state()); + // TODO(https://bugs.webrtc.org/10358): Update max channels from internal + // SCTP transport when available. 
+ if (internal_sctp_transport_) { + info_ = SctpTransportInformation( + state, dtls_transport_, info_.MaxMessageSize(), info_.MaxChannels()); + } else { + info_ = SctpTransportInformation( + state, dtls_transport_, info_.MaxMessageSize(), info_.MaxChannels()); } - // We call the observer without holding the lock. + if (observer_ && must_send_update) { - observer_->OnStateChange(info_copy); + observer_->OnStateChange(info_); } } void SctpTransport::OnAssociationChangeCommunicationUp() { RTC_DCHECK_RUN_ON(owner_thread_); - { - MutexLock lock(&lock_); - RTC_DCHECK(internal_sctp_transport_); - if (internal_sctp_transport_->max_outbound_streams() && - internal_sctp_transport_->max_inbound_streams()) { - int max_channels = - std::min(*(internal_sctp_transport_->max_outbound_streams()), - *(internal_sctp_transport_->max_inbound_streams())); - // Record max channels. - info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(), - info_.MaxMessageSize(), max_channels); - } + RTC_DCHECK(internal_sctp_transport_); + if (internal_sctp_transport_->max_outbound_streams() && + internal_sctp_transport_->max_inbound_streams()) { + int max_channels = + std::min(*(internal_sctp_transport_->max_outbound_streams()), + *(internal_sctp_transport_->max_inbound_streams())); + // Record max channels. 
+ info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(), + info_.MaxMessageSize(), max_channels); } + UpdateInformation(SctpTransportState::kConnected); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h index a902ff02e..a8bc45b77 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h @@ -13,11 +13,15 @@ #include +#include "api/dtls_transport_interface.h" #include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" -#include "media/sctp/sctp_transport.h" +#include "media/sctp/sctp_transport_internal.h" +#include "p2p/base/dtls_transport_internal.h" #include "pc/dtls_transport.h" -#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -48,12 +52,12 @@ class SctpTransport : public SctpTransportInterface, // internal() to be functions on the webrtc::SctpTransport interface, // and make the internal() function private. cricket::SctpTransportInternal* internal() { - MutexLock lock(&lock_); + RTC_DCHECK_RUN_ON(owner_thread_); return internal_sctp_transport_.get(); } const cricket::SctpTransportInternal* internal() const { - MutexLock lock(&lock_); + RTC_DCHECK_RUN_ON(owner_thread_); return internal_sctp_transport_.get(); } @@ -69,15 +73,12 @@ class SctpTransport : public SctpTransportInterface, void OnDtlsStateChange(cricket::DtlsTransportInternal* transport, cricket::DtlsTransportState state); - // Note - owner_thread never changes, but can't be const if we do - // Invoke() on it. - rtc::Thread* owner_thread_; - mutable Mutex lock_; - // Variables accessible off-thread, guarded by lock_ - SctpTransportInformation info_ RTC_GUARDED_BY(lock_); + // NOTE: |owner_thread_| is the thread that the SctpTransport object is + // constructed on. 
In the context of PeerConnection, it's the network thread. + rtc::Thread* const owner_thread_; + SctpTransportInformation info_ RTC_GUARDED_BY(owner_thread_); std::unique_ptr internal_sctp_transport_ - RTC_GUARDED_BY(lock_); - // Variables only accessed on-thread + RTC_GUARDED_BY(owner_thread_); SctpTransportObserverInterface* observer_ RTC_GUARDED_BY(owner_thread_) = nullptr; rtc::scoped_refptr dtls_transport_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc index 1882a1525..f7458405e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc @@ -13,8 +13,10 @@ #include #include +#include "absl/types/optional.h" #include "api/priority.h" #include "rtc_base/byte_buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" @@ -228,33 +230,4 @@ void WriteDataChannelOpenAckMessage(rtc::CopyOnWriteBuffer* payload) { payload->SetData(&data, sizeof(data)); } -cricket::DataMessageType ToCricketDataMessageType(DataMessageType type) { - switch (type) { - case DataMessageType::kText: - return cricket::DMT_TEXT; - case DataMessageType::kBinary: - return cricket::DMT_BINARY; - case DataMessageType::kControl: - return cricket::DMT_CONTROL; - default: - return cricket::DMT_NONE; - } - return cricket::DMT_NONE; -} - -DataMessageType ToWebrtcDataMessageType(cricket::DataMessageType type) { - switch (type) { - case cricket::DMT_TEXT: - return DataMessageType::kText; - case cricket::DMT_BINARY: - return DataMessageType::kBinary; - case cricket::DMT_CONTROL: - return DataMessageType::kControl; - case cricket::DMT_NONE: - default: - RTC_NOTREACHED(); - } - return DataMessageType::kControl; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.h index 339ef2116..da854458f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.h +++ 
b/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.h @@ -16,6 +16,7 @@ #include "api/data_channel_interface.h" #include "api/transport/data_channel_transport_interface.h" #include "media/base/media_channel.h" +#include "rtc_base/copy_on_write_buffer.h" namespace rtc { class CopyOnWriteBuffer; @@ -39,10 +40,6 @@ bool WriteDataChannelOpenMessage(const std::string& label, void WriteDataChannelOpenAckMessage(rtc::CopyOnWriteBuffer* payload); -cricket::DataMessageType ToCricketDataMessageType(DataMessageType type); - -DataMessageType ToWebrtcDataMessageType(cricket::DataMessageType type); - } // namespace webrtc #endif // PC_SCTP_UTILS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc index fd697ce8b..c04eda0ef 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc @@ -22,45 +22,39 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/crypto/crypto_options.h" -#include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" #include "api/media_stream_proxy.h" #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" -#include "api/uma_metrics.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "media/base/codec.h" #include "media/base/media_engine.h" #include "media/base/rid_description.h" +#include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/port.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" #include "p2p/base/transport_info.h" -#include "pc/connection_context.h" #include "pc/data_channel_utils.h" -#include "pc/media_protocol_names.h" +#include "pc/dtls_transport.h" #include "pc/media_stream.h" #include "pc/peer_connection.h" #include "pc/peer_connection_message_handler.h" 
-#include "pc/rtp_data_channel.h" #include "pc/rtp_media_utils.h" #include "pc/rtp_sender.h" #include "pc/rtp_transport_internal.h" -#include "pc/sctp_transport.h" #include "pc/simulcast_description.h" #include "pc/stats_collector.h" #include "pc/usage_pattern.h" #include "pc/webrtc_session_description_factory.h" -#include "rtc_base/bind.h" #include "rtc_base/helpers.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/rtc_certificate.h" -#include "rtc_base/socket_address.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" @@ -170,6 +164,19 @@ void NoteKeyProtocolAndMedia(KeyExchangeProtocolType protocol_type, } } +std::map GetBundleGroupsByMid( + const SessionDescription* desc) { + std::vector bundle_groups = + desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + std::map bundle_groups_by_mid; + for (const cricket::ContentGroup* bundle_group : bundle_groups) { + for (const std::string& content_name : bundle_group->content_names()) { + bundle_groups_by_mid[content_name] = bundle_group; + } + } + return bundle_groups_by_mid; +} + // Returns true if |new_desc| requests an ICE restart (i.e., new ufrag/pwd). bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc, const SessionDescriptionInterface* new_desc, @@ -253,7 +260,7 @@ void ReportSimulcastApiVersion(const char* name, } const ContentInfo* FindTransceiverMSection( - RtpTransceiverProxyWithInternal* transceiver, + RtpTransceiver* transceiver, const SessionDescriptionInterface* session_description) { return transceiver->mid() ? session_description->description()->GetContentByName( @@ -340,9 +347,10 @@ bool MediaSectionsHaveSameCount(const SessionDescription& desc1, // needs a ufrag and pwd. 
Mismatches, such as replying with a DTLS fingerprint // to SDES keys, will be caught in JsepTransport negotiation, and backstopped // by Channel's |srtp_required| check. -RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) { - const cricket::ContentGroup* bundle = - desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); +RTCError VerifyCrypto(const SessionDescription* desc, + bool dtls_enabled, + const std::map& + bundle_groups_by_mid) { for (const cricket::ContentInfo& content_info : desc->contents()) { if (content_info.rejected) { continue; @@ -352,8 +360,10 @@ RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) { : webrtc::kEnumCounterKeyProtocolSdes, content_info.media_description()->type()); const std::string& mid = content_info.name; - if (bundle && bundle->HasContentName(mid) && - mid != *(bundle->FirstContentName())) { + auto it = bundle_groups_by_mid.find(mid); + const cricket::ContentGroup* bundle = + it != bundle_groups_by_mid.end() ? it->second : nullptr; + if (bundle && mid != *(bundle->FirstContentName())) { // This isn't the first media section in the BUNDLE group, so it's not // required to have crypto attributes, since only the crypto attributes // from the first section actually get used. @@ -390,16 +400,19 @@ RTCError VerifyCrypto(const SessionDescription* desc, bool dtls_enabled) { // Checks that each non-rejected content has ice-ufrag and ice-pwd set, unless // it's in a BUNDLE group, in which case only the BUNDLE-tag section (first // media section/description in the BUNDLE group) needs a ufrag and pwd. 
-bool VerifyIceUfragPwdPresent(const SessionDescription* desc) { - const cricket::ContentGroup* bundle = - desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); +bool VerifyIceUfragPwdPresent( + const SessionDescription* desc, + const std::map& + bundle_groups_by_mid) { for (const cricket::ContentInfo& content_info : desc->contents()) { if (content_info.rejected) { continue; } const std::string& mid = content_info.name; - if (bundle && bundle->HasContentName(mid) && - mid != *(bundle->FirstContentName())) { + auto it = bundle_groups_by_mid.find(mid); + const cricket::ContentGroup* bundle = + it != bundle_groups_by_mid.end() ? it->second : nullptr; + if (bundle && mid != *(bundle->FirstContentName())) { // This isn't the first media section in the BUNDLE group, so it's not // required to have ufrag/password, since only the ufrag/password from // the first section actually get used. @@ -423,7 +436,7 @@ bool VerifyIceUfragPwdPresent(const SessionDescription* desc) { return true; } -static RTCError ValidateMids(const cricket::SessionDescription& description) { +RTCError ValidateMids(const cricket::SessionDescription& description) { std::set mids; for (const cricket::ContentInfo& content : description.contents()) { if (content.name.empty()) { @@ -475,7 +488,7 @@ std::string GetSignalingStateString( // This method will extract any send encodings that were sent by the remote // connection. This is currently only relevant for Simulcast scenario (where // the number of layers may be communicated by the server). 
-static std::vector GetSendEncodingsFromRemoteDescription( +std::vector GetSendEncodingsFromRemoteDescription( const MediaContentDescription& desc) { if (!desc.HasSimulcast()) { return {}; @@ -499,7 +512,7 @@ static std::vector GetSendEncodingsFromRemoteDescription( return result; } -static RTCError UpdateSimulcastLayerStatusInSender( +RTCError UpdateSimulcastLayerStatusInSender( const std::vector& layers, rtc::scoped_refptr sender) { RTC_DCHECK(sender); @@ -530,9 +543,8 @@ static RTCError UpdateSimulcastLayerStatusInSender( return result; } -static bool SimulcastIsRejected( - const ContentInfo* local_content, - const MediaContentDescription& answer_media_desc) { +bool SimulcastIsRejected(const ContentInfo* local_content, + const MediaContentDescription& answer_media_desc) { bool simulcast_offered = local_content && local_content->media_description() && local_content->media_description()->HasSimulcast(); @@ -542,7 +554,7 @@ static bool SimulcastIsRejected( return simulcast_offered && (!simulcast_answered || !rids_supported); } -static RTCError DisableSimulcastInSender( +RTCError DisableSimulcastInSender( rtc::scoped_refptr sender) { RTC_DCHECK(sender); RtpParameters parameters = sender->GetParametersInternal(); @@ -560,7 +572,7 @@ static RTCError DisableSimulcastInSender( // The SDP parser used to populate these values by default for the 'content // name' if an a=mid line was absent. 
-static absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) { +absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) { switch (media_type) { case cricket::MEDIA_TYPE_AUDIO: return cricket::CN_AUDIO; @@ -599,10 +611,8 @@ void AddPlanBRtpSenderOptions( } } -static cricket::MediaDescriptionOptions -GetMediaDescriptionOptionsForTransceiver( - rtc::scoped_refptr> - transceiver, +cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForTransceiver( + RtpTransceiver* transceiver, const std::string& mid, bool is_create_offer) { // NOTE: a stopping transceiver should be treated as a stopped one in @@ -622,7 +632,7 @@ GetMediaDescriptionOptionsForTransceiver( // 2. If the MSID is included, then it must be included in any subsequent // offer/answer exactly the same until the RtpTransceiver is stopped. if (stopped || (!RtpTransceiverDirectionHasSend(transceiver->direction()) && - !transceiver->internal()->has_ever_been_used_to_send())) { + !transceiver->has_ever_been_used_to_send())) { return media_description_options; } @@ -633,7 +643,7 @@ GetMediaDescriptionOptionsForTransceiver( // The following sets up RIDs and Simulcast. // RIDs are included if Simulcast is requested or if any RID was specified. RtpParameters send_parameters = - transceiver->internal()->sender_internal()->GetParametersInternal(); + transceiver->sender_internal()->GetParametersInternal(); bool has_rids = std::any_of(send_parameters.encodings.begin(), send_parameters.encodings.end(), [](const RtpEncodingParameters& encoding) { @@ -665,9 +675,8 @@ GetMediaDescriptionOptionsForTransceiver( } // Returns the ContentInfo at mline index |i|, or null if none exists. 
-static const ContentInfo* GetContentByIndex( - const SessionDescriptionInterface* sdesc, - size_t i) { +const ContentInfo* GetContentByIndex(const SessionDescriptionInterface* sdesc, + size_t i) { if (!sdesc) { return nullptr; } @@ -696,27 +705,6 @@ std::string GenerateRtcpCname() { return cname; } -// Add options to |session_options| from |rtp_data_channels|. -void AddRtpDataChannelOptions( - const std::map>& - rtp_data_channels, - cricket::MediaDescriptionOptions* data_media_description_options) { - if (!data_media_description_options) { - return; - } - // Check for data channels. - for (const auto& kv : rtp_data_channels) { - const RtpDataChannel* channel = kv.second; - if (channel->state() == RtpDataChannel::kConnecting || - channel->state() == RtpDataChannel::kOpen) { - // Legacy RTP data channels are signaled with the track/stream ID set to - // the data channel's label. - data_media_description_options->AddRtpDataChannel(channel->label(), - channel->label()); - } - } -} - // Check if we can send |new_stream| on a PeerConnection. bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams, webrtc::MediaStreamInterface* new_stream) { @@ -731,6 +719,21 @@ bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams, return true; } +rtc::scoped_refptr LookupDtlsTransportByMid( + rtc::Thread* network_thread, + JsepTransportController* controller, + const std::string& mid) { + // TODO(tommi): Can we post this (and associated operations where this + // function is called) to the network thread and avoid this Invoke? + // We might be able to simplify a few things if we set the transport on + // the network thread and then update the implementation to check that + // the set_ and relevant get methods are always called on the network + // thread (we'll need to update proxy maps). 
+ return network_thread->Invoke>( + RTC_FROM_HERE, + [controller, &mid] { return controller->LookupDtlsTransportByMid(mid); }); +} + } // namespace // Used by parameterless SetLocalDescription() to create an offer or answer. @@ -1241,7 +1244,9 @@ void SdpOfferAnswerHandler::SetLocalDescription( } RTCError SdpOfferAnswerHandler::ApplyLocalDescription( - std::unique_ptr desc) { + std::unique_ptr desc, + const std::map& + bundle_groups_by_mid) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(desc); @@ -1295,13 +1300,14 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( if (IsUnifiedPlan()) { RTCError error = UpdateTransceiversAndDataChannels( cricket::CS_LOCAL, *local_description(), old_local_description, - remote_description()); + remote_description(), bundle_groups_by_mid); if (!error.ok()) { return error; } std::vector> remove_list; std::vector> removed_streams; - for (const auto& transceiver : transceivers()->List()) { + for (const auto& transceiver_ext : transceivers()->List()) { + auto transceiver = transceiver_ext->internal(); if (transceiver->stopped()) { continue; } @@ -1310,12 +1316,10 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // Note that code paths that don't set MID won't be able to use // information about DTLS transports. 
if (transceiver->mid()) { - auto dtls_transport = transport_controller()->LookupDtlsTransportByMid( - *transceiver->mid()); - transceiver->internal()->sender_internal()->set_transport( - dtls_transport); - transceiver->internal()->receiver_internal()->set_transport( - dtls_transport); + auto dtls_transport = LookupDtlsTransportByMid( + pc_->network_thread(), transport_controller(), *transceiver->mid()); + transceiver->sender_internal()->set_transport(dtls_transport); + transceiver->receiver_internal()->set_transport(dtls_transport); } const ContentInfo* content = @@ -1332,16 +1336,15 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // "recvonly", process the removal of a remote track for the media // description, given transceiver, removeList, and muteTracks. if (!RtpTransceiverDirectionHasRecv(media_desc->direction()) && - (transceiver->internal()->fired_direction() && - RtpTransceiverDirectionHasRecv( - *transceiver->internal()->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver, &remove_list, + (transceiver->fired_direction() && + RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { + ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list, &removed_streams); } // 2.2.7.1.6.2: Set transceiver's [[CurrentDirection]] and // [[FiredDirection]] slots to direction. 
- transceiver->internal()->set_current_direction(media_desc->direction()); - transceiver->internal()->set_fired_direction(media_desc->direction()); + transceiver->set_current_direction(media_desc->direction()); + transceiver->set_fired_direction(media_desc->direction()); } } auto observer = pc_->Observer(); @@ -1367,7 +1370,8 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( } error = UpdateSessionState(type, cricket::CS_LOCAL, - local_description()->description()); + local_description()->description(), + bundle_groups_by_mid); if (!error.ok()) { return error; } @@ -1385,12 +1389,15 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // If setting the description decided our SSL role, allocate any necessary // SCTP sids. rtc::SSLRole role; - if (IsSctpLike(pc_->data_channel_type()) && pc_->GetSctpSslRole(&role)) { + if (pc_->GetSctpSslRole(&role)) { data_channel_controller()->AllocateSctpSids(role); } if (IsUnifiedPlan()) { - for (const auto& transceiver : transceivers()->List()) { + // We must use List and not ListInternal here because + // transceivers()->StableState() is indexed by the non-internal refptr. + for (const auto& transceiver_ext : transceivers()->List()) { + auto transceiver = transceiver_ext->internal(); if (transceiver->stopped()) { continue; } @@ -1399,20 +1406,24 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( if (!content) { continue; } - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + cricket::ChannelInterface* channel = transceiver->channel(); if (content->rejected || !channel || channel->local_streams().empty()) { // 0 is a special value meaning "this sender has no associated send // stream". Need to call this so the sender won't attempt to configure // a no longer existing stream and run into DCHECKs in the lower // layers. 
- transceiver->internal()->sender_internal()->SetSsrc(0); + transceiver->sender_internal()->SetSsrc(0); } else { // Get the StreamParams from the channel which could generate SSRCs. const std::vector& streams = channel->local_streams(); - transceiver->internal()->sender_internal()->set_stream_ids( - streams[0].stream_ids()); - transceiver->internal()->sender_internal()->SetSsrc( - streams[0].first_ssrc()); + transceiver->sender_internal()->set_stream_ids(streams[0].stream_ids()); + auto encodings = transceiver->sender_internal()->init_send_encodings(); + transceiver->sender_internal()->SetSsrc(streams[0].first_ssrc()); + if (!encodings.empty()) { + transceivers() + ->StableState(transceiver_ext) + ->SetInitSendEncodings(encodings); + } } } } else { @@ -1445,17 +1456,7 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( } } - const cricket::ContentInfo* data_content = - GetFirstDataContent(local_description()->description()); - if (data_content) { - const cricket::RtpDataContentDescription* rtp_data_desc = - data_content->media_description()->as_rtp_data(); - // rtp_data_desc will be null if this is an SCTP description. - if (rtp_data_desc) { - data_channel_controller()->UpdateLocalRtpDataChannels( - rtp_data_desc->streams()); - } - } + // This function does nothing with data content. 
if (type == SdpType::kAnswer && local_ice_credentials_to_replace_->SatisfiesIceRestart( @@ -1532,7 +1533,9 @@ void SdpOfferAnswerHandler::SetRemoteDescription( } RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( - std::unique_ptr desc) { + std::unique_ptr desc, + const std::map& + bundle_groups_by_mid) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(desc); @@ -1576,7 +1579,7 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( if (IsUnifiedPlan()) { RTCError error = UpdateTransceiversAndDataChannels( cricket::CS_REMOTE, *remote_description(), local_description(), - old_remote_description); + old_remote_description, bundle_groups_by_mid); if (!error.ok()) { return error; } @@ -1598,7 +1601,8 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( // NOTE: Candidates allocation will be initiated only when // SetLocalDescription is called. error = UpdateSessionState(type, cricket::CS_REMOTE, - remote_description()->description()); + remote_description()->description(), + bundle_groups_by_mid); if (!error.ok()) { return error; } @@ -1657,7 +1661,7 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( // If setting the description decided our SSL role, allocate any necessary // SCTP sids. 
rtc::SSLRole role; - if (IsSctpLike(pc_->data_channel_type()) && pc_->GetSctpSslRole(&role)) { + if (pc_->GetSctpSslRole(&role)) { data_channel_controller()->AllocateSctpSids(role); } @@ -1667,7 +1671,8 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( std::vector> remove_list; std::vector> added_streams; std::vector> removed_streams; - for (const auto& transceiver : transceivers()->List()) { + for (const auto& transceiver_ext : transceivers()->List()) { + const auto transceiver = transceiver_ext->internal(); const ContentInfo* content = FindMediaSectionForTransceiver(transceiver, remote_description()); if (!content) { @@ -1687,14 +1692,13 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( stream_ids = media_desc->streams()[0].stream_ids(); } transceivers() - ->StableState(transceiver) + ->StableState(transceiver_ext) ->SetRemoteStreamIdsIfUnset(transceiver->receiver()->stream_ids()); RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name << " (" << GetStreamIdsString(stream_ids) << ")."; - SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), - stream_ids, &added_streams, - &removed_streams); + SetAssociatedRemoteStreams(transceiver->receiver_internal(), stream_ids, + &added_streams, &removed_streams); // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, // and transceiver's current direction is neither sendrecv nor recvonly, @@ -1714,26 +1718,24 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( if (!RtpTransceiverDirectionHasRecv(local_direction) && (transceiver->fired_direction() && RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver, &remove_list, + ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list, &removed_streams); } // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. 
- transceiver->internal()->set_fired_direction(local_direction); + transceiver->set_fired_direction(local_direction); // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run // the following steps: if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to // direction. - transceiver->internal()->set_current_direction(local_direction); + transceiver->set_current_direction(local_direction); // 2.2.8.1.11.[3-6]: Set the transport internal slots. if (transceiver->mid()) { - auto dtls_transport = - transport_controller()->LookupDtlsTransportByMid( - *transceiver->mid()); - transceiver->internal()->sender_internal()->set_transport( - dtls_transport); - transceiver->internal()->receiver_internal()->set_transport( - dtls_transport); + auto dtls_transport = LookupDtlsTransportByMid(pc_->network_thread(), + transport_controller(), + *transceiver->mid()); + transceiver->sender_internal()->set_transport(dtls_transport); + transceiver->receiver_internal()->set_transport(dtls_transport); } } // 2.2.8.1.12: If the media description is rejected, and transceiver is @@ -1741,18 +1743,16 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( if (content->rejected && !transceiver->stopped()) { RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name << " since the media section was rejected."; - transceiver->internal()->StopTransceiverProcedure(); + transceiver->StopTransceiverProcedure(); } if (!content->rejected && RtpTransceiverDirectionHasRecv(local_direction)) { if (!media_desc->streams().empty() && media_desc->streams()[0].has_ssrcs()) { uint32_t ssrc = media_desc->streams()[0].first_ssrc(); - transceiver->internal()->receiver_internal()->SetupMediaChannel(ssrc); + transceiver->receiver_internal()->SetupMediaChannel(ssrc); } else { - transceiver->internal() - ->receiver_internal() - ->SetupUnsignaledMediaChannel(); + 
transceiver->receiver_internal()->SetupUnsignaledMediaChannel(); } } } @@ -1783,8 +1783,6 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( GetFirstAudioContentDescription(remote_description()->description()); const cricket::VideoContentDescription* video_desc = GetFirstVideoContentDescription(remote_description()->description()); - const cricket::RtpDataContentDescription* rtp_data_desc = - GetFirstRtpDataContentDescription(remote_description()->description()); // Check if the descriptions include streams, just in case the peer supports // MSID, but doesn't indicate so with "a=msid-semantic". @@ -1837,13 +1835,6 @@ RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( } } - // If this is an RTP data transport, update the DataChannels with the - // information from the remote peer. - if (rtp_data_desc) { - data_channel_controller()->UpdateRemoteRtpDataChannels( - GetActiveStreams(rtp_data_desc)); - } - // Iterate new_streams and notify the observer about new MediaStreams. auto observer = pc_->Observer(); for (size_t i = 0; i < new_streams->count(); ++i) { @@ -1904,7 +1895,10 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( return; } - RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL); + std::map bundle_groups_by_mid = + GetBundleGroupsByMid(desc->description()); + RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL, + bundle_groups_by_mid); if (!error.ok()) { std::string error_message = GetSetDescriptionErrorMessage( cricket::CS_LOCAL, desc->GetType(), error); @@ -1918,7 +1912,7 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( // which may destroy it before returning. const SdpType type = desc->GetType(); - error = ApplyLocalDescription(std::move(desc)); + error = ApplyLocalDescription(std::move(desc), bundle_groups_by_mid); // |desc| may be destroyed at this point. 
if (!error.ok()) { @@ -1941,8 +1935,7 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( // TODO(deadbeef): We already had to hop to the network thread for // MaybeStartGathering... pc_->network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, - port_allocator())); + RTC_FROM_HERE, [this] { port_allocator()->DiscardCandidatePool(); }); // Make UMA notes about what was agreed to. ReportNegotiatedSdpSemantics(*local_description()); } @@ -2158,13 +2151,17 @@ void SdpOfferAnswerHandler::DoSetRemoteDescription( desc->GetType() == SdpType::kAnswer) { // Report to UMA the format of the received offer or answer. pc_->ReportSdpFormatReceived(*desc); + pc_->ReportSdpBundleUsage(*desc); } // Handle remote descriptions missing a=mid lines for interop with legacy end // points. FillInMissingRemoteMids(desc->description()); - RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE); + std::map bundle_groups_by_mid = + GetBundleGroupsByMid(desc->description()); + RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE, + bundle_groups_by_mid); if (!error.ok()) { std::string error_message = GetSetDescriptionErrorMessage( cricket::CS_REMOTE, desc->GetType(), error); @@ -2178,7 +2175,7 @@ void SdpOfferAnswerHandler::DoSetRemoteDescription( // ApplyRemoteDescription, which may destroy it before returning. const SdpType type = desc->GetType(); - error = ApplyRemoteDescription(std::move(desc)); + error = ApplyRemoteDescription(std::move(desc), bundle_groups_by_mid); // |desc| may be destroyed at this point. if (!error.ok()) { @@ -2200,8 +2197,7 @@ void SdpOfferAnswerHandler::DoSetRemoteDescription( // TODO(deadbeef): We already had to hop to the network thread for // MaybeStartGathering... 
pc_->network_thread()->Invoke( - RTC_FROM_HERE, rtc::Bind(&cricket::PortAllocator::DiscardCandidatePool, - port_allocator())); + RTC_FROM_HERE, [this] { port_allocator()->DiscardCandidatePool(); }); // Make UMA notes about what was agreed to. ReportNegotiatedSdpSemantics(*remote_description()); } @@ -2268,55 +2264,58 @@ void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( bool SdpOfferAnswerHandler::AddIceCandidate( const IceCandidateInterface* ice_candidate) { + const AddIceCandidateResult result = AddIceCandidateInternal(ice_candidate); + NoteAddIceCandidateResult(result); + // If the return value is kAddIceCandidateFailNotReady, the candidate has been + // added, although not 'ready', but that's a success. + return result == kAddIceCandidateSuccess || + result == kAddIceCandidateFailNotReady; +} + +AddIceCandidateResult SdpOfferAnswerHandler::AddIceCandidateInternal( + const IceCandidateInterface* ice_candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AddIceCandidate"); if (pc_->IsClosed()) { RTC_LOG(LS_ERROR) << "AddIceCandidate: PeerConnection is closed."; - NoteAddIceCandidateResult(kAddIceCandidateFailClosed); - return false; + return kAddIceCandidateFailClosed; } if (!remote_description()) { RTC_LOG(LS_ERROR) << "AddIceCandidate: ICE candidates can't be added " "without any remote session description."; - NoteAddIceCandidateResult(kAddIceCandidateFailNoRemoteDescription); - return false; + return kAddIceCandidateFailNoRemoteDescription; } if (!ice_candidate) { RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate is null."; - NoteAddIceCandidateResult(kAddIceCandidateFailNullCandidate); - return false; + return kAddIceCandidateFailNullCandidate; } bool valid = false; bool ready = ReadyToUseRemoteCandidate(ice_candidate, nullptr, &valid); if (!valid) { - NoteAddIceCandidateResult(kAddIceCandidateFailNotValid); - return false; + return kAddIceCandidateFailNotValid; } // Add this candidate to the remote session 
description. if (!mutable_remote_description()->AddCandidate(ice_candidate)) { RTC_LOG(LS_ERROR) << "AddIceCandidate: Candidate cannot be used."; - NoteAddIceCandidateResult(kAddIceCandidateFailInAddition); - return false; + return kAddIceCandidateFailInAddition; } - if (ready) { - bool result = UseCandidate(ice_candidate); - if (result) { - pc_->NoteUsageEvent(UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED); - NoteAddIceCandidateResult(kAddIceCandidateSuccess); - } else { - NoteAddIceCandidateResult(kAddIceCandidateFailNotUsable); - } - return result; - } else { + if (!ready) { RTC_LOG(LS_INFO) << "AddIceCandidate: Not ready to use candidate."; - NoteAddIceCandidateResult(kAddIceCandidateFailNotReady); - return true; + return kAddIceCandidateFailNotReady; } + + if (!UseCandidate(ice_candidate)) { + return kAddIceCandidateFailNotUsable; + } + + pc_->NoteUsageEvent(UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED); + + return kAddIceCandidateSuccess; } void SdpOfferAnswerHandler::AddIceCandidate( @@ -2330,23 +2329,25 @@ void SdpOfferAnswerHandler::AddIceCandidate( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), candidate = std::move(candidate), callback = std::move(callback)]( std::function operations_chain_callback) { - if (!this_weak_ptr) { - operations_chain_callback(); + auto result = + this_weak_ptr + ? this_weak_ptr->AddIceCandidateInternal(candidate.get()) + : kAddIceCandidateFailClosed; + NoteAddIceCandidateResult(result); + operations_chain_callback(); + if (result == kAddIceCandidateFailClosed) { callback(RTCError( RTCErrorType::INVALID_STATE, "AddIceCandidate failed because the session was shut down")); - return; - } - if (!this_weak_ptr->AddIceCandidate(candidate.get())) { - operations_chain_callback(); + } else if (result != kAddIceCandidateSuccess && + result != kAddIceCandidateFailNotReady) { // Fail with an error type and message consistent with Chromium. // TODO(hbos): Fail with error types according to spec. 
callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Error processing ICE candidate")); - return; + } else { + callback(RTCError::OK()); } - operations_chain_callback(); - callback(RTCError::OK()); }); } @@ -2466,7 +2467,9 @@ void SdpOfferAnswerHandler::ChangeSignalingState( RTCError SdpOfferAnswerHandler::UpdateSessionState( SdpType type, cricket::ContentSource source, - const cricket::SessionDescription* description) { + const cricket::SessionDescription* description, + const std::map& + bundle_groups_by_mid) { RTC_DCHECK_RUN_ON(signaling_thread()); // If there's already a pending error then no state transition should happen. @@ -2492,17 +2495,11 @@ RTCError SdpOfferAnswerHandler::UpdateSessionState( RTC_DCHECK(type == SdpType::kAnswer); ChangeSignalingState(PeerConnectionInterface::kStable); transceivers()->DiscardStableStates(); - have_pending_rtp_data_channel_ = false; } // Update internal objects according to the session description's media // descriptions. - RTCError error = PushdownMediaDescription(type, source); - if (!error.ok()) { - return error; - } - - return RTCError::OK(); + return PushdownMediaDescription(type, source, bundle_groups_by_mid); } bool SdpOfferAnswerHandler::ShouldFireNegotiationNeededEvent( @@ -2706,16 +2703,16 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { transceivers()->Remove(transceiver); } } + if (state.init_send_encodings()) { + transceiver->internal()->sender_internal()->set_init_send_encodings( + state.init_send_encodings().value()); + } transceiver->internal()->sender_internal()->set_transport(nullptr); transceiver->internal()->receiver_internal()->set_transport(nullptr); transceiver->internal()->set_mid(state.mid()); transceiver->internal()->set_mline_index(state.mline_index()); } transport_controller()->RollbackTransports(); - if (have_pending_rtp_data_channel_) { - DestroyDataChannelTransport(); - have_pending_rtp_data_channel_ = false; - } transceivers()->DiscardStableStates(); 
pending_local_description_.reset(); pending_remote_description_.reset(); @@ -2782,7 +2779,7 @@ bool SdpOfferAnswerHandler::IceRestartPending( bool SdpOfferAnswerHandler::NeedsIceRestart( const std::string& content_name) const { - return transport_controller()->NeedsIceRestart(content_name); + return pc_->NeedsIceRestart(content_name); } absl::optional SdpOfferAnswerHandler::GetDtlsRole( @@ -2878,12 +2875,12 @@ bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { // 5. For each transceiver in connection's set of transceivers, perform the // following checks: - for (const auto& transceiver : transceivers()->List()) { + for (const auto& transceiver : transceivers()->ListInternal()) { const ContentInfo* current_local_msection = - FindTransceiverMSection(transceiver.get(), description); + FindTransceiverMSection(transceiver, description); - const ContentInfo* current_remote_msection = FindTransceiverMSection( - transceiver.get(), current_remote_description()); + const ContentInfo* current_remote_msection = + FindTransceiverMSection(transceiver, current_remote_description()); // 5.4 If transceiver is stopped and is associated with an m= section, // but the associated m= section is not yet rejected in @@ -2971,7 +2968,7 @@ bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { return true; const ContentInfo* offered_remote_msection = - FindTransceiverMSection(transceiver.get(), remote_description()); + FindTransceiverMSection(transceiver, remote_description()); RtpTransceiverDirection offered_direction = offered_remote_msection @@ -3000,7 +2997,9 @@ void SdpOfferAnswerHandler::GenerateNegotiationNeededEvent() { RTCError SdpOfferAnswerHandler::ValidateSessionDescription( const SessionDescriptionInterface* sdesc, - cricket::ContentSource source) { + cricket::ContentSource source, + const std::map& + bundle_groups_by_mid) { if (session_error() != SessionError::kNone) { LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); } @@ -3026,20 +3025,21 
@@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( std::string crypto_error; if (webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED || pc_->dtls_enabled()) { - RTCError crypto_error = - VerifyCrypto(sdesc->description(), pc_->dtls_enabled()); + RTCError crypto_error = VerifyCrypto( + sdesc->description(), pc_->dtls_enabled(), bundle_groups_by_mid); if (!crypto_error.ok()) { return crypto_error; } } // Verify ice-ufrag and ice-pwd. - if (!VerifyIceUfragPwdPresent(sdesc->description())) { + if (!VerifyIceUfragPwdPresent(sdesc->description(), bundle_groups_by_mid)) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kSdpWithoutIceUfragPwd); } - if (!pc_->ValidateBundleSettings(sdesc->description())) { + if (!pc_->ValidateBundleSettings(sdesc->description(), + bundle_groups_by_mid)) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kBundleWithoutRtcpMux); } @@ -3112,18 +3112,23 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( cricket::ContentSource source, const SessionDescriptionInterface& new_session, const SessionDescriptionInterface* old_local_description, - const SessionDescriptionInterface* old_remote_description) { + const SessionDescriptionInterface* old_remote_description, + const std::map& + bundle_groups_by_mid) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(IsUnifiedPlan()); - const cricket::ContentGroup* bundle_group = nullptr; if (new_session.GetType() == SdpType::kOffer) { - auto bundle_group_or_error = - GetEarlyBundleGroup(*new_session.description()); - if (!bundle_group_or_error.ok()) { - return bundle_group_or_error.MoveError(); + // If the BUNDLE policy is max-bundle, then we know for sure that all + // transports will be bundled from the start. Return an error if max-bundle + // is specified but the session description does not have a BUNDLE group. 
+ if (pc_->configuration()->bundle_policy == + PeerConnectionInterface::kBundlePolicyMaxBundle && + bundle_groups_by_mid.empty()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "max-bundle configured but session description " + "has no BUNDLE group"); } - bundle_group = bundle_group_or_error.MoveValue(); } const ContentInfos& new_contents = new_session.description()->contents(); @@ -3131,6 +3136,9 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( const cricket::ContentInfo& new_content = new_contents[i]; cricket::MediaType media_type = new_content.media_description()->type(); mid_generator_.AddKnownId(new_content.name); + auto it = bundle_groups_by_mid.find(new_content.name); + const cricket::ContentGroup* bundle_group = + it != bundle_groups_by_mid.end() ? it->second : nullptr; if (media_type == cricket::MEDIA_TYPE_AUDIO || media_type == cricket::MEDIA_TYPE_VIDEO) { const cricket::ContentInfo* old_local_content = nullptr; @@ -3319,22 +3327,6 @@ SdpOfferAnswerHandler::AssociateTransceiver( return std::move(transceiver); } -RTCErrorOr -SdpOfferAnswerHandler::GetEarlyBundleGroup( - const SessionDescription& desc) const { - const cricket::ContentGroup* bundle_group = nullptr; - if (pc_->configuration()->bundle_policy == - PeerConnectionInterface::kBundlePolicyMaxBundle) { - bundle_group = desc.GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - if (!bundle_group) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "max-bundle configured but session description " - "has no BUNDLE group"); - } - } - return bundle_group; -} - RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( rtc::scoped_refptr> transceiver, @@ -3371,30 +3363,17 @@ RTCError SdpOfferAnswerHandler::UpdateDataChannel( cricket::ContentSource source, const cricket::ContentInfo& content, const cricket::ContentGroup* bundle_group) { - if (pc_->data_channel_type() == cricket::DCT_NONE) { - // If data channels are disabled, ignore this media section. 
CreateAnswer - // will take care of rejecting it. - return RTCError::OK(); - } if (content.rejected) { RTC_LOG(LS_INFO) << "Rejected data channel, mid=" << content.mid(); DestroyDataChannelTransport(); } else { - if (!data_channel_controller()->rtp_data_channel() && - !data_channel_controller()->data_channel_transport()) { + if (!data_channel_controller()->data_channel_transport()) { RTC_LOG(LS_INFO) << "Creating data channel, mid=" << content.mid(); if (!CreateDataChannel(content.name)) { LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, "Failed to create data channel."); } } - if (source == cricket::CS_REMOTE) { - const MediaContentDescription* data_desc = content.media_description(); - if (data_desc && cricket::IsRtpProtocol(data_desc->protocol())) { - data_channel_controller()->UpdateRemoteRtpDataChannels( - GetActiveStreams(data_desc)); - } - } } return RTCError::OK(); } @@ -3488,19 +3467,17 @@ SdpOfferAnswerHandler::FindAvailableTransceiverToReceive( const cricket::ContentInfo* SdpOfferAnswerHandler::FindMediaSectionForTransceiver( - rtc::scoped_refptr> - transceiver, + const RtpTransceiver* transceiver, const SessionDescriptionInterface* sdesc) const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(transceiver); RTC_DCHECK(sdesc); if (IsUnifiedPlan()) { - if (!transceiver->internal()->mid()) { + if (!transceiver->mid()) { // This transceiver is not associated with a media section yet. return nullptr; } - return sdesc->description()->GetContentByName( - *transceiver->internal()->mid()); + return sdesc->description()->GetContentByName(*transceiver->mid()); } else { // Plan B only allows at most one audio and one video section, so use the // first media section of that type. @@ -3521,16 +3498,6 @@ void SdpOfferAnswerHandler::GetOptionsForOffer( GetOptionsForPlanBOffer(offer_answer_options, session_options); } - // Intentionally unset the data channel type for RTP data channel with the - // second condition. 
Otherwise the RTP data channels would be successfully - // negotiated by default and the unit tests in WebRtcDataBrowserTest will fail - // when building with chromium. We want to leave RTP data channels broken, so - // people won't try to use them. - if (data_channel_controller()->HasRtpDataChannels() || - pc_->data_channel_type() != cricket::DCT_RTP) { - session_options->data_channel_type = pc_->data_channel_type(); - } - // Apply ICE restart flag and renomination flag. bool ice_restart = offer_answer_options.ice_restart || HasNewIceCredentials(); for (auto& options : session_options->media_description_options) { @@ -3544,8 +3511,7 @@ void SdpOfferAnswerHandler::GetOptionsForOffer( session_options->pooled_ice_credentials = pc_->network_thread()->Invoke>( RTC_FROM_HERE, - rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, - port_allocator())); + [this] { return port_allocator()->GetPooledIceCredentials(); }); session_options->offer_extmap_allow_mixed = pc_->configuration()->offer_extmap_allow_mixed; @@ -3708,7 +3674,7 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( } else { session_options->media_description_options.push_back( GetMediaDescriptionOptionsForTransceiver( - transceiver, mid, + transceiver->internal(), mid, /*is_create_offer=*/true)); // CreateOffer shouldn't really cause any state changes in // PeerConnection, but we need a way to match new transceivers to new @@ -3746,7 +3712,7 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( // and not associated). Reuse media sections marked as recyclable first, // otherwise append to the end of the offer. New media sections should be // added in the order they were added to the PeerConnection. 
- for (const auto& transceiver : transceivers()->List()) { + for (const auto& transceiver : transceivers()->ListInternal()) { if (transceiver->mid() || transceiver->stopping()) { continue; } @@ -3766,7 +3732,7 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( /*is_create_offer=*/true)); } // See comment above for why CreateOffer changes the transceiver's state. - transceiver->internal()->set_mline_index(mline_index); + transceiver->set_mline_index(mline_index); } // Lastly, add a m-section if we have local data channels and an m section // does not already exist. @@ -3789,15 +3755,6 @@ void SdpOfferAnswerHandler::GetOptionsForAnswer( GetOptionsForPlanBAnswer(offer_answer_options, session_options); } - // Intentionally unset the data channel type for RTP data channel. Otherwise - // the RTP data channels would be successfully negotiated by default and the - // unit tests in WebRtcDataBrowserTest will fail when building with chromium. - // We want to leave RTP data channels broken, so people won't try to use them. - if (data_channel_controller()->HasRtpDataChannels() || - pc_->data_channel_type() != cricket::DCT_RTP) { - session_options->data_channel_type = pc_->data_channel_type(); - } - // Apply ICE renomination flag. 
for (auto& options : session_options->media_description_options) { options.transport_options.enable_ice_renomination = @@ -3809,8 +3766,7 @@ void SdpOfferAnswerHandler::GetOptionsForAnswer( session_options->pooled_ice_credentials = pc_->network_thread()->Invoke>( RTC_FROM_HERE, - rtc::Bind(&cricket::PortAllocator::GetPooledIceCredentials, - port_allocator())); + [this] { return port_allocator()->GetPooledIceCredentials(); }); } void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( @@ -3879,7 +3835,7 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanAnswer( if (transceiver) { session_options->media_description_options.push_back( GetMediaDescriptionOptionsForTransceiver( - transceiver, content.name, + transceiver->internal(), content.name, /*is_create_offer=*/false)); } else { // This should only happen with rejected transceivers. @@ -3900,8 +3856,7 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanAnswer( // Reject all data sections if data channels are disabled. // Reject a data section if it has already been rejected. // Reject all data sections except for the first one. 
- if (pc_->data_channel_type() == cricket::DCT_NONE || content.rejected || - content.name != *(pc_->GetDataMid())) { + if (content.rejected || content.name != *(pc_->GetDataMid())) { session_options->media_description_options.push_back( GetMediaDescriptionOptionsForRejectedData(content.name)); } else { @@ -4203,29 +4158,26 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( void SdpOfferAnswerHandler::EnableSending() { RTC_DCHECK_RUN_ON(signaling_thread()); - for (const auto& transceiver : transceivers()->List()) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (channel && !channel->enabled()) { + for (const auto& transceiver : transceivers()->ListInternal()) { + cricket::ChannelInterface* channel = transceiver->channel(); + if (channel) { channel->Enable(true); } } - - if (data_channel_controller()->rtp_data_channel() && - !data_channel_controller()->rtp_data_channel()->enabled()) { - data_channel_controller()->rtp_data_channel()->Enable(true); - } } RTCError SdpOfferAnswerHandler::PushdownMediaDescription( SdpType type, - cricket::ContentSource source) { + cricket::ContentSource source, + const std::map& + bundle_groups_by_mid) { const SessionDescriptionInterface* sdesc = (source == cricket::CS_LOCAL ? local_description() : remote_description()); RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(sdesc); - if (!UpdatePayloadTypeDemuxingState(source)) { + if (!UpdatePayloadTypeDemuxingState(source, bundle_groups_by_mid)) { // Note that this is never expected to fail, since RtpDemuxer doesn't return // an error when changing payload type demux criteria, which is all this // does. @@ -4234,10 +4186,14 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( } // Push down the new SDP media section for each audio/video transceiver. 
- for (const auto& transceiver : transceivers()->List()) { + auto rtp_transceivers = transceivers()->ListInternal(); + std::vector< + std::pair> + channels; + for (const auto& transceiver : rtp_transceivers) { const ContentInfo* content_info = FindMediaSectionForTransceiver(transceiver, sdesc); - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + cricket::ChannelInterface* channel = transceiver->channel(); if (!channel || !content_info || content_info->rejected) { continue; } @@ -4246,48 +4202,39 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( if (!content_desc) { continue; } - std::string error; - bool success = (source == cricket::CS_LOCAL) - ? channel->SetLocalContent(content_desc, type, &error) - : channel->SetRemoteContent(content_desc, type, &error); - if (!success) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); - } + + transceiver->OnNegotiationUpdate(type, content_desc); + channels.push_back(std::make_pair(channel, content_desc)); } - // If using the RtpDataChannel, push down the new SDP section for it too. - if (data_channel_controller()->rtp_data_channel()) { - const ContentInfo* data_content = - cricket::GetFirstDataContent(sdesc->description()); - if (data_content && !data_content->rejected) { - const MediaContentDescription* data_desc = - data_content->media_description(); - if (data_desc) { - std::string error; - bool success = (source == cricket::CS_LOCAL) - ? data_channel_controller() - ->rtp_data_channel() - ->SetLocalContent(data_desc, type, &error) - : data_channel_controller() - ->rtp_data_channel() - ->SetRemoteContent(data_desc, type, &error); - if (!success) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); - } - } + if (!channels.empty()) { + RTCError error = + pc_->worker_thread()->Invoke(RTC_FROM_HERE, [&]() { + std::string error; + for (const auto& entry : channels) { + bool success = + (source == cricket::CS_LOCAL) + ? 
entry.first->SetLocalContent(entry.second, type, &error) + : entry.first->SetRemoteContent(entry.second, type, &error); + if (!success) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); + } + } + return RTCError::OK(); + }); + if (!error.ok()) { + return error; } } // Need complete offer/answer with an SCTP m= section before starting SCTP, // according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19 if (pc_->sctp_mid() && local_description() && remote_description()) { - rtc::scoped_refptr sctp_transport = - transport_controller()->GetSctpTransport(*(pc_->sctp_mid())); auto local_sctp_description = cricket::GetFirstSctpDataContentDescription( local_description()->description()); auto remote_sctp_description = cricket::GetFirstSctpDataContentDescription( remote_description()->description()); - if (sctp_transport && local_sctp_description && remote_sctp_description) { + if (local_sctp_description && remote_sctp_description) { int max_message_size; // A remote max message size of zero means "any size supported". // We configure the connection with our own max message size. 
@@ -4298,8 +4245,9 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( std::min(local_sctp_description->max_message_size(), remote_sctp_description->max_message_size()); } - sctp_transport->Start(local_sctp_description->port(), - remote_sctp_description->port(), max_message_size); + pc_->StartSctpTransport(local_sctp_description->port(), + remote_sctp_description->port(), + max_message_size); } } @@ -4341,27 +4289,23 @@ void SdpOfferAnswerHandler::RemoveStoppedTransceivers() { if (!transceiver->stopped()) { continue; } - const ContentInfo* local_content = - FindMediaSectionForTransceiver(transceiver, local_description()); - const ContentInfo* remote_content = - FindMediaSectionForTransceiver(transceiver, remote_description()); + const ContentInfo* local_content = FindMediaSectionForTransceiver( + transceiver->internal(), local_description()); + const ContentInfo* remote_content = FindMediaSectionForTransceiver( + transceiver->internal(), remote_description()); if ((local_content && local_content->rejected) || (remote_content && remote_content->rejected)) { RTC_LOG(LS_INFO) << "Dissociating transceiver" - << " since the media section is being recycled."; + " since the media section is being recycled."; transceiver->internal()->set_mid(absl::nullopt); transceiver->internal()->set_mline_index(absl::nullopt); - transceivers()->Remove(transceiver); - continue; - } - if (!local_content && !remote_content) { + } else if (!local_content && !remote_content) { // TODO(bugs.webrtc.org/11973): Consider if this should be removed already // See https://github.com/w3c/webrtc-pc/issues/2576 RTC_LOG(LS_INFO) << "Dropping stopped transceiver that was never associated"; - transceivers()->Remove(transceiver); - continue; } + transceivers()->Remove(transceiver); } } @@ -4459,40 +4403,23 @@ bool SdpOfferAnswerHandler::UseCandidatesInSessionDescription( bool SdpOfferAnswerHandler::UseCandidate( const IceCandidateInterface* candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); + + 
rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + RTCErrorOr result = FindContentInfo(remote_description(), candidate); - if (!result.ok()) { - RTC_LOG(LS_ERROR) << "UseCandidate: Invalid candidate. " - << result.error().message(); + if (!result.ok()) return false; + + const cricket::Candidate& c = candidate->candidate(); + RTCError error = cricket::VerifyCandidate(c); + if (!error.ok()) { + RTC_LOG(LS_WARNING) << "Invalid candidate: " << c.ToString(); + return true; } - std::vector candidates; - candidates.push_back(candidate->candidate()); - // Invoking BaseSession method to handle remote candidates. - RTCError error = transport_controller()->AddRemoteCandidates( - result.value()->name, candidates); - if (error.ok()) { - ReportRemoteIceCandidateAdded(candidate->candidate()); - // Candidates successfully submitted for checking. - if (pc_->ice_connection_state() == - PeerConnectionInterface::kIceConnectionNew || - pc_->ice_connection_state() == - PeerConnectionInterface::kIceConnectionDisconnected) { - // If state is New, then the session has just gotten its first remote ICE - // candidates, so go to Checking. - // If state is Disconnected, the session is re-using old candidates or - // receiving additional ones, so go to Checking. - // If state is Connected, stay Connected. - // TODO(bemasc): If state is Connected, and the new candidates are for a - // newly added transport, then the state actually _should_ move to - // checking. Add a way to distinguish that case. - pc_->SetIceConnectionState( - PeerConnectionInterface::kIceConnectionChecking); - } - // TODO(bemasc): If state is Completed, go back to Connected. 
- } else { - RTC_LOG(LS_WARNING) << error.message(); - } + + pc_->AddRemoteCandidate(result.value()->name, c); + return true; } @@ -4525,41 +4452,13 @@ bool SdpOfferAnswerHandler::ReadyToUseRemoteCandidate( return false; } - std::string transport_name = GetTransportName(result.value()->name); - return !transport_name.empty(); -} - -void SdpOfferAnswerHandler::ReportRemoteIceCandidateAdded( - const cricket::Candidate& candidate) { - pc_->NoteUsageEvent(UsageEvent::REMOTE_CANDIDATE_ADDED); - if (candidate.address().IsPrivateIP()) { - pc_->NoteUsageEvent(UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED); - } - if (candidate.address().IsUnresolvedIP()) { - pc_->NoteUsageEvent(UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED); - } - if (candidate.address().family() == AF_INET6) { - pc_->NoteUsageEvent(UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED); - } + return true; } RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( const SessionDescriptionInterface* description, const IceCandidateInterface* candidate) { - if (candidate->sdp_mline_index() >= 0) { - size_t mediacontent_index = - static_cast(candidate->sdp_mline_index()); - size_t content_size = description->description()->contents().size(); - if (mediacontent_index < content_size) { - return &description->description()->contents()[mediacontent_index]; - } else { - return RTCError(RTCErrorType::INVALID_RANGE, - "Media line index (" + - rtc::ToString(candidate->sdp_mline_index()) + - ") out of range (number of mlines: " + - rtc::ToString(content_size) + ")."); - } - } else if (!candidate->sdp_mid().empty()) { + if (!candidate->sdp_mid().empty()) { auto& contents = description->description()->contents(); auto it = absl::c_find_if( contents, [candidate](const cricket::ContentInfo& content_info) { @@ -4573,6 +4472,19 @@ RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( } else { return &*it; } + } else if (candidate->sdp_mline_index() >= 0) { + size_t mediacontent_index = + static_cast(candidate->sdp_mline_index()); + size_t content_size = 
description->description()->contents().size(); + if (mediacontent_index < content_size) { + return &description->description()->contents()[mediacontent_index]; + } else { + return RTCError(RTCErrorType::INVALID_RANGE, + "Media line index (" + + rtc::ToString(candidate->sdp_mline_index()) + + ") out of range (number of mlines: " + + rtc::ToString(content_size) + ")."); + } } return RTCError(RTCErrorType::INVALID_PARAMETER, @@ -4606,8 +4518,7 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { } const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); - if (pc_->data_channel_type() != cricket::DCT_NONE && data && - !data->rejected && !data_channel_controller()->rtp_data_channel() && + if (data && !data->rejected && !data_channel_controller()->data_channel_transport()) { if (!CreateDataChannel(data->name)) { LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, @@ -4622,140 +4533,118 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { cricket::VoiceChannel* SdpOfferAnswerHandler::CreateVoiceChannel( const std::string& mid) { RTC_DCHECK_RUN_ON(signaling_thread()); + if (!channel_manager()->media_engine()) + return nullptr; + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); // TODO(bugs.webrtc.org/11992): CreateVoiceChannel internally switches to the // worker thread. We shouldn't be using the |call_ptr_| hack here but simply // be on the worker thread and use |call_| (update upstream code). 
- cricket::VoiceChannel* voice_channel; - { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - voice_channel = channel_manager()->CreateVoiceChannel( - pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, - signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), - &ssrc_generator_, audio_options()); - } - if (!voice_channel) { - return nullptr; - } - voice_channel->SignalSentPacket().connect(pc_, - &PeerConnection::OnSentPacket_w); - voice_channel->SetRtpTransport(rtp_transport); - - return voice_channel; + return channel_manager()->CreateVoiceChannel( + pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), + &ssrc_generator_, audio_options()); } // TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. cricket::VideoChannel* SdpOfferAnswerHandler::CreateVideoChannel( const std::string& mid) { RTC_DCHECK_RUN_ON(signaling_thread()); + if (!channel_manager()->media_engine()) + return nullptr; + + // NOTE: This involves a non-ideal hop (Invoke) over to the network thread. RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to the // worker thread. We shouldn't be using the |call_ptr_| hack here but simply // be on the worker thread and use |call_| (update upstream code). 
- cricket::VideoChannel* video_channel; - { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - video_channel = channel_manager()->CreateVideoChannel( - pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, - signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), - &ssrc_generator_, video_options(), - video_bitrate_allocator_factory_.get()); - } - if (!video_channel) { - return nullptr; - } - video_channel->SignalSentPacket().connect(pc_, - &PeerConnection::OnSentPacket_w); - video_channel->SetRtpTransport(rtp_transport); - - return video_channel; + return channel_manager()->CreateVideoChannel( + pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), + &ssrc_generator_, video_options(), + video_bitrate_allocator_factory_.get()); } bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) { RTC_DCHECK_RUN_ON(signaling_thread()); - switch (pc_->data_channel_type()) { - case cricket::DCT_SCTP: - if (pc_->network_thread()->Invoke( - RTC_FROM_HERE, - rtc::Bind(&PeerConnection::SetupDataChannelTransport_n, pc_, - mid))) { - pc_->SetSctpDataMid(mid); - } else { - return false; - } - return true; - case cricket::DCT_RTP: - default: - RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); - // TODO(bugs.webrtc.org/9987): set_rtp_data_channel() should be called on - // the network thread like set_data_channel_transport is. 
- { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - data_channel_controller()->set_rtp_data_channel( - channel_manager()->CreateRtpDataChannel( - pc_->configuration()->media_config, rtp_transport, - signaling_thread(), mid, pc_->SrtpRequired(), - pc_->GetCryptoOptions(), &ssrc_generator_)); - } - if (!data_channel_controller()->rtp_data_channel()) { - return false; - } - data_channel_controller()->rtp_data_channel()->SignalSentPacket().connect( - pc_, &PeerConnection::OnSentPacket_w); - data_channel_controller()->rtp_data_channel()->SetRtpTransport( - rtp_transport); - SetHavePendingRtpDataChannel(); - return true; + if (!pc_->network_thread()->Invoke(RTC_FROM_HERE, [this, &mid] { + RTC_DCHECK_RUN_ON(pc_->network_thread()); + return pc_->SetupDataChannelTransport_n(mid); + })) { + return false; } - return false; + // TODO(tommi): Is this necessary? SetupDataChannelTransport_n() above + // will have queued up updating the transport name on the signaling thread + // and could update the mid at the same time. This here is synchronous + // though, but it changes the state of PeerConnection and makes it be + // out of sync (transport name not set while the mid is set). + pc_->SetSctpDataMid(mid); + return true; } void SdpOfferAnswerHandler::DestroyTransceiverChannel( rtc::scoped_refptr> transceiver) { RTC_DCHECK(transceiver); + RTC_LOG_THREAD_BLOCK_COUNT(); + + // TODO(tommi): We're currently on the signaling thread. + // There are multiple hops to the worker ahead. + // Consider if we can make the call to SetChannel() on the worker thread + // (and require that to be the context it's always called in) and also + // call DestroyChannelInterface there, since it also needs to hop to the + // worker. cricket::ChannelInterface* channel = transceiver->internal()->channel(); + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); if (channel) { + // TODO(tommi): VideoRtpReceiver::SetMediaChannel blocks and jumps to the + // worker thread. 
When being set to nullptr, there are additional + // blocking calls to e.g. ClearRecordableEncodedFrameCallback which triggers + // another blocking call or Stop() for video channels. + // The channel object also needs to be de-initialized on the network thread + // so if ownership of the channel object lies with the transceiver, we could + // un-set the channel pointer and uninitialize/destruct the channel object + // at the same time, rather than in separate steps. transceiver->internal()->SetChannel(nullptr); + // TODO(tommi): All channel objects end up getting deleted on the + // worker thread (ideally should be on the network thread but the + // MediaChannel objects are tied to the worker. Can the teardown be done + // asynchronously across the threads rather than blocking? DestroyChannelInterface(channel); } } void SdpOfferAnswerHandler::DestroyDataChannelTransport() { RTC_DCHECK_RUN_ON(signaling_thread()); - if (data_channel_controller()->rtp_data_channel()) { - data_channel_controller()->OnTransportChannelClosed(); - DestroyChannelInterface(data_channel_controller()->rtp_data_channel()); - data_channel_controller()->set_rtp_data_channel(nullptr); - } + const bool has_sctp = pc_->sctp_mid().has_value(); - // Note: Cannot use rtc::Bind to create a functor to invoke because it will - // grab a reference to this PeerConnection. If this is called from the - // PeerConnection destructor, the RefCountedObject vtable will have already - // been destroyed (since it is a subclass of PeerConnection) and using - // rtc::Bind will cause "Pure virtual function called" error to appear. 
- - if (pc_->sctp_mid()) { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + if (has_sctp) data_channel_controller()->OnTransportChannelClosed(); - pc_->network_thread()->Invoke(RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(pc_->network_thread()); - pc_->TeardownDataChannelTransport_n(); - }); + + pc_->network_thread()->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(pc_->network_thread()); + pc_->TeardownDataChannelTransport_n(); + }); + + if (has_sctp) pc_->ResetSctpDataMid(); - } } void SdpOfferAnswerHandler::DestroyChannelInterface( cricket::ChannelInterface* channel) { + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(channel_manager()->media_engine()); + RTC_DCHECK(channel); + // TODO(bugs.webrtc.org/11992): All the below methods should be called on the // worker thread. (they switch internally anyway). Change // DestroyChannelInterface to either be called on the worker thread, or do // this asynchronously on the worker. - RTC_DCHECK(channel); + RTC_LOG_THREAD_BLOCK_COUNT(); + switch (channel->media_type()) { case cricket::MEDIA_TYPE_AUDIO: channel_manager()->DestroyVoiceChannel( @@ -4766,13 +4655,19 @@ void SdpOfferAnswerHandler::DestroyChannelInterface( static_cast(channel)); break; case cricket::MEDIA_TYPE_DATA: - channel_manager()->DestroyRtpDataChannel( - static_cast(channel)); + RTC_NOTREACHED() + << "Trying to destroy datachannel through DestroyChannelInterface"; break; default: RTC_NOTREACHED() << "Unknown media type: " << channel->media_type(); break; } + + // TODO(tommi): Figure out why we can get 2 blocking calls when running + // PeerConnectionCryptoTest.CreateAnswerWithDifferentSslRoles. 
+ // and 3 when running + // PeerConnectionCryptoTest.CreateAnswerWithDifferentSslRoles + // RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); } void SdpOfferAnswerHandler::DestroyAllChannels() { @@ -4780,18 +4675,25 @@ void SdpOfferAnswerHandler::DestroyAllChannels() { if (!transceivers()) { return; } + + RTC_LOG_THREAD_BLOCK_COUNT(); + // Destroy video channels first since they may have a pointer to a voice // channel. - for (const auto& transceiver : transceivers()->List()) { + auto list = transceivers()->List(); + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); + + for (const auto& transceiver : list) { if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { DestroyTransceiverChannel(transceiver); } } - for (const auto& transceiver : transceivers()->List()) { + for (const auto& transceiver : list) { if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { DestroyTransceiverChannel(transceiver); } } + DestroyDataChannelTransport(); } @@ -4870,8 +4772,6 @@ SdpOfferAnswerHandler::GetMediaDescriptionOptionsForActiveData( cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, RtpTransceiverDirection::kSendRecv, /*stopped=*/false); - AddRtpDataChannelOptions(*(data_channel_controller()->rtp_data_channels()), - &options); return options; } @@ -4882,30 +4782,13 @@ SdpOfferAnswerHandler::GetMediaDescriptionOptionsForRejectedData( cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, RtpTransceiverDirection::kInactive, /*stopped=*/true); - AddRtpDataChannelOptions(*(data_channel_controller()->rtp_data_channels()), - &options); return options; } -const std::string SdpOfferAnswerHandler::GetTransportName( - const std::string& content_name) { - RTC_DCHECK_RUN_ON(signaling_thread()); - cricket::ChannelInterface* channel = pc_->GetChannel(content_name); - if (channel) { - return channel->transport_name(); - } - if (data_channel_controller()->data_channel_transport()) { - RTC_DCHECK(pc_->sctp_mid()); - if (content_name == *(pc_->sctp_mid())) { - 
return *(pc_->sctp_transport_name()); - } - } - // Return an empty string if failed to retrieve the transport name. - return ""; -} - bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( - cricket::ContentSource source) { + cricket::ContentSource source, + const std::map& + bundle_groups_by_mid) { RTC_DCHECK_RUN_ON(signaling_thread()); // We may need to delete any created default streams and disable creation of // new ones on the basis of payload type. This is needed to avoid SSRC @@ -4918,19 +4801,24 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( const SessionDescriptionInterface* sdesc = (source == cricket::CS_LOCAL ? local_description() : remote_description()); - const cricket::ContentGroup* bundle_group = - sdesc->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); - std::set audio_payload_types; - std::set video_payload_types; - bool pt_demuxing_enabled_audio = true; - bool pt_demuxing_enabled_video = true; + struct PayloadTypes { + std::set audio_payload_types; + std::set video_payload_types; + bool pt_demuxing_enabled_audio = true; + bool pt_demuxing_enabled_video = true; + }; + std::map payload_types_by_bundle; for (auto& content_info : sdesc->description()->contents()) { + auto it = bundle_groups_by_mid.find(content_info.name); + const cricket::ContentGroup* bundle_group = + it != bundle_groups_by_mid.end() ? it->second : nullptr; // If this m= section isn't bundled, it's safe to demux by payload type // since other m= sections using the same payload type will also be using // different transports. 
- if (!bundle_group || !bundle_group->HasContentName(content_info.name)) { + if (!bundle_group) { continue; } + PayloadTypes* payload_types = &payload_types_by_bundle[bundle_group]; if (content_info.rejected || (source == cricket::ContentSource::CS_LOCAL && !RtpTransceiverDirectionHasRecv( @@ -4946,12 +4834,12 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( const cricket::AudioContentDescription* audio_desc = content_info.media_description()->as_audio(); for (const cricket::AudioCodec& audio : audio_desc->codecs()) { - if (audio_payload_types.count(audio.id)) { + if (payload_types->audio_payload_types.count(audio.id)) { // Two m= sections are using the same payload type, thus demuxing // by payload type is not possible. - pt_demuxing_enabled_audio = false; + payload_types->pt_demuxing_enabled_audio = false; } - audio_payload_types.insert(audio.id); + payload_types->audio_payload_types.insert(audio.id); } break; } @@ -4959,12 +4847,12 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( const cricket::VideoContentDescription* video_desc = content_info.media_description()->as_video(); for (const cricket::VideoCodec& video : video_desc->codecs()) { - if (video_payload_types.count(video.id)) { + if (payload_types->video_payload_types.count(video.id)) { // Two m= sections are using the same payload type, thus demuxing // by payload type is not possible. - pt_demuxing_enabled_video = false; + payload_types->pt_demuxing_enabled_video = false; } - video_payload_types.insert(video.id); + payload_types->video_payload_types.insert(video.id); } break; } @@ -4978,8 +4866,8 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( // single Invoke; necessary due to thread guards. 
std::vector> channels_to_update; - for (const auto& transceiver : transceivers()->List()) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + for (const auto& transceiver : transceivers()->ListInternal()) { + cricket::ChannelInterface* channel = transceiver->channel(); const ContentInfo* content = FindMediaSectionForTransceiver(transceiver, sdesc); if (!channel || !content) { @@ -4990,31 +4878,34 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( if (source == cricket::CS_REMOTE) { local_direction = RtpTransceiverDirectionReversed(local_direction); } - channels_to_update.emplace_back(local_direction, - transceiver->internal()->channel()); + channels_to_update.emplace_back(local_direction, transceiver->channel()); } if (channels_to_update.empty()) { return true; } return pc_->worker_thread()->Invoke( - RTC_FROM_HERE, [&channels_to_update, bundle_group, - pt_demuxing_enabled_audio, pt_demuxing_enabled_video]() { + RTC_FROM_HERE, + [&channels_to_update, &bundle_groups_by_mid, &payload_types_by_bundle]() { for (const auto& it : channels_to_update) { RtpTransceiverDirection local_direction = it.first; cricket::ChannelInterface* channel = it.second; cricket::MediaType media_type = channel->media_type(); - bool in_bundle_group = (bundle_group && bundle_group->HasContentName( - channel->content_name())); + auto bundle_it = bundle_groups_by_mid.find(channel->content_name()); + const cricket::ContentGroup* bundle_group = + bundle_it != bundle_groups_by_mid.end() ? 
bundle_it->second + : nullptr; if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) { if (!channel->SetPayloadTypeDemuxingEnabled( - (!in_bundle_group || pt_demuxing_enabled_audio) && + (!bundle_group || payload_types_by_bundle[bundle_group] + .pt_demuxing_enabled_audio) && RtpTransceiverDirectionHasRecv(local_direction))) { return false; } } else if (media_type == cricket::MediaType::MEDIA_TYPE_VIDEO) { if (!channel->SetPayloadTypeDemuxingEnabled( - (!in_bundle_group || pt_demuxing_enabled_video) && + (!bundle_group || payload_types_by_bundle[bundle_group] + .pt_demuxing_enabled_video) && RtpTransceiverDirectionHasRecv(local_direction))) { return false; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h index 43a3dbb5a..1ef124bae 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h @@ -33,10 +33,12 @@ #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" #include "api/transport/data_channel_transport_interface.h" #include "api/turn_customizer.h" +#include "api/uma_metrics.h" #include "api/video/video_bitrate_allocator_factory.h" #include "media/base/media_channel.h" #include "media/base/stream_params.h" @@ -69,7 +71,6 @@ #include "rtc_base/race_checker.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -172,19 +173,6 @@ class SdpOfferAnswerHandler : public SdpStateProvider, absl::optional is_caller(); bool HasNewIceCredentials(); void UpdateNegotiationNeeded(); - void SetHavePendingRtpDataChannel() { - 
RTC_DCHECK_RUN_ON(signaling_thread()); - have_pending_rtp_data_channel_ = true; - } - - // Returns the media section in the given session description that is - // associated with the RtpTransceiver. Returns null if none found or this - // RtpTransceiver is not associated. Logic varies depending on the - // SdpSemantics specified in the configuration. - const cricket::ContentInfo* FindMediaSectionForTransceiver( - rtc::scoped_refptr> - transceiver, - const SessionDescriptionInterface* sdesc) const; // Destroys all BaseChannels and destroys the SCTP data channel, if present. void DestroyAllChannels(); @@ -239,9 +227,13 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // Synchronous implementations of SetLocalDescription/SetRemoteDescription // that return an RTCError instead of invoking a callback. RTCError ApplyLocalDescription( - std::unique_ptr desc); + std::unique_ptr desc, + const std::map& + bundle_groups_by_mid); RTCError ApplyRemoteDescription( - std::unique_ptr desc); + std::unique_ptr desc, + const std::map& + bundle_groups_by_mid); // Implementation of the offer/answer exchange operations. These are chained // onto the |operations_chain_| when the public CreateOffer(), CreateAnswer(), @@ -263,9 +255,12 @@ class SdpOfferAnswerHandler : public SdpStateProvider, void ChangeSignalingState( PeerConnectionInterface::SignalingState signaling_state); - RTCError UpdateSessionState(SdpType type, - cricket::ContentSource source, - const cricket::SessionDescription* description); + RTCError UpdateSessionState( + SdpType type, + cricket::ContentSource source, + const cricket::SessionDescription* description, + const std::map& + bundle_groups_by_mid); bool IsUnifiedPlan() const RTC_RUN_ON(signaling_thread()); @@ -298,9 +293,11 @@ class SdpOfferAnswerHandler : public SdpStateProvider, bool CheckIfNegotiationIsNeeded(); void GenerateNegotiationNeededEvent(); // Helper method which verifies SDP. 
- RTCError ValidateSessionDescription(const SessionDescriptionInterface* sdesc, - cricket::ContentSource source) - RTC_RUN_ON(signaling_thread()); + RTCError ValidateSessionDescription( + const SessionDescriptionInterface* sdesc, + cricket::ContentSource source, + const std::map& + bundle_groups_by_mid) RTC_RUN_ON(signaling_thread()); // Updates the local RtpTransceivers according to the JSEP rules. Called as // part of setting the local/remote description. @@ -308,7 +305,9 @@ class SdpOfferAnswerHandler : public SdpStateProvider, cricket::ContentSource source, const SessionDescriptionInterface& new_session, const SessionDescriptionInterface* old_local_description, - const SessionDescriptionInterface* old_remote_description); + const SessionDescriptionInterface* old_remote_description, + const std::map& + bundle_groups_by_mid); // Associate the given transceiver according to the JSEP rules. RTCErrorOr< @@ -321,14 +320,13 @@ class SdpOfferAnswerHandler : public SdpStateProvider, const cricket::ContentInfo* old_remote_content) RTC_RUN_ON(signaling_thread()); - // If the BUNDLE policy is max-bundle, then we know for sure that all - // transports will be bundled from the start. This method returns the BUNDLE - // group if that's the case, or null if BUNDLE will be negotiated later. An - // error is returned if max-bundle is specified but the session description - // does not have a BUNDLE group. - RTCErrorOr GetEarlyBundleGroup( - const cricket::SessionDescription& desc) const - RTC_RUN_ON(signaling_thread()); + // Returns the media section in the given session description that is + // associated with the RtpTransceiver. Returns null if none found or this + // RtpTransceiver is not associated. Logic varies depending on the + // SdpSemantics specified in the configuration. 
+ const cricket::ContentInfo* FindMediaSectionForTransceiver( + const RtpTransceiver* transceiver, + const SessionDescriptionInterface* sdesc) const; // Either creates or destroys the transceiver's BaseChannel according to the // given media section. @@ -422,7 +420,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // |removed_streams| is the list of streams which no longer have a receiving // track so should be removed. void ProcessRemovalOfRemoteTrack( - rtc::scoped_refptr> + const rtc::scoped_refptr> transceiver, std::vector>* remove_list, std::vector>* removed_streams); @@ -456,13 +454,15 @@ class SdpOfferAnswerHandler : public SdpStateProvider, StreamCollection* new_streams); // Enables media channels to allow sending of media. - // This enables media to flow on all configured audio/video channels and the - // RtpDataChannel. + // This enables media to flow on all configured audio/video channels. void EnableSending(); // Push the media parts of the local or remote session description // down to all of the channels. 
- RTCError PushdownMediaDescription(SdpType type, - cricket::ContentSource source); + RTCError PushdownMediaDescription( + SdpType type, + cricket::ContentSource source, + const std::map& + bundle_groups_by_mid); RTCError PushdownTransportDescription(cricket::ContentSource source, SdpType type); @@ -493,8 +493,6 @@ class SdpOfferAnswerHandler : public SdpStateProvider, bool ReadyToUseRemoteCandidate(const IceCandidateInterface* candidate, const SessionDescriptionInterface* remote_desc, bool* valid); - void ReportRemoteIceCandidateAdded(const cricket::Candidate& candidate) - RTC_RUN_ON(signaling_thread()); RTCErrorOr FindContentInfo( const SessionDescriptionInterface* description, @@ -549,10 +547,12 @@ class SdpOfferAnswerHandler : public SdpStateProvider, cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForRejectedData( const std::string& mid) const; - const std::string GetTransportName(const std::string& content_name); // Based on number of transceivers per media type, enabled or disable // payload type based demuxing in the affected channels. - bool UpdatePayloadTypeDemuxingState(cricket::ContentSource source); + bool UpdatePayloadTypeDemuxingState( + cricket::ContentSource source, + const std::map& + bundle_groups_by_mid); // ================================================================== // Access to pc_ variables @@ -637,13 +637,15 @@ class SdpOfferAnswerHandler : public SdpStateProvider, rtc::scoped_refptr missing_msid_default_stream_ RTC_GUARDED_BY(signaling_thread()); - // Used when rolling back RTP data channels. - bool have_pending_rtp_data_channel_ RTC_GUARDED_BY(signaling_thread()) = - false; - // Updates the error state, signaling if necessary. void SetSessionError(SessionError error, const std::string& error_desc); + // Implements AddIceCandidate without reporting usage, but returns the + // particular success/error value that should be reported (and can be utilized + // for other purposes). 
+ AddIceCandidateResult AddIceCandidateInternal( + const IceCandidateInterface* candidate); + SessionError session_error_ RTC_GUARDED_BY(signaling_thread()) = SessionError::kNone; std::string session_error_desc_ RTC_GUARDED_BY(signaling_thread()); @@ -656,8 +658,9 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // specified by the user (or by the remote party). // The generator is not used directly, instead it is passed on to the // channel manager and the session description factory. - rtc::UniqueRandomIdGenerator ssrc_generator_ - RTC_GUARDED_BY(signaling_thread()); + // TODO(bugs.webrtc.org/12666): This variable is used from both the signaling + // and worker threads. See if we can't restrict usage to a single thread. + rtc::UniqueRandomIdGenerator ssrc_generator_; // A video bitrate allocator factory. // This can be injected using the PeerConnectionDependencies, diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc index 7ebaffda8..107431627 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc @@ -10,12 +10,14 @@ #include "pc/sdp_serializer.h" +#include +#include #include #include #include #include "absl/algorithm/container.h" -#include "api/jsep.h" +#include "absl/types/optional.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/checks.h" #include "rtc_base/string_encode.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.h b/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.h index 476ebafbd..1223cd1af 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.h @@ -17,6 +17,7 @@ #include "api/rtc_error.h" #include "media/base/rid_description.h" #include "pc/session_description.h" +#include "pc/simulcast_description.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc 
b/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc index f5385a652..b750b04a4 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc @@ -11,10 +11,10 @@ #include "pc/sdp_utils.h" #include -#include #include #include "api/jsep_session_description.h" +#include "rtc_base/checks.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.h b/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.h index fc4b289f9..effd7cd03 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.h @@ -16,6 +16,7 @@ #include #include "api/jsep.h" +#include "p2p/base/transport_info.h" #include "pc/session_description.h" #include "rtc_base/system/rtc_export.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/session_description.cc b/TMessagesProj/jni/voip/webrtc/pc/session_description.cc index 87d666727..35b732d64 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/session_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/session_description.cc @@ -10,12 +10,10 @@ #include "pc/session_description.h" -#include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" -#include "pc/media_protocol_names.h" #include "rtc_base/checks.h" namespace cricket { @@ -261,6 +259,17 @@ const ContentGroup* SessionDescription::GetGroupByName( return NULL; } +std::vector SessionDescription::GetGroupsByName( + const std::string& name) const { + std::vector content_groups; + for (const ContentGroup& content_group : content_groups_) { + if (content_group.semantics() == name) { + content_groups.push_back(&content_group); + } + } + return content_groups; +} + ContentInfo::~ContentInfo() { } diff --git a/TMessagesProj/jni/voip/webrtc/pc/session_description.h b/TMessagesProj/jni/voip/webrtc/pc/session_description.h index 52a3a1fe0..96aa99675 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/session_description.h +++ b/TMessagesProj/jni/voip/webrtc/pc/session_description.h @@ -14,6 +14,7 @@ 
#include #include +#include #include #include #include @@ -24,15 +25,18 @@ #include "api/crypto_params.h" #include "api/media_types.h" #include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" +#include "media/base/codec.h" #include "media/base/media_channel.h" #include "media/base/media_constants.h" +#include "media/base/rid_description.h" #include "media/base/stream_params.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" #include "pc/media_protocol_names.h" #include "pc/simulcast_description.h" -#include "rtc_base/deprecation.h" +#include "rtc_base/checks.h" #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" @@ -40,7 +44,6 @@ namespace cricket { typedef std::vector AudioCodecs; typedef std::vector VideoCodecs; -typedef std::vector RtpDataCodecs; typedef std::vector CryptoParamsVec; typedef std::vector RtpHeaderExtensions; @@ -56,7 +59,6 @@ const int kAutoBandwidth = -1; class AudioContentDescription; class VideoContentDescription; -class RtpDataContentDescription; class SctpDataContentDescription; class UnsupportedContentDescription; @@ -79,11 +81,6 @@ class MediaContentDescription { virtual VideoContentDescription* as_video() { return nullptr; } virtual const VideoContentDescription* as_video() const { return nullptr; } - virtual RtpDataContentDescription* as_rtp_data() { return nullptr; } - virtual const RtpDataContentDescription* as_rtp_data() const { - return nullptr; - } - virtual SctpDataContentDescription* as_sctp() { return nullptr; } virtual const SctpDataContentDescription* as_sctp() const { return nullptr; } @@ -272,10 +269,7 @@ class MediaContentDescription { webrtc::RtpTransceiverDirection direction_ = webrtc::RtpTransceiverDirection::kSendRecv; rtc::SocketAddress connection_address_; - // Mixed one- and two-byte header not included in offer on media level or - // session level, but we will respond that we support it. 
The plan is to add - // it to our offer on session level. See todo in SessionDescription. - ExtmapAllowMixed extmap_allow_mixed_enum_ = kNo; + ExtmapAllowMixed extmap_allow_mixed_enum_ = kMedia; SimulcastDescription simulcast_; std::vector receive_rids_; @@ -360,20 +354,6 @@ class VideoContentDescription : public MediaContentDescriptionImpl { } }; -class RtpDataContentDescription - : public MediaContentDescriptionImpl { - public: - RtpDataContentDescription() {} - MediaType type() const override { return MEDIA_TYPE_DATA; } - RtpDataContentDescription* as_rtp_data() override { return this; } - const RtpDataContentDescription* as_rtp_data() const override { return this; } - - private: - RtpDataContentDescription* CloneInternal() const override { - return new RtpDataContentDescription(*this); - } -}; - class SctpDataContentDescription : public MediaContentDescription { public: SctpDataContentDescription() {} @@ -587,6 +567,8 @@ class SessionDescription { // Group accessors. const ContentGroups& groups() const { return content_groups_; } const ContentGroup* GetGroupByName(const std::string& name) const; + std::vector GetGroupsByName( + const std::string& name) const; bool HasGroup(const std::string& name) const; // Group mutators. @@ -633,12 +615,7 @@ class SessionDescription { // Default to what Plan B would do. // TODO(bugs.webrtc.org/8530): Change default to kMsidSignalingMediaSection. int msid_signaling_ = kMsidSignalingSsrcAttribute; - // TODO(webrtc:9985): Activate mixed one- and two-byte header extension in - // offer at session level. It's currently not included in offer by default - // because clients prior to https://bugs.webrtc.org/9712 cannot parse this - // correctly. If it's included in offer to us we will respond that we support - // it. 
- bool extmap_allow_mixed_ = false; + bool extmap_allow_mixed_ = true; }; // Indicates whether a session description was sent by the local client or diff --git a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc index 8b510feba..0ae3e2074 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc @@ -10,8 +10,6 @@ #include "pc/simulcast_description.h" -#include - #include "rtc_base/checks.h" namespace cricket { diff --git a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h index 1337a9ce4..f7ae28837 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h +++ b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h @@ -11,6 +11,8 @@ #ifndef PC_SIMULCAST_DESCRIPTION_H_ #define PC_SIMULCAST_DESCRIPTION_H_ +#include + #include #include diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc index bd48eac83..2f8d06cbe 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc @@ -11,8 +11,8 @@ #include "pc/srtp_filter.h" #include - #include +#include #include "absl/strings/match.h" #include "rtc_base/logging.h" @@ -210,9 +210,9 @@ bool SrtpFilter::ApplySendParams(const CryptoParams& send_params) { int send_key_len, send_salt_len; if (!rtc::GetSrtpKeyAndSaltLengths(*send_cipher_suite_, &send_key_len, &send_salt_len)) { - RTC_LOG(LS_WARNING) << "Could not get lengths for crypto suite(s):" - " send cipher_suite " - << send_params.cipher_suite; + RTC_LOG(LS_ERROR) << "Could not get lengths for crypto suite(s):" + " send cipher_suite " + << send_params.cipher_suite; return false; } @@ -241,9 +241,9 @@ bool SrtpFilter::ApplyRecvParams(const CryptoParams& recv_params) { int recv_key_len, recv_salt_len; if 
(!rtc::GetSrtpKeyAndSaltLengths(*recv_cipher_suite_, &recv_key_len, &recv_salt_len)) { - RTC_LOG(LS_WARNING) << "Could not get lengths for crypto suite(s):" - " recv cipher_suite " - << recv_params.cipher_suite; + RTC_LOG(LS_ERROR) << "Could not get lengths for crypto suite(s):" + " recv cipher_suite " + << recv_params.cipher_suite; return false; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h index fc60a356f..f1e164936 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h @@ -11,6 +11,9 @@ #ifndef PC_SRTP_FILTER_H_ #define PC_SRTP_FILTER_H_ +#include +#include + #include #include #include @@ -21,11 +24,11 @@ #include "api/array_view.h" #include "api/crypto_params.h" #include "api/jsep.h" +#include "api/sequence_checker.h" #include "pc/session_description.h" #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/thread_checker.h" // Forward declaration to avoid pulling in libsrtp headers here struct srtp_event_data_t; diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc index 3aa488003..45f6b67d1 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc @@ -10,11 +10,16 @@ #include "pc/srtp_session.h" +#include + #include "absl/base/attributes.h" #include "media/base/rtp_utils.h" #include "pc/external_hmac.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "third_party/libsrtp/include/srtp.h" #include "third_party/libsrtp/include/srtp_priv.h" @@ -26,7 +31,9 @@ namespace cricket { // in srtp.h. 
constexpr int kSrtpErrorCodeBoundary = 28; -SrtpSession::SrtpSession() {} +SrtpSession::SrtpSession() { + dump_plain_rtp_ = webrtc::field_trial::IsEnabled("WebRTC-Debugging-RtpDump"); +} SrtpSession::~SrtpSession() { if (session_) { @@ -73,12 +80,19 @@ bool SrtpSession::ProtectRtp(void* p, int in_len, int max_len, int* out_len) { return false; } + // Note: the need_len differs from the libsrtp recommendatіon to ensure + // SRTP_MAX_TRAILER_LEN bytes of free space after the data. WebRTC + // never includes a MKI, therefore the amount of bytes added by the + // srtp_protect call is known in advance and depends on the cipher suite. int need_len = in_len + rtp_auth_tag_len_; // NOLINT if (max_len < need_len) { RTC_LOG(LS_WARNING) << "Failed to protect SRTP packet: The buffer length " << max_len << " is less than the needed " << need_len; return false; } + if (dump_plain_rtp_) { + DumpPacket(p, in_len, /*outbound=*/true); + } *out_len = in_len; int err = srtp_protect(session_, p, out_len); @@ -112,12 +126,19 @@ bool SrtpSession::ProtectRtcp(void* p, int in_len, int max_len, int* out_len) { return false; } + // Note: the need_len differs from the libsrtp recommendatіon to ensure + // SRTP_MAX_TRAILER_LEN bytes of free space after the data. WebRTC + // never includes a MKI, therefore the amount of bytes added by the + // srtp_protect_rtp call is known in advance and depends on the cipher suite. 
int need_len = in_len + sizeof(uint32_t) + rtcp_auth_tag_len_; // NOLINT if (max_len < need_len) { RTC_LOG(LS_WARNING) << "Failed to protect SRTCP packet: The buffer length " << max_len << " is less than the needed " << need_len; return false; } + if (dump_plain_rtp_) { + DumpPacket(p, in_len, /*outbound=*/true); + } *out_len = in_len; int err = srtp_protect_rtcp(session_, p, out_len); @@ -151,6 +172,9 @@ bool SrtpSession::UnprotectRtp(void* p, int in_len, int* out_len) { static_cast(err), kSrtpErrorCodeBoundary); return false; } + if (dump_plain_rtp_) { + DumpPacket(p, *out_len, /*outbound=*/false); + } return true; } @@ -169,6 +193,9 @@ bool SrtpSession::UnprotectRtcp(void* p, int in_len, int* out_len) { static_cast(err), kSrtpErrorCodeBoundary); return false; } + if (dump_plain_rtp_) { + DumpPacket(p, *out_len, /*outbound=*/false); + } return true; } @@ -242,42 +269,18 @@ bool SrtpSession::DoSetKey(int type, srtp_policy_t policy; memset(&policy, 0, sizeof(policy)); - if (cs == rtc::SRTP_AES128_CM_SHA1_80) { - srtp_crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtp); - srtp_crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtcp); - } else if (cs == rtc::SRTP_AES128_CM_SHA1_32) { - // RTP HMAC is shortened to 32 bits, but RTCP remains 80 bits. - srtp_crypto_policy_set_aes_cm_128_hmac_sha1_32(&policy.rtp); - srtp_crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtcp); - } else if (cs == rtc::SRTP_AEAD_AES_128_GCM) { - srtp_crypto_policy_set_aes_gcm_128_16_auth(&policy.rtp); - srtp_crypto_policy_set_aes_gcm_128_16_auth(&policy.rtcp); - } else if (cs == rtc::SRTP_AEAD_AES_256_GCM) { - srtp_crypto_policy_set_aes_gcm_256_16_auth(&policy.rtp); - srtp_crypto_policy_set_aes_gcm_256_16_auth(&policy.rtcp); - } else { - RTC_LOG(LS_WARNING) << "Failed to " << (session_ ? 
"update" : "create") - << " SRTP session: unsupported cipher_suite " << cs; + if (!(srtp_crypto_policy_set_from_profile_for_rtp( + &policy.rtp, (srtp_profile_t)cs) == srtp_err_status_ok && + srtp_crypto_policy_set_from_profile_for_rtcp( + &policy.rtcp, (srtp_profile_t)cs) == srtp_err_status_ok)) { + RTC_LOG(LS_ERROR) << "Failed to " << (session_ ? "update" : "create") + << " SRTP session: unsupported cipher_suite " << cs; return false; } - int expected_key_len; - int expected_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(cs, &expected_key_len, - &expected_salt_len)) { - // This should never happen. - RTC_NOTREACHED(); - RTC_LOG(LS_WARNING) - << "Failed to " << (session_ ? "update" : "create") - << " SRTP session: unsupported cipher_suite without length information" - << cs; - return false; - } - - if (!key || - len != static_cast(expected_key_len + expected_salt_len)) { - RTC_LOG(LS_WARNING) << "Failed to " << (session_ ? "update" : "create") - << " SRTP session: invalid key"; + if (!key || len != static_cast(policy.rtp.cipher_key_len)) { + RTC_LOG(LS_ERROR) << "Failed to " << (session_ ? "update" : "create") + << " SRTP session: invalid key"; return false; } @@ -444,4 +447,26 @@ void SrtpSession::HandleEventThunk(srtp_event_data_t* ev) { } } +// Logs the unencrypted packet in text2pcap format. This can then be +// extracted by searching for RTP_DUMP +// grep RTP_DUMP chrome_debug.log > in.txt +// and converted to pcap using +// text2pcap -D -u 1000,2000 -t %H:%M:%S. in.txt out.pcap +// The resulting file can be replayed using the WebRTC video_replay tool and +// be inspected in Wireshark using the RTP, VP8 and H264 dissectors. 
+void SrtpSession::DumpPacket(const void* buf, int len, bool outbound) { + int64_t time_of_day = rtc::TimeUTCMillis() % (24 * 3600 * 1000); + int64_t hours = time_of_day / (3600 * 1000); + int64_t minutes = (time_of_day / (60 * 1000)) % 60; + int64_t seconds = (time_of_day / 1000) % 60; + int64_t millis = time_of_day % 1000; + RTC_LOG(LS_VERBOSE) << "\n" << (outbound ? "O" : "I") << " " + << std::setfill('0') << std::setw(2) << hours << ":" + << std::setfill('0') << std::setw(2) << minutes << ":" + << std::setfill('0') << std::setw(2) << seconds << "." + << std::setfill('0') << std::setw(3) << millis << " " + << "000000 " << rtc::hex_encode_with_delimiter((const char *)buf, len, ' ') + << " # RTP_DUMP"; +} + } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h index 84445965b..039641248 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h @@ -14,9 +14,9 @@ #include #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_checker.h" // Forward declaration to avoid pulling in libsrtp headers here struct srtp_event_data_t; @@ -109,6 +109,10 @@ class SrtpSession { // Returns send stream current packet index from srtp db. bool GetSendStreamPacketIndex(void* data, int in_len, int64_t* index); + // Writes unencrypted packets in text2pcap format to the log file + // for debugging. + void DumpPacket(const void* buf, int len, bool outbound); + // These methods are responsible for initializing libsrtp (if the usage count // is incremented from 0 to 1) or deinitializing it (when decremented from 1 // to 0). 
@@ -120,16 +124,23 @@ class SrtpSession { void HandleEvent(const srtp_event_data_t* ev); static void HandleEventThunk(srtp_event_data_t* ev); - rtc::ThreadChecker thread_checker_; + webrtc::SequenceChecker thread_checker_; srtp_ctx_t_* session_ = nullptr; + + // Overhead of the SRTP auth tag for RTP and RTCP in bytes. + // Depends on the cipher suite used and is usually the same with the exception + // of the CS_AES_CM_128_HMAC_SHA1_32 cipher suite. The additional four bytes + // required for RTCP protection are not included. int rtp_auth_tag_len_ = 0; int rtcp_auth_tag_len_ = 0; + bool inited_ = false; static webrtc::GlobalMutex lock_; int last_send_seq_num_ = -1; bool external_auth_active_ = false; bool external_auth_enabled_ = false; int decryption_failure_count_ = 0; + bool dump_plain_rtp_ = false; RTC_DISALLOW_COPY_AND_ASSIGN(SrtpSession); }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.cc index 71a58d085..ee073497e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_transport.cc @@ -10,7 +10,6 @@ #include "pc/srtp_transport.h" -#include #include #include @@ -128,7 +127,7 @@ bool SrtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet, rtc::PacketOptions updated_options = options; TRACE_EVENT0("webrtc", "SRTP Encode"); bool res; - uint8_t* data = packet->data(); + uint8_t* data = packet->MutableData(); int len = rtc::checked_cast(packet->size()); // If ENABLE_EXTERNAL_AUTH flag is on then packet authentication is not done // inside libsrtp for a RTP packet. 
A external HMAC module will be writing @@ -185,7 +184,7 @@ bool SrtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, } TRACE_EVENT0("webrtc", "SRTP Encode"); - uint8_t* data = packet->data(); + uint8_t* data = packet->MutableData(); int len = rtc::checked_cast(packet->size()); if (!ProtectRtcp(data, len, static_cast(packet->capacity()), &len)) { int type = -1; @@ -208,7 +207,7 @@ void SrtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet, return; } TRACE_EVENT0("webrtc", "SRTP Decode"); - char* data = packet.data(); + char* data = packet.MutableData(); int len = rtc::checked_cast(packet.size()); if (!UnprotectRtp(data, len, &len)) { int seq_num = -1; @@ -240,7 +239,7 @@ void SrtpTransport::OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet, return; } TRACE_EVENT0("webrtc", "SRTP Decode"); - char* data = packet.data(); + char* data = packet.MutableData(); int len = rtc::checked_cast(packet.size()); if (!UnprotectRtcp(data, len, &len)) { int type = -1; @@ -268,7 +267,7 @@ void SrtpTransport::OnNetworkRouteChanged( void SrtpTransport::OnWritableState( rtc::PacketTransportInternal* packet_transport) { - SignalWritableState(IsWritable(/*rtcp=*/true) && IsWritable(/*rtcp=*/true)); + SignalWritableState(IsWritable(/*rtcp=*/false) && IsWritable(/*rtcp=*/true)); } bool SrtpTransport::SetRtpParams(int send_cs, diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc index 991cc4eb2..6d4c224cb 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc @@ -10,14 +10,46 @@ #include "pc/stats_collector.h" +#include +#include + #include #include #include #include +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/candidate.h" +#include "api/data_channel_interface.h" +#include "api/media_types.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include 
"api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/video/video_content_type.h" +#include "api/video/video_timing.h" +#include "call/call.h" +#include "media/base/media_channel.h" +#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_constants.h" #include "pc/channel.h" +#include "pc/channel_interface.h" +#include "pc/data_channel_utils.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_transceiver.h" +#include "pc/transport_stats.h" #include "rtc_base/checks.h" -#include "rtc_base/third_party/base64/base64.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" #include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -508,7 +540,7 @@ StatsCollector::StatsCollector(PeerConnectionInternal* pc) } StatsCollector::~StatsCollector() { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); } // Wallclock time in ms. @@ -519,7 +551,7 @@ double StatsCollector::GetTimeNow() { // Adds a MediaStream with tracks that can be used as a |selector| in a call // to GetStats. 
void StatsCollector::AddStream(MediaStreamInterface* stream) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); RTC_DCHECK(stream != NULL); CreateTrackReports(stream->GetAudioTracks(), &reports_, @@ -542,7 +574,7 @@ void StatsCollector::AddTrack(MediaStreamTrackInterface* track) { void StatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); RTC_DCHECK(audio_track != NULL); #if RTC_DCHECK_IS_ON for (const auto& track : local_audio_tracks_) @@ -576,7 +608,7 @@ void StatsCollector::RemoveLocalAudioTrack(AudioTrackInterface* audio_track, void StatsCollector::GetStats(MediaStreamTrackInterface* track, StatsReports* reports) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); RTC_DCHECK(reports != NULL); RTC_DCHECK(reports->empty()); @@ -616,26 +648,33 @@ void StatsCollector::GetStats(MediaStreamTrackInterface* track, void StatsCollector::UpdateStats( PeerConnectionInterface::StatsOutputLevel level) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); - double time_now = GetTimeNow(); - // Calls to UpdateStats() that occur less than kMinGatherStatsPeriod number of - // ms apart will be ignored. - const double kMinGatherStatsPeriod = 50; - if (stats_gathering_started_ != 0 && - stats_gathering_started_ + kMinGatherStatsPeriod > time_now) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + // Calls to UpdateStats() that occur less than kMinGatherStatsPeriodMs apart + // will be ignored. Using a monotonic clock specifically for this, while using + // a UTC clock for the reports themselves. 
+ const int64_t kMinGatherStatsPeriodMs = 50; + int64_t cache_now_ms = rtc::TimeMillis(); + if (cache_timestamp_ms_ != 0 && + cache_timestamp_ms_ + kMinGatherStatsPeriodMs > cache_now_ms) { return; } - stats_gathering_started_ = time_now; + cache_timestamp_ms_ = cache_now_ms; + stats_gathering_started_ = GetTimeNow(); + + // TODO(tommi): ExtractSessionInfo now has a single hop to the network thread + // to fetch stats, then applies them on the signaling thread. See if we need + // to do this synchronously or if updating the stats without blocking is safe. + std::map transport_names_by_mid = + ExtractSessionInfo(); // TODO(tommi): All of these hop over to the worker thread to fetch - // information. We could use an AsyncInvoker to run all of these and post + // information. We could post a task to run all of these and post // the information back to the signaling thread where we can create and // update stats reports. That would also clean up the threading story a bit // since we'd be creating/updating the stats report objects consistently on // the same thread (this class has no locks right now). - ExtractSessionInfo(); ExtractBweInfo(); - ExtractMediaInfo(); + ExtractMediaInfo(transport_names_by_mid); ExtractSenderInfo(); ExtractDataInfo(); UpdateTrackReports(); @@ -646,7 +685,7 @@ StatsReport* StatsCollector::PrepareReport(bool local, const std::string& track_id, const StatsReport::Id& transport_id, StatsReport::Direction direction) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); StatsReport::Id id(StatsReport::NewIdWithDirection( local ? 
StatsReport::kStatsReportTypeSsrc : StatsReport::kStatsReportTypeRemoteSsrc, @@ -669,7 +708,7 @@ StatsReport* StatsCollector::PrepareReport(bool local, } StatsReport* StatsCollector::PrepareADMReport() { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); StatsReport::Id id(StatsReport::NewTypedId( StatsReport::kStatsReportTypeSession, pc_->session_id())); StatsReport* report = reports_.FindOrAddNew(id); @@ -683,7 +722,7 @@ bool StatsCollector::IsValidTrack(const std::string& track_id) { StatsReport* StatsCollector::AddCertificateReports( std::unique_ptr cert_stats) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); StatsReport* first_report = nullptr; StatsReport* prev_report = nullptr; @@ -809,35 +848,56 @@ StatsReport* StatsCollector::AddCandidateReport( return report; } -void StatsCollector::ExtractSessionInfo() { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); +std::map StatsCollector::ExtractSessionInfo() { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - // Extract information from the base session. 
- StatsReport::Id id(StatsReport::NewTypedId( - StatsReport::kStatsReportTypeSession, pc_->session_id())); - StatsReport* report = reports_.ReplaceOrAddNew(id); - report->set_timestamp(stats_gathering_started_); - report->AddBoolean(StatsReport::kStatsValueNameInitiator, - pc_->initial_offerer()); + SessionStats stats; + auto transceivers = pc_->GetTransceiversInternal(); + pc_->network_thread()->Invoke( + RTC_FROM_HERE, [&, sctp_transport_name = pc_->sctp_transport_name(), + sctp_mid = pc_->sctp_mid()]() mutable { + stats = ExtractSessionInfo_n( + transceivers, std::move(sctp_transport_name), std::move(sctp_mid)); + }); - cricket::CandidateStatsList pooled_candidate_stats_list = - pc_->GetPooledCandidateStats(); + ExtractSessionInfo_s(stats); - for (const cricket::CandidateStats& stats : pooled_candidate_stats_list) { - AddCandidateReport(stats, true); + return std::move(stats.transport_names_by_mid); +} + +StatsCollector::SessionStats StatsCollector::ExtractSessionInfo_n( + const std::vector>>& transceivers, + absl::optional sctp_transport_name, + absl::optional sctp_mid) { + RTC_DCHECK_RUN_ON(pc_->network_thread()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + SessionStats stats; + stats.candidate_stats = pc_->GetPooledCandidateStats(); + for (auto& transceiver : transceivers) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (channel) { + stats.transport_names_by_mid[channel->content_name()] = + channel->transport_name(); + } + } + + if (sctp_transport_name) { + RTC_DCHECK(sctp_mid); + stats.transport_names_by_mid[*sctp_mid] = *sctp_transport_name; } std::set transport_names; - for (const auto& entry : pc_->GetTransportNamesByMid()) { + for (const auto& entry : stats.transport_names_by_mid) { transport_names.insert(entry.second); } std::map transport_stats_by_name = pc_->GetTransportStatsByNames(transport_names); - for (const auto& entry : transport_stats_by_name) { - const std::string& transport_name = 
entry.first; - const cricket::TransportStats& transport_stats = entry.second; + for (auto& entry : transport_stats_by_name) { + stats.transport_stats.emplace_back(entry.first, std::move(entry.second)); + TransportStats& transport = stats.transport_stats.back(); // Attempt to get a copy of the certificates from the transport and // expose them in stats reports. All channels in a transport share the @@ -845,24 +905,59 @@ void StatsCollector::ExtractSessionInfo() { // StatsReport::Id local_cert_report_id, remote_cert_report_id; rtc::scoped_refptr certificate; - if (pc_->GetLocalCertificate(transport_name, &certificate)) { - StatsReport* r = AddCertificateReports( - certificate->GetSSLCertificateChain().GetStats()); + if (pc_->GetLocalCertificate(transport.name, &certificate)) { + transport.local_cert_stats = + certificate->GetSSLCertificateChain().GetStats(); + } + + std::unique_ptr remote_cert_chain = + pc_->GetRemoteSSLCertChain(transport.name); + if (remote_cert_chain) { + transport.remote_cert_stats = remote_cert_chain->GetStats(); + } + } + + return stats; +} + +void StatsCollector::ExtractSessionInfo_s(SessionStats& session_stats) { + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + + StatsReport::Id id(StatsReport::NewTypedId( + StatsReport::kStatsReportTypeSession, pc_->session_id())); + StatsReport* report = reports_.ReplaceOrAddNew(id); + report->set_timestamp(stats_gathering_started_); + report->AddBoolean(StatsReport::kStatsValueNameInitiator, + pc_->initial_offerer()); + + for (const cricket::CandidateStats& stats : session_stats.candidate_stats) { + AddCandidateReport(stats, true); + } + + for (auto& transport : session_stats.transport_stats) { + // Attempt to get a copy of the certificates from the transport and + // expose them in stats reports. All channels in a transport share the + // same local and remote certificates. 
+ // + StatsReport::Id local_cert_report_id, remote_cert_report_id; + if (transport.local_cert_stats) { + StatsReport* r = + AddCertificateReports(std::move(transport.local_cert_stats)); if (r) local_cert_report_id = r->id(); } - std::unique_ptr remote_cert_chain = - pc_->GetRemoteSSLCertChain(transport_name); - if (remote_cert_chain) { - StatsReport* r = AddCertificateReports(remote_cert_chain->GetStats()); + if (transport.remote_cert_stats) { + StatsReport* r = + AddCertificateReports(std::move(transport.remote_cert_stats)); if (r) remote_cert_report_id = r->id(); } - for (const auto& channel_iter : transport_stats.channel_stats) { + for (const auto& channel_iter : transport.stats.channel_stats) { StatsReport::Id id( - StatsReport::NewComponentId(transport_name, channel_iter.component)); + StatsReport::NewComponentId(transport.name, channel_iter.component)); StatsReport* channel_report = reports_.ReplaceOrAddNew(id); channel_report->set_timestamp(stats_gathering_started_); channel_report->AddInt(StatsReport::kStatsValueNameComponent, @@ -905,7 +1000,7 @@ void StatsCollector::ExtractSessionInfo() { for (const cricket::ConnectionInfo& info : channel_iter.ice_transport_stats.connection_infos) { StatsReport* connection_report = AddConnectionInfoReport( - transport_name, channel_iter.component, connection_id++, + transport.name, channel_iter.component, connection_id++, channel_report->id(), info); if (info.best_connection) { channel_report->AddId( @@ -918,7 +1013,7 @@ void StatsCollector::ExtractSessionInfo() { } void StatsCollector::ExtractBweInfo() { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); if (pc_->signaling_state() == PeerConnectionInterface::kClosed) return; @@ -931,16 +1026,25 @@ void StatsCollector::ExtractBweInfo() { // Fill in target encoder bitrate, actual encoder bitrate, rtx bitrate, etc. // TODO(holmer): Also fill this in for audio. 
- for (const auto& transceiver : pc_->GetTransceiversInternal()) { + auto transceivers = pc_->GetTransceiversInternal(); + std::vector video_channels; + for (const auto& transceiver : transceivers) { if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) { continue; } auto* video_channel = static_cast(transceiver->internal()->channel()); - if (!video_channel) { - continue; + if (video_channel) { + video_channels.push_back(video_channel); } - video_channel->FillBitrateInfo(&bwe_info); + } + + if (!video_channels.empty()) { + pc_->worker_thread()->Invoke(RTC_FROM_HERE, [&] { + for (const auto& channel : video_channels) { + channel->FillBitrateInfo(&bwe_info); + } + }); } StatsReport::Id report_id(StatsReport::NewBandwidthEstimationId()); @@ -1053,14 +1157,16 @@ std::unique_ptr CreateMediaChannelStatsGatherer( } // namespace -void StatsCollector::ExtractMediaInfo() { +void StatsCollector::ExtractMediaInfo( + const std::map& transport_names_by_mid) { RTC_DCHECK_RUN_ON(pc_->signaling_thread()); std::vector> gatherers; + auto transceivers = pc_->GetTransceiversInternal(); { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - for (const auto& transceiver : pc_->GetTransceiversInternal()) { + for (const auto& transceiver : transceivers) { cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (!channel) { continue; @@ -1068,22 +1174,40 @@ void StatsCollector::ExtractMediaInfo() { std::unique_ptr gatherer = CreateMediaChannelStatsGatherer(channel->media_channel()); gatherer->mid = channel->content_name(); - gatherer->transport_name = channel->transport_name(); + gatherer->transport_name = transport_names_by_mid.at(gatherer->mid); + for (const auto& sender : transceiver->internal()->senders()) { - std::string track_id = (sender->track() ? sender->track()->id() : ""); + auto track = sender->track(); + std::string track_id = (track ? 
track->id() : ""); gatherer->sender_track_id_by_ssrc.insert( std::make_pair(sender->ssrc(), track_id)); } - for (const auto& receiver : transceiver->internal()->receivers()) { - gatherer->receiver_track_id_by_ssrc.insert(std::make_pair( - receiver->internal()->ssrc(), receiver->track()->id())); - } + + // Populating `receiver_track_id_by_ssrc` will be done on the worker + // thread as the `ssrc` property of the receiver needs to be accessed + // there. + gatherers.push_back(std::move(gatherer)); } } pc_->worker_thread()->Invoke(RTC_FROM_HERE, [&] { rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + // Populate `receiver_track_id_by_ssrc` for the gatherers. + int i = 0; + for (const auto& transceiver : transceivers) { + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + if (!channel) + continue; + MediaChannelStatsGatherer* gatherer = gatherers[i++].get(); + RTC_DCHECK_EQ(gatherer->mid, channel->content_name()); + + for (const auto& receiver : transceiver->internal()->receivers()) { + gatherer->receiver_track_id_by_ssrc.insert(std::make_pair( + receiver->internal()->ssrc(), receiver->track()->id())); + } + } + for (auto it = gatherers.begin(); it != gatherers.end(); /* incremented manually */) { MediaChannelStatsGatherer* gatherer = it->get(); @@ -1109,7 +1233,7 @@ void StatsCollector::ExtractMediaInfo() { } void StatsCollector::ExtractSenderInfo() { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); for (const auto& sender : pc_->GetSenders()) { // TODO(nisse): SSRC == 0 currently means none. 
Delete check when @@ -1142,7 +1266,7 @@ void StatsCollector::ExtractSenderInfo() { } void StatsCollector::ExtractDataInfo() { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; @@ -1166,7 +1290,7 @@ void StatsCollector::ExtractDataInfo() { StatsReport* StatsCollector::GetReport(const StatsReport::StatsType& type, const std::string& id, StatsReport::Direction direction) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); RTC_DCHECK(type == StatsReport::kStatsReportTypeSsrc || type == StatsReport::kStatsReportTypeRemoteSsrc); return reports_.Find(StatsReport::NewIdWithDirection(type, id, direction)); @@ -1174,7 +1298,7 @@ StatsReport* StatsCollector::GetReport(const StatsReport::StatsType& type, void StatsCollector::UpdateStatsFromExistingLocalAudioTracks( bool has_remote_tracks) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); // Loop through the existing local audio tracks. for (const auto& it : local_audio_tracks_) { AudioTrackInterface* track = it.first; @@ -1202,7 +1326,7 @@ void StatsCollector::UpdateStatsFromExistingLocalAudioTracks( void StatsCollector::UpdateReportFromAudioTrack(AudioTrackInterface* track, StatsReport* report, bool has_remote_tracks) { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); RTC_DCHECK(track != NULL); // Don't overwrite report values if they're not available. 
@@ -1224,7 +1348,7 @@ void StatsCollector::UpdateReportFromAudioTrack(AudioTrackInterface* track, } void StatsCollector::UpdateTrackReports() { - RTC_DCHECK(pc_->signaling_thread()->IsCurrent()); + RTC_DCHECK_RUN_ON(pc_->signaling_thread()); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; @@ -1235,7 +1359,7 @@ void StatsCollector::UpdateTrackReports() { } void StatsCollector::ClearUpdateStatsCacheForTest() { - stats_gathering_started_ = 0; + cache_timestamp_ms_ = 0; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h index befbcabbf..2fd5d9d8f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h +++ b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h @@ -16,6 +16,8 @@ #include +#include +#include #include #include #include @@ -25,6 +27,7 @@ #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/stats_types.h" +#include "p2p/base/connection_info.h" #include "p2p/base/port.h" #include "pc/peer_connection_internal.h" #include "pc/stats_collector_interface.h" @@ -52,7 +55,7 @@ class StatsCollector : public StatsCollectorInterface { explicit StatsCollector(PeerConnectionInternal* pc); virtual ~StatsCollector(); - // Adds a MediaStream with tracks that can be used as a |selector| in a call + // Adds a MediaStream with tracks that can be used as a `selector` in a call // to GetStats. void AddStream(MediaStreamInterface* stream); void AddTrack(MediaStreamTrackInterface* track); @@ -70,12 +73,12 @@ class StatsCollector : public StatsCollectorInterface { void UpdateStats(PeerConnectionInterface::StatsOutputLevel level); // Gets a StatsReports of the last collected stats. Note that UpdateStats must - // be called before this function to get the most recent stats. |selector| is + // be called before this function to get the most recent stats. `selector` is // a track label or empty string. 
The most recent reports are stored in - // |reports|. + // `reports`. // TODO(tommi): Change this contract to accept a callback object instead - // of filling in |reports|. As is, there's a requirement that the caller - // uses |reports| immediately without allowing any async activity on + // of filling in `reports`. As is, there's a requirement that the caller + // uses `reports` immediately without allowing any async activity on // the thread (message handling etc) and then discard the results. void GetStats(MediaStreamTrackInterface* track, StatsReports* reports) override; @@ -103,19 +106,48 @@ class StatsCollector : public StatsCollectorInterface { private: friend class StatsCollectorTest; + // Struct that's populated on the network thread and carries the values to + // the signaling thread where the stats are added to the stats reports. + struct TransportStats { + TransportStats() = default; + TransportStats(std::string transport_name, + cricket::TransportStats transport_stats) + : name(std::move(transport_name)), stats(std::move(transport_stats)) {} + TransportStats(TransportStats&&) = default; + TransportStats(const TransportStats&) = delete; + + std::string name; + cricket::TransportStats stats; + std::unique_ptr local_cert_stats; + std::unique_ptr remote_cert_stats; + }; + + struct SessionStats { + SessionStats() = default; + SessionStats(SessionStats&&) = default; + SessionStats(const SessionStats&) = delete; + + SessionStats& operator=(SessionStats&&) = default; + SessionStats& operator=(SessionStats&) = delete; + + cricket::CandidateStatsList candidate_stats; + std::vector transport_stats; + std::map transport_names_by_mid; + }; + // Overridden in unit tests to fake timing. virtual double GetTimeNow(); bool CopySelectedReports(const std::string& selector, StatsReports* reports); - // Helper method for creating IceCandidate report. |is_local| indicates + // Helper method for creating IceCandidate report. 
`is_local` indicates // whether this candidate is local or remote. StatsReport* AddCandidateReport( const cricket::CandidateStats& candidate_stats, bool local); // Adds a report for this certificate and every certificate in its chain, and - // returns the leaf certificate's report (|cert_stats|'s report). + // returns the leaf certificate's report (`cert_stats`'s report). StatsReport* AddCertificateReports( std::unique_ptr cert_stats); @@ -126,9 +158,14 @@ class StatsCollector : public StatsCollectorInterface { const cricket::ConnectionInfo& info); void ExtractDataInfo(); - void ExtractSessionInfo(); + + // Returns the `transport_names_by_mid` member from the SessionStats as + // gathered and used to populate the stats. + std::map ExtractSessionInfo(); + void ExtractBweInfo(); - void ExtractMediaInfo(); + void ExtractMediaInfo( + const std::map& transport_names_by_mid); void ExtractSenderInfo(); webrtc::StatsReport* GetReport(const StatsReport::StatsType& type, const std::string& id, @@ -143,11 +180,19 @@ class StatsCollector : public StatsCollectorInterface { // Helper method to update the timestamp of track records. void UpdateTrackReports(); + SessionStats ExtractSessionInfo_n( + const std::vector>>& transceivers, + absl::optional sctp_transport_name, + absl::optional sctp_mid); + void ExtractSessionInfo_s(SessionStats& session_stats); + // A collection for all of our stats reports. StatsCollection reports_; TrackIdMap track_ids_; // Raw pointer to the peer connection the statistics are gathered from. 
PeerConnectionInternal* const pc_; + int64_t cache_timestamp_ms_ = 0; double stats_gathering_started_; const bool use_standard_bytes_stats_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h b/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h index 28cd46fc5..9bbf957ef 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h @@ -22,16 +22,12 @@ namespace webrtc { class StreamCollection : public StreamCollectionInterface { public: static rtc::scoped_refptr Create() { - rtc::RefCountedObject* implementation = - new rtc::RefCountedObject(); - return implementation; + return rtc::make_ref_counted(); } static rtc::scoped_refptr Create( StreamCollection* streams) { - rtc::RefCountedObject* implementation = - new rtc::RefCountedObject(streams); - return implementation; + return rtc::make_ref_counted(streams); } virtual size_t count() { return media_streams_.size(); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc index b3ec68bb2..66f4c461d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc @@ -10,10 +10,15 @@ #include "pc/track_media_info_map.h" +#include #include #include #include +#include "api/media_types.h" +#include "api/rtp_parameters.h" +#include "media/base/stream_params.h" +#include "rtc_base/checks.h" #include "rtc_base/thread.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.h b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.h index 542501eb1..c8c6da270 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.h +++ b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.h @@ -11,12 +11,16 @@ #ifndef PC_TRACK_MEDIA_INFO_MAP_H_ #define PC_TRACK_MEDIA_INFO_MAP_H_ +#include + #include #include #include #include +#include "absl/types/optional.h" #include 
"api/media_stream_interface.h" +#include "api/scoped_refptr.h" #include "media/base/media_channel.h" #include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc index 5fe148a22..235c9af03 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc @@ -10,6 +10,8 @@ #include "pc/transceiver_list.h" +#include "rtc_base/checks.h" + namespace webrtc { void TransceiverStableState::set_newly_created() { @@ -34,8 +36,23 @@ void TransceiverStableState::SetRemoteStreamIdsIfUnset( } } +void TransceiverStableState::SetInitSendEncodings( + const std::vector& encodings) { + init_send_encodings_ = encodings; +} + +std::vector TransceiverList::ListInternal() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + std::vector internals; + for (auto transceiver : transceivers_) { + internals.push_back(transceiver->internal()); + } + return internals; +} + RtpTransceiverProxyRefPtr TransceiverList::FindBySender( rtc::scoped_refptr sender) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); for (auto transceiver : transceivers_) { if (transceiver->sender() == sender) { return transceiver; @@ -46,6 +63,7 @@ RtpTransceiverProxyRefPtr TransceiverList::FindBySender( RtpTransceiverProxyRefPtr TransceiverList::FindByMid( const std::string& mid) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); for (auto transceiver : transceivers_) { if (transceiver->mid() == mid) { return transceiver; @@ -56,6 +74,7 @@ RtpTransceiverProxyRefPtr TransceiverList::FindByMid( RtpTransceiverProxyRefPtr TransceiverList::FindByMLineIndex( size_t mline_index) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); for (auto transceiver : transceivers_) { if (transceiver->internal()->mline_index() == mline_index) { return transceiver; diff --git a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h 
index cd77d67f4..568c9c7e7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h +++ b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.h @@ -11,12 +11,24 @@ #ifndef PC_TRANSCEIVER_LIST_H_ #define PC_TRANSCEIVER_LIST_H_ +#include + #include #include #include #include +#include "absl/types/optional.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "pc/rtp_transceiver.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -32,11 +44,17 @@ class TransceiverStableState { void SetMSectionIfUnset(absl::optional mid, absl::optional mline_index); void SetRemoteStreamIdsIfUnset(const std::vector& ids); + void SetInitSendEncodings( + const std::vector& encodings); absl::optional mid() const { return mid_; } absl::optional mline_index() const { return mline_index_; } absl::optional> remote_stream_ids() const { return remote_stream_ids_; } + absl::optional> init_send_encodings() + const { + return init_send_encodings_; + } bool has_m_section() const { return has_m_section_; } bool newly_created() const { return newly_created_; } @@ -44,6 +62,7 @@ class TransceiverStableState { absl::optional mid_; absl::optional mline_index_; absl::optional> remote_stream_ids_; + absl::optional> init_send_encodings_; // Indicates that mid value from stable state has been captured and // that rollback has to restore the transceiver. Also protects against // subsequent overwrites. @@ -54,14 +73,36 @@ class TransceiverStableState { bool newly_created_ = false; }; +// This class encapsulates the active list of transceivers on a +// PeerConnection, and offers convenient functions on that list. +// It is a single-thread class; all operations must be performed +// on the same thread. 
class TransceiverList { public: - std::vector List() const { return transceivers_; } + // Returns a copy of the currently active list of transceivers. The + // list consists of rtc::scoped_refptrs, which will keep the transceivers + // from being deallocated, even if they are removed from the TransceiverList. + std::vector List() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return transceivers_; + } + // As above, but does not check thread ownership. Unsafe. + // TODO(bugs.webrtc.org/12692): Refactor and remove + std::vector UnsafeList() const { + return transceivers_; + } + + // Returns a list of the internal() pointers of the currently active list + // of transceivers. These raw pointers are not thread-safe, so need to + // be consumed on the same thread. + std::vector ListInternal() const; void Add(RtpTransceiverProxyRefPtr transceiver) { + RTC_DCHECK_RUN_ON(&sequence_checker_); transceivers_.push_back(transceiver); } void Remove(RtpTransceiverProxyRefPtr transceiver) { + RTC_DCHECK_RUN_ON(&sequence_checker_); transceivers_.erase( std::remove(transceivers_.begin(), transceivers_.end(), transceiver), transceivers_.end()); @@ -73,26 +114,33 @@ class TransceiverList { // Find or create the stable state for a transceiver. TransceiverStableState* StableState(RtpTransceiverProxyRefPtr transceiver) { + RTC_DCHECK_RUN_ON(&sequence_checker_); return &(transceiver_stable_states_by_transceivers_[transceiver]); } void DiscardStableStates() { + RTC_DCHECK_RUN_ON(&sequence_checker_); transceiver_stable_states_by_transceivers_.clear(); } std::map& StableStates() { + RTC_DCHECK_RUN_ON(&sequence_checker_); return transceiver_stable_states_by_transceivers_; } private: + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; std::vector transceivers_; + // TODO(bugs.webrtc.org/12692): Add RTC_GUARDED_BY(sequence_checker_); + // Holds changes made to transceivers during applying descriptors for // potential rollback. Gets cleared once signaling state goes to stable. 
std::map - transceiver_stable_states_by_transceivers_; + transceiver_stable_states_by_transceivers_ + RTC_GUARDED_BY(sequence_checker_); // Holds remote stream ids for transceivers from stable state. std::map> - remote_stream_ids_by_transceivers_; + remote_stream_ids_by_transceivers_ RTC_GUARDED_BY(sequence_checker_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.h b/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.h index c4a8918ac..0182999d6 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.h +++ b/TMessagesProj/jni/voip/webrtc/pc/usage_pattern.h @@ -11,6 +11,8 @@ #ifndef PC_USAGE_PATTERN_H_ #define PC_USAGE_PATTERN_H_ +#include "api/peer_connection_interface.h" + namespace webrtc { class PeerConnectionObserver; diff --git a/TMessagesProj/jni/voip/webrtc/pc/used_ids.h b/TMessagesProj/jni/voip/webrtc/pc/used_ids.h index 78e64caa4..596019734 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/used_ids.h +++ b/TMessagesProj/jni/voip/webrtc/pc/used_ids.h @@ -60,7 +60,9 @@ class UsedIds { } protected: - bool IsIdUsed(int new_id) { return id_set_.find(new_id) != id_set_.end(); } + virtual bool IsIdUsed(int new_id) { + return id_set_.find(new_id) != id_set_.end(); + } const int min_allowed_id_; const int max_allowed_id_; @@ -92,11 +94,24 @@ class UsedIds { class UsedPayloadTypes : public UsedIds { public: UsedPayloadTypes() - : UsedIds(kDynamicPayloadTypeMin, kDynamicPayloadTypeMax) {} + : UsedIds(kFirstDynamicPayloadTypeLowerRange, + kLastDynamicPayloadTypeUpperRange) {} + + protected: + bool IsIdUsed(int new_id) override { + // Range marked for RTCP avoidance is "used". 
+ if (new_id > kLastDynamicPayloadTypeLowerRange && + new_id < kFirstDynamicPayloadTypeUpperRange) + return true; + return UsedIds::IsIdUsed(new_id); + } private: - static const int kDynamicPayloadTypeMin = 96; - static const int kDynamicPayloadTypeMax = 127; + static const int kFirstDynamicPayloadTypeLowerRange = 35; + static const int kLastDynamicPayloadTypeLowerRange = 65; + + static const int kFirstDynamicPayloadTypeUpperRange = 96; + static const int kLastDynamicPayloadTypeUpperRange = 127; }; // Helper class used for finding duplicate RTP Header extension ids among diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc index dd601259e..34cfe96f2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc @@ -15,16 +15,12 @@ #include #include -#include "api/media_stream_proxy.h" +#include "api/video/recordable_encoded_frame.h" #include "api/video_track_source_proxy.h" -#include "pc/jitter_buffer_delay.h" -#include "pc/jitter_buffer_delay_proxy.h" -#include "pc/media_stream.h" #include "pc/video_track.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" namespace webrtc { @@ -41,7 +37,7 @@ VideoRtpReceiver::VideoRtpReceiver( const std::vector>& streams) : worker_thread_(worker_thread), id_(receiver_id), - source_(new RefCountedObject(this)), + source_(rtc::make_ref_counted(&source_callback_)), track_(VideoTrackProxyWithInternal::Create( rtc::Thread::Current(), worker_thread, @@ -51,111 +47,130 @@ VideoRtpReceiver::VideoRtpReceiver( worker_thread, source_), worker_thread))), - attachment_id_(GenerateUniqueId()), - delay_(JitterBufferDelayProxy::Create( - rtc::Thread::Current(), - worker_thread, - new rtc::RefCountedObject(worker_thread))) { + attachment_id_(GenerateUniqueId()) { RTC_DCHECK(worker_thread_); SetStreams(streams); 
source_->SetState(MediaSourceInterface::kLive); } VideoRtpReceiver::~VideoRtpReceiver() { - // Since cricket::VideoRenderer is not reference counted, - // we need to remove it from the channel before we are deleted. - Stop(); - // Make sure we can't be called by the |source_| anymore. - worker_thread_->Invoke(RTC_FROM_HERE, - [this] { source_->ClearCallback(); }); + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK(stopped_); + RTC_DCHECK(!media_channel_); } std::vector VideoRtpReceiver::stream_ids() const { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); std::vector stream_ids(streams_.size()); for (size_t i = 0; i < streams_.size(); ++i) stream_ids[i] = streams_[i]->id(); return stream_ids; } +rtc::scoped_refptr VideoRtpReceiver::dtls_transport() + const { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + return dtls_transport_; +} + +std::vector> +VideoRtpReceiver::streams() const { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + return streams_; +} + RtpParameters VideoRtpReceiver::GetParameters() const { - if (!media_channel_ || stopped_) { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) return RtpParameters(); - } - return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return ssrc_ ? media_channel_->GetRtpReceiveParameters(*ssrc_) - : media_channel_->GetDefaultRtpReceiveParameters(); - }); + return ssrc_ ? media_channel_->GetRtpReceiveParameters(*ssrc_) + : media_channel_->GetDefaultRtpReceiveParameters(); } void VideoRtpReceiver::SetFrameDecryptor( rtc::scoped_refptr frame_decryptor) { + RTC_DCHECK_RUN_ON(worker_thread_); frame_decryptor_ = std::move(frame_decryptor); // Special Case: Set the frame decryptor to any value on any existing channel. 
- if (media_channel_ && ssrc_.has_value() && !stopped_) { - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); - }); + if (media_channel_ && ssrc_) { + media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); } } rtc::scoped_refptr VideoRtpReceiver::GetFrameDecryptor() const { + RTC_DCHECK_RUN_ON(worker_thread_); return frame_decryptor_; } void VideoRtpReceiver::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(worker_thread_); - frame_transformer_ = std::move(frame_transformer); - if (media_channel_ && !stopped_) { - media_channel_->SetDepacketizerToDecoderFrameTransformer( - ssrc_.value_or(0), frame_transformer_); - } - }); + RTC_DCHECK_RUN_ON(worker_thread_); + frame_transformer_ = std::move(frame_transformer); + if (media_channel_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + ssrc_.value_or(0), frame_transformer_); + } } void VideoRtpReceiver::Stop() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); // TODO(deadbeef): Need to do more here to fully stop receiving packets. - if (stopped_) { - return; + + if (!stopped_) { + source_->SetState(MediaSourceInterface::kEnded); + stopped_ = true; } - source_->SetState(MediaSourceInterface::kEnded); - if (!media_channel_) { - RTC_LOG(LS_WARNING) << "VideoRtpReceiver::Stop: No video channel exists."; - } else { - // Allow that SetSink fails. This is the normal case when the underlying - // media channel has already been deleted. 
- worker_thread_->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(worker_thread_); + + worker_thread_->Invoke(RTC_FROM_HERE, [&] { + RTC_DCHECK_RUN_ON(worker_thread_); + if (media_channel_) { SetSink(nullptr); - }); - } - delay_->OnStop(); - stopped_ = true; + SetMediaChannel_w(nullptr); + } + source_->ClearCallback(); + }); } void VideoRtpReceiver::StopAndEndTrack() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); Stop(); track_->internal()->set_ended(); } void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { - RTC_DCHECK(media_channel_); - if (!stopped_ && ssrc_ == ssrc) { - return; - } - worker_thread_->Invoke(RTC_FROM_HERE, [&] { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + + // `stopped_` will be `true` on construction. RestartMediaChannel + // can in this case function like "ensure started" and flip `stopped_` + // to false. + + // TODO(tommi): Can we restart the media channel without blocking? + bool ok = worker_thread_->Invoke(RTC_FROM_HERE, [&, was_stopped = + stopped_] { RTC_DCHECK_RUN_ON(worker_thread_); - if (!stopped_) { + if (!media_channel_) { + // Ignore further negotiations if we've already been stopped and don't + // have an associated media channel. + RTC_DCHECK(was_stopped); + return false; // Can't restart. + } + + if (!was_stopped && ssrc_ == ssrc) { + // Already running with that ssrc. + return true; + } + + // Disconnect from the previous ssrc. + if (!was_stopped) { SetSink(nullptr); } + bool encoded_sink_enabled = saved_encoded_sink_enabled_; SetEncodedSinkEnabled(false); - stopped_ = false; - - ssrc_ = ssrc; + // Set up the new ssrc. 
+ ssrc_ = std::move(ssrc); SetSink(source_->sink()); if (encoded_sink_enabled) { SetEncodedSinkEnabled(true); @@ -165,47 +180,62 @@ void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { media_channel_->SetDepacketizerToDecoderFrameTransformer( ssrc_.value_or(0), frame_transformer_); } + + if (media_channel_ && ssrc_) { + if (frame_decryptor_) { + media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); + } + + media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); + } + + return true; }); - // Attach any existing frame decryptor to the media channel. - MaybeAttachFrameDecryptorToMediaChannel( - ssrc, worker_thread_, frame_decryptor_, media_channel_, stopped_); - // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC - // value. - delay_->OnStart(media_channel_, ssrc.value_or(0)); + if (!ok) + return; + + stopped_ = false; } +// RTC_RUN_ON(worker_thread_) void VideoRtpReceiver::SetSink(rtc::VideoSinkInterface* sink) { - RTC_DCHECK(media_channel_); if (ssrc_) { media_channel_->SetSink(*ssrc_, sink); - return; + } else { + media_channel_->SetDefaultSink(sink); } - media_channel_->SetDefaultSink(sink); } void VideoRtpReceiver::SetupMediaChannel(uint32_t ssrc) { - if (!media_channel_) { - RTC_LOG(LS_ERROR) - << "VideoRtpReceiver::SetupMediaChannel: No video channel exists."; - } + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RestartMediaChannel(ssrc); } void VideoRtpReceiver::SetupUnsignaledMediaChannel() { - if (!media_channel_) { - RTC_LOG(LS_ERROR) << "VideoRtpReceiver::SetupUnsignaledMediaChannel: No " - "video channel exists."; - } + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RestartMediaChannel(absl::nullopt); } +uint32_t VideoRtpReceiver::ssrc() const { + RTC_DCHECK_RUN_ON(worker_thread_); + return ssrc_.value_or(0); +} + void VideoRtpReceiver::set_stream_ids(std::vector stream_ids) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); SetStreams(CreateStreamsFromIds(std::move(stream_ids))); } +void 
VideoRtpReceiver::set_transport( + rtc::scoped_refptr dtls_transport) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + dtls_transport_ = std::move(dtls_transport); +} + void VideoRtpReceiver::SetStreams( const std::vector>& streams) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); // Remove remote track from any streams that are going away. for (const auto& existing_stream : streams_) { bool removed = true; @@ -238,6 +268,7 @@ void VideoRtpReceiver::SetStreams( } void VideoRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); observer_ = observer; // Deliver any notifications the observer may have missed by being set late. if (received_first_packet_ && observer_) { @@ -247,40 +278,57 @@ void VideoRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) { void VideoRtpReceiver::SetJitterBufferMinimumDelay( absl::optional delay_seconds) { - delay_->Set(delay_seconds); + RTC_DCHECK_RUN_ON(worker_thread_); + delay_.Set(delay_seconds); + if (media_channel_ && ssrc_) + media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); } void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); + + if (stopped_ && !media_channel) + return; + worker_thread_->Invoke(RTC_FROM_HERE, [&] { RTC_DCHECK_RUN_ON(worker_thread_); - bool encoded_sink_enabled = saved_encoded_sink_enabled_; - if (encoded_sink_enabled && media_channel_) { - // Turn off the old sink, if any. 
- SetEncodedSinkEnabled(false); - } - - media_channel_ = static_cast(media_channel); - - if (media_channel_) { - if (saved_generate_keyframe_) { - // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC - media_channel_->GenerateKeyFrame(ssrc_.value_or(0)); - saved_generate_keyframe_ = false; - } - if (encoded_sink_enabled) { - SetEncodedSinkEnabled(true); - } - if (frame_transformer_) { - media_channel_->SetDepacketizerToDecoderFrameTransformer( - ssrc_.value_or(0), frame_transformer_); - } - } + SetMediaChannel_w(media_channel); }); } +// RTC_RUN_ON(worker_thread_) +void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { + if (media_channel == media_channel_) + return; + + bool encoded_sink_enabled = saved_encoded_sink_enabled_; + if (encoded_sink_enabled && media_channel_) { + // Turn off the old sink, if any. + SetEncodedSinkEnabled(false); + } + + media_channel_ = static_cast(media_channel); + + if (media_channel_) { + if (saved_generate_keyframe_) { + // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC + media_channel_->GenerateKeyFrame(ssrc_.value_or(0)); + saved_generate_keyframe_ = false; + } + if (encoded_sink_enabled) { + SetEncodedSinkEnabled(true); + } + if (frame_transformer_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + ssrc_.value_or(0), frame_transformer_); + } + } +} + void VideoRtpReceiver::NotifyFirstPacketReceived() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); if (observer_) { observer_->OnFirstPacketReceived(media_type()); } @@ -288,11 +336,10 @@ void VideoRtpReceiver::NotifyFirstPacketReceived() { } std::vector VideoRtpReceiver::GetSources() const { - if (!media_channel_ || !ssrc_ || stopped_) { - return {}; - } - return worker_thread_->Invoke>( - RTC_FROM_HERE, [&] { return media_channel_->GetSources(*ssrc_); }); + RTC_DCHECK_RUN_ON(worker_thread_); + if (!ssrc_ || !media_channel_) + return std::vector(); + return media_channel_->GetSources(*ssrc_); } void 
VideoRtpReceiver::OnGenerateKeyFrame() { @@ -318,20 +365,21 @@ void VideoRtpReceiver::OnEncodedSinkEnabled(bool enable) { saved_encoded_sink_enabled_ = enable; } +// RTC_RUN_ON(worker_thread_) void VideoRtpReceiver::SetEncodedSinkEnabled(bool enable) { - if (media_channel_) { - if (enable) { - // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC - auto source = source_; - media_channel_->SetRecordableEncodedFrameCallback( - ssrc_.value_or(0), - [source = std::move(source)](const RecordableEncodedFrame& frame) { - source->BroadcastRecordableEncodedFrame(frame); - }); - } else { - // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC - media_channel_->ClearRecordableEncodedFrameCallback(ssrc_.value_or(0)); - } + if (!media_channel_) + return; + + // TODO(bugs.webrtc.org/8694): Stop using 0 to mean unsignalled SSRC + const auto ssrc = ssrc_.value_or(0); + + if (enable) { + media_channel_->SetRecordableEncodedFrameCallback( + ssrc, [source = source_](const RecordableEncodedFrame& frame) { + source->BroadcastRecordableEncodedFrame(frame); + }); + } else { + media_channel_->ClearRecordableEncodedFrameCallback(ssrc); } } diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h index 74ae44431..89e15a5c7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h @@ -18,6 +18,7 @@ #include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" #include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" #include "api/media_stream_track_proxy.h" @@ -25,21 +26,24 @@ #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/transport/rtp/rtp_source.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include 
"api/video/video_source_interface.h" #include "media/base/media_channel.h" -#include "pc/jitter_buffer_delay_interface.h" +#include "pc/jitter_buffer_delay.h" #include "pc/rtp_receiver.h" #include "pc/video_rtp_track_source.h" #include "pc/video_track.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { -class VideoRtpReceiver : public rtc::RefCountedObject, - public VideoRtpTrackSource::Callback { +class VideoRtpReceiver : public RtpReceiverInternal { public: // An SSRC of 0 will create a receiver that will match the first SSRC it // sees. Must be called on signaling thread. @@ -55,23 +59,16 @@ class VideoRtpReceiver : public rtc::RefCountedObject, virtual ~VideoRtpReceiver(); - rtc::scoped_refptr video_track() const { - return track_.get(); - } + rtc::scoped_refptr video_track() const { return track_; } // RtpReceiverInterface implementation rtc::scoped_refptr track() const override { - return track_.get(); - } - rtc::scoped_refptr dtls_transport() const override { - return dtls_transport_; + return track_; } + rtc::scoped_refptr dtls_transport() const override; std::vector stream_ids() const override; std::vector> streams() - const override { - return streams_; - } - + const override; cricket::MediaType media_type() const override { return cricket::MEDIA_TYPE_VIDEO; } @@ -94,13 +91,11 @@ class VideoRtpReceiver : public rtc::RefCountedObject, void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; - uint32_t ssrc() const override { return ssrc_.value_or(0); } + uint32_t ssrc() const override; void NotifyFirstPacketReceived() override; void set_stream_ids(std::vector stream_ids) override; void set_transport( - rtc::scoped_refptr dtls_transport) override { - dtls_transport_ = dtls_transport; - } + rtc::scoped_refptr dtls_transport) override; void SetStreams(const 
std::vector>& streams) override; @@ -119,33 +114,68 @@ class VideoRtpReceiver : public rtc::RefCountedObject, void RestartMediaChannel(absl::optional ssrc); void SetSink(rtc::VideoSinkInterface* sink) RTC_RUN_ON(worker_thread_); + void SetMediaChannel_w(cricket::MediaChannel* media_channel) + RTC_RUN_ON(worker_thread_); // VideoRtpTrackSource::Callback - void OnGenerateKeyFrame() override; - void OnEncodedSinkEnabled(bool enable) override; + void OnGenerateKeyFrame(); + void OnEncodedSinkEnabled(bool enable); + void SetEncodedSinkEnabled(bool enable) RTC_RUN_ON(worker_thread_); + class SourceCallback : public VideoRtpTrackSource::Callback { + public: + explicit SourceCallback(VideoRtpReceiver* receiver) : receiver_(receiver) {} + ~SourceCallback() override = default; + + private: + void OnGenerateKeyFrame() override { receiver_->OnGenerateKeyFrame(); } + void OnEncodedSinkEnabled(bool enable) override { + receiver_->OnEncodedSinkEnabled(enable); + } + + VideoRtpReceiver* const receiver_; + } source_callback_{this}; + + RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; rtc::Thread* const worker_thread_; const std::string id_; - cricket::VideoMediaChannel* media_channel_ = nullptr; - absl::optional ssrc_; + // See documentation for `stopped_` below for when a valid media channel + // has been assigned and when this pointer will be null. + cricket::VideoMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = + nullptr; + absl::optional ssrc_ RTC_GUARDED_BY(worker_thread_); // |source_| is held here to be able to change the state of the source when // the VideoRtpReceiver is stopped. 
- rtc::scoped_refptr source_; - rtc::scoped_refptr> track_; - std::vector> streams_; - bool stopped_ = true; - RtpReceiverObserverInterface* observer_ = nullptr; - bool received_first_packet_ = false; - int attachment_id_ = 0; - rtc::scoped_refptr frame_decryptor_; - rtc::scoped_refptr dtls_transport_; + const rtc::scoped_refptr source_; + const rtc::scoped_refptr> track_; + std::vector> streams_ + RTC_GUARDED_BY(&signaling_thread_checker_); + // `stopped` is state that's used on the signaling thread to indicate whether + // a valid `media_channel_` has been assigned and configured. When an instance + // of VideoRtpReceiver is initially created, `stopped_` is true and will + // remain true until either `SetupMediaChannel` or + // `SetupUnsignaledMediaChannel` is called after assigning a media channel. + // After that, `stopped_` will remain false until `Stop()` is called. + // Note, for checking the state of the class on the worker thread, + // check `media_channel_` instead, as that's the main worker thread state. + bool stopped_ RTC_GUARDED_BY(&signaling_thread_checker_) = true; + RtpReceiverObserverInterface* observer_ + RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; + bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = + false; + const int attachment_id_; + rtc::scoped_refptr frame_decryptor_ + RTC_GUARDED_BY(worker_thread_); + rtc::scoped_refptr dtls_transport_ + RTC_GUARDED_BY(&signaling_thread_checker_); rtc::scoped_refptr frame_transformer_ RTC_GUARDED_BY(worker_thread_); - // Allows to thread safely change jitter buffer delay. Handles caching cases + // Stores the minimum jitter buffer delay. Handles caching cases // if |SetJitterBufferMinimumDelay| is called before start. - rtc::scoped_refptr delay_; + JitterBufferDelay delay_ RTC_GUARDED_BY(worker_thread_); + // Records if we should generate a keyframe when |media_channel_| gets set up // or switched. 
bool saved_generate_keyframe_ RTC_GUARDED_BY(worker_thread_) = false; diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.cc b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.cc index f96db962b..bcfcdcbdf 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.cc @@ -10,6 +10,12 @@ #include "pc/video_rtp_track_source.h" +#include + +#include + +#include "rtc_base/checks.h" + namespace webrtc { VideoRtpTrackSource::VideoRtpTrackSource(Callback* callback) diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h index b88784931..47b7bc9ee 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h @@ -13,10 +13,17 @@ #include +#include "api/sequence_checker.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" #include "media/base/video_broadcaster.h" #include "pc/video_track_source.h" -#include "rtc_base/callback.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -67,7 +74,7 @@ class VideoRtpTrackSource : public VideoTrackSource { rtc::VideoSinkInterface* sink) override; private: - SequenceChecker worker_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; // |broadcaster_| is needed since the decoder can only handle one sink. // It might be better if the decoder can handle multiple sinks and consider // the VideoSinkWants. 
diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track.cc b/TMessagesProj/jni/voip/webrtc/pc/video_track.cc index 55356e704..b4f511b5f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track.cc @@ -14,6 +14,7 @@ #include #include "api/notifier.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/ref_counted_object.h" @@ -94,9 +95,7 @@ rtc::scoped_refptr VideoTrack::Create( const std::string& id, VideoTrackSourceInterface* source, rtc::Thread* worker_thread) { - rtc::RefCountedObject* track = - new rtc::RefCountedObject(id, source, worker_thread); - return track; + return rtc::make_ref_counted(id, source, worker_thread); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track.h b/TMessagesProj/jni/voip/webrtc/pc/video_track.h index b7835dee2..bff63fcb9 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track.h @@ -16,13 +16,13 @@ #include "api/media_stream_interface.h" #include "api/media_stream_track.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "media/base/video_source_base.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -58,7 +58,7 @@ class VideoTrack : public MediaStreamTrack, void OnChanged() override; rtc::Thread* const worker_thread_; - rtc::ThreadChecker signaling_thread_checker_; + SequenceChecker signaling_thread_checker_; rtc::scoped_refptr video_source_; ContentHint content_hint_ RTC_GUARDED_BY(signaling_thread_checker_); }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h index 27331eac4..4a29381c4 100644 --- 
a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h @@ -11,12 +11,16 @@ #ifndef PC_VIDEO_TRACK_SOURCE_H_ #define PC_VIDEO_TRACK_SOURCE_H_ +#include "absl/types/optional.h" #include "api/media_stream_interface.h" #include "api/notifier.h" +#include "api/sequence_checker.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" #include "media/base/media_channel.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -52,7 +56,7 @@ class RTC_EXPORT VideoTrackSource : public Notifier { virtual rtc::VideoSourceInterface* source() = 0; private: - rtc::ThreadChecker worker_thread_checker_; + SequenceChecker worker_thread_checker_; SourceState state_; const bool remote_; }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc index 9643dcc16..379b2f30c 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc @@ -15,6 +15,7 @@ #include #include +#include #include #include #include @@ -24,29 +25,46 @@ #include #include "absl/algorithm/container.h" -#include "absl/strings/match.h" #include "api/candidate.h" #include "api/crypto_params.h" #include "api/jsep_ice_candidate.h" #include "api/jsep_session_description.h" #include "api/media_types.h" // for RtpExtension +#include "absl/types/optional.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" #include "media/base/codec.h" #include "media/base/media_constants.h" +#include "media/base/rid_description.h" #include "media/base/rtp_utils.h" +#include "media/base/stream_params.h" #include "media/sctp/sctp_transport_internal.h" +#include "p2p/base/candidate_pair_interface.h" +#include "p2p/base/ice_transport_internal.h" #include 
"p2p/base/p2p_constants.h" #include "p2p/base/port.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_info.h" +#include "pc/media_protocol_names.h" #include "pc/media_session.h" #include "pc/sdp_serializer.h" +#include "pc/session_description.h" +#include "pc/simulcast_description.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/helpers.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/message_digest.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_fingerprint.h" +#include "rtc_base/string_encode.h" #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/third_party/base64/base64.h" using cricket::AudioContentDescription; using cricket::Candidate; @@ -64,7 +82,6 @@ using cricket::MediaContentDescription; using cricket::MediaProtocolType; using cricket::MediaType; using cricket::RidDescription; -using cricket::RtpDataContentDescription; using cricket::RtpHeaderExtensions; using cricket::SctpDataContentDescription; using cricket::SimulcastDescription; @@ -79,10 +96,6 @@ using cricket::UnsupportedContentDescription; using cricket::VideoContentDescription; using rtc::SocketAddress; -namespace cricket { -class SessionDescription; -} - // TODO(deadbeef): Switch to using anonymous namespace rather than declaring // everything "static". namespace webrtc { @@ -93,6 +106,15 @@ namespace webrtc { // the form: // = // where MUST be exactly one case-significant character. + +// Legal characters in a value (RFC 4566 section 9): +// token-char = %x21 / %x23-27 / %x2A-2B / %x2D-2E / %x30-39 +// / %x41-5A / %x5E-7E +static const char kLegalTokenCharacters[] = + "!#$%&'*+-." 
// %x21, %x23-27, %x2A-2B, %x2D-2E + "0123456789" // %x30-39 + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" // %x41-5A + "^_`abcdefghijklmnopqrstuvwxyz{|}~"; // %x5E-7E static const int kLinePrefixLength = 2; // Length of = static const char kLineTypeVersion = 'v'; static const char kLineTypeOrigin = 'o'; @@ -605,6 +627,22 @@ static bool GetValue(const std::string& message, return true; } +// Get a single [token] from : +static bool GetSingleTokenValue(const std::string& message, + const std::string& attribute, + std::string* value, + SdpParseError* error) { + if (!GetValue(message, attribute, value, error)) { + return false; + } + if (strspn(value->c_str(), kLegalTokenCharacters) != value->size()) { + rtc::StringBuilder description; + description << "Illegal character found in the value of " << attribute; + return ParseFailed(message, description.str(), error); + } + return true; +} + static bool CaseInsensitiveFind(std::string str1, std::string str2) { absl::c_transform(str1, str1.begin(), ::tolower); absl::c_transform(str2, str2.begin(), ::tolower); @@ -862,11 +900,11 @@ std::string SdpSerialize(const JsepSessionDescription& jdesc) { // Time Description. 
AddLine(kTimeDescription, &message); - // Group - if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) { + // BUNDLE Groups + std::vector groups = + desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + for (const cricket::ContentGroup* group : groups) { std::string group_line = kAttrGroup; - const cricket::ContentGroup* group = - desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); RTC_DCHECK(group != NULL); for (const std::string& content_name : group->content_names()) { group_line.append(" "); @@ -1376,12 +1414,7 @@ void BuildMediaDescription(const ContentInfo* content_info, fmt.append(kDefaultSctpmapProtocol); } } else { - const RtpDataContentDescription* rtp_data_desc = - media_desc->as_rtp_data(); - for (const cricket::RtpDataCodec& codec : rtp_data_desc->codecs()) { - fmt.append(" "); - fmt.append(rtc::ToString(codec.id)); - } + RTC_NOTREACHED() << "Data description without SCTP"; } } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { const UnsupportedContentDescription* unsupported_desc = @@ -1933,19 +1966,6 @@ void BuildRtpMap(const MediaContentDescription* media_desc, ptime = std::max(ptime, max_minptime); AddAttributeLine(kCodecParamPTime, ptime, message); } - } else if (media_type == cricket::MEDIA_TYPE_DATA) { - if (media_desc->as_rtp_data()) { - for (const cricket::RtpDataCodec& codec : - media_desc->as_rtp_data()->codecs()) { - // RFC 4566 - // a=rtpmap: / - // [/] - InitAttrLine(kAttributeRtpmap, &os); - os << kSdpDelimiterColon << codec.id << " " << codec.name << "/" - << codec.clockrate; - AddLine(os.str(), message); - } - } } } @@ -2273,12 +2293,6 @@ static bool ParseFingerprintAttribute( const std::string& line, std::unique_ptr* fingerprint, SdpParseError* error) { - if (!IsLineType(line, kLineTypeAttributes) || - !HasAttribute(line, kAttributeFingerprint)) { - return ParseFailedExpectLine(line, 0, kLineTypeAttributes, - kAttributeFingerprint, error); - } - std::vector fields; rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, 
&fields); const size_t expected_fields = 2; @@ -2576,6 +2590,7 @@ static std::unique_ptr ParseContentDescription( std::vector>* candidates, webrtc::SdpParseError* error) { auto media_desc = std::make_unique(); + media_desc->set_extmap_allow_mixed_enum(MediaContentDescription::kNo); if (!ParseContent(message, media_type, mline_index, protocol, payload_types, pos, content_name, bundle_only, msid_signaling, media_desc.get(), transport, candidates, error)) { @@ -2665,6 +2680,10 @@ bool ParseMediaDescription( bool bundle_only = false; int section_msid_signaling = 0; const std::string& media_type = fields[0]; + if ((media_type == kMediaTypeVideo || media_type == kMediaTypeAudio) && + !cricket::IsRtpProtocol(protocol)) { + return ParseFailed(line, "Unsupported protocol for media type", error); + } if (media_type == kMediaTypeVideo) { content = ParseContentDescription( message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol, @@ -2702,13 +2721,7 @@ bool ParseMediaDescription( data_desc->set_protocol(protocol); content = std::move(data_desc); } else { - // RTP - std::unique_ptr data_desc = - ParseContentDescription( - message, cricket::MEDIA_TYPE_DATA, mline_index, protocol, - payload_types, pos, &content_name, &bundle_only, - §ion_msid_signaling, &transport, candidates, error); - content = std::move(data_desc); + return ParseFailed(line, "Unsupported protocol for media type", error); } } else { RTC_LOG(LS_WARNING) << "Unsupported media type: " << line; @@ -3036,21 +3049,6 @@ bool ParseContent(const std::string& message, return ParseFailed( line, "b=" + bandwidth_type + " value can't be negative.", error); } - // We should never use more than the default bandwidth for RTP-based - // data channels. Don't allow SDP to set the bandwidth, because - // that would give JS the opportunity to "break the Internet". 
- // See: https://code.google.com/p/chromium/issues/detail?id=280726 - // Disallow TIAS since it shouldn't be generated for RTP data channels in - // the first place and provides another way to get around the limitation. - if (media_type == cricket::MEDIA_TYPE_DATA && - cricket::IsRtpProtocol(protocol) && - (b > cricket::kRtpDataMaxBandwidth / 1000 || - bandwidth_type == kTransportSpecificBandwidth)) { - rtc::StringBuilder description; - description << "RTP-based data channels may not send more than " - << cricket::kRtpDataMaxBandwidth / 1000 << "kbps."; - return ParseFailed(line, description.str(), error); - } // Convert values. Prevent integer overflow. if (bandwidth_type == kApplicationSpecificBandwidth) { b = std::min(b, INT_MAX / 1000) * 1000; @@ -3059,6 +3057,7 @@ bool ParseContent(const std::string& message, } media_desc->set_bandwidth(b); media_desc->set_bandwidth_type(bandwidth_type); + continue; } // Parse the media level connection data. @@ -3073,7 +3072,7 @@ bool ParseContent(const std::string& message, if (!IsLineType(line, kLineTypeAttributes)) { // TODO(deadbeef): Handle other lines if needed. - RTC_LOG(LS_INFO) << "Ignored line: " << line; + RTC_LOG(LS_VERBOSE) << "Ignored line: " << line; continue; } @@ -3083,7 +3082,7 @@ bool ParseContent(const std::string& message, // mid-attribute = "a=mid:" identification-tag // identification-tag = token // Use the mid identification-tag as the content name. 
- if (!GetValue(line, kAttributeMid, &mline_id, error)) { + if (!GetSingleTokenValue(line, kAttributeMid, &mline_id, error)) { return false; } *content_name = mline_id; @@ -3128,37 +3127,33 @@ bool ParseContent(const std::string& message, return false; } } else if (cricket::IsDtlsSctp(protocol) && - HasAttribute(line, kAttributeSctpPort)) { - if (media_type != cricket::MEDIA_TYPE_DATA) { - return ParseFailed( - line, "sctp-port attribute found in non-data media description.", - error); + media_type == cricket::MEDIA_TYPE_DATA) { + // + // SCTP specific attributes + // + if (HasAttribute(line, kAttributeSctpPort)) { + if (media_desc->as_sctp()->use_sctpmap()) { + return ParseFailed( + line, "sctp-port attribute can't be used with sctpmap.", error); + } + int sctp_port; + if (!ParseSctpPort(line, &sctp_port, error)) { + return false; + } + media_desc->as_sctp()->set_port(sctp_port); + } else if (HasAttribute(line, kAttributeMaxMessageSize)) { + int max_message_size; + if (!ParseSctpMaxMessageSize(line, &max_message_size, error)) { + return false; + } + media_desc->as_sctp()->set_max_message_size(max_message_size); + } else if (HasAttribute(line, kAttributeSctpmap)) { + // Ignore a=sctpmap: from early versions of draft-ietf-mmusic-sctp-sdp + continue; } - if (media_desc->as_sctp()->use_sctpmap()) { - return ParseFailed( - line, "sctp-port attribute can't be used with sctpmap.", error); - } - int sctp_port; - if (!ParseSctpPort(line, &sctp_port, error)) { - return false; - } - media_desc->as_sctp()->set_port(sctp_port); - } else if (cricket::IsDtlsSctp(protocol) && - HasAttribute(line, kAttributeMaxMessageSize)) { - if (media_type != cricket::MEDIA_TYPE_DATA) { - return ParseFailed( - line, - "max-message-size attribute found in non-data media description.", - error); - } - int max_message_size; - if (!ParseSctpMaxMessageSize(line, &max_message_size, error)) { - return false; - } - media_desc->as_sctp()->set_max_message_size(max_message_size); } else if 
(cricket::IsRtpProtocol(protocol)) { // - // RTP specific attrubtes + // RTP specific attributes // if (HasAttribute(line, kAttributeRtcpMux)) { media_desc->set_rtcp_mux(true); @@ -3274,14 +3269,18 @@ bool ParseContent(const std::string& message, } simulcast = error_or_simulcast.value(); + } else if (HasAttribute(line, kAttributeRtcp)) { + // Ignore and do not log a=rtcp line. + // JSEP section 5.8.2 (media section parsing) says to ignore it. + continue; } else { // Unrecognized attribute in RTP protocol. - RTC_LOG(LS_INFO) << "Ignored line: " << line; + RTC_LOG(LS_VERBOSE) << "Ignored line: " << line; continue; } } else { // Only parse lines that we are interested of. - RTC_LOG(LS_INFO) << "Ignored line: " << line; + RTC_LOG(LS_VERBOSE) << "Ignored line: " << line; continue; } } @@ -3630,11 +3629,6 @@ bool ParseRtpmapAttribute(const std::string& line, AudioContentDescription* audio_desc = media_desc->as_audio(); UpdateCodec(payload_type, encoding_name, clock_rate, 0, channels, audio_desc); - } else if (media_type == cricket::MEDIA_TYPE_DATA) { - RtpDataContentDescription* data_desc = media_desc->as_rtp_data(); - if (data_desc) { - data_desc->AddCodec(cricket::RtpDataCodec(payload_type, encoding_name)); - } } return true; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.h b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.h index 588e02f13..aa3317f34 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.h +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.h @@ -22,7 +22,12 @@ #include +#include "api/candidate.h" +#include "api/jsep.h" +#include "api/jsep_ice_candidate.h" +#include "api/jsep_session_description.h" #include "media/base/codec.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/system/rtc_export.h" namespace cricket { diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc index 2a9dc3fbd..33826347f 100644 --- 
a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc @@ -174,8 +174,7 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( // Generate certificate. certificate_request_state_ = CERTIFICATE_WAITING; - rtc::scoped_refptr callback( - new rtc::RefCountedObject()); + auto callback = rtc::make_ref_counted(); callback->SignalRequestFailed.connect( this, &WebRtcSessionDescriptionFactory::OnCertificateRequestFailed); callback->SignalCertificateReady.connect( @@ -194,7 +193,7 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( } WebRtcSessionDescriptionFactory::~WebRtcSessionDescriptionFactory() { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); // Fail any requests that were asked for before identity generation completed. FailPendingRequests(kFailedDueToSessionShutdown); @@ -222,6 +221,7 @@ void WebRtcSessionDescriptionFactory::CreateOffer( CreateSessionDescriptionObserver* observer, const PeerConnectionInterface::RTCOfferAnswerOptions& options, const cricket::MediaSessionOptions& session_options) { + RTC_DCHECK_RUN_ON(signaling_thread_); std::string error = "CreateOffer"; if (certificate_request_state_ == CERTIFICATE_FAILED) { error += kFailedDueToIdentityFailed; @@ -441,7 +441,7 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer( void WebRtcSessionDescriptionFactory::FailPendingRequests( const std::string& reason) { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); while (!create_session_description_requests_.empty()) { const CreateSessionDescriptionRequest& request = create_session_description_requests_.front(); @@ -476,7 +476,7 @@ void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionSucceeded( } void WebRtcSessionDescriptionFactory::OnCertificateRequestFailed() { - RTC_DCHECK(signaling_thread_->IsCurrent()); + RTC_DCHECK_RUN_ON(signaling_thread_); 
RTC_LOG(LS_ERROR) << "Asynchronous certificate generation request failed."; certificate_request_state_ = CERTIFICATE_FAILED; diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h index 9256045d6..bd2636c0d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h @@ -12,6 +12,8 @@ #define PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_ #include + +#include #include #include #include diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/OWNERS b/TMessagesProj/jni/voip/webrtc/rtc_base/OWNERS index 107bbcd81..ce7968ca7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/OWNERS @@ -1,10 +1,8 @@ hta@webrtc.org juberti@webrtc.org -kwiberg@webrtc.org mflodman@webrtc.org -qingsi@webrtc.org -sergeyu@chromium.org tommi@webrtc.org +mbonadei@webrtc.org per-file rate_statistics*=sprang@webrtc.org per-file rate_statistics*=stefan@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.cc index 8b410a456..87d039373 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.cc @@ -15,12 +15,12 @@ namespace rtc { -AsyncInvoker::AsyncInvoker() +DEPRECATED_AsyncInvoker::DEPRECATED_AsyncInvoker() : pending_invocations_(0), - invocation_complete_(new RefCountedObject()), + invocation_complete_(make_ref_counted()), destroying_(false) {} -AsyncInvoker::~AsyncInvoker() { +DEPRECATED_AsyncInvoker::~DEPRECATED_AsyncInvoker() { destroying_.store(true, std::memory_order_relaxed); // Messages for this need to be cleared *before* our destructor is complete. 
ThreadManager::Clear(this); @@ -37,7 +37,7 @@ AsyncInvoker::~AsyncInvoker() { } } -void AsyncInvoker::OnMessage(Message* msg) { +void DEPRECATED_AsyncInvoker::OnMessage(Message* msg) { // Get the AsyncClosure shared ptr from this message's data. ScopedMessageData* data = static_cast*>(msg->pdata); @@ -46,7 +46,8 @@ void AsyncInvoker::OnMessage(Message* msg) { delete data; } -void AsyncInvoker::Flush(Thread* thread, uint32_t id /*= MQID_ANY*/) { +void DEPRECATED_AsyncInvoker::Flush(Thread* thread, + uint32_t id /*= MQID_ANY*/) { // If the destructor is waiting for invocations to finish, don't start // running even more tasks. if (destroying_.load(std::memory_order_relaxed)) @@ -55,7 +56,7 @@ void AsyncInvoker::Flush(Thread* thread, uint32_t id /*= MQID_ANY*/) { // Run this on |thread| to reduce the number of context switches. if (Thread::Current() != thread) { thread->Invoke(RTC_FROM_HERE, - Bind(&AsyncInvoker::Flush, this, thread, id)); + [this, thread, id] { Flush(thread, id); }); return; } @@ -67,14 +68,14 @@ void AsyncInvoker::Flush(Thread* thread, uint32_t id /*= MQID_ANY*/) { } } -void AsyncInvoker::Clear() { +void DEPRECATED_AsyncInvoker::Clear() { ThreadManager::Clear(this); } -void AsyncInvoker::DoInvoke(const Location& posted_from, - Thread* thread, - std::unique_ptr closure, - uint32_t id) { +void DEPRECATED_AsyncInvoker::DoInvoke(const Location& posted_from, + Thread* thread, + std::unique_ptr closure, + uint32_t id) { if (destroying_.load(std::memory_order_relaxed)) { // Note that this may be expected, if the application is AsyncInvoking // tasks that AsyncInvoke other tasks. 
But otherwise it indicates a race @@ -87,11 +88,12 @@ void AsyncInvoker::DoInvoke(const Location& posted_from, new ScopedMessageData(std::move(closure))); } -void AsyncInvoker::DoInvokeDelayed(const Location& posted_from, - Thread* thread, - std::unique_ptr closure, - uint32_t delay_ms, - uint32_t id) { +void DEPRECATED_AsyncInvoker::DoInvokeDelayed( + const Location& posted_from, + Thread* thread, + std::unique_ptr closure, + uint32_t delay_ms, + uint32_t id) { if (destroying_.load(std::memory_order_relaxed)) { // See above comment. RTC_LOG(LS_WARNING) << "Tried to invoke while destroying the invoker."; @@ -101,7 +103,7 @@ void AsyncInvoker::DoInvokeDelayed(const Location& posted_from, new ScopedMessageData(std::move(closure))); } -AsyncClosure::AsyncClosure(AsyncInvoker* invoker) +AsyncClosure::AsyncClosure(DEPRECATED_AsyncInvoker* invoker) : invoker_(invoker), invocation_complete_(invoker_->invocation_complete_) { invoker_->pending_invocations_.fetch_add(1, std::memory_order_relaxed); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h index 983e710bc..fd42ca76d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h @@ -15,9 +15,9 @@ #include #include +#include "absl/base/attributes.h" #include "api/scoped_refptr.h" #include "rtc_base/async_invoker_inl.h" -#include "rtc_base/bind.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/ref_counted_object.h" @@ -87,10 +87,10 @@ namespace rtc { // destruction. This can be done by starting each chain of invocations on the // same thread on which it will be destroyed, or by using some other // synchronization method. 
-class AsyncInvoker : public MessageHandlerAutoCleanup { +class DEPRECATED_AsyncInvoker : public MessageHandlerAutoCleanup { public: - AsyncInvoker(); - ~AsyncInvoker() override; + DEPRECATED_AsyncInvoker(); + ~DEPRECATED_AsyncInvoker() override; // Call |functor| asynchronously on |thread|, with no callback upon // completion. Returns immediately. @@ -157,7 +157,7 @@ class AsyncInvoker : public MessageHandlerAutoCleanup { // an AsyncClosure's destructor that's about to call // "invocation_complete_->Set()", it's not dereferenced after being // destroyed. - scoped_refptr> invocation_complete_; + rtc::Ref::Ptr invocation_complete_; // This flag is used to ensure that if an application AsyncInvokes tasks that // recursively AsyncInvoke other tasks ad infinitum, the cycle eventually @@ -166,9 +166,12 @@ class AsyncInvoker : public MessageHandlerAutoCleanup { friend class AsyncClosure; - RTC_DISALLOW_COPY_AND_ASSIGN(AsyncInvoker); + RTC_DISALLOW_COPY_AND_ASSIGN(DEPRECATED_AsyncInvoker); }; +using AsyncInvoker ABSL_DEPRECATED("bugs.webrtc.org/12339") = + DEPRECATED_AsyncInvoker; + } // namespace rtc #endif // RTC_BASE_ASYNC_INVOKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker_inl.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker_inl.h index 6307afe22..9fb328782 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker_inl.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker_inl.h @@ -12,7 +12,6 @@ #define RTC_BASE_ASYNC_INVOKER_INL_H_ #include "api/scoped_refptr.h" -#include "rtc_base/bind.h" #include "rtc_base/event.h" #include "rtc_base/message_handler.h" #include "rtc_base/ref_counted_object.h" @@ -22,32 +21,33 @@ namespace rtc { -class AsyncInvoker; +class DEPRECATED_AsyncInvoker; // Helper class for AsyncInvoker. Runs a task and triggers a callback // on the calling thread if necessary. 
class AsyncClosure { public: - explicit AsyncClosure(AsyncInvoker* invoker); + explicit AsyncClosure(DEPRECATED_AsyncInvoker* invoker); virtual ~AsyncClosure(); // Runs the asynchronous task, and triggers a callback to the calling // thread if needed. Should be called from the target thread. virtual void Execute() = 0; protected: - AsyncInvoker* invoker_; + DEPRECATED_AsyncInvoker* invoker_; // Reference counted so that if the AsyncInvoker destructor finishes before // an AsyncClosure's destructor that's about to call // "invocation_complete_->Set()", it's not dereferenced after being // destroyed. - scoped_refptr> invocation_complete_; + rtc::Ref::Ptr invocation_complete_; }; // Simple closure that doesn't trigger a callback for the calling thread. template class FireAndForgetAsyncClosure : public AsyncClosure { public: - explicit FireAndForgetAsyncClosure(AsyncInvoker* invoker, FunctorT&& functor) + explicit FireAndForgetAsyncClosure(DEPRECATED_AsyncInvoker* invoker, + FunctorT&& functor) : AsyncClosure(invoker), functor_(std::forward(functor)) {} virtual void Execute() { functor_(); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.cc new file mode 100644 index 000000000..d482b4e68 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.cc @@ -0,0 +1,206 @@ +/* + * Copyright 2008 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/async_resolver.h" + +#include +#include +#include + +#include "api/ref_counted_base.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" + +#if defined(WEBRTC_WIN) +#include +#include + +#include "rtc_base/win32.h" +#endif +#if defined(WEBRTC_POSIX) && !defined(__native_client__) +#if defined(WEBRTC_ANDROID) +#include "rtc_base/ifaddrs_android.h" +#else +#include +#endif +#endif // defined(WEBRTC_POSIX) && !defined(__native_client__) + +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/logging.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/third_party/sigslot/sigslot.h" // for signal_with_thread... + +namespace rtc { + +int ResolveHostname(const std::string& hostname, + int family, + std::vector* addresses) { +#ifdef __native_client__ + RTC_NOTREACHED(); + RTC_LOG(LS_WARNING) << "ResolveHostname() is not implemented for NaCl"; + return -1; +#else // __native_client__ + if (!addresses) { + return -1; + } + addresses->clear(); + struct addrinfo* result = nullptr; + struct addrinfo hints = {0}; + hints.ai_family = family; + // |family| here will almost always be AF_UNSPEC, because |family| comes from + // AsyncResolver::addr_.family(), which comes from a SocketAddress constructed + // with a hostname. When a SocketAddress is constructed with a hostname, its + // family is AF_UNSPEC. However, if someday in the future we construct + // a SocketAddress with both a hostname and a family other than AF_UNSPEC, + // then it would be possible to get a specific family value here. 
+ + // The behavior of AF_UNSPEC is roughly "get both ipv4 and ipv6", as + // documented by the various operating systems: + // Linux: http://man7.org/linux/man-pages/man3/getaddrinfo.3.html + // Windows: https://msdn.microsoft.com/en-us/library/windows/desktop/ + // ms738520(v=vs.85).aspx + // Mac: https://developer.apple.com/legacy/library/documentation/Darwin/ + // Reference/ManPages/man3/getaddrinfo.3.html + // Android (source code, not documentation): + // https://android.googlesource.com/platform/bionic/+/ + // 7e0bfb511e85834d7c6cb9631206b62f82701d60/libc/netbsd/net/getaddrinfo.c#1657 + hints.ai_flags = AI_ADDRCONFIG; + int ret = getaddrinfo(hostname.c_str(), nullptr, &hints, &result); + if (ret != 0) { + return ret; + } + struct addrinfo* cursor = result; + for (; cursor; cursor = cursor->ai_next) { + if (family == AF_UNSPEC || cursor->ai_family == family) { + IPAddress ip; + if (IPFromAddrInfo(cursor, &ip)) { + addresses->push_back(ip); + } + } + } + freeaddrinfo(result); + return 0; +#endif // !__native_client__ +} + +struct AsyncResolver::State : public RefCountedBase { + webrtc::Mutex mutex; + enum class Status { + kLive, + kDead + } status RTC_GUARDED_BY(mutex) = Status::kLive; +}; + +AsyncResolver::AsyncResolver() : error_(-1), state_(new State) {} + +AsyncResolver::~AsyncResolver() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + // Ensure the thread isn't using a stale reference to the current task queue, + // or calling into ResolveDone post destruction. 
+ webrtc::MutexLock lock(&state_->mutex); + state_->status = State::Status::kDead; +} + +void RunResolution(void* obj) { + std::function* function_ptr = + static_cast*>(obj); + (*function_ptr)(); + delete function_ptr; +} + +void AsyncResolver::Start(const SocketAddress& addr) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); + addr_ = addr; + PlatformThread::SpawnDetached( + [this, addr, caller_task_queue = webrtc::TaskQueueBase::Current(), + state = state_] { + std::vector addresses; + int error = + ResolveHostname(addr.hostname().c_str(), addr.family(), &addresses); + webrtc::MutexLock lock(&state->mutex); + if (state->status == State::Status::kLive) { + caller_task_queue->PostTask(webrtc::ToQueuedTask( + [this, error, addresses = std::move(addresses), state] { + bool live; + { + // ResolveDone can lead to instance destruction, so make sure + // we don't deadlock. + webrtc::MutexLock lock(&state->mutex); + live = state->status == State::Status::kLive; + } + if (live) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + ResolveDone(std::move(addresses), error); + } + })); + } + }, + "AsyncResolver"); +} + +bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); + if (error_ != 0 || addresses_.empty()) + return false; + + *addr = addr_; + for (size_t i = 0; i < addresses_.size(); ++i) { + if (family == addresses_[i].family()) { + addr->SetResolvedIP(addresses_[i]); + return true; + } + } + return false; +} + +int AsyncResolver::GetError() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); + return error_; +} + +void AsyncResolver::Destroy(bool wait) { + // Some callers have trouble guaranteeing that Destroy is called on the + // sequence guarded by |sequence_checker_|. 
+ // RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); + destroy_called_ = true; + MaybeSelfDestruct(); +} + +const std::vector& AsyncResolver::addresses() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(!destroy_called_); + return addresses_; +} + +void AsyncResolver::ResolveDone(std::vector addresses, int error) { + addresses_ = addresses; + error_ = error; + recursion_check_ = true; + SignalDone(this); + MaybeSelfDestruct(); +} + +void AsyncResolver::MaybeSelfDestruct() { + if (!recursion_check_) { + delete this; + } else { + recursion_check_ = false; + } +} + +} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.h new file mode 100644 index 000000000..0c053eed8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_resolver.h @@ -0,0 +1,75 @@ +/* + * Copyright 2008 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_ASYNC_RESOLVER_H_ +#define RTC_BASE_ASYNC_RESOLVER_H_ + +#if defined(WEBRTC_POSIX) +#include +#elif WEBRTC_WIN +#include // NOLINT +#endif + +#include + +#include "api/sequence_checker.h" +#include "rtc_base/async_resolver_interface.h" +#include "rtc_base/event.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" + +namespace rtc { + +// AsyncResolver will perform async DNS resolution, signaling the result on +// the SignalDone from AsyncResolverInterface when the operation completes. +// +// This class is thread-compatible, and all methods and destruction needs to +// happen from the same rtc::Thread, except for Destroy which is allowed to +// happen on another context provided it's not happening concurrently to another +// public API call, and is the last access to the object. +class RTC_EXPORT AsyncResolver : public AsyncResolverInterface { + public: + AsyncResolver(); + ~AsyncResolver() override; + + void Start(const SocketAddress& addr) override; + bool GetResolvedAddress(int family, SocketAddress* addr) const override; + int GetError() const override; + void Destroy(bool wait) override; + + const std::vector& addresses() const; + + private: + // Fwd decl. + struct State; + + void ResolveDone(std::vector addresses, int error) + RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_); + void MaybeSelfDestruct(); + + SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_); + std::vector addresses_ RTC_GUARDED_BY(sequence_checker_); + int error_ RTC_GUARDED_BY(sequence_checker_); + bool recursion_check_ = + false; // Protects against SignalDone calling into Destroy. 
+ bool destroy_called_ = false; + scoped_refptr state_; + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; +}; + +} // namespace rtc + +#endif // RTC_BASE_ASYNC_RESOLVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/bind.h b/TMessagesProj/jni/voip/webrtc/rtc_base/bind.h deleted file mode 100644 index b61d189f7..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/bind.h +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Copyright 2012 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Bind() is an overloaded function that converts method calls into function -// objects (aka functors). The method object is captured as a scoped_refptr<> if -// possible, and as a raw pointer otherwise. Any arguments to the method are -// captured by value. The return value of Bind is a stateful, nullary function -// object. Care should be taken about the lifetime of objects captured by -// Bind(); the returned functor knows nothing about the lifetime of a non -// ref-counted method object or any arguments passed by pointer, and calling the -// functor with a destroyed object will surely do bad things. -// -// To prevent the method object from being captured as a scoped_refptr<>, you -// can use Unretained. But this should only be done when absolutely necessary, -// and when the caller knows the extra reference isn't needed. 
-// -// Example usage: -// struct Foo { -// int Test1() { return 42; } -// int Test2() const { return 52; } -// int Test3(int x) { return x*x; } -// float Test4(int x, float y) { return x + y; } -// }; -// -// int main() { -// Foo foo; -// cout << rtc::Bind(&Foo::Test1, &foo)() << endl; -// cout << rtc::Bind(&Foo::Test2, &foo)() << endl; -// cout << rtc::Bind(&Foo::Test3, &foo, 3)() << endl; -// cout << rtc::Bind(&Foo::Test4, &foo, 7, 8.5f)() << endl; -// } -// -// Example usage of ref counted objects: -// struct Bar { -// int AddRef(); -// int Release(); -// -// void Test() {} -// void BindThis() { -// // The functor passed to AsyncInvoke() will keep this object alive. -// invoker.AsyncInvoke(RTC_FROM_HERE,rtc::Bind(&Bar::Test, this)); -// } -// }; -// -// int main() { -// rtc::scoped_refptr bar = new rtc::RefCountedObject(); -// auto functor = rtc::Bind(&Bar::Test, bar); -// bar = nullptr; -// // The functor stores an internal scoped_refptr, so this is safe. -// functor(); -// } -// - -#ifndef RTC_BASE_BIND_H_ -#define RTC_BASE_BIND_H_ - -#include -#include - -#include "api/scoped_refptr.h" - -#define NONAME - -namespace rtc { -namespace detail { -// This is needed because the template parameters in Bind can't be resolved -// if they're used both as parameters of the function pointer type and as -// parameters to Bind itself: the function pointer parameters are exact -// matches to the function prototype, but the parameters to bind have -// references stripped. This trick allows the compiler to dictate the Bind -// parameter types rather than deduce them. -template -struct identity { - typedef T type; -}; - -// IsRefCounted::value will be true for types that can be used in -// rtc::scoped_refptr, i.e. types that implements nullary functions AddRef() -// and Release(), regardless of their return types. AddRef() and Release() can -// be defined in T or any superclass of T. 
-template -class IsRefCounted { - // This is a complex implementation detail done with SFINAE. - - // Define types such that sizeof(Yes) != sizeof(No). - struct Yes { - char dummy[1]; - }; - struct No { - char dummy[2]; - }; - // Define two overloaded template functions with return types of different - // size. This way, we can use sizeof() on the return type to determine which - // function the compiler would have chosen. One function will be preferred - // over the other if it is possible to create it without compiler errors, - // otherwise the compiler will simply remove it, and default to the less - // preferred function. - template - static Yes test(R* r, decltype(r->AddRef(), r->Release(), 42)); - template - static No test(...); - - public: - // Trick the compiler to tell if it's possible to call AddRef() and Release(). - static const bool value = sizeof(test((T*)nullptr, 42)) == sizeof(Yes); -}; - -// TernaryTypeOperator is a helper class to select a type based on a static bool -// value. -template -struct TernaryTypeOperator {}; - -template -struct TernaryTypeOperator { - typedef IfTrueT type; -}; - -template -struct TernaryTypeOperator { - typedef IfFalseT type; -}; - -// PointerType::type will be scoped_refptr for ref counted types, and T* -// otherwise. -template -struct PointerType { - typedef typename TernaryTypeOperator::value, - scoped_refptr, - T*>::type type; -}; - -template -class UnretainedWrapper { - public: - explicit UnretainedWrapper(T* o) : ptr_(o) {} - T* get() const { return ptr_; } - - private: - T* ptr_; -}; - -} // namespace detail - -template -static inline detail::UnretainedWrapper Unretained(T* o) { - return detail::UnretainedWrapper(o); -} - -template -class MethodFunctor { - public: - MethodFunctor(MethodT method, ObjectT* object, Args... args) - : method_(method), object_(object), args_(args...) 
{} - R operator()() const { - return CallMethod(std::index_sequence_for()); - } - - private: - template - R CallMethod(std::index_sequence) const { - return (object_->*method_)(std::get(args_)...); - } - - MethodT method_; - typename detail::PointerType::type object_; - typename std::tuple::type...> args_; -}; - -template -class UnretainedMethodFunctor { - public: - UnretainedMethodFunctor(MethodT method, - detail::UnretainedWrapper object, - Args... args) - : method_(method), object_(object.get()), args_(args...) {} - R operator()() const { - return CallMethod(std::index_sequence_for()); - } - - private: - template - R CallMethod(std::index_sequence) const { - return (object_->*method_)(std::get(args_)...); - } - - MethodT method_; - ObjectT* object_; - typename std::tuple::type...> args_; -}; - -template -class Functor { - public: - Functor(const FunctorT& functor, Args... args) - : functor_(functor), args_(args...) {} - R operator()() const { - return CallFunction(std::index_sequence_for()); - } - - private: - template - R CallFunction(std::index_sequence) const { - return functor_(std::get(args_)...); - } - - FunctorT functor_; - typename std::tuple::type...> args_; -}; - -#define FP_T(x) R (ObjectT::*x)(Args...) - -template -MethodFunctor Bind( - FP_T(method), - ObjectT* object, - typename detail::identity::type... args) { - return MethodFunctor(method, object, - args...); -} - -template -MethodFunctor Bind( - FP_T(method), - const scoped_refptr& object, - typename detail::identity::type... args) { - return MethodFunctor(method, object.get(), - args...); -} - -template -UnretainedMethodFunctor Bind( - FP_T(method), - detail::UnretainedWrapper object, - typename detail::identity::type... args) { - return UnretainedMethodFunctor( - method, object, args...); -} - -#undef FP_T -#define FP_T(x) R (ObjectT::*x)(Args...) const - -template -MethodFunctor Bind( - FP_T(method), - const ObjectT* object, - typename detail::identity::type... 
args) { - return MethodFunctor(method, object, - args...); -} -template -UnretainedMethodFunctor Bind( - FP_T(method), - detail::UnretainedWrapper object, - typename detail::identity::type... args) { - return UnretainedMethodFunctor( - method, object, args...); -} - -#undef FP_T -#define FP_T(x) R (*x)(Args...) - -template -Functor Bind( - FP_T(function), - typename detail::identity::type... args) { - return Functor(function, args...); -} - -#undef FP_T - -} // namespace rtc - -#undef NONAME - -#endif // RTC_BASE_BIND_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.cc index 540141fe5..d212ef563 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.cc @@ -83,36 +83,36 @@ uint64_t BitBuffer::RemainingBitCount() const { return (static_cast(byte_count_) - byte_offset_) * 8 - bit_offset_; } -bool BitBuffer::ReadUInt8(uint8_t* val) { +bool BitBuffer::ReadUInt8(uint8_t& val) { uint32_t bit_val; - if (!ReadBits(&bit_val, sizeof(uint8_t) * 8)) { + if (!ReadBits(sizeof(uint8_t) * 8, bit_val)) { return false; } RTC_DCHECK(bit_val <= std::numeric_limits::max()); - *val = static_cast(bit_val); + val = static_cast(bit_val); return true; } -bool BitBuffer::ReadUInt16(uint16_t* val) { +bool BitBuffer::ReadUInt16(uint16_t& val) { uint32_t bit_val; - if (!ReadBits(&bit_val, sizeof(uint16_t) * 8)) { + if (!ReadBits(sizeof(uint16_t) * 8, bit_val)) { return false; } RTC_DCHECK(bit_val <= std::numeric_limits::max()); - *val = static_cast(bit_val); + val = static_cast(bit_val); return true; } -bool BitBuffer::ReadUInt32(uint32_t* val) { - return ReadBits(val, sizeof(uint32_t) * 8); +bool BitBuffer::ReadUInt32(uint32_t& val) { + return ReadBits(sizeof(uint32_t) * 8, val); } -bool BitBuffer::PeekBits(uint32_t* val, size_t bit_count) { +bool BitBuffer::PeekBits(size_t bit_count, uint32_t& val) { // TODO(nisse): Could allow bit_count == 0 and always 
return success. But // current code reads one byte beyond end of buffer in the case that // RemainingBitCount() == 0 and bit_count == 0. RTC_DCHECK(bit_count > 0); - if (!val || bit_count > RemainingBitCount() || bit_count > 32) { + if (bit_count > RemainingBitCount() || bit_count > 32) { return false; } const uint8_t* bytes = bytes_ + byte_offset_; @@ -121,7 +121,7 @@ bool BitBuffer::PeekBits(uint32_t* val, size_t bit_count) { // If we're reading fewer bits than what's left in the current byte, just // return the portion of this byte that we need. if (bit_count < remaining_bits_in_current_byte) { - *val = HighestBits(bits, bit_offset_ + bit_count); + val = HighestBits(bits, bit_offset_ + bit_count); return true; } // Otherwise, subtract what we've read from the bit count and read as many @@ -137,12 +137,50 @@ bool BitBuffer::PeekBits(uint32_t* val, size_t bit_count) { bits <<= bit_count; bits |= HighestBits(*bytes, bit_count); } - *val = bits; + val = bits; return true; } -bool BitBuffer::ReadBits(uint32_t* val, size_t bit_count) { - return PeekBits(val, bit_count) && ConsumeBits(bit_count); +bool BitBuffer::PeekBits(size_t bit_count, uint64_t& val) { + // TODO(nisse): Could allow bit_count == 0 and always return success. But + // current code reads one byte beyond end of buffer in the case that + // RemainingBitCount() == 0 and bit_count == 0. + RTC_DCHECK(bit_count > 0); + if (bit_count > RemainingBitCount() || bit_count > 64) { + return false; + } + const uint8_t* bytes = bytes_ + byte_offset_; + size_t remaining_bits_in_current_byte = 8 - bit_offset_; + uint64_t bits = LowestBits(*bytes++, remaining_bits_in_current_byte); + // If we're reading fewer bits than what's left in the current byte, just + // return the portion of this byte that we need. 
+ if (bit_count < remaining_bits_in_current_byte) { + val = HighestBits(bits, bit_offset_ + bit_count); + return true; + } + // Otherwise, subtract what we've read from the bit count and read as many + // full bytes as we can into bits. + bit_count -= remaining_bits_in_current_byte; + while (bit_count >= 8) { + bits = (bits << 8) | *bytes++; + bit_count -= 8; + } + // Whatever we have left is smaller than a byte, so grab just the bits we need + // and shift them into the lowest bits. + if (bit_count > 0) { + bits <<= bit_count; + bits |= HighestBits(*bytes, bit_count); + } + val = bits; + return true; +} + +bool BitBuffer::ReadBits(size_t bit_count, uint32_t& val) { + return PeekBits(bit_count, val) && ConsumeBits(bit_count); +} + +bool BitBuffer::ReadBits(size_t bit_count, uint64_t& val) { + return PeekBits(bit_count, val) && ConsumeBits(bit_count); } bool BitBuffer::ConsumeBytes(size_t byte_count) { @@ -159,39 +197,36 @@ bool BitBuffer::ConsumeBits(size_t bit_count) { return true; } -bool BitBuffer::ReadNonSymmetric(uint32_t* val, uint32_t num_values) { +bool BitBuffer::ReadNonSymmetric(uint32_t num_values, uint32_t& val) { RTC_DCHECK_GT(num_values, 0); RTC_DCHECK_LE(num_values, uint32_t{1} << 31); if (num_values == 1) { // When there is only one possible value, it requires zero bits to store it. // But ReadBits doesn't support reading zero bits. 
- *val = 0; + val = 0; return true; } size_t count_bits = CountBits(num_values); uint32_t num_min_bits_values = (uint32_t{1} << count_bits) - num_values; - if (!ReadBits(val, count_bits - 1)) { + if (!ReadBits(count_bits - 1, val)) { return false; } - if (*val < num_min_bits_values) { + if (val < num_min_bits_values) { return true; } uint32_t extra_bit; - if (!ReadBits(&extra_bit, /*bit_count=*/1)) { + if (!ReadBits(/*bit_count=*/1, extra_bit)) { return false; } - *val = (*val << 1) + extra_bit - num_min_bits_values; + val = (val << 1) + extra_bit - num_min_bits_values; return true; } -bool BitBuffer::ReadExponentialGolomb(uint32_t* val) { - if (!val) { - return false; - } +bool BitBuffer::ReadExponentialGolomb(uint32_t& val) { // Store off the current byte/bit offset, in case we want to restore them due // to a failed parse. size_t original_byte_offset = byte_offset_; @@ -200,35 +235,35 @@ bool BitBuffer::ReadExponentialGolomb(uint32_t* val) { // Count the number of leading 0 bits by peeking/consuming them one at a time. size_t zero_bit_count = 0; uint32_t peeked_bit; - while (PeekBits(&peeked_bit, 1) && peeked_bit == 0) { + while (PeekBits(1, peeked_bit) && peeked_bit == 0) { zero_bit_count++; ConsumeBits(1); } // We should either be at the end of the stream, or the next bit should be 1. - RTC_DCHECK(!PeekBits(&peeked_bit, 1) || peeked_bit == 1); + RTC_DCHECK(!PeekBits(1, peeked_bit) || peeked_bit == 1); // The bit count of the value is the number of zeros + 1. Make sure that many // bits fits in a uint32_t and that we have enough bits left for it, and then // read the value. 
size_t value_bit_count = zero_bit_count + 1; - if (value_bit_count > 32 || !ReadBits(val, value_bit_count)) { + if (value_bit_count > 32 || !ReadBits(value_bit_count, val)) { RTC_CHECK(Seek(original_byte_offset, original_bit_offset)); return false; } - *val -= 1; + val -= 1; return true; } -bool BitBuffer::ReadSignedExponentialGolomb(int32_t* val) { +bool BitBuffer::ReadSignedExponentialGolomb(int32_t& val) { uint32_t unsigned_val; - if (!ReadExponentialGolomb(&unsigned_val)) { + if (!ReadExponentialGolomb(unsigned_val)) { return false; } if ((unsigned_val & 1) == 0) { - *val = -static_cast(unsigned_val / 2); + val = -static_cast(unsigned_val / 2); } else { - *val = (unsigned_val + 1) / 2; + val = (unsigned_val + 1) / 2; } return true; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h index de7bf02d5..044e7560f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h @@ -14,6 +14,7 @@ #include // For size_t. #include // For integer types. +#include "absl/base/attributes.h" #include "rtc_base/constructor_magic.h" namespace rtc { @@ -38,18 +39,35 @@ class BitBuffer { // Reads byte-sized values from the buffer. Returns false if there isn't // enough data left for the specified type. - bool ReadUInt8(uint8_t* val); - bool ReadUInt16(uint16_t* val); - bool ReadUInt32(uint32_t* val); + bool ReadUInt8(uint8_t& val); + bool ReadUInt16(uint16_t& val); + bool ReadUInt32(uint32_t& val); + bool ReadUInt8(uint8_t* val) { + return val ? ReadUInt8(*val) : false; + } + bool ReadUInt16(uint16_t* val) { + return val ? ReadUInt16(*val) : false; + } + bool ReadUInt32(uint32_t* val) { + return val ? ReadUInt32(*val) : false; + } // Reads bit-sized values from the buffer. Returns false if there isn't enough // data left for the specified bit count. 
- bool ReadBits(uint32_t* val, size_t bit_count); + bool ReadBits(size_t bit_count, uint32_t& val); + bool ReadBits(size_t bit_count, uint64_t& val); + bool ReadBits(uint32_t* val, size_t bit_count) { + return val ? ReadBits(bit_count, *val) : false; + } // Peeks bit-sized values from the buffer. Returns false if there isn't enough // data left for the specified number of bits. Doesn't move the current // offset. - bool PeekBits(uint32_t* val, size_t bit_count); + bool PeekBits(size_t bit_count, uint32_t& val); + bool PeekBits(size_t bit_count, uint64_t& val); + bool PeekBits(uint32_t* val, size_t bit_count) { + return val ? PeekBits(bit_count, *val) : false; + } // Reads value in range [0, num_values - 1]. // This encoding is similar to ReadBits(val, Ceil(Log2(num_values)), @@ -61,7 +79,10 @@ class BitBuffer { // Value v in range [k, num_values - 1] is encoded as (v+k) in n bits. // https://aomediacodec.github.io/av1-spec/#nsn // Returns false if there isn't enough data left. - bool ReadNonSymmetric(uint32_t* val, uint32_t num_values); + bool ReadNonSymmetric(uint32_t num_values, uint32_t& val); + bool ReadNonSymmetric(uint32_t* val, uint32_t num_values) { + return val ? ReadNonSymmetric(num_values, *val) : false; + } // Reads the exponential golomb encoded value at the current offset. // Exponential golomb values are encoded as: @@ -71,11 +92,18 @@ class BitBuffer { // and increment the result by 1. // Returns false if there isn't enough data left for the specified type, or if // the value wouldn't fit in a uint32_t. - bool ReadExponentialGolomb(uint32_t* val); + bool ReadExponentialGolomb(uint32_t& val); + bool ReadExponentialGolomb(uint32_t* val) { + return val ? ReadExponentialGolomb(*val) : false; + } + // Reads signed exponential golomb values at the current offset. Signed // exponential golomb values are just the unsigned values mapped to the // sequence 0, 1, -1, 2, -2, etc. in order. 
- bool ReadSignedExponentialGolomb(int32_t* val); + bool ReadSignedExponentialGolomb(int32_t& val); + bool ReadSignedExponentialGolomb(int32_t* val) { + return val ? ReadSignedExponentialGolomb(*val) : false; + } // Moves current position |byte_count| bytes forward. Returns false if // there aren't enough bytes left in the buffer. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.cc new file mode 100644 index 000000000..bb14036a3 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.cc @@ -0,0 +1,410 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/boringssl_certificate.h" + +#if defined(WEBRTC_WIN) +// Must be included first before openssl headers. +#include "rtc_base/win32.h" // NOLINT +#endif // WEBRTC_WIN + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/helpers.h" +#include "rtc_base/logging.h" +#include "rtc_base/message_digest.h" +#include "rtc_base/openssl_digest.h" +#include "rtc_base/openssl_key_pair.h" +#include "rtc_base/openssl_utility.h" + +namespace rtc { +namespace { + +// List of OIDs of signature algorithms accepted by WebRTC. +// Taken from openssl/nid.h. 
+static const uint8_t kMD5WithRSA[] = {0x2b, 0x0e, 0x03, 0x02, 0x03}; +static const uint8_t kMD5WithRSAEncryption[] = {0x2a, 0x86, 0x48, 0x86, 0xf7, + 0x0d, 0x01, 0x01, 0x04}; +static const uint8_t kECDSAWithSHA1[] = {0x2a, 0x86, 0x48, 0xce, + 0x3d, 0x04, 0x01}; +static const uint8_t kDSAWithSHA1[] = {0x2a, 0x86, 0x48, 0xce, + 0x38, 0x04, 0x03}; +static const uint8_t kDSAWithSHA1_2[] = {0x2b, 0x0e, 0x03, 0x02, 0x1b}; +static const uint8_t kSHA1WithRSA[] = {0x2b, 0x0e, 0x03, 0x02, 0x1d}; +static const uint8_t kSHA1WithRSAEncryption[] = {0x2a, 0x86, 0x48, 0x86, 0xf7, + 0x0d, 0x01, 0x01, 0x05}; +static const uint8_t kECDSAWithSHA224[] = {0x2a, 0x86, 0x48, 0xce, + 0x3d, 0x04, 0x03, 0x01}; +static const uint8_t kSHA224WithRSAEncryption[] = {0x2a, 0x86, 0x48, 0x86, 0xf7, + 0x0d, 0x01, 0x01, 0x0e}; +static const uint8_t kDSAWithSHA224[] = {0x60, 0x86, 0x48, 0x01, 0x65, + 0x03, 0x04, 0x03, 0x01}; +static const uint8_t kECDSAWithSHA256[] = {0x2a, 0x86, 0x48, 0xce, + 0x3d, 0x04, 0x03, 0x02}; +static const uint8_t kSHA256WithRSAEncryption[] = {0x2a, 0x86, 0x48, 0x86, 0xf7, + 0x0d, 0x01, 0x01, 0x0b}; +static const uint8_t kDSAWithSHA256[] = {0x60, 0x86, 0x48, 0x01, 0x65, + 0x03, 0x04, 0x03, 0x02}; +static const uint8_t kECDSAWithSHA384[] = {0x2a, 0x86, 0x48, 0xce, + 0x3d, 0x04, 0x03, 0x03}; +static const uint8_t kSHA384WithRSAEncryption[] = {0x2a, 0x86, 0x48, 0x86, 0xf7, + 0x0d, 0x01, 0x01, 0x0c}; +static const uint8_t kECDSAWithSHA512[] = {0x2a, 0x86, 0x48, 0xce, + 0x3d, 0x04, 0x03, 0x04}; +static const uint8_t kSHA512WithRSAEncryption[] = {0x2a, 0x86, 0x48, 0x86, 0xf7, + 0x0d, 0x01, 0x01, 0x0d}; + +#if !defined(NDEBUG) +// Print a certificate to the log, for debugging. +static void PrintCert(BoringSSLCertificate* cert) { + // Since we're using CRYPTO_BUFFER, we can't use X509_print_ex, so we'll just + // print the PEM string. 
+ RTC_DLOG(LS_VERBOSE) << "PEM representation of certificate:\n" + << cert->ToPEMString(); +} +#endif + +bool AddSHA256SignatureAlgorithm(CBB* cbb, KeyType key_type) { + // An AlgorithmIdentifier is described in RFC 5280, 4.1.1.2. + CBB sequence, oid, params; + if (!CBB_add_asn1(cbb, &sequence, CBS_ASN1_SEQUENCE) || + !CBB_add_asn1(&sequence, &oid, CBS_ASN1_OBJECT)) { + return false; + } + + switch (key_type) { + case KT_RSA: + if (!CBB_add_bytes(&oid, kSHA256WithRSAEncryption, + sizeof(kSHA256WithRSAEncryption)) || + !CBB_add_asn1(&sequence, ¶ms, CBS_ASN1_NULL)) { + return false; + } + break; + case KT_ECDSA: + if (!CBB_add_bytes(&oid, kECDSAWithSHA256, sizeof(kECDSAWithSHA256))) { + return false; + } + break; + default: + RTC_NOTREACHED(); + return false; + } + if (!CBB_flush(cbb)) { + return false; + } + return true; +} + +// Adds an X.509 Common Name to |cbb|. +bool AddCommonName(CBB* cbb, const std::string& common_name) { + // See RFC 4519. + static const uint8_t kCommonName[] = {0x55, 0x04, 0x03}; + + if (common_name.empty()) { + RTC_LOG(LS_ERROR) << "Common name cannot be empty."; + return false; + } + + // See RFC 5280, section 4.1.2.4. 
+ CBB rdns; + if (!CBB_add_asn1(cbb, &rdns, CBS_ASN1_SEQUENCE)) { + return false; + } + + CBB rdn, attr, type, value; + if (!CBB_add_asn1(&rdns, &rdn, CBS_ASN1_SET) || + !CBB_add_asn1(&rdn, &attr, CBS_ASN1_SEQUENCE) || + !CBB_add_asn1(&attr, &type, CBS_ASN1_OBJECT) || + !CBB_add_bytes(&type, kCommonName, sizeof(kCommonName)) || + !CBB_add_asn1(&attr, &value, CBS_ASN1_UTF8STRING) || + !CBB_add_bytes(&value, + reinterpret_cast(common_name.c_str()), + common_name.size()) || + !CBB_flush(cbb)) { + return false; + } + + return true; +} + +bool AddTime(CBB* cbb, time_t time) { + bssl::UniquePtr asn1_time(ASN1_TIME_new()); + if (!asn1_time) { + return false; + } + + if (!ASN1_TIME_set(asn1_time.get(), time)) { + return false; + } + + unsigned tag; + switch (asn1_time->type) { + case V_ASN1_UTCTIME: + tag = CBS_ASN1_UTCTIME; + break; + case V_ASN1_GENERALIZEDTIME: + tag = CBS_ASN1_GENERALIZEDTIME; + break; + default: + return false; + } + + CBB child; + if (!CBB_add_asn1(cbb, &child, tag) || + !CBB_add_bytes(&child, asn1_time->data, asn1_time->length) || + !CBB_flush(cbb)) { + return false; + } + + return true; +} + +// Generate a self-signed certificate, with the public key from the +// given key pair. Caller is responsible for freeing the returned object. +static bssl::UniquePtr MakeCertificate( + EVP_PKEY* pkey, + const SSLIdentityParams& params) { + RTC_LOG(LS_INFO) << "Making certificate for " << params.common_name; + + // See RFC 5280, section 4.1. First, construct the TBSCertificate. 
+ bssl::ScopedCBB cbb; + CBB tbs_cert, version, validity; + uint8_t* tbs_cert_bytes; + size_t tbs_cert_len; + uint64_t serial_number; + if (!CBB_init(cbb.get(), 64) || + !CBB_add_asn1(cbb.get(), &tbs_cert, CBS_ASN1_SEQUENCE) || + !CBB_add_asn1(&tbs_cert, &version, + CBS_ASN1_CONTEXT_SPECIFIC | CBS_ASN1_CONSTRUCTED | 0) || + !CBB_add_asn1_uint64(&version, 2) || + !RAND_bytes(reinterpret_cast(&serial_number), + sizeof(serial_number)) || + !CBB_add_asn1_uint64(&tbs_cert, serial_number) || + !AddSHA256SignatureAlgorithm(&tbs_cert, params.key_params.type()) || + !AddCommonName(&tbs_cert, params.common_name) || // issuer + !CBB_add_asn1(&tbs_cert, &validity, CBS_ASN1_SEQUENCE) || + !AddTime(&validity, params.not_before) || + !AddTime(&validity, params.not_after) || + !AddCommonName(&tbs_cert, params.common_name) || // subject + !EVP_marshal_public_key(&tbs_cert, pkey) || // subjectPublicKeyInfo + !CBB_finish(cbb.get(), &tbs_cert_bytes, &tbs_cert_len)) { + return nullptr; + } + + bssl::UniquePtr delete_tbs_cert_bytes(tbs_cert_bytes); + + // Sign the TBSCertificate and write the entire certificate. + CBB cert, signature; + bssl::ScopedEVP_MD_CTX ctx; + uint8_t* sig_out; + size_t sig_len; + uint8_t* cert_bytes; + size_t cert_len; + if (!CBB_init(cbb.get(), tbs_cert_len) || + !CBB_add_asn1(cbb.get(), &cert, CBS_ASN1_SEQUENCE) || + !CBB_add_bytes(&cert, tbs_cert_bytes, tbs_cert_len) || + !AddSHA256SignatureAlgorithm(&cert, params.key_params.type()) || + !CBB_add_asn1(&cert, &signature, CBS_ASN1_BITSTRING) || + !CBB_add_u8(&signature, 0 /* no unused bits */) || + !EVP_DigestSignInit(ctx.get(), nullptr, EVP_sha256(), nullptr, pkey) || + // Compute the maximum signature length. + !EVP_DigestSign(ctx.get(), nullptr, &sig_len, tbs_cert_bytes, + tbs_cert_len) || + !CBB_reserve(&signature, &sig_out, sig_len) || + // Actually sign the TBSCertificate. 
+ !EVP_DigestSign(ctx.get(), sig_out, &sig_len, tbs_cert_bytes, + tbs_cert_len) || + !CBB_did_write(&signature, sig_len) || + !CBB_finish(cbb.get(), &cert_bytes, &cert_len)) { + return nullptr; + } + bssl::UniquePtr delete_cert_bytes(cert_bytes); + + RTC_LOG(LS_INFO) << "Returning certificate"; + return bssl::UniquePtr( + CRYPTO_BUFFER_new(cert_bytes, cert_len, openssl::GetBufferPool())); +} + +} // namespace + +BoringSSLCertificate::BoringSSLCertificate( + bssl::UniquePtr cert_buffer) + : cert_buffer_(std::move(cert_buffer)) { + RTC_DCHECK(cert_buffer_ != nullptr); +} + +std::unique_ptr BoringSSLCertificate::Generate( + OpenSSLKeyPair* key_pair, + const SSLIdentityParams& params) { + SSLIdentityParams actual_params(params); + if (actual_params.common_name.empty()) { + // Use a random string, arbitrarily 8 chars long. + actual_params.common_name = CreateRandomString(8); + } + bssl::UniquePtr cert_buffer = + MakeCertificate(key_pair->pkey(), actual_params); + if (!cert_buffer) { + openssl::LogSSLErrors("Generating certificate"); + return nullptr; + } + auto ret = std::make_unique(std::move(cert_buffer)); +#if !defined(NDEBUG) + PrintCert(ret.get()); +#endif + return ret; +} + +std::unique_ptr BoringSSLCertificate::FromPEMString( + const std::string& pem_string) { + std::string der; + if (!SSLIdentity::PemToDer(kPemTypeCertificate, pem_string, &der)) { + return nullptr; + } + bssl::UniquePtr cert_buffer( + CRYPTO_BUFFER_new(reinterpret_cast(der.c_str()), + der.length(), openssl::GetBufferPool())); + if (!cert_buffer) { + return nullptr; + } + return std::make_unique(std::move(cert_buffer)); +} + +#define OID_MATCHES(oid, oid_other) \ + (CBS_len(&oid) == sizeof(oid_other) && \ + 0 == memcmp(CBS_data(&oid), oid_other, sizeof(oid_other))) + +bool BoringSSLCertificate::GetSignatureDigestAlgorithm( + std::string* algorithm) const { + CBS oid; + if (!openssl::ParseCertificate(cert_buffer_.get(), &oid, nullptr)) { + RTC_LOG(LS_ERROR) << "Failed to parse certificate."; + 
return false; + } + if (OID_MATCHES(oid, kMD5WithRSA) || + OID_MATCHES(oid, kMD5WithRSAEncryption)) { + *algorithm = DIGEST_MD5; + return true; + } + if (OID_MATCHES(oid, kECDSAWithSHA1) || OID_MATCHES(oid, kDSAWithSHA1) || + OID_MATCHES(oid, kDSAWithSHA1_2) || OID_MATCHES(oid, kSHA1WithRSA) || + OID_MATCHES(oid, kSHA1WithRSAEncryption)) { + *algorithm = DIGEST_SHA_1; + return true; + } + if (OID_MATCHES(oid, kECDSAWithSHA224) || + OID_MATCHES(oid, kSHA224WithRSAEncryption) || + OID_MATCHES(oid, kDSAWithSHA224)) { + *algorithm = DIGEST_SHA_224; + return true; + } + if (OID_MATCHES(oid, kECDSAWithSHA256) || + OID_MATCHES(oid, kSHA256WithRSAEncryption) || + OID_MATCHES(oid, kDSAWithSHA256)) { + *algorithm = DIGEST_SHA_256; + return true; + } + if (OID_MATCHES(oid, kECDSAWithSHA384) || + OID_MATCHES(oid, kSHA384WithRSAEncryption)) { + *algorithm = DIGEST_SHA_384; + return true; + } + if (OID_MATCHES(oid, kECDSAWithSHA512) || + OID_MATCHES(oid, kSHA512WithRSAEncryption)) { + *algorithm = DIGEST_SHA_512; + return true; + } + // Unknown algorithm. There are several unhandled options that are less + // common and more complex. 
+ RTC_LOG(LS_ERROR) << "Unknown signature algorithm."; + algorithm->clear(); + return false; +} + +bool BoringSSLCertificate::ComputeDigest(const std::string& algorithm, + unsigned char* digest, + size_t size, + size_t* length) const { + return ComputeDigest(cert_buffer_.get(), algorithm, digest, size, length); +} + +bool BoringSSLCertificate::ComputeDigest(const CRYPTO_BUFFER* cert_buffer, + const std::string& algorithm, + unsigned char* digest, + size_t size, + size_t* length) { + const EVP_MD* md = nullptr; + unsigned int n = 0; + if (!OpenSSLDigest::GetDigestEVP(algorithm, &md)) { + return false; + } + if (size < static_cast(EVP_MD_size(md))) { + return false; + } + if (!EVP_Digest(CRYPTO_BUFFER_data(cert_buffer), + CRYPTO_BUFFER_len(cert_buffer), digest, &n, md, nullptr)) { + return false; + } + *length = n; + return true; +} + +BoringSSLCertificate::~BoringSSLCertificate() {} + +std::unique_ptr BoringSSLCertificate::Clone() const { + return std::make_unique( + bssl::UpRef(cert_buffer_.get())); +} + +std::string BoringSSLCertificate::ToPEMString() const { + return SSLIdentity::DerToPem(kPemTypeCertificate, + CRYPTO_BUFFER_data(cert_buffer_.get()), + CRYPTO_BUFFER_len(cert_buffer_.get())); +} + +void BoringSSLCertificate::ToDER(Buffer* der_buffer) const { + der_buffer->SetData(CRYPTO_BUFFER_data(cert_buffer_.get()), + CRYPTO_BUFFER_len(cert_buffer_.get())); +} + +bool BoringSSLCertificate::operator==(const BoringSSLCertificate& other) const { + return CRYPTO_BUFFER_len(cert_buffer_.get()) == + CRYPTO_BUFFER_len(other.cert_buffer_.get()) && + 0 == memcmp(CRYPTO_BUFFER_data(cert_buffer_.get()), + CRYPTO_BUFFER_data(other.cert_buffer_.get()), + CRYPTO_BUFFER_len(cert_buffer_.get())); +} + +bool BoringSSLCertificate::operator!=(const BoringSSLCertificate& other) const { + return !(*this == other); +} + +int64_t BoringSSLCertificate::CertificateExpirationTime() const { + int64_t ret; + if (!openssl::ParseCertificate(cert_buffer_.get(), nullptr, &ret)) { + 
RTC_LOG(LS_ERROR) << "Failed to parse certificate."; + return -1; + } + return ret; +} + +} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h new file mode 100644 index 000000000..740763dc6 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h @@ -0,0 +1,80 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_BORINGSSL_CERTIFICATE_H_ +#define RTC_BASE_BORINGSSL_CERTIFICATE_H_ + +#include +#include +#include + +#include +#include + +#include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" + +namespace rtc { + +class OpenSSLKeyPair; + +// BoringSSLCertificate encapsulates a BoringSSL CRYPTO_BUFFER object holding a +// certificate, which is also reference counted inside the BoringSSL library. +// This offers binary size and memory improvements over the OpenSSL X509 +// object. 
+class BoringSSLCertificate final : public SSLCertificate { + public: + explicit BoringSSLCertificate(bssl::UniquePtr cert_buffer); + + static std::unique_ptr Generate( + OpenSSLKeyPair* key_pair, + const SSLIdentityParams& params); + static std::unique_ptr FromPEMString( + const std::string& pem_string); + + ~BoringSSLCertificate() override; + + std::unique_ptr Clone() const override; + + CRYPTO_BUFFER* cert_buffer() const { return cert_buffer_.get(); } + + std::string ToPEMString() const override; + void ToDER(Buffer* der_buffer) const override; + bool operator==(const BoringSSLCertificate& other) const; + bool operator!=(const BoringSSLCertificate& other) const; + + // Compute the digest of the certificate given |algorithm|. + bool ComputeDigest(const std::string& algorithm, + unsigned char* digest, + size_t size, + size_t* length) const override; + + // Compute the digest of a certificate as a CRYPTO_BUFFER. + static bool ComputeDigest(const CRYPTO_BUFFER* cert_buffer, + const std::string& algorithm, + unsigned char* digest, + size_t size, + size_t* length); + + bool GetSignatureDigestAlgorithm(std::string* algorithm) const override; + + int64_t CertificateExpirationTime() const override; + + private: + // A handle to the DER encoded certificate data. + bssl::UniquePtr cert_buffer_; + RTC_DISALLOW_COPY_AND_ASSIGN(BoringSSLCertificate); +}; + +} // namespace rtc + +#endif // RTC_BASE_BORINGSSL_CERTIFICATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.cc new file mode 100644 index 000000000..d22c8ce52 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.cc @@ -0,0 +1,215 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/boringssl_identity.h" + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "absl/memory/memory.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/openssl.h" +#include "rtc_base/openssl_utility.h" + +namespace rtc { + +BoringSSLIdentity::BoringSSLIdentity( + std::unique_ptr key_pair, + std::unique_ptr certificate) + : key_pair_(std::move(key_pair)) { + RTC_DCHECK(key_pair_ != nullptr); + RTC_DCHECK(certificate != nullptr); + std::vector> certs; + certs.push_back(std::move(certificate)); + cert_chain_.reset(new SSLCertChain(std::move(certs))); +} + +BoringSSLIdentity::BoringSSLIdentity(std::unique_ptr key_pair, + std::unique_ptr cert_chain) + : key_pair_(std::move(key_pair)), cert_chain_(std::move(cert_chain)) { + RTC_DCHECK(key_pair_ != nullptr); + RTC_DCHECK(cert_chain_ != nullptr); +} + +BoringSSLIdentity::~BoringSSLIdentity() = default; + +std::unique_ptr BoringSSLIdentity::CreateInternal( + const SSLIdentityParams& params) { + auto key_pair = OpenSSLKeyPair::Generate(params.key_params); + if (key_pair) { + std::unique_ptr certificate( + BoringSSLCertificate::Generate(key_pair.get(), params)); + if (certificate != nullptr) { + return absl::WrapUnique( + new BoringSSLIdentity(std::move(key_pair), std::move(certificate))); + } + } + RTC_LOG(LS_ERROR) << "Identity generation failed."; + return nullptr; +} + +// static +std::unique_ptr BoringSSLIdentity::CreateWithExpiration( + const std::string& common_name, + const KeyParams& key_params, + time_t certificate_lifetime) { + SSLIdentityParams params; + params.key_params = key_params; + params.common_name = common_name; + time_t now = time(nullptr); + params.not_before = now 
+ kCertificateWindowInSeconds; + params.not_after = now + certificate_lifetime; + if (params.not_before > params.not_after) + return nullptr; + return CreateInternal(params); +} + +std::unique_ptr BoringSSLIdentity::CreateForTest( + const SSLIdentityParams& params) { + return CreateInternal(params); +} + +std::unique_ptr BoringSSLIdentity::CreateFromPEMStrings( + const std::string& private_key, + const std::string& certificate) { + std::unique_ptr cert( + BoringSSLCertificate::FromPEMString(certificate)); + if (!cert) { + RTC_LOG(LS_ERROR) + << "Failed to create BoringSSLCertificate from PEM string."; + return nullptr; + } + + auto key_pair = OpenSSLKeyPair::FromPrivateKeyPEMString(private_key); + if (!key_pair) { + RTC_LOG(LS_ERROR) << "Failed to create key pair from PEM string."; + return nullptr; + } + + return absl::WrapUnique( + new BoringSSLIdentity(std::move(key_pair), std::move(cert))); +} + +std::unique_ptr BoringSSLIdentity::CreateFromPEMChainStrings( + const std::string& private_key, + const std::string& certificate_chain) { + bssl::UniquePtr bio( + BIO_new_mem_buf(certificate_chain.data(), + rtc::dchecked_cast(certificate_chain.size()))); + if (!bio) { + return nullptr; + } + BIO_set_mem_eof_return(bio.get(), 0); + std::vector> certs; + while (true) { + char* name; + char* header; + unsigned char* data; + long len; // NOLINT + int ret = PEM_read_bio(bio.get(), &name, &header, &data, &len); + if (ret == 0) { + uint32_t err = ERR_peek_error(); + if (ERR_GET_LIB(err) == ERR_LIB_PEM && + ERR_GET_REASON(err) == PEM_R_NO_START_LINE) { + break; + } + RTC_LOG(LS_ERROR) << "Failed to parse certificate from PEM string."; + return nullptr; + } + bssl::UniquePtr owned_name(name); + bssl::UniquePtr owned_header(header); + bssl::UniquePtr owned_data(data); + if (strcmp(owned_name.get(), PEM_STRING_X509) != 0) { + RTC_LOG(LS_ERROR) + << "Non-certificate found while parsing certificate chain: " + << owned_name.get(); + return nullptr; + } + bssl::UniquePtr 
crypto_buffer( + CRYPTO_BUFFER_new(data, len, openssl::GetBufferPool())); + if (!crypto_buffer) { + return nullptr; + } + certs.emplace_back(new BoringSSLCertificate(std::move(crypto_buffer))); + } + if (certs.empty()) { + RTC_LOG(LS_ERROR) << "Found no certificates in PEM string."; + return nullptr; + } + + auto key_pair = OpenSSLKeyPair::FromPrivateKeyPEMString(private_key); + if (!key_pair) { + RTC_LOG(LS_ERROR) << "Failed to create key pair from PEM string."; + return nullptr; + } + + return absl::WrapUnique(new BoringSSLIdentity( + std::move(key_pair), std::make_unique(std::move(certs)))); +} + +const BoringSSLCertificate& BoringSSLIdentity::certificate() const { + return *static_cast(&cert_chain_->Get(0)); +} + +const SSLCertChain& BoringSSLIdentity::cert_chain() const { + return *cert_chain_.get(); +} + +std::unique_ptr BoringSSLIdentity::CloneInternal() const { + // We cannot use std::make_unique here because the referenced + // BoringSSLIdentity constructor is private. + return absl::WrapUnique( + new BoringSSLIdentity(key_pair_->Clone(), cert_chain_->Clone())); +} + +bool BoringSSLIdentity::ConfigureIdentity(SSL_CTX* ctx) { + std::vector cert_buffers; + for (size_t i = 0; i < cert_chain_->GetSize(); ++i) { + cert_buffers.push_back( + static_cast(&cert_chain_->Get(i)) + ->cert_buffer()); + } + // 1 is the documented success return code. 
+ if (1 != SSL_CTX_set_chain_and_key(ctx, &cert_buffers[0], cert_buffers.size(), + key_pair_->pkey(), nullptr)) { + openssl::LogSSLErrors("Configuring key and certificate"); + return false; + } + return true; +} + +std::string BoringSSLIdentity::PrivateKeyToPEMString() const { + return key_pair_->PrivateKeyToPEMString(); +} + +std::string BoringSSLIdentity::PublicKeyToPEMString() const { + return key_pair_->PublicKeyToPEMString(); +} + +bool BoringSSLIdentity::operator==(const BoringSSLIdentity& other) const { + return *this->key_pair_ == *other.key_pair_ && + this->certificate() == other.certificate(); +} + +bool BoringSSLIdentity::operator!=(const BoringSSLIdentity& other) const { + return !(*this == other); +} + +} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h new file mode 100644 index 000000000..71b29b486 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h @@ -0,0 +1,76 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_BORINGSSL_IDENTITY_H_ +#define RTC_BASE_BORINGSSL_IDENTITY_H_ + +#include + +#include +#include +#include + +#include "rtc_base/boringssl_certificate.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/openssl_key_pair.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" + +namespace rtc { + +// Holds a keypair and certificate together, and a method to generate them +// consistently. Uses CRYPTO_BUFFER instead of X509, which offers binary size +// and memory improvements. 
+class BoringSSLIdentity final : public SSLIdentity { + public: + static std::unique_ptr CreateWithExpiration( + const std::string& common_name, + const KeyParams& key_params, + time_t certificate_lifetime); + static std::unique_ptr CreateForTest( + const SSLIdentityParams& params); + static std::unique_ptr CreateFromPEMStrings( + const std::string& private_key, + const std::string& certificate); + static std::unique_ptr CreateFromPEMChainStrings( + const std::string& private_key, + const std::string& certificate_chain); + ~BoringSSLIdentity() override; + + const BoringSSLCertificate& certificate() const override; + const SSLCertChain& cert_chain() const override; + + // Configure an SSL context object to use our key and certificate. + bool ConfigureIdentity(SSL_CTX* ctx); + + std::string PrivateKeyToPEMString() const override; + std::string PublicKeyToPEMString() const override; + bool operator==(const BoringSSLIdentity& other) const; + bool operator!=(const BoringSSLIdentity& other) const; + + private: + BoringSSLIdentity(std::unique_ptr key_pair, + std::unique_ptr certificate); + BoringSSLIdentity(std::unique_ptr key_pair, + std::unique_ptr cert_chain); + std::unique_ptr CloneInternal() const override; + + static std::unique_ptr CreateInternal( + const SSLIdentityParams& params); + + std::unique_ptr key_pair_; + std::unique_ptr cert_chain_; + + RTC_DISALLOW_COPY_AND_ASSIGN(BoringSSLIdentity); +}; + +} // namespace rtc + +#endif // RTC_BASE_BORINGSSL_IDENTITY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h index 24a9b04dc..09c6c4f73 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h @@ -16,9 +16,10 @@ #include #include +#include "api/sequence_checker.h" #include "rtc_base/buffer.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include 
"rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" namespace rtc { @@ -55,7 +56,7 @@ class BufferQueue final { } private: - webrtc::SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; const size_t capacity_; const size_t default_size_; std::deque queue_ RTC_GUARDED_BY(sequence_checker_); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h b/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h deleted file mode 100644 index 47512214e..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h +++ /dev/null @@ -1,250 +0,0 @@ -// This file was GENERATED by command: -// pump.py callback.h.pump -// DO NOT EDIT BY HAND!!! - -/* - * Copyright 2012 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// To generate callback.h from callback.h.pump, execute: -// ../third_party/googletest/src/googletest/scripts/pump.py callback.h.pump - -// Callbacks are callable object containers. They can hold a function pointer -// or a function object and behave like a value type. Internally, data is -// reference-counted, making copies and pass-by-value inexpensive. -// -// Callbacks are typed using template arguments. The format is: -// CallbackN -// where N is the number of arguments supplied to the callable object. -// Callbacks are invoked using operator(), just like a function or a function -// object. Default-constructed callbacks are "empty," and executing an empty -// callback does nothing. A callback can be made empty by assigning it from -// a default-constructed callback. 
-// -// Callbacks are similar in purpose to std::function (which isn't available on -// all platforms we support) and a lightweight alternative to sigslots. Since -// they effectively hide the type of the object they call, they're useful in -// breaking dependencies between objects that need to interact with one another. -// Notably, they can hold the results of Bind(), std::bind*, etc, without -// needing -// to know the resulting object type of those calls. -// -// Sigslots, on the other hand, provide a fuller feature set, such as multiple -// subscriptions to a signal, optional thread-safety, and lifetime tracking of -// slots. When these features are needed, choose sigslots. -// -// Example: -// int sqr(int x) { return x * x; } -// struct AddK { -// int k; -// int operator()(int x) const { return x + k; } -// } add_k = {5}; -// -// Callback1 my_callback; -// cout << my_callback.empty() << endl; // true -// -// my_callback = Callback1(&sqr); -// cout << my_callback.empty() << endl; // false -// cout << my_callback(3) << endl; // 9 -// -// my_callback = Callback1(add_k); -// cout << my_callback(10) << endl; // 15 -// -// my_callback = Callback1(); -// cout << my_callback.empty() << endl; // true - -#ifndef RTC_BASE_CALLBACK_H_ -#define RTC_BASE_CALLBACK_H_ - -#include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" -#include "rtc_base/ref_counted_object.h" - -namespace rtc { - -template -class Callback0 { - public: - // Default copy operations are appropriate for this class. 
- Callback0() {} - template - Callback0(const T& functor) - : helper_(new RefCountedObject >(functor)) {} - R operator()() { - if (empty()) - return R(); - return helper_->Run(); - } - bool empty() const { return !helper_; } - - private: - struct Helper : RefCountInterface { - virtual ~Helper() {} - virtual R Run() = 0; - }; - template - struct HelperImpl : Helper { - explicit HelperImpl(const T& functor) : functor_(functor) {} - virtual R Run() { return functor_(); } - T functor_; - }; - scoped_refptr helper_; -}; - -template -class Callback1 { - public: - // Default copy operations are appropriate for this class. - Callback1() {} - template - Callback1(const T& functor) - : helper_(new RefCountedObject >(functor)) {} - R operator()(P1 p1) { - if (empty()) - return R(); - return helper_->Run(p1); - } - bool empty() const { return !helper_; } - - private: - struct Helper : RefCountInterface { - virtual ~Helper() {} - virtual R Run(P1 p1) = 0; - }; - template - struct HelperImpl : Helper { - explicit HelperImpl(const T& functor) : functor_(functor) {} - virtual R Run(P1 p1) { return functor_(p1); } - T functor_; - }; - scoped_refptr helper_; -}; - -template -class Callback2 { - public: - // Default copy operations are appropriate for this class. - Callback2() {} - template - Callback2(const T& functor) - : helper_(new RefCountedObject >(functor)) {} - R operator()(P1 p1, P2 p2) { - if (empty()) - return R(); - return helper_->Run(p1, p2); - } - bool empty() const { return !helper_; } - - private: - struct Helper : RefCountInterface { - virtual ~Helper() {} - virtual R Run(P1 p1, P2 p2) = 0; - }; - template - struct HelperImpl : Helper { - explicit HelperImpl(const T& functor) : functor_(functor) {} - virtual R Run(P1 p1, P2 p2) { return functor_(p1, p2); } - T functor_; - }; - scoped_refptr helper_; -}; - -template -class Callback3 { - public: - // Default copy operations are appropriate for this class. 
- Callback3() {} - template - Callback3(const T& functor) - : helper_(new RefCountedObject >(functor)) {} - R operator()(P1 p1, P2 p2, P3 p3) { - if (empty()) - return R(); - return helper_->Run(p1, p2, p3); - } - bool empty() const { return !helper_; } - - private: - struct Helper : RefCountInterface { - virtual ~Helper() {} - virtual R Run(P1 p1, P2 p2, P3 p3) = 0; - }; - template - struct HelperImpl : Helper { - explicit HelperImpl(const T& functor) : functor_(functor) {} - virtual R Run(P1 p1, P2 p2, P3 p3) { return functor_(p1, p2, p3); } - T functor_; - }; - scoped_refptr helper_; -}; - -template -class Callback4 { - public: - // Default copy operations are appropriate for this class. - Callback4() {} - template - Callback4(const T& functor) - : helper_(new RefCountedObject >(functor)) {} - R operator()(P1 p1, P2 p2, P3 p3, P4 p4) { - if (empty()) - return R(); - return helper_->Run(p1, p2, p3, p4); - } - bool empty() const { return !helper_; } - - private: - struct Helper : RefCountInterface { - virtual ~Helper() {} - virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4) = 0; - }; - template - struct HelperImpl : Helper { - explicit HelperImpl(const T& functor) : functor_(functor) {} - virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4) { - return functor_(p1, p2, p3, p4); - } - T functor_; - }; - scoped_refptr helper_; -}; - -template -class Callback5 { - public: - // Default copy operations are appropriate for this class. 
- Callback5() {} - template - Callback5(const T& functor) - : helper_(new RefCountedObject >(functor)) {} - R operator()(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) { - if (empty()) - return R(); - return helper_->Run(p1, p2, p3, p4, p5); - } - bool empty() const { return !helper_; } - - private: - struct Helper : RefCountInterface { - virtual ~Helper() {} - virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) = 0; - }; - template - struct HelperImpl : Helper { - explicit HelperImpl(const T& functor) : functor_(functor) {} - virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) { - return functor_(p1, p2, p3, p4, p5); - } - T functor_; - }; - scoped_refptr helper_; -}; -} // namespace rtc - -#endif // RTC_BASE_CALLBACK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h.pump b/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h.pump deleted file mode 100644 index dc5fb3ae1..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/callback.h.pump +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2012 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// To generate callback.h from callback.h.pump, execute: -// ../third_party/googletest/src/googletest/scripts/pump.py callback.h.pump - -// Callbacks are callable object containers. They can hold a function pointer -// or a function object and behave like a value type. Internally, data is -// reference-counted, making copies and pass-by-value inexpensive. -// -// Callbacks are typed using template arguments. The format is: -// CallbackN -// where N is the number of arguments supplied to the callable object. 
-// Callbacks are invoked using operator(), just like a function or a function -// object. Default-constructed callbacks are "empty," and executing an empty -// callback does nothing. A callback can be made empty by assigning it from -// a default-constructed callback. -// -// Callbacks are similar in purpose to std::function (which isn't available on -// all platforms we support) and a lightweight alternative to sigslots. Since -// they effectively hide the type of the object they call, they're useful in -// breaking dependencies between objects that need to interact with one another. -// Notably, they can hold the results of Bind(), std::bind*, etc, without needing -// to know the resulting object type of those calls. -// -// Sigslots, on the other hand, provide a fuller feature set, such as multiple -// subscriptions to a signal, optional thread-safety, and lifetime tracking of -// slots. When these features are needed, choose sigslots. -// -// Example: -// int sqr(int x) { return x * x; } -// struct AddK { -// int k; -// int operator()(int x) const { return x + k; } -// } add_k = {5}; -// -// Callback1 my_callback; -// cout << my_callback.empty() << endl; // true -// -// my_callback = Callback1(&sqr); -// cout << my_callback.empty() << endl; // false -// cout << my_callback(3) << endl; // 9 -// -// my_callback = Callback1(add_k); -// cout << my_callback(10) << endl; // 15 -// -// my_callback = Callback1(); -// cout << my_callback.empty() << endl; // true - -#ifndef RTC_BASE_CALLBACK_H_ -#define RTC_BASE_CALLBACK_H_ - -#include "rtc_base/ref_count.h" -#include "rtc_base/ref_counted_object.h" -#include "api/scoped_refptr.h" - -namespace rtc { - -$var n = 5 -$range i 0..n -$for i [[ -$range j 1..i - -template -class Callback$i { - public: - // Default copy operations are appropriate for this class. 
- Callback$i() {} - template Callback$i(const T& functor) - : helper_(new RefCountedObject< HelperImpl >(functor)) {} - R operator()($for j , [[P$j p$j]]) { - if (empty()) - return R(); - return helper_->Run($for j , [[p$j]]); - } - bool empty() const { return !helper_; } - - private: - struct Helper : RefCountInterface { - virtual ~Helper() {} - virtual R Run($for j , [[P$j p$j]]) = 0; - }; - template struct HelperImpl : Helper { - explicit HelperImpl(const T& functor) : functor_(functor) {} - virtual R Run($for j , [[P$j p$j]]) { - return functor_($for j , [[p$j]]); - } - T functor_; - }; - scoped_refptr helper_; -}; - -]] -} // namespace rtc - -#endif // RTC_BASE_CALLBACK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc index ac947e225..88d0b6fc7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.cc @@ -21,16 +21,75 @@ CallbackListReceivers::~CallbackListReceivers() { RTC_CHECK(!send_in_progress_); } +void CallbackListReceivers::RemoveReceivers(const void* removal_tag) { + RTC_CHECK(!send_in_progress_); + RTC_DCHECK(removal_tag != nullptr); + + // We divide the receivers_ vector into three regions: from right to left, the + // "keep" region, the "todo" region, and the "remove" region. The "todo" + // region initially covers the whole vector. + size_t first_todo = 0; // First element of the "todo" + // region. + size_t first_remove = receivers_.size(); // First element of the "remove" + // region. + + // Loop until the "todo" region is empty. + while (first_todo != first_remove) { + if (receivers_[first_todo].removal_tag != removal_tag) { + // The first element of the "todo" region should be kept. Move the + // "keep"/"todo" boundary. + ++first_todo; + } else if (receivers_[first_remove - 1].removal_tag == removal_tag) { + // The last element of the "todo" region should be removed. 
Move the + // "todo"/"remove" boundary. + --first_remove; + } else { + // The first element of the "todo" region should be removed, and the last + // element of the "todo" region should be kept. Swap them, and then shrink + // the "todo" region from both ends. + RTC_DCHECK_NE(first_todo, first_remove - 1); + using std::swap; + swap(receivers_[first_todo], receivers_[first_remove - 1]); + RTC_DCHECK_NE(receivers_[first_todo].removal_tag, removal_tag); + ++first_todo; + RTC_DCHECK_EQ(receivers_[first_remove - 1].removal_tag, removal_tag); + --first_remove; + } + } + + // Discard the remove region. + receivers_.resize(first_remove); +} + void CallbackListReceivers::Foreach( rtc::FunctionView fv) { RTC_CHECK(!send_in_progress_); send_in_progress_ = true; for (auto& r : receivers_) { - fv(r); + fv(r.function); } send_in_progress_ = false; } +template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::TrivialUntypedFunctionArgs<1>); +template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::TrivialUntypedFunctionArgs<2>); +template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::TrivialUntypedFunctionArgs<3>); +template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::TrivialUntypedFunctionArgs<4>); +template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::NontrivialUntypedFunctionArgs); +template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::FunctionPointerUntypedFunctionArgs); + template void CallbackListReceivers::AddReceiver( UntypedFunction::TrivialUntypedFunctionArgs<1>); template void CallbackListReceivers::AddReceiver( diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h index 659b838d0..18d48b02e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/callback_list.h @@ -32,19 
+32,52 @@ class CallbackListReceivers { CallbackListReceivers& operator=(CallbackListReceivers&&) = delete; ~CallbackListReceivers(); + template + RTC_NO_INLINE void AddReceiver(const void* removal_tag, + UntypedFunctionArgsT args) { + RTC_CHECK(!send_in_progress_); + RTC_DCHECK(removal_tag != nullptr); + receivers_.push_back({removal_tag, UntypedFunction::Create(args)}); + } + template RTC_NO_INLINE void AddReceiver(UntypedFunctionArgsT args) { RTC_CHECK(!send_in_progress_); - receivers_.push_back(UntypedFunction::Create(args)); + receivers_.push_back({nullptr, UntypedFunction::Create(args)}); } + void RemoveReceivers(const void* removal_tag); + void Foreach(rtc::FunctionView fv); private: - std::vector receivers_; + struct Callback { + const void* removal_tag; + UntypedFunction function; + }; + std::vector receivers_; bool send_in_progress_ = false; }; +extern template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::TrivialUntypedFunctionArgs<1>); +extern template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::TrivialUntypedFunctionArgs<2>); +extern template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::TrivialUntypedFunctionArgs<3>); +extern template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::TrivialUntypedFunctionArgs<4>); +extern template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::NontrivialUntypedFunctionArgs); +extern template void CallbackListReceivers::AddReceiver( + const void*, + UntypedFunction::FunctionPointerUntypedFunctionArgs); + extern template void CallbackListReceivers::AddReceiver( UntypedFunction::TrivialUntypedFunctionArgs<1>); extern template void CallbackListReceivers::AddReceiver( @@ -125,11 +158,6 @@ extern template void CallbackListReceivers::AddReceiver( // foo_callbacks_.AddReceiver(std::forward(callback)); // } // -// Removing callbacks -// ------------------ -// -// TODO(kwiberg): The 
current design doesn’t support removing callbacks, only -// adding them, but removal support can easily be added. template class CallbackList { public: @@ -141,16 +169,35 @@ class CallbackList { // Adds a new receiver. The receiver (a callable object or a function pointer) // must be movable, but need not be copyable. Its call signature should be - // `void(ArgT...)`. + // `void(ArgT...)`. The removal tag is a pointer to an arbitrary object that + // you own, and that will stay alive until the CallbackList is gone, or until + // all receivers using it as a removal tag have been removed; you can use it + // to remove the receiver. + template + void AddReceiver(const void* removal_tag, F&& f) { + receivers_.AddReceiver( + removal_tag, + UntypedFunction::PrepareArgs(std::forward(f))); + } + + // Adds a new receiver with no removal tag. template void AddReceiver(F&& f) { receivers_.AddReceiver( UntypedFunction::PrepareArgs(std::forward(f))); } + // Removes all receivers that were added with the given removal tag. + void RemoveReceivers(const void* removal_tag) { + receivers_.RemoveReceivers(removal_tag); + } + // Calls all receivers with the given arguments. While the Send is in // progress, no method calls are allowed; specifically, this means that the // callbacks may not do anything with this CallbackList instance. + // + // Note: Receivers are called serially, but not necessarily in the same order + // they were added. template void Send(ArgU&&... args) { receivers_.Foreach([&](UntypedFunction& f) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h index 508de2a57..21fca7e40 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/checks.h @@ -95,7 +95,7 @@ RTC_NORETURN void rtc_FatalMessage(const char* file, int line, const char* msg); // messages if the condition doesn't hold. Prefer them to raw RTC_CHECK and // RTC_DCHECK. 
// -// - FATAL() aborts unconditionally. +// - RTC_FATAL() aborts unconditionally. namespace rtc { namespace webrtc_checks_impl { @@ -454,8 +454,7 @@ RTC_NORETURN RTC_EXPORT void UnreachableCodeReached(); RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS); \ } while (0) -// TODO(bugs.webrtc.org/8454): Add an RTC_ prefix or rename differently. -#define FATAL() \ +#define RTC_FATAL() \ ::rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ "FATAL()") & \ ::rtc::webrtc_checks_impl::LogStreamer<>() diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.cc index 73182a12b..f3cc710f8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.cc @@ -32,16 +32,15 @@ CopyOnWriteBuffer::CopyOnWriteBuffer(const std::string& s) : CopyOnWriteBuffer(s.data(), s.length()) {} CopyOnWriteBuffer::CopyOnWriteBuffer(size_t size) - : buffer_(size > 0 ? new RefCountedObject(size) : nullptr), + : buffer_(size > 0 ? new RefCountedBuffer(size) : nullptr), offset_(0), size_(size) { RTC_DCHECK(IsConsistent()); } CopyOnWriteBuffer::CopyOnWriteBuffer(size_t size, size_t capacity) - : buffer_(size > 0 || capacity > 0 - ? new RefCountedObject(size, capacity) - : nullptr), + : buffer_(size > 0 || capacity > 0 ? 
new RefCountedBuffer(size, capacity) + : nullptr), offset_(0), size_(size) { RTC_DCHECK(IsConsistent()); @@ -61,7 +60,7 @@ void CopyOnWriteBuffer::SetSize(size_t size) { RTC_DCHECK(IsConsistent()); if (!buffer_) { if (size > 0) { - buffer_ = new RefCountedObject(size); + buffer_ = new RefCountedBuffer(size); offset_ = 0; size_ = size; } @@ -84,7 +83,7 @@ void CopyOnWriteBuffer::EnsureCapacity(size_t new_capacity) { RTC_DCHECK(IsConsistent()); if (!buffer_) { if (new_capacity > 0) { - buffer_ = new RefCountedObject(0, new_capacity); + buffer_ = new RefCountedBuffer(0, new_capacity); offset_ = 0; size_ = 0; } @@ -105,7 +104,7 @@ void CopyOnWriteBuffer::Clear() { if (buffer_->HasOneRef()) { buffer_->Clear(); } else { - buffer_ = new RefCountedObject(0, capacity()); + buffer_ = new RefCountedBuffer(0, capacity()); } offset_ = 0; size_ = 0; @@ -117,8 +116,8 @@ void CopyOnWriteBuffer::UnshareAndEnsureCapacity(size_t new_capacity) { return; } - buffer_ = new RefCountedObject(buffer_->data() + offset_, size_, - new_capacity); + buffer_ = + new RefCountedBuffer(buffer_->data() + offset_, size_, new_capacity); offset_ = 0; RTC_DCHECK(IsConsistent()); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.h index 68c6ad53d..526cbe5c5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/copy_on_write_buffer.h @@ -86,7 +86,7 @@ class RTC_EXPORT CopyOnWriteBuffer { template ::value>::type* = nullptr> - T* data() { + T* MutableData() { RTC_DCHECK(IsConsistent()); if (!buffer_) { return nullptr; @@ -146,11 +146,6 @@ class RTC_EXPORT CopyOnWriteBuffer { return !(*this == buf); } - uint8_t& operator[](size_t index) { - RTC_DCHECK_LT(index, size()); - return data()[index]; - } - uint8_t operator[](size_t index) const { RTC_DCHECK_LT(index, size()); return cdata()[index]; @@ -164,9 +159,9 @@ class RTC_EXPORT CopyOnWriteBuffer { void 
SetData(const T* data, size_t size) { RTC_DCHECK(IsConsistent()); if (!buffer_) { - buffer_ = size > 0 ? new RefCountedObject(data, size) : nullptr; + buffer_ = size > 0 ? new RefCountedBuffer(data, size) : nullptr; } else if (!buffer_->HasOneRef()) { - buffer_ = new RefCountedObject(data, size, capacity()); + buffer_ = new RefCountedBuffer(data, size, capacity()); } else { buffer_->SetData(data, size); } @@ -201,7 +196,7 @@ class RTC_EXPORT CopyOnWriteBuffer { void AppendData(const T* data, size_t size) { RTC_DCHECK(IsConsistent()); if (!buffer_) { - buffer_ = new RefCountedObject(data, size); + buffer_ = new RefCountedBuffer(data, size); offset_ = 0; size_ = size; RTC_DCHECK(IsConsistent()); @@ -247,7 +242,7 @@ class RTC_EXPORT CopyOnWriteBuffer { // Swaps two buffers. friend void swap(CopyOnWriteBuffer& a, CopyOnWriteBuffer& b) { - std::swap(a.buffer_, b.buffer_); + a.buffer_.swap(b.buffer_); std::swap(a.offset_, b.offset_); std::swap(a.size_, b.size_); } @@ -262,6 +257,7 @@ class RTC_EXPORT CopyOnWriteBuffer { } private: + using RefCountedBuffer = FinalRefCountedObject; // Create a copy of the underlying data if it is referenced from other Buffer // objects or there is not enough capacity. void UnshareAndEnsureCapacity(size_t new_capacity); @@ -277,7 +273,7 @@ class RTC_EXPORT CopyOnWriteBuffer { } // buffer_ is either null, or points to an rtc::Buffer with capacity > 0. - scoped_refptr> buffer_; + scoped_refptr buffer_; // This buffer may represent a slice of a original data. size_t offset_; // Offset of a current slice in the original data in buffer_. // Should be 0 if the buffer_ is empty. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.cc deleted file mode 100644 index 96bdd6515..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.cc +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. 
All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/deprecated/signal_thread.h" - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/location.h" -#include "rtc_base/null_socket_server.h" -#include "rtc_base/socket_server.h" - -namespace rtc { - -/////////////////////////////////////////////////////////////////////////////// -// SignalThread -/////////////////////////////////////////////////////////////////////////////// - -DEPRECATED_SignalThread::DEPRECATED_SignalThread() - : main_(Thread::Current()), worker_(this), state_(kInit), refcount_(1) { - main_->SignalQueueDestroyed.connect( - this, &DEPRECATED_SignalThread::OnMainThreadDestroyed); - worker_.SetName("SignalThread", this); -} - -DEPRECATED_SignalThread::~DEPRECATED_SignalThread() { - rtc::CritScope lock(&cs_); - RTC_DCHECK(refcount_ == 0); -} - -bool DEPRECATED_SignalThread::SetName(const std::string& name, - const void* obj) { - EnterExit ee(this); - RTC_DCHECK(!destroy_called_); - RTC_DCHECK(main_->IsCurrent()); - RTC_DCHECK(kInit == state_); - return worker_.SetName(name, obj); -} - -void DEPRECATED_SignalThread::Start() { - EnterExit ee(this); - RTC_DCHECK(!destroy_called_); - RTC_DCHECK(main_->IsCurrent()); - if (kInit == state_ || kComplete == state_) { - state_ = kRunning; - OnWorkStart(); - worker_.Start(); - } else { - RTC_NOTREACHED(); - } -} - -void DEPRECATED_SignalThread::Destroy(bool wait) { - EnterExit ee(this); - // Sometimes the caller can't guarantee which thread will call Destroy, only - // that it will be the last thing it does. 
- // RTC_DCHECK(main_->IsCurrent()); - RTC_DCHECK(!destroy_called_); - destroy_called_ = true; - if ((kInit == state_) || (kComplete == state_)) { - refcount_--; - } else if (kRunning == state_ || kReleasing == state_) { - state_ = kStopping; - // OnWorkStop() must follow Quit(), so that when the thread wakes up due to - // OWS(), ContinueWork() will return false. - worker_.Quit(); - OnWorkStop(); - if (wait) { - // Release the thread's lock so that it can return from ::Run. - cs_.Leave(); - worker_.Stop(); - cs_.Enter(); - refcount_--; - } - } else { - RTC_NOTREACHED(); - } -} - -void DEPRECATED_SignalThread::Release() { - EnterExit ee(this); - RTC_DCHECK(!destroy_called_); - RTC_DCHECK(main_->IsCurrent()); - if (kComplete == state_) { - refcount_--; - } else if (kRunning == state_) { - state_ = kReleasing; - } else { - // if (kInit == state_) use Destroy() - RTC_NOTREACHED(); - } -} - -bool DEPRECATED_SignalThread::ContinueWork() { - EnterExit ee(this); - RTC_DCHECK(!destroy_called_); - RTC_DCHECK(worker_.IsCurrent()); - return worker_.ProcessMessages(0); -} - -void DEPRECATED_SignalThread::OnMessage(Message* msg) { - EnterExit ee(this); - if (ST_MSG_WORKER_DONE == msg->message_id) { - RTC_DCHECK(main_->IsCurrent()); - OnWorkDone(); - bool do_delete = false; - if (kRunning == state_) { - state_ = kComplete; - } else { - do_delete = true; - } - if (kStopping != state_) { - // Before signaling that the work is done, make sure that the worker - // thread actually is done. We got here because DoWork() finished and - // Run() posted the ST_MSG_WORKER_DONE message. This means the worker - // thread is about to go away anyway, but sometimes it doesn't actually - // finish before SignalWorkDone is processed, and for a reusable - // SignalThread this makes an assert in thread.cc fire. - // - // Calling Stop() on the worker ensures that the OS thread that underlies - // the worker will finish, and will be set to null, enabling us to call - // Start() again. 
- worker_.Stop(); - SignalWorkDone(this); - } - if (do_delete) { - refcount_--; - } - } -} - -DEPRECATED_SignalThread::Worker::Worker(DEPRECATED_SignalThread* parent) - : Thread(std::make_unique(), /*do_init=*/false), - parent_(parent) { - DoInit(); -} - -DEPRECATED_SignalThread::Worker::~Worker() { - Stop(); -} - -void DEPRECATED_SignalThread::Worker::Run() { - parent_->Run(); -} - -void DEPRECATED_SignalThread::Run() { - DoWork(); - { - EnterExit ee(this); - if (main_) { - main_->Post(RTC_FROM_HERE, this, ST_MSG_WORKER_DONE); - } - } -} - -void DEPRECATED_SignalThread::OnMainThreadDestroyed() { - EnterExit ee(this); - main_ = nullptr; -} - -bool DEPRECATED_SignalThread::Worker::IsProcessingMessagesForTesting() { - return false; -} - -} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.h deleted file mode 100644 index 10805ad45..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/signal_thread.h +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_ -#define RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_ - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/constructor_magic.h" -#include "rtc_base/deprecated/recursive_critical_section.h" -#include "rtc_base/deprecation.h" -#include "rtc_base/message_handler.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" - -namespace rtc { - -/////////////////////////////////////////////////////////////////////////////// -// NOTE: this class has been deprecated. Do not use for new code. New code -// should use factilities exposed by api/task_queue/ instead. -// -// SignalThread - Base class for worker threads. The main thread should call -// Start() to begin work, and then follow one of these models: -// Normal: Wait for SignalWorkDone, and then call Release to destroy. -// Cancellation: Call Release(true), to abort the worker thread. -// Fire-and-forget: Call Release(false), which allows the thread to run to -// completion, and then self-destruct without further notification. -// Periodic tasks: Wait for SignalWorkDone, then eventually call Start() -// again to repeat the task. When the instance isn't needed anymore, -// call Release. DoWork, OnWorkStart and OnWorkStop are called again, -// on a new thread. -// The subclass should override DoWork() to perform the background task. By -// periodically calling ContinueWork(), it can check for cancellation. -// OnWorkStart and OnWorkDone can be overridden to do pre- or post-work -// tasks in the context of the main thread. -/////////////////////////////////////////////////////////////////////////////// - -class DEPRECATED_SignalThread : public sigslot::has_slots<>, - protected MessageHandlerAutoCleanup { - public: - DEPRECATED_SignalThread(); - - // Context: Main Thread. Call before Start to change the worker's name. 
- bool SetName(const std::string& name, const void* obj); - - // Context: Main Thread. Call to begin the worker thread. - void Start(); - - // Context: Main Thread. If the worker thread is not running, deletes the - // object immediately. Otherwise, asks the worker thread to abort processing, - // and schedules the object to be deleted once the worker exits. - // SignalWorkDone will not be signalled. If wait is true, does not return - // until the thread is deleted. - void Destroy(bool wait); - - // Context: Main Thread. If the worker thread is complete, deletes the - // object immediately. Otherwise, schedules the object to be deleted once - // the worker thread completes. SignalWorkDone will be signalled. - void Release(); - - // Context: Main Thread. Signalled when work is complete. - sigslot::signal1 SignalWorkDone; - - enum { ST_MSG_WORKER_DONE, ST_MSG_FIRST_AVAILABLE }; - - protected: - ~DEPRECATED_SignalThread() override; - - Thread* worker() { return &worker_; } - - // Context: Main Thread. Subclass should override to do pre-work setup. - virtual void OnWorkStart() {} - - // Context: Worker Thread. Subclass should override to do work. - virtual void DoWork() = 0; - - // Context: Worker Thread. Subclass should call periodically to - // dispatch messages and determine if the thread should terminate. - bool ContinueWork(); - - // Context: Worker Thread. Subclass should override when extra work is - // needed to abort the worker thread. - virtual void OnWorkStop() {} - - // Context: Main Thread. Subclass should override to do post-work cleanup. - virtual void OnWorkDone() {} - - // Context: Any Thread. If subclass overrides, be sure to call the base - // implementation. 
Do not use (message_id < ST_MSG_FIRST_AVAILABLE) - void OnMessage(Message* msg) override; - - private: - enum State { - kInit, // Initialized, but not started - kRunning, // Started and doing work - kReleasing, // Same as running, but to be deleted when work is done - kComplete, // Work is done - kStopping, // Work is being interrupted - }; - - class Worker : public Thread { - public: - explicit Worker(DEPRECATED_SignalThread* parent); - - Worker() = delete; - Worker(const Worker&) = delete; - Worker& operator=(const Worker&) = delete; - - ~Worker() override; - void Run() override; - bool IsProcessingMessagesForTesting() override; - - private: - DEPRECATED_SignalThread* parent_; - }; - - class RTC_SCOPED_LOCKABLE EnterExit { - public: - explicit EnterExit(DEPRECATED_SignalThread* t) - RTC_EXCLUSIVE_LOCK_FUNCTION(t->cs_) - : t_(t) { - t_->cs_.Enter(); - // If refcount_ is zero then the object has already been deleted and we - // will be double-deleting it in ~EnterExit()! (shouldn't happen) - RTC_DCHECK_NE(0, t_->refcount_); - ++t_->refcount_; - } - - EnterExit() = delete; - EnterExit(const EnterExit&) = delete; - EnterExit& operator=(const EnterExit&) = delete; - - ~EnterExit() RTC_UNLOCK_FUNCTION() { - bool d = (0 == --t_->refcount_); - t_->cs_.Leave(); - if (d) - delete t_; - } - - private: - DEPRECATED_SignalThread* t_; - }; - - void Run(); - void OnMainThreadDestroyed(); - - Thread* main_; - Worker worker_; - RecursiveCriticalSection cs_; - State state_ RTC_GUARDED_BY(cs_); - int refcount_ RTC_GUARDED_BY(cs_); - bool destroy_called_ RTC_GUARDED_BY(cs_) = false; - - RTC_DISALLOW_COPY_AND_ASSIGN(DEPRECATED_SignalThread); -}; - -typedef RTC_DEPRECATED DEPRECATED_SignalThread SignalThread; - -/////////////////////////////////////////////////////////////////////////////// - -} // namespace rtc - -#endif // RTC_BASE_DEPRECATED_SIGNAL_THREAD_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecation.h b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecation.h 
deleted file mode 100644 index f285ab04b..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecation.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_DEPRECATION_H_ -#define RTC_BASE_DEPRECATION_H_ - -// Annotate the declarations of deprecated functions with this to cause a -// compiler warning when they're used. Like so: -// -// RTC_DEPRECATED std::pony PonyPlz(const std::pony_spec& ps); -// -// NOTE 1: The annotation goes on the declaration in the .h file, not the -// definition in the .cc file! -// -// NOTE 2: In order to keep unit testing the deprecated function without -// getting warnings, do something like this: -// -// std::pony DEPRECATED_PonyPlz(const std::pony_spec& ps); -// RTC_DEPRECATED inline std::pony PonyPlz(const std::pony_spec& ps) { -// return DEPRECATED_PonyPlz(ps); -// } -// -// In other words, rename the existing function, and provide an inline wrapper -// using the original name that calls it. That way, callers who are willing to -// call it using the DEPRECATED_-prefixed name don't get the warning. -// -// TODO(kwiberg): Remove this when we can use [[deprecated]] from C++14. -#if defined(_MSC_VER) -// Note: Deprecation warnings seem to fail to trigger on Windows -// (https://bugs.chromium.org/p/webrtc/issues/detail?id=5368). 
-#define RTC_DEPRECATED __declspec(deprecated) -#elif defined(__GNUC__) -#define RTC_DEPRECATED __attribute__((__deprecated__)) -#else -#define RTC_DEPRECATED -#endif - -#endif // RTC_BASE_DEPRECATION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.cc index 3af8183b1..1a2b41ec5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.cc @@ -17,6 +17,7 @@ #include #include +#include "api/sequence_checker.h" #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" @@ -25,7 +26,6 @@ #include "rtc_base/platform_thread_types.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -79,19 +79,12 @@ namespace rtc { namespace tracing { namespace { -static void EventTracingThreadFunc(void* params); - // Atomic-int fast path for avoiding logging when disabled. static volatile int g_event_logging_active = 0; // TODO(pbos): Log metadata for all threads, etc. class EventLogger final { public: - EventLogger() - : logging_thread_(EventTracingThreadFunc, - this, - "EventTracingThread", - kLowPriority) {} ~EventLogger() { RTC_DCHECK(thread_checker_.IsCurrent()); } void AddTraceEvent(const char* name, @@ -209,7 +202,8 @@ class EventLogger final { rtc::AtomicOps::CompareAndSwap(&g_event_logging_active, 0, 1)); // Finally start, everything should be set up now. - logging_thread_.Start(); + logging_thread_ = + PlatformThread::SpawnJoinable([this] { Log(); }, "EventTracingThread"); TRACE_EVENT_INSTANT0("webrtc", "EventLogger::Start"); } @@ -223,7 +217,7 @@ class EventLogger final { // Wake up logging thread to finish writing. shutdown_event_.Set(); // Join the logging thread. 
- logging_thread_.Stop(); + logging_thread_.Finalize(); } private: @@ -321,15 +315,11 @@ class EventLogger final { std::vector trace_events_ RTC_GUARDED_BY(mutex_); rtc::PlatformThread logging_thread_; rtc::Event shutdown_event_; - rtc::ThreadChecker thread_checker_; + webrtc::SequenceChecker thread_checker_; FILE* output_file_ = nullptr; bool output_file_owned_ = false; }; -static void EventTracingThreadFunc(void* params) { - static_cast(params)->Log(); -} - static EventLogger* volatile g_event_logger = nullptr; static const char* const kDisabledTracePrefix = TRACE_DISABLED_BY_DEFAULT(""); const unsigned char* InternalGetCategoryEnabled(const char* name) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc index d061597f7..90d44efb1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc @@ -93,7 +93,8 @@ bool IsValid(const BalancedDegradationSettings::CodecTypeSpecific& config1, bool IsValid(const std::vector& configs) { if (configs.size() <= 1) { - RTC_LOG(LS_WARNING) << "Unsupported size, value ignored."; + if (configs.size() == 1) + RTC_LOG(LS_WARNING) << "Unsupported size, value ignored."; return false; } for (const auto& config : configs) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc index 0f5332009..7e6125526 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc @@ -25,7 +25,6 @@ constexpr int kMaxSetting = -1; std::vector GetValidOrEmpty( const std::vector& configs) { if (configs.empty()) { - RTC_LOG(LS_WARNING) << "Unsupported size, value ignored."; return {}; } 
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.cc new file mode 100644 index 000000000..9e1a5190a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.cc @@ -0,0 +1,120 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/experiments/encoder_info_settings.h" + +#include + +#include "rtc_base/experiments/field_trial_list.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { +namespace { + +std::vector ToResolutionBitrateLimits( + const std::vector& limits) { + std::vector result; + for (const auto& limit : limits) { + result.push_back(VideoEncoder::ResolutionBitrateLimits( + limit.frame_size_pixels, limit.min_start_bitrate_bps, + limit.min_bitrate_bps, limit.max_bitrate_bps)); + } + return result; +} + +} // namespace + +// Default bitrate limits for simulcast with one active stream: +// {frame_size_pixels, min_start_bitrate_bps, min_bitrate_bps, max_bitrate_bps}. +std::vector +EncoderInfoSettings::GetDefaultSinglecastBitrateLimits( + VideoCodecType codec_type) { + // Specific limits for VP9. Other codecs use VP8 limits. 
+ if (codec_type == kVideoCodecVP9) { + return {{320 * 180, 0, 30000, 150000}, + {480 * 270, 120000, 30000, 300000}, + {640 * 360, 190000, 30000, 420000}, + {960 * 540, 350000, 30000, 1000000}, + {1280 * 720, 480000, 30000, 1500000}}; + } + + return {{320 * 180, 0, 30000, 300000}, + {480 * 270, 200000, 30000, 500000}, + {640 * 360, 300000, 30000, 800000}, + {960 * 540, 500000, 30000, 1500000}, + {1280 * 720, 900000, 30000, 2500000}}; +} + +absl::optional +EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( + VideoCodecType codec_type, + int frame_size_pixels) { + VideoEncoder::EncoderInfo info; + info.resolution_bitrate_limits = + GetDefaultSinglecastBitrateLimits(codec_type); + return info.GetEncoderBitrateLimitsForResolution(frame_size_pixels); +} + +EncoderInfoSettings::EncoderInfoSettings(std::string name) + : requested_resolution_alignment_("requested_resolution_alignment"), + apply_alignment_to_all_simulcast_layers_( + "apply_alignment_to_all_simulcast_layers") { + FieldTrialStructList bitrate_limits( + {FieldTrialStructMember( + "frame_size_pixels", + [](BitrateLimit* b) { return &b->frame_size_pixels; }), + FieldTrialStructMember( + "min_start_bitrate_bps", + [](BitrateLimit* b) { return &b->min_start_bitrate_bps; }), + FieldTrialStructMember( + "min_bitrate_bps", + [](BitrateLimit* b) { return &b->min_bitrate_bps; }), + FieldTrialStructMember( + "max_bitrate_bps", + [](BitrateLimit* b) { return &b->max_bitrate_bps; })}, + {}); + + if (field_trial::FindFullName(name).empty()) { + // Encoder name not found, use common string applying to all encoders. 
+ name = "WebRTC-GetEncoderInfoOverride"; + } + + ParseFieldTrial({&bitrate_limits, &requested_resolution_alignment_, + &apply_alignment_to_all_simulcast_layers_}, + field_trial::FindFullName(name)); + + resolution_bitrate_limits_ = ToResolutionBitrateLimits(bitrate_limits.Get()); +} + +absl::optional EncoderInfoSettings::requested_resolution_alignment() + const { + if (requested_resolution_alignment_ && + requested_resolution_alignment_.Value() < 1) { + RTC_LOG(LS_WARNING) << "Unsupported alignment value, ignored."; + return absl::nullopt; + } + return requested_resolution_alignment_.GetOptional(); +} + +EncoderInfoSettings::~EncoderInfoSettings() {} + +SimulcastEncoderAdapterEncoderInfoSettings:: + SimulcastEncoderAdapterEncoderInfoSettings() + : EncoderInfoSettings( + "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride") {} + +LibvpxVp8EncoderInfoSettings::LibvpxVp8EncoderInfoSettings() + : EncoderInfoSettings("WebRTC-VP8-GetEncoderInfoOverride") {} + +LibvpxVp9EncoderInfoSettings::LibvpxVp9EncoderInfoSettings() + : EncoderInfoSettings("WebRTC-VP9-GetEncoderInfoOverride") {} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.h new file mode 100644 index 000000000..9cbb5875b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/encoder_info_settings.h @@ -0,0 +1,83 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_EXPERIMENTS_ENCODER_INFO_SETTINGS_H_ +#define RTC_BASE_EXPERIMENTS_ENCODER_INFO_SETTINGS_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/video_codecs/video_encoder.h" +#include "rtc_base/experiments/field_trial_parser.h" + +namespace webrtc { + +class EncoderInfoSettings { + public: + virtual ~EncoderInfoSettings(); + + // Bitrate limits per resolution. + struct BitrateLimit { + int frame_size_pixels = 0; // The video frame size. + int min_start_bitrate_bps = 0; // The minimum bitrate to start encoding. + int min_bitrate_bps = 0; // The minimum bitrate. + int max_bitrate_bps = 0; // The maximum bitrate. + }; + + absl::optional requested_resolution_alignment() const; + bool apply_alignment_to_all_simulcast_layers() const { + return apply_alignment_to_all_simulcast_layers_.Get(); + } + std::vector resolution_bitrate_limits() + const { + return resolution_bitrate_limits_; + } + + static std::vector + GetDefaultSinglecastBitrateLimits(VideoCodecType codec_type); + + static absl::optional + GetDefaultSinglecastBitrateLimitsForResolution(VideoCodecType codec_type, + int frame_size_pixels); + + protected: + explicit EncoderInfoSettings(std::string name); + + private: + FieldTrialOptional requested_resolution_alignment_; + FieldTrialFlag apply_alignment_to_all_simulcast_layers_; + std::vector resolution_bitrate_limits_; +}; + +// EncoderInfo settings for SimulcastEncoderAdapter. +class SimulcastEncoderAdapterEncoderInfoSettings : public EncoderInfoSettings { + public: + SimulcastEncoderAdapterEncoderInfoSettings(); + ~SimulcastEncoderAdapterEncoderInfoSettings() override {} +}; + +// EncoderInfo settings for LibvpxVp8Encoder. +class LibvpxVp8EncoderInfoSettings : public EncoderInfoSettings { + public: + LibvpxVp8EncoderInfoSettings(); + ~LibvpxVp8EncoderInfoSettings() override {} +}; + +// EncoderInfo settings for LibvpxVp9Encoder. 
+class LibvpxVp9EncoderInfoSettings : public EncoderInfoSettings { + public: + LibvpxVp9EncoderInfoSettings(); + ~LibvpxVp9EncoderInfoSettings() override {} +}; + +} // namespace webrtc + +#endif // RTC_BASE_EXPERIMENTS_ENCODER_INFO_SETTINGS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.cc index b88d0f97c..8fc89cec8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.cc @@ -83,7 +83,10 @@ void ParseFieldTrial( RTC_LOG(LS_WARNING) << "Failed to read empty key field with value '" << key << "' in trial: \"" << trial_string << "\""; } - } else { + } else if (key.empty() || key[0] != '_') { + // "_" is be used to prefix keys that are part of the string for + // debugging purposes but not neccessarily used. + // e.g. WebRTC-Experiment/param: value, _DebuggingString RTC_LOG(LS_INFO) << "No field with key: '" << key << "' (found in trial: \"" << trial_string << "\")"; std::string valid_keys; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc index 2f19a1c53..76c85cbba 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.cc @@ -22,11 +22,8 @@ constexpr char kFieldTrialName[] = "WebRTC-KeyframeInterval"; KeyframeIntervalSettings::KeyframeIntervalSettings( const WebRtcKeyValueConfig* const key_value_config) - : min_keyframe_send_interval_ms_("min_keyframe_send_interval_ms"), - max_wait_for_keyframe_ms_("max_wait_for_keyframe_ms"), - max_wait_for_frame_ms_("max_wait_for_frame_ms") { - ParseFieldTrial({&min_keyframe_send_interval_ms_, &max_wait_for_keyframe_ms_, - &max_wait_for_frame_ms_}, + : 
min_keyframe_send_interval_ms_("min_keyframe_send_interval_ms") { + ParseFieldTrial({&min_keyframe_send_interval_ms_}, key_value_config->Lookup(kFieldTrialName)); } @@ -39,13 +36,4 @@ absl::optional KeyframeIntervalSettings::MinKeyframeSendIntervalMs() const { return min_keyframe_send_interval_ms_.GetOptional(); } - -absl::optional KeyframeIntervalSettings::MaxWaitForKeyframeMs() const { - return max_wait_for_keyframe_ms_.GetOptional(); -} - -absl::optional KeyframeIntervalSettings::MaxWaitForFrameMs() const { - return max_wait_for_frame_ms_.GetOptional(); -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.h index 7c8d6d364..3f253f002 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/keyframe_interval_settings.h @@ -17,6 +17,9 @@ namespace webrtc { +// TODO(bugs.webrtc.org/10427): Remove and replace with proper configuration +// parameter, or move to using FIR if intent is to avoid triggering multiple +// times to PLIs corresponding to the same request when RTT is large. class KeyframeIntervalSettings final { public: static KeyframeIntervalSettings ParseFromFieldTrials(); @@ -25,22 +28,11 @@ class KeyframeIntervalSettings final { // The encoded keyframe send rate is <= 1/MinKeyframeSendIntervalMs(). absl::optional MinKeyframeSendIntervalMs() const; - // Receiver side. 
- // The keyframe request send rate is - // - when we have not received a key frame at all: - // <= 1/MaxWaitForKeyframeMs() - // - when we have not received a frame recently: - // <= 1/MaxWaitForFrameMs() - absl::optional MaxWaitForKeyframeMs() const; - absl::optional MaxWaitForFrameMs() const; - private: explicit KeyframeIntervalSettings( const WebRtcKeyValueConfig* key_value_config); FieldTrialOptional min_keyframe_send_interval_ms_; - FieldTrialOptional max_wait_for_keyframe_ms_; - FieldTrialOptional max_wait_for_frame_ms_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaling_experiment.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaling_experiment.cc index ca58ba858..7d5722bbe 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaling_experiment.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/quality_scaling_experiment.cc @@ -25,6 +25,11 @@ constexpr int kMaxVp9Qp = 255; constexpr int kMaxH264Qp = 51; constexpr int kMaxGenericQp = 255; +#if !defined(WEBRTC_IOS) +constexpr char kDefaultQualityScalingSetttings[] = + "Enabled-29,95,149,205,24,37,26,36,0.9995,0.9999,1"; +#endif + absl::optional GetThresholds(int low, int high, int max) { @@ -38,15 +43,22 @@ absl::optional GetThresholds(int low, } // namespace bool QualityScalingExperiment::Enabled() { +#if defined(WEBRTC_IOS) return webrtc::field_trial::IsEnabled(kFieldTrial); +#else + return !webrtc::field_trial::IsDisabled(kFieldTrial); +#endif } absl::optional QualityScalingExperiment::ParseSettings() { - const std::string group = webrtc::field_trial::FindFullName(kFieldTrial); + std::string group = webrtc::field_trial::FindFullName(kFieldTrial); + // TODO(http://crbug.com/webrtc/12401): Completely remove the experiment code + // after few releases. 
+#if !defined(WEBRTC_IOS) if (group.empty()) - return absl::nullopt; - + group = kDefaultQualityScalingSetttings; +#endif Settings s; if (sscanf(group.c_str(), "Enabled-%d,%d,%d,%d,%d,%d,%d,%d,%f,%f,%d", &s.vp8_low, &s.vp8_high, &s.vp9_low, &s.vp9_high, &s.h264_low, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc index 6766db62c..bed194e68 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.cc @@ -24,10 +24,13 @@ namespace webrtc { namespace { -const int kDefaultAcceptedQueueMs = 250; +const int kDefaultAcceptedQueueMs = 350; const int kDefaultMinPushbackTargetBitrateBps = 30000; +const char kCongestionWindowDefaultFieldTrialString[] = + "QueueSize:350,MinBitrate:30000,DropFrame:true"; + const char kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName[] = "WebRTC-UseBaseHeavyVP8TL3RateAllocation"; @@ -91,9 +94,13 @@ std::unique_ptr VideoRateControlConfig::Parser() { } RateControlSettings::RateControlSettings( - const WebRtcKeyValueConfig* const key_value_config) - : congestion_window_config_(CongestionWindowConfig::Parse( - key_value_config->Lookup(CongestionWindowConfig::kKey))) { + const WebRtcKeyValueConfig* const key_value_config) { + std::string congestion_window_config = + key_value_config->Lookup(CongestionWindowConfig::kKey).empty() + ? 
kCongestionWindowDefaultFieldTrialString + : key_value_config->Lookup(CongestionWindowConfig::kKey); + congestion_window_config_ = + CongestionWindowConfig::Parse(congestion_window_config); video_config_.vp8_base_heavy_tl3_alloc = IsEnabled( key_value_config, kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName); ParseHysteresisFactor(key_value_config, kVideoHysteresisFieldTrialname, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h index db7f1cd13..1c38e927d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/rate_control_settings.h @@ -96,7 +96,7 @@ class RateControlSettings final { explicit RateControlSettings( const WebRtcKeyValueConfig* const key_value_config); - const CongestionWindowConfig congestion_window_config_; + CongestionWindowConfig congestion_window_config_; VideoRateControlConfig video_config_; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.cc index 2605da8fe..d62eb6f1e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.cc @@ -107,7 +107,10 @@ void StructParametersParser::Parse(absl::string_view src) { break; } } - if (!found) { + // "_" is be used to prefix keys that are part of the string for + // debugging purposes but not neccessarily used. + // e.g. 
WebRTC-Experiment/param: value, _DebuggingString + if (!found && (key.empty() || key[0] != '_')) { RTC_LOG(LS_INFO) << "No field with key: '" << key << "' (found in trial: \"" << src << "\")"; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_mdns_responder.h b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_mdns_responder.h index 42908764a..1f87cf4b8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_mdns_responder.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_mdns_responder.h @@ -15,14 +15,17 @@ #include #include -#include "rtc_base/async_invoker.h" #include "rtc_base/ip_address.h" #include "rtc_base/location.h" #include "rtc_base/mdns_responder_interface.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" namespace webrtc { +// This class posts tasks on the given `thread` to invoke callbacks. It's the +// callback's responsibility to be aware of potential destruction of state it +// depends on, e.g., using WeakPtrFactory or PendingTaskSafetyFlag. 
class FakeMdnsResponder : public MdnsResponderInterface { public: explicit FakeMdnsResponder(rtc::Thread* thread) : thread_(thread) {} @@ -37,9 +40,8 @@ class FakeMdnsResponder : public MdnsResponderInterface { name = std::to_string(next_available_id_++) + ".local"; addr_name_map_[addr] = name; } - invoker_.AsyncInvoke( - RTC_FROM_HERE, thread_, - [callback, addr, name]() { callback(addr, name); }); + thread_->PostTask( + ToQueuedTask([callback, addr, name]() { callback(addr, name); })); } void RemoveNameForAddress(const rtc::IPAddress& addr, NameRemovedCallback callback) override { @@ -48,8 +50,7 @@ class FakeMdnsResponder : public MdnsResponderInterface { addr_name_map_.erase(it); } bool result = it != addr_name_map_.end(); - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, - [callback, result]() { callback(result); }); + thread_->PostTask(ToQueuedTask([callback, result]() { callback(result); })); } rtc::IPAddress GetMappedAddressForName(const std::string& name) const { @@ -64,8 +65,7 @@ class FakeMdnsResponder : public MdnsResponderInterface { private: uint32_t next_available_id_ = 0; std::map addr_name_map_; - rtc::Thread* thread_; - rtc::AsyncInvoker invoker_; + rtc::Thread* const thread_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h index 8bd50b69f..1bbdd460a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/fake_network.h @@ -70,10 +70,11 @@ class FakeNetworkManager : public NetworkManagerBase, ++start_count_; if (start_count_ == 1) { sent_first_update_ = false; - rtc::Thread::Current()->Post(RTC_FROM_HERE, this); + rtc::Thread::Current()->Post(RTC_FROM_HERE, this, kUpdateNetworksMessage); } else { if (sent_first_update_) { - SignalNetworksChanged(); + rtc::Thread::Current()->Post(RTC_FROM_HERE, this, + kSignalNetworksMessage); } } } @@ -81,7 +82,15 @@ class FakeNetworkManager : public 
NetworkManagerBase, void StopUpdating() override { --start_count_; } // MessageHandler interface. - void OnMessage(Message* msg) override { DoUpdateNetworks(); } + void OnMessage(Message* msg) override { + if (msg->message_id == kUpdateNetworksMessage) { + DoUpdateNetworks(); + } else if (msg->message_id == kSignalNetworksMessage) { + SignalNetworksChanged(); + } else { + RTC_CHECK(false); + } + } using NetworkManagerBase::set_default_local_addresses; using NetworkManagerBase::set_enumeration_permission; @@ -129,6 +138,9 @@ class FakeNetworkManager : public NetworkManagerBase, int start_count_ = 0; bool sent_first_update_ = false; + static constexpr uint32_t kUpdateNetworksMessage = 1; + static constexpr uint32_t kSignalNetworksMessage = 2; + std::unique_ptr mdns_responder_; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.cc index 826e6745f..b7d64ba92 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.cc @@ -193,49 +193,40 @@ FileRotatingStream::FileRotatingStream(const std::string& dir_path, FileRotatingStream::~FileRotatingStream() {} -StreamState FileRotatingStream::GetState() const { - return (file_.is_open() ? SS_OPEN : SS_CLOSED); +bool FileRotatingStream::IsOpen() const { + return file_.is_open(); } -StreamResult FileRotatingStream::Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) { - RTC_DCHECK(buffer); - RTC_NOTREACHED(); - return SR_EOS; -} - -StreamResult FileRotatingStream::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { +bool FileRotatingStream::Write(const void* data, size_t data_len) { if (!file_.is_open()) { std::fprintf(stderr, "Open() must be called before Write.\n"); - return SR_ERROR; + return false; } - // Write as much as will fit in to the current file. 
- RTC_DCHECK_LT(current_bytes_written_, max_file_size_); - size_t remaining_bytes = max_file_size_ - current_bytes_written_; - size_t write_length = std::min(data_len, remaining_bytes); + while (data_len > 0) { + // Write as much as will fit in to the current file. + RTC_DCHECK_LT(current_bytes_written_, max_file_size_); + size_t remaining_bytes = max_file_size_ - current_bytes_written_; + size_t write_length = std::min(data_len, remaining_bytes); - if (!file_.Write(data, write_length)) { - return SR_ERROR; - } - if (disable_buffering_ && !file_.Flush()) { - return SR_ERROR; - } + if (!file_.Write(data, write_length)) { + return false; + } + if (disable_buffering_ && !file_.Flush()) { + return false; + } - current_bytes_written_ += write_length; - if (written) { - *written = write_length; + current_bytes_written_ += write_length; + + // If we're done with this file, rotate it out. + if (current_bytes_written_ >= max_file_size_) { + RTC_DCHECK_EQ(current_bytes_written_, max_file_size_); + RotateFiles(); + } + data_len -= write_length; + data = + static_cast(static_cast(data) + write_length); } - // If we're done with this file, rotate it out. - if (current_bytes_written_ >= max_file_size_) { - RTC_DCHECK_EQ(current_bytes_written_, max_file_size_); - RotateFiles(); - } - return SR_SUCCESS; + return true; } bool FileRotatingStream::Flush() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h index 117cf2019..88461e344 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h @@ -18,7 +18,6 @@ #include #include "rtc_base/constructor_magic.h" -#include "rtc_base/stream.h" #include "rtc_base/system/file_wrapper.h" namespace rtc { @@ -27,13 +26,8 @@ namespace rtc { // constructor. It rotates the files once the current file is full. 
The // individual file size and the number of files used is configurable in the // constructor. Open() must be called before using this stream. -class FileRotatingStream : public StreamInterface { +class FileRotatingStream { public: - // Use this constructor for reading a directory previously written to with - // this stream. - FileRotatingStream(const std::string& dir_path, - const std::string& file_prefix); - // Use this constructor for writing to a directory. Files in the directory // matching the prefix will be deleted on open. FileRotatingStream(const std::string& dir_path, @@ -41,20 +35,13 @@ class FileRotatingStream : public StreamInterface { size_t max_file_size, size_t num_files); - ~FileRotatingStream() override; + virtual ~FileRotatingStream(); - // StreamInterface methods. - StreamState GetState() const override; - StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override; - StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override; - bool Flush() override; - void Close() override; + bool IsOpen() const; + + bool Write(const void* data, size_t data_len); + bool Flush(); + void Close(); // Opens the appropriate file(s). Call this before using the stream. bool Open(); @@ -63,6 +50,8 @@ class FileRotatingStream : public StreamInterface { // enabled by default for performance. bool DisableBuffering(); + // Below two methods are public for testing only. + // Returns the path used for the i-th newest file, where the 0th file is the // newest file. The file may or may not exist, this is just used for // formatting. Index must be less than GetNumFiles(). 
@@ -72,8 +61,6 @@ class FileRotatingStream : public StreamInterface { size_t GetNumFiles() const { return file_names_.size(); } protected: - size_t GetMaxFileSize() const { return max_file_size_; } - void SetMaxFileSize(size_t size) { max_file_size_ = size; } size_t GetRotationIndex() const { return rotation_index_; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/hash.h b/TMessagesProj/jni/voip/webrtc/rtc_base/hash.h new file mode 100644 index 000000000..56d581cdf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/hash.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef RTC_BASE_HASH_H_ +#define RTC_BASE_HASH_H_ + +#include + +#include +#include + +namespace webrtc { + +// A custom hash function for std::pair, to be able to be used as key in a +// std::unordered_map. If absl::flat_hash_map would ever be used, this is +// unnecessary as it already has a hash function for std::pair. +struct PairHash { + template + size_t operator()(const std::pair& p) const { + return (3 * std::hash{}(p.first)) ^ std::hash{}(p.second); + } +}; + +} // namespace webrtc + +#endif // RTC_BASE_HASH_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/internal/default_socket_server.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/internal/default_socket_server.cc new file mode 100644 index 000000000..5632b989f --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/internal/default_socket_server.cc @@ -0,0 +1,33 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/internal/default_socket_server.h" + +#include + +#include "rtc_base/socket_server.h" + +#if defined(__native_client__) +#include "rtc_base/null_socket_server.h" +#else +#include "rtc_base/physical_socket_server.h" +#endif + +namespace rtc { + +std::unique_ptr CreateDefaultSocketServer() { +#if defined(__native_client__) + return std::unique_ptr(new rtc::NullSocketServer); +#else + return std::unique_ptr(new rtc::PhysicalSocketServer); +#endif +} + +} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/signal_thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/internal/default_socket_server.h similarity index 56% rename from TMessagesProj/jni/voip/webrtc/rtc_base/signal_thread.h rename to TMessagesProj/jni/voip/webrtc/rtc_base/internal/default_socket_server.h index b444d5499..5b3489f61 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/signal_thread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/internal/default_socket_server.h @@ -8,12 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_SIGNAL_THREAD_H_ -#define RTC_BASE_SIGNAL_THREAD_H_ +#ifndef RTC_BASE_INTERNAL_DEFAULT_SOCKET_SERVER_H_ +#define RTC_BASE_INTERNAL_DEFAULT_SOCKET_SERVER_H_ -// The facilities in this file have been deprecated. Please do not use them -// in new code. New code should use factilities exposed by api/task_queue/ -// instead. 
-#include "rtc_base/deprecated/signal_thread.h" +#include -#endif // RTC_BASE_SIGNAL_THREAD_H_ +#include "rtc_base/socket_server.h" + +namespace rtc { + +std::unique_ptr CreateDefaultSocketServer(); + +} // namespace rtc + +#endif // RTC_BASE_INTERNAL_DEFAULT_SOCKET_SERVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.cc index 9dd534c2b..86f42e0bf 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.cc @@ -20,8 +20,9 @@ #include #endif -#include "rtc_base/byte_order.h" #include "rtc_base/ip_address.h" + +#include "rtc_base/byte_order.h" #include "rtc_base/net_helpers.h" #include "rtc_base/string_utils.h" @@ -148,10 +149,6 @@ std::string IPAddress::ToString() const { } std::string IPAddress::ToSensitiveString() const { -#if !defined(NDEBUG) - // Return non-stripped in debug. - return ToString(); -#else switch (family_) { case AF_INET: { std::string address = ToString(); @@ -175,7 +172,6 @@ std::string IPAddress::ToSensitiveString() const { } } return std::string(); -#endif } IPAddress IPAddress::Normalized() const { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.h index ae135a69d..872541739 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ip_address.h @@ -80,12 +80,12 @@ class RTC_EXPORT IPAddress { bool operator<(const IPAddress& other) const; bool operator>(const IPAddress& other) const; -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& os) { // no-presubmit-check TODO(webrtc:8982) return os << ToString(); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST int family() const { return family_; } in_addr ipv4_address() const; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/keep_ref_until_done.h 
b/TMessagesProj/jni/voip/webrtc/rtc_base/keep_ref_until_done.h deleted file mode 100644 index 7bebd8237..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/keep_ref_until_done.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_KEEP_REF_UNTIL_DONE_H_ -#define RTC_BASE_KEEP_REF_UNTIL_DONE_H_ - -#include "api/scoped_refptr.h" -#include "rtc_base/bind.h" -#include "rtc_base/callback.h" -#include "rtc_base/ref_count.h" - -namespace rtc { - -namespace impl { -template -static inline void DoNothing(const scoped_refptr& object) {} -} // namespace impl - -// KeepRefUntilDone keeps a reference to |object| until the returned -// callback goes out of scope. If the returned callback is copied, the -// reference will be released when the last callback goes out of scope. 
-template -static inline Callback0 KeepRefUntilDone(ObjectT* object) { - return rtc::Bind(&impl::DoNothing, scoped_refptr(object)); -} - -template -static inline Callback0 KeepRefUntilDone( - const scoped_refptr& object) { - return rtc::Bind(&impl::DoNothing, object); -} - -} // namespace rtc - -#endif // RTC_BASE_KEEP_REF_UNTIL_DONE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.cc index a3019b978..436514251 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.cc @@ -16,7 +16,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/stream.h" namespace rtc { @@ -37,23 +36,23 @@ FileRotatingLogSink::FileRotatingLogSink(FileRotatingStream* stream) FileRotatingLogSink::~FileRotatingLogSink() {} void FileRotatingLogSink::OnLogMessage(const std::string& message) { - if (stream_->GetState() != SS_OPEN) { + if (!stream_->IsOpen()) { std::fprintf(stderr, "Init() must be called before adding this sink.\n"); return; } - stream_->WriteAll(message.c_str(), message.size(), nullptr, nullptr); + stream_->Write(message.c_str(), message.size()); } void FileRotatingLogSink::OnLogMessage(const std::string& message, LoggingSeverity sev, const char* tag) { - if (stream_->GetState() != SS_OPEN) { + if (!stream_->IsOpen()) { std::fprintf(stderr, "Init() must be called before adding this sink.\n"); return; } - stream_->WriteAll(tag, strlen(tag), nullptr, nullptr); - stream_->WriteAll(": ", 2, nullptr, nullptr); - stream_->WriteAll(message.c_str(), message.size(), nullptr, nullptr); + stream_->Write(tag, strlen(tag)); + stream_->Write(": ", 2); + stream_->Write(message.c_str(), message.size()); } bool FileRotatingLogSink::Init() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc index 13a5f0259..a333d8397 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc +++ 
b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc @@ -51,6 +51,17 @@ static const int kMaxLogLineSize = 1024 - 60; #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" +#if defined(WEBRTC_RACE_CHECK_MUTEX) +#if defined(WEBRTC_ABSL_MUTEX) +#error Please only define one of WEBRTC_RACE_CHECK_MUTEX and WEBRTC_ABSL_MUTEX. +#endif +#include "absl/base/const_init.h" +#include "absl/synchronization/mutex.h" // nogncheck +using LoggingMutexLock = ::absl::MutexLock; +#else +using LoggingMutexLock = ::webrtc::MutexLock; +#endif // if defined(WEBRTC_RACE_CHECK_MUTEX) + namespace rtc { namespace { // By default, release builds don't log, debug builds at info level @@ -75,7 +86,15 @@ const char* FilenameFromPath(const char* file) { // Global lock for log subsystem, only needed to serialize access to streams_. // TODO(bugs.webrtc.org/11665): this is not currently constant initialized and // trivially destructible. +#if defined(WEBRTC_RACE_CHECK_MUTEX) +// When WEBRTC_RACE_CHECK_MUTEX is defined, even though WebRTC objects are +// invoked serially, the logging is static, invoked concurrently and hence needs +// protection. 
+absl::Mutex g_log_mutex_(absl::kConstInit); +#else webrtc::Mutex g_log_mutex_; +#endif + } // namespace ///////////////////////////////////////////////////////////////////////////// @@ -201,7 +220,7 @@ LogMessage::~LogMessage() { #endif } - webrtc::MutexLock lock(&g_log_mutex_); + LoggingMutexLock lock(&g_log_mutex_); for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) { if (severity_ >= entry->min_severity_) { #if defined(WEBRTC_ANDROID) @@ -250,7 +269,7 @@ void LogMessage::LogTimestamps(bool on) { void LogMessage::LogToDebug(LoggingSeverity min_sev) { g_dbg_sev = min_sev; - webrtc::MutexLock lock(&g_log_mutex_); + LoggingMutexLock lock(&g_log_mutex_); UpdateMinLogSeverity(); } @@ -259,7 +278,7 @@ void LogMessage::SetLogToStderr(bool log_to_stderr) { } int LogMessage::GetLogToStream(LogSink* stream) { - webrtc::MutexLock lock(&g_log_mutex_); + LoggingMutexLock lock(&g_log_mutex_); LoggingSeverity sev = LS_NONE; for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) { if (stream == nullptr || stream == entry) { @@ -270,7 +289,7 @@ int LogMessage::GetLogToStream(LogSink* stream) { } void LogMessage::AddLogToStream(LogSink* stream, LoggingSeverity min_sev) { - webrtc::MutexLock lock(&g_log_mutex_); + LoggingMutexLock lock(&g_log_mutex_); stream->min_severity_ = min_sev; stream->next_ = streams_; streams_ = stream; @@ -279,7 +298,7 @@ void LogMessage::AddLogToStream(LogSink* stream, LoggingSeverity min_sev) { } void LogMessage::RemoveLogToStream(LogSink* stream) { - webrtc::MutexLock lock(&g_log_mutex_); + LoggingMutexLock lock(&g_log_mutex_); for (LogSink** entry = &streams_; *entry != nullptr; entry = &(*entry)->next_) { if (*entry == stream) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h index d2607c28b..e21c30e21 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h @@ -51,10 +51,10 @@ #include 
#include +#include "absl/base/attributes.h" #include "absl/meta/type_traits.h" #include "absl/strings/string_view.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/deprecation.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/inline.h" @@ -434,7 +434,7 @@ class LogMessage { // DEPRECATED - DO NOT USE - PLEASE USE THE MACROS INSTEAD OF THE CLASS. // Android code should use the 'const char*' version since tags are static // and we want to avoid allocating a std::string copy per log line. - RTC_DEPRECATED + ABSL_DEPRECATED("Use RTC_LOG macros instead of accessing this class directly") LogMessage(const char* file, int line, LoggingSeverity sev, @@ -508,7 +508,7 @@ class LogMessage { // DEPRECATED - DO NOT USE - PLEASE USE THE MACROS INSTEAD OF THE CLASS. // Android code should use the 'const char*' version since tags are static // and we want to avoid allocating a std::string copy per log line. - RTC_DEPRECATED + ABSL_DEPRECATED("Use RTC_LOG macros instead of accessing this class directly") LogMessage(const char* file, int line, LoggingSeverity sev, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_buffer_reader.h b/TMessagesProj/jni/voip/webrtc/rtc_base/message_buffer_reader.h deleted file mode 100644 index 32b8f336b..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_buffer_reader.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_MESSAGE_BUFFER_READER_H_ -#define RTC_BASE_MESSAGE_BUFFER_READER_H_ - -#include "rtc_base/byte_buffer.h" - -namespace webrtc { - -// A simple subclass of the ByteBufferReader that exposes the starting address -// of the message and its length, so that we can recall previously parsed data. -class MessageBufferReader : public rtc::ByteBufferReader { - public: - MessageBufferReader(const char* bytes, size_t len) - : rtc::ByteBufferReader(bytes, len) {} - ~MessageBufferReader() = default; - - // Starting address of the message. - const char* MessageData() const { return bytes_; } - // Total length of the message. Note that this is different from Length(), - // which is the length of the remaining message from the current offset. - size_t MessageLength() const { return size_; } - // Current offset in the message. - size_t CurrentOffset() const { return start_; } -}; - -} // namespace webrtc - -#endif // RTC_BASE_MESSAGE_BUFFER_READER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.h b/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.h index 36f00b527..691330e23 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/message_digest.h @@ -45,7 +45,8 @@ class MessageDigestFactory { static MessageDigest* Create(const std::string& alg); }; -// A whitelist of approved digest algorithms from RFC 4572 (FIPS 180). +// A check that an algorithm is in a list of approved digest algorithms +// from RFC 4572 (FIPS 180). bool IsFips180DigestAlgorithm(const std::string& alg); // Functions to create hashes. 
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.cc index 7c853e9c9..effbb5a6c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.cc @@ -230,10 +230,10 @@ class NATSocket : public AsyncSocket, public sigslot::has_slots<> { return connected_ ? CS_CONNECTED : CS_CLOSED; } int GetOption(Option opt, int* value) override { - return socket_->GetOption(opt, value); + return socket_ ? socket_->GetOption(opt, value) : -1; } int SetOption(Option opt, int value) override { - return socket_->SetOption(opt, value); + return socket_ ? socket_->SetOption(opt, value) : -1; } void OnConnectEvent(AsyncSocket* socket) { @@ -428,14 +428,15 @@ NATSocketServer::Translator::Translator(NATSocketServer* server, // Create a new private network, and a NATServer running on the private // network that bridges to the external network. Also tell the private // network to use the same message queue as us. 
- VirtualSocketServer* internal_server = new VirtualSocketServer(); - internal_server->SetMessageQueue(server_->queue()); - internal_factory_.reset(internal_server); - nat_server_.reset(new NATServer(type, internal_server, int_ip, int_ip, - ext_factory, ext_ip)); + internal_server_ = std::make_unique(); + internal_server_->SetMessageQueue(server_->queue()); + nat_server_ = std::make_unique( + type, internal_server_.get(), int_ip, int_ip, ext_factory, ext_ip); } -NATSocketServer::Translator::~Translator() = default; +NATSocketServer::Translator::~Translator() { + internal_server_->SetMessageQueue(nullptr); +} NATSocketServer::Translator* NATSocketServer::Translator::GetTranslator( const SocketAddress& ext_ip) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h index e649d19a8..70030d834 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h @@ -107,7 +107,7 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { const SocketAddress& ext_addr); ~Translator(); - SocketFactory* internal_factory() { return internal_factory_.get(); } + SocketFactory* internal_factory() { return internal_server_.get(); } SocketAddress internal_udp_address() const { return nat_server_->internal_udp_address(); } @@ -129,7 +129,7 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { private: NATSocketServer* server_; - std::unique_ptr internal_factory_; + std::unique_ptr internal_server_; std::unique_ptr nat_server_; TranslatorMap nats_; std::set clients_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.cc index c6685e2a6..bec854af0 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.cc @@ -10,6 +10,8 @@ #include 
"rtc_base/net_helpers.h" +#include + #if defined(WEBRTC_WIN) #include #include @@ -17,6 +19,7 @@ #include "rtc_base/win32.h" #endif #if defined(WEBRTC_POSIX) && !defined(__native_client__) +#include #if defined(WEBRTC_ANDROID) #include "rtc_base/ifaddrs_android.h" #else @@ -24,145 +27,8 @@ #endif #endif // defined(WEBRTC_POSIX) && !defined(__native_client__) -#include "api/task_queue/task_queue_base.h" -#include "rtc_base/logging.h" -#include "rtc_base/signal_thread.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/third_party/sigslot/sigslot.h" // for signal_with_thread... - namespace rtc { -int ResolveHostname(const std::string& hostname, - int family, - std::vector* addresses) { -#ifdef __native_client__ - RTC_NOTREACHED(); - RTC_LOG(LS_WARNING) << "ResolveHostname() is not implemented for NaCl"; - return -1; -#else // __native_client__ - if (!addresses) { - return -1; - } - addresses->clear(); - struct addrinfo* result = nullptr; - struct addrinfo hints = {0}; - hints.ai_family = family; - // |family| here will almost always be AF_UNSPEC, because |family| comes from - // AsyncResolver::addr_.family(), which comes from a SocketAddress constructed - // with a hostname. When a SocketAddress is constructed with a hostname, its - // family is AF_UNSPEC. However, if someday in the future we construct - // a SocketAddress with both a hostname and a family other than AF_UNSPEC, - // then it would be possible to get a specific family value here. 
- - // The behavior of AF_UNSPEC is roughly "get both ipv4 and ipv6", as - // documented by the various operating systems: - // Linux: http://man7.org/linux/man-pages/man3/getaddrinfo.3.html - // Windows: https://msdn.microsoft.com/en-us/library/windows/desktop/ - // ms738520(v=vs.85).aspx - // Mac: https://developer.apple.com/legacy/library/documentation/Darwin/ - // Reference/ManPages/man3/getaddrinfo.3.html - // Android (source code, not documentation): - // https://android.googlesource.com/platform/bionic/+/ - // 7e0bfb511e85834d7c6cb9631206b62f82701d60/libc/netbsd/net/getaddrinfo.c#1657 - hints.ai_flags = AI_ADDRCONFIG; - int ret = getaddrinfo(hostname.c_str(), nullptr, &hints, &result); - if (ret != 0) { - return ret; - } - struct addrinfo* cursor = result; - for (; cursor; cursor = cursor->ai_next) { - if (family == AF_UNSPEC || cursor->ai_family == family) { - IPAddress ip; - if (IPFromAddrInfo(cursor, &ip)) { - addresses->push_back(ip); - } - } - } - freeaddrinfo(result); - return 0; -#endif // !__native_client__ -} - -AsyncResolver::AsyncResolver() : error_(-1) {} - -AsyncResolver::~AsyncResolver() { - RTC_DCHECK_RUN_ON(&sequence_checker_); -} - -void AsyncResolver::Start(const SocketAddress& addr) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - addr_ = addr; - webrtc::TaskQueueBase* current_task_queue = webrtc::TaskQueueBase::Current(); - popup_thread_ = Thread::Create(); - popup_thread_->Start(); - popup_thread_->PostTask(webrtc::ToQueuedTask( - [this, flag = safety_.flag(), addr, current_task_queue] { - std::vector addresses; - int error = - ResolveHostname(addr.hostname().c_str(), addr.family(), &addresses); - current_task_queue->PostTask(webrtc::ToQueuedTask( - std::move(flag), [this, error, addresses = std::move(addresses)] { - RTC_DCHECK_RUN_ON(&sequence_checker_); - ResolveDone(std::move(addresses), error); - })); - })); -} - -bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const { - 
RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - if (error_ != 0 || addresses_.empty()) - return false; - - *addr = addr_; - for (size_t i = 0; i < addresses_.size(); ++i) { - if (family == addresses_[i].family()) { - addr->SetResolvedIP(addresses_[i]); - return true; - } - } - return false; -} - -int AsyncResolver::GetError() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - return error_; -} - -void AsyncResolver::Destroy(bool wait) { - // Some callers have trouble guaranteeing that Destroy is called on the - // sequence guarded by |sequence_checker_|. - // RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - destroy_called_ = true; - MaybeSelfDestruct(); -} - -const std::vector& AsyncResolver::addresses() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - return addresses_; -} - -void AsyncResolver::ResolveDone(std::vector addresses, int error) { - addresses_ = addresses; - error_ = error; - recursion_check_ = true; - SignalDone(this); - MaybeSelfDestruct(); -} - -void AsyncResolver::MaybeSelfDestruct() { - if (!recursion_check_) { - delete this; - } else { - recursion_check_ = false; - } -} - const char* inet_ntop(int af, const void* src, char* dst, socklen_t size) { #if defined(WEBRTC_WIN) return win32_inet_ntop(af, src, dst, size); @@ -187,7 +53,7 @@ bool HasIPv4Enabled() { return false; } for (struct ifaddrs* cur = ifa; cur != nullptr; cur = cur->ifa_next) { - if (cur->ifa_addr->sa_family == AF_INET) { + if (cur->ifa_addr != nullptr && cur->ifa_addr->sa_family == AF_INET) { has_ipv4 = true; break; } @@ -246,7 +112,7 @@ bool HasIPv6Enabled() { return false; } for (struct ifaddrs* cur = ifa; cur != nullptr; cur = cur->ifa_next) { - if (cur->ifa_addr->sa_family == AF_INET6) { + if (cur->ifa_addr != nullptr && cur->ifa_addr->sa_family == AF_INET6) { has_ipv6 = true; break; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.h 
b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.h index c6aa4be5b..4ed84786b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/net_helpers.h @@ -15,56 +15,12 @@ #include #elif WEBRTC_WIN #include // NOLINT + +#include "rtc_base/win32.h" #endif -#include - -#include "rtc_base/async_resolver_interface.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/synchronization/sequence_checker.h" -#include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" - namespace rtc { -// AsyncResolver will perform async DNS resolution, signaling the result on -// the SignalDone from AsyncResolverInterface when the operation completes. -// -// This class is thread-compatible, and all methods and destruction needs to -// happen from the same rtc::Thread, except for Destroy which is allowed to -// happen on another context provided it's not happening concurrently to another -// public API call, and is the last access to the object. 
-class RTC_EXPORT AsyncResolver : public AsyncResolverInterface { - public: - AsyncResolver(); - ~AsyncResolver() override; - - void Start(const SocketAddress& addr) override; - bool GetResolvedAddress(int family, SocketAddress* addr) const override; - int GetError() const override; - void Destroy(bool wait) override; - - const std::vector& addresses() const; - - private: - void ResolveDone(std::vector addresses, int error) - RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_); - void MaybeSelfDestruct(); - - SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_); - std::vector addresses_ RTC_GUARDED_BY(sequence_checker_); - int error_ RTC_GUARDED_BY(sequence_checker_); - webrtc::ScopedTaskSafety safety_ RTC_GUARDED_BY(sequence_checker_); - std::unique_ptr popup_thread_ RTC_GUARDED_BY(sequence_checker_); - bool recursion_check_ = - false; // Protects against SignalDone calling into Destroy. - bool destroy_called_ = false; - webrtc::SequenceChecker sequence_checker_; -}; - // rtc namespaced wrappers for inet_ntop and inet_pton so we can avoid // the windows-native versions of these. 
const char* inet_ntop(int af, const void* src, char* dst, socklen_t size); @@ -72,6 +28,7 @@ int inet_pton(int af, const char* src, void* dst); bool HasIPv4Enabled(); bool HasIPv6Enabled(); + } // namespace rtc #endif // RTC_BASE_NET_HELPERS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc index 8aabdcb7e..f4a349bae 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc @@ -131,7 +131,7 @@ uint16_t ComputeNetworkCostByType(int type, } #if !defined(__native_client__) -bool IsIgnoredIPv6(const InterfaceAddress& ip) { +bool IsIgnoredIPv6(bool allow_mac_based_ipv6, const InterfaceAddress& ip) { if (ip.family() != AF_INET6) { return false; } @@ -144,7 +144,7 @@ bool IsIgnoredIPv6(const InterfaceAddress& ip) { } // Any MAC based IPv6 should be avoided to prevent the MAC tracking. - if (IPIsMacBased(ip)) { + if (IPIsMacBased(ip) && !allow_mac_based_ipv6) { return true; } @@ -212,7 +212,8 @@ AdapterType GetAdapterTypeFromName(const char* network_name) { return ADAPTER_TYPE_ETHERNET; } - if (MatchTypeNameWithIndexPattern(network_name, "wlan")) { + if (MatchTypeNameWithIndexPattern(network_name, "wlan") || + MatchTypeNameWithIndexPattern(network_name, "v4-wlan")) { return ADAPTER_TYPE_WIFI; } @@ -478,11 +479,15 @@ Network* NetworkManagerBase::GetNetworkFromAddress( return nullptr; } -BasicNetworkManager::BasicNetworkManager() {} +BasicNetworkManager::BasicNetworkManager() : BasicNetworkManager(nullptr) {} BasicNetworkManager::BasicNetworkManager( NetworkMonitorFactory* network_monitor_factory) - : network_monitor_factory_(network_monitor_factory) {} + : network_monitor_factory_(network_monitor_factory), + allow_mac_based_ipv6_( + webrtc::field_trial::IsEnabled("WebRTC-AllowMACBasedIPv6")), + bind_using_ifname_( + !webrtc::field_trial::IsDisabled("WebRTC-BindUsingInterfaceName")) {} BasicNetworkManager::~BasicNetworkManager() {} @@ -535,7 +540,7 
@@ void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces, // Special case for IPv6 address. if (cursor->ifa_addr->sa_family == AF_INET6) { - if (IsIgnoredIPv6(ip)) { + if (IsIgnoredIPv6(allow_mac_based_ipv6_, ip)) { continue; } scope_id = @@ -713,7 +718,7 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored, scope_id = v6_addr->sin6_scope_id; ip = IPAddress(v6_addr->sin6_addr); - if (IsIgnoredIPv6(InterfaceAddress(ip))) { + if (IsIgnoredIPv6(allow_mac_based_ipv6_, InterfaceAddress(ip))) { continue; } @@ -861,6 +866,15 @@ void BasicNetworkManager::StartNetworkMonitor() { network_monitor_->SignalNetworksChanged.connect( this, &BasicNetworkManager::OnNetworksChanged); } + + if (network_monitor_->SupportsBindSocketToNetwork()) { + // Set NetworkBinder on SocketServer so that + // PhysicalSocket::Bind will call + // BasicNetworkManager::BindSocketToNetwork(), (that will lookup interface + // name and then call network_monitor_->BindSocketToNetwork()). + thread_->socketserver()->set_network_binder(this); + } + network_monitor_->Start(); } @@ -869,6 +883,13 @@ void BasicNetworkManager::StopNetworkMonitor() { return; } network_monitor_->Stop(); + + if (network_monitor_->SupportsBindSocketToNetwork()) { + // Reset NetworkBinder on SocketServer. 
+ if (thread_->socketserver()->network_binder() == this) { + thread_->socketserver()->set_network_binder(nullptr); + } + } } void BasicNetworkManager::OnMessage(Message* msg) { @@ -950,6 +971,20 @@ void BasicNetworkManager::DumpNetworks() { } } +NetworkBindingResult BasicNetworkManager::BindSocketToNetwork( + int socket_fd, + const IPAddress& address) { + RTC_DCHECK_RUN_ON(thread_); + std::string if_name; + if (bind_using_ifname_) { + Network* net = GetNetworkFromAddress(address); + if (net != nullptr) { + if_name = net->name(); + } + } + return network_monitor_->BindSocketToNetwork(socket_fd, address, if_name); +} + Network::Network(const std::string& name, const std::string& desc, const IPAddress& prefix, diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network.h index 7103f0fa2..8b6b6235f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network.h @@ -19,12 +19,12 @@ #include #include +#include "api/sequence_checker.h" #include "rtc_base/ip_address.h" #include "rtc_base/mdns_responder_interface.h" #include "rtc_base/message_handler.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread_annotations.h" @@ -194,11 +194,11 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { void set_default_local_addresses(const IPAddress& ipv4, const IPAddress& ipv6); + Network* GetNetworkFromAddress(const rtc::IPAddress& ip) const; + private: friend class NetworkTest; - Network* GetNetworkFromAddress(const rtc::IPAddress& ip) const; - EnumerationPermission enumeration_permission_; NetworkList networks_; @@ -225,6 +225,7 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { // of networks using OS APIs. 
class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, public MessageHandlerAutoCleanup, + public NetworkBinderInterface, public sigslot::has_slots<> { public: BasicNetworkManager(); @@ -248,6 +249,15 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, network_ignore_list_ = list; } + // Bind a socket to interface that ip address belong to. + // Implementation look up interface name and calls + // BindSocketToNetwork on NetworkMonitor. + // The interface name is needed as e.g ipv4 over ipv6 addresses + // are not exposed using Android functions, but it is possible + // bind an ipv4 address to the interface. + NetworkBindingResult BindSocketToNetwork(int socket_fd, + const IPAddress& address) override; + protected: #if defined(WEBRTC_POSIX) // Separated from CreateNetworks for tests. @@ -293,6 +303,8 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, nullptr; std::unique_ptr network_monitor_ RTC_GUARDED_BY(thread_); + bool allow_mac_based_ipv6_ RTC_GUARDED_BY(thread_) = false; + bool bind_using_ifname_ RTC_GUARDED_BY(thread_) = false; }; // Represents a Unix-type network interface, with a name and single address. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h index 4a3002f42..dddc2f60f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_monitor.h @@ -36,6 +36,8 @@ enum class NetworkPreference { const char* NetworkPreferenceToString(NetworkPreference preference); +// This interface is set onto a socket server, +// where only the ip address is known at the time of binding. 
class NetworkBinderInterface { public: // Binds a socket to the network that is attached to |address| so that all @@ -83,6 +85,19 @@ class NetworkMonitorInterface { virtual NetworkPreference GetNetworkPreference( const std::string& interface_name) = 0; + // Does |this| NetworkMonitorInterface implement BindSocketToNetwork? + // Only Android returns true. + virtual bool SupportsBindSocketToNetwork() const { return false; } + + // Bind a socket to an interface specified by ip address and/or interface + // name. Only implemented on Android. + virtual NetworkBindingResult BindSocketToNetwork( + int socket_fd, + const IPAddress& address, + const std::string& interface_name) { + return NetworkBindingResult::NOT_IMPLEMENTED; + } + // Is this interface available to use? WebRTC shouldn't attempt to use it if // this returns false. // diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.cc index 8fd882c2b..e5c2c4276 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.cc @@ -13,6 +13,9 @@ #include #include #include +#ifdef OPENSSL_IS_BORINGSSL +#include +#endif #include #include #include @@ -20,13 +23,24 @@ #include +// Use CRYPTO_BUFFER APIs if available and we have no dependency on X509 +// objects. 
+#if defined(OPENSSL_IS_BORINGSSL) && \ + defined(WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS) +#define WEBRTC_USE_CRYPTO_BUFFER_CALLBACK +#endif + #include "absl/memory/memory.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/openssl.h" -#include "rtc_base/openssl_certificate.h" +#ifdef OPENSSL_IS_BORINGSSL +#include "rtc_base/boringssl_identity.h" +#else +#include "rtc_base/openssl_identity.h" +#endif #include "rtc_base/openssl_utility.h" #include "rtc_base/string_encode.h" #include "rtc_base/thread.h" @@ -223,8 +237,13 @@ void OpenSSLAdapter::SetCertVerifier( void OpenSSLAdapter::SetIdentity(std::unique_ptr identity) { RTC_DCHECK(!identity_); +#ifdef OPENSSL_IS_BORINGSSL + identity_ = + absl::WrapUnique(static_cast(identity.release())); +#else identity_ = absl::WrapUnique(static_cast(identity.release())); +#endif } void OpenSSLAdapter::SetRole(SSLRole role) { @@ -797,7 +816,70 @@ void OpenSSLAdapter::SSLInfoCallback(const SSL* s, int where, int ret) { #endif +#ifdef WEBRTC_USE_CRYPTO_BUFFER_CALLBACK +// static +enum ssl_verify_result_t OpenSSLAdapter::SSLVerifyCallback(SSL* ssl, + uint8_t* out_alert) { + // Get our stream pointer from the SSL context. + OpenSSLAdapter* stream = + reinterpret_cast(SSL_get_app_data(ssl)); + + ssl_verify_result_t ret = stream->SSLVerifyInternal(ssl, out_alert); + + // Should only be used for debugging and development. 
+ if (ret != ssl_verify_ok && stream->ignore_bad_cert_) { + RTC_DLOG(LS_WARNING) << "Ignoring cert error while verifying cert chain"; + return ssl_verify_ok; + } + + return ret; +} + +enum ssl_verify_result_t OpenSSLAdapter::SSLVerifyInternal(SSL* ssl, + uint8_t* out_alert) { + if (ssl_cert_verifier_ == nullptr) { + RTC_LOG(LS_WARNING) << "Built-in trusted root certificates disabled but no " + "SSL verify callback provided."; + return ssl_verify_invalid; + } + + RTC_LOG(LS_INFO) << "Invoking SSL Verify Callback."; + const STACK_OF(CRYPTO_BUFFER)* chain = SSL_get0_peer_certificates(ssl); + if (sk_CRYPTO_BUFFER_num(chain) == 0) { + RTC_LOG(LS_ERROR) << "Peer certificate chain empty?"; + return ssl_verify_invalid; + } + + BoringSSLCertificate cert(bssl::UpRef(sk_CRYPTO_BUFFER_value(chain, 0))); + if (!ssl_cert_verifier_->Verify(cert)) { + RTC_LOG(LS_WARNING) << "Failed to verify certificate using custom callback"; + return ssl_verify_invalid; + } + + custom_cert_verifier_status_ = true; + RTC_LOG(LS_INFO) << "Validated certificate using custom callback"; + return ssl_verify_ok; +} +#else // WEBRTC_USE_CRYPTO_BUFFER_CALLBACK int OpenSSLAdapter::SSLVerifyCallback(int ok, X509_STORE_CTX* store) { + // Get our stream pointer from the store + SSL* ssl = reinterpret_cast( + X509_STORE_CTX_get_ex_data(store, SSL_get_ex_data_X509_STORE_CTX_idx())); + + OpenSSLAdapter* stream = + reinterpret_cast(SSL_get_app_data(ssl)); + ok = stream->SSLVerifyInternal(ok, ssl, store); + + // Should only be used for debugging and development. 
+ if (!ok && stream->ignore_bad_cert_) { + RTC_DLOG(LS_WARNING) << "Ignoring cert error while verifying cert chain"; + return 1; + } + + return ok; +} + +int OpenSSLAdapter::SSLVerifyInternal(int ok, SSL* ssl, X509_STORE_CTX* store) { #if !defined(NDEBUG) if (!ok) { char data[256]; @@ -814,33 +896,40 @@ int OpenSSLAdapter::SSLVerifyCallback(int ok, X509_STORE_CTX* store) { << X509_verify_cert_error_string(err); } #endif - // Get our stream pointer from the store - SSL* ssl = reinterpret_cast( - X509_STORE_CTX_get_ex_data(store, SSL_get_ex_data_X509_STORE_CTX_idx())); - - OpenSSLAdapter* stream = - reinterpret_cast(SSL_get_app_data(ssl)); - - if (!ok && stream->ssl_cert_verifier_ != nullptr) { - RTC_LOG(LS_INFO) << "Invoking SSL Verify Callback."; - const OpenSSLCertificate cert(X509_STORE_CTX_get_current_cert(store)); - if (stream->ssl_cert_verifier_->Verify(cert)) { - stream->custom_cert_verifier_status_ = true; - RTC_LOG(LS_INFO) << "Validated certificate using custom callback"; - ok = true; - } else { - RTC_LOG(LS_INFO) << "Failed to verify certificate using custom callback"; - } + if (ssl_cert_verifier_ == nullptr) { + return ok; } - // Should only be used for debugging and development. - if (!ok && stream->ignore_bad_cert_) { - RTC_DLOG(LS_WARNING) << "Ignoring cert error while verifying cert chain"; - ok = 1; + RTC_LOG(LS_INFO) << "Invoking SSL Verify Callback."; +#ifdef OPENSSL_IS_BORINGSSL + // Convert X509 to CRYPTO_BUFFER. 
+ uint8_t* data = nullptr; + int length = i2d_X509(X509_STORE_CTX_get_current_cert(store), &data); + if (length < 0) { + RTC_LOG(LS_ERROR) << "Failed to encode X509."; + return ok; + } + bssl::UniquePtr owned_data(data); + bssl::UniquePtr crypto_buffer( + CRYPTO_BUFFER_new(data, length, openssl::GetBufferPool())); + if (!crypto_buffer) { + RTC_LOG(LS_ERROR) << "Failed to allocate CRYPTO_BUFFER."; + return ok; + } + const BoringSSLCertificate cert(std::move(crypto_buffer)); +#else + const OpenSSLCertificate cert(X509_STORE_CTX_get_current_cert(store)); +#endif + if (!ssl_cert_verifier_->Verify(cert)) { + RTC_LOG(LS_INFO) << "Failed to verify certificate using custom callback"; + return ok; } - return ok; + custom_cert_verifier_status_ = true; + RTC_LOG(LS_INFO) << "Validated certificate using custom callback"; + return 1; } +#endif // !defined(WEBRTC_USE_CRYPTO_BUFFER_CALLBACK) int OpenSSLAdapter::NewSSLSessionCallback(SSL* ssl, SSL_SESSION* session) { OpenSSLAdapter* stream = @@ -852,8 +941,15 @@ int OpenSSLAdapter::NewSSLSessionCallback(SSL* ssl, SSL_SESSION* session) { } SSL_CTX* OpenSSLAdapter::CreateContext(SSLMode mode, bool enable_cache) { +#ifdef WEBRTC_USE_CRYPTO_BUFFER_CALLBACK + // If X509 objects aren't used, we can use these methods to avoid + // linking the sizable crypto/x509 code. + SSL_CTX* ctx = SSL_CTX_new(mode == SSL_MODE_DTLS ? DTLS_with_buffers_method() + : TLS_with_buffers_method()); +#else SSL_CTX* ctx = SSL_CTX_new(mode == SSL_MODE_DTLS ? DTLS_method() : TLS_method()); +#endif if (ctx == nullptr) { unsigned long error = ERR_get_error(); // NOLINT: type used by OpenSSL. 
RTC_LOG(LS_WARNING) << "SSL_CTX creation failed: " << '"' @@ -877,8 +973,16 @@ SSL_CTX* OpenSSLAdapter::CreateContext(SSLMode mode, bool enable_cache) { SSL_CTX_set_info_callback(ctx, SSLInfoCallback); #endif +#ifdef OPENSSL_IS_BORINGSSL + SSL_CTX_set0_buffer_pool(ctx, openssl::GetBufferPool()); +#endif + +#ifdef WEBRTC_USE_CRYPTO_BUFFER_CALLBACK + SSL_CTX_set_custom_verify(ctx, SSL_VERIFY_PEER, SSLVerifyCallback); +#else SSL_CTX_set_verify(ctx, SSL_VERIFY_PEER, SSLVerifyCallback); SSL_CTX_set_verify_depth(ctx, 4); +#endif // Use defaults, but disable HMAC-SHA256 and HMAC-SHA384 ciphers // (note that SHA256 and SHA384 only select legacy CBC ciphers). // Additionally disable HMAC-SHA1 ciphers in ECDSA. These are the remaining diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h index 6f1f7dcca..76b003a7d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_adapter.h @@ -11,6 +11,7 @@ #ifndef RTC_BASE_OPENSSL_ADAPTER_H_ #define RTC_BASE_OPENSSL_ADAPTER_H_ +#include #include #include @@ -21,7 +22,11 @@ #include "rtc_base/async_socket.h" #include "rtc_base/buffer.h" #include "rtc_base/message_handler.h" +#ifdef OPENSSL_IS_BORINGSSL +#include "rtc_base/boringssl_identity.h" +#else #include "rtc_base/openssl_identity.h" +#endif #include "rtc_base/openssl_session_cache.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" @@ -109,7 +114,16 @@ class OpenSSLAdapter final : public SSLAdapter, // In debug builds, logs info about the state of the SSL connection. 
static void SSLInfoCallback(const SSL* ssl, int where, int ret); #endif + +#if defined(OPENSSL_IS_BORINGSSL) && \ + defined(WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS) + static enum ssl_verify_result_t SSLVerifyCallback(SSL* ssl, + uint8_t* out_alert); + enum ssl_verify_result_t SSLVerifyInternal(SSL* ssl, uint8_t* out_alert); +#else static int SSLVerifyCallback(int ok, X509_STORE_CTX* store); + int SSLVerifyInternal(int ok, SSL* ssl, X509_STORE_CTX* store); +#endif friend class OpenSSLStreamAdapter; // for custom_verify_callback_; // If the SSL_CTX was created with |enable_cache| set to true, this callback @@ -123,7 +137,12 @@ class OpenSSLAdapter final : public SSLAdapter, SSLCertificateVerifier* ssl_cert_verifier_ = nullptr; // The current connection state of the (d)TLS connection. SSLState state_; + +#ifdef OPENSSL_IS_BORINGSSL + std::unique_ptr identity_; +#else std::unique_ptr identity_; +#endif // Indicates whethere this is a client or a server. SSLRole role_; bool ssl_read_needs_write_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.cc index c94df40bf..3794d981c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.cc @@ -20,10 +20,8 @@ #endif // WEBRTC_WIN #include -#include #include #include -#include #include #include "absl/memory/memory.h" @@ -35,160 +33,6 @@ namespace rtc { -// We could have exposed a myriad of parameters for the crypto stuff, -// but keeping it simple seems best. - -// Generate a key pair. Caller is responsible for freeing the returned object. 
-static EVP_PKEY* MakeKey(const KeyParams& key_params) { - RTC_LOG(LS_INFO) << "Making key pair"; - EVP_PKEY* pkey = EVP_PKEY_new(); - if (key_params.type() == KT_RSA) { - int key_length = key_params.rsa_params().mod_size; - BIGNUM* exponent = BN_new(); - RSA* rsa = RSA_new(); - if (!pkey || !exponent || !rsa || - !BN_set_word(exponent, key_params.rsa_params().pub_exp) || - !RSA_generate_key_ex(rsa, key_length, exponent, nullptr) || - !EVP_PKEY_assign_RSA(pkey, rsa)) { - EVP_PKEY_free(pkey); - BN_free(exponent); - RSA_free(rsa); - RTC_LOG(LS_ERROR) << "Failed to make RSA key pair"; - return nullptr; - } - // ownership of rsa struct was assigned, don't free it. - BN_free(exponent); - } else if (key_params.type() == KT_ECDSA) { - if (key_params.ec_curve() == EC_NIST_P256) { - EC_KEY* ec_key = EC_KEY_new_by_curve_name(NID_X9_62_prime256v1); - - // Ensure curve name is included when EC key is serialized. - // Without this call, OpenSSL versions before 1.1.0 will create - // certificates that don't work for TLS. - // This is a no-op for BoringSSL and OpenSSL 1.1.0+ - EC_KEY_set_asn1_flag(ec_key, OPENSSL_EC_NAMED_CURVE); - - if (!pkey || !ec_key || !EC_KEY_generate_key(ec_key) || - !EVP_PKEY_assign_EC_KEY(pkey, ec_key)) { - EVP_PKEY_free(pkey); - EC_KEY_free(ec_key); - RTC_LOG(LS_ERROR) << "Failed to make EC key pair"; - return nullptr; - } - // ownership of ec_key struct was assigned, don't free it. - } else { - // Add generation of any other curves here. 
- EVP_PKEY_free(pkey); - RTC_LOG(LS_ERROR) << "ECDSA key requested for unknown curve"; - return nullptr; - } - } else { - EVP_PKEY_free(pkey); - RTC_LOG(LS_ERROR) << "Key type requested not understood"; - return nullptr; - } - - RTC_LOG(LS_INFO) << "Returning key pair"; - return pkey; -} - -OpenSSLKeyPair* OpenSSLKeyPair::Generate(const KeyParams& key_params) { - EVP_PKEY* pkey = MakeKey(key_params); - if (!pkey) { - openssl::LogSSLErrors("Generating key pair"); - return nullptr; - } - return new OpenSSLKeyPair(pkey); -} - -OpenSSLKeyPair* OpenSSLKeyPair::FromPrivateKeyPEMString( - const std::string& pem_string) { - BIO* bio = BIO_new_mem_buf(const_cast(pem_string.c_str()), -1); - if (!bio) { - RTC_LOG(LS_ERROR) << "Failed to create a new BIO buffer."; - return nullptr; - } - BIO_set_mem_eof_return(bio, 0); - EVP_PKEY* pkey = - PEM_read_bio_PrivateKey(bio, nullptr, nullptr, const_cast("\0")); - BIO_free(bio); // Frees the BIO, but not the pointed-to string. - if (!pkey) { - RTC_LOG(LS_ERROR) << "Failed to create the private key from PEM string."; - return nullptr; - } - if (EVP_PKEY_missing_parameters(pkey) != 0) { - RTC_LOG(LS_ERROR) - << "The resulting key pair is missing public key parameters."; - EVP_PKEY_free(pkey); - return nullptr; - } - return new OpenSSLKeyPair(pkey); -} - -OpenSSLKeyPair::~OpenSSLKeyPair() { - EVP_PKEY_free(pkey_); -} - -OpenSSLKeyPair* OpenSSLKeyPair::GetReference() { - AddReference(); - return new OpenSSLKeyPair(pkey_); -} - -void OpenSSLKeyPair::AddReference() { - EVP_PKEY_up_ref(pkey_); -} - -std::string OpenSSLKeyPair::PrivateKeyToPEMString() const { - BIO* temp_memory_bio = BIO_new(BIO_s_mem()); - if (!temp_memory_bio) { - RTC_LOG_F(LS_ERROR) << "Failed to allocate temporary memory bio"; - RTC_NOTREACHED(); - return ""; - } - if (!PEM_write_bio_PrivateKey(temp_memory_bio, pkey_, nullptr, nullptr, 0, - nullptr, nullptr)) { - RTC_LOG_F(LS_ERROR) << "Failed to write private key"; - BIO_free(temp_memory_bio); - RTC_NOTREACHED(); - 
return ""; - } - BIO_write(temp_memory_bio, "\0", 1); - char* buffer; - BIO_get_mem_data(temp_memory_bio, &buffer); - std::string priv_key_str = buffer; - BIO_free(temp_memory_bio); - return priv_key_str; -} - -std::string OpenSSLKeyPair::PublicKeyToPEMString() const { - BIO* temp_memory_bio = BIO_new(BIO_s_mem()); - if (!temp_memory_bio) { - RTC_LOG_F(LS_ERROR) << "Failed to allocate temporary memory bio"; - RTC_NOTREACHED(); - return ""; - } - if (!PEM_write_bio_PUBKEY(temp_memory_bio, pkey_)) { - RTC_LOG_F(LS_ERROR) << "Failed to write public key"; - BIO_free(temp_memory_bio); - RTC_NOTREACHED(); - return ""; - } - BIO_write(temp_memory_bio, "\0", 1); - char* buffer; - BIO_get_mem_data(temp_memory_bio, &buffer); - std::string pub_key_str = buffer; - BIO_free(temp_memory_bio); - return pub_key_str; -} - -bool OpenSSLKeyPair::operator==(const OpenSSLKeyPair& other) const { - return EVP_PKEY_cmp(this->pkey_, other.pkey_) == 1; -} - -bool OpenSSLKeyPair::operator!=(const OpenSSLKeyPair& other) const { - return !(*this == other); -} - OpenSSLIdentity::OpenSSLIdentity( std::unique_ptr key_pair, std::unique_ptr certificate) @@ -211,8 +55,7 @@ OpenSSLIdentity::~OpenSSLIdentity() = default; std::unique_ptr OpenSSLIdentity::CreateInternal( const SSLIdentityParams& params) { - std::unique_ptr key_pair( - OpenSSLKeyPair::Generate(params.key_params)); + auto key_pair = OpenSSLKeyPair::Generate(params.key_params); if (key_pair) { std::unique_ptr certificate( OpenSSLCertificate::Generate(key_pair.get(), params)); @@ -221,7 +64,7 @@ std::unique_ptr OpenSSLIdentity::CreateInternal( new OpenSSLIdentity(std::move(key_pair), std::move(certificate))); } } - RTC_LOG(LS_INFO) << "Identity generation failed"; + RTC_LOG(LS_ERROR) << "Identity generation failed"; return nullptr; } @@ -256,8 +99,7 @@ std::unique_ptr OpenSSLIdentity::CreateFromPEMStrings( return nullptr; } - std::unique_ptr key_pair( - OpenSSLKeyPair::FromPrivateKeyPEMString(private_key)); + auto key_pair = 
OpenSSLKeyPair::FromPrivateKeyPEMString(private_key); if (!key_pair) { RTC_LOG(LS_ERROR) << "Failed to create key pair from PEM string."; return nullptr; @@ -298,8 +140,7 @@ std::unique_ptr OpenSSLIdentity::CreateFromPEMChainStrings( return nullptr; } - std::unique_ptr key_pair( - OpenSSLKeyPair::FromPrivateKeyPEMString(private_key)); + auto key_pair = OpenSSLKeyPair::FromPrivateKeyPEMString(private_key); if (!key_pair) { RTC_LOG(LS_ERROR) << "Failed to create key pair from PEM string."; return nullptr; @@ -320,8 +161,8 @@ const SSLCertChain& OpenSSLIdentity::cert_chain() const { std::unique_ptr OpenSSLIdentity::CloneInternal() const { // We cannot use std::make_unique here because the referenced OpenSSLIdentity // constructor is private. - return absl::WrapUnique(new OpenSSLIdentity( - absl::WrapUnique(key_pair_->GetReference()), cert_chain_->Clone())); + return absl::WrapUnique( + new OpenSSLIdentity(key_pair_->Clone(), cert_chain_->Clone())); } bool OpenSSLIdentity::ConfigureIdentity(SSL_CTX* ctx) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h index a2ac87cf4..00d6c7492 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h @@ -17,45 +17,14 @@ #include #include -#include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/openssl_certificate.h" +#include "rtc_base/openssl_key_pair.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" namespace rtc { -// OpenSSLKeyPair encapsulates an OpenSSL EVP_PKEY* keypair object, -// which is reference counted inside the OpenSSL library. -class OpenSSLKeyPair final { - public: - explicit OpenSSLKeyPair(EVP_PKEY* pkey) : pkey_(pkey) { - RTC_DCHECK(pkey_ != nullptr); - } - - static OpenSSLKeyPair* Generate(const KeyParams& key_params); - // Constructs a key pair from the private key PEM string. 
This must not result - // in missing public key parameters. Returns null on error. - static OpenSSLKeyPair* FromPrivateKeyPEMString(const std::string& pem_string); - - virtual ~OpenSSLKeyPair(); - - virtual OpenSSLKeyPair* GetReference(); - - EVP_PKEY* pkey() const { return pkey_; } - std::string PrivateKeyToPEMString() const; - std::string PublicKeyToPEMString() const; - bool operator==(const OpenSSLKeyPair& other) const; - bool operator!=(const OpenSSLKeyPair& other) const; - - private: - void AddReference(); - - EVP_PKEY* pkey_; - - RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLKeyPair); -}; - // Holds a keypair and certificate together, and a method to generate // them consistently. class OpenSSLIdentity final : public SSLIdentity { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.cc new file mode 100644 index 000000000..911a751cb --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.cc @@ -0,0 +1,192 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/openssl_key_pair.h" + +#include +#include + +#if defined(WEBRTC_WIN) +// Must be included first before openssl headers. +#include "rtc_base/win32.h" // NOLINT +#endif // WEBRTC_WIN + +#include +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/openssl.h" +#include "rtc_base/openssl_utility.h" + +namespace rtc { + +// We could have exposed a myriad of parameters for the crypto stuff, +// but keeping it simple seems best. + +// Generate a key pair. 
Caller is responsible for freeing the returned object. +static EVP_PKEY* MakeKey(const KeyParams& key_params) { + RTC_LOG(LS_INFO) << "Making key pair"; + EVP_PKEY* pkey = EVP_PKEY_new(); + if (key_params.type() == KT_RSA) { + int key_length = key_params.rsa_params().mod_size; + BIGNUM* exponent = BN_new(); + RSA* rsa = RSA_new(); + if (!pkey || !exponent || !rsa || + !BN_set_word(exponent, key_params.rsa_params().pub_exp) || + !RSA_generate_key_ex(rsa, key_length, exponent, nullptr) || + !EVP_PKEY_assign_RSA(pkey, rsa)) { + EVP_PKEY_free(pkey); + BN_free(exponent); + RSA_free(rsa); + RTC_LOG(LS_ERROR) << "Failed to make RSA key pair"; + return nullptr; + } + // ownership of rsa struct was assigned, don't free it. + BN_free(exponent); + } else if (key_params.type() == KT_ECDSA) { + if (key_params.ec_curve() == EC_NIST_P256) { + EC_KEY* ec_key = EC_KEY_new_by_curve_name(NID_X9_62_prime256v1); + if (!ec_key) { + EVP_PKEY_free(pkey); + RTC_LOG(LS_ERROR) << "Failed to allocate EC key"; + return nullptr; + } + + // Ensure curve name is included when EC key is serialized. + // Without this call, OpenSSL versions before 1.1.0 will create + // certificates that don't work for TLS. + // This is a no-op for BoringSSL and OpenSSL 1.1.0+ + EC_KEY_set_asn1_flag(ec_key, OPENSSL_EC_NAMED_CURVE); + + if (!pkey || !ec_key || !EC_KEY_generate_key(ec_key) || + !EVP_PKEY_assign_EC_KEY(pkey, ec_key)) { + EVP_PKEY_free(pkey); + EC_KEY_free(ec_key); + RTC_LOG(LS_ERROR) << "Failed to make EC key pair"; + return nullptr; + } + // ownership of ec_key struct was assigned, don't free it. + } else { + // Add generation of any other curves here. 
+ EVP_PKEY_free(pkey); + RTC_LOG(LS_ERROR) << "ECDSA key requested for unknown curve"; + return nullptr; + } + } else { + EVP_PKEY_free(pkey); + RTC_LOG(LS_ERROR) << "Key type requested not understood"; + return nullptr; + } + + RTC_LOG(LS_INFO) << "Returning key pair"; + return pkey; +} + +std::unique_ptr OpenSSLKeyPair::Generate( + const KeyParams& key_params) { + EVP_PKEY* pkey = MakeKey(key_params); + if (!pkey) { + openssl::LogSSLErrors("Generating key pair"); + return nullptr; + } + return std::make_unique(pkey); +} + +std::unique_ptr OpenSSLKeyPair::FromPrivateKeyPEMString( + const std::string& pem_string) { + BIO* bio = + BIO_new_mem_buf(const_cast(pem_string.data()), pem_string.size()); + if (!bio) { + RTC_LOG(LS_ERROR) << "Failed to create a new BIO buffer."; + return nullptr; + } + BIO_set_mem_eof_return(bio, 0); + EVP_PKEY* pkey = PEM_read_bio_PrivateKey(bio, nullptr, nullptr, nullptr); + BIO_free(bio); // Frees the BIO, but not the pointed-to string. + if (!pkey) { + RTC_LOG(LS_ERROR) << "Failed to create the private key from PEM string."; + return nullptr; + } + if (EVP_PKEY_missing_parameters(pkey) != 0) { + RTC_LOG(LS_ERROR) + << "The resulting key pair is missing public key parameters."; + EVP_PKEY_free(pkey); + return nullptr; + } + return std::make_unique(pkey); +} + +OpenSSLKeyPair::~OpenSSLKeyPair() { + EVP_PKEY_free(pkey_); +} + +std::unique_ptr OpenSSLKeyPair::Clone() { + AddReference(); + return std::make_unique(pkey_); +} + +void OpenSSLKeyPair::AddReference() { + EVP_PKEY_up_ref(pkey_); +} + +std::string OpenSSLKeyPair::PrivateKeyToPEMString() const { + BIO* temp_memory_bio = BIO_new(BIO_s_mem()); + if (!temp_memory_bio) { + RTC_LOG_F(LS_ERROR) << "Failed to allocate temporary memory bio"; + RTC_NOTREACHED(); + return ""; + } + if (!PEM_write_bio_PrivateKey(temp_memory_bio, pkey_, nullptr, nullptr, 0, + nullptr, nullptr)) { + RTC_LOG_F(LS_ERROR) << "Failed to write private key"; + BIO_free(temp_memory_bio); + RTC_NOTREACHED(); + return ""; 
+ } + char* buffer; + size_t len = BIO_get_mem_data(temp_memory_bio, &buffer); + std::string priv_key_str(buffer, len); + BIO_free(temp_memory_bio); + return priv_key_str; +} + +std::string OpenSSLKeyPair::PublicKeyToPEMString() const { + BIO* temp_memory_bio = BIO_new(BIO_s_mem()); + if (!temp_memory_bio) { + RTC_LOG_F(LS_ERROR) << "Failed to allocate temporary memory bio"; + RTC_NOTREACHED(); + return ""; + } + if (!PEM_write_bio_PUBKEY(temp_memory_bio, pkey_)) { + RTC_LOG_F(LS_ERROR) << "Failed to write public key"; + BIO_free(temp_memory_bio); + RTC_NOTREACHED(); + return ""; + } + BIO_write(temp_memory_bio, "\0", 1); + char* buffer; + BIO_get_mem_data(temp_memory_bio, &buffer); + std::string pub_key_str = buffer; + BIO_free(temp_memory_bio); + return pub_key_str; +} + +bool OpenSSLKeyPair::operator==(const OpenSSLKeyPair& other) const { + return EVP_PKEY_cmp(this->pkey_, other.pkey_) == 1; +} + +bool OpenSSLKeyPair::operator!=(const OpenSSLKeyPair& other) const { + return !(*this == other); +} + +} // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h new file mode 100644 index 000000000..a84c43b6b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h @@ -0,0 +1,60 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_OPENSSL_KEY_PAIR_H_ +#define RTC_BASE_OPENSSL_KEY_PAIR_H_ + +#include + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/ssl_identity.h" + +namespace rtc { + +// OpenSSLKeyPair encapsulates an OpenSSL EVP_PKEY* keypair object, +// which is reference counted inside the OpenSSL library. +class OpenSSLKeyPair final { + public: + // Takes ownership of the key. + explicit OpenSSLKeyPair(EVP_PKEY* pkey) : pkey_(pkey) { + RTC_DCHECK(pkey_ != nullptr); + } + + static std::unique_ptr Generate(const KeyParams& key_params); + // Constructs a key pair from the private key PEM string. This must not result + // in missing public key parameters. Returns null on error. + static std::unique_ptr FromPrivateKeyPEMString( + const std::string& pem_string); + + ~OpenSSLKeyPair(); + + std::unique_ptr Clone(); + + EVP_PKEY* pkey() const { return pkey_; } + std::string PrivateKeyToPEMString() const; + std::string PublicKeyToPEMString() const; + bool operator==(const OpenSSLKeyPair& other) const; + bool operator!=(const OpenSSLKeyPair& other) const; + + private: + void AddReference(); + + EVP_PKEY* pkey_; + + RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLKeyPair); +}; + +} // namespace rtc + +#endif // RTC_BASE_OPENSSL_KEY_PAIR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc index f59b4edf1..ab2289b1f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.cc @@ -32,7 +32,12 @@ #include "rtc_base/openssl.h" #include "rtc_base/openssl_adapter.h" #include "rtc_base/openssl_digest.h" +#ifdef OPENSSL_IS_BORINGSSL +#include "rtc_base/boringssl_identity.h" +#else #include "rtc_base/openssl_identity.h" +#endif +#include "rtc_base/openssl_utility.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/stream.h" #include 
"rtc_base/task_utils/to_queued_task.h" @@ -283,7 +288,7 @@ bool ShouldAllowLegacyTLSProtocols() { OpenSSLStreamAdapter::OpenSSLStreamAdapter( std::unique_ptr stream) - : SSLStreamAdapter(std::move(stream)), + : stream_(std::move(stream)), owner_(rtc::Thread::Current()), state_(SSL_NONE), role_(SSL_CLIENT), @@ -295,7 +300,9 @@ OpenSSLStreamAdapter::OpenSSLStreamAdapter( ssl_max_version_(SSL_PROTOCOL_TLS_12), // Default is to support legacy TLS protocols. // This will be changed to default non-support in M82 or M83. - support_legacy_tls_protocols_flag_(ShouldAllowLegacyTLSProtocols()) {} + support_legacy_tls_protocols_flag_(ShouldAllowLegacyTLSProtocols()) { + stream_->SignalEvent.connect(this, &OpenSSLStreamAdapter::OnEvent); +} OpenSSLStreamAdapter::~OpenSSLStreamAdapter() { timeout_task_.Stop(); @@ -304,10 +311,14 @@ OpenSSLStreamAdapter::~OpenSSLStreamAdapter() { void OpenSSLStreamAdapter::SetIdentity(std::unique_ptr identity) { RTC_DCHECK(!identity_); +#ifdef OPENSSL_IS_BORINGSSL + identity_.reset(static_cast(identity.release())); +#else identity_.reset(static_cast(identity.release())); +#endif } -OpenSSLIdentity* OpenSSLStreamAdapter::GetIdentityForTesting() const { +SSLIdentity* OpenSSLStreamAdapter::GetIdentityForTesting() const { return identity_.get(); } @@ -510,7 +521,7 @@ int OpenSSLStreamAdapter::StartSSL() { return -1; } - if (StreamAdapterInterface::GetState() != SS_OPEN) { + if (stream_->GetState() != SS_OPEN) { state_ = SSL_WAIT; return 0; } @@ -552,7 +563,7 @@ StreamResult OpenSSLStreamAdapter::Write(const void* data, switch (state_) { case SSL_NONE: // pass-through in clear text - return StreamAdapterInterface::Write(data, data_len, written, error); + return stream_->Write(data, data_len, written, error); case SSL_WAIT: case SSL_CONNECTING: @@ -620,7 +631,7 @@ StreamResult OpenSSLStreamAdapter::Read(void* data, switch (state_) { case SSL_NONE: // pass-through in clear text - return StreamAdapterInterface::Read(data, data_len, read, error); + return 
stream_->Read(data, data_len, read, error); case SSL_WAIT: case SSL_CONNECTING: return SR_BLOCK; @@ -724,7 +735,7 @@ void OpenSSLStreamAdapter::Close() { // When we're closed at SSL layer, also close the stream level which // performs necessary clean up. Otherwise, a new incoming packet after // this could overflow the stream buffer. - StreamAdapterInterface::Close(); + stream_->Close(); } StreamState OpenSSLStreamAdapter::GetState() const { @@ -748,7 +759,7 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, int err) { int events_to_signal = 0; int signal_error = 0; - RTC_DCHECK(stream == this->stream()); + RTC_DCHECK(stream == stream_.get()); if ((events & SE_OPEN)) { RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent SE_OPEN"; @@ -800,7 +811,9 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, } if (events_to_signal) { - StreamAdapterInterface::OnEvent(stream, events_to_signal, signal_error); + // Note that the adapter presents itself as the origin of the stream events, + // since users of the adapter may not recognize the adapted object. + SignalEvent(this, events_to_signal, signal_error); } } @@ -845,7 +858,7 @@ int OpenSSLStreamAdapter::BeginSSL() { return -1; } - bio = BIO_new_stream(static_cast(stream())); + bio = BIO_new_stream(stream_.get()); if (!bio) { return -1; } @@ -903,8 +916,7 @@ int OpenSSLStreamAdapter::ContinueSSL() { // The caller of ContinueSSL may be the same object listening for these // events and may not be prepared for reentrancy. 
// PostEvent(SE_OPEN | SE_READ | SE_WRITE, 0); - StreamAdapterInterface::OnEvent(stream(), SE_OPEN | SE_READ | SE_WRITE, - 0); + SignalEvent(this, SE_OPEN | SE_READ | SE_WRITE, 0); } break; @@ -947,7 +959,7 @@ void OpenSSLStreamAdapter::Error(const char* context, ssl_error_code_ = err; Cleanup(alert); if (signal) { - StreamAdapterInterface::OnEvent(stream(), SE_CLOSE, err); + SignalEvent(this, SE_CLOSE, err); } } @@ -994,8 +1006,16 @@ void OpenSSLStreamAdapter::Cleanup(uint8_t alert) { } SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { +#ifdef OPENSSL_IS_BORINGSSL + // If X509 objects aren't used, we can use these methods to avoid + // linking the sizable crypto/x509 code, using CRYPTO_BUFFER instead. + SSL_CTX* ctx = + SSL_CTX_new(ssl_mode_ == SSL_MODE_DTLS ? DTLS_with_buffers_method() + : TLS_with_buffers_method()); +#else SSL_CTX* ctx = SSL_CTX_new(ssl_mode_ == SSL_MODE_DTLS ? DTLS_method() : TLS_method()); +#endif if (ctx == nullptr) { return nullptr; } @@ -1033,6 +1053,7 @@ SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { if (g_use_time_callback_for_testing) { SSL_CTX_set_current_time_cb(ctx, &TimeCallbackForTesting); } + SSL_CTX_set0_buffer_pool(ctx, openssl::GetBufferPool()); #endif if (identity_ && !identity_->ConfigureIdentity(ctx)) { @@ -1053,11 +1074,16 @@ SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { } // Configure a custom certificate verification callback to check the peer - // certificate digest. Note the second argument to SSL_CTX_set_verify is to - // override individual errors in the default verification logic, which is not - // what we want here. + // certificate digest. +#ifdef OPENSSL_IS_BORINGSSL + // Use CRYPTO_BUFFER version of the callback if building with BoringSSL. + SSL_CTX_set_custom_verify(ctx, mode, SSLVerifyCallback); +#else + // Note the second argument to SSL_CTX_set_verify is to override individual + // errors in the default verification logic, which is not what we want here. 
SSL_CTX_set_verify(ctx, mode, nullptr); SSL_CTX_set_cert_verify_callback(ctx, SSLVerifyCallback, nullptr); +#endif // Select list of available ciphers. Note that !SHA256 and !SHA384 only // remove HMAC-SHA256 and HMAC-SHA384 cipher suites, not GCM cipher suites @@ -1082,14 +1108,12 @@ bool OpenSSLStreamAdapter::VerifyPeerCertificate() { RTC_LOG(LS_WARNING) << "Missing digest or peer certificate."; return false; } - const OpenSSLCertificate* leaf_cert = - static_cast(&peer_cert_chain_->Get(0)); unsigned char digest[EVP_MAX_MD_SIZE]; size_t digest_length; - if (!OpenSSLCertificate::ComputeDigest( - leaf_cert->x509(), peer_certificate_digest_algorithm_, digest, - sizeof(digest), &digest_length)) { + if (!peer_cert_chain_->Get(0).ComputeDigest( + peer_certificate_digest_algorithm_, digest, sizeof(digest), + &digest_length)) { RTC_LOG(LS_WARNING) << "Failed to compute peer cert digest."; return false; } @@ -1113,6 +1137,36 @@ std::unique_ptr OpenSSLStreamAdapter::GetPeerSSLCertChain() return peer_cert_chain_ ? peer_cert_chain_->Clone() : nullptr; } +#ifdef OPENSSL_IS_BORINGSSL +enum ssl_verify_result_t OpenSSLStreamAdapter::SSLVerifyCallback( + SSL* ssl, + uint8_t* out_alert) { + // Get our OpenSSLStreamAdapter from the context. + OpenSSLStreamAdapter* stream = + reinterpret_cast(SSL_get_app_data(ssl)); + const STACK_OF(CRYPTO_BUFFER)* chain = SSL_get0_peer_certificates(ssl); + // Creates certificate chain. + std::vector> cert_chain; + for (CRYPTO_BUFFER* cert : chain) { + cert_chain.emplace_back(new BoringSSLCertificate(bssl::UpRef(cert))); + } + stream->peer_cert_chain_.reset(new SSLCertChain(std::move(cert_chain))); + + // If the peer certificate digest isn't known yet, we'll wait to verify + // until it's known, and for now just return a success status. + if (stream->peer_certificate_digest_algorithm_.empty()) { + RTC_LOG(LS_INFO) << "Waiting to verify certificate until digest is known."; + // TODO(deadbeef): Use ssl_verify_retry? 
+ return ssl_verify_ok; + } + + if (!stream->VerifyPeerCertificate()) { + return ssl_verify_invalid; + } + + return ssl_verify_ok; +} +#else // OPENSSL_IS_BORINGSSL int OpenSSLStreamAdapter::SSLVerifyCallback(X509_STORE_CTX* store, void* arg) { // Get our SSL structure and OpenSSLStreamAdapter from the store. SSL* ssl = reinterpret_cast( @@ -1120,20 +1174,10 @@ int OpenSSLStreamAdapter::SSLVerifyCallback(X509_STORE_CTX* store, void* arg) { OpenSSLStreamAdapter* stream = reinterpret_cast(SSL_get_app_data(ssl)); -#if defined(OPENSSL_IS_BORINGSSL) - STACK_OF(X509)* chain = SSL_get_peer_full_cert_chain(ssl); - // Creates certificate chain. - std::vector> cert_chain; - for (X509* cert : chain) { - cert_chain.emplace_back(new OpenSSLCertificate(cert)); - } - stream->peer_cert_chain_.reset(new SSLCertChain(std::move(cert_chain))); -#else // Record the peer's certificate. X509* cert = X509_STORE_CTX_get0_cert(store); stream->peer_cert_chain_.reset( new SSLCertChain(std::make_unique(cert))); -#endif // If the peer certificate digest isn't known yet, we'll wait to verify // until it's known, and for now just return a success status. 
@@ -1149,6 +1193,7 @@ int OpenSSLStreamAdapter::SSLVerifyCallback(X509_STORE_CTX* store, void* arg) { return 1; } +#endif // !OPENSSL_IS_BORINGSSL bool OpenSSLStreamAdapter::IsBoringSsl() { #ifdef OPENSSL_IS_BORINGSSL diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h index fbfccd684..58e15e3e6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_stream_adapter.h @@ -21,7 +21,11 @@ #include "absl/types/optional.h" #include "rtc_base/buffer.h" +#ifdef OPENSSL_IS_BORINGSSL +#include "rtc_base/boringssl_identity.h" +#else #include "rtc_base/openssl_identity.h" +#endif #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" @@ -71,7 +75,7 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { ~OpenSSLStreamAdapter() override; void SetIdentity(std::unique_ptr identity) override; - OpenSSLIdentity* GetIdentityForTesting() const override; + SSLIdentity* GetIdentityForTesting() const override; // Default argument is for compatibility void SetServerRole(SSLRole role = SSL_SERVER) override; @@ -132,9 +136,6 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { // using a fake clock. static void EnableTimeCallbackForTesting(); - protected: - void OnEvent(StreamInterface* stream, int events, int err) override; - private: enum SSLState { // Before calling one of the StartSSL methods, data flows @@ -147,6 +148,8 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { SSL_CLOSED // Clean close }; + void OnEvent(StreamInterface* stream, int events, int err); + void PostEvent(int events, int err); void SetTimeout(int delay_ms); @@ -179,9 +182,16 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { SSL_CTX* SetupSSLContext(); // Verify the peer certificate matches the signaled digest. 
bool VerifyPeerCertificate(); + +#ifdef OPENSSL_IS_BORINGSSL + // SSL certificate verification callback. See SSL_CTX_set_custom_verify. + static enum ssl_verify_result_t SSLVerifyCallback(SSL* ssl, + uint8_t* out_alert); +#else // SSL certificate verification callback. See // SSL_CTX_set_cert_verify_callback. static int SSLVerifyCallback(X509_STORE_CTX* store, void* arg); +#endif bool WaitingToVerifyPeerCertificate() const { return GetClientAuthEnabled() && !peer_certificate_verified_; @@ -192,6 +202,8 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { !peer_certificate_digest_value_.empty(); } + const std::unique_ptr stream_; + rtc::Thread* const owner_; webrtc::ScopedTaskSafety task_safety_; webrtc::RepeatingTaskHandle timeout_task_; @@ -208,7 +220,11 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter { SSL_CTX* ssl_ctx_; // Our key and certificate. +#ifdef OPENSSL_IS_BORINGSSL + std::unique_ptr identity_; +#else std::unique_ptr identity_; +#endif // The certificate chain that the peer presented. Initially null, until the // connection is established. std::unique_ptr peer_cert_chain_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.cc index 1984eb070..b5d649ca5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.cc @@ -14,6 +14,9 @@ #include "rtc_base/win32.h" // NOLINT #endif // WEBRTC_WIN +#ifdef OPENSSL_IS_BORINGSSL +#include +#endif #include #include #include @@ -23,7 +26,7 @@ #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/openssl.h" -#include "rtc_base/openssl_certificate.h" +#include "rtc_base/ssl_identity.h" #ifndef WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS #include "rtc_base/ssl_roots.h" #endif // WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS @@ -33,6 +36,10 @@ namespace openssl { // Holds various helper methods. 
namespace { + +// TODO(crbug.com/webrtc/11710): When OS certificate verification is available, +// and we don't need VerifyPeerCertMatchesHost, don't compile this in order to +// avoid a dependency on OpenSSL X509 objects (see crbug.com/webrtc/11410). void LogCertificates(SSL* ssl, X509* certificate) { // Logging certificates is extremely verbose. So it is disabled by default. #ifdef LOG_CERTIFICATES @@ -65,6 +72,118 @@ void LogCertificates(SSL* ssl, X509* certificate) { } } // namespace +#ifdef OPENSSL_IS_BORINGSSL +bool ParseCertificate(CRYPTO_BUFFER* cert_buffer, + CBS* signature_algorithm_oid, + int64_t* expiration_time) { + CBS cbs; + CRYPTO_BUFFER_init_CBS(cert_buffer, &cbs); + + // Certificate ::= SEQUENCE { + CBS certificate; + if (!CBS_get_asn1(&cbs, &certificate, CBS_ASN1_SEQUENCE)) { + return false; + } + // tbsCertificate TBSCertificate, + CBS tbs_certificate; + if (!CBS_get_asn1(&certificate, &tbs_certificate, CBS_ASN1_SEQUENCE)) { + return false; + } + // signatureAlgorithm AlgorithmIdentifier, + CBS signature_algorithm; + if (!CBS_get_asn1(&certificate, &signature_algorithm, CBS_ASN1_SEQUENCE)) { + return false; + } + if (!CBS_get_asn1(&signature_algorithm, signature_algorithm_oid, + CBS_ASN1_OBJECT)) { + return false; + } + // signatureValue BIT STRING } + if (!CBS_get_asn1(&certificate, nullptr, CBS_ASN1_BITSTRING)) { + return false; + } + if (CBS_len(&certificate)) { + return false; + } + + // Now parse the inner TBSCertificate. 
+ // version [0] EXPLICIT Version DEFAULT v1, + if (!CBS_get_optional_asn1( + &tbs_certificate, nullptr, nullptr, + CBS_ASN1_CONSTRUCTED | CBS_ASN1_CONTEXT_SPECIFIC)) { + return false; + } + // serialNumber CertificateSerialNumber, + if (!CBS_get_asn1(&tbs_certificate, nullptr, CBS_ASN1_INTEGER)) { + return false; + } + // signature AlgorithmIdentifier + if (!CBS_get_asn1(&tbs_certificate, nullptr, CBS_ASN1_SEQUENCE)) { + return false; + } + // issuer Name, + if (!CBS_get_asn1(&tbs_certificate, nullptr, CBS_ASN1_SEQUENCE)) { + return false; + } + // validity Validity, + CBS validity; + if (!CBS_get_asn1(&tbs_certificate, &validity, CBS_ASN1_SEQUENCE)) { + return false; + } + // Skip over notBefore. + if (!CBS_get_any_asn1_element(&validity, nullptr, nullptr, nullptr)) { + return false; + } + // Parse notAfter. + CBS not_after; + unsigned not_after_tag; + if (!CBS_get_any_asn1(&validity, ¬_after, ¬_after_tag)) { + return false; + } + bool long_format; + if (not_after_tag == CBS_ASN1_UTCTIME) { + long_format = false; + } else if (not_after_tag == CBS_ASN1_GENERALIZEDTIME) { + long_format = true; + } else { + return false; + } + if (expiration_time) { + *expiration_time = + ASN1TimeToSec(CBS_data(¬_after), CBS_len(¬_after), long_format); + } + // subject Name, + if (!CBS_get_asn1_element(&tbs_certificate, nullptr, CBS_ASN1_SEQUENCE)) { + return false; + } + // subjectPublicKeyInfo SubjectPublicKeyInfo, + if (!CBS_get_asn1(&tbs_certificate, nullptr, CBS_ASN1_SEQUENCE)) { + return false; + } + // issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL + if (!CBS_get_optional_asn1(&tbs_certificate, nullptr, nullptr, + 0x01 | CBS_ASN1_CONTEXT_SPECIFIC)) { + return false; + } + // subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL + if (!CBS_get_optional_asn1(&tbs_certificate, nullptr, nullptr, + 0x02 | CBS_ASN1_CONTEXT_SPECIFIC)) { + return false; + } + // extensions [3] EXPLICIT Extensions OPTIONAL + if (!CBS_get_optional_asn1( + &tbs_certificate, nullptr, nullptr, + 
0x03 | CBS_ASN1_CONSTRUCTED | CBS_ASN1_CONTEXT_SPECIFIC)) { + return false; + } + if (CBS_len(&tbs_certificate)) { + return false; + } + + return true; +} +#endif // OPENSSL_IS_BORINGSSL + bool VerifyPeerCertMatchesHost(SSL* ssl, const std::string& host) { if (host.empty()) { RTC_DLOG(LS_ERROR) << "Hostname is empty. Cannot verify peer certificate."; @@ -76,9 +195,28 @@ bool VerifyPeerCertMatchesHost(SSL* ssl, const std::string& host) { return false; } +#ifdef OPENSSL_IS_BORINGSSL + // We can't grab a X509 object directly, as the SSL context may have been + // initialized with TLS_with_buffers_method. + const STACK_OF(CRYPTO_BUFFER)* chain = SSL_get0_peer_certificates(ssl); + if (chain == nullptr || sk_CRYPTO_BUFFER_num(chain) == 0) { + RTC_LOG(LS_ERROR) + << "SSL_get0_peer_certificates failed. This should never happen."; + return false; + } + CRYPTO_BUFFER* leaf = sk_CRYPTO_BUFFER_value(chain, 0); + bssl::UniquePtr x509(X509_parse_from_buffer(leaf)); + if (!x509) { + RTC_LOG(LS_ERROR) << "Failed to parse certificate to X509 object."; + return false; + } + LogCertificates(ssl, x509.get()); + return X509_check_host(x509.get(), host.c_str(), host.size(), 0, nullptr) == + 1; +#else // OPENSSL_IS_BORINGSSL X509* certificate = SSL_get_peer_certificate(ssl); if (certificate == nullptr) { - RTC_DLOG(LS_ERROR) + RTC_LOG(LS_ERROR) << "SSL_get_peer_certificate failed. 
This should never happen."; return false; } @@ -89,6 +227,7 @@ bool VerifyPeerCertMatchesHost(SSL* ssl, const std::string& host) { X509_check_host(certificate, host.c_str(), host.size(), 0, nullptr) == 1; X509_free(certificate); return is_valid_cert_name; +#endif // !defined(OPENSSL_IS_BORINGSSL) } void LogSSLErrors(const std::string& prefix) { @@ -123,5 +262,12 @@ bool LoadBuiltinSSLRootCertificates(SSL_CTX* ctx) { } #endif // WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS +#ifdef OPENSSL_IS_BORINGSSL +CRYPTO_BUFFER_POOL* GetBufferPool() { + static CRYPTO_BUFFER_POOL* instance = CRYPTO_BUFFER_POOL_new(); + return instance; +} +#endif + } // namespace openssl } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.h index 022294d4b..ee29ccd60 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_utility.h @@ -20,8 +20,21 @@ namespace rtc { // to OpenSSL that are commonly used and don't require global state should be // placed here. namespace openssl { + +#ifdef OPENSSL_IS_BORINGSSL +// Does minimal parsing of a certificate (only verifying the presence of major +// fields), primarily for the purpose of extracting the relevant out +// parameters. Any that the caller is uninterested in can be null. +bool ParseCertificate(CRYPTO_BUFFER* cert_buffer, + CBS* signature_algorithm_oid, + int64_t* expiration_time); +#endif + // Verifies that the hostname provided matches that in the peer certificate // attached to this SSL state. +// TODO(crbug.com/webrtc/11710): When OS certificate verification is available, +// skip compiling this as it adds a dependency on OpenSSL X509 objects, which we +// are trying to avoid in favor of CRYPTO_BUFFERs (see crbug.com/webrtc/11410). bool VerifyPeerCertMatchesHost(SSL* ssl, const std::string& host); // Logs all the errors in the OpenSSL errror queue from the current thread. 
A @@ -35,6 +48,10 @@ void LogSSLErrors(const std::string& prefix); bool LoadBuiltinSSLRootCertificates(SSL_CTX* ssl_ctx); #endif // WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS +#ifdef OPENSSL_IS_BORINGSSL +CRYPTO_BUFFER_POOL* GetBufferPool(); +#endif + } // namespace openssl } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h index 44a3d9acb..3dc599511 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h @@ -20,11 +20,12 @@ #include "absl/types/optional.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counted_object.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace rtc { @@ -183,7 +184,7 @@ class OperationsChain final : public RefCountedObject { std::function CreateOperationsChainCallback(); void OnOperationComplete(); - webrtc::SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; // FIFO-list of operations that are chained. An operation that is executing // remains on this list until it has completed by invoking the callback passed // to it. 
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc index cf6e79279..790454804 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.cc @@ -48,6 +48,7 @@ #include "rtc_base/logging.h" #include "rtc_base/network_monitor.h" #include "rtc_base/null_socket_server.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" #if defined(WEBRTC_LINUX) @@ -119,14 +120,6 @@ class ScopedSetTrue { namespace rtc { -std::unique_ptr SocketServer::CreateDefault() { -#if defined(__native_client__) - return std::unique_ptr(new rtc::NullSocketServer); -#else - return std::unique_ptr(new rtc::PhysicalSocketServer); -#endif -} - PhysicalSocket::PhysicalSocket(PhysicalSocketServer* ss, SOCKET s) : ss_(ss), s_(s), @@ -281,12 +274,12 @@ int PhysicalSocket::DoConnect(const SocketAddress& connect_addr) { } int PhysicalSocket::GetError() const { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); return error_; } void PhysicalSocket::SetError(int error) { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); error_ = error; } @@ -335,10 +328,17 @@ int PhysicalSocket::SetOption(Option opt, int value) { #if defined(WEBRTC_POSIX) if (sopt == IPV6_TCLASS) { // Set the IPv4 option in all cases to support dual-stack sockets. + // Don't bother checking the return code, as this is expected to fail if + // it's not actually dual-stack. 
::setsockopt(s_, IPPROTO_IP, IP_TOS, (SockOptArg)&value, sizeof(value)); } #endif - return ::setsockopt(s_, slevel, sopt, (SockOptArg)&value, sizeof(value)); + int result = + ::setsockopt(s_, slevel, sopt, (SockOptArg)&value, sizeof(value)); + if (result != 0) { + UpdateLastError(); + } + return result; } int PhysicalSocket::Send(const void* pv, size_t cb) { @@ -760,21 +760,14 @@ uint32_t SocketDispatcher::GetRequestedEvents() { return enabled_events(); } -void SocketDispatcher::OnPreEvent(uint32_t ff) { - if ((ff & DE_CONNECT) != 0) - state_ = CS_CONNECTED; - -#if defined(WEBRTC_WIN) -// We set CS_CLOSED from CheckSignalClose. -#elif defined(WEBRTC_POSIX) - if ((ff & DE_CLOSE) != 0) - state_ = CS_CLOSED; -#endif -} - #if defined(WEBRTC_WIN) void SocketDispatcher::OnEvent(uint32_t ff, int err) { + if ((ff & DE_CONNECT) != 0) + state_ = CS_CONNECTED; + + // We set CS_CLOSED from CheckSignalClose. + int cache_id = id_; // Make sure we deliver connect/accept first. Otherwise, consumers may see // something like a READ followed by a CONNECT, which would be odd. @@ -809,6 +802,12 @@ void SocketDispatcher::OnEvent(uint32_t ff, int err) { #elif defined(WEBRTC_POSIX) void SocketDispatcher::OnEvent(uint32_t ff, int err) { + if ((ff & DE_CONNECT) != 0) + state_ = CS_CONNECTED; + + if ((ff & DE_CLOSE) != 0) + state_ = CS_CLOSED; + #if defined(WEBRTC_USE_EPOLL) // Remember currently enabled events so we can combine multiple changes // into one update call later. @@ -920,22 +919,32 @@ int SocketDispatcher::Close() { } #if defined(WEBRTC_POSIX) -class EventDispatcher : public Dispatcher { +// Sets the value of a boolean value to false when signaled. 
+class Signaler : public Dispatcher { public: - EventDispatcher(PhysicalSocketServer* ss) : ss_(ss), fSignaled_(false) { - if (pipe(afd_) < 0) - RTC_LOG(LERROR) << "pipe failed"; + Signaler(PhysicalSocketServer* ss, bool& flag_to_clear) + : ss_(ss), + afd_([] { + std::array afd = {-1, -1}; + + if (pipe(afd.data()) < 0) { + RTC_LOG(LERROR) << "pipe failed"; + } + return afd; + }()), + fSignaled_(false), + flag_to_clear_(flag_to_clear) { ss_->Add(this); } - ~EventDispatcher() override { + ~Signaler() override { ss_->Remove(this); close(afd_[0]); close(afd_[1]); } virtual void Signal() { - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); if (!fSignaled_) { const uint8_t b[1] = {0}; const ssize_t res = write(afd_[1], b, sizeof(b)); @@ -946,30 +955,30 @@ class EventDispatcher : public Dispatcher { uint32_t GetRequestedEvents() override { return DE_READ; } - void OnPreEvent(uint32_t ff) override { + void OnEvent(uint32_t ff, int err) override { // It is not possible to perfectly emulate an auto-resetting event with // pipes. This simulates it by resetting before the event is handled. - CritScope cs(&crit_); + webrtc::MutexLock lock(&mutex_); if (fSignaled_) { uint8_t b[4]; // Allow for reading more than 1 byte, but expect 1. 
const ssize_t res = read(afd_[0], b, sizeof(b)); RTC_DCHECK_EQ(1, res); fSignaled_ = false; } + flag_to_clear_ = false; } - void OnEvent(uint32_t ff, int err) override { RTC_NOTREACHED(); } - int GetDescriptor() override { return afd_[0]; } bool IsDescriptorClosed() override { return false; } private: - PhysicalSocketServer* ss_; - int afd_[2]; - bool fSignaled_; - RecursiveCriticalSection crit_; + PhysicalSocketServer* const ss_; + const std::array afd_; + bool fSignaled_ RTC_GUARDED_BY(mutex_); + webrtc::Mutex mutex_; + bool& flag_to_clear_; }; #endif // WEBRTC_POSIX @@ -988,16 +997,18 @@ static uint32_t FlagsToEvents(uint32_t events) { return ffFD; } -class EventDispatcher : public Dispatcher { +// Sets the value of a boolean value to false when signaled. +class Signaler : public Dispatcher { public: - EventDispatcher(PhysicalSocketServer* ss) : ss_(ss) { + Signaler(PhysicalSocketServer* ss, bool& flag_to_clear) + : ss_(ss), flag_to_clear_(flag_to_clear) { hev_ = WSACreateEvent(); if (hev_) { ss_->Add(this); } } - ~EventDispatcher() override { + ~Signaler() override { if (hev_ != nullptr) { ss_->Remove(this); WSACloseEvent(hev_); @@ -1012,9 +1023,10 @@ class EventDispatcher : public Dispatcher { uint32_t GetRequestedEvents() override { return 0; } - void OnPreEvent(uint32_t ff) override { WSAResetEvent(hev_); } - - void OnEvent(uint32_t ff, int err) override {} + void OnEvent(uint32_t ff, int err) override { + WSAResetEvent(hev_); + flag_to_clear_ = false; + } WSAEVENT GetWSAEvent() override { return hev_; } @@ -1025,24 +1037,10 @@ class EventDispatcher : public Dispatcher { private: PhysicalSocketServer* ss_; WSAEVENT hev_; + bool& flag_to_clear_; }; #endif // WEBRTC_WIN -// Sets the value of a boolean value to false when signaled. 
-class Signaler : public EventDispatcher { - public: - Signaler(PhysicalSocketServer* ss, bool* pf) : EventDispatcher(ss), pf_(pf) {} - ~Signaler() override {} - - void OnEvent(uint32_t ff, int err) override { - if (pf_) - *pf_ = false; - } - - private: - bool* pf_; -}; - PhysicalSocketServer::PhysicalSocketServer() : #if defined(WEBRTC_USE_EPOLL) @@ -1062,7 +1060,8 @@ PhysicalSocketServer::PhysicalSocketServer() // Note that -1 == INVALID_SOCKET, the alias used by later checks. } #endif - signal_wakeup_ = new Signaler(this, &fWait_); + // The `fWait_` flag to be cleared by the Signaler. + signal_wakeup_ = new Signaler(this, fWait_); } PhysicalSocketServer::~PhysicalSocketServer() { @@ -1230,7 +1229,6 @@ static void ProcessEvents(Dispatcher* dispatcher, // Tell the descriptor about the event. if (ff != 0) { - dispatcher->OnPreEvent(ff); dispatcher->OnEvent(ff, errcode); } } @@ -1634,7 +1632,6 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { continue; } Dispatcher* disp = dispatcher_by_key_.at(key); - disp->OnPreEvent(0); disp->OnEvent(0, 0); } else if (process_io) { // Iterate only on the dispatchers whose sockets were passed into @@ -1705,7 +1702,6 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) { errcode = wsaEvents.iErrorCode[FD_CLOSE_BIT]; } if (ff != 0) { - disp->OnPreEvent(ff); disp->OnEvent(ff, errcode); } } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h index cc21a67b1..4b7957eb2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/physical_socket_server.h @@ -21,9 +21,11 @@ #include #include +#include "rtc_base/async_resolver.h" +#include "rtc_base/async_resolver_interface.h" #include "rtc_base/deprecated/recursive_critical_section.h" -#include "rtc_base/net_helpers.h" #include "rtc_base/socket_server.h" +#include "rtc_base/synchronization/mutex.h" #include 
"rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" @@ -48,7 +50,6 @@ class Dispatcher { public: virtual ~Dispatcher() {} virtual uint32_t GetRequestedEvents() = 0; - virtual void OnPreEvent(uint32_t ff) = 0; virtual void OnEvent(uint32_t ff, int err) = 0; #if defined(WEBRTC_WIN) virtual WSAEVENT GetWSAEvent() = 0; @@ -202,8 +203,8 @@ class PhysicalSocket : public AsyncSocket, public sigslot::has_slots<> { SOCKET s_; bool udp_; int family_ = 0; - RecursiveCriticalSection crit_; - int error_ RTC_GUARDED_BY(crit_); + mutable webrtc::Mutex mutex_; + int error_ RTC_GUARDED_BY(mutex_); ConnState state_; AsyncResolver* resolver_; @@ -236,7 +237,6 @@ class SocketDispatcher : public Dispatcher, public PhysicalSocket { #endif uint32_t GetRequestedEvents() override; - void OnPreEvent(uint32_t ff) override; void OnEvent(uint32_t ff, int err) override; int Close() override; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.cc index 8a5f2c9d6..6d369d747 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.cc @@ -10,131 +10,37 @@ #include "rtc_base/platform_thread.h" +#include +#include + #if !defined(WEBRTC_WIN) #include #endif -#include -#include - -#include #include "rtc_base/checks.h" namespace rtc { namespace { -#if !defined(WEBRTC_WIN) -struct ThreadAttributes { - ThreadAttributes() { pthread_attr_init(&attr); } - ~ThreadAttributes() { pthread_attr_destroy(&attr); } - pthread_attr_t* operator&() { return &attr; } - pthread_attr_t attr; -}; -#endif // defined(WEBRTC_WIN) -} // namespace - -PlatformThread::PlatformThread(ThreadRunFunction func, - void* obj, - absl::string_view thread_name, - ThreadPriority priority /*= kNormalPriority*/) - : run_function_(func), priority_(priority), obj_(obj), name_(thread_name) { - RTC_DCHECK(func); - RTC_DCHECK(!name_.empty()); - // TODO(tommi): Consider lowering 
the limit to 15 (limit on Linux). - RTC_DCHECK(name_.length() < 64); - spawned_thread_checker_.Detach(); -} - -PlatformThread::~PlatformThread() { - RTC_DCHECK(thread_checker_.IsCurrent()); -#if defined(WEBRTC_WIN) - RTC_DCHECK(!thread_); - RTC_DCHECK(!thread_id_); -#endif // defined(WEBRTC_WIN) -} #if defined(WEBRTC_WIN) -DWORD WINAPI PlatformThread::StartThread(void* param) { - // The GetLastError() function only returns valid results when it is called - // after a Win32 API function that returns a "failed" result. A crash dump - // contains the result from GetLastError() and to make sure it does not - // falsely report a Windows error we call SetLastError here. - ::SetLastError(ERROR_SUCCESS); - static_cast(param)->Run(); - return 0; +int Win32PriorityFromThreadPriority(ThreadPriority priority) { + switch (priority) { + case ThreadPriority::kLow: + return THREAD_PRIORITY_BELOW_NORMAL; + case ThreadPriority::kNormal: + return THREAD_PRIORITY_NORMAL; + case ThreadPriority::kHigh: + return THREAD_PRIORITY_ABOVE_NORMAL; + case ThreadPriority::kRealtime: + return THREAD_PRIORITY_TIME_CRITICAL; + } } -#else -void* PlatformThread::StartThread(void* param) { - static_cast(param)->Run(); - return 0; -} -#endif // defined(WEBRTC_WIN) +#endif -void PlatformThread::Start() { - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!thread_) << "Thread already started?"; +bool SetPriority(ThreadPriority priority) { #if defined(WEBRTC_WIN) - // See bug 2902 for background on STACK_SIZE_PARAM_IS_A_RESERVATION. - // Set the reserved stack stack size to 1M, which is the default on Windows - // and Linux. - thread_ = ::CreateThread(nullptr, 1024 * 1024, &StartThread, this, - STACK_SIZE_PARAM_IS_A_RESERVATION, &thread_id_); - RTC_CHECK(thread_) << "CreateThread failed"; - RTC_DCHECK(thread_id_); -#else - ThreadAttributes attr; - // Set the stack stack size to 1M. 
- pthread_attr_setstacksize(&attr, 1024 * 1024); - RTC_CHECK_EQ(0, pthread_create(&thread_, &attr, &StartThread, this)); -#endif // defined(WEBRTC_WIN) -} - -bool PlatformThread::IsRunning() const { - RTC_DCHECK(thread_checker_.IsCurrent()); -#if defined(WEBRTC_WIN) - return thread_ != nullptr; -#else - return thread_ != 0; -#endif // defined(WEBRTC_WIN) -} - -PlatformThreadRef PlatformThread::GetThreadRef() const { -#if defined(WEBRTC_WIN) - return thread_id_; -#else - return thread_; -#endif // defined(WEBRTC_WIN) -} - -void PlatformThread::Stop() { - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!IsRunning()) - return; - -#if defined(WEBRTC_WIN) - WaitForSingleObject(thread_, INFINITE); - CloseHandle(thread_); - thread_ = nullptr; - thread_id_ = 0; -#else - RTC_CHECK_EQ(0, pthread_join(thread_, nullptr)); - thread_ = 0; -#endif // defined(WEBRTC_WIN) - spawned_thread_checker_.Detach(); -} - -void PlatformThread::Run() { - // Attach the worker thread checker to this thread. - RTC_DCHECK(spawned_thread_checker_.IsCurrent()); - rtc::SetCurrentThreadName(name_.c_str()); - SetPriority(priority_); - run_function_(obj_); -} - -bool PlatformThread::SetPriority(ThreadPriority priority) { - RTC_DCHECK(spawned_thread_checker_.IsCurrent()); - -#if defined(WEBRTC_WIN) - return SetThreadPriority(thread_, priority) != FALSE; + return SetThreadPriority(GetCurrentThread(), + Win32PriorityFromThreadPriority(priority)) != FALSE; #elif defined(__native_client__) || defined(WEBRTC_FUCHSIA) // Setting thread priorities is not supported in NaCl or Fuchsia. return true; @@ -158,35 +64,148 @@ bool PlatformThread::SetPriority(ThreadPriority priority) { const int top_prio = max_prio - 1; const int low_prio = min_prio + 1; switch (priority) { - case kLowPriority: + case ThreadPriority::kLow: param.sched_priority = low_prio; break; - case kNormalPriority: + case ThreadPriority::kNormal: // The -1 ensures that the kHighPriority is always greater or equal to // kNormalPriority. 
param.sched_priority = (low_prio + top_prio - 1) / 2; break; - case kHighPriority: + case ThreadPriority::kHigh: param.sched_priority = std::max(top_prio - 2, low_prio); break; - case kHighestPriority: - param.sched_priority = std::max(top_prio - 1, low_prio); - break; - case kRealtimePriority: + case ThreadPriority::kRealtime: param.sched_priority = top_prio; break; } - return pthread_setschedparam(thread_, policy, ¶m) == 0; + return pthread_setschedparam(pthread_self(), policy, ¶m) == 0; #endif // defined(WEBRTC_WIN) } #if defined(WEBRTC_WIN) -bool PlatformThread::QueueAPC(PAPCFUNC function, ULONG_PTR data) { - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(IsRunning()); +DWORD WINAPI RunPlatformThread(void* param) { + // The GetLastError() function only returns valid results when it is called + // after a Win32 API function that returns a "failed" result. A crash dump + // contains the result from GetLastError() and to make sure it does not + // falsely report a Windows error we call SetLastError here. 
+ ::SetLastError(ERROR_SUCCESS); + auto function = static_cast*>(param); + (*function)(); + delete function; + return 0; +} +#else +void* RunPlatformThread(void* param) { + auto function = static_cast*>(param); + (*function)(); + delete function; + return 0; +} +#endif // defined(WEBRTC_WIN) - return QueueUserAPC(function, thread_, data) != FALSE; +} // namespace + +PlatformThread::PlatformThread(Handle handle, bool joinable) + : handle_(handle), joinable_(joinable) {} + +PlatformThread::PlatformThread(PlatformThread&& rhs) + : handle_(rhs.handle_), joinable_(rhs.joinable_) { + rhs.handle_ = absl::nullopt; +} + +PlatformThread& PlatformThread::operator=(PlatformThread&& rhs) { + Finalize(); + handle_ = rhs.handle_; + joinable_ = rhs.joinable_; + rhs.handle_ = absl::nullopt; + return *this; +} + +PlatformThread::~PlatformThread() { + Finalize(); +} + +PlatformThread PlatformThread::SpawnJoinable( + std::function thread_function, + absl::string_view name, + ThreadAttributes attributes) { + return SpawnThread(std::move(thread_function), name, attributes, + /*joinable=*/true); +} + +PlatformThread PlatformThread::SpawnDetached( + std::function thread_function, + absl::string_view name, + ThreadAttributes attributes) { + return SpawnThread(std::move(thread_function), name, attributes, + /*joinable=*/false); +} + +absl::optional PlatformThread::GetHandle() const { + return handle_; +} + +#if defined(WEBRTC_WIN) +bool PlatformThread::QueueAPC(PAPCFUNC function, ULONG_PTR data) { + RTC_DCHECK(handle_.has_value()); + return handle_.has_value() ? 
QueueUserAPC(function, *handle_, data) != FALSE + : false; } #endif +void PlatformThread::Finalize() { + if (!handle_.has_value()) + return; +#if defined(WEBRTC_WIN) + if (joinable_) + WaitForSingleObject(*handle_, INFINITE); + CloseHandle(*handle_); +#else + if (joinable_) + RTC_CHECK_EQ(0, pthread_join(*handle_, nullptr)); +#endif + handle_ = absl::nullopt; +} + +PlatformThread PlatformThread::SpawnThread( + std::function thread_function, + absl::string_view name, + ThreadAttributes attributes, + bool joinable) { + RTC_DCHECK(thread_function); + RTC_DCHECK(!name.empty()); + // TODO(tommi): Consider lowering the limit to 15 (limit on Linux). + RTC_DCHECK(name.length() < 64); + auto start_thread_function_ptr = + new std::function([thread_function = std::move(thread_function), + name = std::string(name), attributes] { + rtc::SetCurrentThreadName(name.c_str()); + SetPriority(attributes.priority); + thread_function(); + }); +#if defined(WEBRTC_WIN) + // See bug 2902 for background on STACK_SIZE_PARAM_IS_A_RESERVATION. + // Set the reserved stack stack size to 1M, which is the default on Windows + // and Linux. + DWORD thread_id = 0; + PlatformThread::Handle handle = ::CreateThread( + nullptr, 1024 * 1024, &RunPlatformThread, start_thread_function_ptr, + STACK_SIZE_PARAM_IS_A_RESERVATION, &thread_id); + RTC_CHECK(handle) << "CreateThread failed"; +#else + pthread_attr_t attr; + pthread_attr_init(&attr); + // Set the stack stack size to 1M. + pthread_attr_setstacksize(&attr, 1024 * 1024); + pthread_attr_setdetachstate( + &attr, joinable ? 
PTHREAD_CREATE_JOINABLE : PTHREAD_CREATE_DETACHED); + PlatformThread::Handle handle; + RTC_CHECK_EQ(0, pthread_create(&handle, &attr, &RunPlatformThread, + start_thread_function_ptr)); + pthread_attr_destroy(&attr); +#endif // defined(WEBRTC_WIN) + return PlatformThread(handle, joinable); +} + } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.h index 4968de9ee..11ccfae3d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/platform_thread.h @@ -11,92 +11,101 @@ #ifndef RTC_BASE_PLATFORM_THREAD_H_ #define RTC_BASE_PLATFORM_THREAD_H_ -#ifndef WEBRTC_WIN -#include -#endif +#include #include #include "absl/strings/string_view.h" -#include "rtc_base/constructor_magic.h" +#include "absl/types/optional.h" #include "rtc_base/platform_thread_types.h" -#include "rtc_base/thread_checker.h" namespace rtc { -// Callback function that the spawned thread will enter once spawned. -typedef void (*ThreadRunFunction)(void*); - -enum ThreadPriority { -#ifdef WEBRTC_WIN - kLowPriority = THREAD_PRIORITY_BELOW_NORMAL, - kNormalPriority = THREAD_PRIORITY_NORMAL, - kHighPriority = THREAD_PRIORITY_ABOVE_NORMAL, - kHighestPriority = THREAD_PRIORITY_HIGHEST, - kRealtimePriority = THREAD_PRIORITY_TIME_CRITICAL -#else - kLowPriority = 1, - kNormalPriority = 2, - kHighPriority = 3, - kHighestPriority = 4, - kRealtimePriority = 5 -#endif +enum class ThreadPriority { + kLow = 1, + kNormal, + kHigh, + kRealtime, }; -// Represents a simple worker thread. The implementation must be assumed -// to be single threaded, meaning that all methods of the class, must be -// called from the same thread, including instantiation. 
-class PlatformThread { +struct ThreadAttributes { + ThreadPriority priority = ThreadPriority::kNormal; + ThreadAttributes& SetPriority(ThreadPriority priority_param) { + priority = priority_param; + return *this; + } +}; + +// Represents a simple worker thread. +class PlatformThread final { public: - PlatformThread(ThreadRunFunction func, - void* obj, - absl::string_view thread_name, - ThreadPriority priority = kNormalPriority); + // Handle is the base platform thread handle. +#if defined(WEBRTC_WIN) + using Handle = HANDLE; +#else + using Handle = pthread_t; +#endif // defined(WEBRTC_WIN) + // This ctor creates the PlatformThread with an unset handle (returning true + // in empty()) and is provided for convenience. + // TODO(bugs.webrtc.org/12727) Look into if default and move support can be + // removed. + PlatformThread() = default; + + // Moves |rhs| into this, storing an empty state in |rhs|. + // TODO(bugs.webrtc.org/12727) Look into if default and move support can be + // removed. + PlatformThread(PlatformThread&& rhs); + + // Moves |rhs| into this, storing an empty state in |rhs|. + // TODO(bugs.webrtc.org/12727) Look into if default and move support can be + // removed. + PlatformThread& operator=(PlatformThread&& rhs); + + // For a PlatformThread that's been spawned joinable, the destructor suspends + // the calling thread until the created thread exits unless the thread has + // already exited. virtual ~PlatformThread(); - const std::string& name() const { return name_; } + // Finalizes any allocated resources. + // For a PlatformThread that's been spawned joinable, Finalize() suspends + // the calling thread until the created thread exits unless the thread has + // already exited. + // empty() returns true after completion. + void Finalize(); - // Spawns a thread and tries to set thread priority according to the priority - // from when CreateThread was called. - void Start(); + // Returns true if default constructed, moved from, or Finalize()ed. 
+ bool empty() const { return !handle_.has_value(); } - bool IsRunning() const; + // Creates a started joinable thread which will be joined when the returned + // PlatformThread destructs or Finalize() is called. + static PlatformThread SpawnJoinable( + std::function thread_function, + absl::string_view name, + ThreadAttributes attributes = ThreadAttributes()); - // Returns an identifier for the worker thread that can be used to do - // thread checks. - PlatformThreadRef GetThreadRef() const; + // Creates a started detached thread. The caller has to use external + // synchronization as nothing is provided by the PlatformThread construct. + static PlatformThread SpawnDetached( + std::function thread_function, + absl::string_view name, + ThreadAttributes attributes = ThreadAttributes()); - // Stops (joins) the spawned thread. - void Stop(); + // Returns the base platform thread handle of this thread. + absl::optional GetHandle() const; - protected: #if defined(WEBRTC_WIN) - // Exposed to derived classes to allow for special cases specific to Windows. + // Queue a Windows APC function that runs when the thread is alertable. bool QueueAPC(PAPCFUNC apc_function, ULONG_PTR data); #endif private: - void Run(); - bool SetPriority(ThreadPriority priority); + PlatformThread(Handle handle, bool joinable); + static PlatformThread SpawnThread(std::function thread_function, + absl::string_view name, + ThreadAttributes attributes, + bool joinable); - ThreadRunFunction const run_function_ = nullptr; - const ThreadPriority priority_ = kNormalPriority; - void* const obj_; - // TODO(pbos): Make sure call sites use string literals and update to a const - // char* instead of a std::string. 
- const std::string name_; - rtc::ThreadChecker thread_checker_; - rtc::ThreadChecker spawned_thread_checker_; -#if defined(WEBRTC_WIN) - static DWORD WINAPI StartThread(void* param); - - HANDLE thread_ = nullptr; - DWORD thread_id_ = 0; -#else - static void* StartThread(void* param); - - pthread_t thread_ = 0; -#endif // defined(WEBRTC_WIN) - RTC_DISALLOW_COPY_AND_ASSIGN(PlatformThread); + absl::optional handle_; + bool joinable_ = false; }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/random.h b/TMessagesProj/jni/voip/webrtc/rtc_base/random.h index 0e2d103cb..b3b9fd160 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/random.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/random.h @@ -66,7 +66,8 @@ class Random { double Exponential(double lambda); private: - // Outputs a nonzero 64-bit random number. + // Outputs a nonzero 64-bit random number using Xorshift algorithm. + // https://en.wikipedia.org/wiki/Xorshift uint64_t NextOutput() { state_ ^= state_ >> 12; state_ ^= state_ << 25; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rate_tracker.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/rate_tracker.cc index 5c827927f..e39dadb98 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rate_tracker.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rate_tracker.cc @@ -108,14 +108,18 @@ int64_t RateTracker::TotalSampleCount() const { } void RateTracker::AddSamples(int64_t sample_count) { + AddSamplesAtTime(Time(), sample_count); +} + +void RateTracker::AddSamplesAtTime(int64_t current_time_ms, + int64_t sample_count) { RTC_DCHECK_LE(0, sample_count); EnsureInitialized(); - int64_t current_time = Time(); // Advance the current bucket as needed for the current time, and reset // bucket counts as we advance. 
- for (size_t i = 0; - i <= bucket_count_ && - current_time >= bucket_start_time_milliseconds_ + bucket_milliseconds_; + for (size_t i = 0; i <= bucket_count_ && + current_time_ms >= + bucket_start_time_milliseconds_ + bucket_milliseconds_; ++i) { bucket_start_time_milliseconds_ += bucket_milliseconds_; current_bucket_ = NextBucketIndex(current_bucket_); @@ -125,7 +129,8 @@ void RateTracker::AddSamples(int64_t sample_count) { // the entire buffer of samples has been expired. bucket_start_time_milliseconds_ += bucket_milliseconds_ * - ((current_time - bucket_start_time_milliseconds_) / bucket_milliseconds_); + ((current_time_ms - bucket_start_time_milliseconds_) / + bucket_milliseconds_); // Add all samples in the bucket that includes the current time. sample_buckets_[current_bucket_] += sample_count; total_sample_count_ += sample_count; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rate_tracker.h b/TMessagesProj/jni/voip/webrtc/rtc_base/rate_tracker.h index e42d40f14..3b3c23538 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rate_tracker.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rate_tracker.h @@ -47,6 +47,9 @@ class RateTracker { // these samples, and increments the count for that bucket by sample_count. void AddSamples(int64_t sample_count); + // Increment count for bucket at |current_time_ms|. 
+ void AddSamplesAtTime(int64_t current_time_ms, int64_t sample_count); + protected: // overrideable for tests virtual int64_t Time() const; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h index ce18379d5..331132c56 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h @@ -13,6 +13,7 @@ #include #include +#include "api/scoped_refptr.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counter.h" @@ -33,9 +34,9 @@ class RefCountedObject : public T { std::forward(p1), std::forward(args)...) {} - virtual void AddRef() const { ref_count_.IncRef(); } + void AddRef() const override { ref_count_.IncRef(); } - virtual RefCountReleaseStatus Release() const { + RefCountReleaseStatus Release() const override { const auto status = ref_count_.DecRef(); if (status == RefCountReleaseStatus::kDroppedLastRef) { delete this; @@ -52,13 +53,146 @@ class RefCountedObject : public T { virtual bool HasOneRef() const { return ref_count_.HasOneRef(); } protected: - virtual ~RefCountedObject() {} + ~RefCountedObject() override {} mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedObject); }; +template +class FinalRefCountedObject final : public T { + public: + using T::T; + // Until c++17 compilers are allowed not to inherit the default constructors. + // Thus the default constructors are forwarded explicitly. 
+ FinalRefCountedObject() = default; + explicit FinalRefCountedObject(const T& other) : T(other) {} + explicit FinalRefCountedObject(T&& other) : T(std::move(other)) {} + FinalRefCountedObject(const FinalRefCountedObject&) = delete; + FinalRefCountedObject& operator=(const FinalRefCountedObject&) = delete; + + void AddRef() const { ref_count_.IncRef(); } + void Release() const { + if (ref_count_.DecRef() == RefCountReleaseStatus::kDroppedLastRef) { + delete this; + } + } + bool HasOneRef() const { return ref_count_.HasOneRef(); } + + private: + ~FinalRefCountedObject() = default; + + mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; +}; + +// General utilities for constructing a reference counted class and the +// appropriate reference count implementation for that class. +// +// These utilities select either the `RefCountedObject` implementation or +// `FinalRefCountedObject` depending on whether the to-be-shared class is +// derived from the RefCountInterface interface or not (respectively). + +// `make_ref_counted`: +// +// Use this when you want to construct a reference counted object of type T and +// get a `scoped_refptr<>` back. Example: +// +// auto p = make_ref_counted("bar", 123); +// +// For a class that inherits from RefCountInterface, this is equivalent to: +// +// auto p = scoped_refptr(new RefCountedObject("bar", 123)); +// +// If the class does not inherit from RefCountInterface, the example is +// equivalent to: +// +// auto p = scoped_refptr>( +// new FinalRefCountedObject("bar", 123)); +// +// In these cases, `make_ref_counted` reduces the amount of boilerplate code but +// also helps with the most commonly intended usage of RefCountedObject whereby +// methods for reference counting, are virtual and designed to satisfy the need +// of an interface. When such a need does not exist, it is more efficient to use +// the `FinalRefCountedObject` template, which does not add the vtable overhead. 
+// +// Note that in some cases, using RefCountedObject directly may still be what's +// needed. + +// `make_ref_counted` for classes that are convertible to RefCountInterface. +template < + typename T, + typename... Args, + typename std::enable_if::value, + T>::type* = nullptr> +scoped_refptr make_ref_counted(Args&&... args) { + return new RefCountedObject(std::forward(args)...); +} + +// `make_ref_counted` for complete classes that are not convertible to +// RefCountInterface. +template < + typename T, + typename... Args, + typename std::enable_if::value, + T>::type* = nullptr> +scoped_refptr> make_ref_counted(Args&&... args) { + return new FinalRefCountedObject(std::forward(args)...); +} + +// `Ref<>`, `Ref<>::Type` and `Ref<>::Ptr`: +// +// `Ref` is a type declaring utility that is compatible with `make_ref_counted` +// and can be used in classes and methods where it's more convenient (or +// readable) to have the compiler figure out the fully fleshed out type for a +// class rather than spell it out verbatim in all places the type occurs (which +// can mean maintenance work if the class layout changes). +// +// Usage examples: +// +// If you want to declare the parameter type that's always compatible with +// this code: +// +// Bar(make_ref_counted()); +// +// You can use `Ref<>::Ptr` to declare a compatible scoped_refptr type: +// +// void Bar(Ref::Ptr p); +// +// This might be more practically useful in templates though. +// +// In rare cases you might need to be able to declare a parameter that's fully +// compatible with the reference counted T type - and just using T* is not +// enough. 
To give a code example, we can declare a function, `Foo` that is +// compatible with this code: +// auto p = make_ref_counted(); +// Foo(p.get()); +// +// void Foo(Ref::Type* foo_ptr); +// +// Alternatively this would be: +// void Foo(Foo* foo_ptr); +// or +// void Foo(FinalRefCountedObject* foo_ptr); + +// Declares the approprate reference counted type for T depending on whether +// T is convertible to RefCountInterface or not. +// For classes that are convertible, the type will simply be T. +// For classes that cannot be converted to RefCountInterface, the type will be +// FinalRefCountedObject. +// This is most useful for declaring a scoped_refptr instance for a class +// that may or may not implement a virtual reference counted interface: +// * scoped_refptr::Type> my_ptr; +template +struct Ref { + typedef typename std::conditional< + std::is_convertible::value, + T, + FinalRefCountedObject>::type Type; + + typedef scoped_refptr Ptr; +}; + } // namespace rtc #endif // RTC_BASE_REF_COUNTED_OBJECT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc index 04ae99685..937defc6c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc @@ -13,7 +13,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/time_utils.h" @@ -22,14 +21,14 @@ namespace rtc { scoped_refptr RTCCertificate::Create( std::unique_ptr identity) { - return new RefCountedObject(identity.release()); + return new RTCCertificate(identity.release()); } RTCCertificate::RTCCertificate(SSLIdentity* identity) : identity_(identity) { RTC_DCHECK(identity_); } -RTCCertificate::~RTCCertificate() {} +RTCCertificate::~RTCCertificate() = default; uint64_t RTCCertificate::Expires() const { int64_t expires = 
GetSSLCertificate().CertificateExpirationTime(); @@ -67,7 +66,7 @@ scoped_refptr RTCCertificate::FromPEM( SSLIdentity::CreateFromPEMStrings(pem.private_key(), pem.certificate())); if (!identity) return nullptr; - return new RefCountedObject(identity.release()); + return new RTCCertificate(identity.release()); } bool RTCCertificate::operator==(const RTCCertificate& certificate) const { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.h index 102385e5a..ce9aa4751 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.h @@ -16,8 +16,8 @@ #include #include +#include "api/ref_counted_base.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -49,7 +49,8 @@ class RTCCertificatePEM { // A thin abstraction layer between "lower level crypto stuff" like // SSLCertificate and WebRTC usage. Takes ownership of some lower level objects, // reference counting protects these from premature destruction. -class RTC_EXPORT RTCCertificate : public RefCountInterface { +class RTC_EXPORT RTCCertificate final + : public RefCountedNonVirtual { public: // Takes ownership of |identity|. static scoped_refptr Create( @@ -82,12 +83,14 @@ class RTC_EXPORT RTCCertificate : public RefCountInterface { protected: explicit RTCCertificate(SSLIdentity* identity); - ~RTCCertificate() override; + + friend class RefCountedNonVirtual; + ~RTCCertificate(); private: // The SSLIdentity is the owner of the SSLCertificate. To protect our // GetSSLCertificate() we take ownership of |identity_|. 
- std::unique_ptr identity_; + const std::unique_ptr identity_; }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc index d95b64539..5e1fdcac3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc @@ -51,7 +51,7 @@ scoped_refptr RTCCertificateGenerator::GenerateCertificate( expires_s = std::min(expires_s, kYearInSeconds); // TODO(torbjorng): Stop using |time_t|, its type is unspecified. It it safe // to assume it can hold up to a year's worth of seconds (and more), but - // |SSLIdentity::Generate| should stop relying on |time_t|. + // |SSLIdentity::Create| should stop relying on |time_t|. // See bugs.webrtc.org/5720. time_t cert_lifetime_s = static_cast(expires_s); identity = SSLIdentity::Create(kIdentityName, key_params, cert_lifetime_s); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h index c2d1e3d29..6b3ad5e9f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h @@ -59,6 +59,8 @@ #define ECONNREFUSED WSAECONNREFUSED #undef EHOSTUNREACH #define EHOSTUNREACH WSAEHOSTUNREACH +#undef ENETUNREACH +#define ENETUNREACH WSAENETUNREACH #define SOCKET_EACCES WSAEACCES #endif // WEBRTC_WIN diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.cc index 639be52c5..2996ede9d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.cc @@ -178,6 +178,16 @@ std::string SocketAddress::ToSensitiveString() const { return sb.str(); } +std::string SocketAddress::ToResolvedSensitiveString() const { + if (IsUnresolvedIP()) { + return ""; + } + char buf[1024]; + rtc::SimpleStringBuilder sb(buf); + sb << 
ipaddr().ToSensitiveString() << ":" << port(); + return sb.str(); +} + bool SocketAddress::FromString(const std::string& str) { if (str.at(0) == '[') { std::string::size_type closebracket = str.rfind(']'); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.h index 6ee3d37bc..570a71281 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_address.h @@ -12,9 +12,9 @@ #define RTC_BASE_SOCKET_ADDRESS_H_ #include -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST #include // no-presubmit-check TODO(webrtc:8982) -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST #include "rtc_base/ip_address.h" #include "rtc_base/system/rtc_export.h" @@ -124,15 +124,19 @@ class RTC_EXPORT SocketAddress { // Same as ToString but anonymizes it by hiding the last part. std::string ToSensitiveString() const; + // Returns hostname:port string if address is resolved, otherwise returns + // empty string. + std::string ToResolvedSensitiveString() const; + // Parses hostname:port and [hostname]:port. bool FromString(const std::string& str); -#ifdef UNIT_TEST +#ifdef WEBRTC_UNIT_TEST inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) std::ostream& os) { // no-presubmit-check TODO(webrtc:8982) return os << HostAsURIString() << ":" << port(); } -#endif // UNIT_TEST +#endif // WEBRTC_UNIT_TEST // Determines whether this represents a missing / any IP address. // That is, 0.0.0.0 or ::. 
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_server.h index 98971e4d8..face04dbc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_server.h @@ -33,9 +33,10 @@ class SocketServer : public SocketFactory { static const int kForever = -1; static std::unique_ptr CreateDefault(); - // When the socket server is installed into a Thread, this function is - // called to allow the socket server to use the thread's message queue for - // any messaging that it might need to perform. + // When the socket server is installed into a Thread, this function is called + // to allow the socket server to use the thread's message queue for any + // messaging that it might need to perform. It is also called with a null + // argument before the thread is destroyed. virtual void SetMessageQueue(Thread* queue) {} // Sleeps until: diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.cc index db9097b9a..3f7013ee1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.cc @@ -16,7 +16,12 @@ #include "absl/algorithm/container.h" #include "rtc_base/checks.h" -#include "rtc_base/openssl_certificate.h" +#include "rtc_base/openssl.h" +#ifdef OPENSSL_IS_BORINGSSL +#include "rtc_base/boringssl_identity.h" +#else +#include "rtc_base/openssl_identity.h" +#endif #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/third_party/base64/base64.h" @@ -117,7 +122,11 @@ std::unique_ptr SSLCertChain::GetStats() const { // static std::unique_ptr SSLCertificate::FromPEMString( const std::string& pem_string) { +#ifdef OPENSSL_IS_BORINGSSL + return BoringSSLCertificate::FromPEMString(pem_string); +#else return OpenSSLCertificate::FromPEMString(pem_string); +#endif } } // namespace rtc diff --git 
a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.cc index 5b261e0f5..358402eb0 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.cc @@ -103,9 +103,6 @@ SSLFingerprint::SSLFingerprint(const std::string& algorithm, size_t digest_len) : SSLFingerprint(algorithm, MakeArrayView(digest_in, digest_len)) {} -SSLFingerprint::SSLFingerprint(const SSLFingerprint& from) - : algorithm(from.algorithm), digest(from.digest) {} - bool SSLFingerprint::operator==(const SSLFingerprint& other) const { return algorithm == other.algorithm && digest == other.digest; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.h index d65d665d8..add3ab791 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_fingerprint.h @@ -57,7 +57,8 @@ struct RTC_EXPORT SSLFingerprint { const uint8_t* digest_in, size_t digest_len); - SSLFingerprint(const SSLFingerprint& from); + SSLFingerprint(const SSLFingerprint& from) = default; + SSLFingerprint& operator=(const SSLFingerprint& from) = default; bool operator==(const SSLFingerprint& other) const; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.cc index 09d25d228..8d93ecfe2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.cc @@ -11,12 +11,16 @@ // Handling of certificates and keypairs for SSLStreamAdapter's peer mode. 
#include "rtc_base/ssl_identity.h" +#include #include #include -#include #include "rtc_base/checks.h" +#ifdef OPENSSL_IS_BORINGSSL +#include "rtc_base/boringssl_identity.h" +#else #include "rtc_base/openssl_identity.h" +#endif #include "rtc_base/ssl_certificate.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/third_party/base64/base64.h" @@ -213,28 +217,36 @@ std::string SSLIdentity::DerToPem(const std::string& pem_type, std::unique_ptr SSLIdentity::Create(const std::string& common_name, const KeyParams& key_param, time_t certificate_lifetime) { +#ifdef OPENSSL_IS_BORINGSSL + return BoringSSLIdentity::CreateWithExpiration(common_name, key_param, + certificate_lifetime); +#else return OpenSSLIdentity::CreateWithExpiration(common_name, key_param, certificate_lifetime); +#endif } // static std::unique_ptr SSLIdentity::Create(const std::string& common_name, const KeyParams& key_param) { - return OpenSSLIdentity::CreateWithExpiration( - common_name, key_param, kDefaultCertificateLifetimeInSeconds); + return Create(common_name, key_param, kDefaultCertificateLifetimeInSeconds); } // static std::unique_ptr SSLIdentity::Create(const std::string& common_name, KeyType key_type) { - return OpenSSLIdentity::CreateWithExpiration( - common_name, KeyParams(key_type), kDefaultCertificateLifetimeInSeconds); + return Create(common_name, KeyParams(key_type), + kDefaultCertificateLifetimeInSeconds); } // static std::unique_ptr SSLIdentity::CreateForTest( const SSLIdentityParams& params) { +#ifdef OPENSSL_IS_BORINGSSL + return BoringSSLIdentity::CreateForTest(params); +#else return OpenSSLIdentity::CreateForTest(params); +#endif } // Construct an identity from a private key and a certificate. 
@@ -242,7 +254,11 @@ std::unique_ptr SSLIdentity::CreateForTest( std::unique_ptr SSLIdentity::CreateFromPEMStrings( const std::string& private_key, const std::string& certificate) { +#ifdef OPENSSL_IS_BORINGSSL + return BoringSSLIdentity::CreateFromPEMStrings(private_key, certificate); +#else return OpenSSLIdentity::CreateFromPEMStrings(private_key, certificate); +#endif } // Construct an identity from a private key and a certificate chain. @@ -250,13 +266,23 @@ std::unique_ptr SSLIdentity::CreateFromPEMStrings( std::unique_ptr SSLIdentity::CreateFromPEMChainStrings( const std::string& private_key, const std::string& certificate_chain) { +#ifdef OPENSSL_IS_BORINGSSL + return BoringSSLIdentity::CreateFromPEMChainStrings(private_key, + certificate_chain); +#else return OpenSSLIdentity::CreateFromPEMChainStrings(private_key, certificate_chain); +#endif } bool operator==(const SSLIdentity& a, const SSLIdentity& b) { +#ifdef OPENSSL_IS_BORINGSSL + return static_cast(a) == + static_cast(b); +#else return static_cast(a) == static_cast(b); +#endif } bool operator!=(const SSLIdentity& a, const SSLIdentity& b) { return !(a == b); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.h index d078b045a..a9167ef5e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_identity.h @@ -18,7 +18,6 @@ #include #include -#include "rtc_base/deprecation.h" #include "rtc_base/system/rtc_export.h" namespace rtc { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.cc index 354622e6f..5730af63d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.cc @@ -95,11 +95,6 @@ std::unique_ptr SSLStreamAdapter::Create( return std::make_unique(std::move(stream)); } 
-SSLStreamAdapter::SSLStreamAdapter(std::unique_ptr stream) - : StreamAdapterInterface(stream.release()) {} - -SSLStreamAdapter::~SSLStreamAdapter() {} - bool SSLStreamAdapter::GetSslCipherSuite(int* cipher_suite) { return false; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h index 7bff72651..6b44c7645 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_stream_adapter.h @@ -18,7 +18,6 @@ #include #include "absl/memory/memory.h" -#include "rtc_base/deprecation.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/stream.h" @@ -119,7 +118,7 @@ enum { SSE_MSG_TRUNC = 0xff0001 }; // Used to send back UMA histogram value. Logged when Dtls handshake fails. enum class SSLHandshakeError { UNKNOWN, INCOMPATIBLE_CIPHERSUITE, MAX_VALUE }; -class SSLStreamAdapter : public StreamAdapterInterface { +class SSLStreamAdapter : public StreamInterface, public sigslot::has_slots<> { public: // Instantiate an SSLStreamAdapter wrapping the given stream, // (using the selected implementation for the platform). @@ -127,8 +126,8 @@ class SSLStreamAdapter : public StreamAdapterInterface { static std::unique_ptr Create( std::unique_ptr stream); - explicit SSLStreamAdapter(std::unique_ptr stream); - ~SSLStreamAdapter() override; + SSLStreamAdapter() = default; + ~SSLStreamAdapter() override = default; // Specify our SSL identity: key and certificate. SSLStream takes ownership // of the SSLIdentity object and will free it when appropriate. 
Should be diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc index ee72f8d2b..30c767888 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.cc @@ -49,68 +49,4 @@ bool StreamInterface::Flush() { StreamInterface::StreamInterface() {} -/////////////////////////////////////////////////////////////////////////////// -// StreamAdapterInterface -/////////////////////////////////////////////////////////////////////////////// - -StreamAdapterInterface::StreamAdapterInterface(StreamInterface* stream, - bool owned) - : stream_(stream), owned_(owned) { - if (nullptr != stream_) - stream_->SignalEvent.connect(this, &StreamAdapterInterface::OnEvent); -} - -StreamState StreamAdapterInterface::GetState() const { - return stream_->GetState(); -} -StreamResult StreamAdapterInterface::Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) { - return stream_->Read(buffer, buffer_len, read, error); -} -StreamResult StreamAdapterInterface::Write(const void* data, - size_t data_len, - size_t* written, - int* error) { - return stream_->Write(data, data_len, written, error); -} -void StreamAdapterInterface::Close() { - stream_->Close(); -} - -bool StreamAdapterInterface::Flush() { - return stream_->Flush(); -} - -void StreamAdapterInterface::Attach(StreamInterface* stream, bool owned) { - if (nullptr != stream_) - stream_->SignalEvent.disconnect(this); - if (owned_) - delete stream_; - stream_ = stream; - owned_ = owned; - if (nullptr != stream_) - stream_->SignalEvent.connect(this, &StreamAdapterInterface::OnEvent); -} - -StreamInterface* StreamAdapterInterface::Detach() { - if (nullptr != stream_) - stream_->SignalEvent.disconnect(this); - StreamInterface* stream = stream_; - stream_ = nullptr; - return stream; -} - -StreamAdapterInterface::~StreamAdapterInterface() { - if (owned_) - delete stream_; -} - -void 
StreamAdapterInterface::OnEvent(StreamInterface* stream, - int events, - int err) { - SignalEvent(this, events, err); -} - } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h index 9bf11a240..70de65a75 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h @@ -115,50 +115,6 @@ class RTC_EXPORT StreamInterface { RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterface); }; -/////////////////////////////////////////////////////////////////////////////// -// StreamAdapterInterface is a convenient base-class for adapting a stream. -// By default, all operations are pass-through. Override the methods that you -// require adaptation. Streams should really be upgraded to reference-counted. -// In the meantime, use the owned flag to indicate whether the adapter should -// own the adapted stream. -/////////////////////////////////////////////////////////////////////////////// - -class StreamAdapterInterface : public StreamInterface, - public sigslot::has_slots<> { - public: - explicit StreamAdapterInterface(StreamInterface* stream, bool owned = true); - - // Core Stream Interface - StreamState GetState() const override; - StreamResult Read(void* buffer, - size_t buffer_len, - size_t* read, - int* error) override; - StreamResult Write(const void* data, - size_t data_len, - size_t* written, - int* error) override; - void Close() override; - - bool Flush() override; - - void Attach(StreamInterface* stream, bool owned = true); - StreamInterface* Detach(); - - protected: - ~StreamAdapterInterface() override; - - // Note that the adapter presents itself as the origin of the stream events, - // since users of the adapter may not recognize the adapted object. 
- virtual void OnEvent(StreamInterface* stream, int events, int err); - StreamInterface* stream() { return stream_; } - - private: - StreamInterface* stream_; - bool owned_; - RTC_DISALLOW_COPY_AND_ASSIGN(StreamAdapterInterface); -}; - } // namespace rtc #endif // RTC_BASE_STREAM_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/swap_queue.h b/TMessagesProj/jni/voip/webrtc/rtc_base/swap_queue.h index 9eac49a93..3c8149c16 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/swap_queue.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/swap_queue.h @@ -17,8 +17,8 @@ #include #include +#include "absl/base/attributes.h" #include "rtc_base/checks.h" -#include "rtc_base/system/unused.h" namespace webrtc { @@ -127,7 +127,7 @@ class SwapQueue { // When specified, the T given in *input must pass the ItemVerifier() test. // The contents of *input after the call are then also guaranteed to pass the // ItemVerifier() test. - bool Insert(T* input) RTC_WARN_UNUSED_RESULT { + ABSL_MUST_USE_RESULT bool Insert(T* input) { RTC_DCHECK(input); RTC_DCHECK(queue_item_verifier_(*input)); @@ -168,7 +168,7 @@ class SwapQueue { // empty). When specified, The T given in *output must pass the ItemVerifier() // test and the contents of *output after the call are then also guaranteed to // pass the ItemVerifier() test. 
- bool Remove(T* output) RTC_WARN_UNUSED_RESULT { + ABSL_MUST_USE_RESULT bool Remove(T* output) { RTC_DCHECK(output); RTC_DCHECK(queue_item_verifier_(*output)); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h index 620fe74e4..e1512e96c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex.h @@ -13,12 +13,17 @@ #include +#include "absl/base/attributes.h" #include "absl/base/const_init.h" #include "rtc_base/checks.h" -#include "rtc_base/system/unused.h" #include "rtc_base/thread_annotations.h" -#if defined(WEBRTC_ABSL_MUTEX) +#if defined(WEBRTC_RACE_CHECK_MUTEX) +// To use the race check mutex, define WEBRTC_RACE_CHECK_MUTEX globally. This +// also adds a dependency to absl::Mutex from logging.cc due to concurrent +// invocation of the static logging system. +#include "rtc_base/synchronization/mutex_race_check.h" +#elif defined(WEBRTC_ABSL_MUTEX) #include "rtc_base/synchronization/mutex_abseil.h" // nogncheck #elif defined(WEBRTC_WIN) #include "rtc_base/synchronization/mutex_critical_section.h" @@ -41,7 +46,7 @@ class RTC_LOCKABLE Mutex final { void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { impl_.Lock(); } - RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + ABSL_MUST_USE_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { return impl_.TryLock(); } void Unlock() RTC_UNLOCK_FUNCTION() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_abseil.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_abseil.h index 4ad1d07ee..9247065ae 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_abseil.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_abseil.h @@ -11,6 +11,7 @@ #ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_ #define RTC_BASE_SYNCHRONIZATION_MUTEX_ABSEIL_H_ +#include 
"absl/base/attributes.h" #include "absl/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -23,7 +24,7 @@ class RTC_LOCKABLE MutexImpl final { MutexImpl& operator=(const MutexImpl&) = delete; void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { mutex_.Lock(); } - RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + ABSL_MUST_USE_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { return mutex_.TryLock(); } void Unlock() RTC_UNLOCK_FUNCTION() { mutex_.Unlock(); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_critical_section.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_critical_section.h index d20679498..cb3d6a095 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_critical_section.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_critical_section.h @@ -23,6 +23,7 @@ #include // must come after windows headers. // clang-format on +#include "absl/base/attributes.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -37,7 +38,7 @@ class RTC_LOCKABLE MutexImpl final { void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { EnterCriticalSection(&critical_section_); } - RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + ABSL_MUST_USE_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { return TryEnterCriticalSection(&critical_section_) != FALSE; } void Unlock() RTC_UNLOCK_FUNCTION() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_pthread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_pthread.h index c9496e72c..8898ca534 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_pthread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_pthread.h @@ -18,6 +18,7 @@ #include #endif +#include "absl/base/attributes.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -39,7 +40,7 @@ class RTC_LOCKABLE MutexImpl final { 
~MutexImpl() { pthread_mutex_destroy(&mutex_); } void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { pthread_mutex_lock(&mutex_); } - RTC_WARN_UNUSED_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + ABSL_MUST_USE_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { return pthread_mutex_trylock(&mutex_) == 0; } void Unlock() RTC_UNLOCK_FUNCTION() { pthread_mutex_unlock(&mutex_); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_race_check.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_race_check.h new file mode 100644 index 000000000..cada6292b --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/mutex_race_check.h @@ -0,0 +1,65 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYNCHRONIZATION_MUTEX_RACE_CHECK_H_ +#define RTC_BASE_SYNCHRONIZATION_MUTEX_RACE_CHECK_H_ + +#include + +#include "absl/base/attributes.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/unused.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// This implementation class is useful when a consuming project can guarantee +// that all WebRTC invocation is happening serially. Additionally, the consuming +// project cannot use WebRTC code that spawn threads or task queues. +// +// The class internally check fails on Lock() if it finds the consumer actually +// invokes WebRTC concurrently. +// +// To use the race check mutex, define WEBRTC_RACE_CHECK_MUTEX globally. 
This +// also adds a dependency to absl::Mutex from logging.cc because even though +// objects are invoked serially, the logging is static and invoked concurrently +// and hence needs protection. +class RTC_LOCKABLE MutexImpl final { + public: + MutexImpl() = default; + MutexImpl(const MutexImpl&) = delete; + MutexImpl& operator=(const MutexImpl&) = delete; + + void Lock() RTC_EXCLUSIVE_LOCK_FUNCTION() { + bool was_free = free_.exchange(false, std::memory_order_acquire); + RTC_CHECK(was_free) + << "WEBRTC_RACE_CHECK_MUTEX: mutex locked concurrently."; + } + ABSL_MUST_USE_RESULT bool TryLock() RTC_EXCLUSIVE_TRYLOCK_FUNCTION(true) { + bool was_free = free_.exchange(false, std::memory_order_acquire); + return was_free; + } + void Unlock() RTC_UNLOCK_FUNCTION() { + free_.store(true, std::memory_order_release); + } + + private: + // Release-acquire ordering is used. + // - In the Lock methods we're guaranteeing that reads and writes happening + // after the (Try)Lock don't appear to have happened before the Lock (acquire + // ordering). + // - In the Unlock method we're guaranteeing that reads and writes happening + // before the Unlock don't appear to happen after it (release ordering). + std::atomic free_{true}; +}; + +} // namespace webrtc + +#endif // RTC_BASE_SYNCHRONIZATION_MUTEX_RACE_CHECK_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker.h deleted file mode 100644 index ecf8490ce..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker.h +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_ -#define RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_ - -#include - -#include "api/task_queue/task_queue_base.h" -#include "rtc_base/platform_thread_types.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/system/rtc_export.h" -#include "rtc_base/thread_annotations.h" - -namespace webrtc { -// Real implementation of SequenceChecker, for use in debug mode, or -// for temporary use in release mode (e.g. to RTC_CHECK on a threading issue -// seen only in the wild). -// -// Note: You should almost always use the SequenceChecker class to get the -// right version for your build configuration. -class RTC_EXPORT SequenceCheckerImpl { - public: - SequenceCheckerImpl(); - ~SequenceCheckerImpl(); - - bool IsCurrent() const; - // Changes the task queue or thread that is checked for in IsCurrent. This can - // be useful when an object may be created on one task queue / thread and then - // used exclusively on another thread. - void Detach(); - - // Returns a string that is formatted to match with the error string printed - // by RTC_CHECK() when a condition is not met. - // This is used in conjunction with the RTC_DCHECK_RUN_ON() macro. - std::string ExpectationToString() const; - - private: - mutable Mutex lock_; - // These are mutable so that IsCurrent can set them. - mutable bool attached_ RTC_GUARDED_BY(lock_); - mutable rtc::PlatformThreadRef valid_thread_ RTC_GUARDED_BY(lock_); - mutable const TaskQueueBase* valid_queue_ RTC_GUARDED_BY(lock_); - mutable const void* valid_system_queue_ RTC_GUARDED_BY(lock_); -}; - -// Do nothing implementation, for use in release mode. -// -// Note: You should almost always use the SequenceChecker class to get the -// right version for your build configuration. 
-class SequenceCheckerDoNothing { - public: - bool IsCurrent() const { return true; } - void Detach() {} -}; - -// SequenceChecker is a helper class used to help verify that some methods -// of a class are called on the same task queue or thread. A -// SequenceChecker is bound to a a task queue if the object is -// created on a task queue, or a thread otherwise. -// -// -// Example: -// class MyClass { -// public: -// void Foo() { -// RTC_DCHECK_RUN_ON(sequence_checker_); -// ... (do stuff) ... -// } -// -// private: -// SequenceChecker sequence_checker_; -// } -// -// In Release mode, IsCurrent will always return true. -#if RTC_DCHECK_IS_ON -class RTC_LOCKABLE SequenceChecker : public SequenceCheckerImpl {}; -#else -class RTC_LOCKABLE SequenceChecker : public SequenceCheckerDoNothing {}; -#endif // RTC_ENABLE_THREAD_CHECKER - -namespace webrtc_seq_check_impl { -// Helper class used by RTC_DCHECK_RUN_ON (see example usage below). -class RTC_SCOPED_LOCKABLE SequenceCheckerScope { - public: - template - explicit SequenceCheckerScope(const ThreadLikeObject* thread_like_object) - RTC_EXCLUSIVE_LOCK_FUNCTION(thread_like_object) {} - SequenceCheckerScope(const SequenceCheckerScope&) = delete; - SequenceCheckerScope& operator=(const SequenceCheckerScope&) = delete; - ~SequenceCheckerScope() RTC_UNLOCK_FUNCTION() {} - - template - static bool IsCurrent(const ThreadLikeObject* thread_like_object) { - return thread_like_object->IsCurrent(); - } -}; -} // namespace webrtc_seq_check_impl -} // namespace webrtc - -// RTC_RUN_ON/RTC_GUARDED_BY/RTC_DCHECK_RUN_ON macros allows to annotate -// variables are accessed from same thread/task queue. -// Using tools designed to check mutexes, it checks at compile time everywhere -// variable is access, there is a run-time dcheck thread/task queue is correct. 
-// -// class ThreadExample { -// public: -// void NeedVar1() { -// RTC_DCHECK_RUN_ON(network_thread_); -// transport_->Send(); -// } -// -// private: -// rtc::Thread* network_thread_; -// int transport_ RTC_GUARDED_BY(network_thread_); -// }; -// -// class SequenceCheckerExample { -// public: -// int CalledFromPacer() RTC_RUN_ON(pacer_sequence_checker_) { -// return var2_; -// } -// -// void CallMeFromPacer() { -// RTC_DCHECK_RUN_ON(&pacer_sequence_checker_) -// << "Should be called from pacer"; -// CalledFromPacer(); -// } -// -// private: -// int pacer_var_ RTC_GUARDED_BY(pacer_sequence_checker_); -// SequenceChecker pacer_sequence_checker_; -// }; -// -// class TaskQueueExample { -// public: -// class Encoder { -// public: -// rtc::TaskQueue* Queue() { return encoder_queue_; } -// void Encode() { -// RTC_DCHECK_RUN_ON(encoder_queue_); -// DoSomething(var_); -// } -// -// private: -// rtc::TaskQueue* const encoder_queue_; -// Frame var_ RTC_GUARDED_BY(encoder_queue_); -// }; -// -// void Encode() { -// // Will fail at runtime when DCHECK is enabled: -// // encoder_->Encode(); -// // Will work: -// rtc::scoped_refptr encoder = encoder_; -// encoder_->Queue()->PostTask([encoder] { encoder->Encode(); }); -// } -// -// private: -// rtc::scoped_refptr encoder_; -// } - -// Document if a function expected to be called from same thread/task queue. -#define RTC_RUN_ON(x) \ - RTC_THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(x)) - -namespace webrtc { -std::string ExpectationToString(const webrtc::SequenceChecker* checker); - -// Catch-all implementation for types other than explicitly supported above. 
-template -std::string ExpectationToString(const ThreadLikeObject*) { - return std::string(); -} - -} // namespace webrtc - -#define RTC_DCHECK_RUN_ON(x) \ - webrtc::webrtc_seq_check_impl::SequenceCheckerScope seq_check_scope(x); \ - RTC_DCHECK((x)->IsCurrent()) << webrtc::ExpectationToString(x) - -#endif // RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc similarity index 92% rename from TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker.cc rename to TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc index 1de26cf0f..7b66d8020 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc @@ -7,15 +7,19 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/synchronization/sequence_checker_internal.h" + +#include #if defined(WEBRTC_MAC) #include #endif +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { +namespace webrtc_sequence_checker_internal { namespace { // On Mac, returns the label of the current dispatch queue; elsewhere, return // null. 
@@ -29,21 +33,12 @@ const void* GetSystemQueueRef() { } // namespace -std::string ExpectationToString(const webrtc::SequenceChecker* checker) { -#if RTC_DCHECK_IS_ON - return checker->ExpectationToString(); -#endif - return std::string(); -} - SequenceCheckerImpl::SequenceCheckerImpl() : attached_(true), valid_thread_(rtc::CurrentThreadRef()), valid_queue_(TaskQueueBase::Current()), valid_system_queue_(GetSystemQueueRef()) {} -SequenceCheckerImpl::~SequenceCheckerImpl() = default; - bool SequenceCheckerImpl::IsCurrent() const { const TaskQueueBase* const current_queue = TaskQueueBase::Current(); const rtc::PlatformThreadRef current_thread = rtc::CurrentThreadRef(); @@ -109,4 +104,12 @@ std::string SequenceCheckerImpl::ExpectationToString() const { } #endif // RTC_DCHECK_IS_ON +std::string ExpectationToString(const SequenceCheckerImpl* checker) { +#if RTC_DCHECK_IS_ON + return checker->ExpectationToString(); +#endif + return std::string(); +} + +} // namespace webrtc_sequence_checker_internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.h b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.h new file mode 100644 index 000000000..f7ac6de12 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/synchronization/sequence_checker_internal.h @@ -0,0 +1,93 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_INTERNAL_H_ +#define RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_INTERNAL_H_ + +#include +#include + +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/platform_thread_types.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { +namespace webrtc_sequence_checker_internal { + +// Real implementation of SequenceChecker, for use in debug mode, or +// for temporary use in release mode (e.g. to RTC_CHECK on a threading issue +// seen only in the wild). +// +// Note: You should almost always use the SequenceChecker class to get the +// right version for your build configuration. +class RTC_EXPORT SequenceCheckerImpl { + public: + SequenceCheckerImpl(); + ~SequenceCheckerImpl() = default; + + bool IsCurrent() const; + // Changes the task queue or thread that is checked for in IsCurrent. This can + // be useful when an object may be created on one task queue / thread and then + // used exclusively on another thread. + void Detach(); + + // Returns a string that is formatted to match with the error string printed + // by RTC_CHECK() when a condition is not met. + // This is used in conjunction with the RTC_DCHECK_RUN_ON() macro. + std::string ExpectationToString() const; + + private: + mutable Mutex lock_; + // These are mutable so that IsCurrent can set them. + mutable bool attached_ RTC_GUARDED_BY(lock_); + mutable rtc::PlatformThreadRef valid_thread_ RTC_GUARDED_BY(lock_); + mutable const TaskQueueBase* valid_queue_ RTC_GUARDED_BY(lock_); + mutable const void* valid_system_queue_ RTC_GUARDED_BY(lock_); +}; + +// Do nothing implementation, for use in release mode. +// +// Note: You should almost always use the SequenceChecker class to get the +// right version for your build configuration. 
+class SequenceCheckerDoNothing { + public: + bool IsCurrent() const { return true; } + void Detach() {} +}; + +// Helper class used by RTC_DCHECK_RUN_ON (see example usage below). +class RTC_SCOPED_LOCKABLE SequenceCheckerScope { + public: + template + explicit SequenceCheckerScope(const ThreadLikeObject* thread_like_object) + RTC_EXCLUSIVE_LOCK_FUNCTION(thread_like_object) {} + SequenceCheckerScope(const SequenceCheckerScope&) = delete; + SequenceCheckerScope& operator=(const SequenceCheckerScope&) = delete; + ~SequenceCheckerScope() RTC_UNLOCK_FUNCTION() {} + + template + static bool IsCurrent(const ThreadLikeObject* thread_like_object) { + return thread_like_object->IsCurrent(); + } +}; + +std::string ExpectationToString(const SequenceCheckerImpl* checker); + +// Catch-all implementation for types other than explicitly supported above. +template +std::string ExpectationToString(const ThreadLikeObject*) { + return std::string(); +} + +} // namespace webrtc_sequence_checker_internal +} // namespace webrtc + +#endif // RTC_BASE_SYNCHRONIZATION_SEQUENCE_CHECKER_INTERNAL_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/arch.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/arch.h index ed216e660..be2367b85 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/arch.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/arch.h @@ -15,8 +15,9 @@ #define RTC_BASE_SYSTEM_ARCH_H_ // Processor architecture detection. 
For more info on what's defined, see: -// http://msdn.microsoft.com/en-us/library/b0084kay.aspx -// http://www.agner.org/optimize/calling_conventions.pdf +// https://docs.microsoft.com/en-us/cpp/preprocessor/predefined-macros +// https://www.agner.org/optimize/calling_conventions.pdf +// https://sourceforge.net/p/predef/wiki/Architectures/ // or with gcc, run: "echo | gcc -E -dM -" #if defined(_M_X64) || defined(__x86_64__) #define WEBRTC_ARCH_X86_FAMILY @@ -32,17 +33,45 @@ #define WEBRTC_ARCH_X86 #define WEBRTC_ARCH_32_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN -#elif defined(__ARMEL__) +#elif defined(_M_ARM) || defined(__ARMEL__) #define WEBRTC_ARCH_ARM_FAMILY #define WEBRTC_ARCH_32_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN -#elif defined(__MIPSEL__) +#elif defined(__MIPSEL__) || defined(__MIPSEB__) #define WEBRTC_ARCH_MIPS_FAMILY #if defined(__LP64__) #define WEBRTC_ARCH_64_BITS #else #define WEBRTC_ARCH_32_BITS #endif +#if defined(__MIPSEL__) +#define WEBRTC_ARCH_LITTLE_ENDIAN +#else +#define WEBRTC_ARCH_BIG_ENDIAN +#endif +#elif defined(__PPC__) +#if defined(__PPC64__) +#define WEBRTC_ARCH_64_BITS +#else +#define WEBRTC_ARCH_32_BITS +#endif +#if defined(__LITTLE_ENDIAN__) +#define WEBRTC_ARCH_LITTLE_ENDIAN +#else +#define WEBRTC_ARCH_BIG_ENDIAN +#endif +#elif defined(__sparc) || defined(__sparc__) +#if __SIZEOF_LONG__ == 8 +#define WEBRTC_ARCH_64_BITS +#else +#define WEBRTC_ARCH_32_BITS +#endif +#define WEBRTC_ARCH_BIG_ENDIAN +#elif defined(__riscv) && __riscv_xlen == 64 +#define WEBRTC_ARCH_64_BITS +#define WEBRTC_ARCH_LITTLE_ENDIAN +#elif defined(__riscv) && __riscv_xlen == 32 +#define WEBRTC_ARCH_32_BITS #define WEBRTC_ARCH_LITTLE_ENDIAN #elif defined(__pnacl__) #define WEBRTC_ARCH_32_BITS diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.cc index 2828790e0..3e4931579 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.cc +++ 
b/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.cc @@ -89,6 +89,22 @@ bool FileWrapper::SeekTo(int64_t position) { return fseek(file_, rtc::checked_cast(position), SEEK_SET) == 0; } +long FileWrapper::FileSize() { + if (file_ == nullptr) + return -1; + long original_position = ftell(file_); + if (original_position < 0) + return -1; + int seek_error = fseek(file_, 0, SEEK_END); + if (seek_error) + return -1; + long file_size = ftell(file_); + seek_error = fseek(file_, original_position, SEEK_SET); + if (seek_error) + return -1; + return file_size; +} + bool FileWrapper::Flush() { RTC_DCHECK(file_); return fflush(file_) == 0; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.h index 42c463cb1..0b293d9a8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/file_wrapper.h @@ -38,7 +38,6 @@ class FileWrapper final { static FileWrapper OpenReadOnly(const std::string& file_name_utf8); static FileWrapper OpenWriteOnly(const char* file_name_utf8, int* error = nullptr); - static FileWrapper OpenWriteOnly(const std::string& file_name_utf8, int* error = nullptr); @@ -87,6 +86,11 @@ class FileWrapper final { // Seek to given position. bool SeekTo(int64_t position); + // Returns the file size or -1 if a size could not be determined. + // (A file size might not exists for non-seekable files or file-like + // objects, for example /dev/tty on unix.) + long FileSize(); + // Returns number of bytes read. Short count indicates EOF or error. 
size_t Read(void* buf, size_t length); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_unique_address.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_unique_address.h new file mode 100644 index 000000000..77e7a9952 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/no_unique_address.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYSTEM_NO_UNIQUE_ADDRESS_H_ +#define RTC_BASE_SYSTEM_NO_UNIQUE_ADDRESS_H_ + +// RTC_NO_UNIQUE_ADDRESS is a portable annotation to tell the compiler that +// a data member need not have an address distinct from all other non-static +// data members of its class. +// It allows empty types to actually occupy zero bytes as class members, +// instead of occupying at least one byte just so that they get their own +// address. There is almost never any reason not to use it on class members +// that could possibly be empty. +// The macro expands to [[no_unique_address]] if the compiler supports the +// attribute, it expands to nothing otherwise. +// Clang should supports this attribute since C++11, while other compilers +// should add support for it starting from C++20. Among clang compilers, +// clang-cl doesn't support it yet and support is unclear also when the target +// platform is iOS. 
+#if ((defined(__clang__) && !defined(_MSC_VER) && !defined(WEBRTC_IOS)) || \ + __cplusplus > 201703L) +// NOLINTNEXTLINE(whitespace/braces) +#define RTC_NO_UNIQUE_ADDRESS [[no_unique_address]] +#else +#define RTC_NO_UNIQUE_ADDRESS +#endif + +#endif // RTC_BASE_SYSTEM_NO_UNIQUE_ADDRESS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system/unused.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system/unused.h index a0add4ee2..084c52662 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/system/unused.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system/unused.h @@ -11,21 +11,6 @@ #ifndef RTC_BASE_SYSTEM_UNUSED_H_ #define RTC_BASE_SYSTEM_UNUSED_H_ -// Annotate a function indicating the caller must examine the return value. -// Use like: -// int foo() RTC_WARN_UNUSED_RESULT; -// To explicitly ignore a result, cast to void. -// TODO(kwiberg): Remove when we can use [[nodiscard]] from C++17. -#if defined(__clang__) -#define RTC_WARN_UNUSED_RESULT __attribute__((__warn_unused_result__)) -#elif defined(__GNUC__) -// gcc has a __warn_unused_result__ attribute, but you can't quiet it by -// casting to void, so we don't use it. -#define RTC_WARN_UNUSED_RESULT -#else -#define RTC_WARN_UNUSED_RESULT -#endif - // Prevent the compiler from warning about an unused variable. For example: // int result = DoSomething(); // assert(result == 17); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.cc new file mode 100644 index 000000000..9efe76e3a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.cc @@ -0,0 +1,97 @@ +/* + * Copyright 2021 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// If WEBRTC_EXCLUDE_SYSTEM_TIME is set, an implementation of +// rtc::SystemTimeNanos() must be provided externally. +#ifndef WEBRTC_EXCLUDE_SYSTEM_TIME + +#include + +#include + +#if defined(WEBRTC_POSIX) +#include +#if defined(WEBRTC_MAC) +#include +#endif +#endif + +#if defined(WEBRTC_WIN) +// clang-format off +// clang formatting would put last, +// which leads to compilation failure. +#include +#include +#include +// clang-format on +#endif + +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/system_time.h" +#include "rtc_base/time_utils.h" + +namespace rtc { + +int64_t SystemTimeNanos() { + int64_t ticks; +#if defined(WEBRTC_MAC) + static mach_timebase_info_data_t timebase; + if (timebase.denom == 0) { + // Get the timebase if this is the first time we run. + // Recommended by Apple's QA1398. + if (mach_timebase_info(&timebase) != KERN_SUCCESS) { + RTC_NOTREACHED(); + } + } + // Use timebase to convert absolute time tick units into nanoseconds. + const auto mul = [](uint64_t a, uint32_t b) -> int64_t { + RTC_DCHECK_NE(b, 0); + RTC_DCHECK_LE(a, std::numeric_limits::max() / b) + << "The multiplication " << a << " * " << b << " overflows"; + return rtc::dchecked_cast(a * b); + }; + ticks = mul(mach_absolute_time(), timebase.numer) / timebase.denom; +#elif defined(WEBRTC_POSIX) + struct timespec ts; + // TODO(deadbeef): Do we need to handle the case when CLOCK_MONOTONIC is not + // supported? 
+ clock_gettime(CLOCK_MONOTONIC, &ts); + ticks = kNumNanosecsPerSec * static_cast(ts.tv_sec) + + static_cast(ts.tv_nsec); +#elif defined(WINUWP) + ticks = WinUwpSystemTimeNanos(); +#elif defined(WEBRTC_WIN) + static volatile LONG last_timegettime = 0; + static volatile int64_t num_wrap_timegettime = 0; + volatile LONG* last_timegettime_ptr = &last_timegettime; + DWORD now = timeGetTime(); + // Atomically update the last gotten time + DWORD old = InterlockedExchange(last_timegettime_ptr, now); + if (now < old) { + // If now is earlier than old, there may have been a race between threads. + // 0x0fffffff ~3.1 days, the code will not take that long to execute + // so it must have been a wrap around. + if (old > 0xf0000000 && now < 0x0fffffff) { + num_wrap_timegettime++; + } + } + ticks = now + (num_wrap_timegettime << 32); + // TODO(deadbeef): Calculate with nanosecond precision. Otherwise, we're + // just wasting a multiply and divide when doing Time() on Windows. + ticks = ticks * kNumNanosecsPerMillisec; +#else +#error Unsupported platform. +#endif + return ticks; +} + +} // namespace rtc +#endif // WEBRTC_EXCLUDE_SYSTEM_TIME diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.h b/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.h new file mode 100644 index 000000000..d86e94adf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/system_time.h @@ -0,0 +1,22 @@ +/* + * Copyright 2021 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_SYSTEM_TIME_H_ +#define RTC_BASE_SYSTEM_TIME_H_ + +namespace rtc { + +// Returns the actual system time, even if a clock is set for testing. 
+// Useful for timeouts while using a test clock, or for logging. +int64_t SystemTimeNanos(); + +} // namespace rtc + +#endif // RTC_BASE_SYSTEM_TIME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc new file mode 100644 index 000000000..cb6b23cea --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc @@ -0,0 +1,21 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/task_queue_for_test.h" + +#include "api/task_queue/default_task_queue_factory.h" + +namespace webrtc { + +TaskQueueForTest::TaskQueueForTest(absl::string_view name, Priority priority) + : TaskQueue( + CreateDefaultTaskQueueFactory()->CreateTaskQueue(name, priority)) {} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h new file mode 100644 index 000000000..dd5679bc9 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h @@ -0,0 +1,81 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef RTC_BASE_TASK_QUEUE_FOR_TEST_H_ +#define RTC_BASE_TASK_QUEUE_FOR_TEST_H_ + +#include + +#include "absl/strings/string_view.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/location.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +template +void SendTask(rtc::Location loc, TaskQueueBase* task_queue, Closure&& task) { + RTC_CHECK(!task_queue->IsCurrent()) + << "Called SendTask to a queue from the same queue at " << loc.ToString(); + rtc::Event event; + task_queue->PostTask( + ToQueuedTask(std::forward(task), [&event] { event.Set(); })); + RTC_CHECK(event.Wait(/*give_up_after_ms=*/rtc::Event::kForever, + /*warn_after_ms=*/10'000)) + << "Waited too long at " << loc.ToString(); +} + +class RTC_LOCKABLE TaskQueueForTest : public rtc::TaskQueue { + public: + using rtc::TaskQueue::TaskQueue; + explicit TaskQueueForTest(absl::string_view name = "TestQueue", + Priority priority = Priority::NORMAL); + TaskQueueForTest(const TaskQueueForTest&) = delete; + TaskQueueForTest& operator=(const TaskQueueForTest&) = delete; + ~TaskQueueForTest() = default; + + // A convenience, test-only method that blocks the current thread while + // a task executes on the task queue. + // This variant is specifically for posting custom QueuedTask derived + // implementations that tests do not want to pass ownership of over to the + // task queue (i.e. the Run() method always returns |false|.). + template + void SendTask(Closure* task) { + RTC_CHECK(!IsCurrent()); + rtc::Event event; + PostTask(ToQueuedTask( + [&task] { RTC_CHECK_EQ(false, static_cast(task)->Run()); }, + [&event] { event.Set(); })); + event.Wait(rtc::Event::kForever); + } + + // A convenience, test-only method that blocks the current thread while + // a task executes on the task queue. 
+ template + void SendTask(Closure&& task, rtc::Location loc) { + ::webrtc::SendTask(loc, Get(), std::forward(task)); + } + + // Wait for the completion of all tasks posted prior to the + // WaitForPreviouslyPostedTasks() call. + void WaitForPreviouslyPostedTasks() { + // Post an empty task on the queue and wait for it to finish, to ensure + // that all already posted tasks on the queue get executed. + SendTask([]() {}, RTC_FROM_HERE); + } +}; + +} // namespace webrtc + +#endif // RTC_BASE_TASK_QUEUE_FOR_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_libevent.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_libevent.cc index 38660cd5a..909698611 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_libevent.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_libevent.cc @@ -93,16 +93,12 @@ void EventAssign(struct event* ev, rtc::ThreadPriority TaskQueuePriorityToThreadPriority(Priority priority) { switch (priority) { case Priority::HIGH: - return rtc::kRealtimePriority; + return rtc::ThreadPriority::kRealtime; case Priority::LOW: - return rtc::kLowPriority; + return rtc::ThreadPriority::kLow; case Priority::NORMAL: - return rtc::kNormalPriority; - default: - RTC_NOTREACHED(); - break; + return rtc::ThreadPriority::kNormal; } - return rtc::kNormalPriority; } class TaskQueueLibevent final : public TaskQueueBase { @@ -120,7 +116,6 @@ class TaskQueueLibevent final : public TaskQueueBase { ~TaskQueueLibevent() override = default; - static void ThreadMain(void* context); static void OnWakeup(int socket, short flags, void* context); // NOLINT static void RunTimer(int fd, short flags, void* context); // NOLINT @@ -172,8 +167,7 @@ class TaskQueueLibevent::SetTimerTask : public QueuedTask { TaskQueueLibevent::TaskQueueLibevent(absl::string_view queue_name, rtc::ThreadPriority priority) - : event_base_(event_base_new()), - thread_(&TaskQueueLibevent::ThreadMain, this, queue_name, priority) { + : event_base_(event_base_new()) { int 
fds[2]; RTC_CHECK(pipe(fds) == 0); SetNonBlocking(fds[0]); @@ -184,7 +178,18 @@ TaskQueueLibevent::TaskQueueLibevent(absl::string_view queue_name, EventAssign(&wakeup_event_, event_base_, wakeup_pipe_out_, EV_READ | EV_PERSIST, OnWakeup, this); event_add(&wakeup_event_, 0); - thread_.Start(); + thread_ = rtc::PlatformThread::SpawnJoinable( + [this] { + { + CurrentTaskQueueSetter set_current(this); + while (is_active_) + event_base_loop(event_base_, 0); + } + + for (TimerEvent* timer : pending_timers_) + delete timer; + }, + queue_name, rtc::ThreadAttributes().SetPriority(priority)); } void TaskQueueLibevent::Delete() { @@ -199,7 +204,7 @@ void TaskQueueLibevent::Delete() { nanosleep(&ts, nullptr); } - thread_.Stop(); + thread_.Finalize(); event_del(&wakeup_event_); @@ -252,20 +257,6 @@ void TaskQueueLibevent::PostDelayedTask(std::unique_ptr task, } } -// static -void TaskQueueLibevent::ThreadMain(void* context) { - TaskQueueLibevent* me = static_cast(context); - - { - CurrentTaskQueueSetter set_current(me); - while (me->is_active_) - event_base_loop(me->event_base_, 0); - } - - for (TimerEvent* timer : me->pending_timers_) - delete timer; -} - // static void TaskQueueLibevent::OnWakeup(int socket, short flags, // NOLINT diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_stdlib.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_stdlib.cc index 5de634512..548f7ef69 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_stdlib.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_stdlib.cc @@ -36,14 +36,11 @@ rtc::ThreadPriority TaskQueuePriorityToThreadPriority( TaskQueueFactory::Priority priority) { switch (priority) { case TaskQueueFactory::Priority::HIGH: - return rtc::kRealtimePriority; + return rtc::ThreadPriority::kRealtime; case TaskQueueFactory::Priority::LOW: - return rtc::kLowPriority; + return rtc::ThreadPriority::kLow; case TaskQueueFactory::Priority::NORMAL: - return rtc::kNormalPriority; - default: - RTC_NOTREACHED(); - 
return rtc::kNormalPriority; + return rtc::ThreadPriority::kNormal; } } @@ -78,8 +75,6 @@ class TaskQueueStdlib final : public TaskQueueBase { NextTask GetNextTask(); - static void ThreadMain(void* context); - void ProcessTasks(); void NotifyWake(); @@ -126,8 +121,13 @@ TaskQueueStdlib::TaskQueueStdlib(absl::string_view queue_name, : started_(/*manual_reset=*/false, /*initially_signaled=*/false), stopped_(/*manual_reset=*/false, /*initially_signaled=*/false), flag_notify_(/*manual_reset=*/false, /*initially_signaled=*/false), - thread_(&TaskQueueStdlib::ThreadMain, this, queue_name, priority) { - thread_.Start(); + thread_(rtc::PlatformThread::SpawnJoinable( + [this] { + CurrentTaskQueueSetter set_current(this); + ProcessTasks(); + }, + queue_name, + rtc::ThreadAttributes().SetPriority(priority))) { started_.Wait(rtc::Event::kForever); } @@ -142,7 +142,7 @@ void TaskQueueStdlib::Delete() { NotifyWake(); stopped_.Wait(rtc::Event::kForever); - thread_.Stop(); + thread_.Finalize(); delete this; } @@ -219,13 +219,6 @@ TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() { return result; } -// static -void TaskQueueStdlib::ThreadMain(void* context) { - TaskQueueStdlib* me = static_cast(context); - CurrentTaskQueueSetter set_current(me); - me->ProcessTasks(); -} - void TaskQueueStdlib::ProcessTasks() { started_.Set(); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc index 5eb3776ce..d797d478f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc @@ -29,16 +29,18 @@ #include #include "absl/strings/string_view.h" +#include "absl/types/optional.h" #include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include 
"rtc_base/numerics/safe_conversions.h" #include "rtc_base/platform_thread.h" -#include "rtc_base/time_utils.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/time_utils.h" namespace webrtc { namespace { @@ -56,16 +58,12 @@ rtc::ThreadPriority TaskQueuePriorityToThreadPriority( TaskQueueFactory::Priority priority) { switch (priority) { case TaskQueueFactory::Priority::HIGH: - return rtc::kRealtimePriority; + return rtc::ThreadPriority::kRealtime; case TaskQueueFactory::Priority::LOW: - return rtc::kLowPriority; + return rtc::ThreadPriority::kLow; case TaskQueueFactory::Priority::NORMAL: - return rtc::kNormalPriority; - default: - RTC_NOTREACHED(); - break; + return rtc::ThreadPriority::kNormal; } - return rtc::kNormalPriority; } int64_t GetTick() { @@ -167,21 +165,6 @@ class TaskQueueWin : public TaskQueueBase { void RunPendingTasks(); private: - static void ThreadMain(void* context); - - class WorkerThread : public rtc::PlatformThread { - public: - WorkerThread(rtc::ThreadRunFunction func, - void* obj, - absl::string_view thread_name, - rtc::ThreadPriority priority) - : PlatformThread(func, obj, thread_name, priority) {} - - bool QueueAPC(PAPCFUNC apc_function, ULONG_PTR data) { - return rtc::PlatformThread::QueueAPC(apc_function, data); - } - }; - void RunThreadMain(); bool ProcessQueuedMessages(); void RunDueTasks(); @@ -204,7 +187,7 @@ class TaskQueueWin : public TaskQueueBase { greater> timer_tasks_; UINT_PTR timer_id_ = 0; - WorkerThread thread_; + rtc::PlatformThread thread_; Mutex pending_lock_; std::queue> pending_ RTC_GUARDED_BY(pending_lock_); @@ -213,10 +196,12 @@ class TaskQueueWin : public TaskQueueBase { TaskQueueWin::TaskQueueWin(absl::string_view queue_name, rtc::ThreadPriority priority) - : thread_(&TaskQueueWin::ThreadMain, this, queue_name, priority), - in_queue_(::CreateEvent(nullptr, true, false, nullptr)) { + : in_queue_(::CreateEvent(nullptr, true, false, nullptr)) { RTC_DCHECK(in_queue_); - thread_.Start(); + thread_ = 
rtc::PlatformThread::SpawnJoinable( + [this] { RunThreadMain(); }, queue_name, + rtc::ThreadAttributes().SetPriority(priority)); + rtc::Event event(false, false); RTC_CHECK(thread_.QueueAPC(&InitializeQueueThread, reinterpret_cast(&event))); @@ -225,11 +210,13 @@ TaskQueueWin::TaskQueueWin(absl::string_view queue_name, void TaskQueueWin::Delete() { RTC_DCHECK(!IsCurrent()); - while (!::PostThreadMessage(thread_.GetThreadRef(), WM_QUIT, 0, 0)) { + RTC_CHECK(thread_.GetHandle() != absl::nullopt); + while ( + !::PostThreadMessage(GetThreadId(*thread_.GetHandle()), WM_QUIT, 0, 0)) { RTC_CHECK_EQ(ERROR_NOT_ENOUGH_QUOTA, ::GetLastError()); Sleep(1); } - thread_.Stop(); + thread_.Finalize(); ::CloseHandle(in_queue_); delete this; } @@ -252,7 +239,9 @@ void TaskQueueWin::PostDelayedTask(std::unique_ptr task, // and WPARAM is 32bits in 32bit builds. Otherwise, we could pass the // task pointer and timestamp as LPARAM and WPARAM. auto* task_info = new DelayedTaskInfo(milliseconds, std::move(task)); - if (!::PostThreadMessage(thread_.GetThreadRef(), WM_QUEUE_DELAYED_TASK, 0, + RTC_CHECK(thread_.GetHandle() != absl::nullopt); + if (!::PostThreadMessage(GetThreadId(*thread_.GetHandle()), + WM_QUEUE_DELAYED_TASK, 0, reinterpret_cast(task_info))) { delete task_info; } @@ -274,11 +263,6 @@ void TaskQueueWin::RunPendingTasks() { } } -// static -void TaskQueueWin::ThreadMain(void* context) { - static_cast(context)->RunThreadMain(); -} - void TaskQueueWin::RunThreadMain() { CurrentTaskQueueSetter set_current(this); HANDLE handles[2] = {*timer_.event_for_wait(), in_queue_}; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc index 4be2131f3..57b3f6ce8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc @@ -10,13 +10,27 @@ #include 
"rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/ref_counted_object.h" - namespace webrtc { // static rtc::scoped_refptr PendingTaskSafetyFlag::Create() { - return new rtc::RefCountedObject(); + return new PendingTaskSafetyFlag(true); +} + +rtc::scoped_refptr +PendingTaskSafetyFlag::CreateDetached() { + rtc::scoped_refptr safety_flag( + new PendingTaskSafetyFlag(true)); + safety_flag->main_sequence_.Detach(); + return safety_flag; +} + +rtc::scoped_refptr +PendingTaskSafetyFlag::CreateDetachedInactive() { + rtc::scoped_refptr safety_flag( + new PendingTaskSafetyFlag(false)); + safety_flag->main_sequence_.Detach(); + return safety_flag; } void PendingTaskSafetyFlag::SetNotAlive() { @@ -24,6 +38,11 @@ void PendingTaskSafetyFlag::SetNotAlive() { alive_ = false; } +void PendingTaskSafetyFlag::SetAlive() { + RTC_DCHECK_RUN_ON(&main_sequence_); + alive_ = true; +} + bool PendingTaskSafetyFlag::alive() const { RTC_DCHECK_RUN_ON(&main_sequence_); return alive_; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h index 580fb3f91..fc1b5bd87 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h @@ -11,23 +11,30 @@ #ifndef RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ #define RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ +#include "api/ref_counted_base.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_count.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { -// Use this flag to drop pending tasks that have been posted to the "main" -// thread/TQ and end up running after the owning instance has been -// deleted. 
The owning instance signals deletion by calling SetNotAlive() from -// its destructor. -// +// The PendingTaskSafetyFlag and the ScopedTaskSafety are designed to address +// the issue where you have a task to be executed later that has references, +// but cannot guarantee that the referenced object is alive when the task is +// executed. + +// This mechanism can be used with tasks that are created and destroyed +// on a single thread / task queue, and with tasks posted to the same +// thread/task queue, but tasks can be posted from any thread/TQ. + +// Typical usage: // When posting a task, post a copy (capture by-value in a lambda) of the flag -// instance and before performing the work, check the |alive()| state. Abort if +// reference and before performing the work, check the |alive()| state. Abort if // alive() returns |false|: // -// // Running outside of the main thread. +// class ExampleClass { +// .... // my_task_queue_->PostTask(ToQueuedTask( // [safety = pending_task_safety_flag_, this]() { // // Now running on the main thread. @@ -35,39 +42,79 @@ namespace webrtc { // return; // MyMethod(); // })); +// .... +// ~ExampleClass() { +// pending_task_safety_flag_->SetNotAlive(); +// } +// scoped_refptr pending_task_safety_flag_ +// = PendingTaskSafetyFlag::Create(); +// } // -// Or implicitly by letting ToQueuedTask do the checking: +// ToQueuedTask has an overload that makes this check automatic: // -// // Running outside of the main thread. // my_task_queue_->PostTask(ToQueuedTask(pending_task_safety_flag_, // [this]() { MyMethod(); })); // -// Note that checking the state only works on the construction/destruction -// thread of the ReceiveStatisticsProxy instance. -class PendingTaskSafetyFlag : public rtc::RefCountInterface { +class PendingTaskSafetyFlag final + : public rtc::RefCountedNonVirtual { public: static rtc::scoped_refptr Create(); + // Creates a flag, but with its SequenceChecker initially detached. 
Hence, it + // may be created on a different thread than the flag will be used on. + static rtc::scoped_refptr CreateDetached(); + + // Same as `CreateDetached()` except the initial state of the returned flag + // will be `!alive()`. + static rtc::scoped_refptr CreateDetachedInactive(); + ~PendingTaskSafetyFlag() = default; void SetNotAlive(); + // The SetAlive method is intended to support Start/Stop/Restart usecases. + // When a class has called SetNotAlive on a flag used for posted tasks, and + // decides it wants to post new tasks and have them run, there are two + // reasonable ways to do that: + // + // (i) Use the below SetAlive method. One subtlety is that any task posted + // prior to SetNotAlive, and still in the queue, is resurrected and will + // run. + // + // (ii) Create a fresh flag, and just drop the reference to the old one. This + // avoids the above problem, and ensures that tasks poster prior to + // SetNotAlive stay cancelled. Instead, there's a potential data race on + // the flag pointer itself. Some synchronization is required between the + // thread overwriting the flag pointer, and the threads that want to post + // tasks and therefore read that same pointer. + void SetAlive(); bool alive() const; protected: - PendingTaskSafetyFlag() = default; + explicit PendingTaskSafetyFlag(bool alive) : alive_(alive) {} private: bool alive_ = true; - SequenceChecker main_sequence_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker main_sequence_; }; -// Makes using PendingTaskSafetyFlag very simple. Automatic PTSF creation -// and signalling of destruction when the ScopedTaskSafety instance goes out -// of scope. -// Should be used by the class that wants tasks dropped after destruction. -// Requirements are that the instance be constructed and destructed on +// The ScopedTaskSafety makes using PendingTaskSafetyFlag very simple. +// It does automatic PTSF creation and signalling of destruction when the +// ScopedTaskSafety instance goes out of scope. 
+// +// ToQueuedTask has an overload that takes a ScopedTaskSafety too, so there +// is no need to explicitly call the "flag" method. +// +// Example usage: +// +// my_task_queue->PostTask(ToQueuedTask(scoped_task_safety, +// [this]() { +// // task goes here +// } +// +// This should be used by the class that wants tasks dropped after destruction. +// The requirement is that the instance has to be constructed and destructed on // the same thread as the potentially dropped tasks would be running on. -class ScopedTaskSafety { +class ScopedTaskSafety final { public: ScopedTaskSafety() = default; ~ScopedTaskSafety() { flag_->SetNotAlive(); } @@ -80,6 +127,21 @@ class ScopedTaskSafety { PendingTaskSafetyFlag::Create(); }; +// Like ScopedTaskSafety, but allows construction on a different thread than +// where the flag will be used. +class ScopedTaskSafetyDetached final { + public: + ScopedTaskSafetyDetached() = default; + ~ScopedTaskSafetyDetached() { flag_->SetNotAlive(); } + + // Returns a new reference to the safety flag. + rtc::scoped_refptr flag() const { return flag_; } + + private: + rtc::scoped_refptr flag_ = + PendingTaskSafetyFlag::CreateDetached(); +}; + } // namespace webrtc #endif // RTC_BASE_TASK_UTILS_PENDING_TASK_SAFETY_FLAG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/to_queued_task.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/to_queued_task.h index 07ab0ebe2..b2e3aae7a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/to_queued_task.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/to_queued_task.h @@ -20,7 +20,7 @@ namespace webrtc { namespace webrtc_new_closure_impl { -// Simple implementation of QueuedTask for use with rtc::Bind and lambdas. +// Simple implementation of QueuedTask for use with lambdas. 
template class ClosureTask : public QueuedTask { public: diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/test_utils.h b/TMessagesProj/jni/voip/webrtc/rtc_base/test_utils.h index 4746e962a..7068e7388 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/test_utils.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/test_utils.h @@ -17,25 +17,23 @@ #include #include "rtc_base/async_socket.h" -#include "rtc_base/stream.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace webrtc { namespace testing { /////////////////////////////////////////////////////////////////////////////// -// StreamSink - Monitor asynchronously signalled events from StreamInterface -// or AsyncSocket (which should probably be a StreamInterface. +// StreamSink - Monitor asynchronously signalled events from AsyncSocket. /////////////////////////////////////////////////////////////////////////////// -// Note: Any event that is an error is treaded as SSE_ERROR instead of that +// Note: Any event that is an error is treated as SSE_ERROR instead of that // event. 
enum StreamSinkEvent { - SSE_OPEN = rtc::SE_OPEN, - SSE_READ = rtc::SE_READ, - SSE_WRITE = rtc::SE_WRITE, - SSE_CLOSE = rtc::SE_CLOSE, + SSE_OPEN = 1, + SSE_READ = 2, + SSE_WRITE = 4, + SSE_CLOSE = 8, SSE_ERROR = 16 }; @@ -44,24 +42,6 @@ class StreamSink : public sigslot::has_slots<> { StreamSink(); ~StreamSink() override; - void Monitor(rtc::StreamInterface* stream) { - stream->SignalEvent.connect(this, &StreamSink::OnEvent); - events_.erase(stream); - } - void Unmonitor(rtc::StreamInterface* stream) { - stream->SignalEvent.disconnect(this); - // In case you forgot to unmonitor a previous object with this address - events_.erase(stream); - } - bool Check(rtc::StreamInterface* stream, - StreamSinkEvent event, - bool reset = true) { - return DoCheck(stream, event, reset); - } - int Events(rtc::StreamInterface* stream, bool reset = true) { - return DoEvents(stream, reset); - } - void Monitor(rtc::AsyncSocket* socket) { socket->SignalConnectEvent.connect(this, &StreamSink::OnConnectEvent); socket->SignalReadEvent.connect(this, &StreamSink::OnReadEvent); @@ -82,19 +62,10 @@ class StreamSink : public sigslot::has_slots<> { bool reset = true) { return DoCheck(socket, event, reset); } - int Events(rtc::AsyncSocket* socket, bool reset = true) { - return DoEvents(socket, reset); - } private: - typedef std::map EventMap; + typedef std::map EventMap; - void OnEvent(rtc::StreamInterface* stream, int events, int error) { - if (error) { - events = SSE_ERROR; - } - AddEvents(stream, events); - } void OnConnectEvent(rtc::AsyncSocket* socket) { AddEvents(socket, SSE_OPEN); } void OnReadEvent(rtc::AsyncSocket* socket) { AddEvents(socket, SSE_READ); } void OnWriteEvent(rtc::AsyncSocket* socket) { AddEvents(socket, SSE_WRITE); } @@ -102,7 +73,7 @@ class StreamSink : public sigslot::has_slots<> { AddEvents(socket, (0 == error) ? 
SSE_CLOSE : SSE_ERROR); } - void AddEvents(void* obj, int events) { + void AddEvents(rtc::AsyncSocket* obj, int events) { EventMap::iterator it = events_.find(obj); if (events_.end() == it) { events_.insert(EventMap::value_type(obj, events)); @@ -110,7 +81,7 @@ class StreamSink : public sigslot::has_slots<> { it->second |= events; } } - bool DoCheck(void* obj, StreamSinkEvent event, bool reset) { + bool DoCheck(rtc::AsyncSocket* obj, StreamSinkEvent event, bool reset) { EventMap::iterator it = events_.find(obj); if ((events_.end() == it) || (0 == (it->second & event))) { return false; @@ -120,16 +91,6 @@ class StreamSink : public sigslot::has_slots<> { } return true; } - int DoEvents(void* obj, bool reset) { - EventMap::iterator it = events_.find(obj); - if (events_.end() == it) - return 0; - int events = it->second; - if (reset) { - it->second = 0; - } - return events; - } EventMap events_; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc index 32449020c..2a5d5eccd 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc @@ -29,13 +29,14 @@ #include #include "absl/algorithm/container.h" +#include "api/sequence_checker.h" #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" #include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/event.h" +#include "rtc_base/internal/default_socket_server.h" #include "rtc_base/logging.h" #include "rtc_base/null_socket_server.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -70,8 +71,6 @@ class ScopedAutoReleasePool { namespace rtc { namespace { -const int kSlowDispatchLoggingThreshold = 50; // 50 ms - class MessageHandlerWithTask final : public MessageHandler { public: MessageHandlerWithTask() {} @@ -257,7 +256,7 @@ Thread* Thread::Current() { #ifndef 
NO_MAIN_THREAD_WRAPPING // Only autowrap the thread which instantiated the ThreadManager. if (!thread && manager->IsMainThread()) { - thread = new Thread(SocketServer::CreateDefault()); + thread = new Thread(CreateDefaultSocketServer()); thread->WrapCurrentWithThreadManager(manager, true); } #endif @@ -326,7 +325,7 @@ void rtc::ThreadManager::ChangeCurrentThreadForTest(rtc::Thread* thread) { Thread* ThreadManager::WrapCurrentThread() { Thread* result = CurrentThread(); if (nullptr == result) { - result = new Thread(SocketServer::CreateDefault()); + result = new Thread(CreateDefaultSocketServer()); result->WrapCurrentWithThreadManager(this, true); } return result; @@ -353,6 +352,35 @@ Thread::ScopedDisallowBlockingCalls::~ScopedDisallowBlockingCalls() { thread_->SetAllowBlockingCalls(previous_state_); } +#if RTC_DCHECK_IS_ON +Thread::ScopedCountBlockingCalls::ScopedCountBlockingCalls( + std::function callback) + : thread_(Thread::Current()), + base_blocking_call_count_(thread_->GetBlockingCallCount()), + base_could_be_blocking_call_count_( + thread_->GetCouldBeBlockingCallCount()), + result_callback_(std::move(callback)) {} + +Thread::ScopedCountBlockingCalls::~ScopedCountBlockingCalls() { + if (GetTotalBlockedCallCount() >= min_blocking_calls_for_callback_) { + result_callback_(GetBlockingCallCount(), GetCouldBeBlockingCallCount()); + } +} + +uint32_t Thread::ScopedCountBlockingCalls::GetBlockingCallCount() const { + return thread_->GetBlockingCallCount() - base_blocking_call_count_; +} + +uint32_t Thread::ScopedCountBlockingCalls::GetCouldBeBlockingCallCount() const { + return thread_->GetCouldBeBlockingCallCount() - + base_could_be_blocking_call_count_; +} + +uint32_t Thread::ScopedCountBlockingCalls::GetTotalBlockedCallCount() const { + return GetBlockingCallCount() + GetCouldBeBlockingCallCount(); +} +#endif + Thread::Thread(SocketServer* ss) : Thread(ss, /*do_init=*/true) {} Thread::Thread(std::unique_ptr ss) @@ -401,13 +429,11 @@ void Thread::DoDestroy() { // 
The signal is done from here to ensure // that it always gets called when the queue // is going away. - SignalQueueDestroyed(); - ThreadManager::Remove(this); - ClearInternal(nullptr, MQID_ANY, nullptr); - if (ss_) { ss_->SetMessageQueue(nullptr); } + ThreadManager::Remove(this); + ClearInternal(nullptr, MQID_ANY, nullptr); } SocketServer* Thread::socketserver() { @@ -680,14 +706,18 @@ void Thread::Dispatch(Message* pmsg) { TRACE_EVENT2("webrtc", "Thread::Dispatch", "src_file", pmsg->posted_from.file_name(), "src_func", pmsg->posted_from.function_name()); + RTC_DCHECK_RUN_ON(this); int64_t start_time = TimeMillis(); pmsg->phandler->OnMessage(pmsg); int64_t end_time = TimeMillis(); int64_t diff = TimeDiff(end_time, start_time); - if (diff >= kSlowDispatchLoggingThreshold) { - RTC_LOG(LS_INFO) << "Message took " << diff + if (diff >= dispatch_warning_ms_) { + RTC_LOG(LS_INFO) << "Message to " << name() << " took " << diff << "ms to dispatch. Posted from: " << pmsg->posted_from.ToString(); + // To avoid log spew, move the warning limit to only give warning + // for delays that are larger than the one observed. 
+ dispatch_warning_ms_ = diff + 1; } } @@ -696,7 +726,7 @@ bool Thread::IsCurrent() const { } std::unique_ptr Thread::CreateWithSocketServer() { - return std::unique_ptr(new Thread(SocketServer::CreateDefault())); + return std::unique_ptr(new Thread(CreateDefaultSocketServer())); } std::unique_ptr Thread::Create() { @@ -739,6 +769,16 @@ bool Thread::SetName(const std::string& name, const void* obj) { return true; } +void Thread::SetDispatchWarningMs(int deadline) { + if (!IsCurrent()) { + PostTask(webrtc::ToQueuedTask( + [this, deadline]() { SetDispatchWarningMs(deadline); })); + return; + } + RTC_DCHECK_RUN_ON(this); + dispatch_warning_ms_ = deadline; +} + bool Thread::Start() { RTC_DCHECK(!IsRunning()); @@ -888,6 +928,10 @@ void Thread::Send(const Location& posted_from, msg.message_id = id; msg.pdata = pdata; if (IsCurrent()) { +#if RTC_DCHECK_IS_ON + RTC_DCHECK_RUN_ON(this); + could_be_blocking_call_count_++; +#endif msg.phandler->OnMessage(&msg); return; } @@ -898,6 +942,8 @@ void Thread::Send(const Location& posted_from, #if RTC_DCHECK_IS_ON if (current_thread) { + RTC_DCHECK_RUN_ON(current_thread); + current_thread->blocking_call_count_++; RTC_DCHECK(current_thread->IsInvokeToThreadAllowed(this)); ThreadManager::Instance()->RegisterSendAndCheckForCycles(current_thread, this); @@ -1021,6 +1067,17 @@ void Thread::DisallowAllInvokes() { #endif } +#if RTC_DCHECK_IS_ON +uint32_t Thread::GetBlockingCallCount() const { + RTC_DCHECK_RUN_ON(this); + return blocking_call_count_; +} +uint32_t Thread::GetCouldBeBlockingCallCount() const { + RTC_DCHECK_RUN_ON(this); + return could_be_blocking_call_count_; +} +#endif + // Returns true if no policies added or if there is at least one policy // that permits invocation to |target| thread. 
bool Thread::IsInvokeToThreadAllowed(rtc::Thread* target) { @@ -1137,7 +1194,7 @@ MessageHandler* Thread::GetPostTaskMessageHandler() { } AutoThread::AutoThread() - : Thread(SocketServer::CreateDefault(), /*do_init=*/false) { + : Thread(CreateDefaultSocketServer(), /*do_init=*/false) { if (!ThreadManager::Instance()->CurrentThread()) { // DoInit registers with ThreadManager. Do that only if we intend to // be rtc::Thread::Current(), otherwise ProcessAllMessageQueuesInternal will diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h index ed19e9892..6e68f1a67 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h @@ -42,6 +42,35 @@ #include "rtc_base/win32.h" #endif +#if RTC_DCHECK_IS_ON +// Counts how many blocking Thread::Invoke or Thread::Send calls are made from +// within a scope and logs the number of blocking calls at the end of the scope. +#define RTC_LOG_THREAD_BLOCK_COUNT() \ + rtc::Thread::ScopedCountBlockingCalls blocked_call_count_printer( \ + [func = __func__](uint32_t actual_block, uint32_t could_block) { \ + auto total = actual_block + could_block; \ + if (total) { \ + RTC_LOG(LS_WARNING) << "Blocking " << func << ": total=" << total \ + << " (actual=" << actual_block \ + << ", could=" << could_block << ")"; \ + } \ + }) + +// Adds an RTC_DCHECK_LE that checks that the number of blocking calls are +// less than or equal to a specific value. Use to avoid regressing in the +// number of blocking thread calls. +// Note: Use of this macro, requires RTC_LOG_THREAD_BLOCK_COUNT() to be called +// first. 
+#define RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(x) \ + do { \ + blocked_call_count_printer.set_minimum_call_count_for_callback(x + 1); \ + RTC_DCHECK_LE(blocked_call_count_printer.GetTotalBlockedCallCount(), x); \ + } while (0) +#else +#define RTC_LOG_THREAD_BLOCK_COUNT() +#define RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(x) +#endif + namespace rtc { class Thread; @@ -212,6 +241,39 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { const bool previous_state_; }; +#if RTC_DCHECK_IS_ON + class ScopedCountBlockingCalls { + public: + ScopedCountBlockingCalls(std::function callback); + ScopedCountBlockingCalls(const ScopedDisallowBlockingCalls&) = delete; + ScopedCountBlockingCalls& operator=(const ScopedDisallowBlockingCalls&) = + delete; + ~ScopedCountBlockingCalls(); + + uint32_t GetBlockingCallCount() const; + uint32_t GetCouldBeBlockingCallCount() const; + uint32_t GetTotalBlockedCallCount() const; + + void set_minimum_call_count_for_callback(uint32_t minimum) { + min_blocking_calls_for_callback_ = minimum; + } + + private: + Thread* const thread_; + const uint32_t base_blocking_call_count_; + const uint32_t base_could_be_blocking_call_count_; + // The minimum number of blocking calls required in order to issue the + // result_callback_. This is used by RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN to + // tame log spam. + // By default we always issue the callback, regardless of callback count. + uint32_t min_blocking_calls_for_callback_ = 0; + std::function result_callback_; + }; + + uint32_t GetBlockingCallCount() const; + uint32_t GetCouldBeBlockingCallCount() const; +#endif + SocketServer* socketserver(); // Note: The behavior of Thread has changed. When a thread is stopped, @@ -274,10 +336,6 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { } } - // When this signal is sent out, any references to this queue should - // no longer be used. 
- sigslot::signal0<> SignalQueueDestroyed; - bool IsCurrent() const; // Sleeps the calling thread for the specified number of milliseconds, during @@ -290,6 +348,11 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { const std::string& name() const { return name_; } bool SetName(const std::string& name, const void* obj); + // Sets the expected processing time in ms. The thread will write + // log messages when Invoke() takes more time than this. + // Default is 50 ms. + void SetDispatchWarningMs(int deadline); + // Starts the execution of the thread. bool Start(); @@ -525,6 +588,8 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { RecursiveCriticalSection* CritForTest() { return &crit_; } private: + static const int kSlowDispatchLoggingThreshold = 50; // 50 ms + class QueuedTaskHandler final : public MessageHandler { public: QueuedTaskHandler() {} @@ -570,7 +635,9 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { MessageList messages_ RTC_GUARDED_BY(crit_); PriorityQueue delayed_messages_ RTC_GUARDED_BY(crit_); uint32_t delayed_next_num_ RTC_GUARDED_BY(crit_); -#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)) +#if RTC_DCHECK_IS_ON + uint32_t blocking_call_count_ RTC_GUARDED_BY(this) = 0; + uint32_t could_be_blocking_call_count_ RTC_GUARDED_BY(this) = 0; std::vector allowed_threads_ RTC_GUARDED_BY(this); bool invoke_policy_enabled_ RTC_GUARDED_BY(this) = false; #endif @@ -614,6 +681,8 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { friend class ThreadManager; + int dispatch_warning_ms_ RTC_GUARDED_BY(this) = kSlowDispatchLoggingThreshold; + RTC_DISALLOW_COPY_AND_ASSIGN(Thread); }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread_checker.h b/TMessagesProj/jni/voip/webrtc/rtc_base/thread_checker.h deleted file mode 100644 index 876a08e38..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread_checker.h +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2014 
The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Borrowed from Chromium's src/base/threading/thread_checker.h. - -#ifndef RTC_BASE_THREAD_CHECKER_H_ -#define RTC_BASE_THREAD_CHECKER_H_ - -#include "rtc_base/deprecation.h" -#include "rtc_base/synchronization/sequence_checker.h" - -namespace rtc { -// TODO(srte): Replace usages of this with SequenceChecker. -class ThreadChecker : public webrtc::SequenceChecker { - public: - RTC_DEPRECATED bool CalledOnValidThread() const { return IsCurrent(); } - RTC_DEPRECATED void DetachFromThread() { Detach(); } -}; -} // namespace rtc -#endif // RTC_BASE_THREAD_CHECKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.cc index 11c9d5a47..fe63d3a8e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.cc @@ -12,23 +12,15 @@ #if defined(WEBRTC_POSIX) #include -#if defined(WEBRTC_MAC) -#include -#endif #endif #if defined(WEBRTC_WIN) -// clang-format off -// clang formatting would put last, -// which leads to compilation failure. 
-#include -#include #include -// clang-format on #endif #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/system_time.h" #include "rtc_base/time_utils.h" namespace rtc { @@ -141,61 +133,12 @@ void SyncWithNtp(int64_t time_from_ntp_server_ms) { TimeHelper::SyncWithNtp(time_from_ntp_server_ms); } -#endif // defined(WINUWP) - -int64_t SystemTimeNanos() { - int64_t ticks; -#if defined(WEBRTC_MAC) - static mach_timebase_info_data_t timebase; - if (timebase.denom == 0) { - // Get the timebase if this is the first time we run. - // Recommended by Apple's QA1398. - if (mach_timebase_info(&timebase) != KERN_SUCCESS) { - RTC_NOTREACHED(); - } - } - // Use timebase to convert absolute time tick units into nanoseconds. - const auto mul = [](uint64_t a, uint32_t b) -> int64_t { - RTC_DCHECK_NE(b, 0); - RTC_DCHECK_LE(a, std::numeric_limits::max() / b) - << "The multiplication " << a << " * " << b << " overflows"; - return rtc::dchecked_cast(a * b); - }; - ticks = mul(mach_absolute_time(), timebase.numer) / timebase.denom; -#elif defined(WEBRTC_POSIX) - struct timespec ts; - // TODO(deadbeef): Do we need to handle the case when CLOCK_MONOTONIC is not - // supported? - clock_gettime(CLOCK_MONOTONIC, &ts); - ticks = kNumNanosecsPerSec * static_cast(ts.tv_sec) + - static_cast(ts.tv_nsec); -#elif defined(WINUWP) - ticks = TimeHelper::TicksNs(); -#elif defined(WEBRTC_WIN) - static volatile LONG last_timegettime = 0; - static volatile int64_t num_wrap_timegettime = 0; - volatile LONG* last_timegettime_ptr = &last_timegettime; - DWORD now = timeGetTime(); - // Atomically update the last gotten time - DWORD old = InterlockedExchange(last_timegettime_ptr, now); - if (now < old) { - // If now is earlier than old, there may have been a race between threads. - // 0x0fffffff ~3.1 days, the code will not take that long to execute - // so it must have been a wrap around. 
- if (old > 0xf0000000 && now < 0x0fffffff) { - num_wrap_timegettime++; - } - } - ticks = now + (num_wrap_timegettime << 32); - // TODO(deadbeef): Calculate with nanosecond precision. Otherwise, we're - // just wasting a multiply and divide when doing Time() on Windows. - ticks = ticks * kNumNanosecsPerMillisec; -#else -#error Unsupported platform. -#endif - return ticks; +int64_t WinUwpSystemTimeNanos() { + return TimeHelper::TicksNs(); } +#endif // defined(WINUWP) + int64_t SystemTimeMillis() { return static_cast(SystemTimeNanos() / kNumNanosecsPerMillisec); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h b/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h index 147ab8daf..de3c58c81 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h @@ -16,6 +16,7 @@ #include "rtc_base/checks.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/system_time.h" namespace rtc { @@ -61,11 +62,16 @@ RTC_EXPORT ClockInterface* GetClockForTesting(); // Synchronizes the current clock based upon an NTP server's epoch in // milliseconds. void SyncWithNtp(int64_t time_from_ntp_server_ms); + +// Returns the current time in nanoseconds. The clock is synchonized with the +// system wall clock time upon instatiation. It may also be synchronized using +// the SyncWithNtp() function above. Please note that the clock will most likely +// drift away from the system wall clock time as time goes by. +int64_t WinUwpSystemTimeNanos(); #endif // defined(WINUWP) // Returns the actual system time, even if a clock is set for testing. // Useful for timeouts while using a test clock, or for logging. -int64_t SystemTimeNanos(); int64_t SystemTimeMillis(); // Returns the current time in milliseconds in 32 bits. 
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.cc index d41fa8d18..9fa3021c6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.cc @@ -26,6 +26,8 @@ UniqueRandomIdGenerator::UniqueRandomIdGenerator(ArrayView known_ids) UniqueRandomIdGenerator::~UniqueRandomIdGenerator() = default; uint32_t UniqueRandomIdGenerator::GenerateId() { + webrtc::MutexLock lock(&mutex_); + RTC_CHECK_LT(known_ids_.size(), std::numeric_limits::max() - 1); while (true) { auto pair = known_ids_.insert(CreateRandomNonZeroId()); @@ -36,6 +38,7 @@ uint32_t UniqueRandomIdGenerator::GenerateId() { } bool UniqueRandomIdGenerator::AddKnownId(uint32_t value) { + webrtc::MutexLock lock(&mutex_); return known_ids_.insert(value).second; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.h b/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.h index 836dc70b6..22398fd3f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/unique_id_generator.h @@ -16,6 +16,9 @@ #include #include "api/array_view.h" +#include "api/sequence_checker.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" namespace rtc { @@ -47,9 +50,10 @@ class UniqueNumberGenerator { bool AddKnownId(TIntegral value); private: + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; static_assert(std::is_integral::value, "Must be integral type."); - TIntegral counter_; - std::set known_ids_; + TIntegral counter_ RTC_GUARDED_BY(sequence_checker_); + std::set known_ids_ RTC_GUARDED_BY(sequence_checker_); }; // This class will generate unique ids. Ids are 32 bit unsigned integers. 
@@ -76,7 +80,10 @@ class UniqueRandomIdGenerator { bool AddKnownId(uint32_t value); private: - std::set known_ids_; + // TODO(bugs.webrtc.org/12666): This lock is needed due to an instance in + // SdpOfferAnswerHandler being shared between threads. + webrtc::Mutex mutex_; + std::set known_ids_ RTC_GUARDED_BY(&mutex_); }; // This class will generate strings. A common use case is for identifiers. @@ -104,18 +111,23 @@ class UniqueStringGenerator { }; template -UniqueNumberGenerator::UniqueNumberGenerator() : counter_(0) {} +UniqueNumberGenerator::UniqueNumberGenerator() : counter_(0) { + sequence_checker_.Detach(); +} template UniqueNumberGenerator::UniqueNumberGenerator( ArrayView known_ids) - : counter_(0), known_ids_(known_ids.begin(), known_ids.end()) {} + : counter_(0), known_ids_(known_ids.begin(), known_ids.end()) { + sequence_checker_.Detach(); +} template UniqueNumberGenerator::~UniqueNumberGenerator() {} template TIntegral UniqueNumberGenerator::GenerateNumber() { + RTC_DCHECK_RUN_ON(&sequence_checker_); while (true) { RTC_CHECK_LT(counter_, std::numeric_limits::max()); auto pair = known_ids_.insert(counter_++); @@ -127,6 +139,7 @@ TIntegral UniqueNumberGenerator::GenerateNumber() { template bool UniqueNumberGenerator::AddKnownId(TIntegral value) { + RTC_DCHECK_RUN_ON(&sequence_checker_); return known_ids_.insert(value).second; } } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.cc index 3d412d66c..8140fcb6a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.cc @@ -54,7 +54,6 @@ const int NUM_SAMPLES = 1000; enum { MSG_ID_PACKET, - MSG_ID_ADDRESS_BOUND, MSG_ID_CONNECT, MSG_ID_DISCONNECT, MSG_ID_SIGNALREADEVENT, @@ -149,9 +148,6 @@ int VirtualSocket::Bind(const SocketAddress& addr) { } else { bound_ = true; was_any_ = addr.IsAnyIP(); - // Post a message here 
such that test case could have chance to - // process the local address. (i.e. SetAlternativeLocalAddress). - server_->msg_queue_->Post(RTC_FROM_HERE, this, MSG_ID_ADDRESS_BOUND); } return result; } @@ -174,7 +170,7 @@ int VirtualSocket::Close() { SocketAddress addr = listen_queue_->front(); // Disconnect listening socket. - server_->Disconnect(server_->LookupBinding(addr)); + server_->Disconnect(addr); listen_queue_->pop_front(); } delete listen_queue_; @@ -182,51 +178,15 @@ int VirtualSocket::Close() { } // Disconnect stream sockets if (CS_CONNECTED == state_) { - // Disconnect remote socket, check if it is a child of a server socket. - VirtualSocket* socket = - server_->LookupConnection(local_addr_, remote_addr_); - if (!socket) { - // Not a server socket child, then see if it is bound. - // TODO(tbd): If this is indeed a server socket that has no - // children this will cause the server socket to be - // closed. This might lead to unexpected results, how to fix this? - socket = server_->LookupBinding(remote_addr_); - } - server_->Disconnect(socket); - - // Remove mapping for both directions. - server_->RemoveConnection(remote_addr_, local_addr_); - server_->RemoveConnection(local_addr_, remote_addr_); + server_->Disconnect(local_addr_, remote_addr_); } // Cancel potential connects - MessageList msgs; - if (server_->msg_queue_) { - server_->msg_queue_->Clear(this, MSG_ID_CONNECT, &msgs); - } - for (MessageList::iterator it = msgs.begin(); it != msgs.end(); ++it) { - RTC_DCHECK(nullptr != it->pdata); - MessageAddress* data = static_cast(it->pdata); - - // Lookup remote side. - VirtualSocket* socket = - server_->LookupConnection(local_addr_, data->addr); - if (socket) { - // Server socket, remote side is a socket retreived by - // accept. Accepted sockets are not bound so we will not - // find it by looking in the bindings table. 
- server_->Disconnect(socket); - server_->RemoveConnection(local_addr_, data->addr); - } else { - server_->Disconnect(server_->LookupBinding(data->addr)); - } - delete data; - } - // Clear incoming packets and disconnect messages - if (server_->msg_queue_) { - server_->msg_queue_->Clear(this); - } + server_->CancelConnects(this); } + // Clear incoming packets and disconnect messages + server_->Clear(this); + state_ = CS_CLOSED; local_addr_.Clear(); remote_addr_.Clear(); @@ -278,9 +238,7 @@ int VirtualSocket::RecvFrom(void* pv, return -1; } while (recv_buffer_.empty()) { - Message msg; - server_->msg_queue_->Get(&msg); - server_->msg_queue_->Dispatch(&msg); + server_->ProcessOneMessage(); } } @@ -300,18 +258,14 @@ int VirtualSocket::RecvFrom(void* pv, // To behave like a real socket, SignalReadEvent should fire in the next // message loop pass if there's still data buffered. if (!recv_buffer_.empty()) { - // Clear the message so it doesn't end up posted multiple times. - server_->msg_queue_->Clear(this, MSG_ID_SIGNALREADEVENT); - server_->msg_queue_->Post(RTC_FROM_HERE, this, MSG_ID_SIGNALREADEVENT); + server_->PostSignalReadEvent(this); } if (SOCK_STREAM == type_) { - bool was_full = (recv_buffer_size_ == server_->recv_buffer_capacity_); + bool was_full = (recv_buffer_size_ == server_->recv_buffer_capacity()); recv_buffer_size_ -= data_read; if (was_full) { - VirtualSocket* sender = server_->LookupBinding(remote_addr_); - RTC_DCHECK(nullptr != sender); - server_->SendTcp(sender); + server_->SendTcp(remote_addr_); } } @@ -409,7 +363,7 @@ void VirtualSocket::OnMessage(Message* pmsg) { } else { RTC_LOG(LS_VERBOSE) << "Socket at " << local_addr_.ToString() << " is not listening"; - server_->Disconnect(server_->LookupBinding(data->addr)); + server_->Disconnect(data->addr); } delete data; } else if (pmsg->message_id == MSG_ID_DISCONNECT) { @@ -422,8 +376,6 @@ void VirtualSocket::OnMessage(Message* pmsg) { SignalCloseEvent(this, error); } } - } else if (pmsg->message_id 
== MSG_ID_ADDRESS_BOUND) { - SignalAddressReady(this, GetLocalAddress()); } else if (pmsg->message_id == MSG_ID_SIGNALREADEVENT) { if (!recv_buffer_.empty()) { SignalReadEvent(this); @@ -493,7 +445,7 @@ int VirtualSocket::SendUdp(const void* pv, } int VirtualSocket::SendTcp(const void* pv, size_t cb) { - size_t capacity = server_->send_buffer_capacity_ - send_buffer_.size(); + size_t capacity = server_->send_buffer_capacity() - send_buffer_.size(); if (0 == capacity) { ready_to_send_ = false; error_ = EWOULDBLOCK; @@ -522,6 +474,67 @@ void VirtualSocket::OnSocketServerReadyToSend() { } } +void VirtualSocket::SetToBlocked() { + CritScope cs(&crit_); + ready_to_send_ = false; + error_ = EWOULDBLOCK; +} + +void VirtualSocket::UpdateRecv(size_t data_size) { + recv_buffer_size_ += data_size; +} + +void VirtualSocket::UpdateSend(size_t data_size) { + size_t new_buffer_size = send_buffer_.size() - data_size; + // Avoid undefined access beyond the last element of the vector. + // This only happens when new_buffer_size is 0. + if (data_size < send_buffer_.size()) { + // memmove is required for potentially overlapping source/destination. 
+ memmove(&send_buffer_[0], &send_buffer_[data_size], new_buffer_size); + } + send_buffer_.resize(new_buffer_size); +} + +void VirtualSocket::MaybeSignalWriteEvent(size_t capacity) { + if (!ready_to_send_ && (send_buffer_.size() < capacity)) { + ready_to_send_ = true; + SignalWriteEvent(this); + } +} + +uint32_t VirtualSocket::AddPacket(int64_t cur_time, size_t packet_size) { + network_size_ += packet_size; + uint32_t send_delay = + server_->SendDelay(static_cast(network_size_)); + + NetworkEntry entry; + entry.size = packet_size; + entry.done_time = cur_time + send_delay; + network_.push_back(entry); + + return send_delay; +} + +int64_t VirtualSocket::UpdateOrderedDelivery(int64_t ts) { + // Ensure that new packets arrive after previous ones + ts = std::max(ts, last_delivery_time_); + // A socket should not have both ordered and unordered delivery, so its last + // delivery time only needs to be updated when it has ordered delivery. + last_delivery_time_ = ts; + return ts; +} + +size_t VirtualSocket::PurgeNetworkPackets(int64_t cur_time) { + CritScope cs(&crit_); + + while (!network_.empty() && (network_.front().done_time <= cur_time)) { + RTC_DCHECK(network_size_ >= network_.front().size); + network_size_ -= network_.front().size; + network_.pop_front(); + } + return network_size_; +} + VirtualSocketServer::VirtualSocketServer() : VirtualSocketServer(nullptr) {} VirtualSocketServer::VirtualSocketServer(ThreadProcessingFakeClock* fake_clock) @@ -595,17 +608,11 @@ AsyncSocket* VirtualSocketServer::CreateAsyncSocket(int family, int type) { } VirtualSocket* VirtualSocketServer::CreateSocketInternal(int family, int type) { - VirtualSocket* socket = new VirtualSocket(this, family, type, true); - SignalSocketCreated(socket); - return socket; + return new VirtualSocket(this, family, type, true); } void VirtualSocketServer::SetMessageQueue(Thread* msg_queue) { msg_queue_ = msg_queue; - if (msg_queue_) { - msg_queue_->SignalQueueDestroyed.connect( - this, 
&VirtualSocketServer::OnMessageQueueDestroyed); - } } bool VirtualSocketServer::Wait(int cmsWait, bool process_io) { @@ -813,19 +820,98 @@ bool VirtualSocketServer::Disconnect(VirtualSocket* socket) { return false; } +bool VirtualSocketServer::Disconnect(const SocketAddress& addr) { + return Disconnect(LookupBinding(addr)); +} + +bool VirtualSocketServer::Disconnect(const SocketAddress& local_addr, + const SocketAddress& remote_addr) { + // Disconnect remote socket, check if it is a child of a server socket. + VirtualSocket* socket = LookupConnection(local_addr, remote_addr); + if (!socket) { + // Not a server socket child, then see if it is bound. + // TODO(tbd): If this is indeed a server socket that has no + // children this will cause the server socket to be + // closed. This might lead to unexpected results, how to fix this? + socket = LookupBinding(remote_addr); + } + Disconnect(socket); + + // Remove mapping for both directions. + RemoveConnection(remote_addr, local_addr); + RemoveConnection(local_addr, remote_addr); + return socket != nullptr; +} + +void VirtualSocketServer::CancelConnects(VirtualSocket* socket) { + MessageList msgs; + if (msg_queue_) { + msg_queue_->Clear(socket, MSG_ID_CONNECT, &msgs); + } + for (MessageList::iterator it = msgs.begin(); it != msgs.end(); ++it) { + RTC_DCHECK(nullptr != it->pdata); + MessageAddress* data = static_cast(it->pdata); + SocketAddress local_addr = socket->GetLocalAddress(); + // Lookup remote side. + VirtualSocket* socket = LookupConnection(local_addr, data->addr); + if (socket) { + // Server socket, remote side is a socket retreived by + // accept. Accepted sockets are not bound so we will not + // find it by looking in the bindings table. 
+ Disconnect(socket); + RemoveConnection(local_addr, data->addr); + } else { + Disconnect(data->addr); + } + delete data; + } +} + +void VirtualSocketServer::Clear(VirtualSocket* socket) { + // Clear incoming packets and disconnect messages + if (msg_queue_) { + msg_queue_->Clear(socket); + } +} + +void VirtualSocketServer::ProcessOneMessage() { + Message msg; + msg_queue_->Get(&msg); + msg_queue_->Dispatch(&msg); +} + +void VirtualSocketServer::PostSignalReadEvent(VirtualSocket* socket) { + // Clear the message so it doesn't end up posted multiple times. + msg_queue_->Clear(socket, MSG_ID_SIGNALREADEVENT); + msg_queue_->Post(RTC_FROM_HERE, socket, MSG_ID_SIGNALREADEVENT); +} + int VirtualSocketServer::SendUdp(VirtualSocket* socket, const char* data, size_t data_size, const SocketAddress& remote_addr) { ++sent_packets_; if (sending_blocked_) { - CritScope cs(&socket->crit_); - socket->ready_to_send_ = false; - socket->error_ = EWOULDBLOCK; + socket->SetToBlocked(); return -1; } + if (data_size > largest_seen_udp_payload_) { + if (data_size > 1000) { + RTC_LOG(LS_VERBOSE) << "Largest UDP seen is " << data_size; + } + largest_seen_udp_payload_ = data_size; + } + // See if we want to drop this packet. + if (data_size > max_udp_payload_) { + RTC_LOG(LS_VERBOSE) << "Dropping too large UDP payload of size " + << data_size << ", UDP payload limit is " + << max_udp_payload_; + // Return as if send was successful; packet disappears. + return data_size; + } + if (Random() < drop_prob_) { RTC_LOG(LS_VERBOSE) << "Dropping packet: bad luck"; return static_cast(data_size); @@ -855,10 +941,8 @@ int VirtualSocketServer::SendUdp(VirtualSocket* socket, } { - CritScope cs(&socket->crit_); - int64_t cur_time = TimeMillis(); - PurgeNetworkPackets(socket, cur_time); + size_t network_size = socket->PurgeNetworkPackets(cur_time); // Determine whether we have enough bandwidth to accept this packet. To do // this, we need to update the send queue. 
Once we know it's current size, @@ -869,7 +953,7 @@ int VirtualSocketServer::SendUdp(VirtualSocket* socket, // simulation of what a normal network would do. size_t packet_size = data_size + UDP_HEADER_SIZE; - if (socket->network_size_ + packet_size > network_capacity_) { + if (network_size + packet_size > network_capacity_) { RTC_LOG(LS_VERBOSE) << "Dropping packet: network capacity exceeded"; return static_cast(data_size); } @@ -897,45 +981,36 @@ void VirtualSocketServer::SendTcp(VirtualSocket* socket) { // Lookup the local/remote pair in the connections table. VirtualSocket* recipient = - LookupConnection(socket->local_addr_, socket->remote_addr_); + LookupConnection(socket->GetLocalAddress(), socket->GetRemoteAddress()); if (!recipient) { RTC_LOG(LS_VERBOSE) << "Sending data to no one."; return; } - CritScope cs(&socket->crit_); - int64_t cur_time = TimeMillis(); - PurgeNetworkPackets(socket, cur_time); + socket->PurgeNetworkPackets(cur_time); while (true) { - size_t available = recv_buffer_capacity_ - recipient->recv_buffer_size_; + size_t available = recv_buffer_capacity_ - recipient->recv_buffer_size(); size_t max_data_size = std::min(available, TCP_MSS - TCP_HEADER_SIZE); - size_t data_size = std::min(socket->send_buffer_.size(), max_data_size); + size_t data_size = std::min(socket->send_buffer_size(), max_data_size); if (0 == data_size) break; - AddPacketToNetwork(socket, recipient, cur_time, &socket->send_buffer_[0], + AddPacketToNetwork(socket, recipient, cur_time, socket->send_buffer_data(), data_size, TCP_HEADER_SIZE, true); - recipient->recv_buffer_size_ += data_size; - - size_t new_buffer_size = socket->send_buffer_.size() - data_size; - // Avoid undefined access beyond the last element of the vector. - // This only happens when new_buffer_size is 0. - if (data_size < socket->send_buffer_.size()) { - // memmove is required for potentially overlapping source/destination. 
- memmove(&socket->send_buffer_[0], &socket->send_buffer_[data_size], - new_buffer_size); - } - socket->send_buffer_.resize(new_buffer_size); + recipient->UpdateRecv(data_size); + socket->UpdateSend(data_size); } - if (!socket->ready_to_send_ && - (socket->send_buffer_.size() < send_buffer_capacity_)) { - socket->ready_to_send_ = true; - socket->SignalWriteEvent(socket); - } + socket->MaybeSignalWriteEvent(send_buffer_capacity_); +} + +void VirtualSocketServer::SendTcp(const SocketAddress& addr) { + VirtualSocket* sender = LookupBinding(addr); + RTC_DCHECK(nullptr != sender); + SendTcp(sender); } void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender, @@ -945,13 +1020,7 @@ void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender, size_t data_size, size_t header_size, bool ordered) { - VirtualSocket::NetworkEntry entry; - entry.size = data_size + header_size; - - sender->network_size_ += entry.size; - uint32_t send_delay = SendDelay(static_cast(sender->network_size_)); - entry.done_time = cur_time + send_delay; - sender->network_.push_back(entry); + uint32_t send_delay = sender->AddPacket(cur_time, data_size + header_size); // Find the delay for crossing the many virtual hops of the network. uint32_t transit_delay = GetTransitDelay(sender); @@ -959,7 +1028,7 @@ void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender, // When the incoming packet is from a binding of the any address, translate it // to the default route here such that the recipient will see the default // route. 
- SocketAddress sender_addr = sender->local_addr_; + SocketAddress sender_addr = sender->GetLocalAddress(); IPAddress default_ip = GetDefaultRoute(sender_addr.ipaddr().family()); if (sender_addr.IsAnyIP() && !IPIsUnspec(default_ip)) { sender_addr.SetIP(default_ip); @@ -970,25 +1039,11 @@ void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender, int64_t ts = TimeAfter(send_delay + transit_delay); if (ordered) { - // Ensure that new packets arrive after previous ones - ts = std::max(ts, sender->last_delivery_time_); - // A socket should not have both ordered and unordered delivery, so its last - // delivery time only needs to be updated when it has ordered delivery. - sender->last_delivery_time_ = ts; + ts = sender->UpdateOrderedDelivery(ts); } msg_queue_->PostAt(RTC_FROM_HERE, ts, recipient, MSG_ID_PACKET, p); } -void VirtualSocketServer::PurgeNetworkPackets(VirtualSocket* socket, - int64_t cur_time) { - while (!socket->network_.empty() && - (socket->network_.front().done_time <= cur_time)) { - RTC_DCHECK(socket->network_size_ >= socket->network_.front().size); - socket->network_size_ -= socket->network_.front().size; - socket->network_.pop_front(); - } -} - uint32_t VirtualSocketServer::SendDelay(uint32_t size) { if (bandwidth_ == 0) return 0; @@ -1018,13 +1073,7 @@ void PrintFunction(std::vector >* f) { #endif // void VirtualSocketServer::UpdateDelayDistribution() { - Function* dist = - CreateDistribution(delay_mean_, delay_stddev_, delay_samples_); - // We take a lock just to make sure we don't leak memory. 
- { - CritScope cs(&delay_crit_); - delay_dist_.reset(dist); - } + delay_dist_ = CreateDistribution(delay_mean_, delay_stddev_, delay_samples_); } static double PI = 4 * atan(1.0); @@ -1043,11 +1092,11 @@ static double Pareto(double x, double min, double k) { } #endif -VirtualSocketServer::Function* VirtualSocketServer::CreateDistribution( - uint32_t mean, - uint32_t stddev, - uint32_t samples) { - Function* f = new Function(); +std::unique_ptr +VirtualSocketServer::CreateDistribution(uint32_t mean, + uint32_t stddev, + uint32_t samples) { + auto f = std::make_unique(); if (0 == stddev) { f->push_back(Point(mean, 1.0)); @@ -1063,7 +1112,7 @@ VirtualSocketServer::Function* VirtualSocketServer::CreateDistribution( f->push_back(Point(x, y)); } } - return Resample(Invert(Accumulate(f)), 0, 1, samples); + return Resample(Invert(Accumulate(std::move(f))), 0, 1, samples); } uint32_t VirtualSocketServer::GetTransitDelay(Socket* socket) { @@ -1092,7 +1141,8 @@ struct FunctionDomainCmp { } }; -VirtualSocketServer::Function* VirtualSocketServer::Accumulate(Function* f) { +std::unique_ptr VirtualSocketServer::Accumulate( + std::unique_ptr f) { RTC_DCHECK(f->size() >= 1); double v = 0; for (Function::size_type i = 0; i < f->size() - 1; ++i) { @@ -1105,7 +1155,8 @@ VirtualSocketServer::Function* VirtualSocketServer::Accumulate(Function* f) { return f; } -VirtualSocketServer::Function* VirtualSocketServer::Invert(Function* f) { +std::unique_ptr VirtualSocketServer::Invert( + std::unique_ptr f) { for (Function::size_type i = 0; i < f->size(); ++i) std::swap((*f)[i].first, (*f)[i].second); @@ -1113,24 +1164,25 @@ VirtualSocketServer::Function* VirtualSocketServer::Invert(Function* f) { return f; } -VirtualSocketServer::Function* VirtualSocketServer::Resample(Function* f, - double x1, - double x2, - uint32_t samples) { - Function* g = new Function(); +std::unique_ptr VirtualSocketServer::Resample( + std::unique_ptr f, + double x1, + double x2, + uint32_t samples) { + auto g = 
std::make_unique(); for (size_t i = 0; i < samples; i++) { double x = x1 + (x2 - x1) * i / (samples - 1); - double y = Evaluate(f, x); + double y = Evaluate(f.get(), x); g->push_back(Point(x, y)); } - delete f; return g; } -double VirtualSocketServer::Evaluate(Function* f, double x) { - Function::iterator iter = absl::c_lower_bound(*f, x, FunctionDomainCmp()); +double VirtualSocketServer::Evaluate(const Function* f, double x) { + Function::const_iterator iter = + absl::c_lower_bound(*f, x, FunctionDomainCmp()); if (iter == f->begin()) { return (*f)[0].second; } else if (iter == f->end()) { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h index f33ebccd3..faf31f007 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h @@ -33,7 +33,7 @@ class SocketAddressPair; // interface can create as many addresses as you want. All of the sockets // created by this network will be able to communicate with one another, unless // they are bound to addresses from incompatible families. -class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { +class VirtualSocketServer : public SocketServer { public: VirtualSocketServer(); // This constructor needs to be used if the test uses a fake clock and @@ -94,6 +94,16 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { drop_prob_ = drop_prob; } + // Controls the maximum UDP payload for the networks simulated + // by this server. Any UDP payload sent that is larger than this will + // be dropped. 
+ size_t max_udp_payload() { return max_udp_payload_; } + void set_max_udp_payload(size_t payload_size) { + max_udp_payload_ = payload_size; + } + + size_t largest_seen_udp_payload() { return largest_seen_udp_payload_; } + // If |blocked| is true, subsequent attempts to send will result in -1 being // returned, with the socket error set to EWOULDBLOCK. // @@ -130,9 +140,9 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { typedef std::pair Point; typedef std::vector Function; - static Function* CreateDistribution(uint32_t mean, - uint32_t stddev, - uint32_t samples); + static std::unique_ptr CreateDistribution(uint32_t mean, + uint32_t stddev, + uint32_t samples); // Similar to Thread::ProcessMessages, but it only processes messages until // there are no immediate messages or pending network traffic. Returns false @@ -151,25 +161,12 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { // socket server. Intended to be used for test assertions. uint32_t sent_packets() const { return sent_packets_; } - // For testing purpose only. Fired when a client socket is created. - sigslot::signal1 SignalSocketCreated; - - protected: - // Returns a new IP not used before in this network. - IPAddress GetNextIP(int family); - uint16_t GetNextPort(); - - VirtualSocket* CreateSocketInternal(int family, int type); - // Binds the given socket to addr, assigning and IP and Port if necessary int Bind(VirtualSocket* socket, SocketAddress* addr); // Binds the given socket to the given (fully-defined) address. int Bind(VirtualSocket* socket, const SocketAddress& addr); - // Find the socket bound to the given address - VirtualSocket* LookupBinding(const SocketAddress& addr); - int Unbind(const SocketAddress& addr, VirtualSocket* socket); // Adds a mapping between this socket pair and the socket. 
@@ -177,13 +174,6 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { const SocketAddress& server, VirtualSocket* socket); - // Find the socket pair corresponding to this server address. - VirtualSocket* LookupConnection(const SocketAddress& client, - const SocketAddress& server); - - void RemoveConnection(const SocketAddress& client, - const SocketAddress& server); - // Connects the given socket to the socket at the given address int Connect(VirtualSocket* socket, const SocketAddress& remote_addr, @@ -192,6 +182,13 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { // Sends a disconnect message to the socket at the given address bool Disconnect(VirtualSocket* socket); + // Lookup address, and disconnect corresponding socket. + bool Disconnect(const SocketAddress& addr); + + // Lookup connection, close corresponding socket. + bool Disconnect(const SocketAddress& local_addr, + const SocketAddress& remote_addr); + // Sends the given packet to the socket at the given address (if one exists). int SendUdp(VirtualSocket* socket, const char* data, @@ -201,6 +198,44 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { // Moves as much data as possible from the sender's buffer to the network void SendTcp(VirtualSocket* socket); + // Like above, but lookup sender by address. + void SendTcp(const SocketAddress& addr); + + // Computes the number of milliseconds required to send a packet of this size. + uint32_t SendDelay(uint32_t size); + + // Cancel attempts to connect to a socket that is being closed. + void CancelConnects(VirtualSocket* socket); + + // Clear incoming messages for a socket that is being closed. + void Clear(VirtualSocket* socket); + + void ProcessOneMessage(); + + void PostSignalReadEvent(VirtualSocket* socket); + + // Sending was previously blocked, but now isn't. 
+ sigslot::signal0<> SignalReadyToSend; + + protected: + // Returns a new IP not used before in this network. + IPAddress GetNextIP(int family); + + // Find the socket bound to the given address + VirtualSocket* LookupBinding(const SocketAddress& addr); + + private: + uint16_t GetNextPort(); + + VirtualSocket* CreateSocketInternal(int family, int type); + + // Find the socket pair corresponding to this server address. + VirtualSocket* LookupConnection(const SocketAddress& client, + const SocketAddress& server); + + void RemoveConnection(const SocketAddress& client, + const SocketAddress& server); + // Places a packet on the network. void AddPacketToNetwork(VirtualSocket* socket, VirtualSocket* recipient, @@ -210,31 +245,19 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { size_t header_size, bool ordered); - // Removes stale packets from the network - void PurgeNetworkPackets(VirtualSocket* socket, int64_t cur_time); - - // Computes the number of milliseconds required to send a packet of this size. - uint32_t SendDelay(uint32_t size); - // If the delay has been set for the address of the socket, returns the set // delay. Otherwise, returns a random transit delay chosen from the // appropriate distribution. uint32_t GetTransitDelay(Socket* socket); - // Basic operations on functions. Those that return a function also take - // ownership of the function given (and hence, may modify or delete it). - static Function* Accumulate(Function* f); - static Function* Invert(Function* f); - static Function* Resample(Function* f, - double x1, - double x2, - uint32_t samples); - static double Evaluate(Function* f, double x); - - // Null out our message queue if it goes away. Necessary in the case where - // our lifetime is greater than that of the thread we are using, since we - // try to send Close messages for all connected sockets when we shutdown. - void OnMessageQueueDestroyed() { msg_queue_ = nullptr; } + // Basic operations on functions. 
+ static std::unique_ptr Accumulate(std::unique_ptr f); + static std::unique_ptr Invert(std::unique_ptr f); + static std::unique_ptr Resample(std::unique_ptr f, + double x1, + double x2, + uint32_t samples); + static double Evaluate(const Function* f, double x); // Determine if two sockets should be able to communicate. // We don't (currently) specify an address family for sockets; instead, @@ -254,12 +277,6 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { // NB: This scheme doesn't permit non-dualstack IPv6 sockets. static bool CanInteractWith(VirtualSocket* local, VirtualSocket* remote); - private: - friend class VirtualSocket; - - // Sending was previously blocked, but now isn't. - sigslot::signal0<> SignalReadyToSend; - typedef std::map AddressMap; typedef std::map ConnectionMap; @@ -295,9 +312,14 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { std::map alternative_address_mapping_; std::unique_ptr delay_dist_; - RecursiveCriticalSection delay_crit_; - double drop_prob_; + // The largest UDP payload permitted on this virtual socket server. + // The default is the max size of IPv4 fragmented UDP packet payload: + // 65535 bytes - 8 bytes UDP header - 20 bytes IP header. + size_t max_udp_payload_ = 65507; + // The largest UDP payload seen so far. + size_t largest_seen_udp_payload_ = 0; + bool sending_blocked_ = false; RTC_DISALLOW_COPY_AND_ASSIGN(VirtualSocketServer); }; @@ -305,7 +327,7 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> { // Implements the socket interface using the virtual network. Packets are // passed as messages using the message queue of the socket server. 
class VirtualSocket : public AsyncSocket, - public MessageHandlerAutoCleanup, + public MessageHandler, public sigslot::has_slots<> { public: VirtualSocket(VirtualSocketServer* server, int family, int type, bool async); @@ -334,11 +356,30 @@ class VirtualSocket : public AsyncSocket, int SetOption(Option opt, int value) override; void OnMessage(Message* pmsg) override; + size_t recv_buffer_size() const { return recv_buffer_size_; } + size_t send_buffer_size() const { return send_buffer_.size(); } + const char* send_buffer_data() const { return send_buffer_.data(); } + + // Used by server sockets to set the local address without binding. + void SetLocalAddress(const SocketAddress& addr); + bool was_any() { return was_any_; } void set_was_any(bool was_any) { was_any_ = was_any; } - // For testing purpose only. Fired when client socket is bound to an address. - sigslot::signal2 SignalAddressReady; + void SetToBlocked(); + + void UpdateRecv(size_t data_size); + void UpdateSend(size_t data_size); + + void MaybeSignalWriteEvent(size_t capacity); + + // Adds a packet to be sent. Returns delay, based on network_size_. + uint32_t AddPacket(int64_t cur_time, size_t packet_size); + + int64_t UpdateOrderedDelivery(int64_t ts); + + // Removes stale packets from the network. Returns current size. + size_t PurgeNetworkPackets(int64_t cur_time); private: struct NetworkEntry { @@ -357,9 +398,6 @@ class VirtualSocket : public AsyncSocket, int SendUdp(const void* pv, size_t cb, const SocketAddress& addr); int SendTcp(const void* pv, size_t cb); - // Used by server sockets to set the local address without binding. 
- void SetLocalAddress(const SocketAddress& addr); - void OnSocketServerReadyToSend(); VirtualSocketServer* server_; @@ -405,8 +443,6 @@ class VirtualSocket : public AsyncSocket, // Store the options that are set OptionsMap options_map_; - - friend class VirtualSocketServer; }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/weak_ptr.h b/TMessagesProj/jni/voip/webrtc/rtc_base/weak_ptr.h index 8b2ba099c..a9e6b3a99 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/weak_ptr.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/weak_ptr.h @@ -15,9 +15,10 @@ #include #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counted_object.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" // The implementation is borrowed from chromium except that it does not // implement SupportsWeakPtr. @@ -103,7 +104,7 @@ class WeakReference { ~Flag() override; - ::webrtc::SequenceChecker checker_; + RTC_NO_UNIQUE_ADDRESS ::webrtc::SequenceChecker checker_; bool is_valid_; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.h b/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.h index 801f39d31..08f602f0c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/get_activation_factory.h @@ -40,8 +40,10 @@ HRESULT GetActivationFactory(InterfaceType** factory) { return hr; hr = RoGetActivationFactoryProxy(class_id_hstring, IID_PPV_ARGS(factory)); - if (FAILED(hr)) + if (FAILED(hr)) { + DeleteHstring(class_id_hstring); return hr; + } return DeleteHstring(class_id_hstring); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.cc index b83ad32a6..81079fb54 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.cc +++ 
b/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.cc @@ -10,6 +10,9 @@ #include "rtc_base/win/scoped_com_initializer.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + namespace webrtc { ScopedCOMInitializer::ScopedCOMInitializer() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.h index 918812fc7..2427097b5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/scoped_com_initializer.h @@ -13,8 +13,6 @@ #include -#include "rtc_base/logging.h" - namespace webrtc { // Initializes COM in the constructor (STA or MTA), and uninitializes COM in the diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/OWNERS b/TMessagesProj/jni/voip/webrtc/sdk/android/OWNERS index a9d3a8234..766b79890 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/OWNERS @@ -1,3 +1,8 @@ +# New owners +xalep@webrtc.org +sartorius@webrtc.org + +# Legacy owners magjed@webrtc.org -sakal@webrtc.org +xalep@webrtc.org per-file *Audio*.java=henrika@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h index 4e7d313b9..8c3ed906b 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h @@ -281,6 +281,29 @@ static jint Java_WebRtcAudioTrack_getBufferSizeInFrames(JNIEnv* env, const return ret; } +static std::atomic + g_org_webrtc_audio_WebRtcAudioTrack_getInitialBufferSizeInFrames(nullptr); +static jint Java_WebRtcAudioTrack_getInitialBufferSizeInFrames(JNIEnv* env, const + 
base::android::JavaRef& obj) { + jclass clazz = org_webrtc_audio_WebRtcAudioTrack_clazz(env); + CHECK_CLAZZ(env, obj.obj(), + org_webrtc_audio_WebRtcAudioTrack_clazz(env), 0); + + jni_generator::JniJavaCallContextChecked call_context; + call_context.Init< + base::android::MethodID::TYPE_INSTANCE>( + env, + clazz, + "getInitialBufferSizeInFrames", + "()I", + &g_org_webrtc_audio_WebRtcAudioTrack_getInitialBufferSizeInFrames); + + jint ret = + env->CallIntMethod(obj.obj(), + call_context.base.method_id); + return ret; +} + } // namespace jni } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_libaom_av1_jni/LibaomAv1Decoder_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_libaom_av1_jni/LibaomAv1Decoder_jni.h new file mode 100644 index 000000000..55c058bb0 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_libaom_av1_jni/LibaomAv1Decoder_jni.h @@ -0,0 +1,64 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + + +// This file is autogenerated by +// base/android/jni_generator/jni_generator.py +// For +// org/webrtc/LibaomAv1Decoder + +#ifndef org_webrtc_LibaomAv1Decoder_JNI +#define org_webrtc_LibaomAv1Decoder_JNI + +#include + +#include "webrtc/sdk/android/src/jni/jni_generator_helper.h" + + +// Step 1: Forward declarations. + +JNI_REGISTRATION_EXPORT extern const char kClassPath_org_webrtc_LibaomAv1Decoder[]; +const char kClassPath_org_webrtc_LibaomAv1Decoder[] = "org/webrtc/LibaomAv1Decoder"; +// Leaking this jclass as we cannot use LazyInstance from some threads. 
+JNI_REGISTRATION_EXPORT std::atomic g_org_webrtc_LibaomAv1Decoder_clazz(nullptr); +#ifndef org_webrtc_LibaomAv1Decoder_clazz_defined +#define org_webrtc_LibaomAv1Decoder_clazz_defined +inline jclass org_webrtc_LibaomAv1Decoder_clazz(JNIEnv* env) { + return base::android::LazyGetClass(env, kClassPath_org_webrtc_LibaomAv1Decoder, + &g_org_webrtc_LibaomAv1Decoder_clazz); +} +#endif + + +// Step 2: Constants (optional). + + +// Step 3: Method stubs. +namespace webrtc { +namespace jni { + +static jlong JNI_LibaomAv1Decoder_CreateDecoder(JNIEnv* env); + +JNI_GENERATOR_EXPORT jlong Java_org_webrtc_LibaomAv1Decoder_nativeCreateDecoder( + JNIEnv* env, + jclass jcaller) { + return JNI_LibaomAv1Decoder_CreateDecoder(env); +} + +static jboolean JNI_LibaomAv1Decoder_IsSupported(JNIEnv* env); + +JNI_GENERATOR_EXPORT jboolean Java_org_webrtc_LibaomAv1Decoder_nativeIsSupported( + JNIEnv* env, + jclass jcaller) { + return JNI_LibaomAv1Decoder_IsSupported(env); +} + + +} // namespace jni +} // namespace webrtc + +// Step 4: Generated test functions (optional). + + +#endif // org_webrtc_LibaomAv1Decoder_JNI diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_libaom_av1_jni/LibaomAv1Encoder_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_libaom_av1_jni/LibaomAv1Encoder_jni.h new file mode 100644 index 000000000..cb61009a8 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_libaom_av1_jni/LibaomAv1Encoder_jni.h @@ -0,0 +1,64 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + + +// This file is autogenerated by +// base/android/jni_generator/jni_generator.py +// For +// org/webrtc/LibaomAv1Encoder + +#ifndef org_webrtc_LibaomAv1Encoder_JNI +#define org_webrtc_LibaomAv1Encoder_JNI + +#include + +#include "webrtc/sdk/android/src/jni/jni_generator_helper.h" + + +// Step 1: Forward declarations. 
+ +JNI_REGISTRATION_EXPORT extern const char kClassPath_org_webrtc_LibaomAv1Encoder[]; +const char kClassPath_org_webrtc_LibaomAv1Encoder[] = "org/webrtc/LibaomAv1Encoder"; +// Leaking this jclass as we cannot use LazyInstance from some threads. +JNI_REGISTRATION_EXPORT std::atomic g_org_webrtc_LibaomAv1Encoder_clazz(nullptr); +#ifndef org_webrtc_LibaomAv1Encoder_clazz_defined +#define org_webrtc_LibaomAv1Encoder_clazz_defined +inline jclass org_webrtc_LibaomAv1Encoder_clazz(JNIEnv* env) { + return base::android::LazyGetClass(env, kClassPath_org_webrtc_LibaomAv1Encoder, + &g_org_webrtc_LibaomAv1Encoder_clazz); +} +#endif + + +// Step 2: Constants (optional). + + +// Step 3: Method stubs. +namespace webrtc { +namespace jni { + +static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* env); + +JNI_GENERATOR_EXPORT jlong Java_org_webrtc_LibaomAv1Encoder_nativeCreateEncoder( + JNIEnv* env, + jclass jcaller) { + return JNI_LibaomAv1Encoder_CreateEncoder(env); +} + +static jboolean JNI_LibaomAv1Encoder_IsSupported(JNIEnv* env); + +JNI_GENERATOR_EXPORT jboolean Java_org_webrtc_LibaomAv1Encoder_nativeIsSupported( + JNIEnv* env, + jclass jcaller) { + return JNI_LibaomAv1Encoder_IsSupported(env); +} + + +} // namespace jni +} // namespace webrtc + +// Step 4: Generated test functions (optional). + + +#endif // org_webrtc_LibaomAv1Encoder_JNI diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/AddIceObserver_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/AddIceObserver_jni.h new file mode 100644 index 000000000..32d01dcaf --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/AddIceObserver_jni.h @@ -0,0 +1,88 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ + +// This file is autogenerated by +// base/android/jni_generator/jni_generator.py +// For +// org/webrtc/AddIceObserver + +#ifndef org_webrtc_AddIceObserver_JNI +#define org_webrtc_AddIceObserver_JNI + +#include + +#include "webrtc/sdk/android/src/jni/jni_generator_helper.h" + + +// Step 1: Forward declarations. + +JNI_REGISTRATION_EXPORT extern const char kClassPath_org_webrtc_AddIceObserver[]; +const char kClassPath_org_webrtc_AddIceObserver[] = "org/webrtc/AddIceObserver"; +// Leaking this jclass as we cannot use LazyInstance from some threads. +JNI_REGISTRATION_EXPORT std::atomic g_org_webrtc_AddIceObserver_clazz(nullptr); +#ifndef org_webrtc_AddIceObserver_clazz_defined +#define org_webrtc_AddIceObserver_clazz_defined +inline jclass org_webrtc_AddIceObserver_clazz(JNIEnv* env) { + return base::android::LazyGetClass(env, kClassPath_org_webrtc_AddIceObserver, + &g_org_webrtc_AddIceObserver_clazz); +} +#endif + + +// Step 2: Constants (optional). + + +// Step 3: Method stubs. +namespace webrtc { +namespace jni { + + +static std::atomic g_org_webrtc_AddIceObserver_onAddSuccess(nullptr); +static void Java_AddIceObserver_onAddSuccess(JNIEnv* env, const base::android::JavaRef& + obj) { + jclass clazz = org_webrtc_AddIceObserver_clazz(env); + CHECK_CLAZZ(env, obj.obj(), + org_webrtc_AddIceObserver_clazz(env)); + + jni_generator::JniJavaCallContextChecked call_context; + call_context.Init< + base::android::MethodID::TYPE_INSTANCE>( + env, + clazz, + "onAddSuccess", + "()V", + &g_org_webrtc_AddIceObserver_onAddSuccess); + + env->CallVoidMethod(obj.obj(), + call_context.base.method_id); +} + +static std::atomic g_org_webrtc_AddIceObserver_onAddFailure(nullptr); +static void Java_AddIceObserver_onAddFailure(JNIEnv* env, const base::android::JavaRef& + obj, const base::android::JavaRef& error) { + jclass clazz = org_webrtc_AddIceObserver_clazz(env); + CHECK_CLAZZ(env, obj.obj(), + org_webrtc_AddIceObserver_clazz(env)); + + jni_generator::JniJavaCallContextChecked 
call_context; + call_context.Init< + base::android::MethodID::TYPE_INSTANCE>( + env, + clazz, + "onAddFailure", + "(Ljava/lang/String;)V", + &g_org_webrtc_AddIceObserver_onAddFailure); + + env->CallVoidMethod(obj.obj(), + call_context.base.method_id, error.obj()); +} + +} // namespace jni +} // namespace webrtc + +// Step 4: Generated test functions (optional). + + +#endif // org_webrtc_AddIceObserver_JNI diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/PeerConnection_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/PeerConnection_jni.h index 9e2a72497..3b90c932b 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/PeerConnection_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/PeerConnection_jni.h @@ -244,6 +244,19 @@ JNI_GENERATOR_EXPORT void Java_org_webrtc_PeerConnection_nativeCreateAnswer( constraints)); } +static void JNI_PeerConnection_SetLocalDescriptionAutomatically(JNIEnv* env, const + base::android::JavaParamRef& jcaller, + const base::android::JavaParamRef& observer); + +JNI_GENERATOR_EXPORT void Java_org_webrtc_PeerConnection_nativeSetLocalDescriptionAutomatically( + JNIEnv* env, + jobject jcaller, + jobject observer) { + return JNI_PeerConnection_SetLocalDescriptionAutomatically(env, + base::android::JavaParamRef(env, jcaller), base::android::JavaParamRef(env, + observer)); +} + static void JNI_PeerConnection_SetLocalDescription(JNIEnv* env, const base::android::JavaParamRef& jcaller, const base::android::JavaParamRef& observer, @@ -274,6 +287,15 @@ JNI_GENERATOR_EXPORT void Java_org_webrtc_PeerConnection_nativeSetRemoteDescript base::android::JavaParamRef(env, sdp)); } +static void JNI_PeerConnection_RestartIce(JNIEnv* env, const base::android::JavaParamRef& + jcaller); + +JNI_GENERATOR_EXPORT void Java_org_webrtc_PeerConnection_nativeRestartIce( + JNIEnv* env, + jobject jcaller) { + return 
JNI_PeerConnection_RestartIce(env, base::android::JavaParamRef(env, jcaller)); +} + static void JNI_PeerConnection_SetAudioPlayout(JNIEnv* env, const base::android::JavaParamRef& jcaller, jboolean playout); @@ -413,6 +435,26 @@ JNI_GENERATOR_EXPORT jboolean Java_org_webrtc_PeerConnection_nativeAddIceCandida base::android::JavaParamRef(env, iceCandidateSdp)); } +static void JNI_PeerConnection_AddIceCandidateWithObserver(JNIEnv* env, const + base::android::JavaParamRef& jcaller, + const base::android::JavaParamRef& sdpMid, + jint sdpMLineIndex, + const base::android::JavaParamRef& iceCandidateSdp, + const base::android::JavaParamRef& observer); + +JNI_GENERATOR_EXPORT void Java_org_webrtc_PeerConnection_nativeAddIceCandidateWithObserver( + JNIEnv* env, + jobject jcaller, + jstring sdpMid, + jint sdpMLineIndex, + jstring iceCandidateSdp, + jobject observer) { + return JNI_PeerConnection_AddIceCandidateWithObserver(env, + base::android::JavaParamRef(env, jcaller), base::android::JavaParamRef(env, + sdpMid), sdpMLineIndex, base::android::JavaParamRef(env, iceCandidateSdp), + base::android::JavaParamRef(env, observer)); +} + static jboolean JNI_PeerConnection_RemoveIceCandidates(JNIEnv* env, const base::android::JavaParamRef& jcaller, const base::android::JavaParamRef& candidates); @@ -1739,6 +1781,30 @@ static base::android::ScopedJavaLocalRef return base::android::ScopedJavaLocalRef(env, ret); } +static std::atomic + g_org_webrtc_PeerConnection_00024RTCConfiguration_getStableWritableConnectionPingIntervalMs(nullptr); +static base::android::ScopedJavaLocalRef + Java_RTCConfiguration_getStableWritableConnectionPingIntervalMs(JNIEnv* env, const + base::android::JavaRef& obj) { + jclass clazz = org_webrtc_PeerConnection_00024RTCConfiguration_clazz(env); + CHECK_CLAZZ(env, obj.obj(), + org_webrtc_PeerConnection_00024RTCConfiguration_clazz(env), NULL); + + jni_generator::JniJavaCallContextChecked call_context; + call_context.Init< + base::android::MethodID::TYPE_INSTANCE>( + 
env, + clazz, + "getStableWritableConnectionPingIntervalMs", + "()Ljava/lang/Integer;", +&g_org_webrtc_PeerConnection_00024RTCConfiguration_getStableWritableConnectionPingIntervalMs); + + jobject ret = + env->CallObjectMethod(obj.obj(), + call_context.base.method_id); + return base::android::ScopedJavaLocalRef(env, ret); +} + static std::atomic g_org_webrtc_PeerConnection_00024RTCConfiguration_getDisableIPv6OnWifi(nullptr); static jboolean Java_RTCConfiguration_getDisableIPv6OnWifi(JNIEnv* env, const @@ -1877,29 +1943,6 @@ static jboolean Java_RTCConfiguration_getEnableCpuOveruseDetection(JNIEnv* env, return ret; } -static std::atomic - g_org_webrtc_PeerConnection_00024RTCConfiguration_getEnableRtpDataChannel(nullptr); -static jboolean Java_RTCConfiguration_getEnableRtpDataChannel(JNIEnv* env, const - base::android::JavaRef& obj) { - jclass clazz = org_webrtc_PeerConnection_00024RTCConfiguration_clazz(env); - CHECK_CLAZZ(env, obj.obj(), - org_webrtc_PeerConnection_00024RTCConfiguration_clazz(env), false); - - jni_generator::JniJavaCallContextChecked call_context; - call_context.Init< - base::android::MethodID::TYPE_INSTANCE>( - env, - clazz, - "getEnableRtpDataChannel", - "()Z", - &g_org_webrtc_PeerConnection_00024RTCConfiguration_getEnableRtpDataChannel); - - jboolean ret = - env->CallBooleanMethod(obj.obj(), - call_context.base.method_id); - return ret; -} - static std::atomic g_org_webrtc_PeerConnection_00024RTCConfiguration_getSuspendBelowMinBitrate(nullptr); static jboolean Java_RTCConfiguration_getSuspendBelowMinBitrate(JNIEnv* env, const @@ -2133,6 +2176,52 @@ static base::android::ScopedJavaLocalRef Java_RTCConfiguration_getTurnL return base::android::ScopedJavaLocalRef(env, ret); } +static std::atomic + g_org_webrtc_PeerConnection_00024RTCConfiguration_getEnableImplicitRollback(nullptr); +static jboolean Java_RTCConfiguration_getEnableImplicitRollback(JNIEnv* env, const + base::android::JavaRef& obj) { + jclass clazz = 
org_webrtc_PeerConnection_00024RTCConfiguration_clazz(env); + CHECK_CLAZZ(env, obj.obj(), + org_webrtc_PeerConnection_00024RTCConfiguration_clazz(env), false); + + jni_generator::JniJavaCallContextChecked call_context; + call_context.Init< + base::android::MethodID::TYPE_INSTANCE>( + env, + clazz, + "getEnableImplicitRollback", + "()Z", + &g_org_webrtc_PeerConnection_00024RTCConfiguration_getEnableImplicitRollback); + + jboolean ret = + env->CallBooleanMethod(obj.obj(), + call_context.base.method_id); + return ret; +} + +static std::atomic + g_org_webrtc_PeerConnection_00024RTCConfiguration_getOfferExtmapAllowMixed(nullptr); +static jboolean Java_RTCConfiguration_getOfferExtmapAllowMixed(JNIEnv* env, const + base::android::JavaRef& obj) { + jclass clazz = org_webrtc_PeerConnection_00024RTCConfiguration_clazz(env); + CHECK_CLAZZ(env, obj.obj(), + org_webrtc_PeerConnection_00024RTCConfiguration_clazz(env), false); + + jni_generator::JniJavaCallContextChecked call_context; + call_context.Init< + base::android::MethodID::TYPE_INSTANCE>( + env, + clazz, + "getOfferExtmapAllowMixed", + "()Z", + &g_org_webrtc_PeerConnection_00024RTCConfiguration_getOfferExtmapAllowMixed); + + jboolean ret = + env->CallBooleanMethod(obj.obj(), + call_context.base.method_id); + return ret; +} + static std::atomic g_org_webrtc_PeerConnection_getNativeOwnedPeerConnection(nullptr); static jlong Java_PeerConnection_getNativeOwnedPeerConnection(JNIEnv* env, const base::android::JavaRef& obj) { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/RtpParameters_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/RtpParameters_jni.h index 4a1eee3dd..96bdf4cea 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/RtpParameters_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_peerconnection_jni/RtpParameters_jni.h @@ -141,7 +141,8 @@ static base::android::ScopedJavaLocalRef 
Java_Encoding_Constructor(JNIE const base::android::JavaRef& maxFramerate, const base::android::JavaRef& numTemporalLayers, const base::android::JavaRef& scaleResolutionDownBy, - const base::android::JavaRef& ssrc) { + const base::android::JavaRef& ssrc, + jboolean adaptiveAudioPacketTime) { jclass clazz = org_webrtc_RtpParameters_00024Encoding_clazz(env); CHECK_CLAZZ(env, clazz, org_webrtc_RtpParameters_00024Encoding_clazz(env), NULL); @@ -152,14 +153,14 @@ static base::android::ScopedJavaLocalRef Java_Encoding_Constructor(JNIE env, clazz, "", -"(Ljava/lang/String;ZDILjava/lang/Integer;Ljava/lang/Integer;Ljava/lang/Integer;Ljava/lang/Integer;Ljava/lang/Double;Ljava/lang/Long;)V", +"(Ljava/lang/String;ZDILjava/lang/Integer;Ljava/lang/Integer;Ljava/lang/Integer;Ljava/lang/Integer;Ljava/lang/Double;Ljava/lang/Long;Z)V", &g_org_webrtc_RtpParameters_00024Encoding_Constructor); jobject ret = env->NewObject(clazz, call_context.base.method_id, rid.obj(), active, bitratePriority, as_jint(networkPriority), maxBitrateBps.obj(), minBitrateBps.obj(), maxFramerate.obj(), numTemporalLayers.obj(), - scaleResolutionDownBy.obj(), ssrc.obj()); + scaleResolutionDownBy.obj(), ssrc.obj(), adaptiveAudioPacketTime); return base::android::ScopedJavaLocalRef(env, ret); } @@ -384,6 +385,28 @@ static base::android::ScopedJavaLocalRef Java_Encoding_getSsrc(JNIEnv* return base::android::ScopedJavaLocalRef(env, ret); } +static std::atomic g_org_webrtc_RtpParameters_00024Encoding_getAdaptivePTime(nullptr); +static jboolean Java_Encoding_getAdaptivePTime(JNIEnv* env, const base::android::JavaRef& + obj) { + jclass clazz = org_webrtc_RtpParameters_00024Encoding_clazz(env); + CHECK_CLAZZ(env, obj.obj(), + org_webrtc_RtpParameters_00024Encoding_clazz(env), false); + + jni_generator::JniJavaCallContextChecked call_context; + call_context.Init< + base::android::MethodID::TYPE_INSTANCE>( + env, + clazz, + "getAdaptivePTime", + "()Z", + &g_org_webrtc_RtpParameters_00024Encoding_getAdaptivePTime); + + 
jboolean ret = + env->CallBooleanMethod(obj.obj(), + call_context.base.method_id); + return ret; +} + static std::atomic g_org_webrtc_RtpParameters_00024Codec_Constructor(nullptr); static base::android::ScopedJavaLocalRef Java_Codec_Constructor(JNIEnv* env, JniIntWrapper payloadType, diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_egl_jni/EglBase10Impl_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_egl_jni/EglBase10Impl_jni.h new file mode 100644 index 000000000..eab7c1b53 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_egl_jni/EglBase10Impl_jni.h @@ -0,0 +1,56 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + + +// This file is autogenerated by +// base/android/jni_generator/jni_generator.py +// For +// org/webrtc/EglBase10Impl + +#ifndef org_webrtc_EglBase10Impl_JNI +#define org_webrtc_EglBase10Impl_JNI + +#include + +#include "webrtc/sdk/android/src/jni/jni_generator_helper.h" + + +// Step 1: Forward declarations. + +JNI_REGISTRATION_EXPORT extern const char kClassPath_org_webrtc_EglBase10Impl[]; +const char kClassPath_org_webrtc_EglBase10Impl[] = "org/webrtc/EglBase10Impl"; +// Leaking this jclass as we cannot use LazyInstance from some threads. +JNI_REGISTRATION_EXPORT std::atomic g_org_webrtc_EglBase10Impl_clazz(nullptr); +#ifndef org_webrtc_EglBase10Impl_clazz_defined +#define org_webrtc_EglBase10Impl_clazz_defined +inline jclass org_webrtc_EglBase10Impl_clazz(JNIEnv* env) { + return base::android::LazyGetClass(env, kClassPath_org_webrtc_EglBase10Impl, + &g_org_webrtc_EglBase10Impl_clazz); +} +#endif + + +// Step 2: Constants (optional). + + +// Step 3: Method stubs. 
+namespace webrtc { +namespace jni { + +static jlong JNI_EglBase10Impl_GetCurrentNativeEGLContext(JNIEnv* env); + +JNI_GENERATOR_EXPORT jlong Java_org_webrtc_EglBase10Impl_nativeGetCurrentNativeEGLContext( + JNIEnv* env, + jclass jcaller) { + return JNI_EglBase10Impl_GetCurrentNativeEGLContext(env); +} + + +} // namespace jni +} // namespace webrtc + +// Step 4: Generated test functions (optional). + + +#endif // org_webrtc_EglBase10Impl_JNI diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_jni/VideoDecoder_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_jni/VideoDecoder_jni.h index 06490f508..20ea81e96 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_jni/VideoDecoder_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_video_jni/VideoDecoder_jni.h @@ -165,28 +165,6 @@ static base::android::ScopedJavaLocalRef Java_VideoDecoder_decode(JNIEn return base::android::ScopedJavaLocalRef(env, ret); } -static std::atomic g_org_webrtc_VideoDecoder_getPrefersLateDecoding(nullptr); -static jboolean Java_VideoDecoder_getPrefersLateDecoding(JNIEnv* env, const - base::android::JavaRef& obj) { - jclass clazz = org_webrtc_VideoDecoder_clazz(env); - CHECK_CLAZZ(env, obj.obj(), - org_webrtc_VideoDecoder_clazz(env), false); - - jni_generator::JniJavaCallContextChecked call_context; - call_context.Init< - base::android::MethodID::TYPE_INSTANCE>( - env, - clazz, - "getPrefersLateDecoding", - "()Z", - &g_org_webrtc_VideoDecoder_getPrefersLateDecoding); - - jboolean ret = - env->CallBooleanMethod(obj.obj(), - call_context.base.method_id); - return ret; -} - static std::atomic g_org_webrtc_VideoDecoder_getImplementationName(nullptr); static base::android::ScopedJavaLocalRef Java_VideoDecoder_getImplementationName(JNIEnv* env, const base::android::JavaRef& obj) { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc 
b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc index 16a3643ae..8a57e4af9 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/audio_device_module/audio_device_android.cc @@ -57,7 +57,7 @@ void GetDefaultAudioParameters(JNIEnv* env, rtc::scoped_refptr CreateAAudioAudioDeviceModule( JNIEnv* env, jobject application_context) { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; // Get default audio input/output parameters. AudioParameters input_parameters; AudioParameters output_parameters; @@ -76,7 +76,7 @@ rtc::scoped_refptr CreateAAudioAudioDeviceModule( rtc::scoped_refptr CreateJavaAudioDeviceModule( JNIEnv* env, jobject application_context) { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; // Get default audio input/output parameters. const JavaParamRef j_context(application_context); const ScopedJavaLocalRef j_audio_manager = @@ -104,7 +104,7 @@ rtc::scoped_refptr CreateJavaAudioDeviceModule( rtc::scoped_refptr CreateOpenSLESAudioDeviceModule( JNIEnv* env, jobject application_context) { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; // Get default audio input/output parameters. AudioParameters input_parameters; AudioParameters output_parameters; @@ -127,7 +127,7 @@ rtc::scoped_refptr CreateOpenSLESAudioDeviceModule( rtc::scoped_refptr CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env, jobject application_context) { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; // Get default audio input/output parameters. 
const JavaParamRef j_context(application_context); const ScopedJavaLocalRef j_audio_manager = diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/java_types.h b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/java_types.h index 26fdd5a0b..a1639d647 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/java_types.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/jni/java_types.h @@ -26,8 +26,8 @@ #include "absl/types/optional.h" #include "api/array_view.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/native_api/jni/scoped_java_ref.h" // Abort the process if |jni| has a Java exception pending. @@ -95,7 +95,7 @@ class Iterable { JNIEnv* jni_ = nullptr; ScopedJavaLocalRef iterator_; ScopedJavaLocalRef value_; - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; RTC_DISALLOW_COPY_AND_ASSIGN(Iterator); }; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/video/video_source.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/video/video_source.cc index 1f4bc4dea..4f1409ef7 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/video/video_source.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/native_api/video/video_source.cc @@ -10,6 +10,7 @@ #include "sdk/android/native_api/video/video_source.h" +#include "rtc_base/ref_counted_object.h" #include "sdk/android/src/jni/android_video_track_source.h" #include "sdk/android/src/jni/native_capturer_observer.h" @@ -28,7 +29,7 @@ class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface { bool is_screencast, bool align_timestamps) : android_video_track_source_( - new rtc::RefCountedObject( + rtc::make_ref_counted( signaling_thread, env, is_screencast, @@ -108,7 +109,7 @@ rtc::scoped_refptr CreateJavaVideoSource( rtc::Thread* signaling_thread, bool is_screencast, bool align_timestamps) { - return new rtc::RefCountedObject( + return 
rtc::make_ref_counted( jni, signaling_thread, is_screencast, align_timestamps); } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/OWNERS b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/OWNERS index 4f2f24210..557373424 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/OWNERS @@ -1,4 +1,4 @@ -per-file androidhistogram.cc=sakal@webrtc.org -per-file androidmetrics.cc=sakal@webrtc.org -per-file androidvideotracksource.*=sakal@webrtc.org -per-file androidvideotracksource.cc=sakal@webrtc.org +per-file androidhistogram.cc=xalep@webrtc.org +per-file androidmetrics.cc=xalep@webrtc.org +per-file androidvideotracksource.*=xalep@webrtc.org +per-file androidvideotracksource.cc=xalep@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc index 434e6d3af..686f94e1e 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.cc @@ -16,12 +16,12 @@ #define RTLD_NOLOAD 4 #endif -#include "rtc_base/bind.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/to_queued_task.h" #include "sdk/android/generated_base_jni/NetworkChangeDetector_jni.h" #include "sdk/android/generated_base_jni/NetworkMonitor_jni.h" #include "sdk/android/native_api/jni/java_types.h" @@ -231,7 +231,9 @@ AndroidNetworkMonitor::AndroidNetworkMonitor( j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)), network_thread_(rtc::Thread::Current()) {} -AndroidNetworkMonitor::~AndroidNetworkMonitor() = default; +AndroidNetworkMonitor::~AndroidNetworkMonitor() { + RTC_DCHECK(!started_); 
+} void AndroidNetworkMonitor::Start() { RTC_DCHECK_RUN_ON(network_thread_); @@ -244,11 +246,13 @@ void AndroidNetworkMonitor::Start() { find_network_handle_without_ipv6_temporary_part_ = webrtc::field_trial::IsEnabled( "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart"); + bind_using_ifname_ = + !webrtc::field_trial::IsDisabled("WebRTC-BindUsingInterfaceName"); - // This is kind of magic behavior, but doing this allows the SocketServer to - // use this as a NetworkBinder to bind sockets on a particular network when - // it creates sockets. - network_thread_->socketserver()->set_network_binder(this); + // This pointer is also accessed by the methods called from java threads. + // Assigning it here is safe, because the java monitor is in a stopped state, + // and will not make any callbacks. + safety_flag_ = PendingTaskSafetyFlag::Create(); JNIEnv* env = AttachCurrentThreadIfNeeded(); Java_NetworkMonitor_startMonitoring( @@ -263,11 +267,9 @@ void AndroidNetworkMonitor::Stop() { started_ = false; find_network_handle_without_ipv6_temporary_part_ = false; - // Once the network monitor stops, it will clear all network information and - // it won't find the network handle to bind anyway. - if (network_thread_->socketserver()->network_binder() == this) { - network_thread_->socketserver()->set_network_binder(nullptr); - } + // Cancel any pending tasks. We should not call SignalNetworksChanged when the + // monitor is stopped. 
+ safety_flag_->SetNotAlive(); JNIEnv* env = AttachCurrentThreadIfNeeded(); Java_NetworkMonitor_stopMonitoring(env, j_network_monitor_, @@ -281,7 +283,8 @@ void AndroidNetworkMonitor::Stop() { // https://cs.chromium.org/chromium/src/net/udp/udp_socket_posix.cc rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( int socket_fd, - const rtc::IPAddress& address) { + const rtc::IPAddress& address, + const std::string& if_name) { RTC_DCHECK_RUN_ON(network_thread_); // Android prior to Lollipop didn't have support for binding sockets to @@ -299,12 +302,18 @@ rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( } absl::optional network_handle = - FindNetworkHandleFromAddress(address); + FindNetworkHandleFromAddressOrName(address, if_name); if (!network_handle) { + RTC_LOG(LS_WARNING) + << "BindSocketToNetwork unable to find network handle for" + << " addr: " << address.ToSensitiveString() << " ifname: " << if_name; return rtc::NetworkBindingResult::ADDRESS_NOT_FOUND; } if (*network_handle == 0 /* NETWORK_UNSPECIFIED */) { + RTC_LOG(LS_WARNING) << "BindSocketToNetwork 0 network handle for" + << " addr: " << address.ToSensitiveString() + << " ifname: " << if_name; return rtc::NetworkBindingResult::NOT_IMPLEMENTED; } @@ -371,11 +380,19 @@ rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork( // ERR_NETWORK_CHANGED, rather than MapSystemError(ENONET) which gives back // the less descriptive ERR_FAILED. 
if (rv == 0) { + RTC_LOG(LS_VERBOSE) << "BindSocketToNetwork bound network handle for" + << " addr: " << address.ToSensitiveString() + << " ifname: " << if_name; return rtc::NetworkBindingResult::SUCCESS; } + + RTC_LOG(LS_WARNING) << "BindSocketToNetwork got error: " << rv + << " addr: " << address.ToSensitiveString() + << " ifname: " << if_name; if (rv == ENONET) { return rtc::NetworkBindingResult::NETWORK_CHANGED; } + return rtc::NetworkBindingResult::FAILURE; } @@ -398,8 +415,9 @@ void AndroidNetworkMonitor::OnNetworkConnected_n( } absl::optional -AndroidNetworkMonitor::FindNetworkHandleFromAddress( - const rtc::IPAddress& ip_address) const { +AndroidNetworkMonitor::FindNetworkHandleFromAddressOrName( + const rtc::IPAddress& ip_address, + const std::string& if_name) const { RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Find network handle."; if (find_network_handle_without_ipv6_temporary_part_) { @@ -413,14 +431,31 @@ AndroidNetworkMonitor::FindNetworkHandleFromAddress( return absl::make_optional(iter.first); } } - return absl::nullopt; } else { auto iter = network_handle_by_address_.find(ip_address); - if (iter == network_handle_by_address_.end()) { - return absl::nullopt; + if (iter != network_handle_by_address_.end()) { + return absl::make_optional(iter->second); } - return absl::make_optional(iter->second); } + + return FindNetworkHandleFromIfname(if_name); +} + +absl::optional +AndroidNetworkMonitor::FindNetworkHandleFromIfname( + const std::string& if_name) const { + RTC_DCHECK_RUN_ON(network_thread_); + if (bind_using_ifname_) { + for (auto const& iter : network_info_by_handle_) { + if (if_name.find(iter.second.interface_name) != std::string::npos) { + // Use partial match so that e.g if_name="v4-wlan0" is matched + // agains iter.first="wlan0" + return absl::make_optional(iter.first); + } + } + } + + return absl::nullopt; } void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) { @@ -466,6 +501,18 @@ rtc::AdapterType 
AndroidNetworkMonitor::GetAdapterType( rtc::AdapterType type = (iter == adapter_type_by_name_.end()) ? rtc::ADAPTER_TYPE_UNKNOWN : iter->second; + + if (type == rtc::ADAPTER_TYPE_UNKNOWN && bind_using_ifname_) { + for (auto const& iter : adapter_type_by_name_) { + // Use partial match so that e.g if_name="v4-wlan0" is matched + // agains iter.first="wlan0" + if (if_name.find(iter.first) != std::string::npos) { + type = iter.second; + break; + } + } + } + if (type == rtc::ADAPTER_TYPE_UNKNOWN) { RTC_LOG(LS_WARNING) << "Get an unknown type for the interface " << if_name; } @@ -479,6 +526,17 @@ rtc::AdapterType AndroidNetworkMonitor::GetVpnUnderlyingAdapterType( rtc::AdapterType type = (iter == vpn_underlying_adapter_type_by_name_.end()) ? rtc::ADAPTER_TYPE_UNKNOWN : iter->second; + if (type == rtc::ADAPTER_TYPE_UNKNOWN && bind_using_ifname_) { + // Use partial match so that e.g if_name="v4-wlan0" is matched + // agains iter.first="wlan0" + for (auto const& iter : vpn_underlying_adapter_type_by_name_) { + if (if_name.find(iter.first) != std::string::npos) { + type = iter.second; + break; + } + } + } + return type; } @@ -525,11 +583,11 @@ AndroidNetworkMonitorFactory::CreateNetworkMonitor() { void AndroidNetworkMonitor::NotifyConnectionTypeChanged( JNIEnv* env, const JavaRef& j_caller) { - invoker_.AsyncInvoke(RTC_FROM_HERE, network_thread_, [this] { + network_thread_->PostTask(ToQueuedTask(safety_flag_, [this] { RTC_LOG(LS_INFO) << "Android network monitor detected connection type change."; SignalNetworksChanged(); - }); + })); } void AndroidNetworkMonitor::NotifyOfActiveNetworkList( @@ -548,19 +606,19 @@ void AndroidNetworkMonitor::NotifyOfNetworkConnect( const JavaRef& j_network_info) { NetworkInformation network_info = GetNetworkInformationFromJava(env, j_network_info); - network_thread_->Invoke( - RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkConnected_n, - this, network_info)); + network_thread_->PostTask(ToQueuedTask( + safety_flag_, [this, 
network_info = std::move(network_info)] { + OnNetworkConnected_n(network_info); + })); } void AndroidNetworkMonitor::NotifyOfNetworkDisconnect( JNIEnv* env, const JavaRef& j_caller, jlong network_handle) { - network_thread_->Invoke( - RTC_FROM_HERE, - rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_n, this, - static_cast(network_handle))); + network_thread_->PostTask(ToQueuedTask(safety_flag_, [this, network_handle] { + OnNetworkDisconnected_n(static_cast(network_handle)); + })); } void AndroidNetworkMonitor::NotifyOfNetworkPreference( @@ -572,9 +630,9 @@ void AndroidNetworkMonitor::NotifyOfNetworkPreference( rtc::NetworkPreference preference = static_cast(jpreference); - network_thread_->Invoke( - RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkPreference_n, - this, type, preference)); + network_thread_->PostTask(ToQueuedTask( + safety_flag_, + [this, type, preference] { OnNetworkPreference_n(type, preference); })); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h index eff212254..423ae3a66 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_network_monitor.h @@ -12,14 +12,15 @@ #define SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_ #include + #include #include #include #include "absl/types/optional.h" -#include "rtc_base/async_invoker.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -63,8 +64,7 @@ struct NetworkInformation { std::string ToString() const; }; -class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface, - public rtc::NetworkBinderInterface { +class AndroidNetworkMonitor : public 
rtc::NetworkMonitorInterface { public: AndroidNetworkMonitor(JNIEnv* env, const JavaRef& j_application_context); @@ -76,9 +76,14 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface, void Start() override; void Stop() override; + // Does |this| NetworkMonitorInterface implement BindSocketToNetwork? + // Only Android returns true. + virtual bool SupportsBindSocketToNetwork() const override { return true; } + rtc::NetworkBindingResult BindSocketToNetwork( int socket_fd, - const rtc::IPAddress& address) override; + const rtc::IPAddress& address, + const std::string& if_name) override; rtc::AdapterType GetAdapterType(const std::string& if_name) override; rtc::AdapterType GetVpnUnderlyingAdapterType( const std::string& if_name) override; @@ -105,8 +110,9 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface, jint preference); // Visible for testing. - absl::optional FindNetworkHandleFromAddress( - const rtc::IPAddress& address) const; + absl::optional FindNetworkHandleFromAddressOrName( + const rtc::IPAddress& address, + const std::string& ifname) const; private: void OnNetworkConnected_n(const NetworkInformation& network_info); @@ -114,10 +120,13 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface, void OnNetworkPreference_n(NetworkType type, rtc::NetworkPreference preference); + absl::optional FindNetworkHandleFromIfname( + const std::string& ifname) const; + const int android_sdk_int_; ScopedJavaGlobalRef j_application_context_; ScopedJavaGlobalRef j_network_monitor_; - rtc::Thread* network_thread_; + rtc::Thread* const network_thread_; bool started_ RTC_GUARDED_BY(network_thread_) = false; std::map adapter_type_by_name_ RTC_GUARDED_BY(network_thread_); @@ -132,7 +141,16 @@ class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface, bool find_network_handle_without_ipv6_temporary_part_ RTC_GUARDED_BY(network_thread_) = false; bool surface_cellular_types_ RTC_GUARDED_BY(network_thread_) = false; - 
rtc::AsyncInvoker invoker_; + + // NOTE: if bind_using_ifname_ is TRUE + // then the adapter name is used with substring matching as follows: + // An adapater name repored by android as 'wlan0' + // will be matched with 'v4-wlan0' ("v4-wlan0".find("wlan0") != npos). + // This applies to adapter_type_by_name_, vpn_underlying_adapter_type_by_name_ + // and FindNetworkHandleFromIfname. + bool bind_using_ifname_ RTC_GUARDED_BY(network_thread_) = true; + rtc::scoped_refptr safety_flag_ + RTC_PT_GUARDED_BY(network_thread_) = nullptr; }; class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc index f8455c91f..72cf3955f 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc @@ -14,7 +14,6 @@ #include -#include "rtc_base/bind.h" #include "rtc_base/logging.h" namespace webrtc { @@ -68,12 +67,7 @@ void AndroidVideoTrackSource::SetState(JNIEnv* env, } else { // TODO(sakal): Is this even necessary, does FireOnChanged have to be // called from signaling thread? 
- signaling_thread_->PostTask( - RTC_FROM_HERE, - rtc::Bind( - &AndroidVideoTrackSource::FireOnChanged, - static_cast*>( - this))); + signaling_thread_->PostTask(RTC_FROM_HERE, [this] { FireOnChanged(); }); } } } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.h index e6146d0d4..5f9a9eace 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_player.h @@ -12,15 +12,16 @@ #define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_ #include + #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/message_handler.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/aaudio_wrapper.h" #include "sdk/android/src/jni/audio_device/audio_device_module.h" @@ -99,12 +100,12 @@ class AAudioPlayer final : public AudioOutput, // Ensures that methods are called from the same thread as this object is // created on. - rtc::ThreadChecker main_thread_checker_; + SequenceChecker main_thread_checker_; // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a // real-time thread owned by AAudio. Detached during construction of this // object. - rtc::ThreadChecker thread_checker_aaudio_; + SequenceChecker thread_checker_aaudio_; // The thread on which this object is created on. 
rtc::Thread* main_thread_; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h index 0ed0fa2d5..2b6aa0312 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h @@ -12,13 +12,14 @@ #define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_ #include + #include +#include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/message_handler.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/aaudio_wrapper.h" #include "sdk/android/src/jni/audio_device/audio_device_module.h" @@ -90,12 +91,12 @@ class AAudioRecorder : public AudioInput, // Ensures that methods are called from the same thread as this object is // created on. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a // real-time thread owned by AAudio. Detached during construction of this // object. - rtc::ThreadChecker thread_checker_aaudio_; + SequenceChecker thread_checker_aaudio_; // The thread on which this object is created on. 
rtc::Thread* main_thread_; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h index 1900ab988..cbc78a0a2 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h @@ -13,8 +13,8 @@ #include +#include "api/sequence_checker.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -113,8 +113,8 @@ class AAudioWrapper { bool VerifyStreamConfiguration(); bool OptimizeBuffers(); - rtc::ThreadChecker thread_checker_; - rtc::ThreadChecker aaudio_thread_checker_; + SequenceChecker thread_checker_; + SequenceChecker aaudio_thread_checker_; const AudioParameters audio_parameters_; const aaudio_direction_t direction_; AAudioObserverInterface* observer_ = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc index eb5d93fa2..4c9c36b7a 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_device_module.cc @@ -13,13 +13,13 @@ #include #include +#include "api/sequence_checker.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/task_queue_factory.h" #include "modules/audio_device/audio_device_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/generated_audio_device_module_base_jni/WebRtcAudioManager_jni.h" #include "system_wrappers/include/metrics.h" @@ -70,26 +70,26 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { initialized_(false) { RTC_CHECK(input_); 
RTC_CHECK(output_); - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; thread_checker_.Detach(); } - ~AndroidAudioDeviceModule() override { RTC_LOG(INFO) << __FUNCTION__; } + ~AndroidAudioDeviceModule() override { RTC_DLOG(INFO) << __FUNCTION__; } int32_t ActiveAudioLayer( AudioDeviceModule::AudioLayer* audioLayer) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; *audioLayer = audio_layer_; return 0; } int32_t RegisterAudioCallback(AudioTransport* audioCallback) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return audio_device_buffer_->RegisterAudioCallback(audioCallback); } int32_t Init() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_DCHECK(thread_checker_.IsCurrent()); audio_device_buffer_ = std::make_unique(task_queue_factory_.get()); @@ -118,7 +118,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t Terminate() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return 0; RTC_DCHECK(thread_checker_.IsCurrent()); @@ -132,19 +132,19 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } bool Initialized() const override { - RTC_LOG(INFO) << __FUNCTION__ << ":" << initialized_; + RTC_DLOG(INFO) << __FUNCTION__ << ":" << initialized_; return initialized_; } int16_t PlayoutDevices() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_LOG(INFO) << "output: " << 1; return 1; } int16_t RecordingDevices() override { - RTC_LOG(INFO) << __FUNCTION__; - RTC_LOG(INFO) << "output: " << 1; + RTC_DLOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << "output: " << 1; return 1; } @@ -163,7 +163,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t SetPlayoutDevice(uint16_t index) override { // OK to use but it has no effect currently since device selection is // done using Andoid APIs instead. 
- RTC_LOG(INFO) << __FUNCTION__ << "(" << index << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << index << ")"; return 0; } @@ -175,7 +175,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { int32_t SetRecordingDevice(uint16_t index) override { // OK to use but it has no effect currently since device selection is // done using Andoid APIs instead. - RTC_LOG(INFO) << __FUNCTION__ << "(" << index << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << index << ")"; return 0; } @@ -185,66 +185,66 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t PlayoutIsAvailable(bool* available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; *available = true; - RTC_LOG(INFO) << "output: " << *available; + RTC_DLOG(INFO) << "output: " << *available; return 0; } int32_t InitPlayout() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; if (PlayoutIsInitialized()) { return 0; } int32_t result = output_->InitPlayout(); - RTC_LOG(INFO) << "output: " << result; + RTC_DLOG(INFO) << "output: " << result; RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess", static_cast(result == 0)); return result; } bool PlayoutIsInitialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_->PlayoutIsInitialized(); } int32_t RecordingIsAvailable(bool* available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; *available = true; - RTC_LOG(INFO) << "output: " << *available; + RTC_DLOG(INFO) << "output: " << *available; return 0; } int32_t InitRecording() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; if (RecordingIsInitialized()) { return 0; } int32_t result = input_->InitRecording(); - RTC_LOG(INFO) << "output: " << result; + RTC_DLOG(INFO) << "output: " << result; RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess", static_cast(result == 
0)); return result; } bool RecordingIsInitialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return input_->RecordingIsInitialized(); } int32_t StartPlayout() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; if (Playing()) { return 0; } int32_t result = output_->StartPlayout(); - RTC_LOG(INFO) << "output: " << result; + RTC_DLOG(INFO) << "output: " << result; RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess", static_cast(result == 0)); if (result == 0) { @@ -256,7 +256,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t StopPlayout() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; if (!Playing()) @@ -264,26 +264,26 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { RTC_LOG(INFO) << __FUNCTION__; audio_device_buffer_->StopPlayout(); int32_t result = output_->StopPlayout(); - RTC_LOG(INFO) << "output: " << result; + RTC_DLOG(INFO) << "output: " << result; RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess", static_cast(result == 0)); return result; } bool Playing() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return output_->Playing(); } int32_t StartRecording() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; if (Recording()) { return 0; } int32_t result = input_->StartRecording(); - RTC_LOG(INFO) << "output: " << result; + RTC_DLOG(INFO) << "output: " << result; RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess", static_cast(result == 0)); if (result == 0) { @@ -295,74 +295,74 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t StopRecording() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; if (!Recording()) return 0; audio_device_buffer_->StopRecording(); int32_t result = 
input_->StopRecording(); - RTC_LOG(INFO) << "output: " << result; + RTC_DLOG(INFO) << "output: " << result; RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess", static_cast(result == 0)); return result; } bool Recording() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return input_->Recording(); } int32_t InitSpeaker() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return initialized_ ? 0 : -1; } bool SpeakerIsInitialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return initialized_; } int32_t InitMicrophone() override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return initialized_ ? 0 : -1; } bool MicrophoneIsInitialized() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; return initialized_; } int32_t SpeakerVolumeIsAvailable(bool* available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; *available = output_->SpeakerVolumeIsAvailable(); - RTC_LOG(INFO) << "output: " << *available; + RTC_DLOG(INFO) << "output: " << *available; return 0; } int32_t SetSpeakerVolume(uint32_t volume) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; return output_->SetSpeakerVolume(volume); } int32_t SpeakerVolume(uint32_t* output_volume) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; absl::optional volume = output_->SpeakerVolume(); if (!volume) return -1; *output_volume = *volume; - RTC_LOG(INFO) << "output: " << *volume; + RTC_DLOG(INFO) << "output: " << *volume; return 0; } int32_t MaxSpeakerVolume(uint32_t* output_max_volume) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; absl::optional max_volume = output_->MaxSpeakerVolume(); @@ -373,7 +373,7 @@ class 
AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t MinSpeakerVolume(uint32_t* output_min_volume) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return -1; absl::optional min_volume = output_->MinSpeakerVolume(); @@ -384,71 +384,71 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t MicrophoneVolumeIsAvailable(bool* available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; *available = false; - RTC_LOG(INFO) << "output: " << *available; + RTC_DLOG(INFO) << "output: " << *available; return -1; } int32_t SetMicrophoneVolume(uint32_t volume) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << volume << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << volume << ")"; RTC_CHECK_NOTREACHED(); } int32_t MicrophoneVolume(uint32_t* volume) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_CHECK_NOTREACHED(); } int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_CHECK_NOTREACHED(); } int32_t MinMicrophoneVolume(uint32_t* minVolume) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_CHECK_NOTREACHED(); } int32_t SpeakerMuteIsAvailable(bool* available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_CHECK_NOTREACHED(); } int32_t SetSpeakerMute(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; RTC_CHECK_NOTREACHED(); } int32_t SpeakerMute(bool* enabled) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_CHECK_NOTREACHED(); } int32_t MicrophoneMuteIsAvailable(bool* available) override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_CHECK_NOTREACHED(); } int32_t SetMicrophoneMute(bool enable) override { - RTC_LOG(INFO) << 
__FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; RTC_CHECK_NOTREACHED(); } int32_t MicrophoneMute(bool* enabled) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; RTC_CHECK_NOTREACHED(); } int32_t StereoPlayoutIsAvailable(bool* available) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; *available = is_stereo_playout_supported_; - RTC_LOG(INFO) << "output: " << *available; + RTC_DLOG(INFO) << "output: " << *available; return 0; } int32_t SetStereoPlayout(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; // Android does not support changes between mono and stero on the fly. The // use of stereo or mono is determined by the audio layer. It is allowed // to call this method if that same state is not modified. @@ -461,21 +461,21 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t StereoPlayout(bool* enabled) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; *enabled = is_stereo_playout_supported_; - RTC_LOG(INFO) << "output: " << *enabled; + RTC_DLOG(INFO) << "output: " << *enabled; return 0; } int32_t StereoRecordingIsAvailable(bool* available) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; *available = is_stereo_record_supported_; - RTC_LOG(INFO) << "output: " << *available; + RTC_DLOG(INFO) << "output: " << *available; return 0; } int32_t SetStereoRecording(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; // Android does not support changes between mono and stero on the fly. The // use of stereo or mono is determined by the audio layer. It is allowed // to call this method if that same state is not modified. 
@@ -488,9 +488,9 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t StereoRecording(bool* enabled) const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; *enabled = is_stereo_record_supported_; - RTC_LOG(INFO) << "output: " << *enabled; + RTC_DLOG(INFO) << "output: " << *enabled; return 0; } @@ -514,18 +514,18 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { // a "Not Implemented" log will be filed. This non-perfect state will remain // until I have added full support for audio effects based on OpenSL ES APIs. bool BuiltInAECIsAvailable() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return false; bool isAvailable = input_->IsAcousticEchoCancelerSupported(); - RTC_LOG(INFO) << "output: " << isAvailable; + RTC_DLOG(INFO) << "output: " << isAvailable; return isAvailable; } // Not implemented for any input device on Android. bool BuiltInAGCIsAvailable() const override { - RTC_LOG(INFO) << __FUNCTION__; - RTC_LOG(INFO) << "output: " << false; + RTC_DLOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << "output: " << false; return false; } @@ -534,38 +534,38 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { // TODO(henrika): add implementation for OpenSL ES based audio as well. // In addition, see comments for BuiltInAECIsAvailable(). bool BuiltInNSIsAvailable() const override { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; if (!initialized_) return false; bool isAvailable = input_->IsNoiseSuppressorSupported(); - RTC_LOG(INFO) << "output: " << isAvailable; + RTC_DLOG(INFO) << "output: " << isAvailable; return isAvailable; } // TODO(henrika): add implementation for OpenSL ES based audio as well. 
int32_t EnableBuiltInAEC(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; if (!initialized_) return -1; RTC_CHECK(BuiltInAECIsAvailable()) << "HW AEC is not available"; int32_t result = input_->EnableBuiltInAEC(enable); - RTC_LOG(INFO) << "output: " << result; + RTC_DLOG(INFO) << "output: " << result; return result; } int32_t EnableBuiltInAGC(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; RTC_CHECK_NOTREACHED(); } // TODO(henrika): add implementation for OpenSL ES based audio as well. int32_t EnableBuiltInNS(bool enable) override { - RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; + RTC_DLOG(INFO) << __FUNCTION__ << "(" << enable << ")"; if (!initialized_) return -1; RTC_CHECK(BuiltInNSIsAvailable()) << "HW NS is not available"; int32_t result = input_->EnableBuiltInNS(enable); - RTC_LOG(INFO) << "output: " << result; + RTC_DLOG(INFO) << "output: " << result; return result; } @@ -576,14 +576,14 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { } int32_t AttachAudioBuffer() { - RTC_LOG(INFO) << __FUNCTION__; + RTC_DLOG(INFO) << __FUNCTION__; output_->AttachAudioBuffer(audio_device_buffer_.get()); input_->AttachAudioBuffer(audio_device_buffer_.get()); return 0; } private: - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; const AudioDeviceModule::AudioLayer audio_layer_; const bool is_stereo_playout_supported_; @@ -640,8 +640,8 @@ rtc::scoped_refptr CreateAudioDeviceModuleFromInputAndOutput( uint16_t playout_delay_ms, std::unique_ptr audio_input, std::unique_ptr audio_output) { - RTC_LOG(INFO) << __FUNCTION__; - return new rtc::RefCountedObject( + RTC_DLOG(INFO) << __FUNCTION__; + return rtc::make_ref_counted( audio_layer, is_stereo_playout_supported, is_stereo_record_supported, playout_delay_ms, std::move(audio_input), std::move(audio_output)); } 
diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.h index 7578f8394..800d23543 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.h @@ -12,11 +12,12 @@ #define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_ #include + #include +#include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/audio_device_module.h" namespace webrtc { @@ -93,11 +94,11 @@ class AudioRecordJni : public AudioInput { private: // Stores thread ID in constructor. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to OnDataIsRecorded() from high-priority // thread in Java. Detached during construction of this object. - rtc::ThreadChecker thread_checker_java_; + SequenceChecker thread_checker_java_; // Wraps the Java specific parts of the AudioRecordJni class. JNIEnv* env_ = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc index d5b880b1b..85adee286 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc @@ -151,6 +151,18 @@ int32_t AudioTrackJni::StopPlayout() { if (!initialized_ || !playing_) { return 0; } + // Log the difference in initial and current buffer level. 
+ const int current_buffer_size_frames = + Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_); + const int initial_buffer_size_frames = + Java_WebRtcAudioTrack_getInitialBufferSizeInFrames(env_, j_audio_track_); + const int sample_rate_hz = audio_parameters_.sample_rate(); + RTC_HISTOGRAM_COUNTS( + "WebRTC.Audio.AndroidNativeAudioBufferSizeDifferenceFromInitialMs", + (current_buffer_size_frames - initial_buffer_size_frames) * 1000 / + sample_rate_hz, + -500, 100, 100); + if (!Java_WebRtcAudioTrack_stopPlayout(env_, j_audio_track_)) { RTC_LOG(LS_ERROR) << "StopPlayout failed"; return -1; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.h index c7d060033..cc4d8f53a 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_track_jni.h @@ -12,12 +12,13 @@ #define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_ #include + #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/audio_common.h" #include "sdk/android/src/jni/audio_device/audio_device_module.h" @@ -84,11 +85,11 @@ class AudioTrackJni : public AudioOutput { private: // Stores thread ID in constructor. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to OnGetPlayoutData() from high-priority // thread in Java. Detached during construction of this object. - rtc::ThreadChecker thread_checker_java_; + SequenceChecker thread_checker_java_; // Wraps the Java specific parts of the AudioTrackJni class. 
JNIEnv* env_ = nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_common.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_common.cc index 04c3ae9f7..0f35b2712 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_common.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_common.cc @@ -106,8 +106,6 @@ OpenSLEngineManager::OpenSLEngineManager() { thread_checker_.Detach(); } -OpenSLEngineManager::~OpenSLEngineManager() = default; - SLObjectItf OpenSLEngineManager::GetOpenSLEngine() { RTC_LOG(INFO) << "GetOpenSLEngine"; RTC_DCHECK(thread_checker_.IsCurrent()); diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_common.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_common.h index 605ddfc0e..9dd1e0f7d 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_common.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_common.h @@ -15,9 +15,9 @@ #include #include "api/ref_counted_base.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/thread_checker.h" namespace webrtc { @@ -68,14 +68,15 @@ typedef ScopedSLObject ScopedSLObjectItf; // Subsequent calls returns the already created engine. // Note: This class must be used single threaded and this is enforced by a // thread checker. -class OpenSLEngineManager : public rtc::RefCountedBase { +class OpenSLEngineManager + : public rtc::RefCountedNonVirtual { public: OpenSLEngineManager(); - ~OpenSLEngineManager() override; + ~OpenSLEngineManager() = default; SLObjectItf GetOpenSLEngine(); private: - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // This object is the global entry point of the OpenSL ES API. 
// After creating the engine object, the application can obtain this object‘s // SLEngineItf interface. This interface contains creation methods for all diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_player.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_player.h index a2a49f986..7388a9370 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_player.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_player.h @@ -16,12 +16,13 @@ #include #include + #include "absl/types/optional.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/fine_audio_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/audio_common.h" #include "sdk/android/src/jni/audio_device/audio_device_module.h" #include "sdk/android/src/jni/audio_device/opensles_common.h" @@ -121,12 +122,12 @@ class OpenSLESPlayer : public AudioOutput { // Ensures that methods are called from the same thread as this object is // created on. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to SimpleBufferQueueCallback() from internal // non-application thread which is not attached to the Dalvik JVM. // Detached during construction of this object. 
- rtc::ThreadChecker thread_checker_opensles_; + SequenceChecker thread_checker_opensles_; const AudioParameters audio_parameters_; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_recorder.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_recorder.h index 4856fd015..ff324f313 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_recorder.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/opensles_recorder.h @@ -18,10 +18,10 @@ #include #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/fine_audio_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/audio_common.h" #include "sdk/android/src/jni/audio_device/audio_device_module.h" #include "sdk/android/src/jni/audio_device/opensles_common.h" @@ -128,12 +128,12 @@ class OpenSLESRecorder : public AudioInput { // Ensures that methods are called from the same thread as this object is // created on. - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; // Stores thread ID in first call to SimpleBufferQueueCallback() from internal // non-application thread which is not attached to the Dalvik JVM. // Detached during construction of this object. - rtc::ThreadChecker thread_checker_opensles_; + SequenceChecker thread_checker_opensles_; const AudioParameters audio_parameters_; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc new file mode 100644 index 000000000..02070f790 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include + +#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include "sdk/android/generated_libaom_av1_jni/LibaomAv1Decoder_jni.h" +#include "sdk/android/generated_libaom_av1_jni/LibaomAv1Encoder_jni.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) { + return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release()); +} + +static jboolean JNI_LibaomAv1Encoder_IsSupported(JNIEnv* jni) { + return webrtc::kIsLibaomAv1EncoderSupported; +} + +static jlong JNI_LibaomAv1Decoder_CreateDecoder(JNIEnv* jni) { + return jlongFromPointer(webrtc::CreateLibaomAv1Decoder().release()); +} + +static jboolean JNI_LibaomAv1Decoder_IsSupported(JNIEnv* jni) { + return webrtc::kIsLibaomAv1DecoderSupported; +} + +} // namespace jni +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/egl_base_10_impl.cc similarity index 52% rename from TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory.cc rename to TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/egl_base_10_impl.cc index 511a3c7e9..1bbc7031a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/egl_base_10_impl.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * Copyright 2021 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,16 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/video_codecs/video_decoder_factory.h" +#include -#include "api/video_codecs/video_decoder.h" +#include "sdk/android/generated_video_egl_jni/EglBase10Impl_jni.h" namespace webrtc { +namespace jni { -std::unique_ptr VideoDecoderFactory::LegacyCreateVideoDecoder( - const SdpVideoFormat& format, - const std::string& receive_stream_id) { - return CreateVideoDecoder(format); +static jlong JNI_EglBase10Impl_GetCurrentNativeEGLContext(JNIEnv* jni) { + return reinterpret_cast(eglGetCurrentContext()); } +} // namespace jni } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc index 839f6a8f6..189d7e95e 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/encoded_image.cc @@ -90,7 +90,7 @@ EncodedImage JavaToNativeEncodedImage(JNIEnv* env, const size_t buffer_size = env->GetDirectBufferCapacity(j_buffer.obj()); EncodedImage frame; - frame.SetEncodedData(new rtc::RefCountedObject( + frame.SetEncodedData(rtc::make_ref_counted( env, j_encoded_image, buffer, buffer_size)); frame._encodedWidth = Java_EncodedImage_getEncodedWidth(env, j_encoded_image); diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/h264_utils.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/h264_utils.cc index 02e3ae110..882df95b8 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/h264_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/h264_utils.cc @@ -8,10 +8,9 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "sdk/android/src/jni/video_codec_info.h" - -#include "common_video/h264/profile_level_id.h" +#include "api/video_codecs/h264_profile_level_id.h" #include "sdk/android/generated_video_jni/H264Utils_jni.h" +#include "sdk/android/src/jni/video_codec_info.h" namespace webrtc { namespace jni { @@ -20,8 +19,8 @@ static jboolean JNI_H264Utils_IsSameH264Profile( JNIEnv* env, const JavaParamRef& params1, const JavaParamRef& params2) { - return H264::IsSameH264Profile(JavaToNativeStringMap(env, params1), - JavaToNativeStringMap(env, params2)); + return H264IsSameProfile(JavaToNativeStringMap(env, params1), + JavaToNativeStringMap(env, params2)); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc new file mode 100644 index 000000000..7f3dddbb2 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/pc/add_ice_candidate_observer.h" + +#include + +#include "sdk/android/generated_peerconnection_jni/AddIceObserver_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/media_constraints.h" + +namespace webrtc { +namespace jni { + +AddIceCandidateObserverJni::AddIceCandidateObserverJni( + JNIEnv* env, + const JavaRef& j_observer) + : j_observer_global_(env, j_observer) {} + +void AddIceCandidateObserverJni::OnComplete(webrtc::RTCError error) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + if (error.ok()) { + Java_AddIceObserver_onAddSuccess(env, j_observer_global_); + } else { + Java_AddIceObserver_onAddFailure(env, j_observer_global_, + NativeToJavaString(env, error.message())); + } +} + +} // namespace jni +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h new file mode 100644 index 000000000..112838538 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h @@ -0,0 +1,38 @@ +/* + * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_ +#define SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_ + +#include +#include + +#include "api/peer_connection_interface.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +class AddIceCandidateObserverJni final + : public rtc::RefCountedNonVirtual { + public: + AddIceCandidateObserverJni(JNIEnv* env, const JavaRef& j_observer); + ~AddIceCandidateObserverJni() = default; + + void OnComplete(RTCError error); + + private: + const ScopedJavaGlobalRef j_observer_global_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc index 6706782e3..93c6eb389 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc @@ -44,6 +44,7 @@ #include "sdk/android/generated_peerconnection_jni/PeerConnection_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/add_ice_candidate_observer.h" #include "sdk/android/src/jni/pc/crypto_options.h" #include "sdk/android/src/jni/pc/data_channel.h" #include "sdk/android/src/jni/pc/ice_candidate.h" @@ -237,6 +238,12 @@ void JavaToNativeRTCConfiguration( j_rtc_config); rtc_config->stun_candidate_keepalive_interval = JavaToNativeOptionalInt(jni, j_stun_candidate_keepalive_interval); + ScopedJavaLocalRef j_stable_writable_connection_ping_interval_ms = + Java_RTCConfiguration_getStableWritableConnectionPingIntervalMs( + jni, j_rtc_config); + rtc_config->stable_writable_connection_ping_interval_ms = + JavaToNativeOptionalInt(jni, + j_stable_writable_connection_ping_interval_ms); rtc_config->disable_ipv6_on_wifi = 
Java_RTCConfiguration_getDisableIPv6OnWifi(jni, j_rtc_config); rtc_config->max_ipv6_networks = @@ -250,8 +257,6 @@ void JavaToNativeRTCConfiguration( Java_RTCConfiguration_getEnableDscp(jni, j_rtc_config); rtc_config->media_config.video.enable_cpu_adaptation = Java_RTCConfiguration_getEnableCpuOveruseDetection(jni, j_rtc_config); - rtc_config->enable_rtp_data_channel = - Java_RTCConfiguration_getEnableRtpDataChannel(jni, j_rtc_config); rtc_config->media_config.video.suspend_below_min_bitrate = Java_RTCConfiguration_getSuspendBelowMinBitrate(jni, j_rtc_config); rtc_config->screencast_min_bitrate = JavaToNativeOptionalInt( @@ -271,6 +276,11 @@ void JavaToNativeRTCConfiguration( rtc_config->allow_codec_switching = JavaToNativeOptionalBool( jni, Java_RTCConfiguration_getAllowCodecSwitching(jni, j_rtc_config)); + rtc_config->offer_extmap_allow_mixed = + Java_RTCConfiguration_getOfferExtmapAllowMixed(jni, j_rtc_config); + rtc_config->enable_implicit_rollback = + Java_RTCConfiguration_getEnableImplicitRollback(jni, j_rtc_config); + ScopedJavaLocalRef j_turn_logging_id = Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config); if (!IsNull(jni, j_turn_logging_id)) { @@ -464,9 +474,7 @@ static jlong JNI_PeerConnection_CreatePeerConnectionObserver( return jlongFromPointer(new PeerConnectionObserverJni(jni, j_observer)); } -static void JNI_PeerConnection_FreeOwnedPeerConnection( - JNIEnv*, - jlong j_p) { +static void JNI_PeerConnection_FreeOwnedPeerConnection(JNIEnv*, jlong j_p) { delete reinterpret_cast(j_p); } @@ -543,9 +551,8 @@ static void JNI_PeerConnection_CreateOffer( const JavaParamRef& j_constraints) { std::unique_ptr constraints = JavaToNativeMediaConstraints(jni, j_constraints); - rtc::scoped_refptr observer( - new rtc::RefCountedObject(jni, j_observer, - std::move(constraints))); + auto observer = rtc::make_ref_counted( + jni, j_observer, std::move(constraints)); PeerConnectionInterface::RTCOfferAnswerOptions options; 
CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options); ExtractNativePC(jni, j_pc)->CreateOffer(observer, options); @@ -558,23 +565,31 @@ static void JNI_PeerConnection_CreateAnswer( const JavaParamRef& j_constraints) { std::unique_ptr constraints = JavaToNativeMediaConstraints(jni, j_constraints); - rtc::scoped_refptr observer( - new rtc::RefCountedObject(jni, j_observer, - std::move(constraints))); + auto observer = rtc::make_ref_counted( + jni, j_observer, std::move(constraints)); PeerConnectionInterface::RTCOfferAnswerOptions options; CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options); ExtractNativePC(jni, j_pc)->CreateAnswer(observer, options); } +static void JNI_PeerConnection_SetLocalDescriptionAutomatically( + JNIEnv* jni, + const JavaParamRef& j_pc, + const JavaParamRef& j_observer) { + auto observer = + rtc::make_ref_counted(jni, j_observer); + ExtractNativePC(jni, j_pc)->SetLocalDescription(observer); +} + static void JNI_PeerConnection_SetLocalDescription( JNIEnv* jni, const JavaParamRef& j_pc, const JavaParamRef& j_observer, const JavaParamRef& j_sdp) { - rtc::scoped_refptr observer( - new rtc::RefCountedObject(jni, j_observer, nullptr)); + auto observer = + rtc::make_ref_counted(jni, j_observer); ExtractNativePC(jni, j_pc)->SetLocalDescription( - observer, JavaToNativeSessionDescription(jni, j_sdp).release()); + JavaToNativeSessionDescription(jni, j_sdp), observer); } static void JNI_PeerConnection_SetRemoteDescription( @@ -582,10 +597,15 @@ static void JNI_PeerConnection_SetRemoteDescription( const JavaParamRef& j_pc, const JavaParamRef& j_observer, const JavaParamRef& j_sdp) { - rtc::scoped_refptr observer( - new rtc::RefCountedObject(jni, j_observer, nullptr)); + auto observer = + rtc::make_ref_counted(jni, j_observer); ExtractNativePC(jni, j_pc)->SetRemoteDescription( - observer, JavaToNativeSessionDescription(jni, j_sdp).release()); + JavaToNativeSessionDescription(jni, j_sdp), observer); +} + +static void 
JNI_PeerConnection_RestartIce(JNIEnv* jni, + const JavaParamRef& j_pc) { + ExtractNativePC(jni, j_pc)->RestartIce(); } static void JNI_PeerConnection_SetAudioPlayout( @@ -632,6 +652,25 @@ static jboolean JNI_PeerConnection_AddIceCandidate( return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get()); } +static void JNI_PeerConnection_AddIceCandidateWithObserver( + JNIEnv* jni, + const JavaParamRef& j_pc, + const JavaParamRef& j_sdp_mid, + jint j_sdp_mline_index, + const JavaParamRef& j_candidate_sdp, + const JavaParamRef& j_observer) { + std::string sdp_mid = JavaToNativeString(jni, j_sdp_mid); + std::string sdp = JavaToNativeString(jni, j_candidate_sdp); + std::unique_ptr candidate( + CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr)); + + rtc::scoped_refptr observer( + new AddIceCandidateObserverJni(jni, j_observer)); + ExtractNativePC(jni, j_pc)->AddIceCandidate( + std::move(candidate), + [observer](RTCError error) { observer->OnComplete(error); }); +} + static jboolean JNI_PeerConnection_RemoveIceCandidates( JNIEnv* jni, const JavaParamRef& j_pc, @@ -758,8 +797,7 @@ static jboolean JNI_PeerConnection_OldGetStats( const JavaParamRef& j_pc, const JavaParamRef& j_observer, jlong native_track) { - rtc::scoped_refptr observer( - new rtc::RefCountedObject(jni, j_observer)); + auto observer = rtc::make_ref_counted(jni, j_observer); return ExtractNativePC(jni, j_pc)->GetStats( observer, reinterpret_cast(native_track), PeerConnectionInterface::kStatsOutputLevelStandard); @@ -769,9 +807,8 @@ static void JNI_PeerConnection_NewGetStats( JNIEnv* jni, const JavaParamRef& j_pc, const JavaParamRef& j_callback) { - rtc::scoped_refptr callback( - new rtc::RefCountedObject(jni, - j_callback)); + auto callback = + rtc::make_ref_counted(jni, j_callback); ExtractNativePC(jni, j_pc)->GetStats(callback); } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc 
b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc index 2392db240..53e715bd0 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -471,14 +471,14 @@ static jlong JNI_PeerConnectionFactory_CreatePeerConnection( jni, j_sslCertificateVerifier); } - rtc::scoped_refptr pc = - PeerConnectionFactoryFromJava(factory)->CreatePeerConnection( + auto result = + PeerConnectionFactoryFromJava(factory)->CreatePeerConnectionOrError( rtc_config, std::move(peer_connection_dependencies)); - if (!pc) + if (!result.ok()) return 0; - return jlongFromPointer( - new OwnedPeerConnection(pc, std::move(observer), std::move(constraints))); + return jlongFromPointer(new OwnedPeerConnection( + result.MoveValue(), std::move(observer), std::move(constraints))); } static jlong JNI_PeerConnectionFactory_CreateVideoSource( diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_parameters.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_parameters.cc index a65fa6eaa..4bd9ee0e1 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_parameters.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_parameters.cc @@ -53,7 +53,8 @@ ScopedJavaLocalRef NativeToJavaRtpEncodingParameter( NativeToJavaInteger(env, encoding.max_framerate), NativeToJavaInteger(env, encoding.num_temporal_layers), NativeToJavaDouble(env, encoding.scale_resolution_down_by), - encoding.ssrc ? NativeToJavaLong(env, *encoding.ssrc) : nullptr); + encoding.ssrc ? 
NativeToJavaLong(env, *encoding.ssrc) : nullptr, + encoding.adaptive_ptime); } ScopedJavaLocalRef NativeToJavaRtpCodecParameter( @@ -115,6 +116,8 @@ RtpEncodingParameters JavaToNativeRtpEncodingParameters( Java_Encoding_getScaleResolutionDownBy(jni, j_encoding_parameters); encoding.scale_resolution_down_by = JavaToNativeOptionalDouble(jni, j_scale_resolution_down_by); + encoding.adaptive_ptime = + Java_Encoding_getAdaptivePTime(jni, j_encoding_parameters); ScopedJavaLocalRef j_ssrc = Java_Encoding_getSsrc(jni, j_encoding_parameters); if (!IsNull(jni, j_ssrc)) diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/sdp_observer.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/sdp_observer.cc index d1842a3db..c8b4345af 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/sdp_observer.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/sdp_observer.cc @@ -47,24 +47,34 @@ void CreateSdpObserverJni::OnFailure(webrtc::RTCError error) { NativeToJavaString(env, error.message())); } -SetSdpObserverJni::SetSdpObserverJni( +SetLocalSdpObserverJni::SetLocalSdpObserverJni( JNIEnv* env, - const JavaRef& j_observer, - std::unique_ptr constraints) - : j_observer_global_(env, j_observer), - constraints_(std::move(constraints)) {} + const JavaRef& j_observer) + : j_observer_global_(env, j_observer) {} -SetSdpObserverJni::~SetSdpObserverJni() = default; - -void SetSdpObserverJni::OnSuccess() { +void SetLocalSdpObserverJni::OnSetLocalDescriptionComplete(RTCError error) { JNIEnv* env = AttachCurrentThreadIfNeeded(); - Java_SdpObserver_onSetSuccess(env, j_observer_global_); + if (error.ok()) { + Java_SdpObserver_onSetSuccess(env, j_observer_global_); + } else { + Java_SdpObserver_onSetFailure(env, j_observer_global_, + NativeToJavaString(env, error.message())); + } } -void SetSdpObserverJni::OnFailure(webrtc::RTCError error) { +SetRemoteSdpObserverJni::SetRemoteSdpObserverJni( + JNIEnv* env, + const JavaRef& j_observer) + : 
j_observer_global_(env, j_observer) {} + +void SetRemoteSdpObserverJni::OnSetRemoteDescriptionComplete(RTCError error) { JNIEnv* env = AttachCurrentThreadIfNeeded(); - Java_SdpObserver_onSetFailure(env, j_observer_global_, - NativeToJavaString(env, error.message())); + if (error.ok()) { + Java_SdpObserver_onSetSuccess(env, j_observer_global_); + } else { + Java_SdpObserver_onSetFailure(env, j_observer_global_, + NativeToJavaString(env, error.message())); + } } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/sdp_observer.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/sdp_observer.h index 68ded76e7..b33a3018c 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/sdp_observer.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/sdp_observer.h @@ -39,21 +39,28 @@ class CreateSdpObserverJni : public CreateSessionDescriptionObserver { std::unique_ptr constraints_; }; -class SetSdpObserverJni : public SetSessionDescriptionObserver { +class SetLocalSdpObserverJni : public SetLocalDescriptionObserverInterface { public: - SetSdpObserverJni(JNIEnv* env, - const JavaRef& j_observer, - std::unique_ptr constraints); - ~SetSdpObserverJni() override; + SetLocalSdpObserverJni(JNIEnv* env, const JavaRef& j_observer); - MediaConstraints* constraints() { return constraints_.get(); } + ~SetLocalSdpObserverJni() override = default; - void OnSuccess() override; - void OnFailure(RTCError error) override; + virtual void OnSetLocalDescriptionComplete(RTCError error) override; + + private: + const ScopedJavaGlobalRef j_observer_global_; +}; + +class SetRemoteSdpObserverJni : public SetRemoteDescriptionObserverInterface { + public: + SetRemoteSdpObserverJni(JNIEnv* env, const JavaRef& j_observer); + + ~SetRemoteSdpObserverJni() override = default; + + virtual void OnSetRemoteDescriptionComplete(RTCError error) override; private: const ScopedJavaGlobalRef j_observer_global_; - std::unique_ptr constraints_; }; } // namespace jni 
diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/video.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/video.cc index 605258436..ee5ecbea6 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/video.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/video.cc @@ -16,6 +16,7 @@ #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/android_video_track_source.h" #include "sdk/android/src/jni/video_decoder_factory_wrapper.h" @@ -45,9 +46,8 @@ void* CreateVideoSource(JNIEnv* env, rtc::Thread* worker_thread, jboolean is_screencast, jboolean align_timestamps) { - rtc::scoped_refptr source( - new rtc::RefCountedObject( - signaling_thread, env, is_screencast, align_timestamps)); + auto source = rtc::make_ref_counted( + signaling_thread, env, is_screencast, align_timestamps); return source.release(); } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_codec_info.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_codec_info.cc index a218a1d23..8c86b7c37 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_codec_info.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_codec_info.cc @@ -19,18 +19,33 @@ namespace jni { SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni, const JavaRef& j_info) { + std::string codecName = + JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)); + std::string sdpCodecName; + if (codecName == "AV1") { + // TODO(yyaroshevich): Undo mapping once AV1 sdp name is standardized + sdpCodecName = "AV1X"; + } else { + sdpCodecName = codecName; + } return SdpVideoFormat( - JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)), + sdpCodecName, JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info))); } 
ScopedJavaLocalRef SdpVideoFormatToVideoCodecInfo( JNIEnv* jni, const SdpVideoFormat& format) { + std::string codecName; + if (format.name == "AV1X" || format.name == "AV1") { + codecName = "AV1"; + } else { + codecName = format.name; + } ScopedJavaLocalRef j_params = NativeToJavaStringMap(jni, format.parameters); return Java_VideoCodecInfo_Constructor( - jni, NativeToJavaString(jni, format.name), j_params); + jni, NativeToJavaString(jni, codecName), j_params); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.cc index 3aa18abbd..01fb84fc0 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.cc @@ -144,11 +144,6 @@ int32_t VideoDecoderWrapper::Release() { return status; } -bool VideoDecoderWrapper::PrefersLateDecoding() const { - JNIEnv* jni = AttachCurrentThreadIfNeeded(); - return Java_VideoDecoder_getPrefersLateDecoding(jni, decoder_); -} - const char* VideoDecoderWrapper::ImplementationName() const { return implementation_name_.c_str(); } @@ -249,12 +244,8 @@ absl::optional VideoDecoderWrapper::ParseQP( break; } case kVideoCodecH264: { - h264_bitstream_parser_.ParseBitstream(input_image.data(), - input_image.size()); - int qp_int; - if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) { - qp = qp_int; - } + h264_bitstream_parser_.ParseBitstream(input_image); + qp = h264_bitstream_parser_.GetLastSliceQp(); break; } default: diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.h index f5c4787a6..15f7ab9bf 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_decoder_wrapper.h @@ -16,11 +16,11 @@ #include #include +#include 
"api/sequence_checker.h" #include "api/video_codecs/video_decoder.h" #include "common_video/h264/h264_bitstream_parser.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/jni_helpers.h" namespace webrtc { @@ -47,11 +47,6 @@ class VideoDecoderWrapper : public VideoDecoder { // still safe and synchronous. int32_t Release() override RTC_NO_THREAD_SAFETY_ANALYSIS; - // Returns true if the decoder prefer to decode frames late. - // That is, it can not decode infinite number of frames before the decoded - // frame is consumed. - bool PrefersLateDecoding() const override; - const char* ImplementationName() const override; // Wraps the frame to a AndroidVideoBuffer and passes it to the callback. @@ -88,7 +83,7 @@ class VideoDecoderWrapper : public VideoDecoder { const ScopedJavaGlobalRef decoder_; const std::string implementation_name_; - rtc::ThreadChecker decoder_thread_checker_; + SequenceChecker decoder_thread_checker_; // Callbacks must be executed sequentially on an arbitrary thread. We do not // own this thread so a thread checker cannot be used. 
rtc::RaceChecker callback_race_checker_; diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc index f64f1b466..f1f095d8f 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.cc @@ -18,6 +18,7 @@ #endif #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" #include "rtc_base/logging.h" @@ -312,8 +313,9 @@ int VideoEncoderWrapper::ParseQp(rtc::ArrayView buffer) { success = vp9::GetQp(buffer.data(), buffer.size(), &qp); break; case kVideoCodecH264: - h264_bitstream_parser_.ParseBitstream(buffer.data(), buffer.size()); - success = h264_bitstream_parser_.GetLastSliceQp(&qp); + h264_bitstream_parser_.ParseBitstream(buffer); + qp = h264_bitstream_parser_.GetLastSliceQp().value_or(-1); + success = (qp >= 0); break; #ifndef DISABLE_H265 case kVideoCodecH265: @@ -333,6 +335,19 @@ CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo( const bool key_frame = frame._frameType == VideoFrameType::kVideoFrameKey; CodecSpecificInfo info; + // For stream with scalability, NextFrameConfig should be called before + // encoding and used to configure encoder, then passed here e.g. via + // FrameExtraInfo structure. But while this encoder wrapper uses only trivial + // scalability, NextFrameConfig can be called here. 
+ auto layer_frames = svc_controller_.NextFrameConfig(/*reset=*/key_frame); + RTC_DCHECK_EQ(layer_frames.size(), 1); + info.generic_frame_info = svc_controller_.OnEncodeDone(layer_frames[0]); + if (key_frame) { + info.template_structure = svc_controller_.DependencyStructure(); + info.template_structure->resolutions = { + RenderResolution(frame._encodedWidth, frame._encodedHeight)}; + } + info.codecType = codec_settings_.codecType; switch (codec_settings_.codecType) { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.h index 16eb1c2b8..d1a4198c8 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_encoder_wrapper.h @@ -12,6 +12,7 @@ #define SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_ #include + #include #include #include @@ -24,6 +25,7 @@ #include "common_video/h265/h265_bitstream_parser.h" #endif #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "rtc_base/synchronization/mutex.h" #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/video_frame.h" @@ -102,6 +104,8 @@ class VideoEncoderWrapper : public VideoEncoder { H265BitstreamParser h265_bitstream_parser_; #endif + // Fills frame dependencies in codec-agnostic format. + ScalableVideoControllerNoLayering svc_controller_; // VP9 variables to populate codec specific structure. GofInfoVP9 gof_; // Contains each frame's temporal information for // non-flexible VP9 mode. 
diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc index 860eebe5e..dd3562f54 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.cc @@ -14,10 +14,9 @@ #include "api/scoped_refptr.h" #include "common_video/include/video_frame_buffer.h" -#include "rtc_base/bind.h" #include "rtc_base/checks.h" -#include "rtc_base/keep_ref_until_done.h" #include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" #include "rtc_base/time_utils.h" #include "sdk/android/generated_video_jni/VideoFrame_jni.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -78,8 +77,8 @@ rtc::scoped_refptr AndroidVideoI420Buffer::Adopt( int width, int height, const JavaRef& j_video_frame_buffer) { - return new rtc::RefCountedObject( - jni, width, height, j_video_frame_buffer); + return rtc::make_ref_counted(jni, width, height, + j_video_frame_buffer); } AndroidVideoI420Buffer::AndroidVideoI420Buffer( @@ -124,8 +123,7 @@ int64_t GetJavaVideoFrameTimestampNs(JNIEnv* jni, rtc::scoped_refptr AndroidVideoBuffer::Adopt( JNIEnv* jni, const JavaRef& j_video_frame_buffer) { - return new rtc::RefCountedObject(jni, - j_video_frame_buffer); + return rtc::make_ref_counted(jni, j_video_frame_buffer); } rtc::scoped_refptr AndroidVideoBuffer::Create( diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.h index 5e39b8a77..d1e463bba 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/video_frame.h @@ -16,7 +16,6 @@ #include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" -#include "rtc_base/callback.h" #include "sdk/android/src/jni/jni_helpers.h" namespace webrtc { diff --git 
a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc index faf393bf3..6f4901c97 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc @@ -118,7 +118,6 @@ const char MediaConstraints::kUseRtpMux[] = "googUseRtpMUX"; // Below constraints should be used during PeerConnection construction. const char MediaConstraints::kEnableDtlsSrtp[] = "DtlsSrtpKeyAgreement"; -const char MediaConstraints::kEnableRtpDataChannels[] = "RtpDataChannels"; // Google-specific constraint keys. const char MediaConstraints::kEnableDscp[] = "googDscp"; const char MediaConstraints::kEnableIPv6[] = "googIPv6"; @@ -167,8 +166,6 @@ void CopyConstraintsIntoRtcConfiguration( FindConstraint(constraints, MediaConstraints::kCpuOveruseDetection, &configuration->media_config.video.enable_cpu_adaptation, nullptr); - FindConstraint(constraints, MediaConstraints::kEnableRtpDataChannels, - &configuration->enable_rtp_data_channel, nullptr); // Find Suspend Below Min Bitrate constraint. FindConstraint( constraints, MediaConstraints::kEnableVideoSuspendBelowMinBitrate, diff --git a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h index b85dc472e..15cb363f7 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h @@ -85,8 +85,6 @@ class MediaConstraints { // PeerConnection constraint keys. // Temporary pseudo-constraints used to enable DTLS-SRTP static const char kEnableDtlsSrtp[]; // Enable DTLS-SRTP - // Temporary pseudo-constraints used to enable DataChannels - static const char kEnableRtpDataChannels[]; // Enable RTP DataChannels // Google-specific constraint keys. // Temporary pseudo-constraint for enabling DSCP through JS. 
static const char kEnableDscp[]; // googDscp diff --git a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc index d29d819fc..4fbd82508 100644 --- a/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc +++ b/TMessagesProj/jni/voip/webrtc/stats/rtc_stats_report.cc @@ -56,15 +56,12 @@ bool RTCStatsReport::ConstIterator::operator!=( rtc::scoped_refptr RTCStatsReport::Create( int64_t timestamp_us) { - return rtc::scoped_refptr( - new rtc::RefCountedObject(timestamp_us)); + return rtc::scoped_refptr(new RTCStatsReport(timestamp_us)); } RTCStatsReport::RTCStatsReport(int64_t timestamp_us) : timestamp_us_(timestamp_us) {} -RTCStatsReport::~RTCStatsReport() {} - rtc::scoped_refptr RTCStatsReport::Copy() const { rtc::scoped_refptr copy = Create(timestamp_us_); for (auto it = stats_.begin(); it != stats_.end(); ++it) { diff --git a/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc b/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc index 5a803de07..dcd2aeb77 100644 --- a/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc +++ b/TMessagesProj/jni/voip/webrtc/stats/rtcstats_objects.cc @@ -259,13 +259,13 @@ WEBRTC_RTCSTATS_IMPL(RTCIceCandidateStats, RTCStats, "abstract-ice-candidate", &is_remote, &network_type, &ip, + &address, &port, &protocol, &relay_protocol, &candidate_type, &priority, - &url, - &deleted) + &url) // clang-format on RTCIceCandidateStats::RTCIceCandidateStats(const std::string& id, @@ -281,13 +281,13 @@ RTCIceCandidateStats::RTCIceCandidateStats(std::string&& id, is_remote("isRemote", is_remote), network_type("networkType"), ip("ip"), + address("address"), port("port"), protocol("protocol"), relay_protocol("relayProtocol"), candidate_type("candidateType"), priority("priority"), - url("url"), - deleted("deleted", false) {} + url("url") {} RTCIceCandidateStats::RTCIceCandidateStats(const RTCIceCandidateStats& other) : RTCStats(other.id(), other.timestamp_us()), @@ -295,13 +295,13 
@@ RTCIceCandidateStats::RTCIceCandidateStats(const RTCIceCandidateStats& other) is_remote(other.is_remote), network_type(other.network_type), ip(other.ip), + address(other.address), port(other.port), protocol(other.protocol), relay_protocol(other.relay_protocol), candidate_type(other.candidate_type), priority(other.priority), - url(other.url), - deleted(other.deleted) {} + url(other.url) {} RTCIceCandidateStats::~RTCIceCandidateStats() {} @@ -547,17 +547,11 @@ RTCPeerConnectionStats::~RTCPeerConnectionStats() {} // clang-format off WEBRTC_RTCSTATS_IMPL(RTCRTPStreamStats, RTCStats, "rtp", &ssrc, - &is_remote, - &media_type, &kind, &track_id, &transport_id, &codec_id, - &fir_count, - &pli_count, - &nack_count, - &sli_count, - &qp_sum) + &media_type) // clang-format on RTCRTPStreamStats::RTCRTPStreamStats(const std::string& id, @@ -567,46 +561,82 @@ RTCRTPStreamStats::RTCRTPStreamStats(const std::string& id, RTCRTPStreamStats::RTCRTPStreamStats(std::string&& id, int64_t timestamp_us) : RTCStats(std::move(id), timestamp_us), ssrc("ssrc"), - is_remote("isRemote", false), - media_type("mediaType"), kind("kind"), track_id("trackId"), transport_id("transportId"), codec_id("codecId"), - fir_count("firCount"), - pli_count("pliCount"), - nack_count("nackCount"), - sli_count("sliCount"), - qp_sum("qpSum") {} + media_type("mediaType") {} RTCRTPStreamStats::RTCRTPStreamStats(const RTCRTPStreamStats& other) : RTCStats(other.id(), other.timestamp_us()), ssrc(other.ssrc), - is_remote(other.is_remote), - media_type(other.media_type), kind(other.kind), track_id(other.track_id), transport_id(other.transport_id), codec_id(other.codec_id), - fir_count(other.fir_count), - pli_count(other.pli_count), - nack_count(other.nack_count), - sli_count(other.sli_count), - qp_sum(other.qp_sum) {} + media_type(other.media_type) {} RTCRTPStreamStats::~RTCRTPStreamStats() {} // clang-format off WEBRTC_RTCSTATS_IMPL( - RTCInboundRTPStreamStats, RTCRTPStreamStats, "inbound-rtp", + 
RTCReceivedRtpStreamStats, RTCRTPStreamStats, "received-rtp", + &jitter, + &packets_lost) +// clang-format on + +RTCReceivedRtpStreamStats::RTCReceivedRtpStreamStats(const std::string&& id, + int64_t timestamp_us) + : RTCReceivedRtpStreamStats(std::string(id), timestamp_us) {} + +RTCReceivedRtpStreamStats::RTCReceivedRtpStreamStats(std::string&& id, + int64_t timestamp_us) + : RTCRTPStreamStats(std::move(id), timestamp_us), + jitter("jitter"), + packets_lost("packetsLost") {} + +RTCReceivedRtpStreamStats::RTCReceivedRtpStreamStats( + const RTCReceivedRtpStreamStats& other) + : RTCRTPStreamStats(other), + jitter(other.jitter), + packets_lost(other.packets_lost) {} + +RTCReceivedRtpStreamStats::~RTCReceivedRtpStreamStats() {} + +// clang-format off +WEBRTC_RTCSTATS_IMPL( + RTCSentRtpStreamStats, RTCRTPStreamStats, "sent-rtp", + &packets_sent, + &bytes_sent) +// clang-format on + +RTCSentRtpStreamStats::RTCSentRtpStreamStats(const std::string&& id, + int64_t timestamp_us) + : RTCSentRtpStreamStats(std::string(id), timestamp_us) {} + +RTCSentRtpStreamStats::RTCSentRtpStreamStats(std::string&& id, + int64_t timestamp_us) + : RTCRTPStreamStats(std::move(id), timestamp_us), + packets_sent("packetsSent"), + bytes_sent("bytesSent") {} + +RTCSentRtpStreamStats::RTCSentRtpStreamStats(const RTCSentRtpStreamStats& other) + : RTCRTPStreamStats(other), + packets_sent(other.packets_sent), + bytes_sent(other.bytes_sent) {} + +RTCSentRtpStreamStats::~RTCSentRtpStreamStats() {} + +// clang-format off +WEBRTC_RTCSTATS_IMPL( + RTCInboundRTPStreamStats, RTCReceivedRtpStreamStats, "inbound-rtp", + &remote_id, &packets_received, &fec_packets_received, &fec_packets_discarded, &bytes_received, &header_bytes_received, - &packets_lost, &last_packet_received_timestamp, - &jitter, &jitter_buffer_delay, &jitter_buffer_emitted_count, &total_samples_received, @@ -642,7 +672,11 @@ WEBRTC_RTCSTATS_IMPL( &total_squared_inter_frame_delay, &content_type, &estimated_playout_timestamp, - 
&decoder_implementation) + &decoder_implementation, + &fir_count, + &pli_count, + &nack_count, + &qp_sum) // clang-format on RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(const std::string& id, @@ -651,15 +685,14 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(const std::string& id, RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id, int64_t timestamp_us) - : RTCRTPStreamStats(std::move(id), timestamp_us), + : RTCReceivedRtpStreamStats(std::move(id), timestamp_us), + remote_id("remoteId"), packets_received("packetsReceived"), fec_packets_received("fecPacketsReceived"), fec_packets_discarded("fecPacketsDiscarded"), bytes_received("bytesReceived"), header_bytes_received("headerBytesReceived"), - packets_lost("packetsLost"), last_packet_received_timestamp("lastPacketReceivedTimestamp"), - jitter("jitter"), jitter_buffer_delay("jitterBufferDelay"), jitter_buffer_emitted_count("jitterBufferEmittedCount"), total_samples_received("totalSamplesReceived"), @@ -695,19 +728,22 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(std::string&& id, total_squared_inter_frame_delay("totalSquaredInterFrameDelay"), content_type("contentType"), estimated_playout_timestamp("estimatedPlayoutTimestamp"), - decoder_implementation("decoderImplementation") {} + decoder_implementation("decoderImplementation"), + fir_count("firCount"), + pli_count("pliCount"), + nack_count("nackCount"), + qp_sum("qpSum") {} RTCInboundRTPStreamStats::RTCInboundRTPStreamStats( const RTCInboundRTPStreamStats& other) - : RTCRTPStreamStats(other), + : RTCReceivedRtpStreamStats(other), + remote_id(other.remote_id), packets_received(other.packets_received), fec_packets_received(other.fec_packets_received), fec_packets_discarded(other.fec_packets_discarded), bytes_received(other.bytes_received), header_bytes_received(other.header_bytes_received), - packets_lost(other.packets_lost), last_packet_received_timestamp(other.last_packet_received_timestamp), - jitter(other.jitter), 
jitter_buffer_delay(other.jitter_buffer_delay), jitter_buffer_emitted_count(other.jitter_buffer_emitted_count), total_samples_received(other.total_samples_received), @@ -744,7 +780,11 @@ RTCInboundRTPStreamStats::RTCInboundRTPStreamStats( total_squared_inter_frame_delay(other.total_squared_inter_frame_delay), content_type(other.content_type), estimated_playout_timestamp(other.estimated_playout_timestamp), - decoder_implementation(other.decoder_implementation) {} + decoder_implementation(other.decoder_implementation), + fir_count(other.fir_count), + pli_count(other.pli_count), + nack_count(other.nack_count), + qp_sum(other.qp_sum) {} RTCInboundRTPStreamStats::~RTCInboundRTPStreamStats() {} @@ -773,7 +813,11 @@ WEBRTC_RTCSTATS_IMPL( &quality_limitation_reason, &quality_limitation_resolution_changes, &content_type, - &encoder_implementation) + &encoder_implementation, + &fir_count, + &pli_count, + &nack_count, + &qp_sum) // clang-format on RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(const std::string& id, @@ -806,7 +850,11 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats(std::string&& id, quality_limitation_resolution_changes( "qualityLimitationResolutionChanges"), content_type("contentType"), - encoder_implementation("encoderImplementation") {} + encoder_implementation("encoderImplementation"), + fir_count("firCount"), + pli_count("pliCount"), + nack_count("nackCount"), + qp_sum("qpSum") {} RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats( const RTCOutboundRTPStreamStats& other) @@ -834,21 +882,23 @@ RTCOutboundRTPStreamStats::RTCOutboundRTPStreamStats( quality_limitation_resolution_changes( other.quality_limitation_resolution_changes), content_type(other.content_type), - encoder_implementation(other.encoder_implementation) {} + encoder_implementation(other.encoder_implementation), + fir_count(other.fir_count), + pli_count(other.pli_count), + nack_count(other.nack_count), + qp_sum(other.qp_sum) {} 
RTCOutboundRTPStreamStats::~RTCOutboundRTPStreamStats() {} // clang-format off WEBRTC_RTCSTATS_IMPL( - RTCRemoteInboundRtpStreamStats, RTCStats, "remote-inbound-rtp", - &ssrc, - &kind, - &transport_id, - &codec_id, - &packets_lost, - &jitter, + RTCRemoteInboundRtpStreamStats, RTCReceivedRtpStreamStats, + "remote-inbound-rtp", &local_id, - &round_trip_time) + &round_trip_time, + &fraction_lost, + &total_round_trip_time, + &round_trip_time_measurements) // clang-format on RTCRemoteInboundRtpStreamStats::RTCRemoteInboundRtpStreamStats( @@ -859,30 +909,55 @@ RTCRemoteInboundRtpStreamStats::RTCRemoteInboundRtpStreamStats( RTCRemoteInboundRtpStreamStats::RTCRemoteInboundRtpStreamStats( std::string&& id, int64_t timestamp_us) - : RTCStats(std::move(id), timestamp_us), - ssrc("ssrc"), - kind("kind"), - transport_id("transportId"), - codec_id("codecId"), - packets_lost("packetsLost"), - jitter("jitter"), + : RTCReceivedRtpStreamStats(std::move(id), timestamp_us), local_id("localId"), - round_trip_time("roundTripTime") {} + round_trip_time("roundTripTime"), + fraction_lost("fractionLost"), + total_round_trip_time("totalRoundTripTime"), + round_trip_time_measurements("roundTripTimeMeasurements") {} RTCRemoteInboundRtpStreamStats::RTCRemoteInboundRtpStreamStats( const RTCRemoteInboundRtpStreamStats& other) - : RTCStats(other), - ssrc(other.ssrc), - kind(other.kind), - transport_id(other.transport_id), - codec_id(other.codec_id), - packets_lost(other.packets_lost), - jitter(other.jitter), + : RTCReceivedRtpStreamStats(other), local_id(other.local_id), - round_trip_time(other.round_trip_time) {} + round_trip_time(other.round_trip_time), + fraction_lost(other.fraction_lost), + total_round_trip_time(other.total_round_trip_time), + round_trip_time_measurements(other.round_trip_time_measurements) {} RTCRemoteInboundRtpStreamStats::~RTCRemoteInboundRtpStreamStats() {} +// clang-format off +WEBRTC_RTCSTATS_IMPL( + RTCRemoteOutboundRtpStreamStats, RTCSentRtpStreamStats, + 
"remote-outbound-rtp", + &local_id, + &remote_timestamp, + &reports_sent) +// clang-format on + +RTCRemoteOutboundRtpStreamStats::RTCRemoteOutboundRtpStreamStats( + const std::string& id, + int64_t timestamp_us) + : RTCRemoteOutboundRtpStreamStats(std::string(id), timestamp_us) {} + +RTCRemoteOutboundRtpStreamStats::RTCRemoteOutboundRtpStreamStats( + std::string&& id, + int64_t timestamp_us) + : RTCSentRtpStreamStats(std::move(id), timestamp_us), + local_id("localId"), + remote_timestamp("remoteTimestamp"), + reports_sent("reportsSent") {} + +RTCRemoteOutboundRtpStreamStats::RTCRemoteOutboundRtpStreamStats( + const RTCRemoteOutboundRtpStreamStats& other) + : RTCSentRtpStreamStats(other), + local_id(other.local_id), + remote_timestamp(other.remote_timestamp), + reports_sent(other.reports_sent) {} + +RTCRemoteOutboundRtpStreamStats::~RTCRemoteOutboundRtpStreamStats() {} + // clang-format off WEBRTC_RTCSTATS_IMPL(RTCMediaSourceStats, RTCStats, "parent-media-source", &track_identifier, diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h index 3c60f63da..914fbda92 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/include/clock.h @@ -32,23 +32,15 @@ const double kMagicNtpFractionalUnit = 4.294967296E+9; class RTC_EXPORT Clock { public: virtual ~Clock() {} + // Return a timestamp relative to an unspecified epoch. - virtual Timestamp CurrentTime() { - return Timestamp::Micros(TimeInMicroseconds()); - } - virtual int64_t TimeInMilliseconds() { return CurrentTime().ms(); } - virtual int64_t TimeInMicroseconds() { return CurrentTime().us(); } + virtual Timestamp CurrentTime() = 0; + int64_t TimeInMilliseconds() { return CurrentTime().ms(); } + int64_t TimeInMicroseconds() { return CurrentTime().us(); } - // Retrieve an NTP absolute timestamp. + // Retrieve an NTP absolute timestamp (with an epoch of Jan 1, 1900). 
virtual NtpTime CurrentNtpTime() = 0; - - // Retrieve an NTP absolute timestamp in milliseconds. - virtual int64_t CurrentNtpInMilliseconds() = 0; - - // Converts an NTP timestamp to a millisecond timestamp. - static int64_t NtpToMs(uint32_t seconds, uint32_t fractions) { - return NtpTime(seconds, fractions).ToMs(); - } + int64_t CurrentNtpInMilliseconds() { return CurrentNtpTime().ToMs(); } // Returns an instance of the real-time system clock implementation. static Clock* GetRealTimeClock(); @@ -56,21 +48,16 @@ class RTC_EXPORT Clock { class SimulatedClock : public Clock { public: + // The constructors assume an epoch of Jan 1, 1970. explicit SimulatedClock(int64_t initial_time_us); explicit SimulatedClock(Timestamp initial_time); - ~SimulatedClock() override; - // Return a timestamp relative to some arbitrary source; the source is fixed - // for this clock. + // Return a timestamp with an epoch of Jan 1, 1970. Timestamp CurrentTime() override; - // Retrieve an NTP absolute timestamp. NtpTime CurrentNtpTime() override; - // Converts an NTP timestamp to a millisecond timestamp. - int64_t CurrentNtpInMilliseconds() override; - // Advance the simulated clock with a given number of milliseconds or // microseconds. 
void AdvanceTimeMilliseconds(int64_t milliseconds); diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc index 0ae624d84..2c3981a5a 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/clock.cc @@ -10,6 +10,8 @@ #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" + #if defined(WEBRTC_WIN) // Windows needs to be included before mmsystem.h @@ -29,57 +31,82 @@ #include "rtc_base/time_utils.h" namespace webrtc { +namespace { + +int64_t NtpOffsetUsCalledOnce() { + constexpr int64_t kNtpJan1970Sec = 2208988800; + int64_t clock_time = rtc::TimeMicros(); + int64_t utc_time = rtc::TimeUTCMicros(); + return utc_time - clock_time + kNtpJan1970Sec * rtc::kNumMicrosecsPerSec; +} + +NtpTime TimeMicrosToNtp(int64_t time_us) { + static int64_t ntp_offset_us = NtpOffsetUsCalledOnce(); + + int64_t time_ntp_us = time_us + ntp_offset_us; + RTC_DCHECK_GE(time_ntp_us, 0); // Time before year 1900 is unsupported. + + // Convert seconds to uint32 through uint64 for a well-defined cast. + // A wrap around, which will happen in 2036, is expected for NTP time. + uint32_t ntp_seconds = + static_cast(time_ntp_us / rtc::kNumMicrosecsPerSec); + + // Scale fractions of the second to NTP resolution. 
+ constexpr int64_t kNtpFractionsInSecond = 1LL << 32; + int64_t us_fractions = time_ntp_us % rtc::kNumMicrosecsPerSec; + uint32_t ntp_fractions = + us_fractions * kNtpFractionsInSecond / rtc::kNumMicrosecsPerSec; + + return NtpTime(ntp_seconds, ntp_fractions); +} + +void GetSecondsAndFraction(const timeval& time, + uint32_t* seconds, + double* fraction) { + *seconds = time.tv_sec + kNtpJan1970; + *fraction = time.tv_usec / 1e6; + + while (*fraction >= 1) { + --*fraction; + ++*seconds; + } + while (*fraction < 0) { + ++*fraction; + --*seconds; + } +} + +} // namespace class RealTimeClock : public Clock { + public: + RealTimeClock() + : use_system_independent_ntp_time_(!field_trial::IsEnabled( + "WebRTC-SystemIndependentNtpTimeKillSwitch")) {} + Timestamp CurrentTime() override { return Timestamp::Micros(rtc::TimeMicros()); } - // Return a timestamp in milliseconds relative to some arbitrary source; the - // source is fixed for this clock. - int64_t TimeInMilliseconds() override { return rtc::TimeMillis(); } - // Return a timestamp in microseconds relative to some arbitrary source; the - // source is fixed for this clock. - int64_t TimeInMicroseconds() override { return rtc::TimeMicros(); } - - // Retrieve an NTP absolute timestamp. NtpTime CurrentNtpTime() override { - timeval tv = CurrentTimeVal(); - double microseconds_in_seconds; - uint32_t seconds; - Adjust(tv, &seconds, µseconds_in_seconds); - uint32_t fractions = static_cast( - microseconds_in_seconds * kMagicNtpFractionalUnit + 0.5); - return NtpTime(seconds, fractions); - } - - // Retrieve an NTP absolute timestamp in milliseconds. - int64_t CurrentNtpInMilliseconds() override { - timeval tv = CurrentTimeVal(); - uint32_t seconds; - double microseconds_in_seconds; - Adjust(tv, &seconds, µseconds_in_seconds); - return 1000 * static_cast(seconds) + - static_cast(1000.0 * microseconds_in_seconds + 0.5); + return use_system_independent_ntp_time_ ? 
TimeMicrosToNtp(rtc::TimeMicros()) + : SystemDependentNtpTime(); } protected: virtual timeval CurrentTimeVal() = 0; - static void Adjust(const timeval& tv, - uint32_t* adjusted_s, - double* adjusted_us_in_s) { - *adjusted_s = tv.tv_sec + kNtpJan1970; - *adjusted_us_in_s = tv.tv_usec / 1e6; + private: + NtpTime SystemDependentNtpTime() { + uint32_t seconds; + double fraction; + GetSecondsAndFraction(CurrentTimeVal(), &seconds, &fraction); - if (*adjusted_us_in_s >= 1) { - *adjusted_us_in_s -= 1; - ++*adjusted_s; - } else if (*adjusted_us_in_s < -1) { - *adjusted_us_in_s += 1; - --*adjusted_s; - } + return NtpTime(seconds, static_cast( + fraction * kMagicNtpFractionalUnit + 0.5)); } + + bool use_system_independent_ntp_time_; }; #if defined(WINUWP) @@ -90,10 +117,10 @@ class WinUwpRealTimeClock final : public RealTimeClock { protected: timeval CurrentTimeVal() override { - // The rtc::SystemTimeNanos() method is already time offset from a base - // epoch value and might as be synchronized against an NTP time server as - // an added bonus. - auto nanos = rtc::SystemTimeNanos(); + // The rtc::WinUwpSystemTimeNanos() method is already time offset from a + // base epoch value and might as be synchronized against an NTP time server + // as an added bonus. 
+ auto nanos = rtc::WinUwpSystemTimeNanos(); struct timeval tv; @@ -257,10 +284,6 @@ NtpTime SimulatedClock::CurrentNtpTime() { return NtpTime(seconds, fractions); } -int64_t SimulatedClock::CurrentNtpInMilliseconds() { - return TimeInMilliseconds() + 1000 * static_cast(kNtpJan1970); -} - void SimulatedClock::AdvanceTimeMilliseconds(int64_t milliseconds) { AdvanceTime(TimeDelta::Millis(milliseconds)); } diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc index fb455193f..3de81e72e 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.cc @@ -8,12 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "video/adaptation/balanced_constraint.h" + #include #include -#include "rtc_base/synchronization/sequence_checker.h" +#include "api/sequence_checker.h" #include "rtc_base/task_utils/to_queued_task.h" -#include "video/adaptation/balanced_constraint.h" namespace webrtc { @@ -39,18 +40,20 @@ bool BalancedConstraint::IsAdaptationUpAllowed( // Don't adapt if BalancedDegradationSettings applies and determines this will // exceed bitrate constraints. 
if (degradation_preference_provider_->degradation_preference() == - DegradationPreference::BALANCED && - !balanced_settings_.CanAdaptUp(input_state.video_codec_type(), - input_state.frame_size_pixels().value(), - encoder_target_bitrate_bps_.value_or(0))) { - return false; - } - if (DidIncreaseResolution(restrictions_before, restrictions_after) && - !balanced_settings_.CanAdaptUpResolution( - input_state.video_codec_type(), - input_state.frame_size_pixels().value(), - encoder_target_bitrate_bps_.value_or(0))) { - return false; + DegradationPreference::BALANCED) { + if (!balanced_settings_.CanAdaptUp( + input_state.video_codec_type(), + input_state.frame_size_pixels().value(), + encoder_target_bitrate_bps_.value_or(0))) { + return false; + } + if (DidIncreaseResolution(restrictions_before, restrictions_after) && + !balanced_settings_.CanAdaptUpResolution( + input_state.video_codec_type(), + input_state.frame_size_pixels().value(), + encoder_target_bitrate_bps_.value_or(0))) { + return false; + } } return true; } diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h index 5e0240802..0bbd67040 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/balanced_constraint.h @@ -14,10 +14,11 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "call/adaptation/adaptation_constraint.h" #include "call/adaptation/degradation_preference_provider.h" #include "rtc_base/experiments/balanced_degradation_settings.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -38,7 +39,7 @@ class BalancedConstraint : public AdaptationConstraint { const VideoSourceRestrictions& restrictions_after) const override; private: - SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker 
sequence_checker_; absl::optional encoder_target_bitrate_bps_ RTC_GUARDED_BY(&sequence_checker_); const BalancedDegradationSettings balanced_settings_; diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc index 1061c4557..cd61e555c 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.cc @@ -8,13 +8,15 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include -#include - -#include "call/adaptation/video_stream_adapter.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "video/adaptation/bitrate_constraint.h" +#include +#include + +#include "api/sequence_checker.h" +#include "call/adaptation/video_stream_adapter.h" +#include "video/adaptation/video_stream_encoder_resource_manager.h" + namespace webrtc { BitrateConstraint::BitrateConstraint() @@ -42,19 +44,35 @@ bool BitrateConstraint::IsAdaptationUpAllowed( RTC_DCHECK_RUN_ON(&sequence_checker_); // Make sure bitrate limits are not violated. if (DidIncreaseResolution(restrictions_before, restrictions_after)) { + if (!encoder_settings_.has_value()) { + return true; + } + uint32_t bitrate_bps = encoder_target_bitrate_bps_.value_or(0); + if (bitrate_bps == 0) { + return true; + } + + if (VideoStreamEncoderResourceManager::IsSimulcast( + encoder_settings_->encoder_config())) { + // Resolution bitrate limits usage is restricted to singlecast. + return true; + } + + absl::optional current_frame_size_px = + input_state.single_active_stream_pixels(); + if (!current_frame_size_px.has_value()) { + return true; + } + absl::optional bitrate_limits = - encoder_settings_.has_value() - ? encoder_settings_->encoder_info() - .GetEncoderBitrateLimitsForResolution( - // Need some sort of expected resulting pixels to be used - // instead of unrestricted. 
- GetHigherResolutionThan( - input_state.frame_size_pixels().value())) - : absl::nullopt; - if (bitrate_limits.has_value() && bitrate_bps != 0) { - RTC_DCHECK_GE(bitrate_limits->frame_size_pixels, - input_state.frame_size_pixels().value()); + encoder_settings_->encoder_info().GetEncoderBitrateLimitsForResolution( + // Need some sort of expected resulting pixels to be used + // instead of unrestricted. + GetHigherResolutionThan(*current_frame_size_px)); + + if (bitrate_limits.has_value()) { + RTC_DCHECK_GE(bitrate_limits->frame_size_pixels, *current_frame_size_px); return bitrate_bps >= static_cast(bitrate_limits->min_start_bitrate_bps); } diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.h index 015edcc13..a608e5db5 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/bitrate_constraint.h @@ -14,11 +14,12 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "call/adaptation/adaptation_constraint.h" #include "call/adaptation/encoder_settings.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -40,7 +41,7 @@ class BitrateConstraint : public AdaptationConstraint { const VideoSourceRestrictions& restrictions_after) const override; private: - SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; absl::optional encoder_settings_ RTC_GUARDED_BY(&sequence_checker_); absl::optional encoder_target_bitrate_bps_ diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.cc index 8fe7450a0..c42c63f4b 100644 --- 
a/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/encode_usage_resource.cc @@ -21,7 +21,7 @@ namespace webrtc { // static rtc::scoped_refptr EncodeUsageResource::Create( std::unique_ptr overuse_detector) { - return new rtc::RefCountedObject( + return rtc::make_ref_counted( std::move(overuse_detector)); } diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h index 16217fff8..2b4dd61d2 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h @@ -15,12 +15,13 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/video/video_stream_encoder_observer.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/exp_filter.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -134,7 +135,7 @@ class OveruseFrameDetector { static std::unique_ptr CreateProcessingUsage( const CpuOveruseOptions& options); - SequenceChecker task_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker task_checker_; // Owned by the task queue from where StartCheckForOveruse is called. RepeatingTaskHandle check_overuse_task_ RTC_GUARDED_BY(task_checker_); diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc new file mode 100644 index 000000000..789dac2c0 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc @@ -0,0 +1,102 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/adaptation/pixel_limit_resource.h" + +#include "api/sequence_checker.h" +#include "api/units/time_delta.h" +#include "call/adaptation/video_stream_adapter.h" +#include "rtc_base/checks.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +namespace { + +constexpr TimeDelta kResourceUsageCheckIntervalMs = TimeDelta::Seconds(5); + +} // namespace + +// static +rtc::scoped_refptr PixelLimitResource::Create( + TaskQueueBase* task_queue, + VideoStreamInputStateProvider* input_state_provider) { + return rtc::make_ref_counted(task_queue, + input_state_provider); +} + +PixelLimitResource::PixelLimitResource( + TaskQueueBase* task_queue, + VideoStreamInputStateProvider* input_state_provider) + : task_queue_(task_queue), + input_state_provider_(input_state_provider), + max_pixels_(absl::nullopt) { + RTC_DCHECK(task_queue_); + RTC_DCHECK(input_state_provider_); +} + +PixelLimitResource::~PixelLimitResource() { + RTC_DCHECK(!listener_); + RTC_DCHECK(!repeating_task_.Running()); +} + +void PixelLimitResource::SetMaxPixels(int max_pixels) { + RTC_DCHECK_RUN_ON(task_queue_); + max_pixels_ = max_pixels; +} + +void PixelLimitResource::SetResourceListener(ResourceListener* listener) { + RTC_DCHECK_RUN_ON(task_queue_); + listener_ = listener; + if (listener_) { + repeating_task_.Stop(); + repeating_task_ = RepeatingTaskHandle::Start(task_queue_, [&] { + RTC_DCHECK_RUN_ON(task_queue_); + if (!listener_) { + // We don't have a listener so resource adaptation must not be running, + // try again later. 
+ return kResourceUsageCheckIntervalMs; + } + if (!max_pixels_.has_value()) { + // No pixel limit configured yet, try again later. + return kResourceUsageCheckIntervalMs; + } + absl::optional frame_size_pixels = + input_state_provider_->InputState().frame_size_pixels(); + if (!frame_size_pixels.has_value()) { + // We haven't observed a frame yet so we don't know if it's going to be + // too big or too small, try again later. + return kResourceUsageCheckIntervalMs; + } + int current_pixels = frame_size_pixels.value(); + int target_pixel_upper_bounds = max_pixels_.value(); + // To avoid toggling, we allow any resolutions between + // |target_pixel_upper_bounds| and video_stream_adapter.h's + // GetLowerResolutionThan(). This is the pixels we end up if we adapt down + // from |target_pixel_upper_bounds|. + int target_pixels_lower_bounds = + GetLowerResolutionThan(target_pixel_upper_bounds); + if (current_pixels > target_pixel_upper_bounds) { + listener_->OnResourceUsageStateMeasured(this, + ResourceUsageState::kOveruse); + } else if (current_pixels < target_pixels_lower_bounds) { + listener_->OnResourceUsageStateMeasured(this, + ResourceUsageState::kUnderuse); + } + return kResourceUsageCheckIntervalMs; + }); + } else { + repeating_task_.Stop(); + } + // The task must be running if we have a listener. + RTC_DCHECK(repeating_task_.Running() || !listener_); +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.h new file mode 100644 index 000000000..b42f92434 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.h @@ -0,0 +1,60 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_ADAPTATION_PIXEL_LIMIT_RESOURCE_H_ +#define VIDEO_ADAPTATION_PIXEL_LIMIT_RESOURCE_H_ + +#include + +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" +#include "call/adaptation/video_stream_input_state_provider.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// An adaptation resource designed to be used in the TestBed. Used to simulate +// being CPU limited. +// +// Periodically reports "overuse" or "underuse" (every 5 seconds) until the +// stream is within the bounds specified in terms of a maximum resolution and +// one resolution step lower than that (this avoids toggling when this is the +// only resource in play). When multiple resources come in to play some amount +// of toggling is still possible in edge cases but that is OK for testing +// purposes. +class PixelLimitResource : public Resource { + public: + static rtc::scoped_refptr Create( + TaskQueueBase* task_queue, + VideoStreamInputStateProvider* input_state_provider); + + PixelLimitResource(TaskQueueBase* task_queue, + VideoStreamInputStateProvider* input_state_provider); + ~PixelLimitResource() override; + + void SetMaxPixels(int max_pixels); + + // Resource implementation. 
+ std::string Name() const override { return "PixelLimitResource"; } + void SetResourceListener(ResourceListener* listener) override; + + private: + TaskQueueBase* const task_queue_; + VideoStreamInputStateProvider* const input_state_provider_; + absl::optional max_pixels_ RTC_GUARDED_BY(task_queue_); + webrtc::ResourceListener* listener_ RTC_GUARDED_BY(task_queue_); + RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(task_queue_); +}; + +} // namespace webrtc + +#endif // VIDEO_ADAPTATION_PIXEL_LIMIT_RESOURCE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc index c43848818..c455252d4 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/quality_scaler_resource.cc @@ -22,7 +22,7 @@ namespace webrtc { // static rtc::scoped_refptr QualityScalerResource::Create() { - return new rtc::RefCountedObject(); + return rtc::make_ref_counted(); } QualityScalerResource::QualityScalerResource() diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.h index 477fdf492..e10f59575 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.h @@ -16,10 +16,10 @@ #include "absl/types/optional.h" #include "api/adaptation/resource.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "call/adaptation/adaptation_constraint.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc 
b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc index c7ca4bccf..1c2e5839f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc @@ -10,6 +10,9 @@ #include "video/adaptation/video_stream_encoder_resource_manager.h" +#include + +#include #include #include #include @@ -18,6 +21,7 @@ #include "absl/algorithm/container.h" #include "absl/base/macros.h" #include "api/adaptation/resource.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/video/video_adaptation_reason.h" #include "api/video/video_source_interface.h" @@ -27,8 +31,8 @@ #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/time_utils.h" +#include "system_wrappers/include/field_trial.h" #include "video/adaptation/quality_scaler_resource.h" namespace webrtc { @@ -38,6 +42,9 @@ const int kDefaultInputPixelsHeight = 144; namespace { +constexpr const char* kPixelLimitResourceFieldTrialName = + "WebRTC-PixelLimitResource"; + bool IsResolutionScalingEnabled(DegradationPreference degradation_preference) { return degradation_preference == DegradationPreference::MAINTAIN_FRAMERATE || degradation_preference == DegradationPreference::BALANCED; @@ -58,6 +65,28 @@ std::string ToString(VideoAdaptationReason reason) { RTC_CHECK_NOTREACHED(); } +std::vector GetActiveLayersFlags(const VideoCodec& codec) { + std::vector flags; + if (codec.codecType == VideoCodecType::kVideoCodecVP9) { + flags.resize(codec.VP9().numberOfSpatialLayers); + for (size_t i = 0; i < flags.size(); ++i) { + flags[i] = codec.spatialLayers[i].active; + } + } else { + flags.resize(codec.numberOfSimulcastStreams); + for (size_t i = 0; i < flags.size(); ++i) { + flags[i] = 
codec.simulcastStream[i].active; + } + } + return flags; +} + +bool EqualFlags(const std::vector& a, const std::vector& b) { + if (a.size() != b.size()) + return false; + return std::equal(a.begin(), a.end(), b.begin()); +} + } // namespace class VideoStreamEncoderResourceManager::InitialFrameDropper { @@ -69,7 +98,11 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper { has_seen_first_bwe_drop_(false), set_start_bitrate_(DataRate::Zero()), set_start_bitrate_time_ms_(0), - initial_framedrop_(0) { + initial_framedrop_(0), + use_bandwidth_allocation_(false), + bandwidth_allocation_(DataRate::Zero()), + last_input_width_(0), + last_input_height_(0) { RTC_DCHECK(quality_scaler_resource_); } @@ -78,12 +111,27 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper { return initial_framedrop_ < kMaxInitialFramedrop; } + absl::optional single_active_stream_pixels() const { + return single_active_stream_pixels_; + } + + absl::optional UseBandwidthAllocationBps() const { + return (use_bandwidth_allocation_ && + bandwidth_allocation_ > DataRate::Zero()) + ? absl::optional(bandwidth_allocation_.bps()) + : absl::nullopt; + } + // Input signals. void SetStartBitrate(DataRate start_bitrate, int64_t now_ms) { set_start_bitrate_ = start_bitrate; set_start_bitrate_time_ms_ = now_ms; } + void SetBandwidthAllocation(DataRate bandwidth_allocation) { + bandwidth_allocation_ = bandwidth_allocation; + } + void SetTargetBitrate(DataRate target_bitrate, int64_t now_ms) { if (set_start_bitrate_ > DataRate::Zero() && !has_seen_first_bwe_drop_ && quality_scaler_resource_->is_started() && @@ -104,9 +152,48 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper { } } + void OnEncoderSettingsUpdated( + const VideoCodec& codec, + const VideoAdaptationCounters& adaptation_counters) { + std::vector active_flags = GetActiveLayersFlags(codec); + // Check if the source resolution has changed for the external reasons, + // i.e. without any adaptation from WebRTC. 
+ const bool source_resolution_changed = + (last_input_width_ != codec.width || + last_input_height_ != codec.height) && + adaptation_counters.resolution_adaptations == + last_adaptation_counters_.resolution_adaptations; + if (!EqualFlags(active_flags, last_active_flags_) || + source_resolution_changed) { + // Streams configuration has changed. + // Initial frame drop must be enabled because BWE might be way too low + // for the selected resolution. + if (quality_scaler_resource_->is_started()) { + RTC_LOG(LS_INFO) << "Resetting initial_framedrop_ due to changed " + "stream parameters"; + initial_framedrop_ = 0; + if (single_active_stream_pixels_ && + VideoStreamAdapter::GetSingleActiveLayerPixels(codec) > + *single_active_stream_pixels_) { + // Resolution increased. + use_bandwidth_allocation_ = true; + } + } + } + last_adaptation_counters_ = adaptation_counters; + last_active_flags_ = active_flags; + last_input_width_ = codec.width; + last_input_height_ = codec.height; + single_active_stream_pixels_ = + VideoStreamAdapter::GetSingleActiveLayerPixels(codec); + } + void OnFrameDroppedDueToSize() { ++initial_framedrop_; } - void OnMaybeEncodeFrame() { initial_framedrop_ = kMaxInitialFramedrop; } + void Disable() { + initial_framedrop_ = kMaxInitialFramedrop; + use_bandwidth_allocation_ = false; + } void OnQualityScalerSettingsUpdated() { if (quality_scaler_resource_->is_started()) { @@ -114,7 +201,7 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper { initial_framedrop_ = 0; } else { // Quality scaling disabled so we shouldn't drop initial frames. - initial_framedrop_ = kMaxInitialFramedrop; + Disable(); } } @@ -130,6 +217,14 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper { int64_t set_start_bitrate_time_ms_; // Counts how many frames we've dropped in the initial framedrop phase. 
int initial_framedrop_; + absl::optional single_active_stream_pixels_; + bool use_bandwidth_allocation_; + DataRate bandwidth_allocation_; + + std::vector last_active_flags_; + VideoAdaptationCounters last_adaptation_counters_; + int last_input_width_; + int last_input_height_; }; VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( @@ -146,6 +241,7 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( encode_usage_resource_( EncodeUsageResource::Create(std::move(overuse_detector))), quality_scaler_resource_(QualityScalerResource::Create()), + pixel_limit_resource_(nullptr), encoder_queue_(nullptr), input_state_provider_(input_state_provider), adaptation_processor_(nullptr), @@ -198,7 +294,7 @@ VideoStreamEncoderResourceManager::degradation_preference() const { return degradation_preference_; } -void VideoStreamEncoderResourceManager::EnsureEncodeUsageResourceStarted() { +void VideoStreamEncoderResourceManager::ConfigureEncodeUsageResource() { RTC_DCHECK_RUN_ON(encoder_queue_); RTC_DCHECK(encoder_settings_.has_value()); if (encode_usage_resource_->is_started()) { @@ -210,6 +306,34 @@ void VideoStreamEncoderResourceManager::EnsureEncodeUsageResourceStarted() { encode_usage_resource_->StartCheckForOveruse(GetCpuOveruseOptions()); } +void VideoStreamEncoderResourceManager::MaybeInitializePixelLimitResource() { + RTC_DCHECK_RUN_ON(encoder_queue_); + RTC_DCHECK(adaptation_processor_); + RTC_DCHECK(!pixel_limit_resource_); + if (!field_trial::IsEnabled(kPixelLimitResourceFieldTrialName)) { + // The field trial is not running. 
+ return; + } + int max_pixels = 0; + std::string pixel_limit_field_trial = + field_trial::FindFullName(kPixelLimitResourceFieldTrialName); + if (sscanf(pixel_limit_field_trial.c_str(), "Enabled-%d", &max_pixels) != 1) { + RTC_LOG(LS_ERROR) << "Couldn't parse " << kPixelLimitResourceFieldTrialName + << " trial config: " << pixel_limit_field_trial; + return; + } + RTC_LOG(LS_INFO) << "Running field trial " + << kPixelLimitResourceFieldTrialName << " configured to " + << max_pixels << " max pixels"; + // Configure the specified max pixels from the field trial. The pixel limit + // resource is active for the lifetime of the stream (until + // StopManagedResources() is called). + pixel_limit_resource_ = + PixelLimitResource::Create(encoder_queue_->Get(), input_state_provider_); + pixel_limit_resource_->SetMaxPixels(max_pixels); + AddResource(pixel_limit_resource_, VideoAdaptationReason::kCpu); +} + void VideoStreamEncoderResourceManager::StopManagedResources() { RTC_DCHECK_RUN_ON(encoder_queue_); RTC_DCHECK(adaptation_processor_); @@ -221,6 +345,10 @@ void VideoStreamEncoderResourceManager::StopManagedResources() { quality_scaler_resource_->StopCheckForOveruse(); RemoveResource(quality_scaler_resource_); } + if (pixel_limit_resource_) { + RemoveResource(pixel_limit_resource_); + pixel_limit_resource_ = nullptr; + } } void VideoStreamEncoderResourceManager::AddResource( @@ -230,7 +358,7 @@ RTC_DCHECK(resource); bool inserted; std::tie(std::ignore, inserted) = resources_.emplace(resource, reason); - RTC_DCHECK(inserted) << "Resurce " << resource->Name() + RTC_DCHECK(inserted) << "Resource " << resource->Name() << " already was inserted"; adaptation_processor_->AddResource(resource); } @@ -259,6 +387,8 @@ void VideoStreamEncoderResourceManager::SetEncoderSettings( RTC_DCHECK_RUN_ON(encoder_queue_); encoder_settings_ = std::move(encoder_settings); bitrate_constraint_->OnEncoderSettingsUpdated(encoder_settings_); +
initial_frame_dropper_->OnEncoderSettingsUpdated( + encoder_settings_->video_codec(), current_adaptation_counters_); MaybeUpdateTargetFrameRate(); } @@ -294,6 +424,8 @@ void VideoStreamEncoderResourceManager::SetEncoderRates( const VideoEncoder::RateControlParameters& encoder_rates) { RTC_DCHECK_RUN_ON(encoder_queue_); encoder_rates_ = encoder_rates; + initial_frame_dropper_->SetBandwidthAllocation( + encoder_rates.bandwidth_allocation); } void VideoStreamEncoderResourceManager::OnFrameDroppedDueToSize() { @@ -339,9 +471,21 @@ bool VideoStreamEncoderResourceManager::DropInitialFrames() const { return initial_frame_dropper_->DropInitialFrames(); } +absl::optional +VideoStreamEncoderResourceManager::SingleActiveStreamPixels() const { + RTC_DCHECK_RUN_ON(encoder_queue_); + return initial_frame_dropper_->single_active_stream_pixels(); +} + +absl::optional +VideoStreamEncoderResourceManager::UseBandwidthAllocationBps() const { + RTC_DCHECK_RUN_ON(encoder_queue_); + return initial_frame_dropper_->UseBandwidthAllocationBps(); +} + void VideoStreamEncoderResourceManager::OnMaybeEncodeFrame() { RTC_DCHECK_RUN_ON(encoder_queue_); - initial_frame_dropper_->OnMaybeEncodeFrame(); + initial_frame_dropper_->Disable(); if (quality_rampup_experiment_ && quality_scaler_resource_->is_started()) { DataRate bandwidth = encoder_rates_.has_value() ? encoder_rates_->bandwidth_allocation @@ -377,7 +521,9 @@ void VideoStreamEncoderResourceManager::ConfigureQualityScaler( const auto scaling_settings = encoder_info.scaling_settings; const bool quality_scaling_allowed = IsResolutionScalingEnabled(degradation_preference_) && - scaling_settings.thresholds; + (scaling_settings.thresholds.has_value() || + (encoder_settings_.has_value() && + encoder_settings_->encoder_config().is_quality_scaling_allowed)); // TODO(https://crbug.com/webrtc/11222): Should this move to // QualityScalerResource? 
@@ -391,9 +537,9 @@ void VideoStreamEncoderResourceManager::ConfigureQualityScaler( experimental_thresholds = QualityScalingExperiment::GetQpThresholds( GetVideoCodecTypeOrGeneric(encoder_settings_)); } - UpdateQualityScalerSettings(experimental_thresholds - ? *experimental_thresholds - : *(scaling_settings.thresholds)); + UpdateQualityScalerSettings(experimental_thresholds.has_value() + ? experimental_thresholds + : scaling_settings.thresholds); } } else { UpdateQualityScalerSettings(absl::nullopt); @@ -457,6 +603,8 @@ void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated( rtc::scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) { RTC_DCHECK_RUN_ON(encoder_queue_); + current_adaptation_counters_ = adaptation_counters; + // TODO(bugs.webrtc.org/11553) Remove reason parameter and add reset callback. if (!reason && adaptation_counters.Total() == 0) { // Adaptation was manually reset - clear the per-reason counters too. @@ -567,4 +715,25 @@ void VideoStreamEncoderResourceManager::OnQualityRampUp() { stream_adapter_->ClearRestrictions(); quality_rampup_experiment_.reset(); } + +bool VideoStreamEncoderResourceManager::IsSimulcast( + const VideoEncoderConfig& encoder_config) { + const std::vector& simulcast_layers = + encoder_config.simulcast_layers; + if (simulcast_layers.size() <= 1) { + return false; + } + + if (simulcast_layers[0].active) { + // We can't distinguish between simulcast and singlecast when only the + // lowest spatial layer is active. Treat this case as simulcast. 
+ return true; + } + + int num_active_layers = + std::count_if(simulcast_layers.begin(), simulcast_layers.end(), + [](const VideoStream& layer) { return layer.active; }); + return num_active_layers > 1; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h index 932d90c20..2e7060c60 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h @@ -46,6 +46,7 @@ #include "video/adaptation/bitrate_constraint.h" #include "video/adaptation/encode_usage_resource.h" #include "video/adaptation/overuse_frame_detector.h" +#include "video/adaptation/pixel_limit_resource.h" #include "video/adaptation/quality_rampup_experiment_helper.h" #include "video/adaptation/quality_scaler_resource.h" #include "video/adaptation/video_stream_encoder_resource.h" @@ -91,8 +92,12 @@ class VideoStreamEncoderResourceManager void SetDegradationPreferences(DegradationPreference degradation_preference); DegradationPreference degradation_preference() const; - void EnsureEncodeUsageResourceStarted(); + void ConfigureEncodeUsageResource(); + // Initializes the pixel limit resource if the "WebRTC-PixelLimitResource" + // field trial is enabled. This can be used for testing. + void MaybeInitializePixelLimitResource(); // Stops the encode usage and quality scaler resources if not already stopped. + // If the pixel limit resource was created it is also stopped and nulled. void StopManagedResources(); // Settings that affect the VideoStreamEncoder-specific resources. 
@@ -121,9 +126,11 @@ class VideoStreamEncoderResourceManager VideoAdaptationReason reason); void RemoveResource(rtc::scoped_refptr resource); std::vector AdaptationConstraints() const; - // If true, the VideoStreamEncoder should eexecute its logic to maybe drop - // frames baseed on size and bitrate. + // If true, the VideoStreamEncoder should execute its logic to maybe drop + // frames based on size and bitrate. bool DropInitialFrames() const; + absl::optional SingleActiveStreamPixels() const; + absl::optional UseBandwidthAllocationBps() const; // VideoSourceRestrictionsListener implementation. // Updates |video_source_restrictions_|. @@ -140,6 +147,8 @@ class VideoStreamEncoderResourceManager // QualityRampUpExperimentListener implementation. void OnQualityRampUp() override; + static bool IsSimulcast(const VideoEncoderConfig& encoder_config); + private: class InitialFrameDropper; @@ -170,6 +179,7 @@ class VideoStreamEncoderResourceManager RTC_GUARDED_BY(encoder_queue_); const rtc::scoped_refptr encode_usage_resource_; const rtc::scoped_refptr quality_scaler_resource_; + rtc::scoped_refptr pixel_limit_resource_; rtc::TaskQueue* encoder_queue_; VideoStreamInputStateProvider* const input_state_provider_ @@ -183,6 +193,9 @@ class VideoStreamEncoderResourceManager VideoSourceRestrictions video_source_restrictions_ RTC_GUARDED_BY(encoder_queue_); + VideoAdaptationCounters current_adaptation_counters_ + RTC_GUARDED_BY(encoder_queue_); + const BalancedDegradationSettings balanced_settings_; Clock* clock_ RTC_GUARDED_BY(encoder_queue_); const bool experiment_cpu_load_estimator_ RTC_GUARDED_BY(encoder_queue_); diff --git a/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.cc b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.cc index b08f2f184..6b1db9238 100644 --- a/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.cc +++ b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.cc @@ -66,7 +66,8 @@ double RoundToMultiple(int alignment, int 
AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( const VideoEncoder::EncoderInfo& encoder_info, - VideoEncoderConfig* config) { + VideoEncoderConfig* config, + absl::optional max_layers) { const int requested_alignment = encoder_info.requested_resolution_alignment; if (!encoder_info.apply_alignment_to_all_simulcast_layers) { return requested_alignment; @@ -85,7 +86,11 @@ int AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( if (!has_scale_resolution_down_by) { // Default resolution downscaling used (scale factors: 1, 2, 4, ...). - return requested_alignment * (1 << (config->simulcast_layers.size() - 1)); + size_t size = config->simulcast_layers.size(); + if (max_layers && *max_layers > 0 && *max_layers < size) { + size = *max_layers; + } + return requested_alignment * (1 << (size - 1)); } // Get alignment for downscaled layers. diff --git a/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h index 53d792788..4b72623a1 100644 --- a/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h +++ b/TMessagesProj/jni/voip/webrtc/video/alignment_adjuster.h @@ -28,9 +28,13 @@ class AlignmentAdjuster { // |scale_resolution_down_by| may be adjusted to a common multiple to limit // the alignment value to avoid largely cropped frames and possibly with an // aspect ratio far from the original. + + // Note: |max_layers| currently only taken into account when using default + // scale factors. 
static int GetAlignmentAndMaybeAdjustScaleFactors( const VideoEncoder::EncoderInfo& info, - VideoEncoderConfig* config); + VideoEncoderConfig* config, + absl::optional max_layers); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc index 187bac6ee..436fff83f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc +++ b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.cc @@ -36,7 +36,7 @@ void BufferedFrameDecryptor::SetFrameDecryptor( } void BufferedFrameDecryptor::ManageEncryptedFrame( - std::unique_ptr encrypted_frame) { + std::unique_ptr encrypted_frame) { switch (DecryptFrame(encrypted_frame.get())) { case FrameDecision::kStash: if (stashed_frames_.size() >= kMaxStashedFrames) { @@ -55,7 +55,7 @@ void BufferedFrameDecryptor::ManageEncryptedFrame( } BufferedFrameDecryptor::FrameDecision BufferedFrameDecryptor::DecryptFrame( - video_coding::RtpFrameObject* frame) { + RtpFrameObject* frame) { // Optionally attempt to decrypt the raw video frame if it was provided. if (frame_decryptor_ == nullptr) { RTC_LOG(LS_INFO) << "Frame decryption required but not attached to this " diff --git a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.h b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.h index ff04837bc..f6dd8d8c2 100644 --- a/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.h +++ b/TMessagesProj/jni/voip/webrtc/video/buffered_frame_decryptor.h @@ -27,8 +27,7 @@ class OnDecryptedFrameCallback { public: virtual ~OnDecryptedFrameCallback() = default; // Called each time a decrypted frame is returned. 
- virtual void OnDecryptedFrame( - std::unique_ptr frame) = 0; + virtual void OnDecryptedFrame(std::unique_ptr frame) = 0; }; // This callback is called each time there is a status change in the decryption @@ -72,8 +71,7 @@ class BufferedFrameDecryptor final { // Determines whether the frame should be stashed, dropped or handed off to // the OnDecryptedFrameCallback. - void ManageEncryptedFrame( - std::unique_ptr encrypted_frame); + void ManageEncryptedFrame(std::unique_ptr encrypted_frame); private: // Represents what should be done with a given frame. @@ -82,7 +80,7 @@ class BufferedFrameDecryptor final { // Attempts to decrypt the frame, if it fails and no prior frames have been // decrypted it will return kStash. Otherwise fail to decrypts will return // kDrop. Successful decryptions will always return kDecrypted. - FrameDecision DecryptFrame(video_coding::RtpFrameObject* frame); + FrameDecision DecryptFrame(RtpFrameObject* frame); // Retries all the stashed frames this is triggered each time a kDecrypted // event occurs. 
void RetryStashedFrames(); @@ -96,7 +94,7 @@ class BufferedFrameDecryptor final { rtc::scoped_refptr frame_decryptor_; OnDecryptedFrameCallback* const decrypted_frame_callback_; OnDecryptionStatusChangeCallback* const decryption_status_change_callback_; - std::deque> stashed_frames_; + std::deque> stashed_frames_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats.h b/TMessagesProj/jni/voip/webrtc/video/call_stats.h index 3bfb63244..5dc8fa0cb 100644 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats.h +++ b/TMessagesProj/jni/voip/webrtc/video/call_stats.h @@ -14,12 +14,12 @@ #include #include +#include "api/sequence_checker.h" #include "modules/include/module.h" #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -110,8 +110,8 @@ class CallStats : public Module, public RtcpRttStats { // for the observers_ list, which makes the most common case lock free. 
std::list observers_; - rtc::ThreadChecker construction_thread_checker_; - rtc::ThreadChecker process_thread_checker_; + SequenceChecker construction_thread_checker_; + SequenceChecker process_thread_checker_; ProcessThread* const process_thread_; bool process_thread_running_ RTC_GUARDED_BY(construction_thread_checker_); diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc b/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc index faf08d69b..2b7c61e0f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc @@ -76,16 +76,11 @@ CallStats::CallStats(Clock* clock, TaskQueueBase* task_queue) time_of_first_rtt_ms_(-1), task_queue_(task_queue) { RTC_DCHECK(task_queue_); - process_thread_checker_.Detach(); - repeating_task_ = - RepeatingTaskHandle::DelayedStart(task_queue_, kUpdateInterval, [this]() { - UpdateAndReport(); - return kUpdateInterval; - }); + RTC_DCHECK_RUN_ON(task_queue_); } CallStats::~CallStats() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(task_queue_); RTC_DCHECK(observers_.empty()); repeating_task_.Stop(); @@ -93,8 +88,17 @@ CallStats::~CallStats() { UpdateHistograms(); } +void CallStats::EnsureStarted() { + RTC_DCHECK_RUN_ON(task_queue_); + repeating_task_ = + RepeatingTaskHandle::DelayedStart(task_queue_, kUpdateInterval, [this]() { + UpdateAndReport(); + return kUpdateInterval; + }); +} + void CallStats::UpdateAndReport() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(task_queue_); RemoveOldReports(clock_->CurrentTime().ms(), &reports_); max_rtt_ms_ = GetMaxRttMs(reports_); @@ -112,18 +116,18 @@ void CallStats::UpdateAndReport() { } void CallStats::RegisterStatsObserver(CallStatsObserver* observer) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(task_queue_); if (!absl::c_linear_search(observers_, observer)) observers_.push_back(observer); } void 
CallStats::DeregisterStatsObserver(CallStatsObserver* observer) { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(task_queue_); observers_.remove(observer); } int64_t CallStats::LastProcessedRtt() const { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(task_queue_); // No need for locking since we're on the construction thread. return avg_rtt_ms_; } @@ -134,7 +138,7 @@ void CallStats::OnRttUpdate(int64_t rtt) { // on the correct TQ. int64_t now_ms = clock_->TimeInMilliseconds(); auto update = [this, rtt, now_ms]() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(task_queue_); reports_.push_back(RttTime(rtt, now_ms)); if (time_of_first_rtt_ms_ == -1) time_of_first_rtt_ms_ = now_ms; @@ -149,7 +153,7 @@ void CallStats::OnRttUpdate(int64_t rtt) { } void CallStats::UpdateHistograms() { - RTC_DCHECK_RUN_ON(&construction_thread_checker_); + RTC_DCHECK_RUN_ON(task_queue_); if (time_of_first_rtt_ms_ == -1 || num_avg_rtt_ < 1) return; diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats2.h b/TMessagesProj/jni/voip/webrtc/video/call_stats2.h index 822685320..35a793558 100644 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats2.h +++ b/TMessagesProj/jni/voip/webrtc/video/call_stats2.h @@ -18,7 +18,6 @@ #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" @@ -32,9 +31,13 @@ class CallStats { // Time interval for updating the observers. static constexpr TimeDelta kUpdateInterval = TimeDelta::Millis(1000); + // Must be created and destroyed on the same task_queue. CallStats(Clock* clock, TaskQueueBase* task_queue); ~CallStats(); + // Ensure that necessary repeating tasks are started. 
+ void EnsureStarted(); + // Expose an RtcpRttStats implementation without inheriting from RtcpRttStats. // That allows us to separate the threading model of how RtcpRttStats is // used (mostly on a process thread) and how CallStats is used (mostly on @@ -49,11 +52,6 @@ class CallStats { // Expose |LastProcessedRtt()| from RtcpRttStats to the public interface, as // it is the part of the API that is needed by direct users of CallStats. - // TODO(tommi): Threading or lifetime guarantees are not explicit in how - // CallStats is used as RtcpRttStats or how pointers are cached in a - // few different places (distributed via Call). It would be good to clarify - // from what thread/TQ calls to OnRttUpdate and LastProcessedRtt need to be - // allowed. int64_t LastProcessedRtt() const; // Exposed for tests to test histogram support. @@ -105,35 +103,24 @@ class CallStats { Clock* const clock_; // Used to regularly call UpdateAndReport(). - RepeatingTaskHandle repeating_task_ - RTC_GUARDED_BY(construction_thread_checker_); + RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(task_queue_); // The last RTT in the statistics update (zero if there is no valid estimate). - int64_t max_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_); + int64_t max_rtt_ms_ RTC_GUARDED_BY(task_queue_); // Last reported average RTT value. - int64_t avg_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_); + int64_t avg_rtt_ms_ RTC_GUARDED_BY(task_queue_); - // |sum_avg_rtt_ms_|, |num_avg_rtt_| and |time_of_first_rtt_ms_| are only used - // on the ProcessThread when running. When the Process Thread is not running, - // (and only then) they can be used in UpdateHistograms(), usually called from - // the dtor. 
- int64_t sum_avg_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_); - int64_t num_avg_rtt_ RTC_GUARDED_BY(construction_thread_checker_); - int64_t time_of_first_rtt_ms_ RTC_GUARDED_BY(construction_thread_checker_); + int64_t sum_avg_rtt_ms_ RTC_GUARDED_BY(task_queue_); + int64_t num_avg_rtt_ RTC_GUARDED_BY(task_queue_); + int64_t time_of_first_rtt_ms_ RTC_GUARDED_BY(task_queue_); // All Rtt reports within valid time interval, oldest first. - std::list reports_ RTC_GUARDED_BY(construction_thread_checker_); + std::list reports_ RTC_GUARDED_BY(task_queue_); // Observers getting stats reports. - // When attached to ProcessThread, this is read-only. In order to allow - // modification, we detach from the process thread while the observer - // list is updated, to avoid races. This allows us to not require a lock - // for the observers_ list, which makes the most common case lock free. - std::list observers_; + std::list observers_ RTC_GUARDED_BY(task_queue_); - SequenceChecker construction_thread_checker_; - SequenceChecker process_thread_checker_; TaskQueueBase* const task_queue_; // Used to signal destruction to potentially pending tasks. diff --git a/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.cc b/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.cc index 45d88875e..6a2c99ffe 100644 --- a/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.cc +++ b/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.cc @@ -314,15 +314,14 @@ void EncoderBitrateAdjuster::OnEncoderInfo( AdjustRateAllocation(current_rate_control_parameters_); } -void EncoderBitrateAdjuster::OnEncodedFrame(const EncodedImage& encoded_image, +void EncoderBitrateAdjuster::OnEncodedFrame(DataSize size, + int spatial_index, int temporal_index) { ++frames_since_layout_change_; // Detectors may not exist, for instance if ScreenshareLayers is used. 
- auto& detector = - overshoot_detectors_[encoded_image.SpatialIndex().value_or(0)] - [temporal_index]; + auto& detector = overshoot_detectors_[spatial_index][temporal_index]; if (detector) { - detector->OnEncodedFrame(encoded_image.size(), rtc::TimeMillis()); + detector->OnEncodedFrame(size.bytes(), rtc::TimeMillis()); } } diff --git a/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.h b/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.h index b142519b4..74d0289ad 100644 --- a/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.h +++ b/TMessagesProj/jni/voip/webrtc/video/encoder_bitrate_adjuster.h @@ -47,7 +47,7 @@ class EncoderBitrateAdjuster { void OnEncoderInfo(const VideoEncoder::EncoderInfo& encoder_info); // Updates the overuse detectors according to the encoded image size. - void OnEncodedFrame(const EncodedImage& encoded_image, int temporal_index); + void OnEncodedFrame(DataSize size, int spatial_index, int temporal_index); void Reset(); diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_dumping_decoder.cc b/TMessagesProj/jni/voip/webrtc/video/frame_dumping_decoder.cc index 4ccb33308..59202dd03 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_dumping_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/video/frame_dumping_decoder.cc @@ -32,7 +32,7 @@ class FrameDumpingDecoder : public VideoDecoder { int32_t RegisterDecodeCompleteCallback( DecodedImageCallback* callback) override; int32_t Release() override; - bool PrefersLateDecoding() const override; + DecoderInfo GetDecoderInfo() const override; const char* ImplementationName() const override; private: @@ -73,8 +73,8 @@ int32_t FrameDumpingDecoder::Release() { return decoder_->Release(); } -bool FrameDumpingDecoder::PrefersLateDecoding() const { - return decoder_->PrefersLateDecoding(); +VideoDecoder::DecoderInfo FrameDumpingDecoder::GetDecoderInfo() const { + return decoder_->GetDecoderInfo(); } const char* FrameDumpingDecoder::ImplementationName() const { diff --git 
a/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc b/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc index 0e604cd76..8a0f3b386 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc @@ -217,7 +217,7 @@ void FrameEncodeMetadataWriter::UpdateBitstream( buffer, encoded_image->ColorSpace()); encoded_image->SetEncodedData( - new rtc::RefCountedObject( + rtc::make_ref_counted( std::move(modified_buffer))); } diff --git a/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc b/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc index ece756b2d..3831fdfce 100644 --- a/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/full_stack_tests.cc @@ -21,7 +21,7 @@ #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder_config.h" -#include "media/base/vp9_profile.h" +#include "api/video_codecs/vp9_profile.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" diff --git a/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc b/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc index d515a5271..5cebf41e9 100644 --- a/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc @@ -21,8 +21,8 @@ #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" +#include "api/video_codecs/vp9_profile.h" #include "call/simulated_network.h" -#include "media/base/vp9_profile.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" @@ -1738,9 +1738,9 @@ TEST(PCFullStackTest, MAYBE_LargeRoomVP8_50thumb) { } */ +/* class 
PCDualStreamsTest : public ::testing::TestWithParam {}; -/* // Disable dual video test on mobile device becuase it's too heavy. // TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on MAC. #if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS) && !defined(WEBRTC_MAC) @@ -1842,10 +1842,10 @@ TEST_P(PCDualStreamsTest, Conference_Restricted) { auto fixture = CreateVideoQualityTestFixture(); fixture->RunWithAnalyzer(dual_streams); } -*/ INSTANTIATE_TEST_SUITE_P(PCFullStackTest, PCDualStreamsTest, ::testing::Values(0, 1)); +*/ } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/quality_scaling_tests.cc b/TMessagesProj/jni/voip/webrtc/video/quality_scaling_tests.cc index 65a23dbbc..833b7758b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/quality_scaling_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/quality_scaling_tests.cc @@ -15,252 +15,462 @@ #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "rtc_base/experiments/encoder_info_settings.h" #include "test/call_test.h" #include "test/field_trial.h" #include "test/frame_generator_capturer.h" namespace webrtc { namespace { -constexpr int kWidth = 1280; -constexpr int kHeight = 720; +constexpr int kInitialWidth = 1280; +constexpr int kInitialHeight = 720; constexpr int kLowStartBps = 100000; -constexpr int kHighStartBps = 600000; -constexpr size_t kTimeoutMs = 10000; // Some tests are expected to time out. +constexpr int kHighStartBps = 1000000; +constexpr int kDefaultVgaMinStartBps = 500000; // From video_stream_encoder.cc +constexpr int kTimeoutMs = 10000; // Some tests are expected to time out. 
void SetEncoderSpecific(VideoEncoderConfig* encoder_config, VideoCodecType type, bool automatic_resize, - bool frame_dropping) { + size_t num_spatial_layers) { if (type == kVideoCodecVP8) { VideoCodecVP8 vp8 = VideoEncoder::GetDefaultVp8Settings(); vp8.automaticResizeOn = automatic_resize; - vp8.frameDroppingOn = frame_dropping; - encoder_config->encoder_specific_settings = new rtc::RefCountedObject< - VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8); + encoder_config->encoder_specific_settings = + rtc::make_ref_counted( + vp8); } else if (type == kVideoCodecVP9) { VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings(); vp9.automaticResizeOn = automatic_resize; - vp9.frameDroppingOn = frame_dropping; - encoder_config->encoder_specific_settings = new rtc::RefCountedObject< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9); - } else if (type == kVideoCodecH264) { - VideoCodecH264 h264 = VideoEncoder::GetDefaultH264Settings(); - h264.frameDroppingOn = frame_dropping; - encoder_config->encoder_specific_settings = new rtc::RefCountedObject< - VideoEncoderConfig::H264EncoderSpecificSettings>(h264); + vp9.numberOfSpatialLayers = num_spatial_layers; + encoder_config->encoder_specific_settings = + rtc::make_ref_counted( + vp9); } } } // namespace class QualityScalingTest : public test::CallTest { protected: - void RunTest(VideoEncoderFactory* encoder_factory, - const std::string& payload_name, - int start_bps, - bool automatic_resize, - bool frame_dropping, - bool expect_adaptation); - const std::string kPrefix = "WebRTC-Video-QualityScaling/Enabled-"; const std::string kEnd = ",0,0,0.9995,0.9999,1/"; + const absl::optional + kSinglecastLimits720pVp8 = + EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( + kVideoCodecVP8, + 1280 * 720); + const absl::optional + kSinglecastLimits360pVp9 = + EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( + kVideoCodecVP9, + 640 * 360); }; -void 
QualityScalingTest::RunTest(VideoEncoderFactory* encoder_factory, - const std::string& payload_name, - int start_bps, - bool automatic_resize, - bool frame_dropping, - bool expect_adaptation) { - class ScalingObserver - : public test::SendTest, - public test::FrameGeneratorCapturer::SinkWantsObserver { - public: - ScalingObserver(VideoEncoderFactory* encoder_factory, - const std::string& payload_name, +class ScalingObserver : public test::SendTest { + protected: + ScalingObserver(const std::string& payload_name, + const std::vector& streams_active, + int start_bps, + bool automatic_resize, + bool expect_scaling) + : SendTest(expect_scaling ? kTimeoutMs * 4 : kTimeoutMs), + encoder_factory_( + [](const SdpVideoFormat& format) -> std::unique_ptr { + if (format.name == "VP8") + return VP8Encoder::Create(); + if (format.name == "VP9") + return VP9Encoder::Create(); + if (format.name == "H264") + return H264Encoder::Create(cricket::VideoCodec("H264")); + RTC_NOTREACHED() << format.name; + return nullptr; + }), + payload_name_(payload_name), + streams_active_(streams_active), + start_bps_(start_bps), + automatic_resize_(automatic_resize), + expect_scaling_(expect_scaling) {} + + private: + void ModifySenderBitrateConfig(BitrateConstraints* bitrate_config) override { + bitrate_config->start_bitrate_bps = start_bps_; + } + + size_t GetNumVideoStreams() const override { + return (payload_name_ == "VP9") ? 
1 : streams_active_.size(); + } + + void ModifyVideoConfigs( + VideoSendStream::Config* send_config, + std::vector* receive_configs, + VideoEncoderConfig* encoder_config) override { + send_config->encoder_settings.encoder_factory = &encoder_factory_; + send_config->rtp.payload_name = payload_name_; + send_config->rtp.payload_type = test::CallTest::kVideoSendPayloadType; + encoder_config->video_format.name = payload_name_; + const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_); + encoder_config->codec_type = codec_type; + encoder_config->max_bitrate_bps = + std::max(start_bps_, encoder_config->max_bitrate_bps); + if (payload_name_ == "VP9") { + // Simulcast layers indicates which spatial layers are active. + encoder_config->simulcast_layers.resize(streams_active_.size()); + encoder_config->simulcast_layers[0].max_bitrate_bps = + encoder_config->max_bitrate_bps; + } + double scale_factor = 1.0; + for (int i = streams_active_.size() - 1; i >= 0; --i) { + VideoStream& stream = encoder_config->simulcast_layers[i]; + stream.active = streams_active_[i]; + stream.scale_resolution_down_by = scale_factor; + scale_factor *= (payload_name_ == "VP9") ? 
1.0 : 2.0; + } + SetEncoderSpecific(encoder_config, codec_type, automatic_resize_, + streams_active_.size()); + } + + void PerformTest() override { EXPECT_EQ(expect_scaling_, Wait()); } + + test::FunctionVideoEncoderFactory encoder_factory_; + const std::string payload_name_; + const std::vector streams_active_; + const int start_bps_; + const bool automatic_resize_; + const bool expect_scaling_; +}; + +class DownscalingObserver + : public ScalingObserver, + public test::FrameGeneratorCapturer::SinkWantsObserver { + public: + DownscalingObserver(const std::string& payload_name, + const std::vector& streams_active, + int start_bps, + bool automatic_resize, + bool expect_downscale) + : ScalingObserver(payload_name, + streams_active, + start_bps, + automatic_resize, + expect_downscale) {} + + private: + void OnFrameGeneratorCapturerCreated( + test::FrameGeneratorCapturer* frame_generator_capturer) override { + frame_generator_capturer->SetSinkWantsObserver(this); + frame_generator_capturer->ChangeResolution(kInitialWidth, kInitialHeight); + } + + void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, + const rtc::VideoSinkWants& wants) override { + if (wants.max_pixel_count < kInitialWidth * kInitialHeight) + observation_complete_.Set(); + } +}; + +class UpscalingObserver + : public ScalingObserver, + public test::FrameGeneratorCapturer::SinkWantsObserver { + public: + UpscalingObserver(const std::string& payload_name, + const std::vector& streams_active, int start_bps, bool automatic_resize, - bool frame_dropping, - bool expect_adaptation) - : SendTest(expect_adaptation ? 
kDefaultTimeoutMs : kTimeoutMs), - encoder_factory_(encoder_factory), - payload_name_(payload_name), - start_bps_(start_bps), - automatic_resize_(automatic_resize), - frame_dropping_(frame_dropping), - expect_adaptation_(expect_adaptation) {} + bool expect_upscale) + : ScalingObserver(payload_name, + streams_active, + start_bps, + automatic_resize, + expect_upscale) {} - private: - void OnFrameGeneratorCapturerCreated( - test::FrameGeneratorCapturer* frame_generator_capturer) override { - frame_generator_capturer->SetSinkWantsObserver(this); - // Set initial resolution. - frame_generator_capturer->ChangeResolution(kWidth, kHeight); - } + private: + void OnFrameGeneratorCapturerCreated( + test::FrameGeneratorCapturer* frame_generator_capturer) override { + frame_generator_capturer->SetSinkWantsObserver(this); + frame_generator_capturer->ChangeResolution(kInitialWidth, kInitialHeight); + } - // Called when FrameGeneratorCapturer::AddOrUpdateSink is called. - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override { - if (wants.max_pixel_count < kWidth * kHeight) + void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, + const rtc::VideoSinkWants& wants) override { + if (wants.max_pixel_count > last_wants_.max_pixel_count) { + if (wants.max_pixel_count == std::numeric_limits::max()) observation_complete_.Set(); } - void ModifySenderBitrateConfig( - BitrateConstraints* bitrate_config) override { - bitrate_config->start_bitrate_bps = start_bps_; - } + last_wants_ = wants; + } - void ModifyVideoConfigs( - VideoSendStream::Config* send_config, - std::vector* receive_configs, - VideoEncoderConfig* encoder_config) override { - send_config->encoder_settings.encoder_factory = encoder_factory_; - send_config->rtp.payload_name = payload_name_; - send_config->rtp.payload_type = kVideoSendPayloadType; - const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_); - encoder_config->codec_type = codec_type; - 
encoder_config->max_bitrate_bps = start_bps_; - SetEncoderSpecific(encoder_config, codec_type, automatic_resize_, - frame_dropping_); - } + rtc::VideoSinkWants last_wants_; +}; - void PerformTest() override { - EXPECT_EQ(expect_adaptation_, Wait()) - << "Timed out while waiting for a scale down."; - } - - VideoEncoderFactory* const encoder_factory_; - const std::string payload_name_; - const int start_bps_; - const bool automatic_resize_; - const bool frame_dropping_; - const bool expect_adaptation_; - } test(encoder_factory, payload_name, start_bps, automatic_resize, - frame_dropping, expect_adaptation); +TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp8) { + // qp_low:1, qp_high:1 -> kHighQp + test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); + DownscalingObserver test("VP8", {true}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); RunBaseTest(&test); } -TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp8) { - // VP8 QP thresholds, low:1, high:1 -> high QP. +TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp8) { + // qp_low:1, qp_high:1 -> kHighQp test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); - // QualityScaler enabled. - const bool kAutomaticResize = true; - const bool kFrameDropping = true; - const bool kExpectAdapt = true; - - test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kHighStartBps, kAutomaticResize, - kFrameDropping, kExpectAdapt); -} - -TEST_F(QualityScalingTest, NoAdaptDownForHighQpWithResizeOff_Vp8) { - // VP8 QP thresholds, low:1, high:1 -> high QP. - test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); - - // QualityScaler disabled. 
- const bool kAutomaticResize = false; - const bool kFrameDropping = true; - const bool kExpectAdapt = false; - - test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kHighStartBps, kAutomaticResize, - kFrameDropping, kExpectAdapt); -} - -// TODO(bugs.webrtc.org/10388): Fix and re-enable. -TEST_F(QualityScalingTest, - DISABLED_NoAdaptDownForHighQpWithFrameDroppingOff_Vp8) { - // VP8 QP thresholds, low:1, high:1 -> high QP. - test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); - - // QualityScaler disabled. - const bool kAutomaticResize = true; - const bool kFrameDropping = false; - const bool kExpectAdapt = false; - - test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kHighStartBps, kAutomaticResize, - kFrameDropping, kExpectAdapt); + DownscalingObserver test("VP8", {true}, kHighStartBps, + /*automatic_resize=*/false, + /*expect_downscale=*/false); + RunBaseTest(&test); } TEST_F(QualityScalingTest, NoAdaptDownForNormalQp_Vp8) { - // VP8 QP thresholds, low:1, high:127 -> normal QP. + // qp_low:1, qp_high:127 -> kNormalQp test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); - // QualityScaler enabled. - const bool kAutomaticResize = true; - const bool kFrameDropping = true; - const bool kExpectAdapt = false; - - test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kHighStartBps, kAutomaticResize, - kFrameDropping, kExpectAdapt); + DownscalingObserver test("VP8", {true}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); } -TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate) { - // VP8 QP thresholds, low:1, high:127 -> normal QP. 
+TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp8) { + // qp_low:1, qp_high:127 -> kNormalQp test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); - // QualityScaler enabled. - const bool kAutomaticResize = true; - const bool kFrameDropping = true; - const bool kExpectAdapt = true; - - test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kLowStartBps, kAutomaticResize, - kFrameDropping, kExpectAdapt); + DownscalingObserver test("VP8", {true}, kLowStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); } -TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateWithScalingOff) { - // VP8 QP thresholds, low:1, high:127 -> normal QP. +TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrateAndThenUp) { + // qp_low:127, qp_high:127 -> kLowQp + test::ScopedFieldTrials field_trials(kPrefix + "127,127,0,0,0,0" + kEnd); + + UpscalingObserver test("VP8", {true}, kDefaultVgaMinStartBps - 1, + /*automatic_resize=*/true, /*expect_upscale=*/true); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_Simulcast) { + // qp_low:1, qp_high:127 -> kNormalQp test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); - // QualityScaler disabled. - const bool kAutomaticResize = false; - const bool kFrameDropping = true; - const bool kExpectAdapt = false; - - test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); - RunTest(&encoder_factory, "VP8", kLowStartBps, kAutomaticResize, - kFrameDropping, kExpectAdapt); + DownscalingObserver test("VP8", {true, true}, kLowStartBps, + /*automatic_resize=*/false, + /*expect_downscale=*/false); + RunBaseTest(&test); } -TEST_F(QualityScalingTest, NoAdaptDownForHighQp_Vp9) { - // VP9 QP thresholds, low:1, high:1 -> high QP. 
+TEST_F(QualityScalingTest, AdaptsDownForHighQp_HighestStreamActive_Vp8) { + // qp_low:1, qp_high:1 -> kHighQp + test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); + + DownscalingObserver test("VP8", {false, false, true}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, + AdaptsDownForLowStartBitrate_HighestStreamActive_Vp8) { + // qp_low:1, qp_high:127 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + + DownscalingObserver test("VP8", {false, false, true}, + kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, AdaptsDownButNotUpWithMinStartBitrateLimit) { + // qp_low:127, qp_high:127 -> kLowQp + test::ScopedFieldTrials field_trials(kPrefix + "127,127,0,0,0,0" + kEnd); + + UpscalingObserver test("VP8", {false, true}, + kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, /*expect_upscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfBitrateEnough_Vp8) { + // qp_low:1, qp_high:127 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + + DownscalingObserver test("VP8", {false, false, true}, + kSinglecastLimits720pVp8->min_start_bitrate_bps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, + NoAdaptDownForLowStartBitrateIfDefaultLimitsDisabled_Vp8) { + // qp_low:1, qp_high:127 -> kNormalQp + test::ScopedFieldTrials field_trials( + kPrefix + "1,127,0,0,0,0" + kEnd + + "WebRTC-DefaultBitrateLimitsKillSwitch/Enabled/"); + + DownscalingObserver test("VP8", {false, false, true}, + kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, + 
NoAdaptDownForLowStartBitrate_OneStreamSinglecastLimitsNotUsed_Vp8) { + // qp_low:1, qp_high:127 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + + DownscalingObserver test( + "VP8", {true}, kSinglecastLimits720pVp8->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, /*expect_downscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp8) { + // qp_low:1, qp_high:1 -> kHighQp + test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd); + + DownscalingObserver test("VP8", {true, false, false}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, + NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp8) { + // qp_low:1, qp_high:127 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + + DownscalingObserver test("VP8", {true, false, false}, kLowStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfScalingOff_Vp8) { + // qp_low:1, qp_high:127 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "1,127,0,0,0,0" + kEnd); + + DownscalingObserver test("VP8", {true}, kLowStartBps, + /*automatic_resize=*/false, + /*expect_downscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp9) { + // qp_low:1, qp_high:1 -> kHighQp + test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + + "WebRTC-VP9QualityScaler/Enabled/"); + + DownscalingObserver test("VP9", {true}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, NoAdaptDownForHighQpIfScalingOff_Vp9) { + // qp_low:1, qp_high:1 -> kHighQp test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + "WebRTC-VP9QualityScaler/Disabled/"); - // QualityScaler always 
disabled. - const bool kAutomaticResize = true; - const bool kFrameDropping = true; - const bool kExpectAdapt = false; + DownscalingObserver test("VP9", {true}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} - test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); - RunTest(&encoder_factory, "VP9", kHighStartBps, kAutomaticResize, - kFrameDropping, kExpectAdapt); +TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp9) { + // qp_low:1, qp_high:255 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd + + "WebRTC-VP9QualityScaler/Enabled/"); + + DownscalingObserver test("VP9", {true}, kLowStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp9) { + // qp_low:1, qp_high:1 -> kHighQp + test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + + "WebRTC-VP9QualityScaler/Enabled/"); + + DownscalingObserver test("VP9", {true, false, false}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, + NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp9) { + // qp_low:1, qp_high:255 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd + + "WebRTC-VP9QualityScaler/Enabled/"); + + DownscalingObserver test("VP9", {true, false, false}, kLowStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, AdaptsDownForHighQp_MiddleStreamActive_Vp9) { + // qp_low:1, qp_high:1 -> kHighQp + test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd + + "WebRTC-VP9QualityScaler/Enabled/"); + + DownscalingObserver test("VP9", {false, true, false}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); +} + 
+TEST_F(QualityScalingTest, + AdaptsDownForLowStartBitrate_MiddleStreamActive_Vp9) { + // qp_low:1, qp_high:255 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd + + "WebRTC-VP9QualityScaler/Enabled/"); + + DownscalingObserver test("VP9", {false, true, false}, + kSinglecastLimits360pVp9->min_start_bitrate_bps - 1, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); +} + +TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrateIfBitrateEnough_Vp9) { + // qp_low:1, qp_high:255 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,255,0,0" + kEnd + + "WebRTC-VP9QualityScaler/Enabled/"); + + DownscalingObserver test("VP9", {false, true, false}, + kSinglecastLimits360pVp9->min_start_bitrate_bps, + /*automatic_resize=*/true, + /*expect_downscale=*/false); + RunBaseTest(&test); } #if defined(WEBRTC_USE_H264) TEST_F(QualityScalingTest, AdaptsDownForHighQp_H264) { - // H264 QP thresholds, low:1, high:1 -> high QP. + // qp_low:1, qp_high:1 -> kHighQp test::ScopedFieldTrials field_trials(kPrefix + "0,0,0,0,1,1" + kEnd); - // QualityScaler always enabled. 
- const bool kAutomaticResize = false; - const bool kFrameDropping = false; - const bool kExpectAdapt = true; + DownscalingObserver test("H264", {true}, kHighStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); +} - test::FunctionVideoEncoderFactory encoder_factory( - []() { return H264Encoder::Create(cricket::VideoCodec("H264")); }); - RunTest(&encoder_factory, "H264", kHighStartBps, kAutomaticResize, - kFrameDropping, kExpectAdapt); +TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_H264) { + // qp_low:1, qp_high:51 -> kNormalQp + test::ScopedFieldTrials field_trials(kPrefix + "0,0,0,0,1,51" + kEnd); + + DownscalingObserver test("H264", {true}, kLowStartBps, + /*automatic_resize=*/true, + /*expect_downscale=*/true); + RunBaseTest(&test); } #endif // defined(WEBRTC_USE_H264) diff --git a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.h b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.h index 8b94c32b6..57738f29c 100644 --- a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy.h @@ -17,6 +17,7 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "call/video_receive_stream.h" #include "modules/include/module_common_types.h" #include "modules/video_coding/include/video_coding_defines.h" @@ -27,7 +28,6 @@ #include "rtc_base/rate_tracker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #include "video/quality_threshold.h" #include "video/stats_counter.h" #include "video/video_quality_observer.h" @@ -196,9 +196,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, RTC_GUARDED_BY(&mutex_); absl::optional last_estimated_playout_time_ms_ RTC_GUARDED_BY(&mutex_); - rtc::ThreadChecker decode_thread_; - rtc::ThreadChecker network_thread_; - rtc::ThreadChecker main_thread_; + SequenceChecker 
decode_thread_; + SequenceChecker network_thread_; + SequenceChecker main_thread_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.h b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.h index 1357c407a..7797d9321 100644 --- a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.h +++ b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.h @@ -17,6 +17,7 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/units/timestamp.h" #include "call/video_receive_stream.h" @@ -27,10 +28,9 @@ #include "rtc_base/numerics/sample_counter.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/rate_tracker.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #include "video/quality_threshold.h" #include "video/stats_counter.h" #include "video/video_quality_observer2.h" @@ -213,9 +213,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback, ScopedTaskSafety task_safety_; - SequenceChecker decode_queue_; - rtc::ThreadChecker main_thread_; - SequenceChecker incoming_render_queue_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker decode_queue_; + SequenceChecker main_thread_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker incoming_render_queue_; }; } // namespace internal diff --git a/TMessagesProj/jni/voip/webrtc/video/report_block_stats.cc b/TMessagesProj/jni/voip/webrtc/video/report_block_stats.cc index e3e95f9ae..bf6036468 100644 --- a/TMessagesProj/jni/voip/webrtc/video/report_block_stats.cc +++ b/TMessagesProj/jni/voip/webrtc/video/report_block_stats.cc @@ -31,16 +31,13 @@ ReportBlockStats::ReportBlockStats() ReportBlockStats::~ReportBlockStats() {} -void ReportBlockStats::Store(uint32_t ssrc, const RtcpStatistics& 
rtcp_stats) { +void ReportBlockStats::Store(uint32_t ssrc, + int packets_lost, + uint32_t extended_highest_sequence_number) { Report report; - report.packets_lost = rtcp_stats.packets_lost; - report.extended_highest_sequence_number = - rtcp_stats.extended_highest_sequence_number; - StoreAndAddPacketIncrement(ssrc, report); -} + report.packets_lost = packets_lost; + report.extended_highest_sequence_number = extended_highest_sequence_number; -void ReportBlockStats::StoreAndAddPacketIncrement(uint32_t ssrc, - const Report& report) { // Get diff with previous report block. const auto prev_report = prev_reports_.find(ssrc); if (prev_report != prev_reports_.end()) { diff --git a/TMessagesProj/jni/voip/webrtc/video/report_block_stats.h b/TMessagesProj/jni/voip/webrtc/video/report_block_stats.h index de4a07903..1d1140295 100644 --- a/TMessagesProj/jni/voip/webrtc/video/report_block_stats.h +++ b/TMessagesProj/jni/voip/webrtc/video/report_block_stats.h @@ -15,8 +15,6 @@ #include -#include "modules/rtp_rtcp/include/rtcp_statistics.h" - namespace webrtc { // TODO(nisse): Usefulness of this class is somewhat unclear. The inputs are @@ -32,7 +30,9 @@ class ReportBlockStats { ~ReportBlockStats(); // Updates stats and stores report block. - void Store(uint32_t ssrc, const RtcpStatistics& rtcp_stats); + void Store(uint32_t ssrc, + int packets_lost, + uint32_t extended_highest_sequence_number); // Returns the total fraction of lost packets (or -1 if less than two report // blocks have been stored). @@ -45,10 +45,6 @@ class ReportBlockStats { int32_t packets_lost; }; - // Updates the total number of packets/lost packets. - // Stores the report. - void StoreAndAddPacketIncrement(uint32_t ssrc, const Report& report); - // The total number of packets/lost packets. 
uint32_t num_sequence_numbers_; uint32_t num_lost_sequence_numbers_; diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.h b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.h index 732c9a7d7..574ccba70 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer.h @@ -16,9 +16,9 @@ #include +#include "api/sequence_checker.h" #include "modules/include/module.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_checker.h" #include "video/stream_synchronization.h" namespace webrtc { @@ -57,7 +57,7 @@ class RtpStreamsSynchronizer : public Module { StreamSynchronization::Measurements audio_measurement_ RTC_GUARDED_BY(mutex_); StreamSynchronization::Measurements video_measurement_ RTC_GUARDED_BY(mutex_); - rtc::ThreadChecker process_thread_checker_; + SequenceChecker process_thread_checker_; int64_t last_sync_time_ RTC_GUARDED_BY(&process_thread_checker_); int64_t last_stats_log_ms_ RTC_GUARDED_BY(&process_thread_checker_); }; diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.h b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.h index 6a522e801..192378aba 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_streams_synchronizer2.h @@ -13,7 +13,8 @@ #include -#include "rtc_base/synchronization/sequence_checker.h" +#include "api/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "video/stream_synchronization.h" @@ -54,7 +55,7 @@ class RtpStreamsSynchronizer { // we might be running on an rtc::Thread implementation of TaskQueue, which // does not consistently set itself as the active TaskQueue. // Instead, we rely on a SequenceChecker for now. 
- SequenceChecker main_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker main_checker_; Syncable* const syncable_video_; diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc index 78dc62137..dab8fb783 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc @@ -84,6 +84,7 @@ std::unique_ptr CreateRtpRtcpModule( RtcpRttStats* rtt_stats, RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, RtcpCnameCallback* rtcp_cname_callback, + bool non_sender_rtt_measurement, uint32_t local_ssrc) { RtpRtcpInterface::Configuration configuration; configuration.clock = clock; @@ -96,6 +97,7 @@ std::unique_ptr CreateRtpRtcpModule( rtcp_packet_type_counter_observer; configuration.rtcp_cname_callback = rtcp_cname_callback; configuration.local_media_ssrc = local_ssrc; + configuration.non_sender_rtt_measurement = non_sender_rtt_measurement; std::unique_ptr rtp_rtcp = RtpRtcp::DEPRECATED_Create(configuration); rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); @@ -208,7 +210,7 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( ProcessThread* process_thread, NackSender* nack_sender, KeyFrameRequestSender* keyframe_request_sender, - video_coding::OnCompleteFrameCallback* complete_frame_callback, + OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, rtc::scoped_refptr frame_transformer) : RtpVideoStreamReceiver(clock, @@ -238,7 +240,7 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( ProcessThread* process_thread, NackSender* nack_sender, KeyFrameRequestSender* keyframe_request_sender, - video_coding::OnCompleteFrameCallback* complete_frame_callback, + OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, rtc::scoped_refptr frame_transformer) : clock_(clock), @@ -255,22 +257,24 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( 
config->rtp.extensions)), receiving_(false), last_packet_log_ms_(-1), - rtp_rtcp_(CreateRtpRtcpModule(clock, - rtp_receive_statistics_, - transport, - rtt_stats, - rtcp_packet_type_counter_observer, - rtcp_cname_callback, - config_.rtp.local_ssrc)), + rtp_rtcp_(CreateRtpRtcpModule( + clock, + rtp_receive_statistics_, + transport, + rtt_stats, + rtcp_packet_type_counter_observer, + rtcp_cname_callback, + config_.rtp.rtcp_xr.receiver_reference_time_report, + config_.rtp.local_ssrc)), complete_frame_callback_(complete_frame_callback), keyframe_request_sender_(keyframe_request_sender), // TODO(bugs.webrtc.org/10336): Let |rtcp_feedback_buffer_| communicate // directly with |rtp_rtcp_|. rtcp_feedback_buffer_(this, nack_sender, this), - packet_buffer_(clock_, kPacketBufferStartSize, PacketBufferMaxSize()), + packet_buffer_(kPacketBufferStartSize, PacketBufferMaxSize()), has_received_frame_(false), frames_decryptable_(false), - absolute_capture_time_receiver_(clock) { + absolute_capture_time_interpolator_(clock) { constexpr bool remb_candidate = true; if (packet_router_) packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); @@ -299,9 +303,6 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( rtp_receive_statistics_->SetMaxReorderingThreshold( config_.rtp.rtx_ssrc, max_reordering_threshold); } - if (config_.rtp.rtcp_xr.receiver_reference_time_report) - rtp_rtcp_->SetRtcpXrRrtrStatus(true); - ParseFieldTrial( {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_}, field_trial::FindFullName("WebRTC-ForcePlayoutDelay")); @@ -320,8 +321,7 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( process_thread_->RegisterModule(nack_module_.get(), RTC_FROM_HERE); } - reference_finder_ = - std::make_unique(this); + reference_finder_ = std::make_unique(this); // Only construct the encrypted receiver if frame encryption is enabled. 
if (config_.crypto_options.sframe.require_frame_encryption) { @@ -333,10 +333,10 @@ RtpVideoStreamReceiver::RtpVideoStreamReceiver( } if (frame_transformer) { - frame_transformer_delegate_ = new rtc::RefCountedObject< - RtpVideoStreamReceiverFrameTransformerDelegate>( - this, std::move(frame_transformer), rtc::Thread::Current(), - config_.rtp.remote_ssrc); + frame_transformer_delegate_ = + rtc::make_ref_counted( + this, std::move(frame_transformer), rtc::Thread::Current(), + config_.rtp.remote_ssrc); frame_transformer_delegate_->Init(); } } @@ -364,6 +364,7 @@ void RtpVideoStreamReceiver::AddReceiveCodec( bool raw_payload) { if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) || field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { + MutexLock lock(&packet_buffer_lock_); packet_buffer_.ForceSpsPpsIdrIsH264Keyframe(); } payload_type_map_.emplace( @@ -376,17 +377,19 @@ void RtpVideoStreamReceiver::AddReceiveCodec( absl::optional RtpVideoStreamReceiver::GetSyncInfo() const { Syncable::Info info; if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs, - &info.capture_time_ntp_frac, nullptr, nullptr, + &info.capture_time_ntp_frac, + /*rtcp_arrival_time_secs=*/nullptr, + /*rtcp_arrival_time_frac=*/nullptr, &info.capture_time_source_clock) != 0) { return absl::nullopt; } { MutexLock lock(&sync_info_lock_); - if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) { + if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_) { return absl::nullopt; } info.latest_received_capture_timestamp = *last_received_rtp_timestamp_; - info.latest_receive_time_ms = *last_received_rtp_system_time_ms_; + info.latest_receive_time_ms = last_received_rtp_system_time_->ms(); } // Leaves info.current_delay_ms uninitialized. 
@@ -503,19 +506,9 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( const RtpPacketReceived& rtp_packet, const RTPVideoHeader& video) { RTC_DCHECK_RUN_ON(&worker_task_checker_); - auto packet = std::make_unique( - rtp_packet, video, ntp_estimator_.Estimate(rtp_packet.Timestamp()), - clock_->TimeInMilliseconds()); - // Try to extrapolate absolute capture time if it is missing. - packet->packet_info.set_absolute_capture_time( - absolute_capture_time_receiver_.OnReceivePacket( - AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(), - packet->packet_info.csrcs()), - packet->packet_info.rtp_timestamp(), - // Assume frequency is the same one for all video frames. - kVideoPayloadTypeFrequency, - packet->packet_info.absolute_capture_time())); + auto packet = + std::make_unique(rtp_packet, video); RTPVideoHeader& video_header = packet->video_header; video_header.rotation = kVideoRotation_0; @@ -542,6 +535,12 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( ParseGenericDependenciesResult generic_descriptor_state = ParseGenericDependenciesExtension(rtp_packet, &video_header); + + if (!rtp_packet.recovered()) { + UpdatePacketReceiveTimestamps( + rtp_packet, video_header.frame_type == VideoFrameType::kVideoFrameKey); + } + if (generic_descriptor_state == kDropPacket) return; @@ -560,6 +559,8 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( video_header.color_space = last_color_space_; } } + video_header.video_frame_tracking_id = + rtp_packet.GetExtension(); if (loss_notification_controller_) { if (rtp_packet.recovered()) { @@ -663,7 +664,35 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); frame_counter_.Add(packet->timestamp); - OnInsertedPacket(packet_buffer_.InsertPacket(std::move(packet))); + video_coding::PacketBuffer::InsertResult insert_result; + { + MutexLock lock(&packet_buffer_lock_); + int64_t unwrapped_rtp_seq_num = + rtp_seq_num_unwrapper_.Unwrap(rtp_packet.SequenceNumber()); + 
auto& packet_info = + packet_infos_ + .emplace( + unwrapped_rtp_seq_num, + RtpPacketInfo( + rtp_packet.Ssrc(), rtp_packet.Csrcs(), + rtp_packet.Timestamp(), + /*audio_level=*/absl::nullopt, + rtp_packet.GetExtension(), + /*receive_time_ms=*/clock_->TimeInMilliseconds())) + .first->second; + + // Try to extrapolate absolute capture time if it is missing. + packet_info.set_absolute_capture_time( + absolute_capture_time_interpolator_.OnReceivePacket( + AbsoluteCaptureTimeInterpolator::GetSource(packet_info.ssrc(), + packet_info.csrcs()), + packet_info.rtp_timestamp(), + // Assume frequency is the same one for all video frames. + kVideoPayloadTypeFrequency, packet_info.absolute_capture_time())); + + insert_result = packet_buffer_.InsertPacket(std::move(packet)); + } + OnInsertedPacket(std::move(insert_result)); } void RtpVideoStreamReceiver::OnRecoveredPacket(const uint8_t* rtp_packet, @@ -696,35 +725,6 @@ void RtpVideoStreamReceiver::OnRtpPacket(const RtpPacketReceived& packet) { return; } - if (!packet.recovered()) { - // TODO(nisse): Exclude out-of-order packets? - int64_t now_ms = clock_->TimeInMilliseconds(); - { - MutexLock lock(&sync_info_lock_); - last_received_rtp_timestamp_ = packet.Timestamp(); - last_received_rtp_system_time_ms_ = now_ms; - } - // Periodically log the RTP header of incoming packets. 
- if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) { - rtc::StringBuilder ss; - ss << "Packet received on SSRC: " << packet.Ssrc() - << " with payload type: " << static_cast(packet.PayloadType()) - << ", timestamp: " << packet.Timestamp() - << ", sequence number: " << packet.SequenceNumber() - << ", arrival time: " << packet.arrival_time_ms(); - int32_t time_offset; - if (packet.GetExtension(&time_offset)) { - ss << ", toffset: " << time_offset; - } - uint32_t send_time; - if (packet.GetExtension(&send_time)) { - ss << ", abs send time: " << send_time; - } - RTC_LOG(LS_INFO) << ss.str(); - last_packet_log_ms_ = now_ms; - } - } - ReceivePacket(packet); // Update receive statistics after ReceivePacket. @@ -779,76 +779,100 @@ bool RtpVideoStreamReceiver::IsDecryptable() const { void RtpVideoStreamReceiver::OnInsertedPacket( video_coding::PacketBuffer::InsertResult result) { - video_coding::PacketBuffer::Packet* first_packet = nullptr; - int max_nack_count; - int64_t min_recv_time; - int64_t max_recv_time; - std::vector> payloads; - RtpPacketInfos::vector_type packet_infos; + std::vector> assembled_frames; + { + MutexLock lock(&packet_buffer_lock_); + video_coding::PacketBuffer::Packet* first_packet = nullptr; + int max_nack_count; + int64_t min_recv_time; + int64_t max_recv_time; + std::vector> payloads; + RtpPacketInfos::vector_type packet_infos; - bool frame_boundary = true; - for (auto& packet : result.packets) { - // PacketBuffer promisses frame boundaries are correctly set on each - // packet. Document that assumption with the DCHECKs. 
- RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame()); - if (packet->is_first_packet_in_frame()) { - first_packet = packet.get(); - max_nack_count = packet->times_nacked; - min_recv_time = packet->packet_info.receive_time_ms(); - max_recv_time = packet->packet_info.receive_time_ms(); - payloads.clear(); - packet_infos.clear(); - } else { - max_nack_count = std::max(max_nack_count, packet->times_nacked); - min_recv_time = - std::min(min_recv_time, packet->packet_info.receive_time_ms()); - max_recv_time = - std::max(max_recv_time, packet->packet_info.receive_time_ms()); - } - payloads.emplace_back(packet->video_payload); - packet_infos.push_back(packet->packet_info); - - frame_boundary = packet->is_last_packet_in_frame(); - if (packet->is_last_packet_in_frame()) { - auto depacketizer_it = payload_type_map_.find(first_packet->payload_type); - RTC_CHECK(depacketizer_it != payload_type_map_.end()); - - rtc::scoped_refptr bitstream = - depacketizer_it->second->AssembleFrame(payloads); - if (!bitstream) { - // Failed to assemble a frame. Discard and continue. - continue; + bool frame_boundary = true; + for (auto& packet : result.packets) { + // PacketBuffer promisses frame boundaries are correctly set on each + // packet. Document that assumption with the DCHECKs. 
+ RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame()); + int64_t unwrapped_rtp_seq_num = + rtp_seq_num_unwrapper_.Unwrap(packet->seq_num); + RTC_DCHECK(packet_infos_.count(unwrapped_rtp_seq_num) > 0); + RtpPacketInfo& packet_info = packet_infos_[unwrapped_rtp_seq_num]; + if (packet->is_first_packet_in_frame()) { + first_packet = packet.get(); + max_nack_count = packet->times_nacked; + min_recv_time = packet_info.receive_time().ms(); + max_recv_time = packet_info.receive_time().ms(); + payloads.clear(); + packet_infos.clear(); + } else { + max_nack_count = std::max(max_nack_count, packet->times_nacked); + min_recv_time = + std::min(min_recv_time, packet_info.receive_time().ms()); + max_recv_time = + std::max(max_recv_time, packet_info.receive_time().ms()); } + payloads.emplace_back(packet->video_payload); + packet_infos.push_back(packet_info); - const video_coding::PacketBuffer::Packet& last_packet = *packet; - OnAssembledFrame(std::make_unique( - first_packet->seq_num, // - last_packet.seq_num, // - last_packet.marker_bit, // - max_nack_count, // - min_recv_time, // - max_recv_time, // - first_packet->timestamp, // - first_packet->ntp_time_ms, // - last_packet.video_header.video_timing, // - first_packet->payload_type, // - first_packet->codec(), // - last_packet.video_header.rotation, // - last_packet.video_header.content_type, // - first_packet->video_header, // - last_packet.video_header.color_space, // - RtpPacketInfos(std::move(packet_infos)), // - std::move(bitstream))); + frame_boundary = packet->is_last_packet_in_frame(); + if (packet->is_last_packet_in_frame()) { + auto depacketizer_it = + payload_type_map_.find(first_packet->payload_type); + RTC_CHECK(depacketizer_it != payload_type_map_.end()); + + rtc::scoped_refptr bitstream = + depacketizer_it->second->AssembleFrame(payloads); + if (!bitstream) { + // Failed to assemble a frame. Discard and continue. 
+ continue; + } + + const video_coding::PacketBuffer::Packet& last_packet = *packet; + assembled_frames.push_back(std::make_unique( + first_packet->seq_num, // + last_packet.seq_num, // + last_packet.marker_bit, // + max_nack_count, // + min_recv_time, // + max_recv_time, // + first_packet->timestamp, // + ntp_estimator_.Estimate(first_packet->timestamp), // + last_packet.video_header.video_timing, // + first_packet->payload_type, // + first_packet->codec(), // + last_packet.video_header.rotation, // + last_packet.video_header.content_type, // + first_packet->video_header, // + last_packet.video_header.color_space, // + RtpPacketInfos(std::move(packet_infos)), // + std::move(bitstream))); + } } - } - RTC_DCHECK(frame_boundary); + RTC_DCHECK(frame_boundary); + + if (result.buffer_cleared) { + packet_infos_.clear(); + } + } // packet_buffer_lock_ + if (result.buffer_cleared) { + { + MutexLock lock(&sync_info_lock_); + last_received_rtp_system_time_.reset(); + last_received_keyframe_rtp_system_time_.reset(); + last_received_keyframe_rtp_timestamp_.reset(); + } RequestKeyFrame(); } + + for (auto& frame : assembled_frames) { + OnAssembledFrame(std::move(frame)); + } } void RtpVideoStreamReceiver::OnAssembledFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&network_tc_); RTC_DCHECK(frame); @@ -887,12 +911,11 @@ void RtpVideoStreamReceiver::OnAssembledFrame( if (frame_is_newer) { // When we reset the |reference_finder_| we don't want new picture ids // to overlap with old picture ids. To ensure that doesn't happen we - // start from the |last_completed_picture_id_| and add an offset in case - // of reordering. - reference_finder_ = - std::make_unique( - this, last_completed_picture_id_ + - std::numeric_limits::max()); + // start from the |last_completed_picture_id_| and add an offset in + // case of reordering. 
+ reference_finder_ = std::make_unique( + this, + last_completed_picture_id_ + std::numeric_limits::max()); current_codec_ = frame->codec_type(); } else { // Old frame from before the codec switch, discard it. @@ -918,21 +941,19 @@ void RtpVideoStreamReceiver::OnAssembledFrame( } void RtpVideoStreamReceiver::OnCompleteFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { { MutexLock lock(&last_seq_num_mutex_); - video_coding::RtpFrameObject* rtp_frame = - static_cast(frame.get()); - last_seq_num_for_pic_id_[rtp_frame->id.picture_id] = - rtp_frame->last_seq_num(); + RtpFrameObject* rtp_frame = static_cast(frame.get()); + last_seq_num_for_pic_id_[rtp_frame->Id()] = rtp_frame->last_seq_num(); } last_completed_picture_id_ = - std::max(last_completed_picture_id_, frame->id.picture_id); + std::max(last_completed_picture_id_, frame->Id()); complete_frame_callback_->OnCompleteFrame(std::move(frame)); } void RtpVideoStreamReceiver::OnDecryptedFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { MutexLock lock(&reference_finder_lock_); reference_finder_->ManageFrame(std::move(frame)); } @@ -958,7 +979,7 @@ void RtpVideoStreamReceiver::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&network_tc_); frame_transformer_delegate_ = - new rtc::RefCountedObject( + rtc::make_ref_counted( this, std::move(frame_transformer), rtc::Thread::Current(), config_.rtp.remote_ssrc); frame_transformer_delegate_->Init(); @@ -970,12 +991,21 @@ void RtpVideoStreamReceiver::UpdateRtt(int64_t max_rtt_ms) { } absl::optional RtpVideoStreamReceiver::LastReceivedPacketMs() const { - return packet_buffer_.LastReceivedPacketMs(); + MutexLock lock(&sync_info_lock_); + if (last_received_rtp_system_time_) { + return absl::optional(last_received_rtp_system_time_->ms()); + } + return absl::nullopt; } absl::optional RtpVideoStreamReceiver::LastReceivedKeyframePacketMs() const { - return packet_buffer_.LastReceivedKeyframePacketMs(); + MutexLock 
lock(&sync_info_lock_); + if (last_received_keyframe_rtp_system_time_) { + return absl::optional( + last_received_keyframe_rtp_system_time_->ms()); + } + return absl::nullopt; } void RtpVideoStreamReceiver::AddSecondarySink(RtpPacketSinkInterface* sink) { @@ -999,7 +1029,7 @@ void RtpVideoStreamReceiver::RemoveSecondarySink( } void RtpVideoStreamReceiver::ManageFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { MutexLock lock(&reference_finder_lock_); reference_finder_->ManageFrame(std::move(frame)); } @@ -1058,7 +1088,14 @@ void RtpVideoStreamReceiver::NotifyReceiverOfEmptyPacket(uint16_t seq_num) { MutexLock lock(&reference_finder_lock_); reference_finder_->PaddingReceived(seq_num); } - OnInsertedPacket(packet_buffer_.InsertPadding(seq_num)); + + video_coding::PacketBuffer::InsertResult insert_result; + { + MutexLock lock(&packet_buffer_lock_); + insert_result = packet_buffer_.InsertPadding(seq_num); + } + OnInsertedPacket(std::move(insert_result)); + if (nack_module_) { nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false, /* is _recovered = */ false); @@ -1105,7 +1142,7 @@ bool RtpVideoStreamReceiver::DeliverRtcp(const uint8_t* rtcp_packet, absl::optional remote_to_local_clock_offset_ms = ntp_estimator_.EstimateRemoteToLocalClockOffsetMs(); if (remote_to_local_clock_offset_ms.has_value()) { - absolute_capture_time_receiver_.SetRemoteToLocalClockOffset( + capture_clock_offset_updater_.SetRemoteToLocalClockOffset( Int64MsToQ32x32(*remote_to_local_clock_offset_ms)); } } @@ -1140,7 +1177,13 @@ void RtpVideoStreamReceiver::FrameDecoded(int64_t picture_id) { } } if (seq_num != -1) { - packet_buffer_.ClearTo(seq_num); + { + MutexLock lock(&packet_buffer_lock_); + packet_buffer_.ClearTo(seq_num); + int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(seq_num); + packet_infos_.erase(packet_infos_.begin(), + packet_infos_.upper_bound(unwrapped_rtp_seq_num)); + } MutexLock lock(&reference_finder_lock_); reference_finder_->ClearTo(seq_num); 
} @@ -1211,4 +1254,40 @@ void RtpVideoStreamReceiver::InsertSpsPpsIntoTracker(uint8_t payload_type) { sprop_decoder.pps_nalu()); } +void RtpVideoStreamReceiver::UpdatePacketReceiveTimestamps( + const RtpPacketReceived& packet, + bool is_keyframe) { + Timestamp now = clock_->CurrentTime(); + { + MutexLock lock(&sync_info_lock_); + if (is_keyframe || + last_received_keyframe_rtp_timestamp_ == packet.Timestamp()) { + last_received_keyframe_rtp_timestamp_ = packet.Timestamp(); + last_received_keyframe_rtp_system_time_ = now; + } + last_received_rtp_system_time_ = now; + last_received_rtp_timestamp_ = packet.Timestamp(); + } + + // Periodically log the RTP header of incoming packets. + if (now.ms() - last_packet_log_ms_ > kPacketLogIntervalMs) { + rtc::StringBuilder ss; + ss << "Packet received on SSRC: " << packet.Ssrc() + << " with payload type: " << static_cast(packet.PayloadType()) + << ", timestamp: " << packet.Timestamp() + << ", sequence number: " << packet.SequenceNumber() + << ", arrival time: " << ToString(packet.arrival_time()); + int32_t time_offset; + if (packet.GetExtension(&time_offset)) { + ss << ", toffset: " << time_offset; + } + uint32_t send_time; + if (packet.GetExtension(&send_time)) { + ss << ", abs send time: " << send_time; + } + RTC_LOG(LS_INFO) << ss.str(); + last_packet_log_ms_ = now.ms(); + } +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h index 2746295fc..c582a5633 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h @@ -21,6 +21,8 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/sequence_checker.h" +#include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video_codecs/video_codec.h" #include 
"call/rtp_packet_sink_interface.h" @@ -31,7 +33,8 @@ #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h" +#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" +#include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" @@ -48,9 +51,8 @@ #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #include "video/buffered_frame_decryptor.h" #include "video/rtp_video_stream_receiver_frame_transformer_delegate.h" @@ -70,7 +72,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, public RecoveredPacketReceiver, public RtpPacketSinkInterface, public KeyFrameRequestSender, - public video_coding::OnCompleteFrameCallback, + public OnCompleteFrameCallback, public OnDecryptedFrameCallback, public OnDecryptionStatusChangeCallback, public RtpVideoFrameReceiver { @@ -92,7 +94,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, // The KeyFrameRequestSender is optional; if not provided, key frame // requests are sent via the internal RtpRtcp module. 
KeyFrameRequestSender* keyframe_request_sender, - video_coding::OnCompleteFrameCallback* complete_frame_callback, + OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, rtc::scoped_refptr frame_transformer); @@ -113,7 +115,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, // The KeyFrameRequestSender is optional; if not provided, key frame // requests are sent via the internal RtpRtcp module. KeyFrameRequestSender* keyframe_request_sender, - video_coding::OnCompleteFrameCallback* complete_frame_callback, + OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, rtc::scoped_refptr frame_transformer); ~RtpVideoStreamReceiver() override; @@ -175,12 +177,10 @@ class RtpVideoStreamReceiver : public LossNotificationSender, void RequestPacketRetransmit(const std::vector& sequence_numbers); // Implements OnCompleteFrameCallback. - void OnCompleteFrame( - std::unique_ptr frame) override; + void OnCompleteFrame(std::unique_ptr frame) override; // Implements OnDecryptedFrameCallback. - void OnDecryptedFrame( - std::unique_ptr frame) override; + void OnDecryptedFrame(std::unique_ptr frame) override; // Implements OnDecryptionStatusChangeCallback. void OnDecryptionStatusChange( @@ -211,8 +211,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, private: // Implements RtpVideoFrameReceiver. - void ManageFrame( - std::unique_ptr frame) override; + void ManageFrame(std::unique_ptr frame) override; // Used for buffering RTCP feedback messages and sending them all together. 
// Note: @@ -308,7 +307,11 @@ class RtpVideoStreamReceiver : public LossNotificationSender, ParseGenericDependenciesResult ParseGenericDependenciesExtension( const RtpPacketReceived& rtp_packet, RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_); - void OnAssembledFrame(std::unique_ptr frame); + void OnAssembledFrame(std::unique_ptr frame) + RTC_LOCKS_EXCLUDED(packet_buffer_lock_); + void UpdatePacketReceiveTimestamps(const RtpPacketReceived& packet, + bool is_keyframe) + RTC_RUN_ON(worker_task_checker_); Clock* const clock_; // Ownership of this object lies with VideoReceiveStream, which owns |this|. @@ -326,20 +329,21 @@ class RtpVideoStreamReceiver : public LossNotificationSender, ReceiveStatistics* const rtp_receive_statistics_; std::unique_ptr ulpfec_receiver_; - SequenceChecker worker_task_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_task_checker_; bool receiving_ RTC_GUARDED_BY(worker_task_checker_); int64_t last_packet_log_ms_ RTC_GUARDED_BY(worker_task_checker_); const std::unique_ptr rtp_rtcp_; - video_coding::OnCompleteFrameCallback* complete_frame_callback_; + OnCompleteFrameCallback* complete_frame_callback_; KeyFrameRequestSender* const keyframe_request_sender_; RtcpFeedbackBuffer rtcp_feedback_buffer_; std::unique_ptr nack_module_; std::unique_ptr loss_notification_controller_; - video_coding::PacketBuffer packet_buffer_; + mutable Mutex packet_buffer_lock_; + video_coding::PacketBuffer packet_buffer_ RTC_GUARDED_BY(packet_buffer_lock_); UniqueTimestampCounter frame_counter_ RTC_GUARDED_BY(worker_task_checker_); SeqNumUnwrapper frame_id_unwrapper_ RTC_GUARDED_BY(worker_task_checker_); @@ -355,7 +359,7 @@ class RtpVideoStreamReceiver : public LossNotificationSender, RTC_GUARDED_BY(worker_task_checker_); Mutex reference_finder_lock_; - std::unique_ptr reference_finder_ + std::unique_ptr reference_finder_ RTC_GUARDED_BY(reference_finder_lock_); absl::optional current_codec_; uint32_t last_assembled_frame_rtp_timestamp_; @@ 
-389,12 +393,16 @@ class RtpVideoStreamReceiver : public LossNotificationSender, mutable Mutex sync_info_lock_; absl::optional last_received_rtp_timestamp_ RTC_GUARDED_BY(sync_info_lock_); - absl::optional last_received_rtp_system_time_ms_ + absl::optional last_received_keyframe_rtp_timestamp_ + RTC_GUARDED_BY(sync_info_lock_); + absl::optional last_received_rtp_system_time_ + RTC_GUARDED_BY(sync_info_lock_); + absl::optional last_received_keyframe_rtp_system_time_ RTC_GUARDED_BY(sync_info_lock_); // Used to validate the buffered frame decryptor is always run on the correct // thread. - rtc::ThreadChecker network_tc_; + SequenceChecker network_tc_; // Handles incoming encrypted frames and forwards them to the // rtp_reference_finder if they are decryptable. std::unique_ptr buffered_frame_decryptor_ @@ -402,13 +410,21 @@ class RtpVideoStreamReceiver : public LossNotificationSender, std::atomic frames_decryptable_; absl::optional last_color_space_; - AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_ + AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_ + RTC_GUARDED_BY(worker_task_checker_); + + CaptureClockOffsetUpdater capture_clock_offset_updater_ RTC_GUARDED_BY(worker_task_checker_); int64_t last_completed_picture_id_ = 0; rtc::scoped_refptr frame_transformer_delegate_; + + SeqNumUnwrapper rtp_seq_num_unwrapper_ + RTC_GUARDED_BY(packet_buffer_lock_); + std::map packet_infos_ + RTC_GUARDED_BY(packet_buffer_lock_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc index d623e7a87..79bef284e 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc @@ -83,6 +83,7 @@ std::unique_ptr CreateRtpRtcpModule( RtcpRttStats* rtt_stats, RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, RtcpCnameCallback* 
rtcp_cname_callback, + bool non_sender_rtt_measurement, uint32_t local_ssrc) { RtpRtcpInterface::Configuration configuration; configuration.clock = clock; @@ -95,6 +96,7 @@ std::unique_ptr CreateRtpRtcpModule( rtcp_packet_type_counter_observer; configuration.rtcp_cname_callback = rtcp_cname_callback; configuration.local_media_ssrc = local_ssrc; + configuration.non_sender_rtt_measurement = non_sender_rtt_measurement; std::unique_ptr rtp_rtcp = ModuleRtpRtcpImpl2::Create(configuration); @@ -112,6 +114,7 @@ std::unique_ptr MaybeConstructNackModule( if (config.rtp.nack.rtp_history_ms == 0) return nullptr; + // TODO(bugs.webrtc.org/12420): pass rtp_history_ms to the nack module. return std::make_unique(current_queue, clock, nack_sender, keyframe_request_sender); } @@ -211,7 +214,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( ProcessThread* process_thread, NackSender* nack_sender, KeyFrameRequestSender* keyframe_request_sender, - video_coding::OnCompleteFrameCallback* complete_frame_callback, + OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, rtc::scoped_refptr frame_transformer) : clock_(clock), @@ -228,13 +231,15 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( config->rtp.extensions)), receiving_(false), last_packet_log_ms_(-1), - rtp_rtcp_(CreateRtpRtcpModule(clock, - rtp_receive_statistics_, - transport, - rtt_stats, - rtcp_packet_type_counter_observer, - rtcp_cname_callback, - config_.rtp.local_ssrc)), + rtp_rtcp_(CreateRtpRtcpModule( + clock, + rtp_receive_statistics_, + transport, + rtt_stats, + rtcp_packet_type_counter_observer, + rtcp_cname_callback, + config_.rtp.rtcp_xr.receiver_reference_time_report, + config_.rtp.local_ssrc)), complete_frame_callback_(complete_frame_callback), keyframe_request_sender_(keyframe_request_sender), // TODO(bugs.webrtc.org/10336): Let |rtcp_feedback_buffer_| communicate @@ -245,10 +250,10 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( clock_, &rtcp_feedback_buffer_, 
&rtcp_feedback_buffer_)), - packet_buffer_(clock_, kPacketBufferStartSize, PacketBufferMaxSize()), + packet_buffer_(kPacketBufferStartSize, PacketBufferMaxSize()), has_received_frame_(false), frames_decryptable_(false), - absolute_capture_time_receiver_(clock) { + absolute_capture_time_interpolator_(clock) { constexpr bool remb_candidate = true; if (packet_router_) packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); @@ -277,8 +282,6 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( rtp_receive_statistics_->SetMaxReorderingThreshold( config_.rtp.rtx_ssrc, max_reordering_threshold); } - if (config_.rtp.rtcp_xr.receiver_reference_time_report) - rtp_rtcp_->SetRtcpXrRrtrStatus(true); ParseFieldTrial( {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_}, @@ -292,8 +295,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( &rtcp_feedback_buffer_); } - reference_finder_ = - std::make_unique(this); + reference_finder_ = std::make_unique(this); // Only construct the encrypted receiver if frame encryption is enabled. 
if (config_.crypto_options.sframe.require_frame_encryption) { @@ -305,17 +307,15 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( } if (frame_transformer) { - frame_transformer_delegate_ = new rtc::RefCountedObject< - RtpVideoStreamReceiverFrameTransformerDelegate>( - this, std::move(frame_transformer), rtc::Thread::Current(), - config_.rtp.remote_ssrc); + frame_transformer_delegate_ = + rtc::make_ref_counted( + this, std::move(frame_transformer), rtc::Thread::Current(), + config_.rtp.remote_ssrc); frame_transformer_delegate_->Init(); } } RtpVideoStreamReceiver2::~RtpVideoStreamReceiver2() { - RTC_DCHECK(secondary_sinks_.empty()); - process_thread_->DeRegisterModule(rtp_rtcp_.get()); if (packet_router_) @@ -346,16 +346,18 @@ absl::optional RtpVideoStreamReceiver2::GetSyncInfo() const { RTC_DCHECK_RUN_ON(&worker_task_checker_); Syncable::Info info; if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs, - &info.capture_time_ntp_frac, nullptr, nullptr, + &info.capture_time_ntp_frac, + /*rtcp_arrival_time_secs=*/nullptr, + /*rtcp_arrival_time_frac=*/nullptr, &info.capture_time_source_clock) != 0) { return absl::nullopt; } - if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) { + if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_) { return absl::nullopt; } info.latest_received_capture_timestamp = *last_received_rtp_timestamp_; - info.latest_receive_time_ms = *last_received_rtp_system_time_ms_; + info.latest_receive_time_ms = last_received_rtp_system_time_->ms(); // Leaves info.current_delay_ms uninitialized. 
return info; @@ -471,19 +473,31 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( const RtpPacketReceived& rtp_packet, const RTPVideoHeader& video) { RTC_DCHECK_RUN_ON(&worker_task_checker_); - auto packet = std::make_unique( - rtp_packet, video, ntp_estimator_.Estimate(rtp_packet.Timestamp()), - clock_->TimeInMilliseconds()); + + auto packet = + std::make_unique(rtp_packet, video); + + int64_t unwrapped_rtp_seq_num = + rtp_seq_num_unwrapper_.Unwrap(rtp_packet.SequenceNumber()); + auto& packet_info = + packet_infos_ + .emplace( + unwrapped_rtp_seq_num, + RtpPacketInfo( + rtp_packet.Ssrc(), rtp_packet.Csrcs(), rtp_packet.Timestamp(), + /*audio_level=*/absl::nullopt, + rtp_packet.GetExtension(), + /*receive_time_ms=*/clock_->CurrentTime())) + .first->second; // Try to extrapolate absolute capture time if it is missing. - packet->packet_info.set_absolute_capture_time( - absolute_capture_time_receiver_.OnReceivePacket( - AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(), - packet->packet_info.csrcs()), - packet->packet_info.rtp_timestamp(), + packet_info.set_absolute_capture_time( + absolute_capture_time_interpolator_.OnReceivePacket( + AbsoluteCaptureTimeInterpolator::GetSource(packet_info.ssrc(), + packet_info.csrcs()), + packet_info.rtp_timestamp(), // Assume frequency is the same one for all video frames. 
- kVideoPayloadTypeFrequency, - packet->packet_info.absolute_capture_time())); + kVideoPayloadTypeFrequency, packet_info.absolute_capture_time())); RTPVideoHeader& video_header = packet->video_header; video_header.rotation = kVideoRotation_0; @@ -510,6 +524,12 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( ParseGenericDependenciesResult generic_descriptor_state = ParseGenericDependenciesExtension(rtp_packet, &video_header); + + if (!rtp_packet.recovered()) { + UpdatePacketReceiveTimestamps( + rtp_packet, video_header.frame_type == VideoFrameType::kVideoFrameKey); + } + if (generic_descriptor_state == kDropPacket) return; @@ -528,6 +548,8 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( video_header.color_space = last_color_space_; } } + video_header.video_frame_tracking_id = + rtp_packet.GetExtension(); if (loss_notification_controller_) { if (rtp_packet.recovered()) { @@ -665,34 +687,6 @@ void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) { return; } - if (!packet.recovered()) { - // TODO(nisse): Exclude out-of-order packets? - int64_t now_ms = clock_->TimeInMilliseconds(); - - last_received_rtp_timestamp_ = packet.Timestamp(); - last_received_rtp_system_time_ms_ = now_ms; - - // Periodically log the RTP header of incoming packets. 
- if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) { - rtc::StringBuilder ss; - ss << "Packet received on SSRC: " << packet.Ssrc() - << " with payload type: " << static_cast(packet.PayloadType()) - << ", timestamp: " << packet.Timestamp() - << ", sequence number: " << packet.SequenceNumber() - << ", arrival time: " << packet.arrival_time_ms(); - int32_t time_offset; - if (packet.GetExtension(&time_offset)) { - ss << ", toffset: " << time_offset; - } - uint32_t send_time; - if (packet.GetExtension(&send_time)) { - ss << ", abs send time: " << send_time; - } - RTC_LOG(LS_INFO) << ss.str(); - last_packet_log_ms_ = now_ms; - } - } - ReceivePacket(packet); // Update receive statistics after ReceivePacket. @@ -702,8 +696,8 @@ void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) { rtp_receive_statistics_->OnRtpPacket(packet); } - for (RtpPacketSinkInterface* secondary_sink : secondary_sinks_) { - secondary_sink->OnRtpPacket(packet); + if (config_.rtp.packet_sink_) { + config_.rtp.packet_sink_->OnRtpPacket(packet); } } @@ -763,22 +757,24 @@ void RtpVideoStreamReceiver2::OnInsertedPacket( // PacketBuffer promisses frame boundaries are correctly set on each // packet. Document that assumption with the DCHECKs. 
RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame()); + int64_t unwrapped_rtp_seq_num = + rtp_seq_num_unwrapper_.Unwrap(packet->seq_num); + RTC_DCHECK(packet_infos_.count(unwrapped_rtp_seq_num) > 0); + RtpPacketInfo& packet_info = packet_infos_[unwrapped_rtp_seq_num]; if (packet->is_first_packet_in_frame()) { first_packet = packet.get(); max_nack_count = packet->times_nacked; - min_recv_time = packet->packet_info.receive_time_ms(); - max_recv_time = packet->packet_info.receive_time_ms(); + min_recv_time = packet_info.receive_time().ms(); + max_recv_time = packet_info.receive_time().ms(); payloads.clear(); packet_infos.clear(); } else { max_nack_count = std::max(max_nack_count, packet->times_nacked); - min_recv_time = - std::min(min_recv_time, packet->packet_info.receive_time_ms()); - max_recv_time = - std::max(max_recv_time, packet->packet_info.receive_time_ms()); + min_recv_time = std::min(min_recv_time, packet_info.receive_time().ms()); + max_recv_time = std::max(max_recv_time, packet_info.receive_time().ms()); } payloads.emplace_back(packet->video_payload); - packet_infos.push_back(packet->packet_info); + packet_infos.push_back(packet_info); frame_boundary = packet->is_last_packet_in_frame(); if (packet->is_last_packet_in_frame()) { @@ -793,34 +789,38 @@ void RtpVideoStreamReceiver2::OnInsertedPacket( } const video_coding::PacketBuffer::Packet& last_packet = *packet; - OnAssembledFrame(std::make_unique( - first_packet->seq_num, // - last_packet.seq_num, // - last_packet.marker_bit, // - max_nack_count, // - min_recv_time, // - max_recv_time, // - first_packet->timestamp, // - first_packet->ntp_time_ms, // - last_packet.video_header.video_timing, // - first_packet->payload_type, // - first_packet->codec(), // - last_packet.video_header.rotation, // - last_packet.video_header.content_type, // - first_packet->video_header, // - last_packet.video_header.color_space, // - RtpPacketInfos(std::move(packet_infos)), // + OnAssembledFrame(std::make_unique( + 
first_packet->seq_num, // + last_packet.seq_num, // + last_packet.marker_bit, // + max_nack_count, // + min_recv_time, // + max_recv_time, // + first_packet->timestamp, // + ntp_estimator_.Estimate(first_packet->timestamp), // + last_packet.video_header.video_timing, // + first_packet->payload_type, // + first_packet->codec(), // + last_packet.video_header.rotation, // + last_packet.video_header.content_type, // + first_packet->video_header, // + last_packet.video_header.color_space, // + RtpPacketInfos(std::move(packet_infos)), // std::move(bitstream))); } } RTC_DCHECK(frame_boundary); if (result.buffer_cleared) { + last_received_rtp_system_time_.reset(); + last_received_keyframe_rtp_system_time_.reset(); + last_received_keyframe_rtp_timestamp_.reset(); + packet_infos_.clear(); RequestKeyFrame(); } } void RtpVideoStreamReceiver2::OnAssembledFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_task_checker_); RTC_DCHECK(frame); @@ -860,10 +860,9 @@ void RtpVideoStreamReceiver2::OnAssembledFrame( // to overlap with old picture ids. To ensure that doesn't happen we // start from the |last_completed_picture_id_| and add an offset in case // of reordering. - reference_finder_ = - std::make_unique( - this, last_completed_picture_id_ + - std::numeric_limits::max()); + reference_finder_ = std::make_unique( + this, + last_completed_picture_id_ + std::numeric_limits::max()); current_codec_ = frame->codec_type(); } else { // Old frame from before the codec switch, discard it. 
@@ -889,20 +888,18 @@ void RtpVideoStreamReceiver2::OnAssembledFrame( } void RtpVideoStreamReceiver2::OnCompleteFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_task_checker_); - video_coding::RtpFrameObject* rtp_frame = - static_cast(frame.get()); - last_seq_num_for_pic_id_[rtp_frame->id.picture_id] = - rtp_frame->last_seq_num(); + RtpFrameObject* rtp_frame = static_cast(frame.get()); + last_seq_num_for_pic_id_[rtp_frame->Id()] = rtp_frame->last_seq_num(); last_completed_picture_id_ = - std::max(last_completed_picture_id_, frame->id.picture_id); + std::max(last_completed_picture_id_, frame->Id()); complete_frame_callback_->OnCompleteFrame(std::move(frame)); } void RtpVideoStreamReceiver2::OnDecryptedFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_task_checker_); reference_finder_->ManageFrame(std::move(frame)); } @@ -930,7 +927,7 @@ void RtpVideoStreamReceiver2::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_task_checker_); frame_transformer_delegate_ = - new rtc::RefCountedObject( + rtc::make_ref_counted( this, std::move(frame_transformer), rtc::Thread::Current(), config_.rtp.remote_ssrc); frame_transformer_delegate_->Init(); @@ -943,36 +940,25 @@ void RtpVideoStreamReceiver2::UpdateRtt(int64_t max_rtt_ms) { } absl::optional RtpVideoStreamReceiver2::LastReceivedPacketMs() const { - return packet_buffer_.LastReceivedPacketMs(); + RTC_DCHECK_RUN_ON(&worker_task_checker_); + if (last_received_rtp_system_time_) { + return absl::optional(last_received_rtp_system_time_->ms()); + } + return absl::nullopt; } absl::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() const { - return packet_buffer_.LastReceivedKeyframePacketMs(); -} - -void RtpVideoStreamReceiver2::AddSecondarySink(RtpPacketSinkInterface* sink) { RTC_DCHECK_RUN_ON(&worker_task_checker_); - RTC_DCHECK(!absl::c_linear_search(secondary_sinks_, sink)); - 
secondary_sinks_.push_back(sink); -} - -void RtpVideoStreamReceiver2::RemoveSecondarySink( - const RtpPacketSinkInterface* sink) { - RTC_DCHECK_RUN_ON(&worker_task_checker_); - auto it = absl::c_find(secondary_sinks_, sink); - if (it == secondary_sinks_.end()) { - // We might be rolling-back a call whose setup failed mid-way. In such a - // case, it's simpler to remove "everything" rather than remember what - // has already been added. - RTC_LOG(LS_WARNING) << "Removal of unknown sink."; - return; + if (last_received_keyframe_rtp_system_time_) { + return absl::optional( + last_received_keyframe_rtp_system_time_->ms()); } - secondary_sinks_.erase(it); + return absl::nullopt; } void RtpVideoStreamReceiver2::ManageFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_task_checker_); reference_finder_->ManageFrame(std::move(frame)); } @@ -1079,7 +1065,7 @@ bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet, absl::optional remote_to_local_clock_offset_ms = ntp_estimator_.EstimateRemoteToLocalClockOffsetMs(); if (remote_to_local_clock_offset_ms.has_value()) { - absolute_capture_time_receiver_.SetRemoteToLocalClockOffset( + capture_clock_offset_updater_.SetRemoteToLocalClockOffset( Int64MsToQ32x32(*remote_to_local_clock_offset_ms)); } } @@ -1112,6 +1098,9 @@ void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) { } if (seq_num != -1) { + int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(seq_num); + packet_infos_.erase(packet_infos_.begin(), + packet_infos_.upper_bound(unwrapped_rtp_seq_num)); packet_buffer_.ClearTo(seq_num); reference_finder_->ClearTo(seq_num); } @@ -1185,4 +1174,37 @@ void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) { sprop_decoder.pps_nalu()); } +void RtpVideoStreamReceiver2::UpdatePacketReceiveTimestamps( + const RtpPacketReceived& packet, + bool is_keyframe) { + Timestamp now = clock_->CurrentTime(); + if (is_keyframe || + 
last_received_keyframe_rtp_timestamp_ == packet.Timestamp()) { + last_received_keyframe_rtp_timestamp_ = packet.Timestamp(); + last_received_keyframe_rtp_system_time_ = now; + } + last_received_rtp_system_time_ = now; + last_received_rtp_timestamp_ = packet.Timestamp(); + + // Periodically log the RTP header of incoming packets. + if (now.ms() - last_packet_log_ms_ > kPacketLogIntervalMs) { + rtc::StringBuilder ss; + ss << "Packet received on SSRC: " << packet.Ssrc() + << " with payload type: " << static_cast(packet.PayloadType()) + << ", timestamp: " << packet.Timestamp() + << ", sequence number: " << packet.SequenceNumber() + << ", arrival time: " << ToString(packet.arrival_time()); + int32_t time_offset; + if (packet.GetExtension(&time_offset)) { + ss << ", toffset: " << time_offset; + } + uint32_t send_time; + if (packet.GetExtension(&send_time)) { + ss << ", abs send time: " << send_time; + } + RTC_LOG(LS_INFO) << ss.str(); + last_packet_log_ms_ = now.ms(); + } +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h index 79e95b688..691144af0 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h @@ -18,6 +18,8 @@ #include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/sequence_checker.h" +#include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video_codecs/video_codec.h" #include "call/rtp_packet_sink_interface.h" @@ -27,7 +29,8 @@ #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h" +#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" +#include 
"modules/rtp_rtcp/source/capture_clock_offset_updater.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" @@ -45,7 +48,7 @@ #include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" #include "video/buffered_frame_decryptor.h" #include "video/rtp_video_stream_receiver_frame_transformer_delegate.h" @@ -65,7 +68,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, public RecoveredPacketReceiver, public RtpPacketSinkInterface, public KeyFrameRequestSender, - public video_coding::OnCompleteFrameCallback, + public OnCompleteFrameCallback, public OnDecryptedFrameCallback, public OnDecryptionStatusChangeCallback, public RtpVideoFrameReceiver { @@ -88,7 +91,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // The KeyFrameRequestSender is optional; if not provided, key frame // requests are sent via the internal RtpRtcp module. KeyFrameRequestSender* keyframe_request_sender, - video_coding::OnCompleteFrameCallback* complete_frame_callback, + OnCompleteFrameCallback* complete_frame_callback, rtc::scoped_refptr frame_decryptor, rtc::scoped_refptr frame_transformer); ~RtpVideoStreamReceiver2() override; @@ -150,12 +153,10 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, void RequestPacketRetransmit(const std::vector& sequence_numbers); // Implements OnCompleteFrameCallback. - void OnCompleteFrame( - std::unique_ptr frame) override; + void OnCompleteFrame(std::unique_ptr frame) override; // Implements OnDecryptedFrameCallback. 
- void OnDecryptedFrame( - std::unique_ptr frame) override; + void OnDecryptedFrame(std::unique_ptr frame) override; // Implements OnDecryptionStatusChangeCallback. void OnDecryptionStatusChange( @@ -177,17 +178,9 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, absl::optional LastReceivedPacketMs() const; absl::optional LastReceivedKeyframePacketMs() const; - // RtpDemuxer only forwards a given RTP packet to one sink. However, some - // sinks, such as FlexFEC, might wish to be informed of all of the packets - // a given sink receives (or any set of sinks). They may do so by registering - // themselves as secondary sinks. - void AddSecondarySink(RtpPacketSinkInterface* sink); - void RemoveSecondarySink(const RtpPacketSinkInterface* sink); - private: // Implements RtpVideoFrameReceiver. - void ManageFrame( - std::unique_ptr frame) override; + void ManageFrame(std::unique_ptr frame) override; // Used for buffering RTCP feedback messages and sending them all together. // Note: @@ -236,7 +229,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, bool decodability_flag; }; - SequenceChecker worker_task_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_task_checker_; KeyFrameRequestSender* const key_frame_request_sender_; NackSender* const nack_sender_; LossNotificationSender* const loss_notification_sender_; @@ -271,7 +264,10 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, ParseGenericDependenciesResult ParseGenericDependenciesExtension( const RtpPacketReceived& rtp_packet, RTPVideoHeader* video_header) RTC_RUN_ON(worker_task_checker_); - void OnAssembledFrame(std::unique_ptr frame); + void OnAssembledFrame(std::unique_ptr frame); + void UpdatePacketReceiveTimestamps(const RtpPacketReceived& packet, + bool is_keyframe) + RTC_RUN_ON(worker_task_checker_); Clock* const clock_; // Ownership of this object lies with VideoReceiveStream, which owns |this|. 
@@ -289,20 +285,21 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, ReceiveStatistics* const rtp_receive_statistics_; std::unique_ptr ulpfec_receiver_; - SequenceChecker worker_task_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_task_checker_; bool receiving_ RTC_GUARDED_BY(worker_task_checker_); int64_t last_packet_log_ms_ RTC_GUARDED_BY(worker_task_checker_); const std::unique_ptr rtp_rtcp_; - video_coding::OnCompleteFrameCallback* complete_frame_callback_; + OnCompleteFrameCallback* complete_frame_callback_; KeyFrameRequestSender* const keyframe_request_sender_; RtcpFeedbackBuffer rtcp_feedback_buffer_; const std::unique_ptr nack_module_; std::unique_ptr loss_notification_controller_; - video_coding::PacketBuffer packet_buffer_; + video_coding::PacketBuffer packet_buffer_ + RTC_GUARDED_BY(worker_task_checker_); UniqueTimestampCounter frame_counter_ RTC_GUARDED_BY(worker_task_checker_); SeqNumUnwrapper frame_id_unwrapper_ RTC_GUARDED_BY(worker_task_checker_); @@ -317,7 +314,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, absl::optional video_structure_frame_id_ RTC_GUARDED_BY(worker_task_checker_); - std::unique_ptr reference_finder_ + std::unique_ptr reference_finder_ RTC_GUARDED_BY(worker_task_checker_); absl::optional current_codec_ RTC_GUARDED_BY(worker_task_checker_); @@ -345,12 +342,13 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, bool has_received_frame_ RTC_GUARDED_BY(worker_task_checker_); - std::vector secondary_sinks_ - RTC_GUARDED_BY(worker_task_checker_); - absl::optional last_received_rtp_timestamp_ RTC_GUARDED_BY(worker_task_checker_); - absl::optional last_received_rtp_system_time_ms_ + absl::optional last_received_keyframe_rtp_timestamp_ + RTC_GUARDED_BY(worker_task_checker_); + absl::optional last_received_rtp_system_time_ + RTC_GUARDED_BY(worker_task_checker_); + absl::optional last_received_keyframe_rtp_system_time_ RTC_GUARDED_BY(worker_task_checker_); // Handles incoming 
encrypted frames and forwards them to the @@ -360,13 +358,21 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, bool frames_decryptable_ RTC_GUARDED_BY(worker_task_checker_); absl::optional last_color_space_; - AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_ + AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_ + RTC_GUARDED_BY(worker_task_checker_); + + CaptureClockOffsetUpdater capture_clock_offset_updater_ RTC_GUARDED_BY(worker_task_checker_); int64_t last_completed_picture_id_ = 0; rtc::scoped_refptr frame_transformer_delegate_; + + SeqNumUnwrapper rtp_seq_num_unwrapper_ + RTC_GUARDED_BY(worker_task_checker_); + std::map packet_infos_ + RTC_GUARDED_BY(worker_task_checker_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc index 31eb344d5..f2f81df3e 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -24,9 +24,8 @@ namespace { class TransformableVideoReceiverFrame : public TransformableVideoFrameInterface { public: - TransformableVideoReceiverFrame( - std::unique_ptr frame, - uint32_t ssrc) + TransformableVideoReceiverFrame(std::unique_ptr frame, + uint32_t ssrc) : frame_(std::move(frame)), metadata_(frame_->GetRtpVideoHeader()), ssrc_(ssrc) {} @@ -55,12 +54,12 @@ class TransformableVideoReceiverFrame const VideoFrameMetadata& GetMetadata() const override { return metadata_; } - std::unique_ptr ExtractFrame() && { + std::unique_ptr ExtractFrame() && { return std::move(frame_); } private: - std::unique_ptr frame_; + std::unique_ptr frame_; const VideoFrameMetadata metadata_; const uint32_t ssrc_; }; @@ -91,7 +90,7 @@ void RtpVideoStreamReceiverFrameTransformerDelegate::Reset() { } void 
RtpVideoStreamReceiverFrameTransformerDelegate::TransformFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&network_sequence_checker_); frame_transformer_->Transform( std::make_unique(std::move(frame), diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h index e687e7f47..ef05d91fd 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.h @@ -14,8 +14,9 @@ #include #include "api/frame_transformer_interface.h" +#include "api/sequence_checker.h" #include "modules/video_coding/frame_object.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread.h" namespace webrtc { @@ -24,8 +25,7 @@ namespace webrtc { // thread after transformation. class RtpVideoFrameReceiver { public: - virtual void ManageFrame( - std::unique_ptr frame) = 0; + virtual void ManageFrame(std::unique_ptr frame) = 0; protected: virtual ~RtpVideoFrameReceiver() = default; @@ -46,7 +46,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate void Reset(); // Delegates the call to FrameTransformerInterface::TransformFrame. - void TransformFrame(std::unique_ptr frame); + void TransformFrame(std::unique_ptr frame); // Implements TransformedFrameCallback. Can be called on any thread. Posts // the transformed frame to be managed on the |network_thread_|. 
@@ -61,7 +61,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate ~RtpVideoStreamReceiverFrameTransformerDelegate() override = default; private: - SequenceChecker network_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker network_sequence_checker_; RtpVideoFrameReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_); rtc::scoped_refptr frame_transformer_ RTC_GUARDED_BY(network_sequence_checker_); diff --git a/TMessagesProj/jni/voip/webrtc/video/send_delay_stats.h b/TMessagesProj/jni/voip/webrtc/video/send_delay_stats.h index 20f9804d6..fa76a1e39 100644 --- a/TMessagesProj/jni/voip/webrtc/video/send_delay_stats.h +++ b/TMessagesProj/jni/voip/webrtc/video/send_delay_stats.h @@ -27,6 +27,12 @@ namespace webrtc { +// Used to collect delay stats for video streams. The class gets callbacks +// from more than one threads and internally uses a mutex for data access +// synchronization. +// TODO(bugs.webrtc.org/11993): OnSendPacket and OnSentPacket will eventually +// be called consistently on the same thread. Once we're there, we should be +// able to avoid locking (at least for the fast path). class SendDelayStats : public SendPacketObserver { public: explicit SendDelayStats(Clock* clock); diff --git a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc index 92545ecf9..dbed7959f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc +++ b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.cc @@ -677,6 +677,7 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( void SendStatisticsProxy::OnEncoderReconfigured( const VideoEncoderConfig& config, const std::vector& streams) { + // Called on VideoStreamEncoder's encoder_queue_. 
MutexLock lock(&mutex_); if (content_type_ != config.content_type) { @@ -744,6 +745,8 @@ VideoSendStream::Stats SendStatisticsProxy::GetStats() { PurgeOldStats(); stats_.input_frame_rate = round(uma_container_->input_frame_rate_tracker_.ComputeRate()); + stats_.frames = + uma_container_->input_frame_rate_tracker_.TotalSampleCount(); stats_.content_type = content_type_ == VideoEncoderConfig::ContentType::kRealtimeVideo ? VideoContentType::UNSPECIFIED @@ -1289,17 +1292,6 @@ void SendStatisticsProxy::RtcpPacketTypesCounterUpdated( uma_container_->first_rtcp_stats_time_ms_ = clock_->TimeInMilliseconds(); } -void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics, - uint32_t ssrc) { - MutexLock lock(&mutex_); - VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); - if (!stats) - return; - - stats->rtcp_stats = statistics; - uma_container_->report_block_stats_.Store(ssrc, statistics); -} - void SendStatisticsProxy::OnReportBlockDataUpdated( ReportBlockData report_block_data) { MutexLock lock(&mutex_); @@ -1307,6 +1299,13 @@ void SendStatisticsProxy::OnReportBlockDataUpdated( GetStatsEntry(report_block_data.report_block().source_ssrc); if (!stats) return; + const RTCPReportBlock& report_block = report_block_data.report_block(); + uma_container_->report_block_stats_.Store( + /*ssrc=*/report_block.source_ssrc, + /*packets_lost=*/report_block.packets_lost, + /*extended_highest_sequence_number=*/ + report_block.extended_highest_sequence_number); + stats->report_block_data = std::move(report_block_data); } diff --git a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.h b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.h index 0de7df290..bfb221f65 100644 --- a/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/video/send_statistics_proxy.h @@ -37,7 +37,6 @@ namespace webrtc { class SendStatisticsProxy : public VideoStreamEncoderObserver, - public RtcpStatisticsCallback, public 
ReportBlockDataObserver, public RtcpPacketTypeCounterObserver, public StreamDataCountersCallback, @@ -106,9 +105,6 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, int GetSendFrameRate() const; protected: - // From RtcpStatisticsCallback. - void StatisticsUpdated(const RtcpStatistics& statistics, - uint32_t ssrc) override; // From ReportBlockDataObserver. void OnReportBlockDataUpdated(ReportBlockData report_block_data) override; // From RtcpPacketTypeCounterObserver. diff --git a/TMessagesProj/jni/voip/webrtc/video/test/mock_video_stream_encoder.h b/TMessagesProj/jni/voip/webrtc/video/test/mock_video_stream_encoder.h new file mode 100644 index 000000000..2af613e3a --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/test/mock_video_stream_encoder.h @@ -0,0 +1,66 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_ +#define VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_ + +#include + +#include "api/video/video_stream_encoder_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockVideoStreamEncoder : public VideoStreamEncoderInterface { + public: + MOCK_METHOD(void, + AddAdaptationResource, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(std::vector>, + GetAdaptationResources, + (), + (override)); + MOCK_METHOD(void, + SetSource, + (rtc::VideoSourceInterface*, + const DegradationPreference&), + (override)); + MOCK_METHOD(void, SetSink, (EncoderSink*, bool), (override)); + MOCK_METHOD(void, SetStartBitrate, (int), (override)); + MOCK_METHOD(void, SendKeyFrame, (), (override)); + MOCK_METHOD(void, + OnLossNotification, + (const VideoEncoder::LossNotification&), + (override)); + MOCK_METHOD(void, + OnBitrateUpdated, + (DataRate, DataRate, DataRate, uint8_t, int64_t, double), + (override)); + MOCK_METHOD(void, OnFrame, (const VideoFrame&), (override)); + MOCK_METHOD(void, + SetFecControllerOverride, + (FecControllerOverride*), + (override)); + MOCK_METHOD(void, Stop, (), (override)); + + MOCK_METHOD(void, + MockedConfigureEncoder, + (const VideoEncoderConfig&, size_t)); + // gtest generates implicit copy which is not allowed on VideoEncoderConfig, + // so we can't mock ConfigureEncoder directly. 
+ void ConfigureEncoder(VideoEncoderConfig config, + size_t max_data_payload_length) { + MockedConfigureEncoder(config, max_data_payload_length); + } +}; + +} // namespace webrtc + +#endif // VIDEO_TEST_MOCK_VIDEO_STREAM_ENCODER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc index c16c3b383..81dcf055b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.cc @@ -23,6 +23,7 @@ #include "rtc_base/memory_usage.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/time_utils.h" #include "system_wrappers/include/cpu_info.h" #include "test/call_test.h" #include "test/testsupport/file_utils.h" @@ -136,10 +137,12 @@ VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport, } for (uint32_t i = 0; i < num_cores; ++i) { - rtc::PlatformThread* thread = - new rtc::PlatformThread(&FrameComparisonThread, this, "Analyzer"); - thread->Start(); - comparison_thread_pool_.push_back(thread); + comparison_thread_pool_.push_back(rtc::PlatformThread::SpawnJoinable( + [this] { + while (CompareFrames()) { + } + }, + "Analyzer")); } if (!rtp_dump_name.empty()) { @@ -154,10 +157,8 @@ VideoAnalyzer::~VideoAnalyzer() { MutexLock lock(&comparison_lock_); quit_ = true; } - for (rtc::PlatformThread* thread : comparison_thread_pool_) { - thread->Stop(); - delete thread; - } + // Joins all threads. 
+ comparison_thread_pool_.clear(); } void VideoAnalyzer::SetReceiver(PacketReceiver* receiver) { @@ -532,12 +533,6 @@ void VideoAnalyzer::PollStats() { memory_usage_.AddSample(rtc::GetProcessResidentSizeBytes()); } -void VideoAnalyzer::FrameComparisonThread(void* obj) { - VideoAnalyzer* analyzer = static_cast(obj); - while (analyzer->CompareFrames()) { - } -} - bool VideoAnalyzer::CompareFrames() { if (AllFramesRecorded()) return false; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h index 18bacc16f..68861d1b5 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_analyzer.h @@ -302,7 +302,7 @@ class VideoAnalyzer : public PacketReceiver, const double avg_ssim_threshold_; bool is_quick_test_enabled_; - std::vector comparison_thread_pool_; + std::vector comparison_thread_pool_; rtc::Event comparison_available_event_; std::deque comparisons_ RTC_GUARDED_BY(comparison_lock_); bool quit_ RTC_GUARDED_BY(comparison_lock_); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc index a58aa1f33..b87957f1c 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc @@ -626,7 +626,7 @@ void VideoQualityTest::FillScalabilitySettings( encoder_config.spatial_layers = params->ss[video_idx].spatial_layers; encoder_config.simulcast_layers = std::vector(num_streams); encoder_config.video_stream_factory = - new rtc::RefCountedObject( + rtc::make_ref_counted( params->video[video_idx].codec, kDefaultMaxQp, params->screenshare[video_idx].enabled, true); params->ss[video_idx].streams = @@ -800,7 +800,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, params_.ss[video_idx].streams; } video_encoder_configs_[video_idx].video_stream_factory = - new rtc::RefCountedObject( + rtc::make_ref_counted( 
params_.video[video_idx].codec, params_.ss[video_idx].streams[0].max_qp, params_.screenshare[video_idx].enabled, true); @@ -829,7 +829,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, vp8_settings.numberOfTemporalLayers = static_cast( params_.video[video_idx].num_temporal_layers); video_encoder_configs_[video_idx].encoder_specific_settings = - new rtc::RefCountedObject< + rtc::make_ref_counted< VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); } else if (params_.video[video_idx].codec == "VP9") { VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); @@ -846,7 +846,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, vp9_settings.flexibleMode = true; } video_encoder_configs_[video_idx].encoder_specific_settings = - new rtc::RefCountedObject< + rtc::make_ref_counted< VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); } } else if (params_.ss[video_idx].num_spatial_layers > 1) { @@ -860,8 +860,8 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred; vp9_settings.automaticResizeOn = false; video_encoder_configs_[video_idx].encoder_specific_settings = - new rtc::RefCountedObject< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); + rtc::make_ref_counted( + vp9_settings); RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(), 1); // Min bitrate will be enforced by spatial layer config instead. 
@@ -871,7 +871,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); vp8_settings.automaticResizeOn = true; video_encoder_configs_[video_idx].encoder_specific_settings = - new rtc::RefCountedObject< + rtc::make_ref_counted< VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); } else if (params_.video[video_idx].codec == "VP9") { VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); @@ -879,7 +879,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, vp9_settings.automaticResizeOn = params_.ss[video_idx].num_spatial_layers == 1; video_encoder_configs_[video_idx].encoder_specific_settings = - new rtc::RefCountedObject< + rtc::make_ref_counted< VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); } else if (params_.video[video_idx].codec == "H264") { // Quality scaling is always on for H.264. @@ -898,18 +898,18 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); vp8_settings.automaticResizeOn = false; video_encoder_configs_[video_idx].encoder_specific_settings = - new rtc::RefCountedObject< + rtc::make_ref_counted< VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); } else if (params_.video[video_idx].codec == "VP9") { VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); vp9_settings.automaticResizeOn = false; video_encoder_configs_[video_idx].encoder_specific_settings = - new rtc::RefCountedObject< + rtc::make_ref_counted< VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); } else if (params_.video[video_idx].codec == "H264") { VideoCodecH264 h264_settings = VideoEncoder::GetDefaultH264Settings(); video_encoder_configs_[video_idx].encoder_specific_settings = - new rtc::RefCountedObject< + rtc::make_ref_counted< VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings); } } @@ -986,7 +986,7 @@ void 
VideoQualityTest::SetupThumbnails(Transport* send_transport, thumbnail_encoder_config.max_bitrate_bps = 50000; std::vector streams{params_.ss[0].streams[0]}; thumbnail_encoder_config.video_stream_factory = - new rtc::RefCountedObject(streams); + rtc::make_ref_counted(streams); thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers; thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy()); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc index 418901131..8ef681f72 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc @@ -24,6 +24,7 @@ #include "api/array_view.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/video/encoded_image.h" +#include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder_factory.h" @@ -31,7 +32,6 @@ #include "call/rtp_stream_receiver_controller_interface.h" #include "call/rtx_receive_stream.h" #include "common_video/include/incoming_video_stream.h" -#include "media/base/h264_profile_level_id.h" #include "modules/utility/include/process_thread.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_coding_defines.h" @@ -39,7 +39,6 @@ #include "modules/video_coding/timing.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/keyframe_interval_settings.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" @@ -60,7 +59,6 @@ constexpr int VideoReceiveStream::kMaxWaitForKeyFrameMs; namespace { -using video_coding::EncodedFrame; using ReturnReason = video_coding::FrameBuffer::ReturnReason; constexpr int kMinBaseMinimumDelayMs = 0; @@ -69,7 
+67,7 @@ constexpr int kMaxBaseMinimumDelayMs = 10000; constexpr int kMaxWaitForFrameMs = 3000; // Concrete instance of RecordableEncodedFrame wrapping needed content -// from video_coding::EncodedFrame. +// from EncodedFrame. class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { public: explicit WebRtcRecordableEncodedFrame(const EncodedFrame& frame) @@ -172,6 +170,11 @@ class NullVideoDecoder : public webrtc::VideoDecoder { int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; } + DecoderInfo GetDecoderInfo() const override { + DecoderInfo info; + info.implementation_name = "NullVideoDecoder"; + return info; + } const char* ImplementationName() const override { return "NullVideoDecoder"; } }; @@ -221,12 +224,8 @@ VideoReceiveStream::VideoReceiveStream( config_.frame_decryptor, config_.frame_transformer), rtp_stream_sync_(this), - max_wait_for_keyframe_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() - .MaxWaitForKeyframeMs() - .value_or(kMaxWaitForKeyFrameMs)), - max_wait_for_frame_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() - .MaxWaitForFrameMs() - .value_or(kMaxWaitForFrameMs)), + max_wait_for_keyframe_ms_(kMaxWaitForKeyFrameMs), + max_wait_for_frame_ms_(kMaxWaitForFrameMs), decode_queue_(task_queue_factory_->CreateTaskQueue( "DecodingQueue", TaskQueueFactory::Priority::HIGH)) { @@ -338,8 +337,7 @@ void VideoReceiveStream::Start() { for (const Decoder& decoder : config_.decoders) { std::unique_ptr video_decoder = - config_.decoder_factory->LegacyCreateVideoDecoder(decoder.video_format, - config_.stream_id); + config_.decoder_factory->CreateVideoDecoder(decoder.video_format); // If we still have no valid decoder, we have to create a "Null" decoder // that ignores all calls. 
The reason we can get into this state is that the // old decoder factory interface doesn't have a way to query supported @@ -513,6 +511,10 @@ void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) { int64_t video_playout_ntp_ms; int64_t sync_offset_ms; double estimated_freq_khz; + + // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for + // |video_frame.packet_infos|. But VideoFrame is const qualified here. + // TODO(tommi): GetStreamSyncOffsetInMs grabs three locks. One inside the // function itself, another in GetChannel() and a third in // GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming the function @@ -554,8 +556,7 @@ void VideoReceiveStream::RequestKeyFrame(int64_t timestamp_ms) { last_keyframe_request_ms_ = timestamp_ms; } -void VideoReceiveStream::OnCompleteFrame( - std::unique_ptr frame) { +void VideoReceiveStream::OnCompleteFrame(std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&network_sequence_checker_); // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. 
int64_t time_now_ms = clock_->TimeInMilliseconds(); @@ -639,17 +640,15 @@ void VideoReceiveStream::StartNextDecode() { [this](std::unique_ptr frame, ReturnReason res) { RTC_DCHECK_EQ(frame == nullptr, res == ReturnReason::kTimeout); RTC_DCHECK_EQ(frame != nullptr, res == ReturnReason::kFrameFound); - decode_queue_.PostTask([this, frame = std::move(frame)]() mutable { - RTC_DCHECK_RUN_ON(&decode_queue_); - if (decoder_stopped_) - return; - if (frame) { - HandleEncodedFrame(std::move(frame)); - } else { - HandleFrameBufferTimeout(); - } - StartNextDecode(); - }); + RTC_DCHECK_RUN_ON(&decode_queue_); + if (decoder_stopped_) + return; + if (frame) { + HandleEncodedFrame(std::move(frame)); + } else { + HandleFrameBufferTimeout(); + } + StartNextDecode(); }); } @@ -672,7 +671,7 @@ void VideoReceiveStream::HandleEncodedFrame( decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) { keyframe_required_ = false; frame_decoded_ = true; - rtp_video_stream_receiver_.FrameDecoded(frame->id.picture_id); + rtp_video_stream_receiver_.FrameDecoded(frame->Id()); if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) RequestKeyFrame(now_ms); @@ -685,7 +684,6 @@ void VideoReceiveStream::HandleEncodedFrame( } if (encoded_frame_buffer_function_) { - frame->Retain(); encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame)); } } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.h b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.h index 5fb9cf72d..f3b51892f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.h @@ -14,6 +14,7 @@ #include #include +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "api/video/recordable_encoded_frame.h" #include "call/rtp_packet_sink_interface.h" @@ -24,7 +25,7 @@ #include "modules/video_coding/frame_buffer2.h" #include "modules/video_coding/video_receiver2.h" #include 
"rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue.h" #include "system_wrappers/include/clock.h" #include "video/receive_statistics_proxy.h" @@ -44,10 +45,10 @@ class VCMTiming; namespace internal { -class VideoReceiveStream : public webrtc::VideoReceiveStream, +class VideoReceiveStream : public webrtc::DEPRECATED_VideoReceiveStream, public rtc::VideoSinkInterface, public NackSender, - public video_coding::OnCompleteFrameCallback, + public OnCompleteFrameCallback, public Syncable, public CallStatsObserver { public: @@ -110,9 +111,8 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, void SendNack(const std::vector& sequence_numbers, bool buffering_allowed) override; - // Implements video_coding::OnCompleteFrameCallback. - void OnCompleteFrame( - std::unique_ptr frame) override; + // Implements OnCompleteFrameCallback. + void OnCompleteFrame(std::unique_ptr frame) override; // Implements CallStatsObserver::OnRttUpdate void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; @@ -137,7 +137,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, private: int64_t GetWaitMs() const; void StartNextDecode() RTC_RUN_ON(decode_queue_); - void HandleEncodedFrame(std::unique_ptr frame) + void HandleEncodedFrame(std::unique_ptr frame) RTC_RUN_ON(decode_queue_); void HandleFrameBufferTimeout() RTC_RUN_ON(decode_queue_); void UpdatePlayoutDelays() const @@ -150,9 +150,9 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream, void UpdateHistograms(); - SequenceChecker worker_sequence_checker_; - SequenceChecker module_process_sequence_checker_; - SequenceChecker network_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker module_process_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker network_sequence_checker_; TaskQueueFactory* const 
task_queue_factory_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc index 8cc14e57c..eecdc8a9e 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc @@ -24,6 +24,7 @@ #include "api/array_view.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/video/encoded_image.h" +#include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder_factory.h" @@ -31,17 +32,16 @@ #include "call/rtp_stream_receiver_controller_interface.h" #include "call/rtx_receive_stream.h" #include "common_video/include/incoming_video_stream.h" -#include "media/base/h264_profile_level_id.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/timing.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/keyframe_interval_settings.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/thread_registry.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -58,7 +58,6 @@ constexpr int VideoReceiveStream2::kMaxWaitForKeyFrameMs; namespace { -using video_coding::EncodedFrame; using ReturnReason = video_coding::FrameBuffer::ReturnReason; constexpr int kMinBaseMinimumDelayMs = 0; @@ -66,17 +65,20 @@ constexpr int kMaxBaseMinimumDelayMs = 10000; constexpr int kMaxWaitForFrameMs = 3000; +constexpr int kDefaultMaximumPreStreamDecoders = 100; + // Concrete instance of RecordableEncodedFrame wrapping needed content -// from 
video_coding::EncodedFrame. +// from EncodedFrame. class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { public: - explicit WebRtcRecordableEncodedFrame(const EncodedFrame& frame) + explicit WebRtcRecordableEncodedFrame( + const EncodedFrame& frame, + RecordableEncodedFrame::EncodedResolution resolution) : buffer_(frame.GetEncodedData()), render_time_ms_(frame.RenderTime()), codec_(frame.CodecSpecific()->codecType), is_key_frame_(frame.FrameType() == VideoFrameType::kVideoFrameKey), - resolution_{frame.EncodedImage()._encodedWidth, - frame.EncodedImage()._encodedHeight} { + resolution_(resolution) { if (frame.ColorSpace()) { color_space_ = *frame.ColorSpace(); } @@ -129,6 +131,11 @@ VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) { associated_codec.codecType = kVideoCodecMultiplex; return associated_codec; } +#ifndef DISABLE_H265 + else if (codec.codecType == kVideoCodecH265) { + *(codec.H265()) = VideoEncoder::GetDefaultH265Settings(); + } +#endif FieldTrialOptional width("w"); FieldTrialOptional height("h"); @@ -179,6 +186,12 @@ class NullVideoDecoder : public webrtc::VideoDecoder { const char* ImplementationName() const override { return "NullVideoDecoder"; } }; +bool IsKeyFrameAndUnspecifiedResolution(const EncodedFrame& frame) { + return frame.FrameType() == VideoFrameType::kVideoFrameKey && + frame.EncodedImage()._encodedWidth == 0 && + frame.EncodedImage()._encodedHeight == 0; +} + // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. // Maximum time between frames before resetting the FrameBuffer to avoid RTP // timestamps wraparound to affect FrameBuffer. @@ -186,6 +199,23 @@ constexpr int kInactiveStreamThresholdMs = 600000; // 10 minutes. 
} // namespace +int DetermineMaxWaitForFrame(const VideoReceiveStream::Config& config, + bool is_keyframe) { + // A (arbitrary) conversion factor between the remotely signalled NACK buffer + // time (if not present defaults to 1000ms) and the maximum time we wait for a + // remote frame. Chosen to not change existing defaults when using not + // rtx-time. + const int conversion_factor = 3; + + if (config.rtp.nack.rtp_history_ms > 0 && + conversion_factor * config.rtp.nack.rtp_history_ms < kMaxWaitForFrameMs) { + return is_keyframe ? config.rtp.nack.rtp_history_ms + : conversion_factor * config.rtp.nack.rtp_history_ms; + } + return is_keyframe ? VideoReceiveStream2::kMaxWaitForKeyFrameMs + : kMaxWaitForFrameMs; +} + VideoReceiveStream2::VideoReceiveStream2( TaskQueueFactory* task_queue_factory, TaskQueueBase* current_queue, @@ -225,15 +255,12 @@ VideoReceiveStream2::VideoReceiveStream2( config_.frame_decryptor, config_.frame_transformer), rtp_stream_sync_(current_queue, this), - max_wait_for_keyframe_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() - .MaxWaitForKeyframeMs() - .value_or(kMaxWaitForKeyFrameMs)), - max_wait_for_frame_ms_(KeyframeIntervalSettings::ParseFromFieldTrials() - .MaxWaitForFrameMs() - .value_or(kMaxWaitForFrameMs)), + max_wait_for_keyframe_ms_(DetermineMaxWaitForFrame(config, true)), + max_wait_for_frame_ms_(DetermineMaxWaitForFrame(config, false)), low_latency_renderer_enabled_("enabled", true), low_latency_renderer_include_predecode_buffer_("include_predecode_buffer", true), + maximum_pre_stream_decoders_("max", kDefaultMaximumPreStreamDecoders), decode_queue_(task_queue_factory_->CreateTaskQueue( "DecodingQueue", TaskQueueFactory::Priority::HIGH)) { @@ -242,7 +269,6 @@ VideoReceiveStream2::VideoReceiveStream2( RTC_DCHECK(worker_thread_); RTC_DCHECK(config_.renderer); RTC_DCHECK(call_stats_); - module_process_sequence_checker_.Detach(); RTC_DCHECK(!config_.decoders.empty()); @@ -278,6 +304,11 @@ VideoReceiveStream2::VideoReceiveStream2( 
ParseFieldTrial({&low_latency_renderer_enabled_, &low_latency_renderer_include_predecode_buffer_}, field_trial::FindFullName("WebRTC-LowLatencyRenderer")); + ParseFieldTrial( + { + &maximum_pre_stream_decoders_, + }, + field_trial::FindFullName("WebRTC-PreStreamDecoders")); } VideoReceiveStream2::~VideoReceiveStream2() { @@ -325,41 +356,16 @@ void VideoReceiveStream2::Start() { renderer = this; } + int decoders_count = 0; for (const Decoder& decoder : config_.decoders) { - std::unique_ptr video_decoder = - config_.decoder_factory->LegacyCreateVideoDecoder(decoder.video_format, - config_.stream_id); - // If we still have no valid decoder, we have to create a "Null" decoder - // that ignores all calls. The reason we can get into this state is that the - // old decoder factory interface doesn't have a way to query supported - // codecs. - if (!video_decoder) { - video_decoder = std::make_unique(); + // Create up to maximum_pre_stream_decoders_ up front, wait the the other + // decoders until they are requested (i.e., we receive the corresponding + // payload). + if (decoders_count < maximum_pre_stream_decoders_) { + CreateAndRegisterExternalDecoder(decoder); + ++decoders_count; } - std::string decoded_output_file = - field_trial::FindFullName("WebRTC-DecoderDataDumpDirectory"); - // Because '/' can't be used inside a field trial parameter, we use ';' - // instead. - // This is only relevant to WebRTC-DecoderDataDumpDirectory - // field trial. ';' is chosen arbitrary. Even though it's a legal character - // in some file systems, we can sacrifice ability to use it in the path to - // dumped video, since it's developers-only feature for debugging. 
- absl::c_replace(decoded_output_file, ';', '/'); - if (!decoded_output_file.empty()) { - char filename_buffer[256]; - rtc::SimpleStringBuilder ssb(filename_buffer); - ssb << decoded_output_file << "/webrtc_receive_stream_" - << this->config_.rtp.remote_ssrc << "-" << rtc::TimeMicros() - << ".ivf"; - video_decoder = CreateFrameDumpingDecoderWrapper( - std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str())); - } - - video_decoders_.push_back(std::move(video_decoder)); - - video_receiver_.RegisterExternalDecoder(video_decoders_.back().get(), - decoder.payload_type); VideoCodec codec = CreateDecoderVideoCodec(decoder); const bool raw_payload = @@ -429,6 +435,41 @@ void VideoReceiveStream2::Stop() { transport_adapter_.Disable(); } +void VideoReceiveStream2::CreateAndRegisterExternalDecoder( + const Decoder& decoder) { + std::unique_ptr video_decoder = + config_.decoder_factory->CreateVideoDecoder(decoder.video_format); + // If we still have no valid decoder, we have to create a "Null" decoder + // that ignores all calls. The reason we can get into this state is that the + // old decoder factory interface doesn't have a way to query supported + // codecs. + if (!video_decoder) { + video_decoder = std::make_unique(); + } + + std::string decoded_output_file = + field_trial::FindFullName("WebRTC-DecoderDataDumpDirectory"); + // Because '/' can't be used inside a field trial parameter, we use ';' + // instead. + // This is only relevant to WebRTC-DecoderDataDumpDirectory + // field trial. ';' is chosen arbitrary. Even though it's a legal character + // in some file systems, we can sacrifice ability to use it in the path to + // dumped video, since it's developers-only feature for debugging. 
+ absl::c_replace(decoded_output_file, ';', '/'); + if (!decoded_output_file.empty()) { + char filename_buffer[256]; + rtc::SimpleStringBuilder ssb(filename_buffer); + ssb << decoded_output_file << "/webrtc_receive_stream_" + << this->config_.rtp.remote_ssrc << "-" << rtc::TimeMicros() << ".ivf"; + video_decoder = CreateFrameDumpingDecoderWrapper( + std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str())); + } + + video_decoders_.push_back(std::move(video_decoder)); + video_receiver_.RegisterExternalDecoder(video_decoders_.back().get(), + decoder.payload_type); +} + VideoReceiveStream::Stats VideoReceiveStream2::GetStats() const { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); VideoReceiveStream2::Stats stats = stats_proxy_.GetStats(); @@ -471,15 +512,6 @@ void VideoReceiveStream2::UpdateHistograms() { stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, nullptr); } -void VideoReceiveStream2::AddSecondarySink(RtpPacketSinkInterface* sink) { - rtp_video_stream_receiver_.AddSecondarySink(sink); -} - -void VideoReceiveStream2::RemoveSecondarySink( - const RtpPacketSinkInterface* sink) { - rtp_video_stream_receiver_.RemoveSecondarySink(sink); -} - bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); if (delay_ms < kMinBaseMinimumDelayMs || delay_ms > kMaxBaseMinimumDelayMs) { @@ -499,6 +531,9 @@ int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const { void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime()); + // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for + // |video_frame.packet_infos|. But VideoFrame is const qualified here. 
+ worker_thread_->PostTask( ToQueuedTask(task_safety_, [frame_meta, this]() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); @@ -516,6 +551,22 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { source_tracker_.OnFrameDelivered(video_frame.packet_infos()); config_.renderer->OnFrame(video_frame); + webrtc::MutexLock lock(&pending_resolution_mutex_); + if (pending_resolution_.has_value()) { + if (!pending_resolution_->empty() && + (video_frame.width() != static_cast(pending_resolution_->width) || + video_frame.height() != + static_cast(pending_resolution_->height))) { + RTC_LOG(LS_WARNING) + << "Recordable encoded frame stream resolution was reported as " + << pending_resolution_->width << "x" << pending_resolution_->height + << " but the stream is now " << video_frame.width() + << video_frame.height(); + } + pending_resolution_ = RecordableEncodedFrame::EncodedResolution{ + static_cast(video_frame.width()), + static_cast(video_frame.height())}; + } } void VideoReceiveStream2::SetFrameDecryptor( @@ -548,8 +599,7 @@ void VideoReceiveStream2::RequestKeyFrame(int64_t timestamp_ms) { }); } -void VideoReceiveStream2::OnCompleteFrame( - std::unique_ptr frame) { +void VideoReceiveStream2::OnCompleteFrame(std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. 
@@ -633,22 +683,20 @@ void VideoReceiveStream2::StartNextDecode() { [this](std::unique_ptr frame, ReturnReason res) { RTC_DCHECK_EQ(frame == nullptr, res == ReturnReason::kTimeout); RTC_DCHECK_EQ(frame != nullptr, res == ReturnReason::kFrameFound); - decode_queue_.PostTask([this, frame = std::move(frame)]() mutable { - RTC_DCHECK_RUN_ON(&decode_queue_); - if (decoder_stopped_) - return; - if (frame) { - HandleEncodedFrame(std::move(frame)); - } else { - int64_t now_ms = clock_->TimeInMilliseconds(); - worker_thread_->PostTask(ToQueuedTask( - task_safety_, [this, now_ms, wait_ms = GetMaxWaitMs()]() { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - HandleFrameBufferTimeout(now_ms, wait_ms); - })); - } - StartNextDecode(); - }); + RTC_DCHECK_RUN_ON(&decode_queue_); + if (decoder_stopped_) + return; + if (frame) { + HandleEncodedFrame(std::move(frame)); + } else { + int64_t now_ms = clock_->TimeInMilliseconds(); + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [this, now_ms, wait_ms = GetMaxWaitMs()]() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + HandleFrameBufferTimeout(now_ms, wait_ms); + })); + } + StartNextDecode(); }); } @@ -672,13 +720,26 @@ void VideoReceiveStream2::HandleEncodedFrame( const bool keyframe_request_is_due = now_ms >= (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_); - int decode_result = video_receiver_.Decode(frame.get()); + if (!video_receiver_.IsExternalDecoderRegistered(frame->PayloadType())) { + // Look for the decoder with this payload type. 
+ for (const Decoder& decoder : config_.decoders) { + if (decoder.payload_type == frame->PayloadType()) { + CreateAndRegisterExternalDecoder(decoder); + break; + } + } + } + + int64_t frame_id = frame->Id(); + bool received_frame_is_keyframe = + frame->FrameType() == VideoFrameType::kVideoFrameKey; + int decode_result = DecodeAndMaybeDispatchEncodedFrame(std::move(frame)); if (decode_result == WEBRTC_VIDEO_CODEC_OK || decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) { keyframe_required_ = false; frame_decoded_ = true; - decoded_frame_picture_id = frame->id.picture_id; + decoded_frame_picture_id = frame_id; if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) force_request_key_frame = true; @@ -690,9 +751,6 @@ void VideoReceiveStream2::HandleEncodedFrame( force_request_key_frame = true; } - bool received_frame_is_keyframe = - frame->FrameType() == VideoFrameType::kVideoFrameKey; - worker_thread_->PostTask(ToQueuedTask( task_safety_, [this, now_ms, received_frame_is_keyframe, force_request_key_frame, @@ -706,11 +764,66 @@ void VideoReceiveStream2::HandleEncodedFrame( force_request_key_frame, keyframe_request_is_due); })); +} - if (encoded_frame_buffer_function_) { - frame->Retain(); - encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame)); +int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame( + std::unique_ptr frame) { + // Running on decode_queue_. + + // If |buffered_encoded_frames_| grows out of control (=60 queued frames), + // maybe due to a stuck decoder, we just halt the process here and log the + // error. + const bool encoded_frame_output_enabled = + encoded_frame_buffer_function_ != nullptr && + buffered_encoded_frames_.size() < kBufferedEncodedFramesMaxSize; + EncodedFrame* frame_ptr = frame.get(); + if (encoded_frame_output_enabled) { + // If we receive a key frame with unset resolution, hold on dispatching the + // frame and following ones until we know a resolution of the stream. 
+ // NOTE: The code below has a race where it can report the wrong + // resolution for keyframes after an initial keyframe of other resolution. + // However, the only known consumer of this information is the W3C + // MediaRecorder and it will only use the resolution in the first encoded + // keyframe from WebRTC, so misreporting is fine. + buffered_encoded_frames_.push_back(std::move(frame)); + if (buffered_encoded_frames_.size() == kBufferedEncodedFramesMaxSize) + RTC_LOG(LS_ERROR) << "About to halt recordable encoded frame output due " + "to too many buffered frames."; + + webrtc::MutexLock lock(&pending_resolution_mutex_); + if (IsKeyFrameAndUnspecifiedResolution(*frame_ptr) && + !pending_resolution_.has_value()) + pending_resolution_.emplace(); } + + int decode_result = video_receiver_.Decode(frame_ptr); + if (encoded_frame_output_enabled) { + absl::optional + pending_resolution; + { + // Fish out |pending_resolution_| to avoid taking the mutex on every lap + // or dispatching under the mutex in the flush loop. + webrtc::MutexLock lock(&pending_resolution_mutex_); + if (pending_resolution_.has_value()) + pending_resolution = *pending_resolution_; + } + if (!pending_resolution.has_value() || !pending_resolution->empty()) { + // Flush the buffered frames. 
+ for (const auto& frame : buffered_encoded_frames_) { + RecordableEncodedFrame::EncodedResolution resolution{ + frame->EncodedImage()._encodedWidth, + frame->EncodedImage()._encodedHeight}; + if (IsKeyFrameAndUnspecifiedResolution(*frame)) { + RTC_DCHECK(!pending_resolution->empty()); + resolution = *pending_resolution; + } + encoded_frame_buffer_function_( + WebRtcRecordableEncodedFrame(*frame, resolution)); + } + buffered_encoded_frames_.clear(); + } + } + return decode_result; } void VideoReceiveStream2::HandleKeyFrameGeneration( diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h index e8e3edc3d..c22ce1c02 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h @@ -14,6 +14,7 @@ #include #include +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_factory.h" #include "api/units/timestamp.h" #include "api/video/recordable_encoded_frame.h" @@ -24,9 +25,10 @@ #include "modules/rtp_rtcp/source/source_tracker.h" #include "modules/video_coding/frame_buffer2.h" #include "modules/video_coding/video_receiver2.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" +#include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" #include "video/receive_statistics_proxy2.h" #include "video/rtp_streams_synchronizer2.h" @@ -76,13 +78,16 @@ struct VideoFrameMetaData { class VideoReceiveStream2 : public webrtc::VideoReceiveStream, public rtc::VideoSinkInterface, public NackSender, - public video_coding::OnCompleteFrameCallback, + public OnCompleteFrameCallback, public Syncable, public CallStatsObserver { public: // The default number of milliseconds to pass before re-requesting a key frame // to be sent. 
static constexpr int kMaxWaitForKeyFrameMs = 200; + // The maximum number of buffered encoded frames when encoded output is + // configured. + static constexpr size_t kBufferedEncodedFramesMaxSize = 60; VideoReceiveStream2(TaskQueueFactory* task_queue_factory, TaskQueueBase* current_queue, @@ -109,9 +114,6 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream, webrtc::VideoReceiveStream::Stats GetStats() const override; - void AddSecondarySink(RtpPacketSinkInterface* sink) override; - void RemoveSecondarySink(const RtpPacketSinkInterface* sink) override; - // SetBaseMinimumPlayoutDelayMs and GetBaseMinimumPlayoutDelayMs are called // from webrtc/api level and requested by user code. For e.g. blink/js layer // in Chromium. @@ -132,9 +134,8 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream, void SendNack(const std::vector& sequence_numbers, bool buffering_allowed) override; - // Implements video_coding::OnCompleteFrameCallback. - void OnCompleteFrame( - std::unique_ptr frame) override; + // Implements OnCompleteFrameCallback. 
+ void OnCompleteFrame(std::unique_ptr frame) override; // Implements CallStatsObserver::OnRttUpdate void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; @@ -157,9 +158,10 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream, void GenerateKeyFrame() override; private: + void CreateAndRegisterExternalDecoder(const Decoder& decoder); int64_t GetMaxWaitMs() const RTC_RUN_ON(decode_queue_); void StartNextDecode() RTC_RUN_ON(decode_queue_); - void HandleEncodedFrame(std::unique_ptr frame) + void HandleEncodedFrame(std::unique_ptr frame) RTC_RUN_ON(decode_queue_); void HandleFrameBufferTimeout(int64_t now_ms, int64_t wait_ms) RTC_RUN_ON(worker_sequence_checker_); @@ -174,11 +176,13 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream, RTC_RUN_ON(worker_sequence_checker_); bool IsReceivingKeyFrame(int64_t timestamp_ms) const RTC_RUN_ON(worker_sequence_checker_); + int DecodeAndMaybeDispatchEncodedFrame(std::unique_ptr frame) + RTC_RUN_ON(decode_queue_); void UpdateHistograms(); - SequenceChecker worker_sequence_checker_; - SequenceChecker module_process_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker module_process_sequence_checker_; TaskQueueFactory* const task_queue_factory_; @@ -256,6 +260,16 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream, // Set to true while we're requesting keyframes but not yet received one. bool keyframe_generation_requested_ RTC_GUARDED_BY(worker_sequence_checker_) = false; + // Lock to avoid unnecessary per-frame idle wakeups in the code. + webrtc::Mutex pending_resolution_mutex_; + // Signal from decode queue to OnFrame callback to fill pending_resolution_. + // absl::nullopt - no resolution needed. 0x0 - next OnFrame to fill with + // received resolution. Not 0x0 - OnFrame has filled a resolution. 
+ absl::optional pending_resolution_ + RTC_GUARDED_BY(pending_resolution_mutex_); + // Buffered encoded frames held while waiting for decoded resolution. + std::vector> buffered_encoded_frames_ + RTC_GUARDED_BY(decode_queue_); // Set by the field trial WebRTC-LowLatencyRenderer. The parameter |enabled| // determines if the low-latency renderer algorithm should be used for the @@ -267,6 +281,11 @@ class VideoReceiveStream2 : public webrtc::VideoReceiveStream, // queue. FieldTrialParameter low_latency_renderer_include_predecode_buffer_; + // Set by the field trial WebRTC-PreStreamDecoders. The parameter |max| + // determines the maximum number of decoders that are created up front before + // any video frame has been received. + FieldTrialParameter maximum_pre_stream_decoders_; + // Defined last so they are destroyed before all other members. rtc::TaskQueue decode_queue_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc index d6e1b6bbf..295e56bdf 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.cc @@ -12,7 +12,6 @@ #include #include "api/array_view.h" -#include "api/video/video_stream_encoder_create.h" #include "api/video/video_stream_encoder_settings.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extension_size.h" @@ -23,7 +22,9 @@ #include "rtc_base/task_utils/to_queued_task.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" +#include "video/adaptation/overuse_frame_detector.h" #include "video/video_send_stream_impl.h" +#include "video/video_stream_encoder.h" namespace webrtc { @@ -60,6 +61,22 @@ size_t CalculateMaxHeaderSize(const RtpConfig& config) { return header_size; } +VideoStreamEncoder::BitrateAllocationCallbackType +GetBitrateAllocationCallbackType(const VideoSendStream::Config& config) { + 
if (webrtc::RtpExtension::FindHeaderExtensionByUri( + config.rtp.extensions, + webrtc::RtpExtension::kVideoLayersAllocationUri)) { + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoLayersAllocation; + } + if (field_trial::IsEnabled("WebRTC-Target-Bitrate-Rtcp")) { + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoBitrateAllocation; + } + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoBitrateAllocationWhenScreenSharing; +} + } // namespace namespace internal { @@ -86,9 +103,11 @@ VideoSendStream::VideoSendStream( RTC_DCHECK(config_.encoder_settings.encoder_factory); RTC_DCHECK(config_.encoder_settings.bitrate_allocator_factory); - video_stream_encoder_ = - CreateVideoStreamEncoder(clock, task_queue_factory, num_cpu_cores, - &stats_proxy_, config_.encoder_settings); + video_stream_encoder_ = std::make_unique( + clock, num_cpu_cores, &stats_proxy_, config_.encoder_settings, + std::make_unique(&stats_proxy_), task_queue_factory, + GetBitrateAllocationCallbackType(config_)); + // TODO(srte): Initialization should not be done posted on a task queue. // Note that the posted task must not outlive this scope since the closure // references local variables. 
@@ -150,7 +169,7 @@ void VideoSendStream::UpdateActiveSimulcastLayers( void VideoSendStream::Start() { RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_LOG(LS_INFO) << "VideoSendStream::Start"; + RTC_DLOG(LS_INFO) << "VideoSendStream::Start"; VideoSendStreamImpl* send_stream = send_stream_.get(); worker_queue_->PostTask([this, send_stream] { send_stream->Start(); @@ -165,7 +184,7 @@ void VideoSendStream::Start() { void VideoSendStream::Stop() { RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_LOG(LS_INFO) << "VideoSendStream::Stop"; + RTC_DLOG(LS_INFO) << "VideoSendStream::Stop"; VideoSendStreamImpl* send_stream = send_stream_.get(); worker_queue_->PostTask([send_stream] { send_stream->Stop(); }); } @@ -190,10 +209,8 @@ void VideoSendStream::SetSource( } void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { - // TODO(perkj): Some test cases in VideoSendStreamTest call - // ReconfigureVideoEncoder from the network thread. - // RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(content_type_ == config.content_type); + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_EQ(content_type_, config.content_type); video_stream_encoder_->ConfigureEncoder( std::move(config), config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp)); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h index e10f4ad59..e36f279ca 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h @@ -16,13 +16,13 @@ #include #include "api/fec_controller.h" +#include "api/sequence_checker.h" #include "api/video/video_stream_encoder_interface.h" #include "call/bitrate_allocator.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" #include "rtc_base/event.h" #include "rtc_base/task_queue.h" -#include "rtc_base/thread_checker.h" #include "video/send_delay_stats.h" #include "video/send_statistics_proxy.h" @@ -97,7 +97,7 @@ 
class VideoSendStream : public webrtc::VideoSendStream { absl::optional GetPacingFactorOverride() const; - rtc::ThreadChecker thread_checker_; + SequenceChecker thread_checker_; rtc::TaskQueue* const worker_queue_; rtc::Event thread_sync_event_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc index ee4301862..ebd444500 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.cc @@ -20,6 +20,7 @@ #include "api/crypto/crypto_options.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/video_codecs/video_codec.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/video_send_stream.h" @@ -32,8 +33,6 @@ #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/synchronization/sequence_checker.h" -#include "rtc_base/thread_checker.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" @@ -147,7 +146,6 @@ RtpSenderObservers CreateObservers(RtcpRttStats* call_stats, observers.rtcp_rtt_stats = call_stats; observers.intra_frame_callback = encoder_feedback; observers.rtcp_loss_notification_observer = encoder_feedback; - observers.rtcp_stats = stats_proxy; observers.report_block_data_observer = stats_proxy; observers.rtp_stats = stats_proxy; observers.bitrate_observer = stats_proxy; @@ -403,7 +401,7 @@ void VideoSendStreamImpl::StartupVideoSendStream() { void VideoSendStreamImpl::Stop() { RTC_DCHECK_RUN_ON(worker_queue_); - RTC_LOG(LS_INFO) << "VideoSendStream::Stop"; + RTC_LOG(LS_INFO) << "VideoSendStreamImpl::Stop"; if (!rtp_video_sender_->IsActive()) return; TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); @@ -481,15 +479,8 @@ void 
VideoSendStreamImpl::OnBitrateAllocationUpdated( void VideoSendStreamImpl::OnVideoLayersAllocationUpdated( VideoLayersAllocation allocation) { - if (!worker_queue_->IsCurrent()) { - auto ptr = weak_ptr_; - worker_queue_->PostTask([allocation = std::move(allocation), ptr] { - if (!ptr.get()) - return; - ptr->OnVideoLayersAllocationUpdated(allocation); - }); - return; - } + // OnVideoLayersAllocationUpdated is handled on the encoder task queue in + // order to not race with OnEncodedImage callbacks. rtp_video_sender_->OnVideoLayersAllocationUpdated(allocation); } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc index 52e4ddbc4..78265cc7d 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc @@ -12,6 +12,7 @@ #include #include "absl/algorithm/container.h" +#include "api/sequence_checker.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/task_queue_base.h" #include "api/test/simulated_network.h" @@ -39,7 +40,6 @@ #include "rtc_base/platform_thread.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" @@ -1474,7 +1474,9 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { if (!rtp_packet.Parse(packet, length)) return DROP_PACKET; RTC_DCHECK(stream_); - VideoSendStream::Stats stats = stream_->GetStats(); + VideoSendStream::Stats stats; + SendTask(RTC_FROM_HERE, task_queue_, + [&]() { stats = stream_->GetStats(); }); if (!stats.substreams.empty()) { EXPECT_EQ(1u, stats.substreams.size()); int total_bitrate_bps = @@ -1982,7 +1984,6 @@ TEST_F(VideoSendStreamTest, public: EncoderObserver() : FakeEncoder(Clock::GetRealTimeClock()), - 
number_of_initializations_(0), last_initialized_frame_width_(0), last_initialized_frame_height_(0) {} @@ -2009,7 +2010,6 @@ TEST_F(VideoSendStreamTest, MutexLock lock(&mutex_); last_initialized_frame_width_ = config->width; last_initialized_frame_height_ = config->height; - ++number_of_initializations_; init_encode_called_.Set(); return FakeEncoder::InitEncode(config, settings); } @@ -2023,7 +2023,6 @@ TEST_F(VideoSendStreamTest, Mutex mutex_; rtc::Event init_encode_called_; - size_t number_of_initializations_ RTC_GUARDED_BY(&mutex_); int last_initialized_frame_width_ RTC_GUARDED_BY(&mutex_); int last_initialized_frame_height_ RTC_GUARDED_BY(&mutex_); }; @@ -2275,13 +2274,6 @@ TEST_F(VideoSendStreamTest, VideoSendStreamUpdateActiveSimulcastLayers) { GetVideoSendStream()->ReconfigureVideoEncoder( GetVideoEncoderConfig()->Copy()); }); - // TODO(bugs.webrtc.org/8807): Currently we require a hard reconfiguration to - // update the VideoBitrateAllocator and BitrateAllocator of which layers are - // active. Once the change is made for a "soft" reconfiguration we can remove - // the expecation for an encoder init. We can also test that bitrate changes - // when just updating individual active layers, which should change the - // bitrate set to the video encoder. - EXPECT_TRUE(encoder.WaitForEncoderInit()); EXPECT_TRUE(encoder.WaitBitrateChanged(true)); // Turning off both simulcast layers should trigger a bitrate change of 0. 
@@ -2432,14 +2424,16 @@ class VideoCodecConfigObserver : public test::SendTest, public test::FakeEncoder { public: VideoCodecConfigObserver(VideoCodecType video_codec_type, - const char* codec_name) + const char* codec_name, + TaskQueueBase* task_queue) : SendTest(VideoSendStreamTest::kDefaultTimeoutMs), FakeEncoder(Clock::GetRealTimeClock()), video_codec_type_(video_codec_type), codec_name_(codec_name), num_initializations_(0), stream_(nullptr), - encoder_factory_(this) { + encoder_factory_(this), + task_queue_(task_queue) { InitCodecSpecifics(); } @@ -2487,7 +2481,9 @@ class VideoCodecConfigObserver : public test::SendTest, // Change encoder settings to actually trigger reconfiguration. encoder_settings_.frameDroppingOn = !encoder_settings_.frameDroppingOn; encoder_config_.encoder_specific_settings = GetEncoderSpecificSettings(); - stream_->ReconfigureVideoEncoder(std::move(encoder_config_)); + SendTask(RTC_FROM_HERE, task_queue_, [&]() { + stream_->ReconfigureVideoEncoder(std::move(encoder_config_)); + }); ASSERT_TRUE( init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs)); EXPECT_EQ(2u, num_initializations_) @@ -2509,6 +2505,7 @@ class VideoCodecConfigObserver : public test::SendTest, VideoSendStream* stream_; test::VideoEncoderProxyFactory encoder_factory_; VideoEncoderConfig encoder_config_; + TaskQueueBase* task_queue_; }; template <> @@ -2541,8 +2538,8 @@ void VideoCodecConfigObserver::VerifyCodecSpecifics( template <> rtc::scoped_refptr VideoCodecConfigObserver::GetEncoderSpecificSettings() const { - return new rtc::RefCountedObject< - VideoEncoderConfig::H264EncoderSpecificSettings>(encoder_settings_); + return rtc::make_ref_counted( + encoder_settings_); } template <> @@ -2575,8 +2572,8 @@ void VideoCodecConfigObserver::VerifyCodecSpecifics( template <> rtc::scoped_refptr VideoCodecConfigObserver::GetEncoderSpecificSettings() const { - return new rtc::RefCountedObject< - VideoEncoderConfig::Vp8EncoderSpecificSettings>(encoder_settings_); + 
return rtc::make_ref_counted( + encoder_settings_); } template <> @@ -2609,17 +2606,19 @@ void VideoCodecConfigObserver::VerifyCodecSpecifics( template <> rtc::scoped_refptr VideoCodecConfigObserver::GetEncoderSpecificSettings() const { - return new rtc::RefCountedObject< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(encoder_settings_); + return rtc::make_ref_counted( + encoder_settings_); } TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp8Config) { - VideoCodecConfigObserver test(kVideoCodecVP8, "VP8"); + VideoCodecConfigObserver test(kVideoCodecVP8, "VP8", + task_queue()); RunBaseTest(&test); } TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) { - VideoCodecConfigObserver test(kVideoCodecVP9, "VP9"); + VideoCodecConfigObserver test(kVideoCodecVP9, "VP9", + task_queue()); RunBaseTest(&test); } @@ -2631,7 +2630,8 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) { #define MAYBE_EncoderSetupPropagatesH264Config EncoderSetupPropagatesH264Config #endif TEST_F(VideoSendStreamTest, MAYBE_EncoderSetupPropagatesH264Config) { - VideoCodecConfigObserver test(kVideoCodecH264, "H264"); + VideoCodecConfigObserver test(kVideoCodecH264, "H264", + task_queue()); RunBaseTest(&test); } @@ -2736,7 +2736,7 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) { send_config->encoder_settings.encoder_factory = &encoder_factory_; EXPECT_EQ(1u, encoder_config->number_of_streams); encoder_config->video_stream_factory = - new rtc::RefCountedObject(); + rtc::make_ref_counted(); EXPECT_EQ(1u, encoder_config->simulcast_layers.size()); encoder_config->simulcast_layers[0].num_temporal_layers = 2; encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen; @@ -2914,7 +2914,9 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // Encoder rate is capped by EncoderConfig max_bitrate_bps. 
WaitForSetRates(kMaxBitrateKbps); encoder_config_.max_bitrate_bps = kLowerMaxBitrateKbps * 1000; - send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + SendTask(RTC_FROM_HERE, task_queue_, [&]() { + send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + }); ASSERT_TRUE(create_rate_allocator_event_.Wait( VideoSendStreamTest::kDefaultTimeoutMs)); EXPECT_EQ(2, num_rate_allocator_creations_) @@ -2924,7 +2926,9 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { EXPECT_EQ(1, num_encoder_initializations_); encoder_config_.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000; - send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + SendTask(RTC_FROM_HERE, task_queue_, [&]() { + send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy()); + }); ASSERT_TRUE(create_rate_allocator_event_.Wait( VideoSendStreamTest::kDefaultTimeoutMs)); EXPECT_EQ(3, num_rate_allocator_creations_) @@ -2965,11 +2969,12 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { class ScreencastTargetBitrateTest : public test::SendTest, public test::FakeEncoder { public: - ScreencastTargetBitrateTest() + explicit ScreencastTargetBitrateTest(TaskQueueBase* task_queue) : SendTest(kDefaultTimeoutMs), test::FakeEncoder(Clock::GetRealTimeClock()), send_stream_(nullptr), - encoder_factory_(this) {} + encoder_factory_(this), + task_queue_(task_queue) {} private: int32_t Encode(const VideoFrame& input_image, @@ -3017,7 +3022,9 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { void PerformTest() override { EXPECT_TRUE(Wait()) << "Timed out while waiting for the encoder to send one frame."; - VideoSendStream::Stats stats = send_stream_->GetStats(); + VideoSendStream::Stats stats; + SendTask(RTC_FROM_HERE, task_queue_, + [&]() { stats = send_stream_->GetStats(); }); for (size_t i = 0; i < kNumStreams; ++i) { ASSERT_TRUE(stats.substreams.find(kVideoSendSsrcs[i]) != @@ -3039,7 +3046,8 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { 
VideoSendStream* send_stream_; test::VideoEncoderProxyFactory encoder_factory_; - } test; + TaskQueueBase* const task_queue_; + } test(task_queue()); RunBaseTest(&test); } @@ -3074,8 +3082,9 @@ class Vp9HeaderObserver : public test::SendTest { send_config->rtp.payload_name = "VP9"; send_config->rtp.payload_type = kVp9PayloadType; ModifyVideoConfigsHook(send_config, receive_configs, encoder_config); - encoder_config->encoder_specific_settings = new rtc::RefCountedObject< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings_); + encoder_config->encoder_specific_settings = + rtc::make_ref_counted( + vp9_settings_); EXPECT_EQ(1u, encoder_config->number_of_streams); EXPECT_EQ(1u, encoder_config->simulcast_layers.size()); encoder_config->simulcast_layers[0].num_temporal_layers = @@ -3809,14 +3818,15 @@ class ContentSwitchTest : public test::SendTest { }; static const uint32_t kMinPacketsToSend = 50; - explicit ContentSwitchTest(T* stream_reset_fun) + explicit ContentSwitchTest(T* stream_reset_fun, TaskQueueBase* task_queue) : SendTest(test::CallTest::kDefaultTimeoutMs), call_(nullptr), state_(StreamState::kBeforeSwitch), send_stream_(nullptr), send_stream_config_(nullptr), packets_sent_(0), - stream_resetter_(stream_reset_fun) { + stream_resetter_(stream_reset_fun), + task_queue_(task_queue) { RTC_DCHECK(stream_resetter_); } @@ -3850,8 +3860,10 @@ class ContentSwitchTest : public test::SendTest { float pacing_factor = internal_send_peer.GetPacingFactorOverride().value_or(0.0f); float expected_pacing_factor = 1.1; // Strict pacing factor. - if (send_stream_->GetStats().content_type == - webrtc::VideoContentType::SCREENSHARE) { + VideoSendStream::Stats stats; + SendTask(RTC_FROM_HERE, task_queue_, + [&stats, stream = send_stream_]() { stats = stream->GetStats(); }); + if (stats.content_type == webrtc::VideoContentType::SCREENSHARE) { expected_pacing_factor = 1.0f; // Currently used pacing factor in ALR. 
} @@ -3919,6 +3931,7 @@ class ContentSwitchTest : public test::SendTest { VideoEncoderConfig encoder_config_; uint32_t packets_sent_ RTC_GUARDED_BY(mutex_); T* stream_resetter_; + TaskQueueBase* task_queue_; }; TEST_F(VideoSendStreamTest, SwitchesToScreenshareAndBack) { @@ -3938,7 +3951,7 @@ TEST_F(VideoSendStreamTest, SwitchesToScreenshareAndBack) { Start(); }); }; - ContentSwitchTest test(&reset_fun); + ContentSwitchTest test(&reset_fun, task_queue()); RunBaseTest(&test); } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc index 376eb85ea..4cd12d8a2 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc @@ -29,7 +29,14 @@ std::string WantsToString(const rtc::VideoSinkWants& wants) { << " max_pixel_count=" << wants.max_pixel_count << " target_pixel_count=" << (wants.target_pixel_count.has_value() ? 
std::to_string(wants.target_pixel_count.value()) - : "null"); + : "null") + << " resolutions={"; + for (size_t i = 0; i < wants.resolutions.size(); ++i) { + if (i != 0) + ss << ","; + ss << wants.resolutions[i].width << "x" << wants.resolutions[i].height; + } + ss << "}"; return ss.Release(); } @@ -104,6 +111,12 @@ int VideoSourceSinkController::resolution_alignment() const { return resolution_alignment_; } +const std::vector& +VideoSourceSinkController::resolutions() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return resolutions_; +} + void VideoSourceSinkController::SetRestrictions( VideoSourceRestrictions restrictions) { RTC_DCHECK_RUN_ON(&sequence_checker_); @@ -133,6 +146,12 @@ void VideoSourceSinkController::SetResolutionAlignment( resolution_alignment_ = resolution_alignment; } +void VideoSourceSinkController::SetResolutions( + std::vector resolutions) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + resolutions_ = std::move(resolutions); +} + // RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_) rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() const { @@ -161,6 +180,7 @@ rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() frame_rate_upper_limit_.has_value() ? 
static_cast(frame_rate_upper_limit_.value()) : std::numeric_limits::max()); + wants.resolutions = resolutions_; return wants; } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h index ed8f99097..c61084f99 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h @@ -12,13 +12,15 @@ #define VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_ #include +#include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "call/adaptation/video_source_restrictions.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -45,6 +47,7 @@ class VideoSourceSinkController { absl::optional frame_rate_upper_limit() const; bool rotation_applied() const; int resolution_alignment() const; + const std::vector& resolutions() const; // Updates the settings stored internally. In order for these settings to be // applied to the sink, PushSourceSinkSettings() must subsequently be called. @@ -54,6 +57,7 @@ class VideoSourceSinkController { void SetFrameRateUpperLimit(absl::optional frame_rate_upper_limit); void SetRotationApplied(bool rotation_applied); void SetResolutionAlignment(int resolution_alignment); + void SetResolutions(std::vector resolutions); private: rtc::VideoSinkWants CurrentSettingsToSinkWants() const @@ -62,7 +66,7 @@ class VideoSourceSinkController { // Used to ensure that this class is called on threads/sequences that it and // downstream implementations were designed for. // In practice, this represent's libjingle's worker thread. 
- SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; rtc::VideoSinkInterface* const sink_; rtc::VideoSourceInterface* source_ @@ -78,6 +82,8 @@ class VideoSourceSinkController { RTC_GUARDED_BY(&sequence_checker_); bool rotation_applied_ RTC_GUARDED_BY(&sequence_checker_) = false; int resolution_alignment_ RTC_GUARDED_BY(&sequence_checker_) = 1; + std::vector resolutions_ + RTC_GUARDED_BY(&sequence_checker_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc index f5b0f5f78..b6d754e8b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.cc @@ -50,8 +50,7 @@ VideoStreamDecoderImpl::~VideoStreamDecoderImpl() { shut_down_ = true; } -void VideoStreamDecoderImpl::OnFrame( - std::unique_ptr frame) { +void VideoStreamDecoderImpl::OnFrame(std::unique_ptr frame) { if (!bookkeeping_queue_.IsCurrent()) { bookkeeping_queue_.PostTask([this, frame = std::move(frame)]() mutable { OnFrame(std::move(frame)); @@ -63,11 +62,10 @@ void VideoStreamDecoderImpl::OnFrame( RTC_DCHECK_RUN_ON(&bookkeeping_queue_); - uint64_t continuous_pid = frame_buffer_.InsertFrame(std::move(frame)); - video_coding::VideoLayerFrameId continuous_id(continuous_pid, 0); - if (last_continuous_id_ < continuous_id) { - last_continuous_id_ = continuous_id; - callbacks_->OnContinuousUntil(last_continuous_id_); + int64_t continuous_frame_id = frame_buffer_.InsertFrame(std::move(frame)); + if (last_continuous_frame_id_ < continuous_frame_id) { + last_continuous_frame_id_ = continuous_frame_id; + callbacks_->OnContinuousUntil(last_continuous_frame_id_); } } @@ -124,8 +122,7 @@ VideoDecoder* VideoStreamDecoderImpl::GetDecoder(int payload_type) { return decoder_.get(); } -void VideoStreamDecoderImpl::SaveFrameInfo( - const video_coding::EncodedFrame& frame) { +void 
VideoStreamDecoderImpl::SaveFrameInfo(const EncodedFrame& frame) { FrameInfo* frame_info = &frame_info_[next_frame_info_index_]; frame_info->timestamp = frame.Timestamp(); frame_info->decode_start_time_ms = rtc::TimeMillis(); @@ -140,7 +137,7 @@ void VideoStreamDecoderImpl::StartNextDecode() { frame_buffer_.NextFrame( max_wait_time, keyframe_required_, &bookkeeping_queue_, - [this](std::unique_ptr frame, + [this](std::unique_ptr frame, video_coding::FrameBuffer::ReturnReason res) mutable { RTC_DCHECK_RUN_ON(&bookkeeping_queue_); OnNextFrameCallback(std::move(frame), res); @@ -148,7 +145,7 @@ void VideoStreamDecoderImpl::StartNextDecode() { } void VideoStreamDecoderImpl::OnNextFrameCallback( - std::unique_ptr frame, + std::unique_ptr frame, video_coding::FrameBuffer::ReturnReason result) { switch (result) { case video_coding::FrameBuffer::kFrameFound: { @@ -205,7 +202,7 @@ void VideoStreamDecoderImpl::OnNextFrameCallback( } VideoStreamDecoderImpl::DecodeResult VideoStreamDecoderImpl::DecodeFrame( - std::unique_ptr frame) { + std::unique_ptr frame) { RTC_DCHECK(frame); VideoDecoder* decoder = GetDecoder(frame->PayloadType()); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h index 69a819505..106f38340 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_decoder_impl.h @@ -16,13 +16,13 @@ #include #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/video/video_stream_decoder.h" #include "modules/video_coding/frame_buffer2.h" #include "modules/video_coding/timing.h" #include "rtc_base/platform_thread.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" -#include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -37,7 +37,7 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { 
~VideoStreamDecoderImpl() override; - void OnFrame(std::unique_ptr frame) override; + void OnFrame(std::unique_ptr frame) override; void SetMinPlayoutDelay(TimeDelta min_delay) override; void SetMaxPlayoutDelay(TimeDelta max_delay) override; @@ -69,11 +69,10 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { VideoContentType content_type; }; - void SaveFrameInfo(const video_coding::EncodedFrame& frame) - RTC_RUN_ON(bookkeeping_queue_); + void SaveFrameInfo(const EncodedFrame& frame) RTC_RUN_ON(bookkeeping_queue_); FrameInfo* GetFrameInfo(int64_t timestamp) RTC_RUN_ON(bookkeeping_queue_); void StartNextDecode() RTC_RUN_ON(bookkeeping_queue_); - void OnNextFrameCallback(std::unique_ptr frame, + void OnNextFrameCallback(std::unique_ptr frame, video_coding::FrameBuffer::ReturnReason res) RTC_RUN_ON(bookkeeping_queue_); void OnDecodedFrameCallback(VideoFrame& decodedImage, // NOLINT @@ -82,8 +81,7 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { VideoDecoder* GetDecoder(int payload_type) RTC_RUN_ON(decode_queue_); VideoStreamDecoderImpl::DecodeResult DecodeFrame( - std::unique_ptr frame) - RTC_RUN_ON(decode_queue_); + std::unique_ptr frame) RTC_RUN_ON(decode_queue_); VCMTiming timing_; DecodeCallbacks decode_callbacks_; @@ -96,8 +94,7 @@ class VideoStreamDecoderImpl : public VideoStreamDecoderInterface { int next_frame_info_index_ RTC_GUARDED_BY(bookkeeping_queue_); VideoStreamDecoderInterface::Callbacks* const callbacks_ RTC_PT_GUARDED_BY(bookkeeping_queue_); - video_coding::VideoLayerFrameId last_continuous_id_ - RTC_GUARDED_BY(bookkeeping_queue_); + int64_t last_continuous_frame_id_ RTC_GUARDED_BY(bookkeeping_queue_) = -1; bool keyframe_required_ RTC_GUARDED_BY(bookkeeping_queue_); absl::optional current_payload_type_ RTC_GUARDED_BY(decode_queue_); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc index b3bef4901..c7a470895 100644 --- 
a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc @@ -19,6 +19,7 @@ #include "absl/algorithm/container.h" #include "absl/types/optional.h" +#include "api/sequence_checker.h" #include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "api/video/encoded_image.h" @@ -37,11 +38,12 @@ #include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/experiments/alr_experiment.h" +#include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/field_trial.h" @@ -224,12 +226,12 @@ VideoLayersAllocation CreateVideoLayersAllocation( return layers_allocation; } - if (encoder_config.numberOfSimulcastStreams > 0) { + if (encoder_config.numberOfSimulcastStreams > 1) { layers_allocation.resolution_and_frame_rate_is_valid = true; for (int si = 0; si < encoder_config.numberOfSimulcastStreams; ++si) { if (!target_bitrate.IsSpatialLayerUsed(si) || target_bitrate.GetSpatialLayerSum(si) == 0) { - break; + continue; } layers_allocation.active_spatial_layers.emplace_back(); VideoLayersAllocation::SpatialLayer& spatial_layer = @@ -264,19 +266,243 @@ VideoLayersAllocation CreateVideoLayersAllocation( } } // Encoder may drop frames internally if `maxFramerate` is set. 
- spatial_layer.frame_rate_fps = std::min( - static_cast(encoder_config.simulcastStream[si].maxFramerate), - static_cast( + spatial_layer.frame_rate_fps = std::min( + encoder_config.simulcastStream[si].maxFramerate, + rtc::saturated_cast( + (current_rate.framerate_fps * frame_rate_fraction) / + VideoEncoder::EncoderInfo::kMaxFramerateFraction)); + } + } else if (encoder_config.numberOfSimulcastStreams == 1) { + // TODO(bugs.webrtc.org/12000): Implement support for AV1 with + // scalability. + const bool higher_spatial_depend_on_lower = + encoder_config.codecType == kVideoCodecVP9 && + encoder_config.VP9().interLayerPred == InterLayerPredMode::kOn; + layers_allocation.resolution_and_frame_rate_is_valid = true; + + std::vector aggregated_spatial_bitrate( + webrtc::kMaxTemporalStreams, DataRate::Zero()); + for (int si = 0; si < webrtc::kMaxSpatialLayers; ++si) { + layers_allocation.resolution_and_frame_rate_is_valid = true; + if (!target_bitrate.IsSpatialLayerUsed(si) || + target_bitrate.GetSpatialLayerSum(si) == 0) { + break; + } + layers_allocation.active_spatial_layers.emplace_back(); + VideoLayersAllocation::SpatialLayer& spatial_layer = + layers_allocation.active_spatial_layers.back(); + spatial_layer.width = encoder_config.spatialLayers[si].width; + spatial_layer.height = encoder_config.spatialLayers[si].height; + spatial_layer.rtp_stream_index = 0; + spatial_layer.spatial_id = si; + auto frame_rate_fraction = + VideoEncoder::EncoderInfo::kMaxFramerateFraction; + if (encoder_info.fps_allocation[si].size() == 1) { + // One TL is signalled to be used by the encoder. Do not distribute + // bitrate allocation across TLs (use sum at tl:0). 
+ DataRate aggregated_temporal_bitrate = + DataRate::BitsPerSec(target_bitrate.GetSpatialLayerSum(si)); + aggregated_spatial_bitrate[0] += aggregated_temporal_bitrate; + if (higher_spatial_depend_on_lower) { + spatial_layer.target_bitrate_per_temporal_layer.push_back( + aggregated_spatial_bitrate[0]); + } else { + spatial_layer.target_bitrate_per_temporal_layer.push_back( + aggregated_temporal_bitrate); + } + frame_rate_fraction = encoder_info.fps_allocation[si][0]; + } else { // Temporal layers are supported. + DataRate aggregated_temporal_bitrate = DataRate::Zero(); + for (size_t ti = 0; + ti < encoder_config.spatialLayers[si].numberOfTemporalLayers; + ++ti) { + if (!target_bitrate.HasBitrate(si, ti)) { + break; + } + if (ti < encoder_info.fps_allocation[si].size()) { + // Use frame rate of the top used temporal layer. + frame_rate_fraction = encoder_info.fps_allocation[si][ti]; + } + aggregated_temporal_bitrate += + DataRate::BitsPerSec(target_bitrate.GetBitrate(si, ti)); + if (higher_spatial_depend_on_lower) { + spatial_layer.target_bitrate_per_temporal_layer.push_back( + aggregated_temporal_bitrate + aggregated_spatial_bitrate[ti]); + aggregated_spatial_bitrate[ti] += aggregated_temporal_bitrate; + } else { + spatial_layer.target_bitrate_per_temporal_layer.push_back( + aggregated_temporal_bitrate); + } + } + } + // Encoder may drop frames internally if `maxFramerate` is set. + spatial_layer.frame_rate_fps = std::min( + encoder_config.spatialLayers[si].maxFramerate, + rtc::saturated_cast( (current_rate.framerate_fps * frame_rate_fraction) / VideoEncoder::EncoderInfo::kMaxFramerateFraction)); } - } else { - // TODO(bugs.webrtc.org/12000): Implement support for kSVC and full SVC. 
} return layers_allocation; } +VideoEncoder::EncoderInfo GetEncoderInfoWithBitrateLimitUpdate( + const VideoEncoder::EncoderInfo& info, + const VideoEncoderConfig& encoder_config, + bool default_limits_allowed) { + if (!default_limits_allowed || !info.resolution_bitrate_limits.empty() || + encoder_config.simulcast_layers.size() <= 1) { + return info; + } + // Bitrate limits are not configured and more than one layer is used, use + // the default limits (bitrate limits are not used for simulcast). + VideoEncoder::EncoderInfo new_info = info; + new_info.resolution_bitrate_limits = + EncoderInfoSettings::GetDefaultSinglecastBitrateLimits( + encoder_config.codec_type); + return new_info; +} + +int NumActiveStreams(const std::vector& streams) { + int num_active = 0; + for (const auto& stream : streams) { + if (stream.active) + ++num_active; + } + return num_active; +} + +void ApplyVp9BitrateLimits(const VideoEncoder::EncoderInfo& encoder_info, + const VideoEncoderConfig& encoder_config, + VideoCodec* codec) { + if (codec->codecType != VideoCodecType::kVideoCodecVP9 || + encoder_config.simulcast_layers.size() <= 1 || + VideoStreamEncoderResourceManager::IsSimulcast(encoder_config)) { + // Resolution bitrate limits usage is restricted to singlecast. + return; + } + + // Get bitrate limits for active stream. + absl::optional pixels = + VideoStreamAdapter::GetSingleActiveLayerPixels(*codec); + if (!pixels.has_value()) { + return; + } + absl::optional bitrate_limits = + encoder_info.GetEncoderBitrateLimitsForResolution(*pixels); + if (!bitrate_limits.has_value()) { + return; + } + + // Index for the active stream. 
+ absl::optional index; + for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i) { + if (encoder_config.simulcast_layers[i].active) + index = i; + } + if (!index.has_value()) { + return; + } + + int min_bitrate_bps; + if (encoder_config.simulcast_layers[*index].min_bitrate_bps <= 0) { + min_bitrate_bps = bitrate_limits->min_bitrate_bps; + } else { + min_bitrate_bps = + std::max(bitrate_limits->min_bitrate_bps, + encoder_config.simulcast_layers[*index].min_bitrate_bps); + } + int max_bitrate_bps; + if (encoder_config.simulcast_layers[*index].max_bitrate_bps <= 0) { + max_bitrate_bps = bitrate_limits->max_bitrate_bps; + } else { + max_bitrate_bps = + std::min(bitrate_limits->max_bitrate_bps, + encoder_config.simulcast_layers[*index].max_bitrate_bps); + } + if (min_bitrate_bps >= max_bitrate_bps) { + RTC_LOG(LS_WARNING) << "Bitrate limits not used, min_bitrate_bps " + << min_bitrate_bps << " >= max_bitrate_bps " + << max_bitrate_bps; + return; + } + + for (int i = 0; i < codec->VP9()->numberOfSpatialLayers; ++i) { + if (codec->spatialLayers[i].active) { + codec->spatialLayers[i].minBitrate = min_bitrate_bps / 1000; + codec->spatialLayers[i].maxBitrate = max_bitrate_bps / 1000; + codec->spatialLayers[i].targetBitrate = + std::min(codec->spatialLayers[i].targetBitrate, + codec->spatialLayers[i].maxBitrate); + break; + } + } +} + +void ApplyEncoderBitrateLimitsIfSingleActiveStream( + const VideoEncoder::EncoderInfo& encoder_info, + const std::vector& encoder_config_layers, + std::vector* streams) { + // Apply limits if simulcast with one active stream (expect lowest). + bool single_active_stream = + streams->size() > 1 && NumActiveStreams(*streams) == 1 && + !streams->front().active && NumActiveStreams(encoder_config_layers) == 1; + if (!single_active_stream) { + return; + } + + // Index for the active stream. 
+ size_t index = 0; + for (size_t i = 0; i < encoder_config_layers.size(); ++i) { + if (encoder_config_layers[i].active) + index = i; + } + if (streams->size() < (index + 1) || !(*streams)[index].active) { + return; + } + + // Get bitrate limits for active stream. + absl::optional encoder_bitrate_limits = + encoder_info.GetEncoderBitrateLimitsForResolution( + (*streams)[index].width * (*streams)[index].height); + if (!encoder_bitrate_limits) { + return; + } + + // If bitrate limits are set by RtpEncodingParameters, use intersection. + int min_bitrate_bps; + if (encoder_config_layers[index].min_bitrate_bps <= 0) { + min_bitrate_bps = encoder_bitrate_limits->min_bitrate_bps; + } else { + min_bitrate_bps = std::max(encoder_bitrate_limits->min_bitrate_bps, + (*streams)[index].min_bitrate_bps); + } + int max_bitrate_bps; + if (encoder_config_layers[index].max_bitrate_bps <= 0) { + max_bitrate_bps = encoder_bitrate_limits->max_bitrate_bps; + } else { + max_bitrate_bps = std::min(encoder_bitrate_limits->max_bitrate_bps, + (*streams)[index].max_bitrate_bps); + } + if (min_bitrate_bps >= max_bitrate_bps) { + RTC_LOG(LS_WARNING) << "Encoder bitrate limits" + << " (min=" << encoder_bitrate_limits->min_bitrate_bps + << ", max=" << encoder_bitrate_limits->max_bitrate_bps + << ") do not intersect with stream limits" + << " (min=" << (*streams)[index].min_bitrate_bps + << ", max=" << (*streams)[index].max_bitrate_bps + << "). 
Encoder bitrate limits not used."; + return; + } + + (*streams)[index].min_bitrate_bps = min_bitrate_bps; + (*streams)[index].max_bitrate_bps = max_bitrate_bps; + (*streams)[index].target_bitrate_bps = + std::min((*streams)[index].target_bitrate_bps, + encoder_bitrate_limits->max_bitrate_bps); +} + } // namespace VideoStreamEncoder::EncoderRateSettings::EncoderRateSettings() @@ -354,7 +580,7 @@ class VideoStreamEncoder::DegradationPreferenceManager } } - SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; DegradationPreference degradation_preference_ RTC_GUARDED_BY(&sequence_checker_); bool is_screenshare_ RTC_GUARDED_BY(&sequence_checker_); @@ -369,12 +595,13 @@ VideoStreamEncoder::VideoStreamEncoder( VideoStreamEncoderObserver* encoder_stats_observer, const VideoStreamEncoderSettings& settings, std::unique_ptr overuse_detector, - TaskQueueFactory* task_queue_factory) + TaskQueueFactory* task_queue_factory, + BitrateAllocationCallbackType allocation_cb_type) : main_queue_(TaskQueueBase::Current()), number_of_cores_(number_of_cores), - quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()), sink_(nullptr), settings_(settings), + allocation_cb_type_(allocation_cb_type), rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), encoder_selector_(settings.encoder_factory->GetEncoderSelector()), encoder_stats_observer_(encoder_stats_observer), @@ -412,10 +639,8 @@ VideoStreamEncoder::VideoStreamEncoder( next_frame_types_(1, VideoFrameType::kVideoFrameDelta), frame_encode_metadata_writer_(this), experiment_groups_(GetExperimentGroups()), - encoder_switch_experiment_(ParseEncoderSwitchFieldTrial()), automatic_animation_detection_experiment_( ParseAutomatincAnimationDetectionFieldTrial()), - encoder_switch_requested_(false), input_state_provider_(encoder_stats_observer), video_stream_adapter_( std::make_unique(&input_state_provider_, @@ -435,6 +660,10 @@ VideoStreamEncoder::VideoStreamEncoder( 
degradation_preference_manager_.get()), video_source_sink_controller_(/*sink=*/this, /*source=*/nullptr), + default_limits_allowed_( + !field_trial::IsEnabled("WebRTC-DefaultBitrateLimitsKillSwitch")), + qp_parsing_allowed_( + !field_trial::IsEnabled("WebRTC-QpParsingKillSwitch")), encoder_queue_(task_queue_factory->CreateTaskQueue( "EncoderQueue", TaskQueueFactory::Priority::NORMAL)) { @@ -454,6 +683,7 @@ VideoStreamEncoder::VideoStreamEncoder( &stream_resource_manager_); video_stream_adapter_->AddRestrictionsListener(&stream_resource_manager_); video_stream_adapter_->AddRestrictionsListener(this); + stream_resource_manager_.MaybeInitializePixelLimitResource(); // Add the stream resource manager's resources to the processor. adaptation_constraints_ = stream_resource_manager_.AdaptationConstraints(); @@ -572,6 +802,7 @@ void VideoStreamEncoder::SetSink(EncoderSink* sink, bool rotation_applied) { void VideoStreamEncoder::SetStartBitrate(int start_bitrate_bps) { encoder_queue_.PostTask([this, start_bitrate_bps] { RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_LOG(LS_INFO) << "SetStartBitrate " << start_bitrate_bps; encoder_target_bitrate_bps_ = start_bitrate_bps != 0 ? absl::optional(start_bitrate_bps) : absl::nullopt; @@ -622,19 +853,6 @@ void VideoStreamEncoder::ReconfigureEncoder() { // Running on the encoder queue. 
RTC_DCHECK(pending_encoder_reconfiguration_); - if (!encoder_selector_ && - encoder_switch_experiment_.IsPixelCountBelowThreshold( - last_frame_info_->width * last_frame_info_->height) && - !encoder_switch_requested_ && settings_.encoder_switch_request_callback) { - EncoderSwitchRequestCallback::Config conf; - conf.codec_name = encoder_switch_experiment_.to_codec; - conf.param = encoder_switch_experiment_.to_param; - conf.value = encoder_switch_experiment_.to_value; - QueueRequestEncoderSwitch(conf); - - encoder_switch_requested_ = true; - } - bool encoder_reset_required = false; if (pending_encoder_creation_) { // Destroy existing encoder instance before creating a new one. Otherwise @@ -662,13 +880,17 @@ void VideoStreamEncoder::ReconfigureEncoder() { // Possibly adjusts scale_resolution_down_by in |encoder_config_| to limit the // alignment value. - int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( - encoder_->GetEncoderInfo(), &encoder_config_); + AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( + encoder_->GetEncoderInfo(), &encoder_config_, absl::nullopt); std::vector streams = encoder_config_.video_stream_factory->CreateEncoderStreams( last_frame_info_->width, last_frame_info_->height, encoder_config_); + // Get alignment when actual number of layers are known. + int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( + encoder_->GetEncoderInfo(), &encoder_config_, streams.size()); + // Check that the higher layers do not try to set number of temporal layers // to less than 1. 
// TODO(brandtr): Get rid of the wrapping optional as it serves no purpose @@ -697,53 +919,59 @@ void VideoStreamEncoder::ReconfigureEncoder() { crop_width_ = last_frame_info_->width - highest_stream_width; crop_height_ = last_frame_info_->height - highest_stream_height; - encoder_bitrate_limits_ = + absl::optional encoder_bitrate_limits = encoder_->GetEncoderInfo().GetEncoderBitrateLimitsForResolution( last_frame_info_->width * last_frame_info_->height); - if (streams.size() == 1 && encoder_bitrate_limits_) { - // Bitrate limits can be set by app (in SDP or RtpEncodingParameters) or/and - // can be provided by encoder. In presence of both set of limits, the final - // set is derived as their intersection. - int min_bitrate_bps; - if (encoder_config_.simulcast_layers.empty() || - encoder_config_.simulcast_layers[0].min_bitrate_bps <= 0) { - min_bitrate_bps = encoder_bitrate_limits_->min_bitrate_bps; - } else { - min_bitrate_bps = std::max(encoder_bitrate_limits_->min_bitrate_bps, - streams.back().min_bitrate_bps); - } + if (encoder_bitrate_limits) { + if (streams.size() == 1 && encoder_config_.simulcast_layers.size() == 1) { + // Bitrate limits can be set by app (in SDP or RtpEncodingParameters) + // or/and can be provided by encoder. In presence of both set of limits, + // the final set is derived as their intersection. + int min_bitrate_bps; + if (encoder_config_.simulcast_layers.empty() || + encoder_config_.simulcast_layers[0].min_bitrate_bps <= 0) { + min_bitrate_bps = encoder_bitrate_limits->min_bitrate_bps; + } else { + min_bitrate_bps = std::max(encoder_bitrate_limits->min_bitrate_bps, + streams.back().min_bitrate_bps); + } - int max_bitrate_bps; - // We don't check encoder_config_.simulcast_layers[0].max_bitrate_bps - // here since encoder_config_.max_bitrate_bps is derived from it (as - // well as from other inputs). 
- if (encoder_config_.max_bitrate_bps <= 0) { - max_bitrate_bps = encoder_bitrate_limits_->max_bitrate_bps; - } else { - max_bitrate_bps = std::min(encoder_bitrate_limits_->max_bitrate_bps, - streams.back().max_bitrate_bps); - } + int max_bitrate_bps; + // We don't check encoder_config_.simulcast_layers[0].max_bitrate_bps + // here since encoder_config_.max_bitrate_bps is derived from it (as + // well as from other inputs). + if (encoder_config_.max_bitrate_bps <= 0) { + max_bitrate_bps = encoder_bitrate_limits->max_bitrate_bps; + } else { + max_bitrate_bps = std::min(encoder_bitrate_limits->max_bitrate_bps, + streams.back().max_bitrate_bps); + } - if (min_bitrate_bps < max_bitrate_bps) { - streams.back().min_bitrate_bps = min_bitrate_bps; - streams.back().max_bitrate_bps = max_bitrate_bps; - streams.back().target_bitrate_bps = - std::min(streams.back().target_bitrate_bps, - encoder_bitrate_limits_->max_bitrate_bps); - } else { - RTC_LOG(LS_WARNING) << "Bitrate limits provided by encoder" - << " (min=" - << encoder_bitrate_limits_->min_bitrate_bps - << ", max=" - << encoder_bitrate_limits_->min_bitrate_bps - << ") do not intersect with limits set by app" - << " (min=" << streams.back().min_bitrate_bps - << ", max=" << encoder_config_.max_bitrate_bps - << "). The app bitrate limits will be used."; + if (min_bitrate_bps < max_bitrate_bps) { + streams.back().min_bitrate_bps = min_bitrate_bps; + streams.back().max_bitrate_bps = max_bitrate_bps; + streams.back().target_bitrate_bps = + std::min(streams.back().target_bitrate_bps, + encoder_bitrate_limits->max_bitrate_bps); + } else { + RTC_LOG(LS_WARNING) + << "Bitrate limits provided by encoder" + << " (min=" << encoder_bitrate_limits->min_bitrate_bps + << ", max=" << encoder_bitrate_limits->max_bitrate_bps + << ") do not intersect with limits set by app" + << " (min=" << streams.back().min_bitrate_bps + << ", max=" << encoder_config_.max_bitrate_bps + << "). 
The app bitrate limits will be used."; + } } } + ApplyEncoderBitrateLimitsIfSingleActiveStream( + GetEncoderInfoWithBitrateLimitUpdate( + encoder_->GetEncoderInfo(), encoder_config_, default_limits_allowed_), + encoder_config_.simulcast_layers, &streams); + VideoCodec codec; if (!VideoCodecInitializer::SetupCodec(encoder_config_, streams, &codec)) { RTC_LOG(LS_ERROR) << "Failed to create encoder configuration."; @@ -754,6 +982,10 @@ void VideoStreamEncoder::ReconfigureEncoder() { // thus some cropping might be needed. crop_width_ = last_frame_info_->width - codec.width; crop_height_ = last_frame_info_->height - codec.height; + ApplyVp9BitrateLimits(GetEncoderInfoWithBitrateLimitUpdate( + encoder_->GetEncoderInfo(), encoder_config_, + default_limits_allowed_), + encoder_config_, &codec); } char log_stream_buf[4 * 1024]; @@ -805,14 +1037,29 @@ void VideoStreamEncoder::ReconfigureEncoder() { max_framerate = std::max(stream.max_framerate, max_framerate); } - main_queue_->PostTask( - ToQueuedTask(task_safety_, [this, max_framerate, alignment]() { + // The resolutions that we're actually encoding with. + std::vector encoder_resolutions; + // TODO(hbos): For the case of SVC, also make use of |codec.spatialLayers|. + // For now, SVC layers are handled by the VP9 encoder. 
+ for (const auto& simulcastStream : codec.simulcastStream) { + if (!simulcastStream.active) + continue; + encoder_resolutions.emplace_back(simulcastStream.width, + simulcastStream.height); + } + main_queue_->PostTask(ToQueuedTask( + task_safety_, [this, max_framerate, alignment, + encoder_resolutions = std::move(encoder_resolutions)]() { RTC_DCHECK_RUN_ON(main_queue_); if (max_framerate != video_source_sink_controller_.frame_rate_upper_limit() || - alignment != video_source_sink_controller_.resolution_alignment()) { + alignment != video_source_sink_controller_.resolution_alignment() || + encoder_resolutions != + video_source_sink_controller_.resolutions()) { video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); video_source_sink_controller_.SetResolutionAlignment(alignment); + video_source_sink_controller_.SetResolutions( + std::move(encoder_resolutions)); video_source_sink_controller_.PushSourceSinkSettings(); } })); @@ -847,8 +1094,6 @@ void VideoStreamEncoder::ReconfigureEncoder() { } send_codec_ = codec; - encoder_switch_experiment_.SetCodec(send_codec_.codecType); - // Keep the same encoder, as long as the video_format is unchanged. 
// Encoder creation block is split in two since EncoderInfo needed to start // CPU adaptation with the correct settings should be polled after @@ -899,7 +1144,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { } if (pending_encoder_creation_) { - stream_resource_manager_.EnsureEncodeUsageResourceStarted(); + stream_resource_manager_.ConfigureEncodeUsageResource(); pending_encoder_creation_ = false; } @@ -976,8 +1221,10 @@ void VideoStreamEncoder::ReconfigureEncoder() { } void VideoStreamEncoder::OnEncoderSettingsChanged() { - EncoderSettings encoder_settings(encoder_->GetEncoderInfo(), - encoder_config_.Copy(), send_codec_); + EncoderSettings encoder_settings( + GetEncoderInfoWithBitrateLimitUpdate( + encoder_->GetEncoderInfo(), encoder_config_, default_limits_allowed_), + encoder_config_.Copy(), send_codec_); stream_resource_manager_.SetEncoderSettings(encoder_settings); input_state_provider_.OnEncoderSettingsChanged(encoder_settings); bool is_screenshare = encoder_settings.encoder_config().content_type == @@ -1057,7 +1304,7 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) { MaybeEncodeVideoFrame(incoming_frame, post_time_us); } else { if (cwnd_frame_drop) { - // Frame drop by congestion window pusback. Do not encode this + // Frame drop by congestion window pushback. Do not encode this // frame. ++dropped_frame_cwnd_pushback_count_; encoder_stats_observer_->OnFrameDropped( @@ -1194,7 +1441,7 @@ void VideoStreamEncoder::SetEncoderRates( // |bitrate_allocation| is 0 it means that the network is down or the send // pacer is full. We currently only report this if the encoder has an internal // source. If the encoder does not have an internal source, higher levels - // are expected to not call AddVideoFrame. We do this since its unclear + // are expected to not call AddVideoFrame. We do this since it is unclear // how current encoder implementations behave when given a zero target // bitrate. 
// TODO(perkj): Make sure all known encoder implementations handle zero @@ -1214,21 +1461,18 @@ void VideoStreamEncoder::SetEncoderRates( static_cast(rate_settings.rate_control.framerate_fps + 0.5)); stream_resource_manager_.SetEncoderRates(rate_settings.rate_control); if (layer_allocation_changed && - settings_.allocation_cb_type == - VideoStreamEncoderSettings::BitrateAllocationCallbackType:: - kVideoLayersAllocation) { + allocation_cb_type_ == + BitrateAllocationCallbackType::kVideoLayersAllocation) { sink_->OnVideoLayersAllocationUpdated(CreateVideoLayersAllocation( send_codec_, rate_settings.rate_control, encoder_->GetEncoderInfo())); } } - if ((settings_.allocation_cb_type == - VideoStreamEncoderSettings::BitrateAllocationCallbackType:: - kVideoBitrateAllocation) || + if ((allocation_cb_type_ == + BitrateAllocationCallbackType::kVideoBitrateAllocation) || (encoder_config_.content_type == VideoEncoderConfig::ContentType::kScreen && - settings_.allocation_cb_type == - VideoStreamEncoderSettings::BitrateAllocationCallbackType:: - kVideoBitrateAllocationWhenScreenSharing)) { + allocation_cb_type_ == BitrateAllocationCallbackType:: + kVideoBitrateAllocationWhenScreenSharing)) { sink_->OnBitrateAllocationUpdated( // Update allocation according to info from encoder. An encoder may // choose to not use all layers due to for example HW. @@ -1258,7 +1502,7 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, VideoFrame::UpdateRect{0, 0, video_frame.width(), video_frame.height()}; } - // We have to create then encoder before the frame drop logic, + // We have to create the encoder before the frame drop logic, // because the latter depends on encoder_->GetScalingSettings. 
// According to the testcase // InitialFrameDropOffWhenEncoderDisabledScaling, the return value @@ -1342,8 +1586,8 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, // Frame dropping is enabled iff frame dropping is not force-disabled, and // rate controller is not trusted. const bool frame_dropping_enabled = false; - //!force_disable_frame_dropper_ && - //!encoder_info_.has_trusted_rate_controller; + // !force_disable_frame_dropper_ && + // !encoder_info_.has_trusted_rate_controller; frame_dropper_.Enable(frame_dropping_enabled); if (frame_dropping_enabled && frame_dropper_.DropFrame()) { RTC_LOG(LS_VERBOSE) @@ -1388,6 +1632,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, if (encoder_info_ != info) { OnEncoderSettingsChanged(); + stream_resource_manager_.ConfigureEncodeUsageResource(); RTC_LOG(LS_INFO) << "Encoder settings changed from " << encoder_info_.ToString() << " to " << info.ToString(); } @@ -1404,45 +1649,12 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, last_encode_info_ms_ = clock_->TimeInMilliseconds(); VideoFrame out_frame(video_frame); - if (out_frame.video_frame_buffer()->type() == - VideoFrameBuffer::Type::kNative && - !info.supports_native_handle) { - // This module only supports software encoding. - rtc::scoped_refptr buffer = - out_frame.video_frame_buffer()->GetMappedFrameBuffer( - info.preferred_pixel_formats); - bool buffer_was_converted = false; - if (!buffer) { - buffer = out_frame.video_frame_buffer()->ToI420(); - // TODO(https://crbug.com/webrtc/12021): Once GetI420 is pure virtual, - // this just true as an I420 buffer would return from - // GetMappedFrameBuffer. 
- buffer_was_converted = - (out_frame.video_frame_buffer()->GetI420() == nullptr); - } - if (!buffer) { - RTC_LOG(LS_ERROR) << "Frame conversion failed, dropping frame."; - return; - } - - VideoFrame::UpdateRect update_rect = out_frame.update_rect(); - if (!update_rect.IsEmpty() && - out_frame.video_frame_buffer()->GetI420() == nullptr) { - // UpdatedRect is reset to full update if it's not empty, and buffer was - // converted, therefore we can't guarantee that pixels outside of - // UpdateRect didn't change comparing to the previous frame. - update_rect = - VideoFrame::UpdateRect{0, 0, out_frame.width(), out_frame.height()}; - } - out_frame.set_video_frame_buffer(buffer); - out_frame.set_update_rect(update_rect); - } - - // Crop frame if needed. + // Crop or scale the frame if needed. Dimension may be reduced to fit encoder + // requirements, e.g. some encoders may require them to be divisible by 4. if ((crop_width_ > 0 || crop_height_ > 0) && - out_frame.video_frame_buffer()->type() != - VideoFrameBuffer::Type::kNative) { - // If the frame can't be converted to I420, drop it. + (out_frame.video_frame_buffer()->type() != + VideoFrameBuffer::Type::kNative || + !info.supports_native_handle)) { int cropped_width = video_frame.width() - crop_width_; int cropped_height = video_frame.height() - crop_height_; rtc::scoped_refptr cropped_buffer; @@ -1450,6 +1662,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, // happen after SinkWants signaled correctly from ReconfigureEncoder. VideoFrame::UpdateRect update_rect = video_frame.update_rect(); if (crop_width_ < 4 && crop_height_ < 4) { + // The difference is small, crop without scaling. 
cropped_buffer = video_frame.video_frame_buffer()->CropAndScale( crop_width_ / 2, crop_height_ / 2, cropped_width, cropped_height, cropped_width, cropped_height); @@ -1459,6 +1672,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height}); } else { + // The difference is large, scale it. cropped_buffer = video_frame.video_frame_buffer()->Scale(cropped_width, cropped_height); if (!update_rect.IsEmpty()) { @@ -1503,14 +1717,12 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, stream_resource_manager_.OnEncodeStarted(out_frame, time_when_posted_us); - RTC_DCHECK_LE(send_codec_.width, out_frame.width()); - RTC_DCHECK_LE(send_codec_.height, out_frame.height()); - // Native frames should be scaled by the client. - // For internal encoders we scale everything in one place here. - RTC_DCHECK((out_frame.video_frame_buffer()->type() == - VideoFrameBuffer::Type::kNative) || - (send_codec_.width == out_frame.width() && - send_codec_.height == out_frame.height())); + // The encoder should get the size that it expects. + RTC_DCHECK(send_codec_.width <= out_frame.width() && + send_codec_.height <= out_frame.height()) + << "Encoder configured to " << send_codec_.width << "x" + << send_codec_.height << " received a too small frame " + << out_frame.width() << "x" << out_frame.height(); TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp", out_frame.timestamp()); @@ -1618,6 +1830,18 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( frame_encode_metadata_writer_.UpdateBitstream(codec_specific_info, &image_copy); + VideoCodecType codec_type = codec_specific_info + ? codec_specific_info->codecType + : VideoCodecType::kVideoCodecGeneric; + + if (image_copy.qp_ < 0 && qp_parsing_allowed_) { + // Parse encoded frame QP if that was not provided by encoder. 
+ image_copy.qp_ = qp_parser_ + .Parse(codec_type, spatial_idx, image_copy.data(), + image_copy.size()) + .value_or(-1); + } + // Piggyback ALR experiment group id and simulcast id into the content type. const uint8_t experiment_id = experiment_groups_[videocontenttypehelpers::IsScreenshare( @@ -1640,12 +1864,9 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( // Post a task because |send_codec_| requires |encoder_queue_| lock. unsigned int image_width = image_copy._encodedWidth; unsigned int image_height = image_copy._encodedHeight; - VideoCodecType codec = codec_specific_info - ? codec_specific_info->codecType - : VideoCodecType::kVideoCodecGeneric; - encoder_queue_.PostTask([this, codec, image_width, image_height] { + encoder_queue_.PostTask([this, codec_type, image_width, image_height] { RTC_DCHECK_RUN_ON(&encoder_queue_); - if (codec == VideoCodecType::kVideoCodecVP9 && + if (codec_type == VideoCodecType::kVideoCodecVP9 && send_codec_.VP9()->automaticResizeOn) { unsigned int expected_width = send_codec_.width; unsigned int expected_height = send_codec_.height; @@ -1791,22 +2012,10 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, const bool video_is_suspended = target_bitrate == DataRate::Zero(); const bool video_suspension_changed = video_is_suspended != EncoderPaused(); - if (!video_is_suspended && settings_.encoder_switch_request_callback) { - if (encoder_selector_) { - if (auto encoder = - encoder_selector_->OnAvailableBitrate(link_allocation)) { - QueueRequestEncoderSwitch(*encoder); - } - } else if (encoder_switch_experiment_.IsBitrateBelowThreshold( - target_bitrate) && - !encoder_switch_requested_) { - EncoderSwitchRequestCallback::Config conf; - conf.codec_name = encoder_switch_experiment_.to_codec; - conf.param = encoder_switch_experiment_.to_param; - conf.value = encoder_switch_experiment_.to_value; - QueueRequestEncoderSwitch(conf); - - encoder_switch_requested_ = true; + if (!video_is_suspended && 
settings_.encoder_switch_request_callback && + encoder_selector_) { + if (auto encoder = encoder_selector_->OnAvailableBitrate(link_allocation)) { + QueueRequestEncoderSwitch(*encoder); } } @@ -1852,30 +2061,43 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, } bool VideoStreamEncoder::DropDueToSize(uint32_t pixel_count) const { - bool simulcast_or_svc = - (send_codec_.codecType == VideoCodecType::kVideoCodecVP9 && - send_codec_.VP9().numberOfSpatialLayers > 1) || - send_codec_.numberOfSimulcastStreams > 1 || - encoder_config_.simulcast_layers.size() > 1; - - if (simulcast_or_svc || !stream_resource_manager_.DropInitialFrames() || + if (!stream_resource_manager_.DropInitialFrames() || !encoder_target_bitrate_bps_.has_value()) { return false; } + bool simulcast_or_svc = + (send_codec_.codecType == VideoCodecType::kVideoCodecVP9 && + send_codec_.VP9().numberOfSpatialLayers > 1) || + (send_codec_.numberOfSimulcastStreams > 1 || + encoder_config_.simulcast_layers.size() > 1); + + if (simulcast_or_svc) { + if (stream_resource_manager_.SingleActiveStreamPixels()) { + pixel_count = stream_resource_manager_.SingleActiveStreamPixels().value(); + } else { + return false; + } + } + + uint32_t bitrate_bps = + stream_resource_manager_.UseBandwidthAllocationBps().value_or( + encoder_target_bitrate_bps_.value()); + absl::optional encoder_bitrate_limits = - encoder_->GetEncoderInfo().GetEncoderBitrateLimitsForResolution( - pixel_count); + GetEncoderInfoWithBitrateLimitUpdate( + encoder_->GetEncoderInfo(), encoder_config_, default_limits_allowed_) + .GetEncoderBitrateLimitsForResolution(pixel_count); if (encoder_bitrate_limits.has_value()) { // Use bitrate limits provided by encoder. 
- return encoder_target_bitrate_bps_.value() < + return bitrate_bps < static_cast(encoder_bitrate_limits->min_start_bitrate_bps); } - if (encoder_target_bitrate_bps_.value() < 300000 /* qvga */) { + if (bitrate_bps < 300000 /* qvga */) { return pixel_count > 320 * 240; - } else if (encoder_target_bitrate_bps_.value() < 500000 /* vga */) { + } else if (bitrate_bps < 500000 /* vga */) { return pixel_count > 640 * 480; } return false; @@ -1943,7 +2165,8 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, stream_resource_manager_.OnEncodeCompleted(encoded_image, time_sent_us, encode_duration_us); if (bitrate_adjuster_) { - bitrate_adjuster_->OnEncodedFrame(encoded_image, temporal_index); + bitrate_adjuster_->OnEncodedFrame( + frame_size, encoded_image.SpatialIndex().value_or(0), temporal_index); } } @@ -1962,113 +2185,6 @@ void VideoStreamEncoder::ReleaseEncoder() { TRACE_EVENT0("webrtc", "VCMGenericEncoder::Release"); } -bool VideoStreamEncoder::EncoderSwitchExperiment::IsBitrateBelowThreshold( - const DataRate& target_bitrate) { - DataRate rate = DataRate::KilobitsPerSec( - bitrate_filter.Apply(1.0, target_bitrate.kbps())); - return current_thresholds.bitrate && rate < *current_thresholds.bitrate; -} - -bool VideoStreamEncoder::EncoderSwitchExperiment::IsPixelCountBelowThreshold( - int pixel_count) const { - return current_thresholds.pixel_count && - pixel_count < *current_thresholds.pixel_count; -} - -void VideoStreamEncoder::EncoderSwitchExperiment::SetCodec( - VideoCodecType codec) { - auto it = codec_thresholds.find(codec); - if (it == codec_thresholds.end()) { - current_thresholds = {}; - } else { - current_thresholds = it->second; - } -} - -VideoStreamEncoder::EncoderSwitchExperiment -VideoStreamEncoder::ParseEncoderSwitchFieldTrial() const { - EncoderSwitchExperiment result; - - // Each "codec threshold" have the format - // ";;", and are separated by the "|" - // character. 
- webrtc::FieldTrialOptional codec_thresholds_string{ - "codec_thresholds"}; - webrtc::FieldTrialOptional to_codec{"to_codec"}; - webrtc::FieldTrialOptional to_param{"to_param"}; - webrtc::FieldTrialOptional to_value{"to_value"}; - webrtc::FieldTrialOptional window{"window"}; - - webrtc::ParseFieldTrial( - {&codec_thresholds_string, &to_codec, &to_param, &to_value, &window}, - webrtc::field_trial::FindFullName( - "WebRTC-NetworkCondition-EncoderSwitch")); - - if (!codec_thresholds_string || !to_codec || !window) { - return {}; - } - - result.bitrate_filter.Reset(1.0 - 1.0 / *window); - result.to_codec = *to_codec; - result.to_param = to_param.GetOptional(); - result.to_value = to_value.GetOptional(); - - std::vector codecs_thresholds; - if (rtc::split(*codec_thresholds_string, '|', &codecs_thresholds) == 0) { - return {}; - } - - for (const std::string& codec_threshold : codecs_thresholds) { - std::vector thresholds_split; - if (rtc::split(codec_threshold, ';', &thresholds_split) != 3) { - return {}; - } - - VideoCodecType codec = PayloadStringToCodecType(thresholds_split[0]); - int bitrate_kbps; - rtc::FromString(thresholds_split[1], &bitrate_kbps); - int pixel_count; - rtc::FromString(thresholds_split[2], &pixel_count); - - if (bitrate_kbps > 0) { - result.codec_thresholds[codec].bitrate = - DataRate::KilobitsPerSec(bitrate_kbps); - } - - if (pixel_count > 0) { - result.codec_thresholds[codec].pixel_count = pixel_count; - } - - if (!result.codec_thresholds[codec].bitrate && - !result.codec_thresholds[codec].pixel_count) { - return {}; - } - } - - rtc::StringBuilder ss; - ss << "Successfully parsed WebRTC-NetworkCondition-EncoderSwitch field " - "trial." - " to_codec:" - << result.to_codec << " to_param:" << result.to_param.value_or("") - << " to_value:" << result.to_value.value_or("") - << " codec_thresholds:"; - - for (auto kv : result.codec_thresholds) { - std::string codec_name = CodecTypeToPayloadString(kv.first); - std::string bitrate = kv.second.bitrate - ? 
std::to_string(kv.second.bitrate->kbps()) - : ""; - std::string pixels = kv.second.pixel_count - ? std::to_string(*kv.second.pixel_count) - : ""; - ss << " (" << codec_name << ":" << bitrate << ":" << pixels << ")"; - } - - RTC_LOG(LS_INFO) << ss.str(); - - return result; -} - VideoStreamEncoder::AutomaticAnimationDetectionExperiment VideoStreamEncoder::ParseAutomatincAnimationDetectionFieldTrial() const { AutomaticAnimationDetectionExperiment result; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h index 7dfc99084..9e7020366 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h @@ -18,6 +18,7 @@ #include #include "api/adaptation/resource.h" +#include "api/sequence_checker.h" #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_rotation.h" @@ -33,6 +34,7 @@ #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state_provider.h" #include "modules/video_coding/utility/frame_dropper.h" +#include "modules/video_coding/utility/qp_parser.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/race_checker.h" @@ -40,7 +42,6 @@ #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/thread_checker.h" #include "system_wrappers/include/clock.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" #include "video/encoder_bitrate_adjuster.h" @@ -61,12 +62,20 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, private EncodedImageCallback, public VideoSourceRestrictionsListener { public: + // TODO(bugs.webrtc.org/12000): Reporting of VideoBitrateAllocation is being + // deprecated. Instead VideoLayersAllocation should be reported. 
+ enum class BitrateAllocationCallbackType { + kVideoBitrateAllocation, + kVideoBitrateAllocationWhenScreenSharing, + kVideoLayersAllocation + }; VideoStreamEncoder(Clock* clock, uint32_t number_of_cores, VideoStreamEncoderObserver* encoder_stats_observer, const VideoStreamEncoderSettings& settings, std::unique_ptr overuse_detector, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + BitrateAllocationCallbackType allocation_cb_type); ~VideoStreamEncoder() override; void AddAdaptationResource(rtc::scoped_refptr resource) override; @@ -173,7 +182,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, void EncodeVideoFrame(const VideoFrame& frame, int64_t time_when_posted_in_ms); - // Indicates wether frame should be dropped because the pixel count is too + // Indicates whether frame should be dropped because the pixel count is too // large for the current bitrate configuration. bool DropDueToSize(uint32_t pixel_count) const RTC_RUN_ON(&encoder_queue_); @@ -221,10 +230,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, const uint32_t number_of_cores_; - const bool quality_scaling_experiment_enabled_; - EncoderSink* sink_; const VideoStreamEncoderSettings settings_; + const BitrateAllocationCallbackType allocation_cb_type_; const RateControlSettings rate_control_settings_; std::unique_ptr const @@ -305,8 +313,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, absl::optional last_encode_info_ms_ RTC_GUARDED_BY(&encoder_queue_); VideoEncoder::EncoderInfo encoder_info_ RTC_GUARDED_BY(&encoder_queue_); - absl::optional encoder_bitrate_limits_ - RTC_GUARDED_BY(&encoder_queue_); VideoEncoderFactory::CodecInfo codec_info_ RTC_GUARDED_BY(&encoder_queue_); VideoCodec send_codec_ RTC_GUARDED_BY(&encoder_queue_); @@ -343,38 +349,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // experiment group numbers incremented by 1. 
const std::array experiment_groups_; - struct EncoderSwitchExperiment { - struct Thresholds { - absl::optional bitrate; - absl::optional pixel_count; - }; - - // Codec --> switching thresholds - std::map codec_thresholds; - - // To smooth out the target bitrate so that we don't trigger a switch - // too easily. - rtc::ExpFilter bitrate_filter{1.0}; - - // Codec/implementation to switch to - std::string to_codec; - absl::optional to_param; - absl::optional to_value; - - // Thresholds for the currently used codecs. - Thresholds current_thresholds; - - // Updates the |bitrate_filter|, so not const. - bool IsBitrateBelowThreshold(const DataRate& target_bitrate); - bool IsPixelCountBelowThreshold(int pixel_count) const; - void SetCodec(VideoCodecType codec); - }; - - EncoderSwitchExperiment ParseEncoderSwitchFieldTrial() const; - - EncoderSwitchExperiment encoder_switch_experiment_ - RTC_GUARDED_BY(&encoder_queue_); - struct AutomaticAnimationDetectionExperiment { bool enabled = false; int min_duration_ms = 2000; @@ -395,11 +369,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, AutomaticAnimationDetectionExperiment automatic_animation_detection_experiment_ RTC_GUARDED_BY(&encoder_queue_); - // An encoder switch is only requested once, this variable is used to keep - // track of whether a request has been made or not. - bool encoder_switch_requested_ RTC_GUARDED_BY(&encoder_queue_); - - // Provies video stream input states: current resolution and frame rate. + // Provides video stream input states: current resolution and frame rate. VideoStreamInputStateProvider input_state_provider_; std::unique_ptr video_stream_adapter_ @@ -415,7 +385,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, RTC_GUARDED_BY(&encoder_queue_); // Handles input, output and stats reporting related to VideoStreamEncoder // specific resources, such as "encode usage percent" measurements and "QP - // scaling". 
Also involved with various mitigations such as inital frame + // scaling". Also involved with various mitigations such as initial frame // dropping. // The manager primarily operates on the |encoder_queue_| but its lifetime is // tied to the VideoStreamEncoder (which is destroyed off the encoder queue) @@ -431,6 +401,14 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, VideoSourceSinkController video_source_sink_controller_ RTC_GUARDED_BY(main_queue_); + // Default bitrate limits in EncoderInfoSettings allowed. + const bool default_limits_allowed_; + + // QP parser is used to extract QP value from encoded frame when that is not + // provided by encoder. + QpParser qp_parser_; + const bool qp_parsing_allowed_; + // Public methods are proxied to the task queues. The queues must be destroyed // first to make sure no tasks run that use other members. rtc::TaskQueue encoder_queue_; diff --git a/TMessagesProj/src/main/AndroidManifest.xml b/TMessagesProj/src/main/AndroidManifest.xml index a25e951b0..042fdaca9 100644 --- a/TMessagesProj/src/main/AndroidManifest.xml +++ b/TMessagesProj/src/main/AndroidManifest.xml @@ -145,11 +145,10 @@ - - - - - + + + + @@ -332,7 +331,7 @@ - + diff --git a/TMessagesProj/src/main/java/androidx/recyclerview/widget/ChatListItemAnimator.java b/TMessagesProj/src/main/java/androidx/recyclerview/widget/ChatListItemAnimator.java index 992568dbc..2e0084164 100644 --- a/TMessagesProj/src/main/java/androidx/recyclerview/widget/ChatListItemAnimator.java +++ b/TMessagesProj/src/main/java/androidx/recyclerview/widget/ChatListItemAnimator.java @@ -630,12 +630,22 @@ public class ChatListItemAnimator extends DefaultItemAnimator { if (activity != null && holder.itemView instanceof BotHelpCell) { BotHelpCell botCell = (BotHelpCell) holder.itemView ; - float top = recyclerListView.getMeasuredHeight() / 2 - botCell.getMeasuredHeight() / 2 + activity.getChatListViewPadding(); - float animateTo = 0; - if (botCell.getTop() > top) { - animateTo = 
top - botCell.getTop(); - } - animatorSet.playTogether(ObjectAnimator.ofFloat(botCell, View.TRANSLATION_Y, botCell.getTranslationY(), animateTo)); + float animateFrom = botCell.getTranslationY(); + + ValueAnimator valueAnimator = ValueAnimator.ofFloat(0, 1f); + valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { + @Override + public void onAnimationUpdate(ValueAnimator valueAnimator) { + float v = (float) valueAnimator.getAnimatedValue(); + float top = recyclerListView.getMeasuredHeight() / 2f - botCell.getMeasuredHeight() / 2f + activity.getChatListViewPadding(); + float animateTo = 0; + if (botCell.getTop() > top) { + animateTo = top - botCell.getTop(); + } + botCell.setTranslationY(animateFrom * (1f - v) + animateTo * v); + } + }); + animatorSet.playTogether(valueAnimator); } else if (holder.itemView instanceof ChatMessageCell) { ChatMessageCell chatMessageCell = (ChatMessageCell) holder.itemView; ChatMessageCell.TransitionParams params = chatMessageCell.getTransitionParams(); @@ -743,7 +753,9 @@ public class ChatListItemAnimator extends DefaultItemAnimator { if (animateCaption) { groupTransitionParams.captionEnterProgress = captionEnterFrom * v + captionEnterTo * (1f - v); } - recyclerListView.invalidate(); + if (recyclerListView != null) { + recyclerListView.invalidate(); + } }); valueAnimator.addListener(new AnimatorListenerAdapter() { diff --git a/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java b/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java index 3448929a9..fd28a07bd 100644 --- a/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java +++ b/TMessagesProj/src/main/java/androidx/recyclerview/widget/DefaultItemAnimator.java @@ -730,4 +730,8 @@ public class DefaultItemAnimator extends SimpleItemAnimator { @NonNull List payloads) { return !payloads.isEmpty() || super.canReuseUpdatedViewHolder(viewHolder, payloads); } + + public void 
setTranslationInterpolator(Interpolator translationInterpolator) { + this.translationInterpolator = translationInterpolator; + } } diff --git a/TMessagesProj/src/main/java/androidx/recyclerview/widget/RecyclerView.java b/TMessagesProj/src/main/java/androidx/recyclerview/widget/RecyclerView.java index ef1fe2d6c..2af1cb54d 100644 --- a/TMessagesProj/src/main/java/androidx/recyclerview/widget/RecyclerView.java +++ b/TMessagesProj/src/main/java/androidx/recyclerview/widget/RecyclerView.java @@ -215,7 +215,7 @@ public class RecyclerView extends ViewGroup implements ScrollingView, static final String TAG = "RecyclerView"; - static final boolean DEBUG = BuildVars.DEBUG_VERSION; + static final boolean DEBUG = false; static final boolean VERBOSE_TRACING = false; diff --git a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java index de3a48d97..bb8673e18 100644 --- a/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java +++ b/TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java @@ -86,7 +86,7 @@ public final class DefaultExtractorsFactory implements ExtractorsFactory { FLAC_EXTENSION_EXTRACTOR_CONSTRUCTOR = flacExtensionExtractorConstructor; } - private boolean constantBitrateSeekingEnabled; + private boolean constantBitrateSeekingEnabled = true; private @AdtsExtractor.Flags int adtsFlags; private @AmrExtractor.Flags int amrFlags; private @MatroskaExtractor.Flags int matroskaFlags; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java b/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java index 1b80d906b..8dad1b50a 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java @@ -49,7 +49,6 @@ 
import android.provider.MediaStore; import android.provider.Settings; import androidx.core.content.FileProvider; -import androidx.core.graphics.ColorUtils; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; import androidx.viewpager.widget.ViewPager; @@ -68,7 +67,6 @@ import android.text.method.LinkMovementMethod; import android.text.style.URLSpan; import android.text.util.Linkify; import android.util.DisplayMetrics; -import android.util.Log; import android.util.StateSet; import android.util.TypedValue; import android.view.ContextThemeWrapper; @@ -116,6 +114,7 @@ import org.telegram.ui.Components.BackgroundGradientDrawable; import org.telegram.ui.Components.ForegroundColorSpanThemable; import org.telegram.ui.Components.ForegroundDetector; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MotionBackgroundDrawable; import org.telegram.ui.Components.PickerBottomLayout; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.ShareAlert; @@ -189,10 +188,14 @@ public class AndroidUtilities { public static final RectF rectTmp = new RectF(); public static Pattern WEB_URL = null; + public static Pattern BAD_CHARS_PATTERN = null; + public static Pattern BAD_CHARS_MESSAGE_PATTERN = null; static { try { final String GOOD_IRI_CHAR = "a-zA-Z0-9\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF"; + BAD_CHARS_PATTERN = Pattern.compile("[\u2500-\u25ff]"); + BAD_CHARS_MESSAGE_PATTERN = Pattern.compile("[\u0300-\u036f]+"); final Pattern IP_ADDRESS = Pattern.compile( "((25[0-5]|2[0-4][0-9]|[0-1][0-9]{2}|[1-9][0-9]|[1-9])\\.(25[0-5]|2[0-4]" + "[0-9]|[0-1][0-9]{2}|[1-9][0-9]|[1-9]|0)\\.(25[0-5]|2[0-4][0-9]|[0-1]" @@ -246,6 +249,13 @@ public class AndroidUtilities { if (text.contains("\u202E")) { return true; } + try { + if (BAD_CHARS_PATTERN.matcher(text).find()) { + return true; + } + } catch (Throwable e) { + return true; + } return false; } @@ -587,6 +597,10 @@ public class 
AndroidUtilities { bitmapColor = colors[0]; } } + } else if (drawable instanceof MotionBackgroundDrawable) { + result[0] = result[2] = Color.argb(0x2D, 0, 0, 0); + result[1] = result[3] = Color.argb(0x3D, 0, 0, 0); + return result; } } catch (Exception e) { FileLog.e(e); @@ -1226,7 +1240,7 @@ public class AndroidUtilities { if (args.length < 2 || currentData == null) { continue; } - if (args[0].startsWith("FN") || args[0].startsWith("ORG") && TextUtils.isEmpty(currentData.name)) { + if (args[0].startsWith("FN") || args[0].startsWith("N") || args[0].startsWith("ORG") && TextUtils.isEmpty(currentData.name)) { String nameEncoding = null; String nameCharset = null; String[] params = args[0].split(";"); @@ -1241,7 +1255,11 @@ public class AndroidUtilities { nameEncoding = args2[1]; } } - currentData.name = args[1]; + if (args[0].startsWith("N")) { + currentData.name = args[1].replace(';', ' ').trim(); + } else { + currentData.name = args[1]; + } if (nameEncoding != null && nameEncoding.equalsIgnoreCase("QUOTED-PRINTABLE")) { byte[] bytes = decodeQuotedPrintable(getStringBytes(currentData.name)); if (bytes != null && bytes.length != 0) { @@ -1713,7 +1731,7 @@ public class AndroidUtilities { } public static int getPeerLayerVersion(int layer) { - return (layer >> 16) & 0xffff; + return Math.max(73, (layer >> 16) & 0xffff); } public static int setMyLayerVersion(int layer, int version) { @@ -1740,6 +1758,10 @@ public class AndroidUtilities { ApplicationLoader.applicationHandler.removeCallbacks(runnable); } + public static boolean isValidWallChar(char ch) { + return ch == '-' || ch == '~'; + } + public static boolean isTablet() { if (isTablet == null) { isTablet = ApplicationLoader.applicationContext.getResources().getBoolean(R.bool.isTablet); @@ -1749,7 +1771,7 @@ public class AndroidUtilities { public static boolean isSmallTablet() { float minSide = Math.min(displaySize.x, displaySize.y) / density; - return minSide <= 700; + return minSide <= 690; } public static int 
getMinTabletSide() { @@ -2245,7 +2267,7 @@ public class AndroidUtilities { } } - private static File getAlbumDir(boolean secretChat) { + private static File getAlbumDir(boolean secretChat) { //TODO scoped storage if (secretChat || Build.VERSION.SDK_INT >= 23 && ApplicationLoader.applicationContext.checkSelfPermission(android.Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { return FileLoader.getDirectory(FileLoader.MEDIA_DIR_CACHE); } @@ -2778,15 +2800,7 @@ public class AndroidUtilities { } } - public static boolean openForView(MessageObject message, final Activity activity) { - File f = null; - String fileName = message.getFileName(); - if (message.messageOwner.attachPath != null && message.messageOwner.attachPath.length() != 0) { - f = new File(message.messageOwner.attachPath); - } - if (f == null || !f.exists()) { - f = FileLoader.getPathToMessage(message.messageOwner); - } + public static boolean openForView(File f, String fileName, String mimeType, final Activity activity) { if (f != null && f.exists()) { String realMimeType = null; Intent intent = new Intent(Intent.ACTION_VIEW); @@ -2797,9 +2811,7 @@ public class AndroidUtilities { String ext = fileName.substring(idx + 1); realMimeType = myMime.getMimeTypeFromExtension(ext.toLowerCase()); if (realMimeType == null) { - if (message.type == 9 || message.type == 0) { - realMimeType = message.getDocument().mime_type; - } + realMimeType = mimeType; if (realMimeType == null || realMimeType.length() == 0) { realMimeType = null; } @@ -2844,6 +2856,24 @@ public class AndroidUtilities { return false; } + public static boolean openForView(MessageObject message, Activity activity) { + File f = null; + if (message.messageOwner.attachPath != null && message.messageOwner.attachPath.length() != 0) { + f = new File(message.messageOwner.attachPath); + } + if (f == null || !f.exists()) { + f = FileLoader.getPathToMessage(message.messageOwner); + } + String mimeType = message.type == 9 || 
message.type == 0 ? message.getMimeType() : null; + return openForView(f, message.getFileName(), mimeType, activity); + } + + public static boolean openForView(TLRPC.Document document, boolean forceCache, Activity activity) { + String fileName = FileLoader.getAttachFileName(document); + File f = FileLoader.getPathToAttach(document, true); + return openForView(f, fileName, document.mime_type, activity); + } + public static CharSequence replaceNewLines(CharSequence original) { if (original instanceof StringBuilder) { StringBuilder stringBuilder = (StringBuilder) original; @@ -3308,16 +3338,20 @@ public class AndroidUtilities { } public static int getPatternColor(int color) { + return getPatternColor(color, false); + } + + public static int getPatternColor(int color, boolean alwaysDark) { float[] hsb = RGBtoHSB(Color.red(color), Color.green(color), Color.blue(color)); if (hsb[1] > 0.0f || (hsb[2] < 1.0f && hsb[2] > 0.0f)) { - hsb[1] = Math.min(1.0f, hsb[1] + 0.05f + 0.1f * (1.0f - hsb[1])); + hsb[1] = Math.min(1.0f, hsb[1] + (alwaysDark ? 0.15f : 0.05f) + 0.1f * (1.0f - hsb[1])); } - if (hsb[2] > 0.5f) { + if (alwaysDark || hsb[2] > 0.5f) { hsb[2] = Math.max(0.0f, hsb[2] * 0.65f); } else { hsb[2] = Math.max(0.0f, Math.min(1.0f, 1.0f - hsb[2] * 0.65f)); } - return HSBtoRGB(hsb[0], hsb[1], hsb[2]) & 0x66ffffff; + return HSBtoRGB(hsb[0], hsb[1], hsb[2]) & (alwaysDark ? 
0x99ffffff : 0x66ffffff); } public static int getPatternSideColor(int color) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java index 866e6df2f..ba8cddf63 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ApplicationLoader.java @@ -30,8 +30,9 @@ import android.text.TextUtils; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesUtil; -import com.google.firebase.iid.FirebaseInstanceId; +import com.google.firebase.messaging.FirebaseMessaging; +import org.telegram.messenger.voip.VideoCapturerDevice; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; import org.telegram.ui.Components.ForegroundDetector; @@ -243,6 +244,7 @@ public class ApplicationLoader extends Application { try { LocaleController.getInstance().onDeviceConfigurationChange(newConfig); AndroidUtilities.checkDisplaySize(applicationContext, newConfig); + VideoCapturerDevice.checkScreenCapturerSize(); } catch (Exception e) { e.printStackTrace(); } @@ -263,18 +265,21 @@ public class ApplicationLoader extends Application { } Utilities.globalQueue.postRunnable(() -> { try { - FirebaseInstanceId.getInstance().getInstanceId().addOnSuccessListener(instanceIdResult -> { - String token = instanceIdResult.getToken(); - if (!TextUtils.isEmpty(token)) { - GcmPushListenerService.sendRegistrationToServer(token); - } - }).addOnFailureListener(e -> { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("Failed to get regid"); - } - SharedConfig.pushStringStatus = "__FIREBASE_FAILED__"; - GcmPushListenerService.sendRegistrationToServer(null); - }); + FirebaseMessaging.getInstance().getToken() + .addOnCompleteListener(task -> { + if (!task.isSuccessful()) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("Failed to get regid"); + } + 
SharedConfig.pushStringStatus = "__FIREBASE_FAILED__"; + GcmPushListenerService.sendRegistrationToServer(null); + return; + } + String token = task.getResult(); + if (!TextUtils.isEmpty(token)) { + GcmPushListenerService.sendRegistrationToServer(token); + } + }); } catch (Throwable e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AutoMessageReplyReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/AutoMessageReplyReceiver.java index 7608e37db..fb1833d75 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/AutoMessageReplyReceiver.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AutoMessageReplyReceiver.java @@ -33,7 +33,7 @@ public class AutoMessageReplyReceiver extends BroadcastReceiver { if (dialog_id == 0 || max_id == 0 || !UserConfig.isValidAccount(currentAccount)) { return; } - SendMessagesHelper.getInstance(currentAccount).sendMessage(text.toString(), dialog_id, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(text.toString(), dialog_id, null, null, null, true, null, null, null, true, 0, null); MessagesController.getInstance(currentAccount).markDialogAsRead(dialog_id, max_id, max_id, 0, false, 0, 0, true, 0); } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java index d134334fa..0e7f0b8ac 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java @@ -18,20 +18,21 @@ public class BuildVars { public static boolean LOGS_ENABLED = false; public static boolean USE_CLOUD_STRINGS = true; public static boolean CHECK_UPDATES = true; - public static int BUILD_VERSION = 2293; - public static String BUILD_VERSION_STRING = "7.7.0"; + public static boolean NO_SCOPED_STORAGE = true/* || Build.VERSION.SDK_INT <= 28*/; + public static int BUILD_VERSION = 
2359; + public static String BUILD_VERSION_STRING = "7.8.0"; public static int APP_ID = 4; public static String APP_HASH = "014b35b6184100b085b0d0572f9b5103"; public static String APPCENTER_HASH = "a5b5c4f5-51da-dedc-9918-d9766a22ca7c"; public static String APPCENTER_HASH_DEBUG = "f9726602-67c9-48d2-b5d0-4761f1c1a8f3"; // - public static String SMS_HASH = DEBUG_VERSION ? "O2P2z+/jBpJ" : "oLeq9AcOZkT"; + public static String SMS_HASH = AndroidUtilities.isStandaloneApp() ? "w0lkcmTZkKh" : (DEBUG_VERSION ? "O2P2z+/jBpJ" : "oLeq9AcOZkT"); public static String PLAYSTORE_APP_URL = "https://play.google.com/store/apps/details?id=org.telegram.messenger"; static { if (ApplicationLoader.applicationContext != null) { SharedPreferences sharedPreferences = ApplicationLoader.applicationContext.getSharedPreferences("systemConfig", Context.MODE_PRIVATE); - LOGS_ENABLED = sharedPreferences.getBoolean("logsEnabled", DEBUG_VERSION); + LOGS_ENABLED = DEBUG_VERSION || sharedPreferences.getBoolean("logsEnabled", DEBUG_VERSION); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java index 571a75a6d..006e47337 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ChatObject.java @@ -8,15 +8,20 @@ package org.telegram.messenger; +import android.graphics.Bitmap; import android.os.SystemClock; import android.text.TextUtils; import android.util.SparseArray; +import org.telegram.messenger.voip.Instance; import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.TLRPC; +import org.telegram.ui.GroupCallActivity; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; public class ChatObject { @@ -42,21 +47,34 @@ public class ChatObject { public static final int ACTION_DELETE_MESSAGES = 13; public static final int 
ACTION_MANAGE_CALLS = 14; + public final static int VIDEO_FRAME_NO_FRAME = 0; + public final static int VIDEO_FRAME_REQUESTING = 1; + public final static int VIDEO_FRAME_HAS_FRAME = 2; + private static final int MAX_PARTICIPANTS_COUNT = 5000; public static class Call { public TLRPC.GroupCall call; public int chatId; public SparseArray participants = new SparseArray<>(); - public ArrayList sortedParticipants = new ArrayList<>(); + public final ArrayList sortedParticipants = new ArrayList<>(); + public final ArrayList visibleVideoParticipants = new ArrayList<>(); + public final ArrayList visibleParticipants = new ArrayList<>(); + public final HashMap thumbs = new HashMap<>(); + + private final HashMap videoParticipantsCache = new HashMap<>(); public ArrayList invitedUsers = new ArrayList<>(); public HashSet invitedUsersMap = new HashSet<>(); public SparseArray participantsBySources = new SparseArray<>(); + public SparseArray participantsByVideoSources = new SparseArray<>(); + public SparseArray participantsByPresentationSources = new SparseArray<>(); private String nextLoadOffset; public boolean membersLoadEndReached; public boolean loadingMembers; public boolean reloadingMembers; public boolean recording; + public boolean canStreamVideo; + public VideoParticipant videoNotAvailableParticipant; public AccountInstance currentAccount; public int speakingMembersCount; private Runnable typingUpdateRunnable = () -> { @@ -79,6 +97,33 @@ public class ChatObject { private long lastGroupCallReloadTime; private boolean loadingGroupCall; + private static int videoPointer; + + public final SparseArray currentSpeakingPeers = new SparseArray<>(); + + private final Runnable updateCurrentSpeakingRunnable = new Runnable() { + @Override + public void run() { + long uptime = SystemClock.uptimeMillis(); + boolean update = false; + for(int i = 0; i < currentSpeakingPeers.size(); i++) { + int key = currentSpeakingPeers.keyAt(i); + TLRPC.TL_groupCallParticipant participant = 
currentSpeakingPeers.get(key); + if (uptime - participant.lastSpeakTime >= 500) { + update = true; + currentSpeakingPeers.remove(key); + i--; + } + } + + if (currentSpeakingPeers.size() > 0) { + AndroidUtilities.runOnUIThread(updateCurrentSpeakingRunnable, 550); + } + if (update) { + currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.groupCallSpeakingUsersUpdated, chatId, call.id, false); + } + } + }; public void setCall(AccountInstance account, int chatId, TLRPC.TL_phone_groupCall groupCall) { this.chatId = chatId; @@ -90,14 +135,29 @@ public class ChatObject { TLRPC.TL_groupCallParticipant participant = groupCall.participants.get(a); participants.put(MessageObject.getPeerId(participant.peer), participant); sortedParticipants.add(participant); - if (participant.source != 0) { - participantsBySources.put(participant.source, participant); - } + processAllSources(participant, true); date = Math.min(date, participant.date); } sortParticipants(); nextLoadOffset = groupCall.participants_next_offset; loadMembers(true); + + createNoVideoParticipant(); + } + + public void createNoVideoParticipant() { + if (videoNotAvailableParticipant != null) { + return; + } + TLRPC.TL_groupCallParticipant noVideoParticipant = new TLRPC.TL_groupCallParticipant(); + noVideoParticipant.peer = new TLRPC.TL_peerChannel(); + noVideoParticipant.peer.channel_id = chatId; + noVideoParticipant.muted = true; + noVideoParticipant.video = new TLRPC.TL_groupCallParticipantVideo(); + noVideoParticipant.video.paused = true; + noVideoParticipant.video.endpoint = ""; + + videoNotAvailableParticipant = new VideoParticipant(noVideoParticipant, false, false); } public void addSelfDummyParticipant(boolean notify) { @@ -109,6 +169,7 @@ public class ChatObject { selfDummyParticipant.peer = selfPeer; selfDummyParticipant.muted = true; selfDummyParticipant.self = true; + selfDummyParticipant.video_joined = call.can_start_video; TLRPC.Chat chat = 
currentAccount.getMessagesController().getChat(chatId); selfDummyParticipant.can_self_unmute = !call.join_muted || ChatObject.canManageCalls(chat); selfDummyParticipant.date = currentAccount.getConnectionsManager().getCurrentTime(); @@ -199,6 +260,8 @@ public class ChatObject { } sortedParticipants.clear(); participantsBySources.clear(); + participantsByVideoSources.clear(); + participantsByPresentationSources.clear(); loadingGuids.clear(); } nextLoadOffset = groupParticipants.next_offset; @@ -232,9 +295,7 @@ public class ChatObject { TLRPC.TL_groupCallParticipant oldParticipant = participants.get(MessageObject.getPeerId(participant.peer)); if (oldParticipant != null) { sortedParticipants.remove(oldParticipant); - if (oldParticipant.source != 0) { - participantsBySources.remove(oldParticipant.source); - } + processAllSources(oldParticipant, false); if (oldParticipant.self) { participant.lastTypingDate = oldParticipant.active_date; } else { @@ -260,9 +321,7 @@ public class ChatObject { } participants.put(MessageObject.getPeerId(participant.peer), participant); sortedParticipants.add(participant); - if (participant.source != 0) { - participantsBySources.put(participant.source, participant); - } + processAllSources(participant, true); } if (call.participants_count < participants.size()) { call.participants_count = participants.size(); @@ -389,15 +448,11 @@ public class ChatObject { TLRPC.TL_groupCallParticipant oldParticipant = participants.get(pid); if (oldParticipant != null) { sortedParticipants.remove(oldParticipant); - if (oldParticipant.source != 0) { - participantsBySources.remove(oldParticipant.source); - } + processAllSources(oldParticipant, false); } participants.put(pid, participant); sortedParticipants.add(participant); - if (participant.source != 0) { - participantsBySources.put(participant.source, participant); - } + processAllSources(participant, true); if (invitedUsersMap.contains(pid)) { Integer id = pid; invitedUsersMap.remove(id); @@ -419,11 +474,53 
@@ public class ChatObject { })); } + private void processAllSources(TLRPC.TL_groupCallParticipant participant, boolean add) { + if (participant.source != 0) { + if (add) { + participantsBySources.put(participant.source, participant); + } else { + participantsBySources.remove(participant.source); + } + } + for (int c = 0; c < 2; c++) { + TLRPC.TL_groupCallParticipantVideo data = c == 0 ? participant.video : participant.presentation; + if (data != null) { + SparseArray sourcesArray = c == 0 ? participantsByVideoSources : participantsByPresentationSources; + for (int a = 0, N = data.source_groups.size(); a < N; a++) { + TLRPC.TL_groupCallParticipantVideoSourceGroup sourceGroup = data.source_groups.get(a); + for (int b = 0, N2 = sourceGroup.sources.size(); b < N2; b++) { + int source = sourceGroup.sources.get(b); + if (add) { + sourcesArray.put(source, participant); + } else { + sourcesArray.remove(source); + } + } + } + if (add) { + if (c == 0) { + participant.videoEndpoint = data.endpoint; + } else { + participant.presentationEndpoint = data.endpoint; + } + } else { + if (c == 0) { + participant.videoEndpoint = null; + } else { + participant.presentationEndpoint = null; + } + } + } + } + } + public void processVoiceLevelsUpdate(int[] ssrc, float[] levels, boolean[] voice) { boolean updated = false; + boolean updateCurrentSpeakingList = false; int currentTime = currentAccount.getConnectionsManager().getCurrentTime(); ArrayList participantsToLoad = null; long time = SystemClock.elapsedRealtime(); + long uptime = SystemClock.uptimeMillis(); currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.applyGroupCallVisibleParticipants, time); for (int a = 0; a < ssrc.length; a++) { TLRPC.TL_groupCallParticipant participant; @@ -438,6 +535,7 @@ public class ChatObject { participant.hasVoiceDelayed = voice[a]; participant.lastVoiceUpdateTime = time; } + int peerId = MessageObject.getPeerId(participant.peer); if (levels[a] > 0.1f) { if (voice[a] && 
participant.lastTypingDate + 1 < currentTime) { if (time != participant.lastVisibleDate) { @@ -446,9 +544,20 @@ public class ChatObject { participant.lastTypingDate = currentTime; updated = true; } - participant.lastSpeakTime = SystemClock.uptimeMillis(); + participant.lastSpeakTime = uptime; participant.amplitude = levels[a]; + + if (currentSpeakingPeers.get(peerId, null) == null) { + currentSpeakingPeers.put(peerId, participant); + updateCurrentSpeakingList = true; + } } else { + if (uptime - participant.lastSpeakTime >= 500) { + if (currentSpeakingPeers.get(peerId, null) != null) { + currentSpeakingPeers.remove(peerId); + updateCurrentSpeakingList = true; + } + } participant.amplitude = 0; } } else if (ssrc[a] != 0) { @@ -465,6 +574,27 @@ public class ChatObject { sortParticipants(); currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, false); } + if (updateCurrentSpeakingList) { + if (currentSpeakingPeers.size() > 0) { + AndroidUtilities.cancelRunOnUIThread(updateCurrentSpeakingRunnable); + AndroidUtilities.runOnUIThread(updateCurrentSpeakingRunnable, 550); + } + currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.groupCallSpeakingUsersUpdated, chatId, call.id, false); + } + } + + public void updateVisibleParticipants() { + sortParticipants(); + currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, false, 0L); + } + + public void clearVideFramesInfo() { + for (int i = 0; i < sortedParticipants.size(); i++) { + sortedParticipants.get(i).hasCameraFrame = VIDEO_FRAME_NO_FRAME; + sortedParticipants.get(i).hasPresentationFrame = VIDEO_FRAME_NO_FRAME; + sortedParticipants.get(i).videoIndex = 0; + } + sortParticipants(); } public interface OnParticipantsLoad { @@ -474,16 +604,18 @@ public class ChatObject { public void processUnknownVideoParticipants(int[] ssrc, OnParticipantsLoad onLoad) { ArrayList participantsToLoad = 
null; for (int a = 0; a < ssrc.length; a++) { - TLRPC.TL_groupCallParticipant participant = participantsBySources.get(ssrc[a]); - if (participant == null) { - if (participantsToLoad == null) { - participantsToLoad = new ArrayList<>(); - } - participantsToLoad.add(ssrc[a]); + if (participantsBySources.get(ssrc[a]) != null || participantsByVideoSources.get(ssrc[a]) != null || participantsByPresentationSources.get(ssrc[a]) != null) { + continue; } + if (participantsToLoad == null) { + participantsToLoad = new ArrayList<>(); + } + participantsToLoad.add(ssrc[a]); } if (participantsToLoad != null) { loadUnknownParticipants(participantsToLoad, false, onLoad); + } else { + onLoad.onLoad(null); } } @@ -636,8 +768,11 @@ public class ChatObject { boolean updated = false; boolean selfUpdated = false; boolean changedOrAdded = false; + boolean speakingUpdated = false; + int selfId = getSelfId(); long time = SystemClock.elapsedRealtime(); + long justJoinedId = 0; int lastParticipantDate; if (!sortedParticipants.isEmpty()) { lastParticipantDate = sortedParticipants.get(sortedParticipants.size() - 1).date; @@ -661,10 +796,20 @@ public class ChatObject { } if (oldParticipant != null) { participants.remove(pid); - if (participant.source != 0) { - participantsBySources.remove(participant.source); - } + processAllSources(oldParticipant, false); sortedParticipants.remove(oldParticipant); + visibleParticipants.remove(oldParticipant); + if (currentSpeakingPeers.get(pid, null) != null) { + currentSpeakingPeers.remove(pid); + speakingUpdated = true; + } + for (int i = 0; i < visibleVideoParticipants.size(); i++) { + VideoParticipant videoParticipant = visibleVideoParticipants.get(i); + if (MessageObject.getPeerId(videoParticipant.participant.peer) == MessageObject.getPeerId(oldParticipant.peer)) { + visibleVideoParticipants.remove(i); + i--; + } + } } call.participants_count--; if (call.participants_count < 0) { @@ -682,6 +827,10 @@ public class ChatObject { FileLog.d("new participant, 
update old"); } oldParticipant.muted = participant.muted; + if (participant.muted && currentSpeakingPeers.get(pid, null) != null) { + currentSpeakingPeers.remove(pid); + speakingUpdated = true; + } if (!participant.min) { oldParticipant.volume = participant.volume; oldParticipant.muted_by_you = participant.muted_by_you; @@ -695,6 +844,7 @@ public class ChatObject { } oldParticipant.flags = participant.flags; oldParticipant.can_self_unmute = participant.can_self_unmute; + oldParticipant.video_joined = participant.video_joined; if (oldParticipant.raise_hand_rating == 0 && participant.raise_hand_rating != 0) { oldParticipant.lastRaiseHandDate = SystemClock.elapsedRealtime(); } @@ -704,26 +854,32 @@ public class ChatObject { if (time != oldParticipant.lastVisibleDate) { oldParticipant.active_date = oldParticipant.lastTypingDate; } - if (oldParticipant.source != participant.source) { - if (oldParticipant.source != 0) { - participantsBySources.remove(oldParticipant.source); - } + if (oldParticipant.source != participant.source || !isSameVideo(oldParticipant.video, participant.video) || !isSameVideo(oldParticipant.presentation, participant.presentation)) { + processAllSources(oldParticipant, false); + oldParticipant.video = participant.video; + oldParticipant.presentation = participant.presentation; oldParticipant.source = participant.source; - if (oldParticipant.source != 0) { - participantsBySources.put(oldParticipant.source, oldParticipant); - } + processAllSources(oldParticipant, true); + participant.presentationEndpoint = oldParticipant.presentationEndpoint; + participant.videoEndpoint = oldParticipant.videoEndpoint; + participant.videoIndex = oldParticipant.videoIndex; + } else if (oldParticipant.video != null && participant.video != null) { + oldParticipant.video.paused = participant.video.paused; } } else { if (participant.just_joined) { + if (pid != selfId) { + justJoinedId = pid; + } call.participants_count++; if (update.version == call.version) { reloadCall = 
true; if (BuildVars.LOGS_ENABLED) { - FileLog.d("new participant, just joned, reload call"); + FileLog.d("new participant, just joined, reload call"); } } else { if (BuildVars.LOGS_ENABLED) { - FileLog.d("new participant, just joned"); + FileLog.d("new participant, just joined"); } } } @@ -734,9 +890,7 @@ public class ChatObject { sortedParticipants.add(participant); } participants.put(pid, participant); - if (participant.source != 0) { - participantsBySources.put(participant.source, participant); - } + processAllSources(participant, true); } if (pid == selfId && participant.active_date == 0 && (participant.can_self_unmute || !participant.muted)) { participant.active_date = currentAccount.getConnectionsManager().getCurrentTime(); @@ -767,8 +921,42 @@ public class ChatObject { if (changedOrAdded) { sortParticipants(); } - currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, selfUpdated); + currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, selfUpdated, justJoinedId); } + if (speakingUpdated) { + currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.groupCallSpeakingUsersUpdated, chatId, call.id, false); + } + } + + private boolean isSameVideo(TLRPC.TL_groupCallParticipantVideo oldVideo, TLRPC.TL_groupCallParticipantVideo newVideo) { + if (oldVideo == null && newVideo != null || oldVideo != null && newVideo == null) { + return false; + } + if (oldVideo == null || newVideo == null) { + return true; + } + if (!TextUtils.equals(oldVideo.endpoint, newVideo.endpoint)) { + return false; + } + if (oldVideo.source_groups.size() != newVideo.source_groups.size()) { + return false; + } + for (int a = 0, N = oldVideo.source_groups.size(); a < N; a++) { + TLRPC.TL_groupCallParticipantVideoSourceGroup oldGroup = oldVideo.source_groups.get(a); + TLRPC.TL_groupCallParticipantVideoSourceGroup newGroup = newVideo.source_groups.get(a); + 
if (!TextUtils.equals(oldGroup.semantics, newGroup.semantics)) { + return false; + } + if (oldGroup.sources.size() != newGroup.sources.size()) { + return false; + } + for (int b = 0, N2 = oldGroup.sources.size(); b < N2; b++) { + if (!newGroup.sources.contains(oldGroup.sources.get(b))) { + return false; + } + } + } + return true; } public void processGroupCallUpdate(AccountInstance accountInstance, TLRPC.TL_updateGroupCall update) { @@ -777,6 +965,7 @@ public class ChatObject { loadMembers(true); } call = update.call; + TLRPC.TL_groupCallParticipant selfParticipant = participants.get(getSelfId()); recording = call.record_start_date != 0; currentAccount.getNotificationCenter().postNotificationName(NotificationCenter.groupCallUpdated, chatId, call.id, false); } @@ -788,11 +977,67 @@ public class ChatObject { return inputGroupCall; } - private void sortParticipants() { + public static boolean videoIsActive(TLRPC.TL_groupCallParticipant participant, boolean presentation, ChatObject.Call call) { + VoIPService service = VoIPService.getSharedInstance(); + if (service == null) { + return false; + } + if (participant.self) { + return service.getVideoState(presentation) == Instance.VIDEO_STATE_ACTIVE; + } else { + if (call.videoNotAvailableParticipant != null && call.videoNotAvailableParticipant.participant == participant || call.participants.get(MessageObject.getPeerId(participant.peer)) != null) { + if (presentation) { + return participant.presentation != null;// && participant.hasPresentationFrame == 2; + } else { + return participant.video != null;// && participant.hasCameraFrame == 2; + } + } else { + return false; + } + } + } + + public void sortParticipants() { + visibleVideoParticipants.clear(); + visibleParticipants.clear(); TLRPC.Chat chat = currentAccount.getMessagesController().getChat(chatId); boolean isAdmin = ChatObject.canManageCalls(chat); int selfId = getSelfId(); - Collections.sort(sortedParticipants, (o1, o2) -> { + VoIPService service = 
VoIPService.getSharedInstance(); + TLRPC.TL_groupCallParticipant selfParticipant = participants.get(selfId); + canStreamVideo = selfParticipant != null && selfParticipant.video_joined; + boolean allowedVideoCount; + boolean hasAnyVideo = false; + for (int i = 0; i < sortedParticipants.size(); i++) { + TLRPC.TL_groupCallParticipant participant = sortedParticipants.get(i); + if (videoIsActive(participant, false, this) || videoIsActive(participant, true, this)) { + hasAnyVideo = true; + if (canStreamVideo) { + if (participant.videoIndex == 0) { + if (participant.self) { + participant.videoIndex = Integer.MAX_VALUE; + } else { + participant.videoIndex = ++videoPointer; + } + } + } else { + participant.videoIndex = 0; + } + } else if (participant.self || !canStreamVideo || (participant.video == null && participant.presentation == null)) { + participant.videoIndex = 0; + } + } + + Comparator comparator = (o1, o2) -> { + boolean videoActive1 = o1.videoIndex > 0; + boolean videoActive2 = o2.videoIndex > 0; + if (videoActive1 && videoActive2) { + return o2.videoIndex - o1.videoIndex; + } else if (videoActive1) { + return -1; + } else if (videoActive2) { + return 1; + } if (o1.active_date != 0 && o2.active_date != 0) { return Integer.compare(o2.active_date, o1.active_date); } else if (o1.active_date != 0) { @@ -819,19 +1064,92 @@ public class ChatObject { } else { return Integer.compare(o2.date, o1.date); } - }); + }; + Collections.sort(sortedParticipants, comparator); + if (sortedParticipants.size() > MAX_PARTICIPANTS_COUNT && (!ChatObject.canManageCalls(chat) || sortedParticipants.get(sortedParticipants.size() - 1).raise_hand_rating == 0)) { for (int a = MAX_PARTICIPANTS_COUNT, N = sortedParticipants.size(); a < N; a++) { TLRPC.TL_groupCallParticipant p = sortedParticipants.get(MAX_PARTICIPANTS_COUNT); if (p.raise_hand_rating != 0) { continue; } - participantsBySources.remove(p.source); + processAllSources(p, false); participants.remove(MessageObject.getPeerId(p.peer)); 
sortedParticipants.remove(MAX_PARTICIPANTS_COUNT); } } checkOnlineParticipants(); + + if (!canStreamVideo && hasAnyVideo) { + visibleVideoParticipants.add(videoNotAvailableParticipant); + } + + int wideVideoIndex = 0; + for (int i = 0; i < sortedParticipants.size(); i++) { + TLRPC.TL_groupCallParticipant participant = sortedParticipants.get(i); + if (canStreamVideo && participant.videoIndex != 0) { + if (!participant.self && videoIsActive(participant, true, this) && videoIsActive(participant, false, this)) { + VideoParticipant videoParticipant = videoParticipantsCache.get(participant.videoEndpoint); + if (videoParticipant == null) { + videoParticipant = new VideoParticipant(participant, false, true); + videoParticipantsCache.put(participant.videoEndpoint, videoParticipant); + } else { + videoParticipant.participant = participant; + videoParticipant.presentation = false; + videoParticipant.hasSame = true; + } + + VideoParticipant presentationParticipant = videoParticipantsCache.get(participant.presentationEndpoint); + if (presentationParticipant == null) { + presentationParticipant = new VideoParticipant(participant, true, true); + } else { + presentationParticipant.participant = participant; + presentationParticipant.presentation = true; + presentationParticipant.hasSame = true; + } + visibleVideoParticipants.add(videoParticipant); + if (videoParticipant.aspectRatio > 1f) { + wideVideoIndex = visibleVideoParticipants.size() - 1; + } + visibleVideoParticipants.add(presentationParticipant); + if (presentationParticipant.aspectRatio > 1f) { + wideVideoIndex = visibleVideoParticipants.size() - 1; + } + } else { + if (participant.self) { + if (videoIsActive(participant, true, this)) { + visibleVideoParticipants.add(new VideoParticipant(participant, true, false)); + } + if (videoIsActive(participant, false, this)) { + visibleVideoParticipants.add(new VideoParticipant(participant, false, false)); + } + } else { + boolean presentation = videoIsActive(participant, true, 
this); + + VideoParticipant videoParticipant = videoParticipantsCache.get(presentation ? participant.presentationEndpoint : participant.videoEndpoint); + if (videoParticipant == null) { + videoParticipant = new VideoParticipant(participant, presentation, false); + videoParticipantsCache.put(presentation ? participant.presentationEndpoint : participant.videoEndpoint, videoParticipant); + } else { + videoParticipant.participant = participant; + videoParticipant.presentation = presentation; + videoParticipant.hasSame = false; + } + visibleVideoParticipants.add(videoParticipant); + if (videoParticipant.aspectRatio > 1f) { + wideVideoIndex = visibleVideoParticipants.size() - 1; + } + } + } + } else { + visibleParticipants.add(participant); + } + } + + if (!GroupCallActivity.isLandscapeMode && visibleVideoParticipants.size() % 2 == 1) { + VideoParticipant videoParticipant = visibleVideoParticipants.remove(wideVideoIndex); + visibleVideoParticipants.add(videoParticipant); + } } public void saveActiveDates() { @@ -1175,4 +1493,47 @@ public class ChatObject { } return null; } + + public static boolean hasPhoto(TLRPC.Chat chat) { + return chat != null && chat.photo != null && !(chat.photo instanceof TLRPC.TL_chatPhotoEmpty); + } + + public static TLRPC.ChatPhoto getPhoto(TLRPC.Chat chat) { + return hasPhoto(chat) ? 
chat.photo : null; + } + + public static class VideoParticipant { + + public TLRPC.TL_groupCallParticipant participant; + public boolean presentation; + public boolean hasSame; + public float aspectRatio;// w / h + + public VideoParticipant(TLRPC.TL_groupCallParticipant participant, boolean presentation, boolean hasSame) { + this.participant = participant; + this.presentation = presentation; + this.hasSame = hasSame; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + VideoParticipant that = (VideoParticipant) o; + return presentation == that.presentation && MessageObject.getPeerId(participant.peer) == MessageObject.getPeerId(that.participant.peer); + } + + public void setAspectRatio(float aspectRatio, Call call) { + if (this.aspectRatio != aspectRatio) { + this.aspectRatio = aspectRatio; + if (!GroupCallActivity.isLandscapeMode && call.visibleVideoParticipants.size() % 2 == 1) { + call.updateVisibleParticipants(); + } + } + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java index 3974e2dcc..f3f33cb91 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ContactsController.java @@ -47,6 +47,7 @@ public class ContactsController extends BaseController { private boolean contactsSyncInProgress; private final Object observerLock = new Object(); public boolean contactsLoaded; + public boolean doneLoadingContacts; private boolean contactsBookLoaded; private boolean migratingContacts; private String lastContactsVersions = ""; @@ -252,6 +253,7 @@ public class ContactsController extends BaseController { loadingContacts = false; contactsSyncInProgress = false; + doneLoadingContacts = false; contactsLoaded = false; contactsBookLoaded = false; 
lastContactsVersions = ""; @@ -1393,7 +1395,7 @@ public class ContactsController extends BaseController { final boolean isEmpty = contactsArr.isEmpty(); - if (!contacts.isEmpty()) { + if (from == 2 && !contacts.isEmpty()) { for (int a = 0; a < contactsArr.size(); a++) { TLRPC.TL_contact contact = contactsArr.get(a); if (contactsDict.get(contact.user_id) != null) { @@ -1421,6 +1423,10 @@ public class ContactsController extends BaseController { if (from == 1 && (contactsArr.isEmpty() || Math.abs(System.currentTimeMillis() / 1000 - getUserConfig().lastContactsSyncTime) >= 24 * 60 * 60)) { loadContacts(false, getContactsHash(contactsArr)); if (contactsArr.isEmpty()) { + AndroidUtilities.runOnUIThread(() -> { + doneLoadingContacts = true; + getNotificationCenter().postNotificationName(NotificationCenter.contactsDidLoad); + }); return; } } @@ -1436,6 +1442,10 @@ public class ContactsController extends BaseController { if (BuildVars.LOGS_ENABLED) { FileLog.d("contacts are broken, load from server"); } + AndroidUtilities.runOnUIThread(() -> { + doneLoadingContacts = true; + getNotificationCenter().postNotificationName(NotificationCenter.contactsDidLoad); + }); return; } } @@ -1541,6 +1551,7 @@ public class ContactsController extends BaseController { usersMutualSectionsDict = sectionsDictMutual; sortedUsersSectionsArray = sortedSectionsArray; sortedUsersMutualSectionsArray = sortedSectionsArrayMutual; + doneLoadingContacts = true; if (from != 2) { synchronized (loadContactsSync) { loadingContacts = false; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java index 69f9519c3..585897e68 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/DocumentObject.java @@ -1,5 +1,8 @@ package org.telegram.messenger; +import android.graphics.Paint; +import android.graphics.Path; + import 
org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; @@ -69,6 +72,19 @@ public class DocumentObject { return getSvgThumb(document, colorKey, alpha, 1.0f); } + public static SvgHelper.SvgDrawable getSvgRectThumb(String colorKey, float alpha) { + Path path = new Path(); + path.addRect(0, 0, 512, 512, Path.Direction.CW); + path.close(); + SvgHelper.SvgDrawable drawable = new SvgHelper.SvgDrawable(); + drawable.commands.add(path); + drawable.paints.put(path, new Paint(Paint.ANTI_ALIAS_FLAG)); + drawable.width = 512; + drawable.height = 512; + drawable.setupGradient(colorKey, alpha); + return drawable; + } + public static SvgHelper.SvgDrawable getSvgThumb(TLRPC.Document document, String colorKey, float alpha, float zoom) { if (document == null) { return null; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/DownloadController.java b/TMessagesProj/src/main/java/org/telegram/messenger/DownloadController.java index dbbf415c7..05e794787 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/DownloadController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/DownloadController.java @@ -288,10 +288,10 @@ public class DownloadController extends BaseController implements NotificationCe } AndroidUtilities.runOnUIThread(() -> { - getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.fileDidFailToLoad); - getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.fileDidLoad); - getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.FileLoadProgressChanged); - getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.FileUploadProgressChanged); + getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.fileLoadFailed); + getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.fileLoaded); + getNotificationCenter().addObserver(DownloadController.this, 
NotificationCenter.fileLoadProgressChanged); + getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.fileUploadProgressChanged); getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.httpFileDidLoad); getNotificationCenter().addObserver(DownloadController.this, NotificationCenter.httpFileDidFailedLoad); loadAutoDownloadConfig(false); @@ -960,7 +960,7 @@ public class DownloadController extends BaseController implements NotificationCe @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.fileDidFailToLoad || id == NotificationCenter.httpFileDidFailedLoad) { + if (id == NotificationCenter.fileLoadFailed || id == NotificationCenter.httpFileDidFailedLoad) { String fileName = (String) args[0]; Integer canceled = (Integer) args[1]; listenerInProgress = true; @@ -982,7 +982,7 @@ public class DownloadController extends BaseController implements NotificationCe listenerInProgress = false; processLaterArrays(); checkDownloadFinished(fileName, canceled); - } else if (id == NotificationCenter.fileDidLoad || id == NotificationCenter.httpFileDidLoad) { + } else if (id == NotificationCenter.fileLoaded || id == NotificationCenter.httpFileDidLoad) { listenerInProgress = true; String fileName = (String) args[0]; ArrayList messageObjects = loadingFileMessagesObservers.get(fileName); @@ -1007,7 +1007,7 @@ public class DownloadController extends BaseController implements NotificationCe listenerInProgress = false; processLaterArrays(); checkDownloadFinished(fileName, 0); - } else if (id == NotificationCenter.FileLoadProgressChanged) { + } else if (id == NotificationCenter.fileLoadProgressChanged) { listenerInProgress = true; String fileName = (String) args[0]; ArrayList> arrayList = loadingFileObservers.get(fileName); @@ -1023,7 +1023,7 @@ public class DownloadController extends BaseController implements NotificationCe } listenerInProgress = false; processLaterArrays(); - } 
else if (id == NotificationCenter.FileUploadProgressChanged) { + } else if (id == NotificationCenter.fileUploadProgressChanged) { listenerInProgress = true; String fileName = (String) args[0]; ArrayList> arrayList = loadingFileObservers.get(fileName); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java b/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java index 095d6c1c9..57ab1d0d8 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/Emoji.java @@ -48,7 +48,7 @@ public class Emoji { public static ArrayList recentEmoji = new ArrayList<>(); public static HashMap emojiColor = new HashMap<>(); private static boolean recentEmojiLoaded; - private static Runnable invalidateUiRunnable = () -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.emojiDidLoad); + private static Runnable invalidateUiRunnable = () -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.emojiLoaded); private final static int MAX_RECENT_EMOJI_COUNT = 48; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/EmuDetector.java b/TMessagesProj/src/main/java/org/telegram/messenger/EmuDetector.java index 2f3c38a99..a7d34375e 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/EmuDetector.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/EmuDetector.java @@ -333,7 +333,7 @@ public class EmuDetector { return false; } - private boolean checkFiles(String[] targets, EmulatorTypes type) { + private boolean checkFiles(String[] targets, EmulatorTypes type) { //TODO scoped storage for (String pipe : targets) { File qemu_file; if (ContextCompat.checkSelfPermission(mContext, Manifest.permission.READ_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoadOperation.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoadOperation.java 
index 62edaffb8..46756d7e9 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoadOperation.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoadOperation.java @@ -192,18 +192,22 @@ public class FileLoadOperation { System.arraycopy(imageLocation.iv, 0, iv, 0, iv.length); key = imageLocation.key; } else if (imageLocation.photoPeer != null) { - location = new TLRPC.TL_inputPeerPhotoFileLocation(); - location.id = imageLocation.location.volume_id; - location.volume_id = imageLocation.location.volume_id; - location.local_id = imageLocation.location.local_id; - location.big = imageLocation.photoPeerType == ImageLocation.TYPE_BIG; - location.peer = imageLocation.photoPeer; + TLRPC.TL_inputPeerPhotoFileLocation inputPeerPhotoFileLocation = new TLRPC.TL_inputPeerPhotoFileLocation(); + inputPeerPhotoFileLocation.id = imageLocation.location.volume_id; + inputPeerPhotoFileLocation.volume_id = imageLocation.location.volume_id; + inputPeerPhotoFileLocation.local_id = imageLocation.location.local_id; + inputPeerPhotoFileLocation.photo_id = imageLocation.photoId; + inputPeerPhotoFileLocation.big = imageLocation.photoPeerType == ImageLocation.TYPE_BIG; + inputPeerPhotoFileLocation.peer = imageLocation.photoPeer; + location = inputPeerPhotoFileLocation; } else if (imageLocation.stickerSet != null) { - location = new TLRPC.TL_inputStickerSetThumb(); - location.id = imageLocation.location.volume_id; - location.volume_id = imageLocation.location.volume_id; - location.local_id = imageLocation.location.local_id; - location.stickerset = imageLocation.stickerSet; + TLRPC.TL_inputStickerSetThumb inputStickerSetThumb = new TLRPC.TL_inputStickerSetThumb(); + inputStickerSetThumb.id = imageLocation.location.volume_id; + inputStickerSetThumb.volume_id = imageLocation.location.volume_id; + inputStickerSetThumb.local_id = imageLocation.location.local_id; + inputStickerSetThumb.thumb_version = imageLocation.thumbVersion; + inputStickerSetThumb.stickerset = 
imageLocation.stickerSet; + location = inputStickerSetThumb; } else if (imageLocation.thumbSize != null) { if (imageLocation.photoId != 0) { location = new TLRPC.TL_inputPhotoFileLocation(); @@ -1613,10 +1617,12 @@ public class FileLoadOperation { protected void onFail(boolean thread, final int reason) { cleanup(); state = stateFailed; - if (thread) { - Utilities.stageQueue.postRunnable(() -> delegate.didFailedLoadingFile(FileLoadOperation.this, reason)); - } else { - delegate.didFailedLoadingFile(FileLoadOperation.this, reason); + if (delegate != null) { + if (thread) { + Utilities.stageQueue.postRunnable(() -> delegate.didFailedLoadingFile(FileLoadOperation.this, reason)); + } else { + delegate.didFailedLoadingFile(FileLoadOperation.this, reason); + } } } @@ -1829,6 +1835,13 @@ public class FileLoadOperation { streamPriorityStartOffset = 0; priorityRequestInfo = requestInfo; } + if (location instanceof TLRPC.TL_inputPeerPhotoFileLocation) { + TLRPC.TL_inputPeerPhotoFileLocation inputPeerPhotoFileLocation = (TLRPC.TL_inputPeerPhotoFileLocation) location; + if (inputPeerPhotoFileLocation.photo_id == 0) { + requestReference(requestInfo); + continue; + } + } requestInfo.requestToken = ConnectionsManager.getInstance(currentAccount).sendRequest(request, (response, error) -> { if (!requestInfos.contains(requestInfo)) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java index e624215ad..af80b0656 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileLoader.java @@ -191,7 +191,7 @@ public class FileLoader extends BaseController { return isLoadingVideo(document, false) || isLoadingVideo(document, true); } - public void cancelUploadFile(final String location, final boolean enc) { + public void cancelFileUpload(final String location, final boolean enc) { fileLoaderQueue.postRunnable(() -> { 
FileUploadOperation operation; if (!enc) { @@ -963,6 +963,10 @@ public class FileLoader extends BaseController { } public static File getPathToAttach(TLObject attach, String ext, boolean forceCache) { + return getPathToAttach(attach, null, ext, forceCache); + } + + public static File getPathToAttach(TLObject attach, String size, String ext, boolean forceCache) { File dir = null; if (forceCache) { dir = getDirectory(MEDIA_DIR_CACHE); @@ -1006,6 +1010,15 @@ public class FileLoader extends BaseController { } else { dir = getDirectory(MEDIA_DIR_IMAGE); } + } else if (attach instanceof TLRPC.UserProfilePhoto || attach instanceof TLRPC.ChatPhoto) { + if (size == null) { + size = "s"; + } + if ("s".equals(size)) { + dir = getDirectory(MEDIA_DIR_CACHE); + } else { + dir = getDirectory(MEDIA_DIR_IMAGE); + } } else if (attach instanceof WebFile) { WebFile document = (WebFile) attach; if (document.mime_type.startsWith("image/")) { @@ -1159,6 +1172,10 @@ public class FileLoader extends BaseController { } public static String getAttachFileName(TLObject attach, String ext) { + return getAttachFileName(attach, null, ext); + } + + public static String getAttachFileName(TLObject attach, String size, String ext) { if (attach instanceof TLRPC.Document) { TLRPC.Document document = (TLRPC.Document) attach; String docExt; @@ -1204,6 +1221,31 @@ public class FileLoader extends BaseController { } TLRPC.FileLocation location = (TLRPC.FileLocation) attach; return location.volume_id + "_" + location.local_id + "." + (ext != null ? ext : "jpg"); + } else if (attach instanceof TLRPC.UserProfilePhoto) { + if (size == null) { + size = "s"; + } + TLRPC.UserProfilePhoto location = (TLRPC.UserProfilePhoto) attach; + if (location.photo_small != null) { + if ("s".equals(size)) { + return getAttachFileName(location.photo_small, ext); + } else { + return getAttachFileName(location.photo_big, ext); + } + } else { + return location.photo_id + "_" + size + "." + (ext != null ? 
ext : "jpg"); + } + } else if (attach instanceof TLRPC.ChatPhoto) { + TLRPC.ChatPhoto location = (TLRPC.ChatPhoto) attach; + if (location.photo_small != null) { + if ("s".equals(size)) { + return getAttachFileName(location.photo_small, ext); + } else { + return getAttachFileName(location.photo_big, ext); + } + } else { + return location.photo_id + "_" + size + "." + (ext != null ? ext : "jpg"); + } } return ""; } @@ -1284,6 +1326,28 @@ public class FileLoader extends BaseController { return true; } + public static boolean isSamePhoto(TLObject photo1, TLObject photo2) { + if (photo1 == null && photo2 != null || photo1 != null && photo2 == null) { + return false; + } + if (photo1 == null && photo2 == null) { + return true; + } + if (photo1.getClass() != photo2.getClass()) { + return false; + } + if (photo1 instanceof TLRPC.UserProfilePhoto) { + TLRPC.UserProfilePhoto p1 = (TLRPC.UserProfilePhoto) photo1; + TLRPC.UserProfilePhoto p2 = (TLRPC.UserProfilePhoto) photo2; + return p1.photo_id == p2.photo_id; + } else if (photo1 instanceof TLRPC.ChatPhoto) { + TLRPC.ChatPhoto p1 = (TLRPC.ChatPhoto) photo1; + TLRPC.ChatPhoto p2 = (TLRPC.ChatPhoto) photo2; + return p1.photo_id == p2.photo_id; + } + return false; + } + public static boolean isSamePhoto(TLRPC.FileLocation location, TLRPC.Photo photo) { if (location == null || !(photo instanceof TLRPC.TL_photo)) { return false; @@ -1294,6 +1358,20 @@ public class FileLoader extends BaseController { return true; } } + if (-location.volume_id == photo.id) { + return true; + } return false; } + + public static long getPhotoId(TLObject object) { + if (object instanceof TLRPC.Photo) { + return ((TLRPC.Photo) object).id; + } else if (object instanceof TLRPC.ChatPhoto) { + return ((TLRPC.ChatPhoto) object).photo_id; + } else if (object instanceof TLRPC.UserProfilePhoto) { + return ((TLRPC.UserProfilePhoto) object).photo_id; + } + return 0; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java 
b/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java index 4b74a055b..0803dc188 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/FileRefController.java @@ -8,6 +8,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; public class FileRefController extends BaseController { @@ -215,6 +216,9 @@ public class FileRefController extends BaseController { } else if (args[0] instanceof TLRPC.TL_inputPhotoFileLocation) { location = (TLRPC.TL_inputPhotoFileLocation) args[0]; locationKey = "photo_" + location.id; + } else if (args[0] instanceof TLRPC.TL_inputPeerPhotoFileLocation) { + location = (TLRPC.TL_inputPeerPhotoFileLocation) args[0]; + locationKey = "avatar_" + location.id; } else { sendErrorToObject(args, 0); return; @@ -268,13 +272,15 @@ public class FileRefController extends BaseController { cacheKey = "recent"; } else if ("fav".equals(string)) { cacheKey = "fav"; + } else if ("update".equals(string)) { + cacheKey = "update"; } } cleanupCache(); CachedResult cachedResult = getCachedResponse(cacheKey); if (cachedResult != null) { - if (!onRequestComplete(locationKey, parentKey, cachedResult.response, false)) { + if (!onRequestComplete(locationKey, parentKey, cachedResult.response, false, true)) { responseCache.remove(locationKey); } else { return; @@ -282,7 +288,7 @@ public class FileRefController extends BaseController { } else { cachedResult = getCachedResponse(parentKey); if (cachedResult != null) { - if (!onRequestComplete(locationKey, parentKey, cachedResult.response, false)) { + if (!onRequestComplete(locationKey, parentKey, cachedResult.response, false, true)) { responseCache.remove(parentKey); } else { return; @@ -296,7 +302,7 @@ public class FileRefController extends BaseController { private void broadcastWaitersData(ArrayList waiters, 
TLObject response) { for (int a = 0, N = waiters.size(); a < N; a++) { Waiter waiter = waiters.get(a); - onRequestComplete(waiter.locationKey, waiter.parentKey, response, a == N - 1); + onRequestComplete(waiter.locationKey, waiter.parentKey, response, a == N - 1, false); } waiters.clear(); } @@ -309,16 +315,16 @@ public class FileRefController extends BaseController { TLRPC.TL_messages_getScheduledMessages req = new TLRPC.TL_messages_getScheduledMessages(); req.peer = getMessagesController().getInputPeer((int) messageObject.getDialogId()); req.id.add(messageObject.getRealId()); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (channelId != 0) { TLRPC.TL_channels_getMessages req = new TLRPC.TL_channels_getMessages(); req.channel = getMessagesController().getInputChannel(channelId); req.id.add(messageObject.getRealId()); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else { TLRPC.TL_messages_getMessages req = new TLRPC.TL_messages_getMessages(); req.id.add(messageObject.getRealId()); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } } else if (parentObject instanceof TLRPC.TL_wallPaper) { TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) parentObject; @@ -327,7 +333,7 @@ public class FileRefController extends BaseController { inputWallPaper.id = wallPaper.id; inputWallPaper.access_hash = wallPaper.access_hash; req.wallpaper = inputWallPaper; - 
getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (parentObject instanceof TLRPC.TL_theme) { TLRPC.TL_theme theme = (TLRPC.TL_theme) parentObject; TLRPC.TL_account_getTheme req = new TLRPC.TL_account_getTheme(); @@ -336,28 +342,28 @@ public class FileRefController extends BaseController { inputTheme.access_hash = theme.access_hash; req.theme = inputTheme; req.format = "android"; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (parentObject instanceof TLRPC.WebPage) { TLRPC.WebPage webPage = (TLRPC.WebPage) parentObject; TLRPC.TL_messages_getWebPage req = new TLRPC.TL_messages_getWebPage(); req.url = webPage.url; req.hash = 0; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (parentObject instanceof TLRPC.User) { TLRPC.User user = (TLRPC.User) parentObject; TLRPC.TL_users_getUsers req = new TLRPC.TL_users_getUsers(); req.id.add(getMessagesController().getInputUser(user)); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (parentObject instanceof TLRPC.Chat) { TLRPC.Chat chat = (TLRPC.Chat) parentObject; if (chat instanceof TLRPC.TL_chat) { TLRPC.TL_messages_getChats req = new TLRPC.TL_messages_getChats(); 
req.id.add(chat.id); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (chat instanceof TLRPC.TL_channel) { TLRPC.TL_channels_getChannels req = new TLRPC.TL_channels_getChannels(); req.id.add(MessagesController.getInputChannel(chat)); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } } else if (parentObject instanceof String) { String string = (String) parentObject; @@ -385,6 +391,17 @@ public class FileRefController extends BaseController { getConnectionsManager().sendRequest(req, (response, error) -> broadcastWaitersData(favStickersWaiter, response)); } favStickersWaiter.add(new Waiter(locationKey, parentKey)); + } else if ("update".equals(string)) { + TLRPC.TL_help_getAppUpdate req = new TLRPC.TL_help_getAppUpdate(); + try { + req.source = ApplicationLoader.applicationContext.getPackageManager().getInstallerPackageName(ApplicationLoader.applicationContext.getPackageName()); + } catch (Exception ignore) { + + } + if (req.source == null) { + req.source = ""; + } + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (string.startsWith("avatar_")) { int id = Utilities.parseInt(string); if (id > 0) { @@ -393,7 +410,7 @@ public class FileRefController extends BaseController { req.offset = 0; req.max_id = 0; req.user_id = getMessagesController().getInputUser(id); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> 
onRequestComplete(locationKey, parentKey, response, true, false)); } else { TLRPC.TL_messages_search req = new TLRPC.TL_messages_search(); req.filter = new TLRPC.TL_inputMessagesFilterChatPhotos(); @@ -401,7 +418,7 @@ public class FileRefController extends BaseController { req.offset_id = 0; req.q = ""; req.peer = getMessagesController().getInputPeer(id); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } } else if (string.startsWith("sent_")) { String[] params = string.split("_"); @@ -411,11 +428,11 @@ public class FileRefController extends BaseController { TLRPC.TL_channels_getMessages req = new TLRPC.TL_channels_getMessages(); req.channel = getMessagesController().getInputChannel(channelId); req.id.add(Utilities.parseInt(params[2])); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, false, false)); } else { TLRPC.TL_messages_getMessages req = new TLRPC.TL_messages_getMessages(); req.id.add(Utilities.parseInt(params[2])); - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, false)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, false, false)); } } else { sendErrorToObject(args, 0); @@ -429,25 +446,29 @@ public class FileRefController extends BaseController { req.stickerset = new TLRPC.TL_inputStickerSetID(); req.stickerset.id = stickerSet.set.id; req.stickerset.access_hash = stickerSet.set.access_hash; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + 
getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (parentObject instanceof TLRPC.StickerSetCovered) { TLRPC.StickerSetCovered stickerSet = (TLRPC.StickerSetCovered) parentObject; TLRPC.TL_messages_getStickerSet req = new TLRPC.TL_messages_getStickerSet(); req.stickerset = new TLRPC.TL_inputStickerSetID(); req.stickerset.id = stickerSet.set.id; req.stickerset.access_hash = stickerSet.set.access_hash; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else if (parentObject instanceof TLRPC.InputStickerSet) { TLRPC.TL_messages_getStickerSet req = new TLRPC.TL_messages_getStickerSet(); req.stickerset = (TLRPC.InputStickerSet) parentObject; - getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true)); + getConnectionsManager().sendRequest(req, (response, error) -> onRequestComplete(locationKey, parentKey, response, true, false)); } else { sendErrorToObject(args, 0); } } + private boolean isSameReference(byte[] oldRef, byte[] newRef) { + return Arrays.equals(oldRef, newRef); + } + @SuppressWarnings("unchecked") - private void onUpdateObjectReference(Requester requester, byte[] file_reference, TLRPC.InputFileLocation locationReplacement) { + private boolean onUpdateObjectReference(Requester requester, byte[] file_reference, TLRPC.InputFileLocation locationReplacement, boolean fromCache) { if (BuildVars.DEBUG_VERSION) { FileLog.d("fileref updated for " + requester.args[0] + " " + requester.locationKey); } @@ -455,21 +476,27 @@ public class FileRefController extends BaseController { TLRPC.TL_messages_sendMultiMedia multiMedia = (TLRPC.TL_messages_sendMultiMedia) requester.args[1]; Object[] objects = 
multiMediaCache.get(multiMedia); if (objects == null) { - return; + return true; } TLRPC.TL_inputSingleMedia req = (TLRPC.TL_inputSingleMedia) requester.args[0]; if (req.media instanceof TLRPC.TL_inputMediaDocument) { TLRPC.TL_inputMediaDocument mediaDocument = (TLRPC.TL_inputMediaDocument) req.media; + if (fromCache && isSameReference(mediaDocument.id.file_reference, file_reference)) { + return false; + } mediaDocument.id.file_reference = file_reference; } else if (req.media instanceof TLRPC.TL_inputMediaPhoto) { TLRPC.TL_inputMediaPhoto mediaPhoto = (TLRPC.TL_inputMediaPhoto) req.media; + if (fromCache && isSameReference(mediaPhoto.id.file_reference, file_reference)) { + return false; + } mediaPhoto.id.file_reference = file_reference; } int index = multiMedia.multi_media.indexOf(req); if (index < 0) { - return; + return true; } ArrayList parentObjects = (ArrayList) objects[3]; parentObjects.set(index, null); @@ -488,9 +515,15 @@ public class FileRefController extends BaseController { TLRPC.TL_messages_sendMedia req = (TLRPC.TL_messages_sendMedia) requester.args[0]; if (req.media instanceof TLRPC.TL_inputMediaDocument) { TLRPC.TL_inputMediaDocument mediaDocument = (TLRPC.TL_inputMediaDocument) req.media; + if (fromCache && isSameReference(mediaDocument.id.file_reference, file_reference)) { + return false; + } mediaDocument.id.file_reference = file_reference; } else if (req.media instanceof TLRPC.TL_inputMediaPhoto) { TLRPC.TL_inputMediaPhoto mediaPhoto = (TLRPC.TL_inputMediaPhoto) req.media; + if (fromCache && isSameReference(mediaPhoto.id.file_reference, file_reference)) { + return false; + } mediaPhoto.id.file_reference = file_reference; } AndroidUtilities.runOnUIThread(() -> getSendMessagesHelper().performSendMessageRequest((TLObject) requester.args[0], (MessageObject) requester.args[1], (String) requester.args[2], (SendMessagesHelper.DelayedMessage) requester.args[3], (Boolean) requester.args[4], (SendMessagesHelper.DelayedMessage) requester.args[5], null, 
null, (Boolean) requester.args[6])); @@ -498,26 +531,41 @@ public class FileRefController extends BaseController { TLRPC.TL_messages_editMessage req = (TLRPC.TL_messages_editMessage) requester.args[0]; if (req.media instanceof TLRPC.TL_inputMediaDocument) { TLRPC.TL_inputMediaDocument mediaDocument = (TLRPC.TL_inputMediaDocument) req.media; + if (fromCache && isSameReference(mediaDocument.id.file_reference, file_reference)) { + return false; + } mediaDocument.id.file_reference = file_reference; } else if (req.media instanceof TLRPC.TL_inputMediaPhoto) { TLRPC.TL_inputMediaPhoto mediaPhoto = (TLRPC.TL_inputMediaPhoto) req.media; + if (fromCache && isSameReference(mediaPhoto.id.file_reference, file_reference)) { + return false; + } mediaPhoto.id.file_reference = file_reference; } AndroidUtilities.runOnUIThread(() -> getSendMessagesHelper().performSendMessageRequest((TLObject) requester.args[0], (MessageObject) requester.args[1], (String) requester.args[2], (SendMessagesHelper.DelayedMessage) requester.args[3], (Boolean) requester.args[4], (SendMessagesHelper.DelayedMessage) requester.args[5], null, null, (Boolean) requester.args[6])); } else if (requester.args[0] instanceof TLRPC.TL_messages_saveGif) { TLRPC.TL_messages_saveGif req = (TLRPC.TL_messages_saveGif) requester.args[0]; + if (fromCache && isSameReference(req.id.file_reference, file_reference)) { + return false; + } req.id.file_reference = file_reference; getConnectionsManager().sendRequest(req, (response, error) -> { }); } else if (requester.args[0] instanceof TLRPC.TL_messages_saveRecentSticker) { TLRPC.TL_messages_saveRecentSticker req = (TLRPC.TL_messages_saveRecentSticker) requester.args[0]; + if (fromCache && isSameReference(req.id.file_reference, file_reference)) { + return false; + } req.id.file_reference = file_reference; getConnectionsManager().sendRequest(req, (response, error) -> { }); } else if (requester.args[0] instanceof TLRPC.TL_messages_faveSticker) { TLRPC.TL_messages_faveSticker req = 
(TLRPC.TL_messages_faveSticker) requester.args[0]; + if (fromCache && isSameReference(req.id.file_reference, file_reference)) { + return false; + } req.id.file_reference = file_reference; getConnectionsManager().sendRequest(req, (response, error) -> { @@ -526,22 +574,35 @@ public class FileRefController extends BaseController { TLRPC.TL_messages_getAttachedStickers req = (TLRPC.TL_messages_getAttachedStickers) requester.args[0]; if (req.media instanceof TLRPC.TL_inputStickeredMediaDocument) { TLRPC.TL_inputStickeredMediaDocument mediaDocument = (TLRPC.TL_inputStickeredMediaDocument) req.media; + if (fromCache && isSameReference(mediaDocument.id.file_reference, file_reference)) { + return false; + } mediaDocument.id.file_reference = file_reference; } else if (req.media instanceof TLRPC.TL_inputStickeredMediaPhoto) { TLRPC.TL_inputStickeredMediaPhoto mediaPhoto = (TLRPC.TL_inputStickeredMediaPhoto) req.media; + if (fromCache && isSameReference(mediaPhoto.id.file_reference, file_reference)) { + return false; + } mediaPhoto.id.file_reference = file_reference; } getConnectionsManager().sendRequest(req, (RequestDelegate) requester.args[1]); } else if (requester.args[1] instanceof FileLoadOperation) { FileLoadOperation fileLoadOperation = (FileLoadOperation) requester.args[1]; if (locationReplacement != null) { + if (fromCache && isSameReference(fileLoadOperation.location.file_reference, locationReplacement.file_reference)) { + return false; + } fileLoadOperation.location = locationReplacement; } else { + if (fromCache && isSameReference(requester.location.file_reference, file_reference)) { + return false; + } requester.location.file_reference = file_reference; } fileLoadOperation.requestingReference = false; fileLoadOperation.startDownloadRequest(); } + return true; } @SuppressWarnings("unchecked") @@ -587,7 +648,7 @@ public class FileRefController extends BaseController { } } - private boolean onRequestComplete(String locationKey, String parentKey, TLObject response, 
boolean cache) { + private boolean onRequestComplete(String locationKey, String parentKey, TLObject response, boolean cache, boolean fromCache) { boolean found = false; String cacheKey = parentKey; if (response instanceof TLRPC.TL_account_wallPapers) { @@ -607,7 +668,7 @@ public class FileRefController extends BaseController { if (requester.completed) { continue; } - if (onRequestComplete(requester.locationKey, null, response, cache && !found)) { + if (onRequestComplete(requester.locationKey, null, response, cache && !found, fromCache)) { found = true; } } @@ -630,7 +691,7 @@ public class FileRefController extends BaseController { if (requester.completed) { continue; } - if (requester.location instanceof TLRPC.TL_inputFileLocation) { + if (requester.location instanceof TLRPC.TL_inputFileLocation || requester.location instanceof TLRPC.TL_inputPeerPhotoFileLocation) { locationReplacement = new TLRPC.InputFileLocation[1]; needReplacement = new boolean[1]; } @@ -670,6 +731,12 @@ public class FileRefController extends BaseController { } } } + } else if (response instanceof TLRPC.TL_help_appUpdate) { + TLRPC.TL_help_appUpdate appUpdate = (TLRPC.TL_help_appUpdate) response; + result = getFileReference(appUpdate.document, requester.location, needReplacement, locationReplacement); + if (result == null) { + result = getFileReference(appUpdate.sticker, requester.location, needReplacement, locationReplacement); + } } else if (response instanceof TLRPC.WebPage) { result = getFileReference((TLRPC.WebPage) response, requester.location, needReplacement, locationReplacement); } else if (response instanceof TLRPC.TL_account_wallPapers) { @@ -799,8 +866,9 @@ public class FileRefController extends BaseController { } } if (result != null) { - onUpdateObjectReference(requester, result, locationReplacement != null ? locationReplacement[0] : null); - found = true; + if (onUpdateObjectReference(requester, result, locationReplacement != null ? 
locationReplacement[0] : null, fromCache)) { + found = true; + } } else { sendErrorToObject(requester.args, 1); } @@ -887,30 +955,33 @@ public class FileRefController extends BaseController { private boolean getPeerReferenceReplacement(TLRPC.User user, TLRPC.Chat chat, boolean big, TLRPC.InputFileLocation location, TLRPC.InputFileLocation[] replacement, boolean[] needReplacement) { if (needReplacement != null && needReplacement[0]) { - replacement[0] = new TLRPC.TL_inputPeerPhotoFileLocation(); - replacement[0].id = location.volume_id; - replacement[0].volume_id = location.volume_id; - replacement[0].local_id = location.local_id; - replacement[0].big = big; + TLRPC.TL_inputPeerPhotoFileLocation inputPeerPhotoFileLocation = new TLRPC.TL_inputPeerPhotoFileLocation(); + inputPeerPhotoFileLocation.id = location.volume_id; + inputPeerPhotoFileLocation.volume_id = location.volume_id; + inputPeerPhotoFileLocation.local_id = location.local_id; + inputPeerPhotoFileLocation.big = big; TLRPC.InputPeer peer; if (user != null) { TLRPC.TL_inputPeerUser inputPeerUser = new TLRPC.TL_inputPeerUser(); inputPeerUser.user_id = user.id; inputPeerUser.access_hash = user.access_hash; + inputPeerPhotoFileLocation.photo_id = user.photo.photo_id; peer = inputPeerUser; } else { if (ChatObject.isChannel(chat)) { - TLRPC.TL_inputPeerChat inputPeerChat = new TLRPC.TL_inputPeerChat(); - inputPeerChat.chat_id = chat.id; - peer = inputPeerChat; - } else { TLRPC.TL_inputPeerChannel inputPeerChannel = new TLRPC.TL_inputPeerChannel(); inputPeerChannel.channel_id = chat.id; inputPeerChannel.access_hash = chat.access_hash; peer = inputPeerChannel; + } else { + TLRPC.TL_inputPeerChat inputPeerChat = new TLRPC.TL_inputPeerChat(); + inputPeerChat.chat_id = chat.id; + peer = inputPeerChat; } + inputPeerPhotoFileLocation.photo_id = chat.photo.photo_id; } - replacement[0].peer = peer; + inputPeerPhotoFileLocation.peer = peer; + replacement[0] = inputPeerPhotoFileLocation; return true; } return false; @@ 
-934,20 +1005,28 @@ public class FileRefController extends BaseController { } private byte[] getFileReference(TLRPC.Chat chat, TLRPC.InputFileLocation location, boolean[] needReplacement, TLRPC.InputFileLocation[] replacement) { - if (chat == null || chat.photo == null || !(location instanceof TLRPC.TL_inputFileLocation)) { + if (chat == null || chat.photo == null || !(location instanceof TLRPC.TL_inputFileLocation) && !(location instanceof TLRPC.TL_inputPeerPhotoFileLocation)) { return null; } - byte[] result = getFileReference(chat.photo.photo_small, location, needReplacement); - if (getPeerReferenceReplacement(null, chat, false, location, replacement, needReplacement)) { - return new byte[0]; - } - if (result == null) { - result = getFileReference(chat.photo.photo_big, location, needReplacement); - if (getPeerReferenceReplacement(null, chat, true, location, replacement, needReplacement)) { + if (location instanceof TLRPC.TL_inputPeerPhotoFileLocation) { + needReplacement[0] = true; + if (getPeerReferenceReplacement(null, chat, false, location, replacement, needReplacement)) { return new byte[0]; } + return null; + } else { + byte[] result = getFileReference(chat.photo.photo_small, location, needReplacement); + if (getPeerReferenceReplacement(null, chat, false, location, replacement, needReplacement)) { + return new byte[0]; + } + if (result == null) { + result = getFileReference(chat.photo.photo_big, location, needReplacement); + if (getPeerReferenceReplacement(null, chat, true, location, replacement, needReplacement)) { + return new byte[0]; + } + } + return result; } - return result; } private byte[] getFileReference(TLRPC.Photo photo, TLRPC.InputFileLocation location, boolean[] needReplacement, TLRPC.InputFileLocation[] replacement) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/GoogleVoiceClientService.java b/TMessagesProj/src/main/java/org/telegram/messenger/GoogleVoiceClientService.java index b31388d65..45039401b 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/messenger/GoogleVoiceClientService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/GoogleVoiceClientService.java @@ -44,7 +44,7 @@ public class GoogleVoiceClientService extends SearchActionVerificationClientServ } if (user != null) { ContactsController.getInstance(currentAccount).markAsContacted(contactUri); - SendMessagesHelper.getInstance(currentAccount).sendMessage(text, user.id, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(text, user.id, null, null, null, true, null, null, null, true, 0, null); } } } catch (Exception e) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java index c14c62425..a88d08313 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java @@ -8,6 +8,7 @@ package org.telegram.messenger; +import android.annotation.TargetApi; import android.app.ActivityManager; import android.content.BroadcastReceiver; import android.content.Context; @@ -42,12 +43,14 @@ import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.SlotsDrawable; import org.telegram.ui.Components.ThemePreviewDrawable; +import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.InputStream; +import java.io.InputStreamReader; import java.io.RandomAccessFile; import java.net.HttpURLConnection; import java.net.SocketException; @@ -57,6 +60,8 @@ import java.net.URLConnection; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import 
java.util.HashMap; @@ -65,6 +70,8 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Stream; +import java.util.zip.GZIPInputStream; public class ImageLoader { @@ -148,7 +155,7 @@ public class ImageLoader { lastProgressTime = currentTime; Utilities.stageQueue.postRunnable(() -> { fileProgresses.put(url, new long[]{uploadedSize, totalSize}); - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.FileLoadProgressChanged, url, uploadedSize, totalSize)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileLoadProgressChanged, url, uploadedSize, totalSize)); }); } } @@ -446,7 +453,7 @@ public class ImageLoader { lastProgressTime = currentTime; Utilities.stageQueue.postRunnable(() -> { fileProgresses.put(cacheImage.url, new long[]{uploadedSize, totalSize}); - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(cacheImage.currentAccount).postNotificationName(NotificationCenter.FileLoadProgressChanged, cacheImage.url, uploadedSize, totalSize)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(cacheImage.currentAccount).postNotificationName(NotificationCenter.fileLoadProgressChanged, cacheImage.url, uploadedSize, totalSize)); }); } } @@ -612,9 +619,9 @@ public class ImageLoader { fileProgresses.remove(cacheImage.url); AndroidUtilities.runOnUIThread(() -> { if (result) { - NotificationCenter.getInstance(cacheImage.currentAccount).postNotificationName(NotificationCenter.fileDidLoad, cacheImage.url, cacheImage.finalFilePath); + NotificationCenter.getInstance(cacheImage.currentAccount).postNotificationName(NotificationCenter.fileLoaded, cacheImage.url, cacheImage.finalFilePath); } else { - NotificationCenter.getInstance(cacheImage.currentAccount).postNotificationName(NotificationCenter.fileDidFailToLoad, 
cacheImage.url, 2); + NotificationCenter.getInstance(cacheImage.currentAccount).postNotificationName(NotificationCenter.fileLoadFailed, cacheImage.url, 2); } }); }); @@ -626,7 +633,7 @@ public class ImageLoader { imageLoadQueue.postRunnable(() -> runHttpTasks(true)); Utilities.stageQueue.postRunnable(() -> { fileProgresses.remove(cacheImage.url); - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(cacheImage.currentAccount).postNotificationName(NotificationCenter.fileDidFailToLoad, cacheImage.url, 1)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(cacheImage.currentAccount).postNotificationName(NotificationCenter.fileLoadFailed, cacheImage.url, 1)); }); } } @@ -729,6 +736,22 @@ public class ImageLoader { } } + public static String decompressGzip(File file) { + final StringBuilder outStr = new StringBuilder(); + if (file == null) { + return ""; + } + try (GZIPInputStream gis = new GZIPInputStream(new FileInputStream(file)); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(gis, "UTF-8"))) { + String line; + while ((line = bufferedReader.readLine()) != null) { + outStr.append(line); + } + return outStr.toString(); + } catch (Exception ignore) { + return ""; + } + } + private class CacheOutTask implements Runnable { private Thread runningThread; private final Object sync = new Object(); @@ -836,7 +859,37 @@ public class ImageLoader { lottieDrawable = new RLottieDrawable(diceEmoji, w, h); } } else { - lottieDrawable = new RLottieDrawable(cacheImage.finalFilePath, w, h, precache, limitFps, colors); + File f = cacheImage.finalFilePath; + RandomAccessFile randomAccessFile = null; + boolean compressed = false; + try { + randomAccessFile = new RandomAccessFile(cacheImage.finalFilePath, "r"); + byte[] bytes; + if (cacheImage.type == ImageReceiver.TYPE_THUMB) { + bytes = headerThumb; + } else { + bytes = header; + } + randomAccessFile.readFully(bytes, 0, 2); + if (bytes[0] == 0x1f && bytes[1] == (byte) 0x8b) { + 
compressed = true; + } + } catch (Exception e) { + FileLog.e(e); + } finally { + if (randomAccessFile != null) { + try { + randomAccessFile.close(); + } catch (Exception e) { + FileLog.e(e); + } + } + } + if (compressed) { + lottieDrawable = new RLottieDrawable(cacheImage.finalFilePath, decompressGzip(cacheImage.finalFilePath), w, h, precache, limitFps, colors); + } else { + lottieDrawable = new RLottieDrawable(cacheImage.finalFilePath, w, h, precache, limitFps, colors); + } } lottieDrawable.setAutoRepeat(autoRepeat); onPostExecute(lottieDrawable); @@ -909,7 +962,6 @@ public class ImageLoader { if (str.startsWith("riff") && str.endsWith("webp")) { useNativeWebpLoader = true; } - randomAccessFile.close(); } catch (Exception e) { FileLog.e(e); } finally { @@ -1319,6 +1371,11 @@ public class ImageLoader { b.recycle(); } } + if (blurType != 0 && (bitmapH > 100 || bitmapW > 100)) { + image = Bitmaps.createScaledBitmap(image, 80, 80, false); + bitmapH = 80; + bitmapW = 80; + } if (blurType != 0 && bitmapH < 100 && bitmapW < 100) { if (image.getConfig() == Bitmap.Config.ARGB_8888) { Utilities.blurBitmap(image, 3, opts.inPurgeable ? 
0 : 1, image.getWidth(), image.getHeight(), image.getRowBytes()); @@ -1680,14 +1737,14 @@ public class ImageLoader { if (operation.lastProgressUpdateTime == 0 || operation.lastProgressUpdateTime < currentTime - 100 || uploadedSize == totalSize) { operation.lastProgressUpdateTime = currentTime; - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.FileUploadProgressChanged, location, uploadedSize, totalSize, isEncrypted)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileUploadProgressChanged, location, uploadedSize, totalSize, isEncrypted)); } } @Override public void fileDidUploaded(final String location, final TLRPC.InputFile inputFile, final TLRPC.InputEncryptedFile inputEncryptedFile, final byte[] key, final byte[] iv, final long totalFileSize) { Utilities.stageQueue.postRunnable(() -> { - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.FileDidUpload, location, inputFile, inputEncryptedFile, key, iv, totalFileSize)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileUploaded, location, inputFile, inputEncryptedFile, key, iv, totalFileSize)); fileProgresses.remove(location); }); } @@ -1695,7 +1752,7 @@ public class ImageLoader { @Override public void fileDidFailedUpload(final String location, final boolean isEncrypted) { Utilities.stageQueue.postRunnable(() -> { - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.FileDidFailUpload, location, isEncrypted)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileUploadFailed, location, isEncrypted)); fileProgresses.remove(location); }); } @@ -1709,7 +1766,7 @@ 
public class ImageLoader { AndroidUtilities.addMediaToGallery(finalFile.toString()); } } - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileDidLoad, location, finalFile); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileLoaded, location, finalFile); ImageLoader.this.fileDidLoaded(location, finalFile, type); }); } @@ -1719,7 +1776,7 @@ public class ImageLoader { fileProgresses.remove(location); AndroidUtilities.runOnUIThread(() -> { ImageLoader.this.fileDidFailedLoad(location, canceled); - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileDidFailToLoad, location, canceled); + NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileLoadFailed, location, canceled); }); } @@ -1729,7 +1786,7 @@ public class ImageLoader { long currentTime = SystemClock.elapsedRealtime(); if (operation.lastProgressUpdateTime == 0 || operation.lastProgressUpdateTime < currentTime - 500 || uploadedSize == 0) { operation.lastProgressUpdateTime = currentTime; - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.FileLoadProgressChanged, location, uploadedSize, totalSize)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.fileLoadProgressChanged, location, uploadedSize, totalSize)); } } }); @@ -1792,6 +1849,29 @@ public class ImageLoader { testWebFile.remove(url); } + @TargetApi(26) + private static void moveDirectory(File source, File target) { + if (!target.exists() && !target.mkdir()) { + return; + } + try (Stream files = Files.list(source.toPath())) { + files.forEach(path -> { + File dest = new File(target, path.getFileName().toString()); + if (Files.isDirectory(path)) { + moveDirectory(path.toFile(), dest); + } else { + try { + Files.move(path, dest.toPath()); + } catch (Exception e) 
{ + FileLog.e(e); + } + } + }); + } catch (Exception e) { + FileLog.e(e); + } + } + public SparseArray createMediaPaths() { SparseArray mediaDirs = new SparseArray<>(); File cachePath = AndroidUtilities.getCacheDir(); @@ -1824,6 +1904,25 @@ public class ImageLoader { } telegramPath = new File(path, "Telegram"); telegramPath.mkdirs(); + /*int version = 0; + try { + PackageManager pm = ApplicationLoader.applicationContext.getPackageManager(); + ApplicationInfo applicationInfo = pm.getApplicationInfo(ApplicationLoader.applicationContext.getPackageName(), 0); + if (applicationInfo != null) { + version = applicationInfo.targetSdkVersion; + } + } catch (Throwable ignore) { + + } + File newPath = ApplicationLoader.applicationContext.getExternalFilesDir(null); + telegramPath = new File(newPath, "Telegram"); //TODO + if (Build.VERSION.SDK_INT >= 29 && version < 30) { + File oldPath = new File(path, "Telegram"); + long moveStart = SystemClock.elapsedRealtime(); + moveDirectory(oldPath, telegramPath); + long dt = SystemClock.elapsedRealtime() - moveStart; + FileLog.d("move time = " + dt); + }*/ if (Build.VERSION.SDK_INT >= 19 && !telegramPath.isDirectory()) { ArrayList dirs = AndroidUtilities.getDataDirs(); if (dirs != null) { @@ -2352,6 +2451,8 @@ public class ImageLoader { String trueExt = getHttpUrlExtension(location, "jpg"); if (trueExt.equals("mp4") || trueExt.equals("gif")) { img.imageType = FileLoader.IMAGE_TYPE_ANIMATION; + } else if ("tgs".equals(ext)) { + img.imageType = FileLoader.IMAGE_TYPE_LOTTIE; } } } @@ -3208,7 +3309,7 @@ public class ImageLoader { location.local_id = SharedConfig.getLastLocalId(); location.file_reference = new byte[0]; - photoSize = new TLRPC.TL_photoSize(); + photoSize = new TLRPC.TL_photoSize_layer127(); photoSize.location = location; photoSize.w = scaledBitmap.getWidth(); photoSize.h = scaledBitmap.getHeight(); @@ -3381,7 +3482,7 @@ public class ImageLoader { FileLog.e(e); } } - TLRPC.TL_photoSize newPhotoSize = new TLRPC.TL_photoSize(); + 
TLRPC.TL_photoSize newPhotoSize = new TLRPC.TL_photoSize_layer127(); newPhotoSize.w = photoSize.w; newPhotoSize.h = photoSize.h; newPhotoSize.location = photoSize.location; @@ -3464,7 +3565,7 @@ public class ImageLoader { if (photoSize != null && photoSize.bytes != null && photoSize.bytes.length != 0) { File file = FileLoader.getPathToAttach(photoSize, true); - TLRPC.TL_photoSize newPhotoSize = new TLRPC.TL_photoSize(); + TLRPC.TL_photoSize newPhotoSize = new TLRPC.TL_photoSize_layer127(); newPhotoSize.w = photoSize.w; newPhotoSize.h = photoSize.h; newPhotoSize.location = photoSize.location; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java index e89e5593c..a5f82259c 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLocation.java @@ -28,6 +28,8 @@ public class ImageLocation { public TLRPC.InputStickerSet stickerSet; public int imageType; + public int thumbVersion; + public int currentSize; public long photoId; @@ -142,7 +144,9 @@ public class ImageLocation { } else { dc_id = fileLocation.dc_id; } - return getForPhoto(fileLocation, 0, null, null, inputPeer, type, dc_id, null, null); + ImageLocation location = getForPhoto(fileLocation, 0, null, null, inputPeer, type, dc_id, null, null); + location.photoId = user.photo.photo_id; + return location; } public static ImageLocation getForChat(TLRPC.Chat chat, int type) { @@ -181,10 +185,12 @@ public class ImageLocation { } else { dc_id = fileLocation.dc_id; } - return getForPhoto(fileLocation, 0, null, null, inputPeer, type, dc_id, null, null); + ImageLocation location = getForPhoto(fileLocation, 0, null, null, inputPeer, type, dc_id, null, null); + location.photoId = chat.photo.photo_id; + return location; } - public static ImageLocation getForSticker(TLRPC.PhotoSize photoSize, TLRPC.Document sticker) { + public static 
ImageLocation getForSticker(TLRPC.PhotoSize photoSize, TLRPC.Document sticker, int thumbVersion) { if (photoSize instanceof TLRPC.TL_photoStrippedSize || photoSize instanceof TLRPC.TL_photoPathSize) { ImageLocation imageLocation = new ImageLocation(); imageLocation.photoSize = photoSize; @@ -200,6 +206,7 @@ public class ImageLocation { if (MessageObject.isAnimatedStickerDocument(sticker, true)) { imageLocation.imageType = FileLoader.IMAGE_TYPE_LOTTIE; } + imageLocation.thumbVersion = thumbVersion; return imageLocation; } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java index 4d5076b91..6a27b7f86 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java @@ -10,17 +10,21 @@ package org.telegram.messenger; import android.graphics.Bitmap; import android.graphics.BitmapShader; +import android.graphics.BlendMode; import android.graphics.Canvas; import android.graphics.ColorFilter; +import android.graphics.ComposeShader; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Path; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; +import android.graphics.PorterDuffXfermode; import android.graphics.RectF; import android.graphics.Shader; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; +import android.os.Build; import android.view.View; import org.telegram.tgnet.TLObject; @@ -171,6 +175,16 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg private SetImageBackup setImageBackup; + private Object blendMode; + + private Bitmap gradientBitmap; + private BitmapShader gradientShader; + private ComposeShader composeShader; + private Bitmap legacyBitmap; + private BitmapShader legacyShader; + private Canvas legacyCanvas; + private Paint 
legacyPaint; + private ImageLocation strippedLocation; private ImageLocation currentImageLocation; private String currentImageFilter; @@ -385,8 +399,15 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg mediaShader = null; currentImageDrawable = null; imageShader = null; + composeShader = null; thumbShader = null; crossfadeShader = null; + legacyShader = null; + legacyCanvas = null; + if (legacyBitmap != null) { + legacyBitmap.recycle(); + legacyBitmap = null; + } currentExt = ext; currentParentObject = null; @@ -536,8 +557,15 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg currentCacheType = cacheType; staticThumbDrawable = thumb; imageShader = null; + composeShader = null; thumbShader = null; mediaShader = null; + legacyShader = null; + legacyCanvas = null; + if (legacyBitmap != null) { + legacyBitmap.recycle(); + legacyBitmap = null; + } if (useRoundForThumb && staticThumbDrawable != null) { updateDrawableRadius(staticThumbDrawable); } @@ -668,7 +696,7 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg AnimatedFileDrawable fileDrawable = (AnimatedFileDrawable) bitmap; fileDrawable.setParentView(parentView); fileDrawable.setUseSharedQueue(useSharedAnimationQueue); - if (allowStartAnimation) { + if (allowStartAnimation && currentOpenedLayerFlags == 0) { fileDrawable.start(); } fileDrawable.setAllowDecodeSingleFrame(allowDecodeSingleFrame); @@ -693,6 +721,13 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg currentImageDrawable = null; currentImageKey = null; imageShader = null; + composeShader = null; + legacyShader = null; + legacyCanvas = null; + if (legacyBitmap != null) { + legacyBitmap.recycle(); + legacyBitmap = null; + } currentThumbLocation = null; currentThumbFilter = null; @@ -732,6 +767,27 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg mediaShader = shader; } else if (drawable == 
currentImageDrawable) { imageShader = shader; + if (gradientShader != null && drawable instanceof BitmapDrawable) { + if (Build.VERSION.SDK_INT >= 28) { + composeShader = new ComposeShader(gradientShader, imageShader, PorterDuff.Mode.DST_IN); + } else { + BitmapDrawable bitmapDrawable = (BitmapDrawable) drawable; + int w = bitmapDrawable.getBitmap().getWidth(); + int h = bitmapDrawable.getBitmap().getHeight(); + if (legacyBitmap == null || legacyBitmap.getWidth() != w || legacyBitmap.getHeight() != h) { + if (legacyBitmap != null) { + legacyBitmap.recycle(); + } + legacyBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); + legacyCanvas = new Canvas(legacyBitmap); + legacyShader = new BitmapShader(legacyBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); + if (legacyPaint == null) { + legacyPaint = new Paint(); + legacyPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_IN)); + } + } + } + } } } @@ -745,7 +801,7 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg } private void updateDrawableRadius(Drawable drawable) { - if (hasRoundRadius() && drawable instanceof BitmapDrawable) { + if ((hasRoundRadius() || gradientShader != null) && drawable instanceof BitmapDrawable) { if (drawable instanceof RLottieDrawable) { } else if (drawable instanceof AnimatedFileDrawable) { @@ -826,6 +882,10 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg if (lottieDrawable != null && allowStartLottieAnimation && (!lottieDrawable.isHeavyDrawable() || currentOpenedLayerFlags == 0)) { lottieDrawable.start(); } + AnimatedFileDrawable animatedFileDrawable = getAnimation(); + if (animatedFileDrawable != null && allowStartAnimation && currentOpenedLayerFlags == 0) { + animatedFileDrawable.stop(); + } if (NotificationCenter.getGlobalInstance().isAnimationInProgress()) { didReceivedNotification(NotificationCenter.stopAllHeavyOperations, currentAccount, 512); } @@ -868,6 +928,13 @@ public class ImageReceiver 
implements NotificationCenter.NotificationCenterDeleg } else { paint = bitmapDrawable.getPaint(); } + if (Build.VERSION.SDK_INT >= 29) { + if (blendMode != null && gradientShader == null) { + paint.setBlendMode((BlendMode) blendMode); + } else { + paint.setBlendMode(null); + } + } boolean hasFilter = paint != null && paint.getColorFilter() != null; if (hasFilter && isPressed == 0) { if (shader != null) { @@ -890,7 +957,7 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg } } } - if (colorFilter != null) { + if (colorFilter != null && gradientShader == null) { if (shader != null) { roundPaint.setColorFilter(colorFilter); } else { @@ -961,10 +1028,22 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg } } } else { - roundPaint.setShader(shader); + if (legacyCanvas != null) { + roundRect.set(0, 0, legacyBitmap.getWidth(), legacyBitmap.getHeight()); + legacyCanvas.drawBitmap(gradientBitmap, null, roundRect, null); + legacyCanvas.drawBitmap(bitmapDrawable.getBitmap(), null, roundRect, legacyPaint); + } + if (shader == imageShader && gradientShader != null) { + if (composeShader != null) { + roundPaint.setShader(composeShader); + } else { + roundPaint.setShader(legacyShader); + } + } else { + roundPaint.setShader(shader); + } float scale = 1.0f / Math.min(scaleW, scaleH); roundRect.set(imageX + sideClip, imageY + sideClip, imageX + imageW - sideClip, imageY + imageH - sideClip); - shaderMatrix.reset(); if (Math.abs(scaleW - scaleH) > 0.0005f) { if (bitmapW / scaleH > realImageW) { bitmapW /= scaleH; @@ -994,7 +1073,35 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg float postScale = (realImageW + AndroidUtilities.roundMessageInset * 2) / realImageW; shaderMatrix.postScale(postScale, postScale, drawRegion.centerX(), drawRegion.centerY()); } + if (legacyShader != null) { + legacyShader.setLocalMatrix(shaderMatrix); + } shader.setLocalMatrix(shaderMatrix); + + if 
(composeShader != null) { + int bitmapW2 = gradientBitmap.getWidth(); + int bitmapH2 = gradientBitmap.getHeight(); + float scaleW2 = imageW == 0 ? 1.0f : (bitmapW2 / realImageW); + float scaleH2 = imageH == 0 ? 1.0f : (bitmapH2 / realImageH); + if (Math.abs(scaleW2 - scaleH2) > 0.0005f) { + if (bitmapW2 / scaleH2 > realImageW) { + bitmapW2 /= scaleH2; + drawRegion.set(imageX - (bitmapW2 - realImageW) / 2, imageY, imageX + (bitmapW2 + realImageW) / 2, imageY + realImageH); + } else { + bitmapH2 /= scaleW2; + drawRegion.set(imageX, imageY - (bitmapH2 - realImageH) / 2, imageX + realImageW, imageY + (bitmapH2 + realImageH) / 2); + } + } else { + drawRegion.set(imageX, imageY, imageX + realImageW, imageY + realImageH); + } + scale = 1.0f / Math.min(imageW == 0 ? 1.0f : (bitmapW2 / realImageW), imageH == 0 ? 1.0f : (bitmapH2 / realImageH)); + + shaderMatrix.reset(); + shaderMatrix.setTranslate(drawRegion.left + sideClip, drawRegion.top + sideClip); + shaderMatrix.preScale(scale, scale); + gradientShader.setLocalMatrix(shaderMatrix); + } + roundPaint.setAlpha(alpha); if (isRoundRect) { @@ -1071,6 +1178,13 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg } if (isVisible) { try { + if (Build.VERSION.SDK_INT >= 29) { + if (blendMode != null) { + bitmapDrawable.getPaint().setBlendMode((BlendMode) blendMode); + } else { + bitmapDrawable.getPaint().setBlendMode(null); + } + } bitmapDrawable.setAlpha(alpha); bitmapDrawable.draw(canvas); } catch (Exception e) { @@ -1107,6 +1221,13 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg } if (isVisible) { try { + if (Build.VERSION.SDK_INT >= 29) { + if (blendMode != null) { + bitmapDrawable.getPaint().setBlendMode((BlendMode) blendMode); + } else { + bitmapDrawable.getPaint().setBlendMode(null); + } + } bitmapDrawable.setAlpha(alpha); bitmapDrawable.draw(canvas); } catch (Exception e) { @@ -1145,6 +1266,31 @@ public class ImageReceiver implements 
NotificationCenter.NotificationCenterDeleg } } + public void setBlendMode(Object mode) { + blendMode = mode; + invalidate(); + } + + public void setGradientBitmap(Bitmap bitmap) { + if (bitmap != null) { + if (gradientShader == null || gradientBitmap != bitmap) { + gradientShader = new BitmapShader(bitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); + updateDrawableRadius(currentImageDrawable); + } + isRoundRect = true; + } else { + gradientShader = null; + composeShader = null; + legacyShader = null; + legacyCanvas = null; + if (legacyBitmap != null) { + legacyBitmap.recycle(); + legacyBitmap = null; + } + } + gradientBitmap = bitmap; + } + private void onBitmapException(Drawable bitmapDrawable) { if (bitmapDrawable == currentMediaDrawable && currentMediaKey != null) { ImageLoader.getInstance().removeImage(currentMediaKey); @@ -1199,6 +1345,12 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg } public boolean draw(Canvas canvas) { + boolean result = false; + if (gradientBitmap != null && currentImageKey != null) { + canvas.save(); + canvas.clipRect(imageX, imageY, imageX + imageW, imageY + imageH); + canvas.drawColor(0xff000000); + } try { Drawable drawable = null; AnimatedFileDrawable animation = getAnimation(); @@ -1287,18 +1439,21 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg } checkAlphaAnimation(animationNotReady && crossfadeWithThumb); - return true; + result = true; } else if (staticThumbDrawable != null) { drawDrawable(canvas, staticThumbDrawable, (int) (overrideAlpha * 255), null, thumbOrientation); checkAlphaAnimation(animationNotReady); - return true; + result = true; } else { checkAlphaAnimation(animationNotReady); } } catch (Exception e) { FileLog.e(e); } - return false; + if (gradientBitmap != null && currentImageKey != null) { + canvas.restore(); + } + return result; } public void setManualAlphaAnimator(boolean value) { @@ -1978,7 +2133,7 @@ public class ImageReceiver 
implements NotificationCenter.NotificationCenterDeleg AnimatedFileDrawable fileDrawable = (AnimatedFileDrawable) drawable; fileDrawable.setParentView(parentView); fileDrawable.setUseSharedQueue(useSharedAnimationQueue); - if (allowStartAnimation) { + if (allowStartAnimation && currentOpenedLayerFlags == 0) { fileDrawable.start(); } fileDrawable.setAllowDecodeSingleFrame(allowDecodeSingleFrame); @@ -2115,6 +2270,10 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg if (lottieDrawable != null && lottieDrawable.isHeavyDrawable()) { lottieDrawable.stop(); } + AnimatedFileDrawable animatedFileDrawable = getAnimation(); + if (animatedFileDrawable != null) { + animatedFileDrawable.stop(); + } } } else if (id == NotificationCenter.startAllHeavyOperations) { Integer layer = (Integer) args[0]; @@ -2127,6 +2286,10 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg if (allowStartLottieAnimation && lottieDrawable != null && lottieDrawable.isHeavyDrawable()) { lottieDrawable.start(); } + AnimatedFileDrawable animatedFileDrawable = getAnimation(); + if (allowStartAnimation && animatedFileDrawable != null) { + animatedFileDrawable.start(); + } } } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImportingService.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImportingService.java index 1e1c9d3a2..9ac5b9b5a 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImportingService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImportingService.java @@ -23,6 +23,7 @@ public class ImportingService extends Service implements NotificationCenter.Noti super(); for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.historyImportProgressChanged); + NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.stickersImportProgressChanged); } } @@ -40,6 +41,7 @@ public class ImportingService 
extends Service implements NotificationCenter.Noti NotificationManagerCompat.from(ApplicationLoader.applicationContext).cancel(5); for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.historyImportProgressChanged); + NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.stickersImportProgressChanged); } if (BuildVars.LOGS_ENABLED) { FileLog.d("destroy import service"); @@ -48,14 +50,14 @@ public class ImportingService extends Service implements NotificationCenter.Noti @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.historyImportProgressChanged) { - if (!hasImports()) { + if (id == NotificationCenter.historyImportProgressChanged || id == NotificationCenter.stickersImportProgressChanged) { + if (!hasImportingStickers() && !hasImportingHistory()) { stopSelf(); } } } - private boolean hasImports() { + private boolean hasImportingHistory() { for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { if (SendMessagesHelper.getInstance(a).isImportingHistory()) { return true; @@ -64,8 +66,17 @@ public class ImportingService extends Service implements NotificationCenter.Noti return false; } + private boolean hasImportingStickers() { + for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { + if (SendMessagesHelper.getInstance(a).isImportingStickers()) { + return true; + } + } + return false; + } + public int onStartCommand(Intent intent, int flags, int startId) { - if (!hasImports()) { + if (!hasImportingStickers() && !hasImportingHistory()) { stopSelf(); return Service.START_NOT_STICKY; } @@ -79,8 +90,13 @@ public class ImportingService extends Service implements NotificationCenter.Noti builder.setWhen(System.currentTimeMillis()); builder.setChannelId(NotificationsController.OTHER_NOTIFICATIONS_CHANNEL); builder.setContentTitle(LocaleController.getString("AppName", R.string.AppName)); -
builder.setTicker(LocaleController.getString("ImporImportingService", R.string.ImporImportingService)); - builder.setContentText(LocaleController.getString("ImporImportingService", R.string.ImporImportingService)); + if (hasImportingHistory()) { + builder.setTicker(LocaleController.getString("ImporImportingService", R.string.ImporImportingService)); + builder.setContentText(LocaleController.getString("ImporImportingService", R.string.ImporImportingService)); + } else { + builder.setTicker(LocaleController.getString("ImporImportingStickersService", R.string.ImporImportingStickersService)); + builder.setContentText(LocaleController.getString("ImporImportingStickersService", R.string.ImporImportingStickersService)); + } } builder.setProgress(100, 0, true); startForeground(5, builder.build()); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java b/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java index 476f5b0a9..ccb6aa397 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java @@ -1216,8 +1216,8 @@ public class LocaleController { int idx = result.indexOf(type); if (idx >= 0) { idx += type.length(); - if (idx < result.length() && result.charAt(idx + type.length()) != ' ') { - result = result.substring(0, idx + type.length()) + " " + result.substring(idx + type.length()); + if (idx < result.length() && result.charAt(idx) != ' ') { + result = result.substring(0, idx) + " " + result.substring(idx); } } return result; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java index b7ebe4032..d3a76a645 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MediaController.java @@ -929,7 +929,7 @@ public class MediaController 
implements AudioManager.OnAudioFocusChangeListener, AndroidUtilities.runOnUIThread(() -> { for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { - NotificationCenter.getInstance(a).addObserver(MediaController.this, NotificationCenter.fileDidLoad); + NotificationCenter.getInstance(a).addObserver(MediaController.this, NotificationCenter.fileLoaded); NotificationCenter.getInstance(a).addObserver(MediaController.this, NotificationCenter.httpFileDidLoad); NotificationCenter.getInstance(a).addObserver(MediaController.this, NotificationCenter.didReceiveNewMessages); NotificationCenter.getInstance(a).addObserver(MediaController.this, NotificationCenter.messagesDeleted); @@ -1255,7 +1255,7 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, @SuppressWarnings("unchecked") @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.fileDidLoad || id == NotificationCenter.httpFileDidLoad) { + if (id == NotificationCenter.fileLoaded || id == NotificationCenter.httpFileDidLoad) { String fileName = (String) args[0]; if (playingMessageObject != null && playingMessageObject.currentAccount == account) { String file = FileLoader.getAttachFileName(playingMessageObject.getDocument()); @@ -2982,7 +2982,7 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, if (exists) { if (!messageObject.mediaExists && cacheFile != file) { - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(messageObject.currentAccount).postNotificationName(NotificationCenter.fileDidLoad, FileLoader.getAttachFileName(messageObject.getDocument()), cacheFile)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(messageObject.currentAccount).postNotificationName(NotificationCenter.fileLoaded, FileLoader.getAttachFileName(messageObject.getDocument()), cacheFile)); } videoPlayer.preparePlayer(Uri.fromFile(cacheFile), "other"); } else { @@ -3078,7 +3078,7 @@ public 
class MediaController implements AudioManager.OnAudioFocusChangeListener, }); if (exists) { if (!messageObject.mediaExists && cacheFile != file) { - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(messageObject.currentAccount).postNotificationName(NotificationCenter.fileDidLoad, FileLoader.getAttachFileName(messageObject.getDocument()), cacheFile)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(messageObject.currentAccount).postNotificationName(NotificationCenter.fileLoaded, FileLoader.getAttachFileName(messageObject.getDocument()), cacheFile)); } audioPlayer.preparePlayer(Uri.fromFile(cacheFile), "other"); isStreamingCurrentAudio = false; @@ -3535,7 +3535,7 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, audioToSend.attributes.add(attributeAudio); if (duration > 700) { if (send == 1) { - SendMessagesHelper.getInstance(recordingCurrentAccount).sendMessage(audioToSend, null, recordingAudioFileToSend.getAbsolutePath(), recordDialogId, recordReplyingMsg, recordReplyingTopMsg, null, null, null, null, notify, scheduleDate, 0, null); + SendMessagesHelper.getInstance(recordingCurrentAccount).sendMessage(audioToSend, null, recordingAudioFileToSend.getAbsolutePath(), recordDialogId, recordReplyingMsg, recordReplyingTopMsg, null, null, null, null, notify, scheduleDate, 0, null, null); } NotificationCenter.getInstance(recordingCurrentAccount).postNotificationName(NotificationCenter.audioDidSent, recordingGuid, send == 2 ? audioToSend : null, send == 2 ? 
recordingAudioFileToSend.getAbsolutePath() : null); } else { @@ -3619,9 +3619,9 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, messageObjects = messages; onFinishRunnable = onFinish; isMusic = messages.get(0).isMusic(); - currentAccount.getNotificationCenter().addObserver(this, NotificationCenter.fileDidLoad); - currentAccount.getNotificationCenter().addObserver(this, NotificationCenter.FileLoadProgressChanged); - currentAccount.getNotificationCenter().addObserver(this, NotificationCenter.fileDidFailToLoad); + currentAccount.getNotificationCenter().addObserver(this, NotificationCenter.fileLoaded); + currentAccount.getNotificationCenter().addObserver(this, NotificationCenter.fileLoadProgressChanged); + currentAccount.getNotificationCenter().addObserver(this, NotificationCenter.fileLoadFailed); progressDialog = new AlertDialog(context, 2); progressDialog.setMessage(LocaleController.getString("Loading", R.string.Loading)); progressDialog.setCanceledOnTouchOutside(false); @@ -3710,9 +3710,9 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, } catch (Exception e) { FileLog.e(e); } - currentAccount.getNotificationCenter().removeObserver(this, NotificationCenter.fileDidLoad); - currentAccount.getNotificationCenter().removeObserver(this, NotificationCenter.FileLoadProgressChanged); - currentAccount.getNotificationCenter().removeObserver(this, NotificationCenter.fileDidFailToLoad); + currentAccount.getNotificationCenter().removeObserver(this, NotificationCenter.fileLoaded); + currentAccount.getNotificationCenter().removeObserver(this, NotificationCenter.fileLoadProgressChanged); + currentAccount.getNotificationCenter().removeObserver(this, NotificationCenter.fileLoadFailed); }); } @@ -3813,12 +3813,12 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.fileDidLoad || id == NotificationCenter.fileDidFailToLoad) { + if (id == NotificationCenter.fileLoaded || id == NotificationCenter.fileLoadFailed) { String fileName = (String) args[0]; if (loadingMessageObjects.remove(fileName) != null) { waitingForFile.countDown(); } - } else if (id == NotificationCenter.FileLoadProgressChanged) { + } else if (id == NotificationCenter.fileLoadProgressChanged) { String fileName = (String) args[0]; if (loadingMessageObjects.containsKey(fileName)) { Long loadedSize = (Long) args[1]; @@ -4006,6 +4006,40 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, } } + public static String getStickerExt(Uri uri) { + InputStream inputStream = null; + try { + inputStream = ApplicationLoader.applicationContext.getContentResolver().openInputStream(uri); + byte[] header = new byte[12]; + if (inputStream.read(header, 0, 12) == 12) { + if (header[0] == 0x89 && header[1] == 0x50 && header[2] == 0x4E && header[3] == 0x47 && header[4] == 0x0D && header[5] == 0x0A && header[6] == 0x1A && header[7] == 0x0A) { + return "png"; + } + if (header[0] == 0x1f && header[1] == (byte) 0x8b) { + return "tgs"; + } + String str = new String(header); + if (str != null) { + str = str.toLowerCase(); + if (str.startsWith("riff") && str.endsWith("webp")) { + return "webp"; + } + } + } + } catch (Exception e) { + FileLog.e(e); + } finally { + try { + if (inputStream != null) { + inputStream.close(); + } + } catch (Exception e2) { + FileLog.e(e2); + } + } + return null; + } + public static boolean isWebp(Uri uri) { InputStream inputStream = null; try { @@ -4088,10 +4122,16 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, return ""; } - @SuppressLint("DiscouragedPrivateApi") public static String copyFileToCache(Uri uri, String ext) { + return copyFileToCache(uri, ext, -1); + } + + @SuppressLint("DiscouragedPrivateApi") + public static String copyFileToCache(Uri uri, 
String ext, long sizeLimit) { InputStream inputStream = null; FileOutputStream output = null; + int totalLen = 0; + File f = null; try { String name = FileLoader.fixFileName(getFileName(uri)); if (name == null) { @@ -4099,7 +4139,7 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, SharedConfig.saveConfig(); name = String.format(Locale.US, "%d.%s", id, ext); } - File f = AndroidUtilities.getSharingDirectory(); + f = AndroidUtilities.getSharingDirectory(); f.mkdirs(); f = new File(f, name); if (AndroidUtilities.isInternalUri(Uri.fromFile(f))) { @@ -4123,6 +4163,10 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, int len; while ((len = inputStream.read(buffer)) != -1) { output.write(buffer, 0, len); + totalLen += len; + if (sizeLimit > 0 && totalLen > sizeLimit) { + return null; + } } return f.getAbsolutePath(); } catch (Exception e) { @@ -4142,6 +4186,9 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, } catch (Exception e2) { FileLog.e(e2); } + if (sizeLimit > 0 && totalLen > sizeLimit) { + f.delete(); + } } return null; } @@ -4166,7 +4213,7 @@ public class MediaController implements AudioManager.OnAudioFocusChangeListener, Cursor cursor = null; try { - if (Build.VERSION.SDK_INT < 23 || Build.VERSION.SDK_INT >= 23 && ApplicationLoader.applicationContext.checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT < 23 || ApplicationLoader.applicationContext.checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED) { cursor = MediaStore.Images.Media.query(ApplicationLoader.applicationContext.getContentResolver(), MediaStore.Images.Media.EXTERNAL_CONTENT_URI, projectionPhotos, null, null, (Build.VERSION.SDK_INT > 28 ? 
MediaStore.Images.Media.DATE_MODIFIED : MediaStore.Images.Media.DATE_TAKEN) + " DESC"); if (cursor != null) { int imageIdColumn = cursor.getColumnIndex(MediaStore.Images.Media._ID); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java index f164f841e..4d6d0e8ab 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MediaDataController.java @@ -1835,7 +1835,7 @@ public class MediaDataController extends BaseController { if (thumb != null) { final ArrayList documents = stickerSet.documents; if (documents != null && !documents.isEmpty()) { - loadStickerSetThumbInternal(thumb, stickerSet, documents.get(0)); + loadStickerSetThumbInternal(thumb, stickerSet, documents.get(0), stickerSet.set.thumb_version); } } } @@ -1851,12 +1851,12 @@ public class MediaDataController extends BaseController { } else { return; } - loadStickerSetThumbInternal(thumb, stickerSet, sticker); + loadStickerSetThumbInternal(thumb, stickerSet, sticker, stickerSet.set.thumb_version); } } - private void loadStickerSetThumbInternal(TLRPC.PhotoSize thumb, Object parentObject, TLRPC.Document sticker) { - final ImageLocation imageLocation = ImageLocation.getForSticker(thumb, sticker); + private void loadStickerSetThumbInternal(TLRPC.PhotoSize thumb, Object parentObject, TLRPC.Document sticker, int thumbVersion) { + final ImageLocation imageLocation = ImageLocation.getForSticker(thumb, sticker, thumbVersion); if (imageLocation != null) { final String ext = imageLocation.imageType == FileLoader.IMAGE_TYPE_LOTTIE ? 
"tgs" : "webp"; getFileLoader().loadFile(imageLocation, parentObject, ext, 2, 1); @@ -2594,7 +2594,9 @@ public class MediaDataController extends BaseController { final ArrayList objects = new ArrayList<>(); for (int a = 0; a < res.messages.size(); a++) { TLRPC.Message message = res.messages.get(a); - objects.add(new MessageObject(currentAccount, message, usersDict, true, true)); + MessageObject messageObject = new MessageObject(currentAccount, message, usersDict, true, true); + messageObject.createStrippedThumb(); + objects.add(messageObject); } AndroidUtilities.runOnUIThread(() -> { @@ -5163,7 +5165,7 @@ public class MediaDataController extends BaseController { }); } - public void loadBotInfo(final int uid, boolean cache, final int classGuid) { + public void loadBotInfo(final int uid, final long dialogId, boolean cache, final int classGuid) { if (cache) { TLRPC.BotInfo botInfo = botInfos.get(uid); if (botInfo != null) { @@ -5174,7 +5176,7 @@ public class MediaDataController extends BaseController { getMessagesStorage().getStorageQueue().postRunnable(() -> { try { TLRPC.BotInfo botInfo = null; - SQLiteCursor cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT info FROM bot_info WHERE uid = %d", uid)); + SQLiteCursor cursor = getMessagesStorage().getDatabase().queryFinalized(String.format(Locale.US, "SELECT info FROM bot_info_v2 WHERE uid = %d AND dialogId = %d", uid, dialogId)); if (cursor.next()) { NativeByteBuffer data; @@ -5238,19 +5240,20 @@ public class MediaDataController extends BaseController { } } - public void putBotInfo(final TLRPC.BotInfo botInfo) { + public void putBotInfo(long dialogId, TLRPC.BotInfo botInfo) { if (botInfo == null) { return; } botInfos.put(botInfo.user_id, botInfo); getMessagesStorage().getStorageQueue().postRunnable(() -> { try { - SQLitePreparedStatement state = getMessagesStorage().getDatabase().executeFast("REPLACE INTO bot_info(uid, info) VALUES(?, ?)"); + SQLitePreparedStatement state = 
getMessagesStorage().getDatabase().executeFast("REPLACE INTO bot_info_v2 VALUES(?, ?, ?)"); state.requery(); NativeByteBuffer data = new NativeByteBuffer(botInfo.getObjectSize()); botInfo.serializeToStream(data); state.bindInteger(1, botInfo.user_id); - state.bindByteBuffer(2, data); + state.bindLong(2, dialogId); + state.bindByteBuffer(3, data); state.step(); data.reuse(); state.dispose(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java index 65cd9ca53..0a502aa6a 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessageObject.java @@ -9,6 +9,7 @@ package org.telegram.messenger; import android.graphics.Typeface; +import android.graphics.drawable.BitmapDrawable; import android.net.Uri; import android.os.Build; import android.text.Layout; @@ -147,6 +148,7 @@ public class MessageObject { public String previousAttachPath; public SvgHelper.SvgDrawable pathThumb; + public BitmapDrawable strippedThumb; public int currentAccount; @@ -161,6 +163,8 @@ public class MessageObject { public float textXOffset; public int linesCount; + public SendAnimationData sendAnimationData; + private int emojiOnlyCount; private boolean layoutCreated; private int generatedWithMinSize; @@ -193,6 +197,17 @@ public class MessageObject { " . 
" }; + public static class SendAnimationData { + public float x; + public float y; + public float width; + public float height; + public float currentScale; + public float currentX; + public float currentY; + public float timeAlpha; + } + public static class VCardData { private String company; @@ -977,6 +992,23 @@ public class MessageObject { pathThumb = DocumentObject.getSvgThumb(document, Theme.key_chat_serviceBackground, 1.0f); } + public void createStrippedThumb() { + if (photoThumbs == null || SharedConfig.getDevicePerformanceClass() != SharedConfig.PERFORMANCE_CLASS_HIGH) { + return; + } + try { + for (int a = 0, N = photoThumbs.size(); a < N; a++) { + TLRPC.PhotoSize photoSize = photoThumbs.get(a); + if (photoSize instanceof TLRPC.TL_photoStrippedSize) { + strippedThumb = new BitmapDrawable(ImageLoader.getStrippedPhotoBitmap(photoSize.bytes, "b")); + break; + } + } + } catch (Throwable e) { + FileLog.e(e); + } + } + private void createDateArray(int accountNum, TLRPC.TL_channelAdminLogEvent event, ArrayList messageObjects, HashMap> messagesByDays, boolean addToEnd) { ArrayList dayArray = messagesByDays.get(dateKey); if (dayArray == null) { @@ -1914,7 +1946,7 @@ public class MessageObject { return true; } TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(messageOwner.peer_id.channel_id != 0 ? 
messageOwner.peer_id.channel_id : messageOwner.peer_id.chat_id); - return chat != null && chat.gigagroup || !ChatObject.isActionBanned(chat, ChatObject.ACTION_SEND_STICKERS); + return chat != null && chat.gigagroup || (!ChatObject.isActionBanned(chat, ChatObject.ACTION_SEND_STICKERS) || ChatObject.hasAdminRights(chat)); } public void generateGameMessageText(TLRPC.User fromUser) { @@ -2923,7 +2955,15 @@ public class MessageObject { } } } else { - messageText = messageOwner.message; + if (messageOwner.message != null) { + try { + messageText = AndroidUtilities.BAD_CHARS_MESSAGE_PATTERN.matcher(messageOwner.message).replaceAll(""); + } catch (Throwable e) { + messageText = messageOwner.message; + } + } else { + messageText = messageOwner.message; + } } } @@ -5251,6 +5291,10 @@ public class MessageObject { return isRoundVideoCached == 1; } + public boolean shouldAnimateSending() { + return isSending() && (type == MessageObject.TYPE_ROUND_VIDEO || isVoice() || isAnyKindOfSticker() && sendAnimationData != null); + } + public boolean hasAttachedStickers() { if (messageOwner.media instanceof TLRPC.TL_messageMediaPhoto) { return messageOwner.media.photo != null && messageOwner.media.photo.has_stickers; @@ -5562,7 +5606,7 @@ public class MessageObject { if (ChatObject.isChannel(chat) && !chat.megagroup && (chat.creator || chat.admin_rights != null && chat.admin_rights.edit_messages)) { return true; } - if (message.out && chat != null && chat.megagroup && (chat.creator || chat.admin_rights != null && chat.admin_rights.pin_messages)) { + if (message.out && chat != null && chat.megagroup && (chat.creator || chat.admin_rights != null && chat.admin_rights.pin_messages || chat.default_banned_rights != null && !chat.default_banned_rights.pin_messages)) { return true; } // @@ -5610,7 +5654,7 @@ public class MessageObject { if (ChatObject.isChannel(chat) && !chat.megagroup && (chat.creator || chat.admin_rights != null && chat.admin_rights.edit_messages)) { return true; } - if 
(message.out && chat != null && chat.megagroup && (chat.creator || chat.admin_rights != null && chat.admin_rights.pin_messages)) { + if (message.out && chat != null && chat.megagroup && (chat.creator || chat.admin_rights != null && chat.admin_rights.pin_messages || chat.default_banned_rights != null && !chat.default_banned_rights.pin_messages)) { return true; } if (!scheduled && Math.abs(message.date - ConnectionsManager.getInstance(currentAccount).getCurrentTime()) > MessagesController.getInstance(currentAccount).maxEditTime) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java index 5f9560b5f..e84c44032 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java @@ -16,6 +16,7 @@ import android.content.SharedPreferences; import android.location.Location; import android.os.Build; import android.os.Bundle; +import android.os.Looper; import android.os.SystemClock; import android.telephony.TelephonyManager; import android.text.TextUtils; @@ -43,6 +44,7 @@ import org.telegram.ui.ChatActivity; import org.telegram.ui.Components.AlertsCreator; import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.JoinCallAlert; +import org.telegram.ui.Components.MotionBackgroundDrawable; import org.telegram.ui.DialogsActivity; import org.telegram.ui.EditWidgetActivity; import org.telegram.ui.LaunchActivity; @@ -69,6 +71,8 @@ public class MessagesController extends BaseController implements NotificationCe private ConcurrentHashMap users = new ConcurrentHashMap<>(100, 1.0f, 2); private ConcurrentHashMap objectsByUsernames = new ConcurrentHashMap<>(100, 1.0f, 2); + private HashMap activeVoiceChatsMap = new HashMap<>(); + private ArrayList joiningToChannels = new ArrayList<>(); private SparseArray exportedChats = new SparseArray<>(); @@ 
-299,6 +303,7 @@ public class MessagesController extends BaseController implements NotificationCe public Set exportGroupUri; public Set exportPrivateUri; public boolean autoarchiveAvailable; + public int groipCallVideoMaxParticipants; public boolean suggestStickersApiOnly; public ArrayList gifSearchEmojies = new ArrayList<>(); public HashSet diceEmojies; @@ -685,10 +690,10 @@ public class MessagesController extends BaseController implements NotificationCe getLocationController(); AndroidUtilities.runOnUIThread(() -> { MessagesController messagesController = getMessagesController(); - getNotificationCenter().addObserver(messagesController, NotificationCenter.FileDidUpload); - getNotificationCenter().addObserver(messagesController, NotificationCenter.FileDidFailUpload); - getNotificationCenter().addObserver(messagesController, NotificationCenter.fileDidLoad); - getNotificationCenter().addObserver(messagesController, NotificationCenter.fileDidFailToLoad); + getNotificationCenter().addObserver(messagesController, NotificationCenter.fileUploaded); + getNotificationCenter().addObserver(messagesController, NotificationCenter.fileUploadFailed); + getNotificationCenter().addObserver(messagesController, NotificationCenter.fileLoaded); + getNotificationCenter().addObserver(messagesController, NotificationCenter.fileLoadFailed); getNotificationCenter().addObserver(messagesController, NotificationCenter.messageReceivedByServer); getNotificationCenter().addObserver(messagesController, NotificationCenter.updateMessageMedia); }); @@ -753,6 +758,7 @@ public class MessagesController extends BaseController implements NotificationCe filtersEnabled = mainPreferences.getBoolean("filtersEnabled", false); showFiltersTooltip = mainPreferences.getBoolean("showFiltersTooltip", false); autoarchiveAvailable = mainPreferences.getBoolean("autoarchiveAvailable", false); + groipCallVideoMaxParticipants = mainPreferences.getInt("groipCallVideoMaxParticipants", 30); suggestStickersApiOnly = 
mainPreferences.getBoolean("suggestStickersApiOnly", false); pendingSuggestions = mainPreferences.getStringSet("pendingSuggestions", null); @@ -1585,6 +1591,17 @@ public class MessagesController extends BaseController implements NotificationCe } break; } + case "groupcall_video_participants_max": { + if (value.value instanceof TLRPC.TL_jsonNumber) { + TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; + if (number.value != groipCallVideoMaxParticipants) { + groipCallVideoMaxParticipants = (int) number.value; + editor.putInt("groipCallVideoMaxParticipants", groipCallVideoMaxParticipants); + changed = true; + } + } + break; + } case "inapp_update_check_delay": { if (value.value instanceof TLRPC.TL_jsonNumber) { TLRPC.TL_jsonNumber number = (TLRPC.TL_jsonNumber) value.value; @@ -2080,7 +2097,7 @@ public class MessagesController extends BaseController implements NotificationCe @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.FileDidUpload) { + if (id == NotificationCenter.fileUploaded) { final String location = (String) args[0]; final TLRPC.InputFile file = (TLRPC.InputFile) args[1]; @@ -2133,7 +2150,7 @@ public class MessagesController extends BaseController implements NotificationCe settings.motion = overrideWallpaperInfo.isMotion; req.settings = settings; getConnectionsManager().sendRequest(req, (response, error) -> { - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) response; + TLRPC.WallPaper wallPaper = (TLRPC.WallPaper) response; File path = new File(ApplicationLoader.getFilesDirFixed(), overrideWallpaperInfo.originalFileName); if (wallPaper != null) { try { @@ -2238,18 +2255,28 @@ public class MessagesController extends BaseController implements NotificationCe settings.wallpaper_settings.intensity = (int) (accent.patternIntensity * 100); settings.wallpaper_settings.flags |= 8; } else { - settings.wallpaper = new TLRPC.TL_inputWallPaperNoFile(); + TLRPC.TL_inputWallPaperNoFile 
inputWallPaperNoFile = new TLRPC.TL_inputWallPaperNoFile(); + inputWallPaperNoFile.id = 0; + settings.wallpaper = inputWallPaperNoFile; } settings.wallpaper_settings.motion = accent.patternMotion; if (accent.backgroundOverrideColor != 0) { settings.wallpaper_settings.background_color = (int) accent.backgroundOverrideColor; settings.wallpaper_settings.flags |= 1; } - if (accent.backgroundGradientOverrideColor != 0) { - settings.wallpaper_settings.second_background_color = (int) accent.backgroundGradientOverrideColor; + if (accent.backgroundGradientOverrideColor1 != 0) { + settings.wallpaper_settings.second_background_color = (int) accent.backgroundGradientOverrideColor1; settings.wallpaper_settings.flags |= 16; settings.wallpaper_settings.rotation = AndroidUtilities.getWallpaperRotation(accent.backgroundRotation, true); } + if (accent.backgroundGradientOverrideColor2 != 0) { + settings.wallpaper_settings.third_background_color = (int) accent.backgroundGradientOverrideColor2; + settings.wallpaper_settings.flags |= 32; + } + if (accent.backgroundGradientOverrideColor3 != 0) { + settings.wallpaper_settings.fourth_background_color = (int) accent.backgroundGradientOverrideColor3; + settings.wallpaper_settings.flags |= 64; + } } else { themeInfo.uploadedFile = null; themeInfo.uploadedThumb = null; @@ -2322,7 +2349,7 @@ public class MessagesController extends BaseController implements NotificationCe }); } } - } else if (id == NotificationCenter.FileDidFailUpload) { + } else if (id == NotificationCenter.fileUploadFailed) { final String location = (String) args[0]; if (uploadingAvatar != null && uploadingAvatar.equals(location)) { uploadingAvatar = null; @@ -2468,6 +2495,7 @@ public class MessagesController extends BaseController implements NotificationCe exportedChats.clear(); fullUsers.clear(); fullChats.clear(); + activeVoiceChatsMap.clear(); loadingGroupCalls.clear(); groupCallsByChatId.clear(); dialogsByFolder.clear(); @@ -2851,9 +2879,11 @@ public class 
MessagesController extends BaseController implements NotificationCe if (chat.participants_count != 0) { oldChat.participants_count = chat.participants_count; } + addOrRemoveActiveVoiceChat(oldChat); } } else { chats.put(chat.id, chat); + addOrRemoveActiveVoiceChat(chat); } } else { if (!fromCache) { @@ -2927,6 +2957,7 @@ public class MessagesController extends BaseController implements NotificationCe } chats.put(chat.id, chat); } + addOrRemoveActiveVoiceChat(chat); } } @@ -2941,6 +2972,35 @@ public class MessagesController extends BaseController implements NotificationCe } } + private void addOrRemoveActiveVoiceChat(TLRPC.Chat chat) { + if (Thread.currentThread() != Looper.getMainLooper().getThread()) { + AndroidUtilities.runOnUIThread(() -> addOrRemoveActiveVoiceChatInternal(chat)); + } else { + addOrRemoveActiveVoiceChatInternal(chat); + } + } + + private void addOrRemoveActiveVoiceChatInternal(TLRPC.Chat chat) { + TLRPC.Chat currentChat = activeVoiceChatsMap.get(chat.id); + if (chat.call_active && chat.call_not_empty && chat.migrated_to == null && !ChatObject.isNotInChat(chat)) { + if (currentChat != null) { + return; + } + activeVoiceChatsMap.put(chat.id, chat); + getNotificationCenter().postNotificationName(NotificationCenter.activeGroupCallsUpdated); + } else { + if (currentChat == null) { + return; + } + activeVoiceChatsMap.remove(chat.id); + getNotificationCenter().postNotificationName(NotificationCenter.activeGroupCallsUpdated); + } + } + + public ArrayList getActiveGroupCalls() { + return new ArrayList<>(activeVoiceChatsMap.keySet()); + } + public void setReferer(String referer) { if (referer == null) { return; @@ -3136,7 +3196,7 @@ public class MessagesController extends BaseController implements NotificationCe if (array == null) { return null; } - TLRPC.ChannelParticipant participant = (TLRPC.ChannelParticipant) array.get(uid); + TLRPC.ChannelParticipant participant = array.get(uid); if (participant == null) { return null; } @@ -3273,7 +3333,7 @@ public 
class MessagesController extends BaseController implements NotificationCe applyDialogNotificationsSettings(-chat_id, res.full_chat.notify_settings); for (int a = 0; a < res.full_chat.bot_info.size(); a++) { TLRPC.BotInfo botInfo = res.full_chat.bot_info.get(a); - getMediaDataController().putBotInfo(botInfo); + getMediaDataController().putBotInfo(-chat_id, botInfo); } int index = blockePeers.indexOfKey(-chat_id); if (res.full_chat.blocked) { @@ -3339,7 +3399,7 @@ public class MessagesController extends BaseController implements NotificationCe applyDialogNotificationsSettings(user.id, userFull.notify_settings); if (userFull.bot_info instanceof TLRPC.TL_botInfo) { - getMediaDataController().putBotInfo(userFull.bot_info); + getMediaDataController().putBotInfo(user.id, userFull.bot_info); } int index = blockePeers.indexOfKey(user.id); if (userFull.blocked) { @@ -4189,7 +4249,7 @@ public class MessagesController extends BaseController implements NotificationCe } uploadingThemes.put(key, accent != null ? accent : themeInfo); Utilities.globalQueue.postRunnable(() -> { - String thumbPath = Theme.createThemePreviewImage(key, pathToWallpaper != null ? pathToWallpaper.getAbsolutePath() : null); + String thumbPath = Theme.createThemePreviewImage(key, pathToWallpaper != null ? 
pathToWallpaper.getAbsolutePath() : null, accent); AndroidUtilities.runOnUIThread(() -> { if (thumbPath == null) { uploadingThemes.remove(key); @@ -4216,7 +4276,7 @@ public class MessagesController extends BaseController implements NotificationCe uploadingWallpaperInfo = info; return; } - getFileLoader().cancelUploadFile(uploadingWallpaper, false); + getFileLoader().cancelFileUpload(uploadingWallpaper, false); uploadingWallpaper = null; uploadingWallpaperInfo = null; } @@ -4224,24 +4284,41 @@ public class MessagesController extends BaseController implements NotificationCe uploadingWallpaper = path.getAbsolutePath(); uploadingWallpaperInfo = info; getFileLoader().uploadFile(uploadingWallpaper, false, true, ConnectionsManager.FileTypePhoto); - } else if (!info.isDefault() && !info.isColor() && !info.isTheme()) { - TLRPC.TL_inputWallPaperSlug inputWallPaper = new TLRPC.TL_inputWallPaperSlug(); - inputWallPaper.slug = info.slug; + } else if (!info.isDefault() && !info.isColor() && info.wallpaperId > 0 && !info.isTheme()) { + TLRPC.InputWallPaper inputWallPaper; + if (info.wallpaperId > 0) { + TLRPC.TL_inputWallPaper inputWallPaperId = new TLRPC.TL_inputWallPaper(); + inputWallPaperId.id = info.wallpaperId; + inputWallPaperId.access_hash = info.accessHash; + inputWallPaper = inputWallPaperId; + } else { + TLRPC.TL_inputWallPaperSlug inputWallPaperSlug = new TLRPC.TL_inputWallPaperSlug(); + inputWallPaperSlug.slug = info.slug; + inputWallPaper = inputWallPaperSlug; + } TLRPC.TL_wallPaperSettings settings = new TLRPC.TL_wallPaperSettings(); settings.blur = info.isBlurred; settings.motion = info.isMotion; if (info.color != 0) { - settings.background_color = info.color; + settings.background_color = info.color & 0x00ffffff; settings.flags |= 1; settings.intensity = (int) (info.intensity * 100); settings.flags |= 8; } - if (info.gradientColor != 0) { - settings.second_background_color = info.gradientColor; + if (info.gradientColor1 != 0) { + settings.second_background_color 
= info.gradientColor1 & 0x00ffffff; settings.rotation = AndroidUtilities.getWallpaperRotation(info.rotation, true); settings.flags |= 16; } + if (info.gradientColor2 != 0) { + settings.third_background_color = info.gradientColor2 & 0x00ffffff; + settings.flags |= 32; + } + if (info.gradientColor3 != 0) { + settings.fourth_background_color = info.gradientColor3 & 0x00ffffff; + settings.flags |= 64; + } TLObject req; if (install) { @@ -4267,7 +4344,7 @@ public class MessagesController extends BaseController implements NotificationCe data.writeBool(info.isBlurred); data.writeBool(info.isMotion); data.writeInt32(info.color); - data.writeInt32(info.gradientColor); + data.writeInt32(info.gradientColor1); data.writeInt32(info.rotation); data.writeDouble(info.intensity); data.writeBool(install); @@ -4282,6 +4359,52 @@ public class MessagesController extends BaseController implements NotificationCe getConnectionsManager().sendRequest(req, (response, error) -> getMessagesStorage().removePendingTask(newTaskId)); } + if ((info.isColor() || info.gradientColor2 != 0) && info.wallpaperId <= 0) { + TLRPC.WallPaper wallPaper; + if (info.isColor()) { + wallPaper = new TLRPC.TL_wallPaperNoFile(); + } else { + wallPaper = new TLRPC.TL_wallPaper(); + wallPaper.slug = info.slug; + wallPaper.document = new TLRPC.TL_documentEmpty(); + } + if (info.wallpaperId == 0) { + wallPaper.id = Utilities.random.nextLong(); + if (wallPaper.id > 0) { + wallPaper.id = -wallPaper.id; + } + } else { + wallPaper.id = info.wallpaperId; + } + wallPaper.dark = MotionBackgroundDrawable.isDark(info.color, info.gradientColor1, info.gradientColor2, info.gradientColor3); + wallPaper.flags |= 4; + wallPaper.settings = new TLRPC.TL_wallPaperSettings(); + wallPaper.settings.blur = info.isBlurred; + wallPaper.settings.motion = info.isMotion; + if (info.color != 0) { + wallPaper.settings.background_color = info.color; + wallPaper.settings.flags |= 1; + wallPaper.settings.intensity = (int) (info.intensity * 100); + 
wallPaper.settings.flags |= 8; + } + if (info.gradientColor1 != 0) { + wallPaper.settings.second_background_color = info.gradientColor1; + wallPaper.settings.rotation = AndroidUtilities.getWallpaperRotation(info.rotation, true); + wallPaper.settings.flags |= 16; + } + if (info.gradientColor2 != 0) { + wallPaper.settings.third_background_color = info.gradientColor2; + wallPaper.settings.flags |= 32; + } + if (info.gradientColor3 != 0) { + wallPaper.settings.fourth_background_color = info.gradientColor3; + wallPaper.settings.flags |= 64; + } + ArrayList arrayList = new ArrayList<>(); + arrayList.add(wallPaper); + getMessagesStorage().putWallpapers(arrayList, -3); + getMessagesStorage().getWallpapers(); + } } public void markDialogMessageAsDeleted(ArrayList messages, long dialogId) { @@ -4710,6 +4833,9 @@ public class MessagesController extends BaseController implements NotificationCe int max_id_delete = max_id; if (first != 0) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("delete dialog with id " + did); + } boolean isPromoDialog = false; getMessagesStorage().deleteDialog(did, onlyHistory); TLRPC.Dialog dialog = dialogs_dict.get(did); @@ -4818,7 +4944,7 @@ public class MessagesController extends BaseController implements NotificationCe getNotificationCenter().postNotificationName(NotificationCenter.dialogsNeedReload, true); } else { getNotificationCenter().postNotificationName(NotificationCenter.dialogsNeedReload); - getNotificationCenter().postNotificationName(NotificationCenter.removeAllMessagesFromDialog, did, false); + getNotificationCenter().postNotificationName(NotificationCenter.removeAllMessagesFromDialog, did, false, null); } } getMessagesStorage().getStorageQueue().postRunnable(() -> AndroidUtilities.runOnUIThread(() -> getNotificationsController().removeNotificationsForDialog(did))); @@ -6281,7 +6407,7 @@ public class MessagesController extends BaseController implements NotificationCe if (message.legacy && message.layer < TLRPC.LAYER) { 
messagesToReload.add(message.id); } else if (message.media instanceof TLRPC.TL_messageMediaUnsupported) { - if (message.media.bytes != null && (message.media.bytes.length == 0 || message.media.bytes.length == 1 && message.media.bytes[0] < TLRPC.LAYER)) { + if (message.media.bytes != null && (message.media.bytes.length == 0 || message.media.bytes.length == 1 && message.media.bytes[0] < TLRPC.LAYER || message.media.bytes.length == 4 && Utilities.bytesToInt(message.media.bytes) < TLRPC.LAYER)) { messagesToReload.add(message.id); } } @@ -10694,7 +10820,7 @@ public class MessagesController extends BaseController implements NotificationCe getMessagesStorage().putUsersAndChats(res.users, res.chats, true, true); ArrayList pushMessages; - if (createMessage && !getMessagesStorage().hasInviteMeMessage(chatId)) { + if (createMessage && Math.abs(getConnectionsManager().getCurrentTime() - res.participant.date) < 24 * 60 * 60 && !getMessagesStorage().hasInviteMeMessage(chatId)) { TLRPC.TL_messageService message = new TLRPC.TL_messageService(); message.media_unread = true; message.unread = true; @@ -12410,6 +12536,8 @@ public class MessagesController extends BaseController implements NotificationCe updatesOnMainThread = new ArrayList<>(); } updatesOnMainThread.add(baseUpdate); + } else if (baseUpdate instanceof TLRPC.TL_updateGroupCallConnection) { + } else if (baseUpdate instanceof TLRPC.TL_updatePhoneCallSignalingData) { if (updatesOnMainThread == null) { updatesOnMainThread = new ArrayList<>(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java index a38b8e9fd..387e25076 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesStorage.java @@ -53,6 +53,10 @@ public class MessagesStorage extends BaseController { void run(int param); } + public interface StringCallback { + void 
run(String param); + } + public interface BooleanCallback { void run(boolean param); } @@ -89,7 +93,7 @@ public class MessagesStorage extends BaseController { private CountDownLatch openSync = new CountDownLatch(1); private static volatile MessagesStorage[] Instance = new MessagesStorage[UserConfig.MAX_ACCOUNT_COUNT]; - private final static int LAST_DB_VERSION = 78; + private final static int LAST_DB_VERSION = 79; public static MessagesStorage getInstance(int num) { MessagesStorage localInstance = Instance[num]; @@ -368,7 +372,7 @@ public class MessagesStorage extends BaseController { database.executeFast("CREATE TABLE search_recent(did INTEGER PRIMARY KEY, date INTEGER);").stepThis().dispose(); database.executeFast("CREATE TABLE media_counts_v2(uid INTEGER, type INTEGER, count INTEGER, old INTEGER, PRIMARY KEY(uid, type))").stepThis().dispose(); database.executeFast("CREATE TABLE keyvalue(id TEXT PRIMARY KEY, value TEXT)").stepThis().dispose(); - database.executeFast("CREATE TABLE bot_info(uid INTEGER PRIMARY KEY, info BLOB)").stepThis().dispose(); + database.executeFast("CREATE TABLE bot_info_v2(uid INTEGER, dialogId INTEGER, info BLOB, PRIMARY KEY(uid, dialogId))").stepThis().dispose(); database.executeFast("CREATE TABLE pending_tasks(id INTEGER PRIMARY KEY, data BLOB);").stepThis().dispose(); database.executeFast("CREATE TABLE requested_holes(uid INTEGER, seq_out_start INTEGER, seq_out_end INTEGER, PRIMARY KEY (uid, seq_out_start, seq_out_end));").stepThis().dispose(); database.executeFast("CREATE TABLE sharing_locations(uid INTEGER PRIMARY KEY, mid INTEGER, date INTEGER, period INTEGER, message BLOB, proximity INTEGER);").stepThis().dispose(); @@ -593,7 +597,6 @@ public class MessagesStorage extends BaseController { version = 17; } if (version == 17) { - database.executeFast("CREATE TABLE bot_info(uid INTEGER PRIMARY KEY, info BLOB)").stepThis().dispose(); database.executeFast("PRAGMA user_version = 18").stepThis().dispose(); version = 18; } @@ -950,6 +953,12 
@@ public class MessagesStorage extends BaseController { version = 78; } if (version == 78) { + database.executeFast("DROP TABLE IF EXISTS bot_info;").stepThis().dispose(); + database.executeFast("CREATE TABLE IF NOT EXISTS bot_info_v2(uid INTEGER, dialogId INTEGER, info BLOB, PRIMARY KEY(uid, dialogId))").stepThis().dispose(); + database.executeFast("PRAGMA user_version = 79").stepThis().dispose(); + version = 79; + } + if (version == 79) { } } catch (Exception e) { @@ -1239,7 +1248,7 @@ public class MessagesStorage extends BaseController { info.isBlurred = data.readBool(false); info.isMotion = data.readBool(false); info.color = data.readInt32(false); - info.gradientColor = data.readInt32(false); + info.gradientColor1 = data.readInt32(false); info.rotation = data.readInt32(false); info.intensity = (float) data.readDouble(false); boolean install = data.readBool(false); @@ -1303,6 +1312,12 @@ public class MessagesStorage extends BaseController { AndroidUtilities.runOnUIThread(() -> getMessagesController().reloadMentionsCountForChannel(inputPeer, taskId)); break; } + case 100: { + final int chatId = data.readInt32(false); + final boolean revoke = data.readBool(false); + AndroidUtilities.runOnUIThread(() -> getSecretChatHelper().declineSecretChat(chatId, revoke, taskId)); + break; + } } data.reuse(); } @@ -2845,7 +2860,7 @@ public class MessagesStorage extends BaseController { storageQueue.postRunnable(() -> { try { if (action == 1) { - database.executeFast("DELETE FROM wallpapers2 WHERE 1").stepThis().dispose(); + database.executeFast("DELETE FROM wallpapers2 WHERE num >= -1").stepThis().dispose(); } database.beginTransaction(); SQLitePreparedStatement state; @@ -2855,14 +2870,18 @@ public class MessagesStorage extends BaseController { state = database.executeFast("UPDATE wallpapers2 SET data = ? 
WHERE uid = ?"); } for (int a = 0, N = wallPapers.size(); a < N; a++) { - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) wallPapers.get(a); + TLRPC.WallPaper wallPaper = (TLRPC.WallPaper) wallPapers.get(a); state.requery(); NativeByteBuffer data = new NativeByteBuffer(wallPaper.getObjectSize()); wallPaper.serializeToStream(data); if (action != 0) { state.bindLong(1, wallPaper.id); state.bindByteBuffer(2, data); - state.bindInteger(3, action == 2 ? -1 : a); + if (action < 0) { + state.bindInteger(3, action); + } else { + state.bindInteger(3, action == 2 ? -1 : a); + } } else { state.bindByteBuffer(1, data); state.bindLong(2, wallPaper.id); @@ -2878,16 +2897,26 @@ public class MessagesStorage extends BaseController { }); } + public void deleteWallpaper(long id) { + storageQueue.postRunnable(() -> { + try { + database.executeFast("DELETE FROM wallpapers2 WHERE uid = " + id).stepThis().dispose(); + } catch (Exception e) { + FileLog.e(e); + } + }); + } + public void getWallpapers() { storageQueue.postRunnable(() -> { SQLiteCursor cursor = null; try { cursor = database.queryFinalized("SELECT data FROM wallpapers2 WHERE 1 ORDER BY num ASC"); - final ArrayList wallPapers = new ArrayList<>(); + final ArrayList wallPapers = new ArrayList<>(); while (cursor.next()) { NativeByteBuffer data = cursor.byteBufferValue(0); if (data != null) { - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) TLRPC.WallPaper.TLdeserialize(data, data.readInt32(false), false); + TLRPC.WallPaper wallPaper = TLRPC.WallPaper.TLdeserialize(data, data.readInt32(false), false); data.reuse(); if (wallPaper != null) { wallPapers.add(wallPaper); @@ -6425,7 +6454,7 @@ public class MessagesStorage extends BaseController { } } else if (load_type == 1) { long holeMessageId = 0; - cursor = database.queryFinalized(String.format(Locale.US, "SELECT start, end FROM messages_holes WHERE uid = %d AND start >= %d AND start != 1 AND end != 1 ORDER BY start ASC LIMIT 1", dialogId, max_id)); + cursor = 
database.queryFinalized(String.format(Locale.US, "SELECT start, end FROM messages_holes WHERE uid = %d AND start >= %d AND start != 1 AND end != 1 OR start < %d AND end > %d ORDER BY start ASC LIMIT 1", dialogId, max_id, max_id, max_id)); if (cursor.next()) { holeMessageId = cursor.intValue(0); if (channelId != 0) { @@ -7903,13 +7932,15 @@ public class MessagesStorage extends BaseController { private int getMessageMediaType(TLRPC.Message message) { if (message instanceof TLRPC.TL_message_secret) { - if ((message.media instanceof TLRPC.TL_messageMediaPhoto || MessageObject.isGifMessage(message)) && message.ttl > 0 && message.ttl <= 60 || + if (message.media instanceof TLRPC.TL_messageMediaPhoto || MessageObject.isGifMessage(message) || MessageObject.isVoiceMessage(message) || MessageObject.isVideoMessage(message) || MessageObject.isRoundVideoMessage(message)) { - return 1; - } else if (message.media instanceof TLRPC.TL_messageMediaPhoto || MessageObject.isVideoMessage(message)) { - return 0; + if (message.ttl > 0 && message.ttl <= 60) { + return 1; + } else { + return 0; + } } } else if (message instanceof TLRPC.TL_message && (message.media instanceof TLRPC.TL_messageMediaPhoto || message.media instanceof TLRPC.TL_messageMediaDocument) && message.media.ttl_seconds != 0) { return 1; @@ -8040,7 +8071,7 @@ public class MessagesStorage extends BaseController { putDialogsInternal(dialogs, 0); updateDialogsWithDeletedMessages(new ArrayList<>(), null, false, channel_id); - AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.removeAllMessagesFromDialog, did, true)); + AndroidUtilities.runOnUIThread(() -> getNotificationCenter().postNotificationName(NotificationCenter.removeAllMessagesFromDialog, did, true, difference)); if (checkInvite) { if (newDialogType == 1) { getMessagesController().checkChatInviter(channel_id, true); @@ -9871,13 +9902,11 @@ public class MessagesStorage extends BaseController { boolean ok = false; if 
(message.media instanceof TLRPC.TL_messageMediaUnsupported_old) { if (message.media.bytes.length == 0) { - message.media.bytes = new byte[1]; - message.media.bytes[0] = TLRPC.LAYER; + message.media.bytes = Utilities.intToBytes(TLRPC.LAYER); } } else if (message.media instanceof TLRPC.TL_messageMediaUnsupported) { message.media = new TLRPC.TL_messageMediaUnsupported_old(); - message.media.bytes = new byte[1]; - message.media.bytes[0] = TLRPC.LAYER; + message.media.bytes = Utilities.intToBytes(TLRPC.LAYER); message.flags |= TLRPC.MESSAGE_FLAG_HAS_MEDIA; } } @@ -10089,11 +10118,13 @@ public class MessagesStorage extends BaseController { } SQLiteCursor cursor = null; + int readState = 0; try { - cursor = database.queryFinalized(String.format(Locale.US, "SELECT uid FROM messages WHERE mid = %d LIMIT 1", messageId)); + cursor = database.queryFinalized(String.format(Locale.US, "SELECT uid, read_state FROM messages WHERE mid = %d LIMIT 1", messageId)); if (!cursor.next()) { return; } + readState = cursor.intValue(1); } catch (Exception e) { FileLog.e(e); } finally { @@ -10116,7 +10147,7 @@ public class MessagesStorage extends BaseController { message.serializeToStream(data); state.bindLong(1, messageId); state.bindLong(2, message.dialog_id); - state.bindInteger(3, MessageObject.getUnreadFlags(message)); + state.bindInteger(3, readState); state.bindInteger(4, message.send_state); state.bindInteger(5, message.date); state.bindByteBuffer(6, data); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MusicPlayerService.java b/TMessagesProj/src/main/java/org/telegram/messenger/MusicPlayerService.java index d9a5bdf17..27b1f1255 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MusicPlayerService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MusicPlayerService.java @@ -88,7 +88,7 @@ public class MusicPlayerService extends Service implements NotificationCenter.No NotificationCenter.getInstance(a).addObserver(this, 
NotificationCenter.messagePlayingDidSeek); NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.messagePlayingPlayStateChanged); NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.httpFileDidLoad); - NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.fileDidLoad); + NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.fileLoaded); } imageReceiver = new ImageReceiver(null); imageReceiver.setDelegate((imageReceiver, set, thumb, memCache) -> { @@ -509,7 +509,7 @@ public class MusicPlayerService extends Service implements NotificationCenter.No NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.messagePlayingDidSeek); NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.messagePlayingPlayStateChanged); NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.httpFileDidLoad); - NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.fileDidLoad); + NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.fileLoaded); } } @@ -536,7 +536,7 @@ public class MusicPlayerService extends Service implements NotificationCenter.No if (messageObject != null && loadingFilePath != null && loadingFilePath.equals(path)) { createNotification(messageObject, false); } - } else if (id == NotificationCenter.fileDidLoad) { + } else if (id == NotificationCenter.fileLoaded) { final String path = (String) args[0]; MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); if (messageObject != null && loadingFilePath != null && loadingFilePath.equals(path)) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java index 03c91b1ad..1e482fd63 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java @@ -22,7 
+22,7 @@ import java.util.zip.ZipFile; public class NativeLoader { - private final static int LIB_VERSION = 38; + private final static int LIB_VERSION = 39; private final static String LIB_NAME = "tmessages." + LIB_VERSION; private final static String LIB_SO_NAME = "lib" + LIB_NAME + ".so"; private final static String LOCALE_LIB_SO_NAME = "lib" + LIB_NAME + "loc.so"; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java index 56750c0f9..0fbf72c08 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationCenter.java @@ -114,6 +114,8 @@ public class NotificationCenter { public static final int didLoadChatInviter = totalEvents++; public static final int didLoadChatAdmins = totalEvents++; public static final int historyImportProgressChanged = totalEvents++; + public static final int stickersImportProgressChanged = totalEvents++; + public static final int stickersImportComplete = totalEvents++; public static final int dialogDeleted = totalEvents++; public static final int walletPendingTransactionsChanged = totalEvents++; @@ -124,12 +126,12 @@ public class NotificationCenter { public static final int didUpdateConnectionState = totalEvents++; - public static final int FileDidUpload = totalEvents++; - public static final int FileDidFailUpload = totalEvents++; - public static final int FileUploadProgressChanged = totalEvents++; - public static final int FileLoadProgressChanged = totalEvents++; - public static final int fileDidLoad = totalEvents++; - public static final int fileDidFailToLoad = totalEvents++; + public static final int fileUploaded = totalEvents++; + public static final int fileUploadFailed = totalEvents++; + public static final int fileUploadProgressChanged = totalEvents++; + public static final int fileLoadProgressChanged = totalEvents++; + public 
static final int fileLoaded = totalEvents++; + public static final int fileLoadFailed = totalEvents++; public static final int filePreparingStarted = totalEvents++; public static final int fileNewChunkAvailable = totalEvents++; public static final int filePreparingFailed = totalEvents++; @@ -154,6 +156,9 @@ public class NotificationCenter { public static final int didStartedCall = totalEvents++; public static final int groupCallUpdated = totalEvents++; + public static final int groupCallSpeakingUsersUpdated = totalEvents++; + public static final int groupCallScreencastStateChanged = totalEvents++; + public static final int activeGroupCallsUpdated = totalEvents++; public static final int applyGroupCallVisibleParticipants = totalEvents++; public static final int groupCallTypingsUpdated = totalEvents++; public static final int didEndCall = totalEvents++; @@ -182,7 +187,7 @@ public class NotificationCenter { public static final int wallpapersNeedReload = totalEvents++; public static final int didReceiveSmsCode = totalEvents++; public static final int didReceiveCall = totalEvents++; - public static final int emojiDidLoad = totalEvents++; + public static final int emojiLoaded = totalEvents++; public static final int closeOtherAppActivities = totalEvents++; public static final int cameraInitied = totalEvents++; public static final int didReplacedPhotoInMemCache = totalEvents++; @@ -213,6 +218,7 @@ public class NotificationCenter { public static final int webRtcMicAmplitudeEvent = totalEvents++; public static final int webRtcSpeakerAmplitudeEvent = totalEvents++; public static final int showBulletin = totalEvents++; + public static final int appUpdateAvailable = totalEvents++; private SparseArray> observers = new SparseArray<>(); private SparseArray> removeAfterBroadcast = new SparseArray<>(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationImageProvider.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationImageProvider.java 
index c615c9c5b..5370c16bc 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationImageProvider.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationImageProvider.java @@ -33,7 +33,7 @@ public class NotificationImageProvider extends ContentProvider implements Notifi @Override public boolean onCreate() { for (int i = 0; i < UserConfig.getActivatedAccountsCount(); i++) { - NotificationCenter.getInstance(i).addObserver(this, NotificationCenter.fileDidLoad); + NotificationCenter.getInstance(i).addObserver(this, NotificationCenter.fileLoaded); } return true; } @@ -41,7 +41,7 @@ public class NotificationImageProvider extends ContentProvider implements Notifi @Override public void shutdown() { for (int i = 0; i < UserConfig.getActivatedAccountsCount(); i++) { - NotificationCenter.getInstance(i).removeObserver(this, NotificationCenter.fileDidLoad); + NotificationCenter.getInstance(i).removeObserver(this, NotificationCenter.fileLoaded); } } @@ -138,7 +138,7 @@ public class NotificationImageProvider extends ContentProvider implements Notifi @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.fileDidLoad) { + if (id == NotificationCenter.fileLoaded) { synchronized (sync) { String name = (String) args[0]; if (waitingForFiles.remove(name)) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java index dface0ef3..cc6f0b10d 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java @@ -67,7 +67,6 @@ import org.telegram.ui.PopupNotificationActivity; import java.io.File; import java.util.ArrayList; -import java.util.Arrays; import java.util.Calendar; import java.util.HashSet; import java.util.List; @@ -107,6 +106,7 @@ public class NotificationsController extends BaseController { public long lastNotificationChannelCreateTime; private Boolean groupsCreated; + private boolean channelGroupsCreated; public static long globalSecretChatId = -(1L << 32); @@ -245,6 +245,7 @@ public class NotificationsController extends BaseController { public void cleanup() { popupMessages.clear(); popupReplyMessages.clear(); + channelGroupsCreated = false; notificationsQueue.postRunnable(() -> { opened_dialog_id = 0; total_unread_count = 0; @@ -2851,27 +2852,62 @@ public class NotificationsController extends BaseController { } catch (Exception e) { FileLog.e(e); } - TLRPC.User user = getMessagesController().getUser(getUserConfig().getClientUserId()); - if (user == null) { - getUserConfig().getCurrentUser(); - } - String userName; - if (user != null) { - userName = " (" + ContactsController.formatName(user.first_name, user.last_name) + ")"; - } else { - userName = ""; - } - - systemNotificationManager.createNotificationChannelGroups(Arrays.asList( - new NotificationChannelGroup("channels" + currentAccount, LocaleController.getString("NotificationsChannels", R.string.NotificationsChannels) + userName), - new 
NotificationChannelGroup("groups" + currentAccount, LocaleController.getString("NotificationsGroups", R.string.NotificationsGroups) + userName), - new NotificationChannelGroup("private" + currentAccount, LocaleController.getString("NotificationsPrivateChats", R.string.NotificationsPrivateChats) + userName), - new NotificationChannelGroup("other" + currentAccount, LocaleController.getString("NotificationsOther", R.string.NotificationsOther) + userName) - )); - preferences.edit().putBoolean("groupsCreated4", true).commit(); groupsCreated = true; } + if (!channelGroupsCreated) { + List list = systemNotificationManager.getNotificationChannelGroups(); + String channelsId = "channels" + currentAccount; + String groupsId = "groups" + currentAccount; + String privateId = "private" + currentAccount; + String otherId = "other" + currentAccount; + for (int a = 0, N = list.size(); a < N; a++) { + String id = list.get(a).getId(); + if (channelsId != null && channelsId.equals(id)) { + channelsId = null; + } else if (groupsId != null && groupsId.equals(id)) { + groupsId = null; + } else if (privateId != null && privateId.equals(id)) { + privateId = null; + } else if (otherId != null && otherId.equals(id)) { + otherId = null; + } + if (channelsId == null && groupsId == null && privateId == null && otherId == null) { + break; + } + } + + if (channelsId != null || groupsId != null || privateId != null || otherId != null) { + TLRPC.User user = getMessagesController().getUser(getUserConfig().getClientUserId()); + if (user == null) { + getUserConfig().getCurrentUser(); + } + String userName; + if (user != null) { + userName = " (" + ContactsController.formatName(user.first_name, user.last_name) + ")"; + } else { + userName = ""; + } + + ArrayList channelGroups = new ArrayList<>(); + if (channelsId != null) { + channelGroups.add(new NotificationChannelGroup(channelsId, LocaleController.getString("NotificationsChannels", R.string.NotificationsChannels) + userName)); + } + if (groupsId != 
null) { + channelGroups.add(new NotificationChannelGroup(groupsId, LocaleController.getString("NotificationsGroups", R.string.NotificationsGroups) + userName)); + } + if (privateId != null) { + channelGroups.add(new NotificationChannelGroup(privateId, LocaleController.getString("NotificationsPrivateChats", R.string.NotificationsPrivateChats) + userName)); + } + if (otherId != null) { + channelGroups.add(new NotificationChannelGroup(otherId, LocaleController.getString("NotificationsOther", R.string.NotificationsOther) + userName)); + } + + systemNotificationManager.createNotificationChannelGroups(channelGroups); + } + + channelGroupsCreated = true; + } } @TargetApi(26) diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SecretChatHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SecretChatHelper.java index cbe092d01..1dd756f61 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SecretChatHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SecretChatHelper.java @@ -604,8 +604,6 @@ public class SecretChatHelper extends BaseController { Utilities.random.nextBytes(layer.random_bytes); toEncryptObject = layer; - int mtprotoVersion = AndroidUtilities.getPeerLayerVersion(chat.layer) >= 73 ? 2 : 1; - if (chat.seq_in == 0 && chat.seq_out == 0) { if (chat.admin_id == getUserConfig().getClientUserId()) { chat.seq_out = 1; @@ -619,15 +617,15 @@ public class SecretChatHelper extends BaseController { layer.in_seq_no = chat.seq_in > 0 ? 
chat.seq_in : chat.seq_in + 2; layer.out_seq_no = chat.seq_out; chat.seq_out += 2; - if (AndroidUtilities.getPeerLayerVersion(chat.layer) >= 20) { - if (chat.key_create_date == 0) { - chat.key_create_date = getConnectionsManager().getCurrentTime(); - } - chat.key_use_count_out++; - if ((chat.key_use_count_out >= 100 || chat.key_create_date < getConnectionsManager().getCurrentTime() - 60 * 60 * 24 * 7) && chat.exchange_id == 0 && chat.future_key_fingerprint == 0) { - requestNewSecretChatKey(chat); - } + + if (chat.key_create_date == 0) { + chat.key_create_date = getConnectionsManager().getCurrentTime(); } + chat.key_use_count_out++; + if ((chat.key_use_count_out >= 100 || chat.key_create_date < getConnectionsManager().getCurrentTime() - 60 * 60 * 24 * 7) && chat.exchange_id == 0 && chat.future_key_fingerprint == 0) { + requestNewSecretChatKey(chat); + } + getMessagesStorage().updateEncryptedChatSeq(chat, false); if (newMsgObj != null) { newMsgObj.seq_in = layer.in_seq_no; @@ -649,9 +647,7 @@ public class SecretChatHelper extends BaseController { len = toEncrypt.length(); int extraLen = len % 16 != 0 ? 16 - len % 16 : 0; - if (mtprotoVersion == 2) { - extraLen += (2 + Utilities.random.nextInt(3)) * 16; - } + extraLen += (2 + Utilities.random.nextInt(3)) * 16; NativeByteBuffer dataForEncryption = new NativeByteBuffer(len + extraLen); toEncrypt.position(0); @@ -664,17 +660,14 @@ public class SecretChatHelper extends BaseController { byte[] messageKey = new byte[16]; byte[] messageKeyFull; - boolean incoming = mtprotoVersion == 2 && chat.admin_id != getUserConfig().getClientUserId(); - if (mtprotoVersion == 2) { - messageKeyFull = Utilities.computeSHA256(chat.auth_key, 88 + (incoming ? 
8 : 0), 32, dataForEncryption.buffer, 0, dataForEncryption.buffer.limit()); - System.arraycopy(messageKeyFull, 8, messageKey, 0, 16); - } else { - messageKeyFull = Utilities.computeSHA1(toEncrypt.buffer); - System.arraycopy(messageKeyFull, messageKeyFull.length - 16, messageKey, 0, 16); - } + boolean incoming = chat.admin_id != getUserConfig().getClientUserId(); + + messageKeyFull = Utilities.computeSHA256(chat.auth_key, 88 + (incoming ? 8 : 0), 32, dataForEncryption.buffer, 0, dataForEncryption.buffer.limit()); + System.arraycopy(messageKeyFull, 8, messageKey, 0, 16); + toEncrypt.reuse(); - MessageKeyData keyData = MessageKeyData.generateMessageKeyData(chat.auth_key, messageKey, incoming, mtprotoVersion); + MessageKeyData keyData = MessageKeyData.generateMessageKeyData(chat.auth_key, messageKey, incoming, 2); Utilities.aesIgeEncryption(dataForEncryption.buffer, keyData.aesKey, keyData.aesIv, true, false, 0, dataForEncryption.limit()); @@ -730,7 +723,7 @@ public class SecretChatHelper extends BaseController { currentChat.key_hash = AndroidUtilities.calcAuthKeyHash(currentChat.auth_key); } - if (AndroidUtilities.getPeerLayerVersion(currentChat.layer) >= 46 && currentChat.key_hash.length == 16) { + if (currentChat.key_hash.length == 16) { try { byte[] sha256 = Utilities.computeSHA256(chat.auth_key, 0, chat.auth_key.length); byte[] key_hash = new byte[36]; @@ -802,7 +795,7 @@ public class SecretChatHelper extends BaseController { if (newPeerLayer <= currentPeerLayer) { return; } - if (chat.key_hash.length == 16 && currentPeerLayer >= 46) { + if (chat.key_hash.length == 16) { try { byte[] sha256 = Utilities.computeSHA256(chat.auth_key, 0, chat.auth_key.length); byte[] key_hash = new byte[36]; @@ -829,7 +822,7 @@ public class SecretChatHelper extends BaseController { from_id = chat.participant_id; } - if (AndroidUtilities.getPeerLayerVersion(chat.layer) >= 20 && chat.exchange_id == 0 && chat.future_key_fingerprint == 0 && chat.key_use_count_in >= 120) { + if 
(chat.exchange_id == 0 && chat.future_key_fingerprint == 0 && chat.key_use_count_in >= 120) { requestNewSecretChatKey(chat); } @@ -853,14 +846,9 @@ public class SecretChatHelper extends BaseController { if (object instanceof TLRPC.TL_decryptedMessage) { TLRPC.TL_decryptedMessage decryptedMessage = (TLRPC.TL_decryptedMessage) object; TLRPC.TL_message newMessage; - if (AndroidUtilities.getPeerLayerVersion(chat.layer) >= 17) { - newMessage = new TLRPC.TL_message_secret(); - newMessage.ttl = decryptedMessage.ttl; - newMessage.entities = decryptedMessage.entities; - } else { - newMessage = new TLRPC.TL_message(); - newMessage.ttl = chat.ttl; - } + newMessage = new TLRPC.TL_message_secret(); + newMessage.ttl = decryptedMessage.ttl; + newMessage.entities = decryptedMessage.entities; newMessage.message = decryptedMessage.message; newMessage.date = date; newMessage.local_id = newMessage.id = getUserConfig().getNewMessageId(); @@ -932,7 +920,7 @@ public class SecretChatHelper extends BaseController { newMessage.media.flags |= 4; } - TLRPC.TL_photoSize big = new TLRPC.TL_photoSize(); + TLRPC.TL_photoSize big = new TLRPC.TL_photoSize_layer127(); big.w = decryptedMessage.media.w; big.h = decryptedMessage.media.h; big.type = "x"; @@ -1171,7 +1159,7 @@ public class SecretChatHelper extends BaseController { })); getMessagesStorage().deleteDialog(did, 1); getNotificationCenter().postNotificationName(NotificationCenter.dialogsNeedReload); - getNotificationCenter().postNotificationName(NotificationCenter.removeAllMessagesFromDialog, did, false); + getNotificationCenter().postNotificationName(NotificationCenter.removeAllMessagesFromDialog, did, false, null); }); return null; } else if (serviceMessage.action instanceof TLRPC.TL_decryptedMessageActionDeleteMessages) { @@ -1512,6 +1500,8 @@ public class SecretChatHelper extends BaseController { MessageKeyData keyData = MessageKeyData.generateMessageKeyData(keyToDecrypt, messageKey, incoming, version); 
Utilities.aesIgeEncryption(is.buffer, keyData.aesKey, keyData.aesIv, false, false, 24, is.limit() - 24); + int error = 0; + int len = is.readInt32(false); byte[] messageKeyFull; if (version == 2) { @@ -1521,7 +1511,7 @@ public class SecretChatHelper extends BaseController { Utilities.aesIgeEncryption(is.buffer, keyData.aesKey, keyData.aesIv, true, false, 24, is.limit() - 24); is.position(24); } - return false; + error |= 1; } } else { int l = len + 28; @@ -1534,18 +1524,30 @@ public class SecretChatHelper extends BaseController { Utilities.aesIgeEncryption(is.buffer, keyData.aesKey, keyData.aesIv, true, false, 24, is.limit() - 24); is.position(24); } - return false; + error |= 1; } } - if (len <= 0 || len > is.limit() - 28) { - return false; + if (len <= 0) { + error |= 1; + } + if (len > is.limit() - 28) { + error |= 1; } int padding = is.limit() - 28 - len; - if (version == 2 && (padding < 12 || padding > 1024) || version == 1 && padding > 15) { - return false; + if (version == 2) { + if (padding < 12) { + error |= 1; + } + if (padding > 1024) { + error |= 1; + } + } else { + if (padding > 15) { + error |= 1; + } } - // - return true; + + return error == 0; } protected ArrayList decryptMessage(TLRPC.EncryptedMessage message) { @@ -1578,8 +1580,8 @@ public class SecretChatHelper extends BaseController { keyToDecrypt = chat.future_auth_key; new_key_used = true; } - int mtprotoVersion = AndroidUtilities.getPeerLayerVersion(chat.layer) >= 73 ? 
2 : 1; - int decryptedWithVersion = mtprotoVersion; + int mtprotoVersion = 2; + int decryptedWithVersion = 2; if (keyToDecrypt != null) { byte[] messageKey = is.readData(16, false); @@ -1607,7 +1609,7 @@ public class SecretChatHelper extends BaseController { TLObject object = TLClassStore.Instance().TLdeserialize(is, is.readInt32(false), false); is.reuse(); - if (!new_key_used && AndroidUtilities.getPeerLayerVersion(chat.layer) >= 20) { + if (!new_key_used) { chat.key_use_count_in++; } if (object instanceof TLRPC.TL_decryptedMessageLayer) { @@ -1707,9 +1709,6 @@ public class SecretChatHelper extends BaseController { } public void requestNewSecretChatKey(final TLRPC.EncryptedChat encryptedChat) { - if (AndroidUtilities.getPeerLayerVersion(encryptedChat.layer) < 20) { - return; - } final byte[] salt = new byte[256]; Utilities.random.nextBytes(salt); @@ -1797,11 +1796,32 @@ public class SecretChatHelper extends BaseController { } public void declineSecretChat(int chat_id, boolean revoke) { + declineSecretChat(chat_id, revoke, 0); + } + + public void declineSecretChat(int chat_id, boolean revoke, long taskId) { + final long newTaskId; + if (taskId == 0) { + NativeByteBuffer data = null; + try { + data = new NativeByteBuffer(4 + 4 + 4); + data.writeInt32(100); + data.writeInt32(chat_id); + data.writeBool(revoke); + } catch (Exception e) { + FileLog.e(e); + } + newTaskId = getMessagesStorage().createPendingTask(data); + } else { + newTaskId = taskId; + } TLRPC.TL_messages_discardEncryption req = new TLRPC.TL_messages_discardEncryption(); req.chat_id = chat_id; req.delete_history = revoke; getConnectionsManager().sendRequest(req, (response, error) -> { - + if (newTaskId != 0) { + getMessagesStorage().removePendingTask(newTaskId); + } }); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java index cd34b18f6..700bf85a4 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java @@ -59,7 +59,6 @@ import org.telegram.ui.ChatActivity; import org.telegram.ui.Components.AlertsCreator; import org.telegram.ui.Components.AnimatedFileDrawable; import org.telegram.ui.Components.Bulletin; -import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.Point; import org.telegram.ui.PaymentFormActivity; @@ -71,7 +70,6 @@ import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.RandomAccessFile; -import java.lang.ref.WeakReference; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.util.ArrayList; @@ -99,6 +97,9 @@ public class SendMessagesHelper extends BaseController implements NotificationCe private HashMap importingHistoryFiles = new HashMap<>(); private LongSparseArray importingHistoryMap = new LongSparseArray<>(); + private HashMap importingStickersFiles = new HashMap<>(); + private HashMap importingStickersMap = new HashMap<>(); + public class ImportingHistory { public String historyPath; public ArrayList mediaPaths = new ArrayList<>(); @@ -270,6 +271,179 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } } + public static class ImportingSticker { + public String path; + public String emoji; + public boolean validated; + public String mimeType; + public boolean animated; + public TLRPC.TL_inputStickerSetItem item; + + public void uploadMedia(int account, TLRPC.InputFile inputFile, Runnable onFinish) { + TLRPC.TL_messages_uploadMedia req = new TLRPC.TL_messages_uploadMedia(); + req.peer = new TLRPC.TL_inputPeerSelf(); + req.media = new TLRPC.TL_inputMediaUploadedDocument(); + req.media.file = inputFile; + req.media.mime_type = mimeType; + + ConnectionsManager.getInstance(account).sendRequest(req, new 
RequestDelegate() { + @Override + public void run(TLObject response, TLRPC.TL_error error) { + AndroidUtilities.runOnUIThread(() -> { + if (response instanceof TLRPC.TL_messageMediaDocument) { + TLRPC.TL_messageMediaDocument mediaDocument = (TLRPC.TL_messageMediaDocument) response; + item = new TLRPC.TL_inputStickerSetItem(); + item.document = new TLRPC.TL_inputDocument(); + item.document.id = mediaDocument.document.id; + item.document.access_hash = mediaDocument.document.access_hash; + item.document.file_reference = mediaDocument.document.file_reference; + item.emoji = emoji != null ? emoji : ""; + mimeType = mediaDocument.document.mime_type; + } else if (animated) { + mimeType = "application/x-bad-tgsticker"; + } + onFinish.run(); + }); + } + }, ConnectionsManager.RequestFlagFailOnServerErrors); + } + } + + public class ImportingStickers { + + public HashMap uploadSet = new HashMap<>(); + public HashMap uploadProgresses = new HashMap<>(); + public HashMap uploadSize = new HashMap<>(); + public ArrayList uploadMedia = new ArrayList<>(); + + public String shortName; + public String title; + public String software; + + public long totalSize; + public long uploadedSize; + + public int uploadProgress; + public double estimatedUploadSpeed; + private long lastUploadTime; + private long lastUploadSize; + public int timeUntilFinish = Integer.MAX_VALUE; + + private void initImport() { + getNotificationCenter().postNotificationName(NotificationCenter.stickersImportProgressChanged, shortName); + lastUploadTime = SystemClock.elapsedRealtime(); + for (int a = 0, N = uploadMedia.size(); a < N; a++) { + getFileLoader().uploadFile(uploadMedia.get(a).path, false, true, ConnectionsManager.FileTypeFile); + } + } + + public long getUploadedCount() { + return uploadedSize; + } + + public long getTotalCount() { + return totalSize; + } + + private void onFileFailedToUpload(String path) { + ImportingSticker file = uploadSet.remove(path); + if (file != null) { + uploadMedia.remove(file); 
+ } + } + + private void addUploadProgress(String path, long sz, float progress) { + uploadProgresses.put(path, progress); + uploadSize.put(path, sz); + uploadedSize = 0; + for (HashMap.Entry entry : uploadSize.entrySet()) { + uploadedSize += entry.getValue(); + } + long newTime = SystemClock.elapsedRealtime(); + if (uploadedSize != lastUploadSize && newTime != lastUploadTime) { + double dt = (newTime - lastUploadTime) / 1000.0; + double uploadSpeed = (uploadedSize - lastUploadSize) / dt; + if (estimatedUploadSpeed == 0) { + estimatedUploadSpeed = uploadSpeed; + } else { + double coef = 0.01; + estimatedUploadSpeed = coef * uploadSpeed + (1 - coef) * estimatedUploadSpeed; + } + timeUntilFinish = (int) ((totalSize - uploadedSize) * 1000 / (double) estimatedUploadSpeed); + lastUploadSize = uploadedSize; + lastUploadTime = newTime; + } + float pr = getUploadedCount() / (float) getTotalCount(); + int newProgress = (int) (pr * 100); + if (uploadProgress != newProgress) { + uploadProgress = newProgress; + getNotificationCenter().postNotificationName(NotificationCenter.stickersImportProgressChanged, shortName); + } + } + + private void onMediaImport(String path, long size, TLRPC.InputFile inputFile) { + addUploadProgress(path, size, 1.0f); + + ImportingSticker file = uploadSet.get(path); + if (file == null) { + return; + } + file.uploadMedia(currentAccount, inputFile, () -> { + uploadSet.remove(path); + getNotificationCenter().postNotificationName(NotificationCenter.stickersImportProgressChanged, shortName); + if (uploadSet.isEmpty()) { + startImport(); + } + }); + } + + private void startImport() { + TLRPC.TL_stickers_createStickerSet req = new TLRPC.TL_stickers_createStickerSet(); + req.user_id = new TLRPC.TL_inputUserSelf(); + req.title = title; + req.short_name = shortName; + req.animated = uploadMedia.get(0).animated; + if (software != null) { + req.software = software; + req.flags |= 8; + } + for (int a = 0, N = uploadMedia.size(); a < N; a++) { + ImportingSticker 
file = uploadMedia.get(a); + if (file.item == null) { + continue; + } + req.stickers.add(file.item); + } + getConnectionsManager().sendRequest(req, new RequestDelegate() { + @Override + public void run(TLObject response, TLRPC.TL_error error) { + AndroidUtilities.runOnUIThread(() -> { + importingStickersMap.remove(shortName); + if (error == null) { + getNotificationCenter().postNotificationName(NotificationCenter.stickersImportProgressChanged, shortName); + } else { + getNotificationCenter().postNotificationName(NotificationCenter.stickersImportProgressChanged, shortName, req, error); + } + if (response instanceof TLRPC.TL_messages_stickerSet) { + if (getNotificationCenter().hasObservers(NotificationCenter.stickersImportComplete)) { + getNotificationCenter().postNotificationName(NotificationCenter.stickersImportComplete, response); + } else { + getMediaDataController().toggleStickerSet(null, response, 2, null, false, false); + } + } + }); + } + }); + } + + public void setImportProgress(int value) { + if (value == 100) { + importingStickersMap.remove(shortName); + } + getNotificationCenter().postNotificationName(NotificationCenter.stickersImportProgressChanged, shortName); + } + } + private static DispatchQueue mediaSendQueue = new DispatchQueue("mediaSendQueue"); private static ThreadPoolExecutor mediaSendThreadPool; @@ -596,16 +770,16 @@ public class SendMessagesHelper extends BaseController implements NotificationCe super(instance); AndroidUtilities.runOnUIThread(() -> { - getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.FileDidUpload); - getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.FileUploadProgressChanged); - getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.FileDidFailUpload); + getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.fileUploaded); + getNotificationCenter().addObserver(SendMessagesHelper.this, 
NotificationCenter.fileUploadProgressChanged); + getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.fileUploadFailed); getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.filePreparingStarted); getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.fileNewChunkAvailable); getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.filePreparingFailed); getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.httpFileDidFailedLoad); getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.httpFileDidLoad); - getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.fileDidLoad); - getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.fileDidFailToLoad); + getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.fileLoaded); + getNotificationCenter().addObserver(SendMessagesHelper.this, NotificationCenter.fileLoadFailed); }); } @@ -622,12 +796,14 @@ public class SendMessagesHelper extends BaseController implements NotificationCe waitingForVote.clear(); importingHistoryFiles.clear(); importingHistoryMap.clear(); + importingStickersFiles.clear(); + importingStickersMap.clear(); locationProvider.stop(); } @Override public void didReceivedNotification(int id, int account, final Object... 
args) { - if (id == NotificationCenter.FileUploadProgressChanged) { + if (id == NotificationCenter.fileUploadProgressChanged) { String fileName = (String) args[0]; ImportingHistory importingHistory = importingHistoryFiles.get(fileName); if (importingHistory != null) { @@ -635,7 +811,14 @@ public class SendMessagesHelper extends BaseController implements NotificationCe Long totalSize = (Long) args[2]; importingHistory.addUploadProgress(fileName, loadedSize, loadedSize / (float) totalSize); } - } else if (id == NotificationCenter.FileDidUpload) { + + ImportingStickers importingStickers = importingStickersFiles.get(fileName); + if (importingStickers != null) { + Long loadedSize = (Long) args[1]; + Long totalSize = (Long) args[2]; + importingStickers.addUploadProgress(fileName, loadedSize, loadedSize / (float) totalSize); + } + } else if (id == NotificationCenter.fileUploaded) { final String location = (String) args[0]; final TLRPC.InputFile file = (TLRPC.InputFile) args[1]; final TLRPC.InputEncryptedFile encryptedFile = (TLRPC.InputEncryptedFile) args[2]; @@ -649,6 +832,11 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } } + ImportingStickers importingStickers = importingStickersFiles.get(location); + if (importingStickers != null) { + importingStickers.onMediaImport(location, (Long) args[5], file); + } + ArrayList arr = delayedMessages.get(location); if (arr != null) { for (int a = 0; a < arr.size(); a++) { @@ -764,7 +952,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe delayedMessages.remove(location); } } - } else if (id == NotificationCenter.FileDidFailUpload) { + } else if (id == NotificationCenter.fileUploadFailed) { final String location = (String) args[0]; final boolean enc = (Boolean) args[1]; @@ -773,6 +961,11 @@ public class SendMessagesHelper extends BaseController implements NotificationCe importingHistory.onFileFailedToUpload(location); } + ImportingStickers importingStickers = 
importingStickersFiles.get(location); + if (importingStickers != null) { + importingStickers.onFileFailedToUpload(location); + } + ArrayList arr = delayedMessages.get(location); if (arr != null) { for (int a = 0; a < arr.size(); a++) { @@ -981,7 +1174,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } delayedMessages.remove(path); } - } else if (id == NotificationCenter.fileDidLoad) { + } else if (id == NotificationCenter.fileLoaded) { String path = (String) args[0]; ArrayList arr = delayedMessages.get(path); if (arr != null) { @@ -990,7 +1183,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } delayedMessages.remove(path); } - } else if (id == NotificationCenter.httpFileDidFailedLoad || id == NotificationCenter.fileDidFailToLoad) { + } else if (id == NotificationCenter.httpFileDidFailedLoad || id == NotificationCenter.fileLoadFailed) { String path = (String) args[0]; ArrayList arr = delayedMessages.get(path); @@ -1142,7 +1335,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (key.startsWith("http")) { ImageLoader.getInstance().cancelLoadHttpFile(key); } else { - getFileLoader().cancelUploadFile(key, enc); + getFileLoader().cancelFileUpload(key, enc); } stopVideoService(key); delayedMessages.remove(key); @@ -1237,7 +1430,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (messageObject.messageOwner.media.photo instanceof TLRPC.TL_photo) { sendMessage((TLRPC.TL_photo) messageObject.messageOwner.media.photo, null, did, messageObject.replyMessageObject, null, messageObject.messageOwner.message, messageObject.messageOwner.entities, null, params, true, 0, messageObject.messageOwner.media.ttl_seconds, messageObject); } else if (messageObject.messageOwner.media.document instanceof TLRPC.TL_document) { - sendMessage((TLRPC.TL_document) messageObject.messageOwner.media.document, null, messageObject.messageOwner.attachPath, 
did, messageObject.replyMessageObject, null, messageObject.messageOwner.message, messageObject.messageOwner.entities, null, params, true, 0, messageObject.messageOwner.media.ttl_seconds, messageObject); + sendMessage((TLRPC.TL_document) messageObject.messageOwner.media.document, null, messageObject.messageOwner.attachPath, did, messageObject.replyMessageObject, null, messageObject.messageOwner.message, messageObject.messageOwner.entities, null, params, true, 0, messageObject.messageOwner.media.ttl_seconds, messageObject, null); } else if (messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaVenue || messageObject.messageOwner.media instanceof TLRPC.TL_messageMediaGeo) { sendMessage(messageObject.messageOwner.media, did, messageObject.replyMessageObject, null, null, null, true, 0); } else if (messageObject.messageOwner.media.phone_number != null) { @@ -1273,7 +1466,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } else { entities = null; } - sendMessage(messageObject.messageOwner.message, did, messageObject.replyMessageObject, null, webPage, true, entities, null, null, true, 0); + sendMessage(messageObject.messageOwner.message, did, messageObject.replyMessageObject, null, webPage, true, entities, null, null, true, 0, null); } else if ((int) did != 0) { ArrayList arrayList = new ArrayList<>(); arrayList.add(messageObject); @@ -1330,7 +1523,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe performSendMessageRequest(req, newMsgObj, null, null, null, null, false); } - public void sendSticker(TLRPC.Document document, String query, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, Object parentObject, boolean notify, int scheduleDate) { + public void sendSticker(TLRPC.Document document, String query, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, Object parentObject, MessageObject.SendAnimationData sendAnimationData, boolean notify, int scheduleDate) { 
if (document == null) { return; } @@ -1420,7 +1613,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (bitmapFinal[0] != null && keyFinal[0] != null) { ImageLoader.getInstance().putImageToCache(new BitmapDrawable(bitmapFinal[0]), keyFinal[0]); } - sendMessage((TLRPC.TL_document) finalDocument, null, null, peer, replyToMsg, replyToTopMsg, null, null, null, null, notify, scheduleDate, 0, parentObject); + sendMessage((TLRPC.TL_document) finalDocument, null, null, peer, replyToMsg, replyToTopMsg, null, null, null, null, notify, scheduleDate, 0, parentObject, sendAnimationData); }); }); } else { @@ -1431,7 +1624,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } else { params = null; } - sendMessage((TLRPC.TL_document) finalDocument, null, null, peer, replyToMsg, replyToTopMsg, null, null, null, params, notify, scheduleDate, 0, parentObject); + sendMessage((TLRPC.TL_document) finalDocument, null, null, peer, replyToMsg, replyToTopMsg, null, null, null, params, notify, scheduleDate, 0, parentObject, sendAnimationData); } } @@ -1493,7 +1686,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (msgObj.getId() <= 0 || msgObj.needDrawBluredPreview()) { if (msgObj.type == 0 && !TextUtils.isEmpty(msgObj.messageText)) { TLRPC.WebPage webPage = msgObj.messageOwner.media != null ? 
msgObj.messageOwner.media.webpage : null; - sendMessage(msgObj.messageText.toString(), peer, null, null, webPage, webPage != null, msgObj.messageOwner.entities, null, null, notify, scheduleDate); + sendMessage(msgObj.messageText.toString(), peer, null, null, webPage, webPage != null, msgObj.messageOwner.entities, null, null, notify, scheduleDate, null); } continue; } @@ -2832,42 +3025,42 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } public void sendMessage(MessageObject retryMessageObject) { - sendMessage(null, null, null, null, null, null, null, null, null, null, retryMessageObject.getDialogId(), retryMessageObject.messageOwner.attachPath, null, null, null, true, retryMessageObject, null, retryMessageObject.messageOwner.reply_markup, retryMessageObject.messageOwner.params, !retryMessageObject.messageOwner.silent, retryMessageObject.scheduled ? retryMessageObject.messageOwner.date : 0, 0, null); + sendMessage(null, null, null, null, null, null, null, null, null, null, retryMessageObject.getDialogId(), retryMessageObject.messageOwner.attachPath, null, null, null, true, retryMessageObject, null, retryMessageObject.messageOwner.reply_markup, retryMessageObject.messageOwner.params, !retryMessageObject.messageOwner.silent, retryMessageObject.scheduled ? 
retryMessageObject.messageOwner.date : 0, 0, null, null); } public void sendMessage(TLRPC.User user, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate) { - sendMessage(null, null, null, null, null, user, null, null, null, null, peer, null, replyToMsg, replyToTopMsg, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null); + sendMessage(null, null, null, null, null, user, null, null, null, null, peer, null, replyToMsg, replyToTopMsg, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null, null); } public void sendMessage(TLRPC.TL_messageMediaInvoice invoice, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate) { - sendMessage(null, null, null, null, null, null, null, null, null, invoice, peer, null, replyToMsg, replyToTopMsg, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null); + sendMessage(null, null, null, null, null, null, null, null, null, invoice, peer, null, replyToMsg, replyToTopMsg, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null, null); } - public void sendMessage(TLRPC.TL_document document, VideoEditedInfo videoEditedInfo, String path, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, String caption, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject) { - sendMessage(null, caption, null, null, videoEditedInfo, null, document, null, null, null, peer, path, replyToMsg, replyToTopMsg, null, true, null, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject); + public void sendMessage(TLRPC.TL_document document, VideoEditedInfo videoEditedInfo, String path, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, String caption, ArrayList entities, TLRPC.ReplyMarkup 
replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject, MessageObject.SendAnimationData sendAnimationData) { + sendMessage(null, caption, null, null, videoEditedInfo, null, document, null, null, null, peer, path, replyToMsg, replyToTopMsg, null, true, null, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject, sendAnimationData); } - public void sendMessage(String message, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate) { - sendMessage(message, null, null, null, null, null, null, null, null, null, peer, null, replyToMsg, replyToTopMsg, webPage, searchLinks, null, entities, replyMarkup, params, notify, scheduleDate, 0, null); + public void sendMessage(String message, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, MessageObject.SendAnimationData sendAnimationData) { + sendMessage(message, null, null, null, null, null, null, null, null, null, peer, null, replyToMsg, replyToTopMsg, webPage, searchLinks, null, entities, replyMarkup, params, notify, scheduleDate, 0, null, sendAnimationData); } public void sendMessage(TLRPC.MessageMedia location, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate) { - sendMessage(null, null, location, null, null, null, null, null, null, null, peer, null, replyToMsg, replyToTopMsg, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null); + sendMessage(null, null, location, null, null, null, null, null, null, null, peer, null, replyToMsg, replyToTopMsg, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null, null); } public void 
sendMessage(TLRPC.TL_messageMediaPoll poll, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate) { - sendMessage(null, null, null, null, null, null, null, null, poll, null, peer, null, replyToMsg, replyToTopMsg, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null); + sendMessage(null, null, null, null, null, null, null, null, poll, null, peer, null, replyToMsg, replyToTopMsg, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null, null); } public void sendMessage(TLRPC.TL_game game, long peer, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate) { - sendMessage(null, null, null, null, null, null, null, game, null, null, peer, null, null, null, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null); + sendMessage(null, null, null, null, null, null, null, game, null, null, peer, null, null, null, null, true, null, null, replyMarkup, params, notify, scheduleDate, 0, null, null); } public void sendMessage(TLRPC.TL_photo photo, String path, long peer, MessageObject replyToMsg, MessageObject replyToTopMsg, String caption, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject) { - sendMessage(null, caption, null, photo, null, null, null, null, null, null, peer, path, replyToMsg, replyToTopMsg, null, true, null, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject); + sendMessage(null, caption, null, photo, null, null, null, null, null, null, peer, path, replyToMsg, replyToTopMsg, null, true, null, entities, replyMarkup, params, notify, scheduleDate, ttl, parentObject, null); } - private void sendMessage(String message, String caption, TLRPC.MessageMedia location, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.User user, TLRPC.TL_document document, TLRPC.TL_game game, 
TLRPC.TL_messageMediaPoll poll, TLRPC.TL_messageMediaInvoice invoice, long peer, String path, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, MessageObject retryMessageObject, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject) { + private void sendMessage(String message, String caption, TLRPC.MessageMedia location, TLRPC.TL_photo photo, VideoEditedInfo videoEditedInfo, TLRPC.User user, TLRPC.TL_document document, TLRPC.TL_game game, TLRPC.TL_messageMediaPoll poll, TLRPC.TL_messageMediaInvoice invoice, long peer, String path, MessageObject replyToMsg, MessageObject replyToTopMsg, TLRPC.WebPage webPage, boolean searchLinks, MessageObject retryMessageObject, ArrayList entities, TLRPC.ReplyMarkup replyMarkup, HashMap params, boolean notify, int scheduleDate, int ttl, Object parentObject, MessageObject.SendAnimationData sendAnimationData) { if (user != null && user.phone == null) { return; } @@ -2945,10 +3138,16 @@ public class SendMessagesHelper extends BaseController implements NotificationCe type = 1; } else if (retryMessageObject.type == 1) { photo = (TLRPC.TL_photo) newMsg.media.photo; + if (retryMessageObject.messageOwner.message != null) { + caption = retryMessageObject.messageOwner.message; + } type = 2; } else if (retryMessageObject.type == 3 || retryMessageObject.type == 5 || retryMessageObject.videoEditedInfo != null) { type = 3; document = (TLRPC.TL_document) newMsg.media.document; + if (retryMessageObject.messageOwner.message != null) { + caption = retryMessageObject.messageOwner.message; + } } else if (retryMessageObject.type == 12) { user = new TLRPC.TL_userRequest_old2(); user.phone = newMsg.media.phone_number; @@ -2964,9 +3163,15 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } else if (retryMessageObject.type == 8 || retryMessageObject.type == 9 || retryMessageObject.type == 
MessageObject.TYPE_STICKER || retryMessageObject.type == 14 || retryMessageObject.type == MessageObject.TYPE_ANIMATED_STICKER) { document = (TLRPC.TL_document) newMsg.media.document; type = 7; + if (retryMessageObject.messageOwner.message != null) { + caption = retryMessageObject.messageOwner.message; + } } else if (retryMessageObject.type == 2) { document = (TLRPC.TL_document) newMsg.media.document; type = 8; + if (retryMessageObject.messageOwner.message != null) { + caption = retryMessageObject.messageOwner.message; + } } else if (retryMessageObject.type == MessageObject.TYPE_POLL) { poll = (TLRPC.TL_messageMediaPoll) newMsg.media; type = 10; @@ -3359,6 +3564,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } newMsgObj = new MessageObject(currentAccount, newMsg, replyToMsg, true, true); + newMsgObj.sendAnimationData = sendAnimationData; newMsgObj.wasJustSent = true; newMsgObj.scheduled = scheduleDate != 0; if (!newMsgObj.isForwarded() && (newMsgObj.type == 3 || videoEditedInfo != null || newMsgObj.type == 2) && !TextUtils.isEmpty(newMsg.attachPath)) { @@ -3436,11 +3642,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } } else { TLRPC.TL_decryptedMessage reqSend; - if (AndroidUtilities.getPeerLayerVersion(encryptedChat.layer) >= 73) { - reqSend = new TLRPC.TL_decryptedMessage(); - } else { - reqSend = new TLRPC.TL_decryptedMessage_layer45(); - } + reqSend = new TLRPC.TL_decryptedMessage(); reqSend.ttl = newMsg.ttl; if (entities != null && !entities.isEmpty()) { reqSend.entities = entities; @@ -4179,7 +4381,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (size == null || size instanceof TLRPC.TL_photoStrippedSize || size instanceof TLRPC.TL_photoPathSize || size instanceof TLRPC.TL_photoSizeEmpty || size.location == null) { continue; } - TLRPC.TL_photoSize photoSize = new TLRPC.TL_photoSize(); + TLRPC.TL_photoSize photoSize = new 
TLRPC.TL_photoSize_layer127(); photoSize.type = size.type; photoSize.w = size.w; photoSize.h = size.h; @@ -4490,7 +4692,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe for (int a = 0; a < multiMedia.multi_media.size(); a++) { if (multiMedia.multi_media.get(a).media == inputMedia) { putToSendingMessages(message.messages.get(a), message.scheduled); - getNotificationCenter().postNotificationName(NotificationCenter.FileUploadProgressChanged, key, -1L, -1L, false); + getNotificationCenter().postNotificationName(NotificationCenter.fileUploadProgressChanged, key, -1L, -1L, false); break; } } @@ -4540,7 +4742,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe for (int a = 0; a < multiMedia.files.size(); a++) { if (multiMedia.files.get(a) == inputEncryptedFile) { putToSendingMessages(message.messages.get(a), message.scheduled); - getNotificationCenter().postNotificationName(NotificationCenter.FileUploadProgressChanged, key, -1L, -1L, false); + getNotificationCenter().postNotificationName(NotificationCenter.fileUploadProgressChanged, key, -1L, -1L, false); break; } } @@ -5520,10 +5722,18 @@ public class SendMessagesHelper extends BaseController implements NotificationCe }); } + public ImportingStickers getImportingStickers(String shortName) { + return importingStickersMap.get(shortName); + } + public ImportingHistory getImportingHistory(long dialogId) { return importingHistoryMap.get(dialogId); } + public boolean isImportingStickers() { + return importingStickersMap.size() != 0; + } + public boolean isImportingHistory() { return importingHistoryMap.size() != 0; } @@ -5620,6 +5830,56 @@ public class SendMessagesHelper extends BaseController implements NotificationCe }).start(); } + public void prepareImportStickers(String title, String shortName, String sofrware, ArrayList paths, MessagesStorage.StringCallback onStartImport) { + if (importingStickersMap.get(shortName) != null) { + 
onStartImport.run(null); + return; + } + new Thread(() -> { + ImportingStickers importingStickers = new ImportingStickers(); + importingStickers.title = title; + importingStickers.shortName = shortName; + importingStickers.software = sofrware; + HashMap files = new HashMap<>(); + for (int a = 0, N = paths.size(); a < N; a++) { + ImportingSticker sticker = paths.get(a); + final File f = new File(sticker.path); + long size; + if (!f.exists() || (size = f.length()) == 0) { + if (a == 0) { + AndroidUtilities.runOnUIThread(() -> { + onStartImport.run(null); + }); + return; + } + continue; + } + importingStickers.totalSize += size; + importingStickers.uploadMedia.add(sticker); + importingStickers.uploadSet.put(sticker.path, sticker); + files.put(sticker.path, importingStickers); + } + AndroidUtilities.runOnUIThread(() -> { + if (importingStickers.uploadMedia.get(0).item != null) { + importingStickers.startImport(); + } else { + importingStickersFiles.putAll(files); + importingStickersMap.put(shortName, importingStickers); + importingStickers.initImport(); + getNotificationCenter().postNotificationName(NotificationCenter.historyImportProgressChanged, shortName); + onStartImport.run(shortName); + } + + Intent intent = new Intent(ApplicationLoader.applicationContext, ImportingService.class); + try { + ApplicationLoader.applicationContext.startService(intent); + } catch (Throwable e) { + FileLog.e(e); + } + }); + }).start(); + } + public TLRPC.TL_photo generatePhotoSizes(String path, Uri imageUri) { return generatePhotoSizes(null, path, imageUri); } @@ -5921,7 +6181,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (editingMessageObject != null) { accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, null, documentFinal, pathFinal, params, false, parentFinal); } else { - accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, pathFinal, dialogId, replyToMsg, replyToTopMsg, captionFinal, 
entities, null, params, notify, scheduleDate, 0, parentFinal); + accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, pathFinal, dialogId, replyToMsg, replyToTopMsg, captionFinal, entities, null, params, notify, scheduleDate, 0, parentFinal, null); } }); return true; @@ -5958,14 +6218,6 @@ public class SendMessagesHelper extends BaseController implements NotificationCe final File f = new File(originalPath); boolean isEncrypted = (int) dialogId == 0; - int enryptedLayer = 0; - if (isEncrypted) { - int high_id = (int) (dialogId >> 32); - TLRPC.EncryptedChat encryptedChat = accountInstance.getMessagesController().getEncryptedChat(high_id); - if (encryptedChat != null) { - enryptedLayer = AndroidUtilities.getPeerLayerVersion(encryptedChat.layer); - } - } if (!isEncrypted && count > 1 && mediaCount % 10 == 0) { groupId = Utilities.random.nextLong(); mediaCount = 0; @@ -6016,7 +6268,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (editingMessageObject != null) { accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, null, documentFinal, messageObject.messageOwner.attachPath, params, false, parentFinal); } else { - accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, messageObject.messageOwner.attachPath, dialogId, replyToMsg, replyToTopMsg, captionFinal, null, null, params, notify, scheduleDate, 0, parentFinal); + accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, messageObject.messageOwner.attachPath, dialogId, replyToMsg, replyToTopMsg, captionFinal, null, null, params, notify, scheduleDate, 0, parentFinal, null); } }); } @@ -6055,14 +6307,6 @@ public class SendMessagesHelper extends BaseController implements NotificationCe Integer[] docType = new Integer[1]; boolean isEncrypted = (int) dialogId == 0; - int enryptedLayer = 0; - if (isEncrypted) { - int high_id = (int) (dialogId >> 32); - TLRPC.EncryptedChat encryptedChat = 
accountInstance.getMessagesController().getEncryptedChat(high_id); - if (encryptedChat != null) { - enryptedLayer = AndroidUtilities.getPeerLayerVersion(encryptedChat.layer); - } - } if (paths != null) { int count = paths.size(); @@ -6125,11 +6369,11 @@ public class SendMessagesHelper extends BaseController implements NotificationCe @UiThread public static void prepareSendingPhoto(AccountInstance accountInstance, String imageFilePath, Uri imageUri, long dialogId, MessageObject replyToMsg, MessageObject replyToTopMsg, CharSequence caption, ArrayList entities, ArrayList stickers, InputContentInfoCompat inputContent, int ttl, MessageObject editingMessageObject, boolean notify, int scheduleDate) { - prepareSendingPhoto(accountInstance, imageFilePath, null, imageUri, dialogId, replyToMsg, replyToTopMsg, caption, entities, stickers, inputContent, ttl, editingMessageObject, null, notify, scheduleDate); + prepareSendingPhoto(accountInstance, imageFilePath, null, imageUri, dialogId, replyToMsg, replyToTopMsg, caption, entities, stickers, inputContent, ttl, editingMessageObject, null, notify, scheduleDate, false); } @UiThread - public static void prepareSendingPhoto(AccountInstance accountInstance, String imageFilePath, String thumbFilePath, Uri imageUri, long dialogId, MessageObject replyToMsg, MessageObject replyToTopMsg, CharSequence caption, ArrayList entities, ArrayList stickers, InputContentInfoCompat inputContent, int ttl, MessageObject editingMessageObject, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + public static void prepareSendingPhoto(AccountInstance accountInstance, String imageFilePath, String thumbFilePath, Uri imageUri, long dialogId, MessageObject replyToMsg, MessageObject replyToTopMsg, CharSequence caption, ArrayList entities, ArrayList stickers, InputContentInfoCompat inputContent, int ttl, MessageObject editingMessageObject, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { 
SendingMediaInfo info = new SendingMediaInfo(); info.path = imageFilePath; info.thumbPath = thumbFilePath; @@ -6145,7 +6389,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe info.videoEditedInfo = videoEditedInfo; ArrayList infos = new ArrayList<>(); infos.add(info); - prepareSendingMedia(accountInstance, infos, dialogId, replyToMsg, replyToTopMsg, inputContent, false, false, editingMessageObject, notify, scheduleDate); + prepareSendingMedia(accountInstance, infos, dialogId, replyToMsg, replyToTopMsg, inputContent, forceDocument, false, editingMessageObject, notify, scheduleDate); } @UiThread @@ -6410,7 +6654,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (precahcedThumb[0] != null && precachedKey[0] != null) { ImageLoader.getInstance().putImageToCache(new BitmapDrawable(precahcedThumb[0]), precachedKey[0]); } - accountInstance.getSendMessagesHelper().sendMessage(finalDocument, null, finalPathFinal, dialogId, replyToMsg, replyToTopMsg, result.send_message.message, result.send_message.entities, result.send_message.reply_markup, params, notify, scheduleDate, 0, result); + accountInstance.getSendMessagesHelper().sendMessage(finalDocument, null, finalPathFinal, dialogId, replyToMsg, replyToTopMsg, result.send_message.message, result.send_message.entities, result.send_message.reply_markup, params, notify, scheduleDate, 0, result, null); } else if (finalPhoto != null) { accountInstance.getSendMessagesHelper().sendMessage(finalPhoto, result.content != null ? 
result.content.url : null, dialogId, replyToMsg, replyToTopMsg, result.send_message.message, result.send_message.entities, result.send_message.reply_markup, params, notify, scheduleDate, 0, result); } else if (finalGame != null) { @@ -6430,7 +6674,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } } } - accountInstance.getSendMessagesHelper().sendMessage(result.send_message.message, dialogId, replyToMsg, replyToTopMsg, webPage, !result.send_message.no_webpage, result.send_message.entities, result.send_message.reply_markup, params, notify, scheduleDate); + accountInstance.getSendMessagesHelper().sendMessage(result.send_message.message, dialogId, replyToMsg, replyToTopMsg, webPage, !result.send_message.no_webpage, result.send_message.entities, result.send_message.reply_markup, params, notify, scheduleDate, null); } else if (result.send_message instanceof TLRPC.TL_botInlineMessageMediaVenue) { TLRPC.TL_messageMediaVenue venue = new TLRPC.TL_messageMediaVenue(); venue.geo = result.send_message.geo; @@ -6511,7 +6755,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe int count = (int) Math.ceil(textFinal.length() / 4096.0f); for (int a = 0; a < count; a++) { String mess = textFinal.substring(a * 4096, Math.min((a + 1) * 4096, textFinal.length())); - accountInstance.getSendMessagesHelper().sendMessage(mess, dialogId, null, null, null, true, null, null, null, notify, scheduleDate); + accountInstance.getSendMessagesHelper().sendMessage(mess, dialogId, null, null, null, true, null, null, null, notify, scheduleDate, null); } } }))); @@ -6638,15 +6882,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe HashMap workers; int count = media.size(); boolean isEncrypted = (int) dialogId == 0; - int enryptedLayer = 0; - if (isEncrypted) { - int high_id = (int) (dialogId >> 32); - TLRPC.EncryptedChat encryptedChat = 
accountInstance.getMessagesController().getEncryptedChat(high_id); - if (encryptedChat != null) { - enryptedLayer = AndroidUtilities.getPeerLayerVersion(encryptedChat.layer); - } - } - if ((!isEncrypted || enryptedLayer >= 73) && !forceDocument && groupMediaFinal) { + if (!forceDocument && groupMediaFinal) { workers = new HashMap<>(); for (int a = 0; a < count; a++) { final SendingMediaInfo info = media.get(a); @@ -6725,7 +6961,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe int mediaCount = 0; for (int a = 0; a < count; a++) { final SendingMediaInfo info = media.get(a); - if (groupMediaFinal && (!isEncrypted || enryptedLayer >= 73) && count > 1 && mediaCount % 10 == 0) { + if (groupMediaFinal && count > 1 && mediaCount % 10 == 0) { lastGroupId = groupId = Utilities.random.nextLong(); mediaCount = 0; } @@ -6824,7 +7060,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (editingMessageObject != null) { accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, null, documentFinal, pathFinal, params, false, parentFinal); } else { - accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, pathFinal, dialogId, replyToMsg, replyToTopMsg, info.caption, info.entities, null, params, notify, scheduleDate, 0, parentFinal); + accountInstance.getSendMessagesHelper().sendMessage(documentFinal, null, pathFinal, dialogId, replyToMsg, replyToTopMsg, info.caption, info.entities, null, params, notify, scheduleDate, 0, parentFinal, null); } }); } else { @@ -6973,11 +7209,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe accountInstance.getUserConfig().saveConfig(false); TLRPC.TL_documentAttributeVideo attributeVideo; if (isEncrypted) { - if (enryptedLayer >= 66) { - attributeVideo = new TLRPC.TL_documentAttributeVideo(); - } else { - attributeVideo = new TLRPC.TL_documentAttributeVideo_layer65(); - } + attributeVideo = new 
TLRPC.TL_documentAttributeVideo(); } else { attributeVideo = new TLRPC.TL_documentAttributeVideo(); attributeVideo.supports_streaming = true; @@ -7071,7 +7303,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (editingMessageObject != null) { accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, videoEditedInfo, videoFinal, finalPath, params, false, parentFinal); } else { - accountInstance.getSendMessagesHelper().sendMessage(videoFinal, videoEditedInfo, finalPath, dialogId, replyToMsg, replyToTopMsg, info.caption, info.entities, null, params, notify, scheduleDate, info.ttl, parentFinal); + accountInstance.getSendMessagesHelper().sendMessage(videoFinal, videoEditedInfo, finalPath, dialogId, replyToMsg, replyToTopMsg, info.caption, info.entities, null, params, notify, scheduleDate, info.ttl, parentFinal, null); } }); } else { @@ -7564,7 +7796,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe } @UiThread - public static void prepareSendingVideo(AccountInstance accountInstance, String videoPath, VideoEditedInfo info, long dialogId, MessageObject replyToMsg, MessageObject replyToTopMsg, CharSequence caption, ArrayList entities, int ttl, MessageObject editingMessageObject, boolean notify, int scheduleDate) { + public static void prepareSendingVideo(AccountInstance accountInstance, String videoPath, VideoEditedInfo info, long dialogId, MessageObject replyToMsg, MessageObject replyToTopMsg, CharSequence caption, ArrayList entities, int ttl, MessageObject editingMessageObject, boolean notify, int scheduleDate, boolean forceDocument) { if (videoPath == null || videoPath.length() == 0) { return; } @@ -7641,11 +7873,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (encryptedChat == null) { return; } - if (AndroidUtilities.getPeerLayerVersion(encryptedChat.layer) >= 66) { - attributeVideo = new TLRPC.TL_documentAttributeVideo(); - } else { 
- attributeVideo = new TLRPC.TL_documentAttributeVideo_layer65(); - } + attributeVideo = new TLRPC.TL_documentAttributeVideo(); } else { attributeVideo = new TLRPC.TL_documentAttributeVideo(); attributeVideo.supports_streaming = true; @@ -7715,11 +7943,11 @@ public class SendMessagesHelper extends BaseController implements NotificationCe if (editingMessageObject != null) { accountInstance.getSendMessagesHelper().editMessage(editingMessageObject, null, videoEditedInfo, videoFinal, finalPath, params, false, parentFinal); } else { - accountInstance.getSendMessagesHelper().sendMessage(videoFinal, videoEditedInfo, finalPath, dialogId, replyToMsg, replyToTopMsg, captionFinal, entities, null, params, notify, scheduleDate, ttl, parentFinal); + accountInstance.getSendMessagesHelper().sendMessage(videoFinal, videoEditedInfo, finalPath, dialogId, replyToMsg, replyToTopMsg, captionFinal, entities, null, params, notify, scheduleDate, ttl, parentFinal, null); } }); } else { - prepareSendingDocumentInternal(accountInstance, videoPath, videoPath, null, null, dialogId, replyToMsg, replyToTopMsg, caption, entities, editingMessageObject, null, false, false, notify, scheduleDate, null); + prepareSendingDocumentInternal(accountInstance, videoPath, videoPath, null, null, dialogId, replyToMsg, replyToTopMsg, caption, entities, editingMessageObject, null, false, forceDocument, notify, scheduleDate, null); } }).start(); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java b/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java index d4fa6b1d9..4aaa7ceaa 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SharedConfig.java @@ -12,6 +12,7 @@ import android.app.Activity; import android.app.ActivityManager; import android.content.Context; import android.content.SharedPreferences; +import android.content.pm.PackageInfo; import android.os.Build; import 
android.os.Environment; import android.os.SystemClock; @@ -22,6 +23,7 @@ import android.util.SparseArray; import org.json.JSONObject; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.SerializedData; +import org.telegram.tgnet.TLRPC; import org.telegram.ui.SwipeGestureSettingsView; import java.io.File; @@ -32,8 +34,6 @@ import java.util.Iterator; import androidx.core.content.pm.ShortcutManagerCompat; -import com.google.android.exoplayer2.util.Log; - public class SharedConfig { public static String pushString = ""; @@ -61,6 +61,7 @@ public class SharedConfig { public static boolean loopStickers; public static int keepMedia = 2; public static int lastKeepMediaCheckTime; + public static int lastLogsCheckTime; public static int searchMessagesAsListHintShows; public static int textSelectionHintShows; public static int scheduledOrNoSoundHintShows; @@ -68,7 +69,6 @@ public class SharedConfig { public static boolean searchMessagesAsListUsed; public static boolean stickersReorderingHintUsed; public static boolean disableVoiceAudioEffects; - public static boolean useMediaStream; private static int lastLocalId = -210000; public static String storageCacheDir; @@ -98,6 +98,7 @@ public class SharedConfig { public static boolean saveStreamMedia = true; public static boolean smoothKeyboard = true; public static boolean pauseMusicOnRecord = true; + public static boolean noiseSupression; public static boolean noStatusBar; public static boolean sortContactsByName; public static boolean sortFilesByName; @@ -111,6 +112,11 @@ public class SharedConfig { public static int fontSize = 16; public static int bubbleRadius = 10; public static int ivFontSize = 16; + + public static TLRPC.TL_help_appUpdate pendingAppUpdate; + public static int pendingAppUpdateBuildVersion; + public static long lastUpdateCheckTime; + private static int devicePerformanceClass; public static boolean drawDialogIcons; @@ -192,8 +198,24 @@ public class SharedConfig { 
editor.putInt("textSelectionHintShows", textSelectionHintShows); editor.putInt("scheduledOrNoSoundHintShows", scheduledOrNoSoundHintShows); editor.putInt("lockRecordAudioVideoHint", lockRecordAudioVideoHint); - editor.putBoolean("disableVoiceAudioEffects", disableVoiceAudioEffects); editor.putString("storageCacheDir", !TextUtils.isEmpty(storageCacheDir) ? storageCacheDir : ""); + + if (pendingAppUpdate != null) { + try { + SerializedData data = new SerializedData(pendingAppUpdate.getObjectSize()); + pendingAppUpdate.serializeToStream(data); + String str = Base64.encodeToString(data.toByteArray(), Base64.DEFAULT); + editor.putString("appUpdate", str); + editor.putInt("appUpdateBuild", pendingAppUpdateBuildVersion); + data.cleanup(); + } catch (Exception ignore) { + + } + } else { + editor.remove("appUpdate"); + } + editor.putLong("appUpdateCheckTime", lastUpdateCheckTime); + editor.commit(); } catch (Exception e) { FileLog.e(e); @@ -248,6 +270,36 @@ public class SharedConfig { } else { passcodeSalt = new byte[0]; } + lastUpdateCheckTime = preferences.getLong("appUpdateCheckTime", System.currentTimeMillis()); + try { + String update = preferences.getString("appUpdate", null); + if (update != null) { + pendingAppUpdateBuildVersion = preferences.getInt("appUpdateBuild", BuildVars.BUILD_VERSION); + byte[] arr = Base64.decode(update, Base64.DEFAULT); + if (arr != null) { + SerializedData data = new SerializedData(arr); + pendingAppUpdate = (TLRPC.TL_help_appUpdate) TLRPC.help_AppUpdate.TLdeserialize(data, data.readInt32(false), false); + data.cleanup(); + } + } + if (pendingAppUpdate != null) { + long updateTime = 0; + int updateVerstion; + try { + PackageInfo packageInfo = ApplicationLoader.applicationContext.getPackageManager().getPackageInfo(ApplicationLoader.applicationContext.getPackageName(), 0); + updateVerstion = packageInfo.versionCode; + } catch (Exception e) { + FileLog.e(e); + updateVerstion = BuildVars.BUILD_VERSION; + } + if (pendingAppUpdateBuildVersion != 
updateVerstion) { + pendingAppUpdate = null; + AndroidUtilities.runOnUIThread(SharedConfig::saveConfig); + } + } + } catch (Exception e) { + FileLog.e(e); + } preferences = ApplicationLoader.applicationContext.getSharedPreferences("mainconfig", Activity.MODE_PRIVATE); saveToGallery = preferences.getBoolean("save_gallery", false); @@ -287,6 +339,7 @@ public class SharedConfig { keepMedia = preferences.getInt("keep_media", 2); noStatusBar = preferences.getBoolean("noStatusBar", false); lastKeepMediaCheckTime = preferences.getInt("lastKeepMediaCheckTime", 0); + lastLogsCheckTime = preferences.getInt("lastLogsCheckTime", 0); searchMessagesAsListHintShows = preferences.getInt("searchMessagesAsListHintShows", 0); searchMessagesAsListUsed = preferences.getBoolean("searchMessagesAsListUsed", false); stickersReorderingHintUsed = preferences.getBoolean("stickersReorderingHintUsed", false); @@ -294,8 +347,8 @@ public class SharedConfig { scheduledOrNoSoundHintShows = preferences.getInt("scheduledOrNoSoundHintShows", 0); lockRecordAudioVideoHint = preferences.getInt("lockRecordAudioVideoHint", 0); disableVoiceAudioEffects = preferences.getBoolean("disableVoiceAudioEffects", false); + noiseSupression = preferences.getBoolean("noiseSupression", false); chatSwipeAction = preferences.getInt("ChatSwipeAction", -1); - useMediaStream = preferences.getBoolean("useMediaStream", false); preferences = ApplicationLoader.applicationContext.getSharedPreferences("Notifications", Activity.MODE_PRIVATE); showNotificationsForAllAccounts = preferences.getBoolean("AllAccounts", true); @@ -304,9 +357,9 @@ public class SharedConfig { } public static void increaseBadPasscodeTries() { - SharedConfig.badPasscodeTries++; + badPasscodeTries++; if (badPasscodeTries >= 3) { - switch (SharedConfig.badPasscodeTries) { + switch (badPasscodeTries) { case 3: passcodeRetryInMs = 5000; break; @@ -326,7 +379,7 @@ public class SharedConfig { passcodeRetryInMs = 30000; break; } - SharedConfig.lastUptimeMillis = 
SystemClock.elapsedRealtime(); + lastUptimeMillis = SystemClock.elapsedRealtime(); } saveConfig(); } @@ -360,6 +413,33 @@ public class SharedConfig { return passportConfigMap; } + public static boolean isAppUpdateAvailable() { + if (pendingAppUpdate == null || pendingAppUpdate.document == null || !AndroidUtilities.isStandaloneApp()) { + return false; + } + int currentVersion; + try { + PackageInfo pInfo = ApplicationLoader.applicationContext.getPackageManager().getPackageInfo(ApplicationLoader.applicationContext.getPackageName(), 0); + currentVersion = pInfo.versionCode; + } catch (Exception e) { + FileLog.e(e); + currentVersion = BuildVars.BUILD_VERSION; + } + return pendingAppUpdateBuildVersion == currentVersion; + } + + public static void setNewAppVersionAvailable(TLRPC.TL_help_appUpdate update) { + pendingAppUpdate = update; + try { + PackageInfo packageInfo = ApplicationLoader.applicationContext.getPackageManager().getPackageInfo(ApplicationLoader.applicationContext.getPackageName(), 0); + pendingAppUpdateBuildVersion = packageInfo.versionCode; + } catch (Exception e) { + FileLog.e(e); + pendingAppUpdateBuildVersion = BuildVars.BUILD_VERSION; + } + saveConfig(); + } + public static boolean checkPasscode(String passcode) { if (passcodeSalt.length == 0) { boolean result = Utilities.MD5(passcode).equals(passcodeHash); @@ -424,16 +504,16 @@ public class SharedConfig { editor.commit(); } - public static void setSearchMessagesAsListUsed(boolean searchMessagesAsListUsed) { - SharedConfig.searchMessagesAsListUsed = searchMessagesAsListUsed; + public static void setSearchMessagesAsListUsed(boolean value) { + searchMessagesAsListUsed = value; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); SharedPreferences.Editor editor = preferences.edit(); editor.putBoolean("searchMessagesAsListUsed", searchMessagesAsListUsed); editor.commit(); } - public static void setStickersReorderingHintUsed(boolean stickersReorderingHintUsed) { - 
SharedConfig.stickersReorderingHintUsed = stickersReorderingHintUsed; + public static void setStickersReorderingHintUsed(boolean value) { + stickersReorderingHintUsed = value; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); SharedPreferences.Editor editor = preferences.edit(); editor.putBoolean("stickersReorderingHintUsed", stickersReorderingHintUsed); @@ -497,6 +577,31 @@ public class SharedConfig { editor.commit(); } + public static void checkLogsToDelete() { + if (!BuildVars.LOGS_ENABLED) { + return; + } + int time = (int) (System.currentTimeMillis() / 1000); + if (Math.abs(time - lastLogsCheckTime) < 60 * 60) { + return; + } + lastLogsCheckTime = time; + Utilities.cacheClearQueue.postRunnable(() -> { + long currentTime = time - 60 * 60 * 24 * 10; + try { + File sdCard = ApplicationLoader.applicationContext.getExternalFilesDir(null); + File dir = new File(sdCard.getAbsolutePath() + "/logs"); + Utilities.clearDir(dir.getAbsolutePath(), 0, currentTime, false); + } catch (Throwable e) { + FileLog.e(e); + } + SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + SharedPreferences.Editor editor = preferences.edit(); + editor.putInt("lastLogsCheckTime", lastLogsCheckTime); + editor.commit(); + }); + } + public static void checkKeepMedia() { int time = (int) (System.currentTimeMillis() / 1000); if (Math.abs(time - lastKeepMediaCheckTime) < 60 * 60) { @@ -504,7 +609,7 @@ public class SharedConfig { } lastKeepMediaCheckTime = time; File cacheDir = FileLoader.checkDirectory(FileLoader.MEDIA_DIR_CACHE); - Utilities.globalQueue.postRunnable(() -> { + Utilities.cacheClearQueue.postRunnable(() -> { if (keepMedia != 2) { int days; if (keepMedia == 0) { @@ -551,6 +656,14 @@ public class SharedConfig { editor.commit(); } + public static void toggleNoiseSupression() { + noiseSupression = !noiseSupression; + SharedPreferences preferences = MessagesController.getGlobalMainSettings(); + SharedPreferences.Editor editor = 
preferences.edit(); + editor.putBoolean("noiseSupression", noiseSupression); + editor.commit(); + } + public static void toggleNoStatusBar() { noStatusBar = !noStatusBar; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); @@ -559,14 +672,6 @@ public class SharedConfig { editor.commit(); } - public static void toggleUseMediaStream() { - useMediaStream = !useMediaStream; - SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - SharedPreferences.Editor editor = preferences.edit(); - editor.putBoolean("useMediaStream", useMediaStream); - editor.commit(); - } - public static void toggleLoopStickers() { loopStickers = !loopStickers; SharedPreferences preferences = MessagesController.getGlobalMainSettings(); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java index 3cf989a84..d3f4718a0 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SvgHelper.java @@ -49,6 +49,7 @@ import org.xml.sax.helpers.DefaultHandler; import java.io.File; import java.io.FileInputStream; +import java.io.InputStream; import java.io.StringReader; import java.lang.ref.WeakReference; import java.util.ArrayList; @@ -99,10 +100,10 @@ public class SvgHelper { public static class SvgDrawable extends Drawable { - private ArrayList commands = new ArrayList<>(); - private HashMap paints = new HashMap<>(); - private int width; - private int height; + protected ArrayList commands = new ArrayList<>(); + protected HashMap paints = new HashMap<>(); + protected int width; + protected int height; private static int[] parentPosition = new int[2]; private Shader backgroundGradient; @@ -270,12 +271,27 @@ public class SvgHelper { } } + public static Bitmap getBitmap(int res, int width, int height, int color) { + try (InputStream stream = 
ApplicationLoader.applicationContext.getResources().openRawResource(res)) { + SAXParserFactory spf = SAXParserFactory.newInstance(); + SAXParser sp = spf.newSAXParser(); + XMLReader xr = sp.getXMLReader(); + SVGHandler handler = new SVGHandler(width, height, color, false); + xr.setContentHandler(handler); + xr.parse(new InputSource(stream)); + return handler.getBitmap(); + } catch (Exception e) { + FileLog.e(e); + return null; + } + } + public static Bitmap getBitmap(File file, int width, int height, boolean white) { try (FileInputStream stream = new FileInputStream(file)) { SAXParserFactory spf = SAXParserFactory.newInstance(); SAXParser sp = spf.newSAXParser(); XMLReader xr = sp.getXMLReader(); - SVGHandler handler = new SVGHandler(width, height, white, false); + SVGHandler handler = new SVGHandler(width, height, white ? 0xffffffff : null, false); xr.setContentHandler(handler); xr.parse(new InputSource(stream)); return handler.getBitmap(); @@ -290,7 +306,7 @@ public class SvgHelper { SAXParserFactory spf = SAXParserFactory.newInstance(); SAXParser sp = spf.newSAXParser(); XMLReader xr = sp.getXMLReader(); - SVGHandler handler = new SVGHandler(width, height, white, false); + SVGHandler handler = new SVGHandler(width, height, white ? 
0xffffffff : null, false); xr.setContentHandler(handler); xr.parse(new InputSource(new StringReader(xml))); return handler.getBitmap(); @@ -305,7 +321,7 @@ public class SvgHelper { SAXParserFactory spf = SAXParserFactory.newInstance(); SAXParser sp = spf.newSAXParser(); XMLReader xr = sp.getXMLReader(); - SVGHandler handler = new SVGHandler(0, 0, false, true); + SVGHandler handler = new SVGHandler(0, 0, null, true); xr.setContentHandler(handler); xr.parse(new InputSource(new StringReader(xml))); return handler.getDrawable(); @@ -315,6 +331,21 @@ public class SvgHelper { } } + public static SvgDrawable getDrawable(int resId, int color) { + try { + SAXParserFactory spf = SAXParserFactory.newInstance(); + SAXParser sp = spf.newSAXParser(); + XMLReader xr = sp.getXMLReader(); + SVGHandler handler = new SVGHandler(0, 0, color, true); + xr.setContentHandler(handler); + xr.parse(new InputSource(ApplicationLoader.applicationContext.getResources().openRawResource(resId))); + return handler.getDrawable(); + } catch (Exception e) { + FileLog.e(e); + return null; + } + } + public static SvgDrawable getDrawableByPath(String pathString, int w, int h) { try { Path path = doPath(pathString); @@ -901,16 +932,16 @@ public class SvgHelper { private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); private RectF rect = new RectF(); private RectF rectTmp = new RectF(); - private boolean whiteOnly; + private Integer paintColor; boolean pushed = false; private HashMap globalStyles = new HashMap<>(); - private SVGHandler(int dw, int dh, boolean white, boolean asDrawable) { + private SVGHandler(int dw, int dh, Integer color, boolean asDrawable) { desiredWidth = dw; desiredHeight = dh; - whiteOnly = white; + paintColor = color; if (asDrawable) { drawable = new SvgDrawable(); } @@ -942,8 +973,8 @@ public class SvgHelper { return true; } else if (atts.getString("fill") == null && atts.getString("stroke") == null) { paint.setStyle(Paint.Style.FILL); - if (whiteOnly) { - 
paint.setColor(0xffffffff); + if (paintColor != null) { + paint.setColor(paintColor); } else { paint.setColor(0xff000000); } @@ -988,8 +1019,8 @@ public class SvgHelper { } private void doColor(Properties atts, Integer color, boolean fillMode) { - if (whiteOnly) { - paint.setColor(0xffffffff); + if (paintColor != null) { + paint.setColor(paintColor); } else { int c = (0xFFFFFF & color) | 0xFF000000; paint.setColor(c); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/UserConfig.java b/TMessagesProj/src/main/java/org/telegram/messenger/UserConfig.java index 69f4aa902..c1afd5e41 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/UserConfig.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/UserConfig.java @@ -10,7 +10,6 @@ package org.telegram.messenger; import android.content.Context; import android.content.SharedPreferences; -import android.content.pm.PackageInfo; import android.os.SystemClock; import android.util.Base64; @@ -60,10 +59,6 @@ public class UserConfig extends BaseController { public boolean hasSecureData; public int loginTime; public TLRPC.TL_help_termsOfService unacceptedTermsOfService; - public TLRPC.TL_help_appUpdate pendingAppUpdate; - public int pendingAppUpdateBuildVersion; - public long pendingAppUpdateInstallTime; - public long lastUpdateCheckTime; public long autoDownloadConfigLoadTime; public volatile byte[] savedPasswordHash; @@ -188,25 +183,6 @@ public class UserConfig extends BaseController { editor.remove("terms"); } - if (currentAccount == 0) { - if (pendingAppUpdate != null) { - try { - SerializedData data = new SerializedData(pendingAppUpdate.getObjectSize()); - pendingAppUpdate.serializeToStream(data); - String str = Base64.encodeToString(data.toByteArray(), Base64.DEFAULT); - editor.putString("appUpdate", str); - editor.putInt("appUpdateBuild", pendingAppUpdateBuildVersion); - editor.putLong("appUpdateTime", pendingAppUpdateInstallTime); - editor.putLong("appUpdateCheckTime", 
lastUpdateCheckTime); - data.cleanup(); - } catch (Exception ignore) { - - } - } else { - editor.remove("appUpdate"); - } - } - SharedConfig.saveConfig(); if (tmpPassword != null) { @@ -338,38 +314,6 @@ public class UserConfig extends BaseController { FileLog.e(e); } - if (currentAccount == 0) { - lastUpdateCheckTime = preferences.getLong("appUpdateCheckTime", System.currentTimeMillis()); - try { - String update = preferences.getString("appUpdate", null); - if (update != null) { - pendingAppUpdateBuildVersion = preferences.getInt("appUpdateBuild", BuildVars.BUILD_VERSION); - pendingAppUpdateInstallTime = preferences.getLong("appUpdateTime", System.currentTimeMillis()); - byte[] arr = Base64.decode(update, Base64.DEFAULT); - if (arr != null) { - SerializedData data = new SerializedData(arr); - pendingAppUpdate = (TLRPC.TL_help_appUpdate) TLRPC.help_AppUpdate.TLdeserialize(data, data.readInt32(false), false); - data.cleanup(); - } - } - if (pendingAppUpdate != null) { - long updateTime = 0; - try { - PackageInfo packageInfo = ApplicationLoader.applicationContext.getPackageManager().getPackageInfo(ApplicationLoader.applicationContext.getPackageName(), 0); - updateTime = Math.max(packageInfo.lastUpdateTime, packageInfo.firstInstallTime); - } catch (Exception e) { - FileLog.e(e); - } - if (pendingAppUpdateBuildVersion != BuildVars.BUILD_VERSION || pendingAppUpdateInstallTime < updateTime) { - pendingAppUpdate = null; - AndroidUtilities.runOnUIThread(() -> saveConfig(false)); - } - } - } catch (Exception e) { - FileLog.e(e); - } - } - migrateOffsetId = preferences.getInt("6migrateOffsetId", 0); if (migrateOffsetId != -1) { migrateOffsetDate = preferences.getInt("6migrateOffsetDate", 0); @@ -484,7 +428,6 @@ public class UserConfig extends BaseController { hasValidDialogLoadIds = true; unacceptedTermsOfService = null; filtersLoaded = false; - pendingAppUpdate = null; hasSecureData = false; loginTime = (int) (System.currentTimeMillis() / 1000); lastContactsSyncTime = (int) 
(System.currentTimeMillis() / 1000) - 23 * 60 * 60; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/UserObject.java b/TMessagesProj/src/main/java/org/telegram/messenger/UserObject.java index 6835c9557..2fdc1d567 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/UserObject.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/UserObject.java @@ -59,4 +59,12 @@ public class UserObject { } return !TextUtils.isEmpty(name) ? name : LocaleController.getString("HiddenName", R.string.HiddenName); } + + public static boolean hasPhoto(TLRPC.User user) { + return user != null && user.photo != null && !(user.photo instanceof TLRPC.TL_userProfilePhotoEmpty); + } + + public static TLRPC.UserProfilePhoto getPhoto(TLRPC.User user) { + return hasPhoto(user) ? user.photo : null; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/Utilities.java b/TMessagesProj/src/main/java/org/telegram/messenger/Utilities.java index 361a30fdf..062e440ab 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/Utilities.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/Utilities.java @@ -30,6 +30,7 @@ public class Utilities { public static volatile DispatchQueue stageQueue = new DispatchQueue("stageQueue"); public static volatile DispatchQueue globalQueue = new DispatchQueue("globalQueue"); + public static volatile DispatchQueue cacheClearQueue = new DispatchQueue("cacheClearQueue"); public static volatile DispatchQueue searchQueue = new DispatchQueue("searchQueue"); public static volatile DispatchQueue phoneBookQueue = new DispatchQueue("phoneBookQueue"); @@ -69,6 +70,7 @@ public class Utilities { public static native void stackBlurBitmap(Bitmap bitmap, int radius); public static native void drawDitheredGradient(Bitmap bitmap, int[] colors, int startX, int startY, int endX, int endY); public static native int saveProgressiveJpeg(Bitmap bitmap, int width, int height, int stride, int quality, String path); + public static 
native void generateGradient(Bitmap bitmap, boolean unpin, int phase, float progress, int width, int height, int stride, int[] colors); public static Bitmap blurWallpaper(Bitmap src) { if (src == null) { @@ -369,6 +371,14 @@ public class Utilities { return (((int) bytes[3] & 0xFF) << 24) + (((int) bytes[2] & 0xFF) << 16) + (((int) bytes[1] & 0xFF) << 8) + ((int) bytes[0] & 0xFF); } + public static byte[] intToBytes(int value) { + return new byte[]{ + (byte) (value >>> 24), + (byte) (value >>> 16), + (byte) (value >>> 8), + (byte) value}; + } + public static String MD5(String md5) { if (md5 == null) { return null; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/VideoEncodingService.java b/TMessagesProj/src/main/java/org/telegram/messenger/VideoEncodingService.java index 75afc0d2a..361e8e3fb 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/VideoEncodingService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/VideoEncodingService.java @@ -39,7 +39,7 @@ public class VideoEncodingService extends Service implements NotificationCenter. } NotificationManagerCompat.from(ApplicationLoader.applicationContext).cancel(4); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.stopEncodingService); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.FileUploadProgressChanged); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileUploadProgressChanged); if (BuildVars.LOGS_ENABLED) { FileLog.d("destroy video service"); } @@ -47,7 +47,7 @@ public class VideoEncodingService extends Service implements NotificationCenter. @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.FileUploadProgressChanged) { + if (id == NotificationCenter.fileUploadProgressChanged) { String fileName = (String) args[0]; if (account == currentAccount && path != null && path.equals(fileName)) { Long loadedSize = (Long) args[1]; @@ -80,8 +80,8 @@ public class VideoEncodingService extends Service implements NotificationCenter. return Service.START_NOT_STICKY; } if (oldAccount != currentAccount) { - NotificationCenter.getInstance(oldAccount).removeObserver(this, NotificationCenter.FileUploadProgressChanged); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.FileUploadProgressChanged); + NotificationCenter.getInstance(oldAccount).removeObserver(this, NotificationCenter.fileUploadProgressChanged); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileUploadProgressChanged); } boolean isGif = intent.getBooleanExtra("gif", false); if (path == null) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/WearDataLayerListenerService.java b/TMessagesProj/src/main/java/org/telegram/messenger/WearDataLayerListenerService.java index 6f9adebb6..a53f92718 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/WearDataLayerListenerService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/WearDataLayerListenerService.java @@ -79,7 +79,7 @@ public class WearDataLayerListenerService extends WearableListenerService { final CyclicBarrier barrier = new CyclicBarrier(2); if (!photo.exists()) { final NotificationCenter.NotificationCenterDelegate listener = (id, account, args) -> { - if (id == NotificationCenter.fileDidLoad) { + if (id == NotificationCenter.fileLoaded) { if (BuildVars.LOGS_ENABLED) { FileLog.d("file loaded: " + args[0] + " " + args[0].getClass().getName()); } @@ -95,14 +95,14 @@ public class WearDataLayerListenerService extends WearableListenerService { } }; AndroidUtilities.runOnUIThread(() -> { - 
NotificationCenter.getInstance(currentAccount).addObserver(listener, NotificationCenter.fileDidLoad); + NotificationCenter.getInstance(currentAccount).addObserver(listener, NotificationCenter.fileLoaded); FileLoader.getInstance(currentAccount).loadFile(ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_SMALL), user, null, 1, 1); }); try { barrier.await(10, TimeUnit.SECONDS); } catch (Exception ignore) { } - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).removeObserver(listener, NotificationCenter.fileDidLoad)); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getInstance(currentAccount).removeObserver(listener, NotificationCenter.fileLoaded)); } if (photo.exists() && photo.length() <= 50 * 1024 * 1024) { byte[] photoData = new byte[(int) photo.length()]; @@ -249,7 +249,7 @@ public class WearDataLayerListenerService extends WearableListenerService { if (dialog_id == 0 || max_id == 0 || currentAccount == -1) { return; } - SendMessagesHelper.getInstance(currentAccount).sendMessage(text.toString(), dialog_id, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(text.toString(), dialog_id, null, null, null, true, null, null, null, true, 0, null); MessagesController.getInstance(currentAccount).markDialogAsRead(dialog_id, max_id, max_id, 0, false, 0, 0, true, 0); } catch (Exception x) { if (BuildVars.LOGS_ENABLED) diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/WearReplyReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/WearReplyReceiver.java index 7677b95c8..70e7e522a 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/WearReplyReceiver.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/WearReplyReceiver.java @@ -69,7 +69,7 @@ public class WearReplyReceiver extends BroadcastReceiver { } private void sendMessage(AccountInstance accountInstance, CharSequence text, long dialog_id, int max_id) { - 
accountInstance.getSendMessagesHelper().sendMessage(text.toString(), dialog_id, null, null, null, true, null, null, null, true, 0); + accountInstance.getSendMessagesHelper().sendMessage(text.toString(), dialog_id, null, null, null, true, null, null, null, true, 0, null); accountInstance.getMessagesController().markDialogAsRead(dialog_id, max_id, max_id, 0, false, 0, 0, true, 0); } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java b/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java index f6f39d670..6b4d40b42 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java @@ -246,15 +246,15 @@ public class Browser { String url = uri.toString(); int idx = url.indexOf("://"); String path = idx >= 0 ? url.substring(idx + 3) : url; - String[] args = path.split("#"); - String finalPath = args[0]; + String fragment = uri.getEncodedFragment(); + String finalPath = fragment == null ? path : path.substring(0, path.indexOf("#" + fragment)); if (finalPath.indexOf('?') >= 0) { finalPath += "&" + token; } else { finalPath += "?" 
+ token; } - if (args.length > 1) { - finalPath += args[1]; + if (fragment != null) { + finalPath += "#" + fragment; } uri = Uri.parse("https://" + finalPath); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraController.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraController.java index 0e3152318..419077ca7 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraController.java @@ -65,6 +65,7 @@ public class CameraController implements MediaRecorder.OnInfoListener { private boolean loadingCameras; private ArrayList onFinishCameraInitRunnables = new ArrayList<>(); + CameraView recordingCurrentCameraView; private static volatile CameraController Instance = null; @@ -255,10 +256,6 @@ public class CameraController implements MediaRecorder.OnInfoListener { return cameraInitied && cameraInfos != null && !cameraInfos.isEmpty(); } - public void runOnThreadPool(Runnable runnable) { - threadPool.execute(runnable); - } - public void close(final CameraSession session, final CountDownLatch countDownLatch, final Runnable beforeDestroyRunnable) { session.destroy(); threadPool.execute(() -> { @@ -406,12 +403,12 @@ public class CameraController implements MediaRecorder.OnInfoListener { BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeByteArray(data, 0, data.length, options); - float scaleFactor = Math.max((float) options.outWidth / AndroidUtilities.getPhotoSize(), (float) options.outHeight / AndroidUtilities.getPhotoSize()); - if (scaleFactor < 1) { - scaleFactor = 1; - } +// float scaleFactor = Math.max((float) options.outWidth / AndroidUtilities.getPhotoSize(), (float) options.outHeight / AndroidUtilities.getPhotoSize()); +// if (scaleFactor < 1) { +// scaleFactor = 1; +// } options.inJustDecodeBounds = false; - options.inSampleSize = (int) scaleFactor; + 
// options.inSampleSize = (int) scaleFactor; options.inPurgeable = true; bitmap = BitmapFactory.decodeByteArray(data, 0, data.length, options); } catch (Throwable e) { @@ -591,13 +588,45 @@ public class CameraController implements MediaRecorder.OnInfoListener { }); } - public void recordVideo(final CameraSession session, final File path, boolean mirror, final VideoTakeCallback callback, final Runnable onVideoStartRecord) { + public void recordVideo(final CameraSession session, final File path, boolean mirror, final VideoTakeCallback callback, final Runnable onVideoStartRecord, CameraView cameraView) { if (session == null) { return; } - final CameraInfo info = session.cameraInfo; final Camera camera = info.camera; + if (cameraView != null) { + recordingCurrentCameraView = cameraView; + onVideoTakeCallback = callback; + recordedFile = path.getAbsolutePath(); + threadPool.execute(() -> { + try { + if (camera != null) { + try { + Camera.Parameters params = camera.getParameters(); + params.setFlashMode(session.getCurrentFlashMode().equals(Camera.Parameters.FLASH_MODE_ON) ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + camera.setParameters(params); + } catch (Exception e) { + FileLog.e(e); + } + AndroidUtilities.runOnUIThread(() -> { + cameraView.startRecording(path, () -> { + finishRecordingVideo(); + }); + + if (onVideoStartRecord != null) { + onVideoStartRecord.run(); + } + }); + } + } catch (Exception e) { + FileLog.e(e); + } + }); + + return; + } + + threadPool.execute(() -> { try { if (camera != null) { @@ -722,6 +751,11 @@ public class CameraController implements MediaRecorder.OnInfoListener { } public void stopVideoRecording(final CameraSession session, final boolean abandon) { + if (recordingCurrentCameraView != null) { + recordingCurrentCameraView.stopRecording(); + recordingCurrentCameraView = null; + return; + } threadPool.execute(() -> { try { CameraInfo info = session.cameraInfo; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraInfo.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraInfo.java index 9a1f67fc1..20e62f1b6 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraInfo.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraInfo.java @@ -9,17 +9,28 @@ package org.telegram.messenger.camera; import android.hardware.Camera; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CaptureRequest; import java.util.ArrayList; public class CameraInfo { + protected int cameraId; protected Camera camera; protected ArrayList pictureSizes = new ArrayList<>(); protected ArrayList previewSizes = new ArrayList<>(); protected final int frontCamera; + protected CameraDevice cameraDevice; + CameraCharacteristics cameraCharacteristics; + CaptureRequest.Builder captureRequestBuilder; + public CameraCaptureSession cameraCaptureSession; + + public CameraInfo(int id, int frontFace) { 
cameraId = id; frontCamera = frontFace; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraSession.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraSession.java index 1a0ec52b4..4dbd31dbd 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraSession.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraSession.java @@ -345,6 +345,7 @@ public class CameraSession { cameraDisplayOrientation = temp; } camera.setDisplayOrientation(currentOrientation = cameraDisplayOrientation); + diffOrientation = currentOrientation - displayOrientation; if (params != null) { params.setPreviewSize(previewSize.getWidth(), previewSize.getHeight()); @@ -395,10 +396,6 @@ public class CameraSession { } catch (Exception e) { // } - - if (params.getMaxNumMeteringAreas() > 0) { - meteringAreaSupported = true; - } } } } catch (Throwable e) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java index 235ff3585..f21a3006d 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java @@ -8,36 +8,91 @@ package org.telegram.messenger.camera; +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; import android.annotation.SuppressLint; import android.app.Activity; import android.content.Context; +import android.graphics.Bitmap; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.ImageFormat; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Rect; import android.graphics.RectF; import android.graphics.SurfaceTexture; +import android.graphics.drawable.BitmapDrawable; +import android.graphics.drawable.Drawable; import android.hardware.Camera; +import 
android.media.AudioFormat; +import android.media.AudioRecord; +import android.media.MediaCodec; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaRecorder; +import android.opengl.EGL14; +import android.opengl.EGLExt; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.opengl.GLUtils; +import android.os.Build; +import android.os.Handler; +import android.os.Looper; +import android.os.Message; +import android.os.VibrationEffect; +import android.os.Vibrator; +import android.view.Gravity; +import android.view.HapticFeedbackConstants; import android.view.Surface; import android.view.TextureView; import android.view.View; import android.view.WindowManager; import android.view.animation.DecelerateInterpolator; import android.widget.FrameLayout; +import android.widget.ImageView; + +import androidx.core.graphics.ColorUtils; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.BuildVars; +import org.telegram.messenger.DispatchQueue; +import org.telegram.messenger.FileLog; +import org.telegram.messenger.Utilities; +import org.telegram.messenger.video.MP4Builder; +import org.telegram.messenger.video.Mp4Movie; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.InstantCameraView; +import org.telegram.ui.Components.LayoutHelper; +import java.io.File; +import java.lang.ref.WeakReference; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; import java.util.ArrayList; +import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.CountDownLatch; +import javax.microedition.khronos.egl.EGL10; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.egl.EGLContext; +import javax.microedition.khronos.egl.EGLDisplay; +import javax.microedition.khronos.egl.EGLSurface; +import javax.microedition.khronos.opengles.GL; + 
@SuppressLint("NewApi") public class CameraView extends FrameLayout implements TextureView.SurfaceTextureListener { + private Size previewSize; + private Size pictureSize; + CameraInfo info; private boolean mirror; private TextureView textureView; + private ImageView blurredStubView; private CameraSession cameraSession; private boolean initied; private CameraViewDelegate delegate; @@ -60,8 +115,134 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur private Paint outerPaint = new Paint(Paint.ANTI_ALIAS_FLAG); private Paint innerPaint = new Paint(Paint.ANTI_ALIAS_FLAG); private boolean optimizeForBarcode; + File recordFile; private DecelerateInterpolator interpolator = new DecelerateInterpolator(); + private volatile int surfaceWidth; + private volatile int surfaceHeight; + + private File cameraFile; + + boolean firstFrameRendered; + private final Object layoutLock = new Object(); + + private float[] mMVPMatrix = new float[16]; + private float[] mSTMatrix = new float[16]; + private float[] moldSTMatrix = new float[16]; + private static final String VERTEX_SHADER = + "uniform mat4 uMVPMatrix;\n" + + "uniform mat4 uSTMatrix;\n" + + "attribute vec4 aPosition;\n" + + "attribute vec4 aTextureCoord;\n" + + "varying vec2 vTextureCoord;\n" + + "void main() {\n" + + " gl_Position = uMVPMatrix * aPosition;\n" + + " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + + "}\n"; + + private static final String FRAGMENT_SCREEN_SHADER = + "#extension GL_OES_EGL_image_external : require\n" + + "precision lowp float;\n" + + "varying vec2 vTextureCoord;\n" + + "uniform samplerExternalOES sTexture;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + + "}\n"; + + private FloatBuffer vertexBuffer; + private FloatBuffer textureBuffer; + + public void setRecordFile(File generateVideoPath) { + recordFile = generateVideoPath; + } + + Runnable onRecordingFinishRunnable; + + public boolean startRecording(File path, Runnable 
onFinished) { + cameraThread.startRecording(path); + onRecordingFinishRunnable = onFinished; + return true; + } + + public void stopRecording() { + cameraThread.stopRecording(); + } + + ValueAnimator flipAnimator; + boolean flipHalfReached; + + public void startSwitchingAnimation() { + if (flipAnimator != null) { + flipAnimator.cancel(); + } + blurredStubView.animate().setListener(null).cancel(); + if (firstFrameRendered) { + Bitmap bitmap = textureView.getBitmap(100, 100); + if (bitmap != null) { + Utilities.blurBitmap(bitmap, 3, 1, bitmap.getWidth(), bitmap.getHeight(), bitmap.getRowBytes()); + Drawable drawable = new BitmapDrawable(bitmap); + blurredStubView.setBackground(drawable); + } + blurredStubView.setAlpha(0f); + } else { + blurredStubView.setAlpha(1f); + } + blurredStubView.setVisibility(View.VISIBLE); + + synchronized (layoutLock) { + firstFrameRendered = false; + } + + flipHalfReached = false; + flipAnimator = ValueAnimator.ofFloat(0, 1f); + flipAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { + @Override + public void onAnimationUpdate(ValueAnimator valueAnimator) { + float v = (float) valueAnimator.getAnimatedValue(); + + float rotation; + boolean halfReached = false; + if (v < 0.5f) { + rotation = v; + } else { + halfReached = true; + rotation = v - 1f; + } + rotation *= 180; + textureView.setRotationY(rotation); + blurredStubView.setRotationY(rotation); + if (halfReached && !flipHalfReached) { + blurredStubView.setAlpha(1f); + flipHalfReached = true; + } + } + }); + flipAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + flipAnimator = null; + textureView.setTranslationY(0); + textureView.setRotationX(0); + textureView.setRotationY(0); + textureView.setScaleX(1f); + textureView.setScaleY(1f); + + blurredStubView.setRotationY(0); + + if (!flipHalfReached) { + blurredStubView.setAlpha(1f); + flipHalfReached = true; + } + 
invalidate(); + } + }); + flipAnimator.setDuration(400); + flipAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + flipAnimator.start(); + invalidate(); + } + public interface CameraViewDelegate { void onCameraCreated(Camera camera); @@ -73,7 +254,11 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur initialFrontface = isFrontface = frontface; textureView = new TextureView(context); textureView.setSurfaceTextureListener(this); - addView(textureView); + addView(textureView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER)); + + blurredStubView = new ImageView(context); + addView(blurredStubView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER)); + blurredStubView.setVisibility(View.GONE); focusAreaSize = AndroidUtilities.dp(96); outerPaint.setColor(0xffffffff); outerPaint.setStyle(Paint.Style.STROKE); @@ -88,6 +273,41 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur } } + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (pictureSize != null && cameraSession != null) { + int frameWidth, frameHeight; + if (cameraSession.getWorldAngle() == 90 || cameraSession.getWorldAngle() == 270) { + frameWidth = pictureSize.getWidth(); + frameHeight = pictureSize.getHeight(); + } else { + frameWidth = pictureSize.getHeight(); + frameHeight = pictureSize.getWidth(); + } + float s = Math.max(MeasureSpec.getSize(widthMeasureSpec) / (float) frameWidth , MeasureSpec.getSize(heightMeasureSpec) / (float) frameHeight); + blurredStubView.getLayoutParams().width = textureView.getLayoutParams().width = (int) (s * frameWidth); + blurredStubView.getLayoutParams().height = textureView.getLayoutParams().height = (int) (s * frameHeight); + } + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + public float getTextureHeight(float width, float height) { + if (pictureSize == null || 
cameraSession == null) { + return height; + } + + int frameWidth, frameHeight; + if (cameraSession.getWorldAngle() == 90 || cameraSession.getWorldAngle() == 270) { + frameWidth = pictureSize.getWidth(); + frameHeight = pictureSize.getHeight(); + } else { + frameWidth = pictureSize.getHeight(); + frameHeight = pictureSize.getWidth(); + } + float s = Math.max(width / (float) frameWidth , height / (float) frameHeight); + return (int) (s * frameHeight); + } + @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { super.onLayout(changed, left, top, right, bottom); @@ -122,16 +342,38 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur public void switchCamera() { if (cameraSession != null) { - CameraController.getInstance().close(cameraSession, null, null); + CameraController.getInstance().close(cameraSession, new CountDownLatch(1), null); cameraSession = null; } initied = false; isFrontface = !isFrontface; - initCamera(); + updateCameraInfoSize(); + cameraThread.reinitForNewCamera(); } - public void initCamera() { - CameraInfo info = null; + public Size getPreviewSize() { + return previewSize; + } + + CameraGLThread cameraThread; + @Override + public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { + updateCameraInfoSize(); + + surfaceHeight = height; + surfaceWidth = width; + + if (cameraThread == null && surface != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("start create thread"); + } + cameraThread = new CameraGLThread(surface); + checkPreviewMatrix(); + } + + } + + private void updateCameraInfoSize() { ArrayList cameraInfos = CameraController.getInstance().getCameras(); if (cameraInfos == null) { return; @@ -152,19 +394,28 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur org.telegram.messenger.camera.Size aspectRatio; int wantedWidth; int wantedHeight; + + int photoMaxWidth; + int photoMaxHeight; if (initialFrontface) { 
aspectRatio = new Size(16, 9); - wantedWidth = 480; - wantedHeight = 270; + photoMaxWidth = wantedWidth = 480; + photoMaxHeight = wantedHeight = 270; } else { if (Math.abs(screenSize - size4to3) < 0.1f) { aspectRatio = new Size(4, 3); wantedWidth = 1280; wantedHeight = 960; + + photoMaxWidth = 1920; + photoMaxHeight = 1440; } else { aspectRatio = new Size(16, 9); wantedWidth = 1280; wantedHeight = 720; + + photoMaxWidth = 1920; + photoMaxHeight = 1080; } } if (textureView.getWidth() > 0 && textureView.getHeight() > 0) { @@ -177,7 +428,7 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur int height = width * aspectRatio.getHeight() / aspectRatio.getWidth(); previewSize = CameraController.chooseOptimalSize(info.getPreviewSizes(), width, height, aspectRatio); } - org.telegram.messenger.camera.Size pictureSize = CameraController.chooseOptimalSize(info.getPictureSizes(), wantedWidth, wantedHeight, aspectRatio); + pictureSize = CameraController.chooseOptimalSize(info.getPictureSizes(), wantedWidth, wantedHeight, aspectRatio); if (pictureSize.getWidth() >= 1280 && pictureSize.getHeight() >= 1280) { if (Math.abs(screenSize - size4to3) < 0.1f) { aspectRatio = new Size(3, 4); @@ -189,42 +440,24 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur pictureSize = pictureSize2; } } - SurfaceTexture surfaceTexture = textureView.getSurfaceTexture(); - if (previewSize != null && surfaceTexture != null) { - surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); - cameraSession = new CameraSession(info, previewSize, pictureSize, ImageFormat.JPEG); - if (optimizeForBarcode) { - cameraSession.setOptimizeForBarcode(optimizeForBarcode); - } - CameraController.getInstance().open(cameraSession, surfaceTexture, () -> { - if (cameraSession != null) { - cameraSession.setInitied(); - } - checkPreviewMatrix(); - }, () -> { - if (delegate != null) { - 
delegate.onCameraCreated(cameraSession.cameraInfo.camera); - } - }); - } - } - - public Size getPreviewSize() { - return previewSize; + previewSize = pictureSize; + pictureSize = CameraController.chooseOptimalSize(info.getPictureSizes(), photoMaxWidth, photoMaxHeight, aspectRatio); + requestLayout(); } @Override - public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { - initCamera(); - } - - @Override - public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) { + public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int surfaceW, int surfaceH) { + surfaceHeight = surfaceH; + surfaceWidth = surfaceW; checkPreviewMatrix(); } @Override public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) { + if (cameraThread != null) { + cameraThread.shutdown(0); + cameraThread = null; + } if (cameraSession != null) { CameraController.getInstance().close(cameraSession, null, null); } @@ -253,51 +486,9 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur if (previewSize == null) { return; } - WindowManager manager = (WindowManager) ApplicationLoader.applicationContext.getSystemService(Activity.WINDOW_SERVICE); - adjustAspectRatio(previewSize.getWidth(), previewSize.getHeight(), manager.getDefaultDisplay().getRotation()); - } - private void adjustAspectRatio(int previewWidth, int previewHeight, int rotation) { - txform.reset(); - - int viewWidth = getWidth(); - int viewHeight = getHeight(); - float viewCenterX = viewWidth / 2; - float viewCenterY = viewHeight / 2; - - float scale; - if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) { - scale = Math.max((float) (viewHeight + clipTop + clipBottom) / previewWidth, (float) (viewWidth) / previewHeight); - } else { - scale = Math.max((float) (viewHeight + clipTop + clipBottom) / previewHeight, (float) (viewWidth) / previewWidth); - } - - float previewWidthScaled = previewWidth * scale; - 
float previewHeightScaled = previewHeight * scale; - - float scaleX = previewHeightScaled / (viewWidth); - float scaleY = previewWidthScaled / (viewHeight); - - txform.postScale(scaleX, scaleY, viewCenterX, viewCenterY); - - if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) { - txform.postRotate(90 * (rotation - 2), viewCenterX, viewCenterY); - } else { - if (Surface.ROTATION_180 == rotation) { - txform.postRotate(180, viewCenterX, viewCenterY); - } - } - - if (mirror) { - txform.postScale(-1, 1, viewCenterX, viewCenterY); - } - if (clipTop != 0) { - txform.postTranslate(0, -clipTop / 2); - } else if (clipBottom != 0) { - txform.postTranslate(0, clipBottom / 2); - } - - textureView.setTransform(txform); + int viewWidth = textureView.getWidth(); + int viewHeight = textureView.getHeight(); Matrix matrix = new Matrix(); if (cameraSession != null) { @@ -306,6 +497,18 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur matrix.postScale(viewWidth / 2000f, viewHeight / 2000f); matrix.postTranslate(viewWidth / 2f, viewHeight / 2f); matrix.invert(this.matrix); + + if (cameraThread != null) { + cameraThread.postRunnable(() -> { + if (cameraThread.currentSession != null) { + int rotationAngle = cameraThread.currentSession.getWorldAngle(); + android.opengl.Matrix.setIdentityM(mMVPMatrix, 0); + if (rotationAngle != 0) { + android.opengl.Matrix.rotateM(mMVPMatrix, 0, rotationAngle, 0, 0, 1); + } + } + }); + } } private Rect calculateTapArea(float x, float y, float coefficient) { @@ -415,4 +618,1405 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur } return result; } + + private float takePictureProgress = 1f; + + public void startTakePictureAnimation() { + takePictureProgress = 0; + invalidate(); + runHaptic(); + } + + public void runHaptic() { + long[] vibrationWaveFormDurationPattern = {0, 1}; + + if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) { + + final 
Vibrator vibrator = (Vibrator) getContext().getSystemService(Context.VIBRATOR_SERVICE); + VibrationEffect vibrationEffect = VibrationEffect.createWaveform(vibrationWaveFormDurationPattern, -1); + + vibrator.cancel(); + vibrator.vibrate(vibrationEffect); + } else { + performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + } + + + @Override + protected void dispatchDraw(Canvas canvas) { + if (flipAnimator != null) { + canvas.drawColor(Color.BLACK); + } + super.dispatchDraw(canvas); + if (takePictureProgress != 1f) { + takePictureProgress += 16 / 150f; + if (takePictureProgress > 1f) { + takePictureProgress = 1f; + } else { + invalidate(); + } + canvas.drawColor(ColorUtils.setAlphaComponent(Color.BLACK, (int) ((1f - takePictureProgress) * 150))); + } + } + + + private int[] position = new int[2]; + private int[] cameraTexture = new int[1]; + private int[] oldCameraTexture = new int[1]; + private VideoRecorder videoEncoder; + + public class CameraGLThread extends DispatchQueue { + + private final static int EGL_CONTEXT_CLIENT_VERSION = 0x3098; + private final static int EGL_OPENGL_ES2_BIT = 4; + private SurfaceTexture surfaceTexture; + private EGL10 egl10; + private EGLDisplay eglDisplay; + private EGLContext eglContext; + private EGLSurface eglSurface; + private EGLConfig eglConfig; + private boolean initied; + + private CameraSession currentSession; + + private SurfaceTexture cameraSurface; + + private final int DO_RENDER_MESSAGE = 0; + private final int DO_SHUTDOWN_MESSAGE = 1; + private final int DO_REINIT_MESSAGE = 2; + private final int DO_SETSESSION_MESSAGE = 3; + private final int DO_START_RECORDING = 4; + private final int DO_STOP_RECORDING = 5; + + private int drawProgram; + private int vertexMatrixHandle; + private int textureMatrixHandle; + private int positionHandle; + private int textureHandle; + + private boolean recording; + private boolean needRecord; + + private Integer cameraId = 0; + + 
//private InstantCameraView.VideoRecorder videoEncoder; + + public CameraGLThread(SurfaceTexture surface) { + super("CameraGLThread"); + surfaceTexture = surface; + } + + private boolean initGL() { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("start init gl"); + } + egl10 = (EGL10) EGLContext.getEGL(); + + eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + if (eglDisplay == EGL10.EGL_NO_DISPLAY) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("eglGetDisplay failed " + GLUtils.getEGLErrorString(egl10.eglGetError())); + } + finish(); + return false; + } + + int[] version = new int[2]; + if (!egl10.eglInitialize(eglDisplay, version)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("eglInitialize failed " + GLUtils.getEGLErrorString(egl10.eglGetError())); + } + finish(); + return false; + } + + int[] configsCount = new int[1]; + EGLConfig[] configs = new EGLConfig[1]; + int[] configSpec = new int[]{ + EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, + EGL10.EGL_RED_SIZE, 8, + EGL10.EGL_GREEN_SIZE, 8, + EGL10.EGL_BLUE_SIZE, 8, + EGL10.EGL_ALPHA_SIZE, 0, + EGL10.EGL_DEPTH_SIZE, 0, + EGL10.EGL_STENCIL_SIZE, 0, + EGL10.EGL_NONE + }; + if (!egl10.eglChooseConfig(eglDisplay, configSpec, configs, 1, configsCount)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("eglChooseConfig failed " + GLUtils.getEGLErrorString(egl10.eglGetError())); + } + finish(); + return false; + } else if (configsCount[0] > 0) { + eglConfig = configs[0]; + } else { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("eglConfig not initialized"); + } + finish(); + return false; + } + int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE}; + eglContext = egl10.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list); + if (eglContext == null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("eglCreateContext failed " + GLUtils.getEGLErrorString(egl10.eglGetError())); + } + finish(); + return false; + } + + if (surfaceTexture != null) { + eglSurface = 
egl10.eglCreateWindowSurface(eglDisplay, eglConfig, surfaceTexture, null); + } else { + finish(); + return false; + } + + if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("createWindowSurface failed " + GLUtils.getEGLErrorString(egl10.eglGetError())); + } + finish(); + return false; + } + if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("eglMakeCurrent failed " + GLUtils.getEGLErrorString(egl10.eglGetError())); + } + finish(); + return false; + } + GL gl = eglContext.getGL(); + + android.opengl.Matrix.setIdentityM(mSTMatrix, 0); + + int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER); + int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SCREEN_SHADER); + if (vertexShader != 0 && fragmentShader != 0) { + drawProgram = GLES20.glCreateProgram(); + GLES20.glAttachShader(drawProgram, vertexShader); + GLES20.glAttachShader(drawProgram, fragmentShader); + GLES20.glLinkProgram(drawProgram); + int[] linkStatus = new int[1]; + GLES20.glGetProgramiv(drawProgram, GLES20.GL_LINK_STATUS, linkStatus, 0); + if (linkStatus[0] == 0) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("failed link shader"); + } + GLES20.glDeleteProgram(drawProgram); + drawProgram = 0; + } else { + positionHandle = GLES20.glGetAttribLocation(drawProgram, "aPosition"); + textureHandle = GLES20.glGetAttribLocation(drawProgram, "aTextureCoord"); + vertexMatrixHandle = GLES20.glGetUniformLocation(drawProgram, "uMVPMatrix"); + textureMatrixHandle = GLES20.glGetUniformLocation(drawProgram, "uSTMatrix"); + } + } else { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("failed creating shader"); + } + finish(); + return false; + } + + GLES20.glGenTextures(1, cameraTexture, 0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, 
GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + + android.opengl.Matrix.setIdentityM(mMVPMatrix, 0); + + cameraSurface = new SurfaceTexture(cameraTexture[0]); + cameraSurface.setOnFrameAvailableListener(surfaceTexture -> requestRender()); + createCamera(cameraSurface); + if (BuildVars.LOGS_ENABLED) { + FileLog.e("gl initied"); + } + + + float tX = 1.0f / 2.0f; + float tY = 1.0f / 2.0f; + float[] verticesData = { + -1.0f, -1.0f, 0, + 1.0f, -1.0f, 0, + -1.0f, 1.0f, 0, + 1.0f, 1.0f, 0 + }; + float[] texData = { + 0.5f - tX, 0.5f - tY, + 0.5f + tX, 0.5f - tY, + 0.5f - tX, 0.5f + tY, + 0.5f + tX, 0.5f + tY + }; + + vertexBuffer = ByteBuffer.allocateDirect(verticesData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer(); + vertexBuffer.put(verticesData).position(0); + + textureBuffer = ByteBuffer.allocateDirect(texData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer(); + textureBuffer.put(texData).position(0); + + return true; + } + + public void reinitForNewCamera() { + Handler handler = getHandler(); + if (handler != null) { + sendMessage(handler.obtainMessage(DO_REINIT_MESSAGE, info.cameraId), 0); + } + } + + public void finish() { + if (eglSurface != null) { + egl10.eglMakeCurrent(eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT); + egl10.eglDestroySurface(eglDisplay, eglSurface); + eglSurface = null; + } + if (eglContext != null) { + egl10.eglDestroyContext(eglDisplay, eglContext); + eglContext = null; + } + if (eglDisplay != null) { + egl10.eglTerminate(eglDisplay); + eglDisplay = null; + } + } + + public void setCurrentSession(CameraSession session) { + Handler handler = getHandler(); + if (handler != null) { + 
sendMessage(handler.obtainMessage(DO_SETSESSION_MESSAGE, session), 0); + } + } + + final int array[] = new int[1]; + + private void onDraw(Integer cameraId, boolean updateTexImage) { + if (!initied) { + return; + } + + if (!eglContext.equals(egl10.eglGetCurrentContext()) || !eglSurface.equals(egl10.eglGetCurrentSurface(EGL10.EGL_DRAW))) { + if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("eglMakeCurrent failed " + GLUtils.getEGLErrorString(egl10.eglGetError())); + } + return; + } + } + if (updateTexImage) { + try { + cameraSurface.updateTexImage(); + } catch (Throwable e) { + FileLog.e(e); + } + } + + if (currentSession == null || currentSession.cameraInfo.cameraId != cameraId) { + return; + } + + if (recording && videoEncoder != null) { + videoEncoder.frameAvailable(cameraSurface, cameraId, System.nanoTime()); + } + + cameraSurface.getTransformMatrix(mSTMatrix); + + egl10.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, array); + int drawnWidth = array[0]; + egl10.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, array); + int drawnHeight = array[0]; + + GLES20.glViewport(0, 0, drawnWidth, drawnHeight); + + GLES20.glUseProgram(drawProgram); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]); + + GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 12, vertexBuffer); + GLES20.glEnableVertexAttribArray(positionHandle); + + GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer); + GLES20.glEnableVertexAttribArray(textureHandle); + + GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, mSTMatrix, 0); + GLES20.glUniformMatrix4fv(vertexMatrixHandle, 1, false, mMVPMatrix, 0); + + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + + GLES20.glDisableVertexAttribArray(positionHandle); + GLES20.glDisableVertexAttribArray(textureHandle); + 
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); + GLES20.glUseProgram(0); + + egl10.eglSwapBuffers(eglDisplay, eglSurface); + + synchronized (layoutLock) { + if (!firstFrameRendered) { + firstFrameRendered = true; + AndroidUtilities.runOnUIThread(() -> { + onFirstFrameRendered(); + }); + } + } + } + + @Override + public void run() { + initied = initGL(); + super.run(); + } + + @Override + public void handleMessage(Message inputMessage) { + int what = inputMessage.what; + + switch (what) { + case DO_RENDER_MESSAGE: + onDraw((Integer) inputMessage.obj, true); + break; + case DO_SHUTDOWN_MESSAGE: + finish(); + if (recording) { + videoEncoder.stopRecording(inputMessage.arg1); + } + Looper looper = Looper.myLooper(); + if (looper != null) { + looper.quit(); + } + break; + case DO_REINIT_MESSAGE: { + if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("eglMakeCurrent failed " + GLUtils.getEGLErrorString(egl10.eglGetError())); + } + return; + } + + if (cameraSurface != null) { + cameraSurface.getTransformMatrix(moldSTMatrix); + cameraSurface.setOnFrameAvailableListener(null); + cameraSurface.release(); + } + + cameraId = (Integer) inputMessage.obj; + + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + + cameraSurface = new SurfaceTexture(cameraTexture[0]); + cameraSurface.setOnFrameAvailableListener(surfaceTexture -> requestRender()); + createCamera(cameraSurface); + break; + } + case DO_SETSESSION_MESSAGE: { + if 
(BuildVars.LOGS_ENABLED) { + FileLog.d("set gl rednderer session"); + } + CameraSession newSession = (CameraSession) inputMessage.obj; + if (currentSession == newSession) { + int rotationAngle = currentSession.getWorldAngle(); + android.opengl.Matrix.setIdentityM(mMVPMatrix, 0); + if (rotationAngle != 0) { + android.opengl.Matrix.rotateM(mMVPMatrix, 0, rotationAngle, 0, 0, 1); + } + } else { + currentSession = newSession; + cameraId = newSession.cameraInfo.cameraId; + } + break; + } + case DO_START_RECORDING: { + if (!initied) { + return; + } + recordFile = (File) inputMessage.obj; + videoEncoder = new VideoRecorder(); + recording = true; + videoEncoder.startRecording(recordFile, EGL14.eglGetCurrentContext()); + break; + } + case DO_STOP_RECORDING: { + if (videoEncoder != null) { + videoEncoder.stopRecording(0); + videoEncoder = null; + } + recording = false; + break; + } + } + } + + public void shutdown(int send) { + Handler handler = getHandler(); + if (handler != null) { + sendMessage(handler.obtainMessage(DO_SHUTDOWN_MESSAGE, send, 0), 0); + } + } + + public void requestRender() { + Handler handler = getHandler(); + if (handler != null) { + sendMessage(handler.obtainMessage(DO_RENDER_MESSAGE, cameraId), 0); + } + } + + public boolean startRecording(File path) { + Handler handler = getHandler(); + if (handler != null) { + sendMessage(handler.obtainMessage(DO_START_RECORDING, path), 0); + return false; + } + return true; + } + + public void stopRecording() { + Handler handler = getHandler(); + if (handler != null) { + sendMessage(handler.obtainMessage(DO_STOP_RECORDING), 0); + } + } + } + + private void onFirstFrameRendered() { + if (blurredStubView.getVisibility() == View.VISIBLE) { + blurredStubView.animate().alpha(0).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + blurredStubView.setVisibility(View.GONE); + } + }).start(); + } + } + + private int loadShader(int 
type, String shaderCode) { + int shader = GLES20.glCreateShader(type); + GLES20.glShaderSource(shader, shaderCode); + GLES20.glCompileShader(shader); + int[] compileStatus = new int[1]; + GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0); + if (compileStatus[0] == 0) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e(GLES20.glGetShaderInfoLog(shader)); + } + GLES20.glDeleteShader(shader); + shader = 0; + } + return shader; + } + + private void createCamera(final SurfaceTexture surfaceTexture) { + AndroidUtilities.runOnUIThread(() -> { + if (cameraThread == null) { + return; + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("create camera session"); + } + if (previewSize == null) { + updateCameraInfoSize(); + } + if (previewSize == null) { + return; + } + surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); + + cameraSession = new CameraSession(info, previewSize, pictureSize, ImageFormat.JPEG); + cameraThread.setCurrentSession(cameraSession); + requestLayout(); + + CameraController.getInstance().open(cameraSession, surfaceTexture, () -> { + if (cameraSession != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("camera initied"); + } + cameraSession.setInitied(); + } + }, () -> cameraThread.setCurrentSession(cameraSession)); + }); + } + + + private class VideoRecorder implements Runnable { + + private static final String VIDEO_MIME_TYPE = "video/avc"; + private static final String AUDIO_MIME_TYPE = "audio/mp4a-latm"; + private static final int FRAME_RATE = 30; + private static final int IFRAME_INTERVAL = 1; + + private File videoFile; + private int videoWidth; + private int videoHeight; + private int videoBitrate; + private boolean videoConvertFirstWrite = true; + private boolean blendEnabled; + + private Surface surface; + private android.opengl.EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY; + private android.opengl.EGLContext eglContext = EGL14.EGL_NO_CONTEXT; + private android.opengl.EGLContext sharedEglContext; + 
private android.opengl.EGLConfig eglConfig; + private android.opengl.EGLSurface eglSurface = EGL14.EGL_NO_SURFACE; + + private MediaCodec videoEncoder; + private MediaCodec audioEncoder; + + private int prependHeaderSize; + private boolean firstEncode; + + private MediaCodec.BufferInfo videoBufferInfo; + private MediaCodec.BufferInfo audioBufferInfo; + private MP4Builder mediaMuxer; + private ArrayList buffersToWrite = new ArrayList<>(); + private int videoTrackIndex = -5; + private int audioTrackIndex = -5; + + private long lastCommitedFrameTime; + private long audioStartTime = -1; + + private long currentTimestamp = 0; + private long lastTimestamp = -1; + + private volatile EncoderHandler handler; + + private final Object sync = new Object(); + private boolean ready; + private volatile boolean running; + private volatile int sendWhenDone; + private long skippedTime; + private boolean skippedFirst; + + private long desyncTime; + private long videoFirst = -1; + private long videoLast; + private long audioFirst = -1; + private boolean audioStopedByTime; + + private int drawProgram; + private int vertexMatrixHandle; + private int textureMatrixHandle; + private int positionHandle; + private int textureHandle; + private int zeroTimeStamps; + private Integer lastCameraId = 0; + + private AudioRecord audioRecorder; + private FloatBuffer textureBuffer; + + private ArrayBlockingQueue buffers = new ArrayBlockingQueue<>(10); + private ArrayList keyframeThumbs = new ArrayList<>(); + private DispatchQueue generateKeyframeThumbsQueue; + private int frameCount; + + private Runnable recorderRunnable = new Runnable() { + + @Override + public void run() { + long audioPresentationTimeUs = -1; + int readResult; + boolean done = false; + while (!done) { + if (!running && audioRecorder.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) { + try { + audioRecorder.stop(); + } catch (Exception e) { + done = true; + } + if (sendWhenDone == 0) { + break; + } + } + 
InstantCameraView.AudioBufferInfo buffer; + if (buffers.isEmpty()) { + buffer = new InstantCameraView.AudioBufferInfo(); + } else { + buffer = buffers.poll(); + } + buffer.lastWroteBuffer = 0; + buffer.results = InstantCameraView.AudioBufferInfo.MAX_SAMPLES; + for (int a = 0; a < InstantCameraView.AudioBufferInfo.MAX_SAMPLES; a++) { + if (audioPresentationTimeUs == -1) { + audioPresentationTimeUs = System.nanoTime() / 1000; + } + + ByteBuffer byteBuffer = buffer.buffer[a]; + byteBuffer.rewind(); + readResult = audioRecorder.read(byteBuffer, 2048); + + if (readResult <= 0) { + buffer.results = a; + if (!running) { + buffer.last = true; + } + break; + } + buffer.offset[a] = audioPresentationTimeUs; + buffer.read[a] = readResult; + int bufferDurationUs = 1000000 * readResult / 44100 / 2; + audioPresentationTimeUs += bufferDurationUs; + } + if (buffer.results >= 0 || buffer.last) { + if (!running && buffer.results < InstantCameraView.AudioBufferInfo.MAX_SAMPLES) { + done = true; + } + handler.sendMessage(handler.obtainMessage(MSG_AUDIOFRAME_AVAILABLE, buffer)); + } else { + if (!running) { + done = true; + } else { + try { + buffers.put(buffer); + } catch (Exception ignore) { + + } + } + } + } + try { + audioRecorder.release(); + } catch (Exception e) { + FileLog.e(e); + } + handler.sendMessage(handler.obtainMessage(MSG_STOP_RECORDING, sendWhenDone, 0)); + } + }; + + public void startRecording(File outputFile, android.opengl.EGLContext sharedContext) { + String model = Build.DEVICE; + if (model == null) { + model = ""; + } + + Size pictureSize; + int bitrate; + pictureSize = new Size(16, 9); + pictureSize = CameraController.chooseOptimalSize(info.getPictureSizes(), 720, 480, pictureSize); + if (Math.min(pictureSize.mHeight, pictureSize.mWidth) >= 720) { + bitrate = 3500000; + } else { + bitrate = 1800000; + } + + videoFile = outputFile; + + if (cameraSession.getWorldAngle() == 90 || cameraSession.getWorldAngle() == 270) { + videoWidth = pictureSize.getWidth(); + 
videoHeight = pictureSize.getHeight(); + } else { + videoWidth = pictureSize.getHeight(); + videoHeight = pictureSize.getWidth(); + } + videoBitrate = bitrate; + sharedEglContext = sharedContext; + + synchronized (sync) { + if (running) { + return; + } + running = true; + Thread thread = new Thread(this, "TextureMovieEncoder"); + thread.setPriority(Thread.MAX_PRIORITY); + thread.start(); + while (!ready) { + try { + sync.wait(); + } catch (InterruptedException ie) { + // ignore + } + } + } + keyframeThumbs.clear(); + frameCount = 0; + if (generateKeyframeThumbsQueue != null) { + generateKeyframeThumbsQueue.cleanupQueue(); + generateKeyframeThumbsQueue.recycle(); + } + generateKeyframeThumbsQueue = new DispatchQueue("keyframes_thumb_queque"); + handler.sendMessage(handler.obtainMessage(MSG_START_RECORDING)); + } + + public void stopRecording(int send) { + handler.sendMessage(handler.obtainMessage(MSG_STOP_RECORDING, send, 0)); + } + + public void frameAvailable(SurfaceTexture st, Integer cameraId, long timestampInternal) { + synchronized (sync) { + if (!ready) { + return; + } + } + + long timestamp = st.getTimestamp(); + if (timestamp == 0) { + zeroTimeStamps++; + if (zeroTimeStamps > 1) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("fix timestamp enabled"); + } + timestamp = timestampInternal; + } else { + return; + } + } else { + zeroTimeStamps = 0; + } + + handler.sendMessage(handler.obtainMessage(MSG_VIDEOFRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, cameraId)); + } + + @Override + public void run() { + Looper.prepare(); + synchronized (sync) { + handler = new EncoderHandler(this); + ready = true; + sync.notify(); + } + Looper.loop(); + + synchronized (sync) { + ready = false; + } + } + + private void handleAudioFrameAvailable(InstantCameraView.AudioBufferInfo input) { + if (audioStopedByTime) { + return; + } + buffersToWrite.add(input); + if (audioFirst == -1) { + if (videoFirst == -1) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("video record not 
yet started"); + } + return; + } + while (true) { + boolean ok = false; + for (int a = 0; a < input.results; a++) { + if (a == 0 && Math.abs(videoFirst - input.offset[a]) > 10000000L) { + desyncTime = videoFirst - input.offset[a]; + audioFirst = input.offset[a]; + ok = true; + if (BuildVars.LOGS_ENABLED) { + FileLog.d("detected desync between audio and video " + desyncTime); + } + break; + } + if (input.offset[a] >= videoFirst) { + input.lastWroteBuffer = a; + audioFirst = input.offset[a]; + ok = true; + if (BuildVars.LOGS_ENABLED) { + FileLog.d("found first audio frame at " + a + " timestamp = " + input.offset[a]); + } + break; + } else { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("ignore first audio frame at " + a + " timestamp = " + input.offset[a]); + } + } + } + if (!ok) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("first audio frame not found, removing buffers " + input.results); + } + buffersToWrite.remove(input); + } else { + break; + } + if (!buffersToWrite.isEmpty()) { + input = buffersToWrite.get(0); + } else { + return; + } + } + } + + if (audioStartTime == -1) { + audioStartTime = input.offset[input.lastWroteBuffer]; + } + if (buffersToWrite.size() > 1) { + input = buffersToWrite.get(0); + } + try { + drainEncoder(false); + } catch (Exception e) { + FileLog.e(e); + } + try { + boolean isLast = false; + while (input != null) { + int inputBufferIndex = audioEncoder.dequeueInputBuffer(0); + if (inputBufferIndex >= 0) { + ByteBuffer inputBuffer; + if (Build.VERSION.SDK_INT >= 21) { + inputBuffer = audioEncoder.getInputBuffer(inputBufferIndex); + } else { + ByteBuffer[] inputBuffers = audioEncoder.getInputBuffers(); + inputBuffer = inputBuffers[inputBufferIndex]; + inputBuffer.clear(); + } + long startWriteTime = input.offset[input.lastWroteBuffer]; + for (int a = input.lastWroteBuffer; a <= input.results; a++) { + if (a < input.results) { + if (!running && input.offset[a] >= videoLast - desyncTime) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("stop 
audio encoding because of stoped video recording at " + input.offset[a] + " last video " + videoLast); + } + audioStopedByTime = true; + isLast = true; + input = null; + buffersToWrite.clear(); + break; + } + if (inputBuffer.remaining() < input.read[a]) { + input.lastWroteBuffer = a; + input = null; + break; + } + inputBuffer.put(input.buffer[a]); + } + if (a >= input.results - 1) { + buffersToWrite.remove(input); + if (running) { + buffers.put(input); + } + if (!buffersToWrite.isEmpty()) { + input = buffersToWrite.get(0); + } else { + isLast = input.last; + input = null; + break; + } + } + } + audioEncoder.queueInputBuffer(inputBufferIndex, 0, inputBuffer.position(), startWriteTime == 0 ? 0 : startWriteTime - audioStartTime, isLast ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0); + } + } + } catch (Throwable e) { + FileLog.e(e); + } + } + + private void handleVideoFrameAvailable(long timestampNanos, Integer cameraId) { + try { + drainEncoder(false); + } catch (Exception e) { + FileLog.e(e); + } + long dt; + if (!lastCameraId.equals(cameraId)) { + lastTimestamp = -1; + lastCameraId = cameraId; + } + if (lastTimestamp == -1) { + lastTimestamp = timestampNanos; + if (currentTimestamp != 0) { + dt = (System.currentTimeMillis() - lastCommitedFrameTime) * 1000000; + } else { + dt = 0; + } + } else { + dt = (timestampNanos - lastTimestamp); + lastTimestamp = timestampNanos; + } + lastCommitedFrameTime = System.currentTimeMillis(); + if (!skippedFirst) { + skippedTime += dt; + if (skippedTime < 200000000) { + return; + } + skippedFirst = true; + } + currentTimestamp += dt; + if (videoFirst == -1) { + videoFirst = timestampNanos / 1000; + if (BuildVars.LOGS_ENABLED) { + FileLog.d("first video frame was at " + videoFirst); + } + } + videoLast = timestampNanos; + + + GLES20.glUseProgram(drawProgram); + GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false, 12, vertexBuffer); + GLES20.glEnableVertexAttribArray(positionHandle); + 
GLES20.glVertexAttribPointer(textureHandle, 2, GLES20.GL_FLOAT, false, 8, textureBuffer); + GLES20.glEnableVertexAttribArray(textureHandle); + GLES20.glUniformMatrix4fv(vertexMatrixHandle, 1, false, mMVPMatrix, 0); + + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + if (oldCameraTexture[0] != 0) { + if (!blendEnabled) { + GLES20.glEnable(GLES20.GL_BLEND); + blendEnabled = true; + } + GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, moldSTMatrix, 0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oldCameraTexture[0]); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + } + GLES20.glUniformMatrix4fv(textureMatrixHandle, 1, false, mSTMatrix, 0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexture[0]); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + + GLES20.glDisableVertexAttribArray(positionHandle); + GLES20.glDisableVertexAttribArray(textureHandle); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); + GLES20.glUseProgram(0); + + EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, currentTimestamp); + EGL14.eglSwapBuffers(eglDisplay, eglSurface); + } + + private void handleStopRecording(final int send) { + if (running) { + sendWhenDone = send; + running = false; + return; + } + try { + drainEncoder(true); + } catch (Exception e) { + FileLog.e(e); + } + if (videoEncoder != null) { + try { + videoEncoder.stop(); + videoEncoder.release(); + videoEncoder = null; + } catch (Exception e) { + FileLog.e(e); + } + } + if (audioEncoder != null) { + try { + audioEncoder.stop(); + audioEncoder.release(); + audioEncoder = null; + } catch (Exception e) { + FileLog.e(e); + } + } + if (mediaMuxer != null) { + try { + mediaMuxer.finishMovie(); + } catch (Exception e) { + FileLog.e(e); + } + } + + EGL14.eglDestroySurface(eglDisplay, eglSurface); + eglSurface = EGL14.EGL_NO_SURFACE; + if (surface != null) { + surface.release(); + surface = null; + } + if (eglDisplay != EGL14.EGL_NO_DISPLAY) { + 
EGL14.eglMakeCurrent(eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT); + EGL14.eglDestroyContext(eglDisplay, eglContext); + EGL14.eglReleaseThread(); + EGL14.eglTerminate(eglDisplay); + } + eglDisplay = EGL14.EGL_NO_DISPLAY; + eglContext = EGL14.EGL_NO_CONTEXT; + eglConfig = null; + handler.exit(); + + AndroidUtilities.runOnUIThread(() -> { + onRecordingFinishRunnable.run(); + }); + } + + private void prepareEncoder() { + try { + int recordBufferSize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); + if (recordBufferSize <= 0) { + recordBufferSize = 3584; + } + int bufferSize = 2048 * 24; + if (bufferSize < recordBufferSize) { + bufferSize = ((recordBufferSize / 2048) + 1) * 2048 * 2; + } + for (int a = 0; a < 3; a++) { + buffers.add(new InstantCameraView.AudioBufferInfo()); + } + audioRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize); + audioRecorder.startRecording(); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("initied audio record with channels " + audioRecorder.getChannelCount() + " sample rate = " + audioRecorder.getSampleRate() + " bufferSize = " + bufferSize); + } + Thread thread = new Thread(recorderRunnable); + thread.setPriority(Thread.MAX_PRIORITY); + thread.start(); + + audioBufferInfo = new MediaCodec.BufferInfo(); + videoBufferInfo = new MediaCodec.BufferInfo(); + + MediaFormat audioFormat = new MediaFormat(); + audioFormat.setString(MediaFormat.KEY_MIME, AUDIO_MIME_TYPE); + audioFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100); + audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1); + audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 32000); + audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 2048 * InstantCameraView.AudioBufferInfo.MAX_SAMPLES); + + audioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE); + audioEncoder.configure(audioFormat, null, null, 
MediaCodec.CONFIGURE_FLAG_ENCODE); + audioEncoder.start(); + + videoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE); + firstEncode = true; + + MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, videoWidth, videoHeight); + + format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); + format.setInteger(MediaFormat.KEY_BIT_RATE, videoBitrate); + format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); + format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); + + videoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + surface = videoEncoder.createInputSurface(); + videoEncoder.start(); + + Mp4Movie movie = new Mp4Movie(); + movie.setCacheFile(videoFile); + movie.setRotation(0); + movie.setSize(videoWidth, videoHeight); + mediaMuxer = new MP4Builder().createMovie(movie, false); + + } catch (Exception ioe) { + throw new RuntimeException(ioe); + } + + if (eglDisplay != EGL14.EGL_NO_DISPLAY) { + throw new RuntimeException("EGL already set up"); + } + + eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); + if (eglDisplay == EGL14.EGL_NO_DISPLAY) { + throw new RuntimeException("unable to get EGL14 display"); + } + int[] version = new int[2]; + if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) { + eglDisplay = null; + throw new RuntimeException("unable to initialize EGL14"); + } + + if (eglContext == EGL14.EGL_NO_CONTEXT) { + int renderableType = EGL14.EGL_OPENGL_ES2_BIT; + + int[] attribList = { + EGL14.EGL_RED_SIZE, 8, + EGL14.EGL_GREEN_SIZE, 8, + EGL14.EGL_BLUE_SIZE, 8, + EGL14.EGL_ALPHA_SIZE, 8, + EGL14.EGL_RENDERABLE_TYPE, renderableType, + 0x3142, 1, + EGL14.EGL_NONE + }; + android.opengl.EGLConfig[] configs = new android.opengl.EGLConfig[1]; + int[] numConfigs = new int[1]; + if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) { + throw new RuntimeException("Unable to find a suitable 
EGLConfig"); + } + + int[] attrib2_list = { + EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, + EGL14.EGL_NONE + }; + eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], sharedEglContext, attrib2_list, 0); + eglConfig = configs[0]; + } + + int[] values = new int[1]; + EGL14.eglQueryContext(eglDisplay, eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0); + + if (eglSurface != EGL14.EGL_NO_SURFACE) { + throw new IllegalStateException("surface already created"); + } + + int[] surfaceAttribs = { + EGL14.EGL_NONE + }; + eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0); + if (eglSurface == null) { + throw new RuntimeException("surface was null"); + } + + if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("eglMakeCurrent failed " + GLUtils.getEGLErrorString(EGL14.eglGetError())); + } + throw new RuntimeException("eglMakeCurrent failed"); + } + GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); + + float tX = 1.0f / 2.0f; + float tY = 1.0f / 2.0f; + + float[] texData = { + 0.5f - tX, 0.5f - tY, + 0.5f + tX, 0.5f - tY, + 0.5f - tX, 0.5f + tY, + 0.5f + tX, 0.5f + tY + }; + textureBuffer = ByteBuffer.allocateDirect(texData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer(); + textureBuffer.put(texData).position(0); + + + int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER); + int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SCREEN_SHADER); + if (vertexShader != 0 && fragmentShader != 0) { + drawProgram = GLES20.glCreateProgram(); + GLES20.glAttachShader(drawProgram, vertexShader); + GLES20.glAttachShader(drawProgram, fragmentShader); + GLES20.glLinkProgram(drawProgram); + int[] linkStatus = new int[1]; + GLES20.glGetProgramiv(drawProgram, GLES20.GL_LINK_STATUS, linkStatus, 0); + if (linkStatus[0] == 0) { + GLES20.glDeleteProgram(drawProgram); + drawProgram = 0; + } else { + positionHandle = 
GLES20.glGetAttribLocation(drawProgram, "aPosition"); + textureHandle = GLES20.glGetAttribLocation(drawProgram, "aTextureCoord"); + vertexMatrixHandle = GLES20.glGetUniformLocation(drawProgram, "uMVPMatrix"); + textureMatrixHandle = GLES20.glGetUniformLocation(drawProgram, "uSTMatrix"); + } + } + } + + public Surface getInputSurface() { + return surface; + } + + public void drainEncoder(boolean endOfStream) throws Exception { + if (endOfStream) { + videoEncoder.signalEndOfInputStream(); + } + + ByteBuffer[] encoderOutputBuffers = null; + if (Build.VERSION.SDK_INT < 21) { + encoderOutputBuffers = videoEncoder.getOutputBuffers(); + } + while (true) { + int encoderStatus = videoEncoder.dequeueOutputBuffer(videoBufferInfo, 10000); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + if (!endOfStream) { + break; + } + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + if (Build.VERSION.SDK_INT < 21) { + encoderOutputBuffers = videoEncoder.getOutputBuffers(); + } + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + MediaFormat newFormat = videoEncoder.getOutputFormat(); + if (videoTrackIndex == -5) { + videoTrackIndex = mediaMuxer.addTrack(newFormat, false); + if (newFormat.containsKey(MediaFormat.KEY_PREPEND_HEADER_TO_SYNC_FRAMES) && newFormat.getInteger(MediaFormat.KEY_PREPEND_HEADER_TO_SYNC_FRAMES) == 1) { + ByteBuffer spsBuff = newFormat.getByteBuffer("csd-0"); + ByteBuffer ppsBuff = newFormat.getByteBuffer("csd-1"); + prependHeaderSize = spsBuff.limit() + ppsBuff.limit(); + } + } + } else if (encoderStatus >= 0) { + ByteBuffer encodedData; + if (Build.VERSION.SDK_INT < 21) { + encodedData = encoderOutputBuffers[encoderStatus]; + } else { + encodedData = videoEncoder.getOutputBuffer(encoderStatus); + } + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null"); + } + if (videoBufferInfo.size > 1) { + if ((videoBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 
0) { + if (prependHeaderSize != 0 && (videoBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) { + videoBufferInfo.offset += prependHeaderSize; + videoBufferInfo.size -= prependHeaderSize; + } + if (firstEncode && (videoBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) { + if (videoBufferInfo.size > 100) { + encodedData.position(videoBufferInfo.offset); + byte[] temp = new byte[100]; + encodedData.get(temp); + int nalCount = 0; + for (int a = 0; a < temp.length - 4; a++) { + if (temp[a] == 0 && temp[a + 1] == 0 && temp[a + 2] == 0 && temp[a + 3] == 1) { + nalCount++; + if (nalCount > 1) { + videoBufferInfo.offset += a; + videoBufferInfo.size -= a; + break; + } + } + } + } + firstEncode = false; + } + long availableSize = mediaMuxer.writeSampleData(videoTrackIndex, encodedData, videoBufferInfo, true); + } else if (videoTrackIndex == -5) { + byte[] csd = new byte[videoBufferInfo.size]; + encodedData.limit(videoBufferInfo.offset + videoBufferInfo.size); + encodedData.position(videoBufferInfo.offset); + encodedData.get(csd); + ByteBuffer sps = null; + ByteBuffer pps = null; + for (int a = videoBufferInfo.size - 1; a >= 0; a--) { + if (a > 3) { + if (csd[a] == 1 && csd[a - 1] == 0 && csd[a - 2] == 0 && csd[a - 3] == 0) { + sps = ByteBuffer.allocate(a - 3); + pps = ByteBuffer.allocate(videoBufferInfo.size - (a - 3)); + sps.put(csd, 0, a - 3).position(0); + pps.put(csd, a - 3, videoBufferInfo.size - (a - 3)).position(0); + break; + } + } else { + break; + } + } + + MediaFormat newFormat = MediaFormat.createVideoFormat("video/avc", videoWidth, videoHeight); + if (sps != null && pps != null) { + newFormat.setByteBuffer("csd-0", sps); + newFormat.setByteBuffer("csd-1", pps); + } + videoTrackIndex = mediaMuxer.addTrack(newFormat, false); + } + } + videoEncoder.releaseOutputBuffer(encoderStatus, false); + if ((videoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + break; + } + } + } + + if (Build.VERSION.SDK_INT < 21) { + encoderOutputBuffers = 
audioEncoder.getOutputBuffers(); + } + boolean encoderOutputAvailable = true; + while (true) { + int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 0); + if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { + if (!endOfStream || !running && sendWhenDone == 0) { + break; + } + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { + if (Build.VERSION.SDK_INT < 21) { + encoderOutputBuffers = audioEncoder.getOutputBuffers(); + } + } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { + MediaFormat newFormat = audioEncoder.getOutputFormat(); + if (audioTrackIndex == -5) { + audioTrackIndex = mediaMuxer.addTrack(newFormat, true); + } + } else if (encoderStatus >= 0) { + ByteBuffer encodedData; + if (Build.VERSION.SDK_INT < 21) { + encodedData = encoderOutputBuffers[encoderStatus]; + } else { + encodedData = audioEncoder.getOutputBuffer(encoderStatus); + } + if (encodedData == null) { + throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null"); + } + if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + audioBufferInfo.size = 0; + } + if (audioBufferInfo.size != 0) { + mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo, false); + } + audioEncoder.releaseOutputBuffer(encoderStatus, false); + if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + break; + } + } + } + } + + @Override + protected void finalize() throws Throwable { + try { + if (eglDisplay != EGL14.EGL_NO_DISPLAY) { + EGL14.eglMakeCurrent(eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT); + EGL14.eglDestroyContext(eglDisplay, eglContext); + EGL14.eglReleaseThread(); + EGL14.eglTerminate(eglDisplay); + eglDisplay = EGL14.EGL_NO_DISPLAY; + eglContext = EGL14.EGL_NO_CONTEXT; + eglConfig = null; + } + } finally { + super.finalize(); + } + } + } + + private static final int MSG_START_RECORDING = 0; + private static final int MSG_STOP_RECORDING = 1; 
+ private static final int MSG_VIDEOFRAME_AVAILABLE = 2; + private static final int MSG_AUDIOFRAME_AVAILABLE = 3; + + private static class EncoderHandler extends Handler { + private WeakReference mWeakEncoder; + + public EncoderHandler(VideoRecorder encoder) { + mWeakEncoder = new WeakReference<>(encoder); + } + + @Override + public void handleMessage(Message inputMessage) { + int what = inputMessage.what; + Object obj = inputMessage.obj; + + VideoRecorder encoder = mWeakEncoder.get(); + if (encoder == null) { + return; + } + + switch (what) { + case MSG_START_RECORDING: { + try { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("start encoder"); + } + encoder.prepareEncoder(); + } catch (Exception e) { + FileLog.e(e); + encoder.handleStopRecording(0); + Looper.myLooper().quit(); + } + break; + } + case MSG_STOP_RECORDING: { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("stop encoder"); + } + encoder.handleStopRecording(inputMessage.arg1); + break; + } + case MSG_VIDEOFRAME_AVAILABLE: { + long timestamp = (((long) inputMessage.arg1) << 32) | (((long) inputMessage.arg2) & 0xffffffffL); + Integer cameraId = (Integer) inputMessage.obj; + encoder.handleVideoFrameAvailable(timestamp, cameraId); + break; + } + case MSG_AUDIOFRAME_AVAILABLE: { + encoder.handleAudioFrameAvailable((InstantCameraView.AudioBufferInfo) inputMessage.obj); + break; + } + } + } + + public void exit() { + Looper.myLooper().quit(); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/DefaultCameraAPI.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/DefaultCameraAPI.java new file mode 100644 index 000000000..47ab5bf9f --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/DefaultCameraAPI.java @@ -0,0 +1,4 @@ +package org.telegram.messenger.camera; + +public class DefaultCameraAPI { +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/video/TextureRenderer.java 
b/TMessagesProj/src/main/java/org/telegram/messenger/video/TextureRenderer.java index 4787d818f..40ff01b95 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/video/TextureRenderer.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/video/TextureRenderer.java @@ -604,7 +604,7 @@ public class TextureRenderer { if (entity.type == 0) { if ((entity.subType & 1) != 0) { entity.metadata = new int[3]; - entity.ptr = RLottieDrawable.create(entity.text, 512, 512, entity.metadata, false, null, false); + entity.ptr = RLottieDrawable.create(entity.text, null, 512, 512, entity.metadata, false, null, false); entity.framesPerDraw = entity.metadata[1] / videoFps; } else { if (Build.VERSION.SDK_INT >= 19) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/CallNotificationSoundProvider.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/CallNotificationSoundProvider.java index fdf00c313..bc4ee8dcc 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/CallNotificationSoundProvider.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/CallNotificationSoundProvider.java @@ -66,7 +66,7 @@ public class CallNotificationSoundProvider extends ContentProvider { } try { - VoIPBaseService srv = VoIPBaseService.getSharedInstance(); + VoIPService srv = VoIPService.getSharedInstance(); if (srv != null) { srv.startRingtoneAndVibration(); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java index 47ed54209..b1e9b52e8 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java @@ -1,7 +1,5 @@ package org.telegram.messenger.voip; -import org.json.JSONArray; -import org.json.JSONObject; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import 
org.telegram.messenger.BuildVars; @@ -24,7 +22,6 @@ public class NativeInstance { private PayloadCallback payloadCallback; private AudioLevelsCallback audioLevelsCallback; - private VideoSourcesCallback videoSourcesCallback; private VideoSourcesCallback unknownParticipantsCallback; private RequestBroadcastPartCallback requestBroadcastPartCallback; private RequestBroadcastPartCallback cancelRequestBroadcastPartCallback; @@ -32,6 +29,11 @@ public class NativeInstance { private boolean isGroup; + public static class SsrcGroup { + public String semantics; + public int[] ssrcs; + } + public interface PayloadCallback { void run(int ssrc, String value); } @@ -41,7 +43,7 @@ public class NativeInstance { } public interface VideoSourcesCallback { - void run(int[] ssrcs); + void run(long taskPtr, int[] ssrcs); } public interface RequestBroadcastPartCallback { @@ -60,17 +62,16 @@ public class NativeInstance { return instance; } - public static NativeInstance makeGroup(String logPath, PayloadCallback payloadCallback, AudioLevelsCallback audioLevelsCallback, VideoSourcesCallback videoSourcesCallback, VideoSourcesCallback unknownParticipantsCallback, RequestBroadcastPartCallback requestBroadcastPartCallback, RequestBroadcastPartCallback cancelRequestBroadcastPartCallback) { + public static NativeInstance makeGroup(String logPath, long videoCapturer, boolean screencast, boolean noiseSupression, PayloadCallback payloadCallback, AudioLevelsCallback audioLevelsCallback, VideoSourcesCallback unknownParticipantsCallback, RequestBroadcastPartCallback requestBroadcastPartCallback, RequestBroadcastPartCallback cancelRequestBroadcastPartCallback) { ContextUtils.initialize(ApplicationLoader.applicationContext); NativeInstance instance = new NativeInstance(); instance.payloadCallback = payloadCallback; instance.audioLevelsCallback = audioLevelsCallback; - instance.videoSourcesCallback = videoSourcesCallback; instance.unknownParticipantsCallback = unknownParticipantsCallback; 
instance.requestBroadcastPartCallback = requestBroadcastPartCallback; instance.cancelRequestBroadcastPartCallback = cancelRequestBroadcastPartCallback; instance.isGroup = true; - instance.nativePtr = makeGroupNativeInstance(instance, logPath, SharedConfig.disableVoiceAudioEffects); + instance.nativePtr = makeGroupNativeInstance(instance, logPath, SharedConfig.disableVoiceAudioEffects, videoCapturer, screencast, noiseSupression); return instance; } @@ -136,36 +137,16 @@ public class NativeInstance { AndroidUtilities.runOnUIThread(() -> audioLevelsCallback.run(uids, levels, voice)); } - private void onIncomingVideoSourcesUpdated(int[] ssrcs) { - if (videoSourcesCallback == null) { - return; - } - AndroidUtilities.runOnUIThread(() -> videoSourcesCallback.run(ssrcs)); - } - - private void onParticipantDescriptionsRequired(int[] ssrcs) { + private void onParticipantDescriptionsRequired(long taskPtr, int[] ssrcs) { if (unknownParticipantsCallback == null) { return; } - AndroidUtilities.runOnUIThread(() -> unknownParticipantsCallback.run(ssrcs)); + AndroidUtilities.runOnUIThread(() -> unknownParticipantsCallback.run(taskPtr, ssrcs)); } - private void onEmitJoinPayload(String ufrag, String pwd, Instance.Fingerprint[] fingerprints, int ssrc) { + private void onEmitJoinPayload(String json, int ssrc) { try { - JSONObject json = new JSONObject(); - json.put("ufrag", ufrag); - json.put("pwd", pwd); - JSONArray array = new JSONArray(); - for (int a = 0; a < fingerprints.length; a++) { - JSONObject object = new JSONObject(); - object.put("hash", fingerprints[a].hash); - object.put("fingerprint", fingerprints[a].fingerprint); - object.put("setup", fingerprints[a].setup); - array.put(object); - } - json.put("fingerprints", array); - json.put("ssrc", ssrc); - AndroidUtilities.runOnUIThread(() -> payloadCallback.run(ssrc, json.toString())); + AndroidUtilities.runOnUIThread(() -> payloadCallback.run(ssrc, json)); } catch (Exception e) { FileLog.e(e); } @@ -179,9 +160,9 @@ public class 
NativeInstance { cancelRequestBroadcastPartCallback.run(timestamp, 0); } - public native void setJoinResponsePayload(String ufrag, String pwd, Instance.Fingerprint[] fingerprints, Instance.Candidate[] candidates); + public native void setJoinResponsePayload(String payload); public native void prepareForStream(); - public native void resetGroupInstance(boolean disconnect); + public native void resetGroupInstance(boolean set, boolean disconnect); private Instance.FinalState finalState; private CountDownLatch stopBarrier; @@ -207,14 +188,19 @@ public class NativeInstance { stopGroupNative(); } - private static native long makeGroupNativeInstance(NativeInstance instance, String persistentStateFilePath, boolean highQuality); + private static native long makeGroupNativeInstance(NativeInstance instance, String persistentStateFilePath, boolean highQuality, long videoCapturer, boolean screencast, boolean noiseSupression); private static native long makeNativeInstance(String version, NativeInstance instance, Instance.Config config, String persistentStateFilePath, Instance.Endpoint[] endpoints, Instance.Proxy proxy, int networkType, Instance.EncryptionKey encryptionKey, VideoSink remoteSink, long videoCapturer, float aspectRatio); - public static native long createVideoCapturer(VideoSink localSink, boolean front); + public static native long createVideoCapturer(VideoSink localSink, int type); public static native void setVideoStateCapturer(long videoCapturer, int videoState); public static native void switchCameraCapturer(long videoCapturer, boolean front); public static native void destroyVideoCapturer(long videoCapturer); - public native void addParticipants(int[] ssrcs, Object[] array); + public native void onMediaDescriptionAvailable(long taskPtr, int[] ssrcs); + public native void setNoiseSuppressionEnabled(boolean value); + public native void activateVideoCapturer(long videoCapturer); + public native long addIncomingVideoOutput(int quality, String endpointId, 
SsrcGroup[] ssrcGroups, VideoSink remoteSink); + public native void removeIncomingVideoOutput(long nativeRemoteSink); + public native void setVideoEndpointQuality(String endpointId, int quality); public native void setGlobalServerConfig(String serverConfigJson); public native void setBufferSize(int size); public native String getVersion(); @@ -231,8 +217,10 @@ public class NativeInstance { private native void stopNative(); private native void stopGroupNative(); public native void setupOutgoingVideo(VideoSink localSink, boolean front); + public native void setupOutgoingVideoCreated(long videoCapturer); public native void switchCamera(boolean front); public native void setVideoState(int videoState); public native void onSignalingDataReceive(byte[] data); public native void onStreamPartAvailable(long ts, ByteBuffer buffer, int size, long timestamp); + public native boolean hasVideoCapturer(); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/TelegramConnectionService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/TelegramConnectionService.java index e3c96618b..f5555c484 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/TelegramConnectionService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/TelegramConnectionService.java @@ -63,8 +63,8 @@ public class TelegramConnectionService extends ConnectionService { if (BuildVars.LOGS_ENABLED) { FileLog.e("onCreateIncomingConnectionFailed "/*+request*/); } - if (VoIPBaseService.getSharedInstance() != null) { - VoIPBaseService.getSharedInstance().callFailedFromConnectionService(); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().callFailedFromConnectionService(); } } @@ -73,8 +73,8 @@ public class TelegramConnectionService extends ConnectionService { if (BuildVars.LOGS_ENABLED) { FileLog.e("onCreateOutgoingConnectionFailed "/*+request*/); } - if (VoIPBaseService.getSharedInstance() != null) { - 
VoIPBaseService.getSharedInstance().callFailedFromConnectionService(); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().callFailedFromConnectionService(); } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java deleted file mode 100644 index a387e89e3..000000000 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java +++ /dev/null @@ -1,206 +0,0 @@ -package org.telegram.messenger.voip; - -import android.annotation.TargetApi; -import android.os.Build; -import android.os.Handler; -import android.os.HandlerThread; - -import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.ApplicationLoader; -import org.telegram.messenger.FileLog; -import org.webrtc.Camera1Enumerator; -import org.webrtc.Camera2Enumerator; -import org.webrtc.CameraEnumerator; -import org.webrtc.CameraVideoCapturer; -import org.webrtc.CapturerObserver; -import org.webrtc.EglBase; -import org.webrtc.Logging; -import org.webrtc.SurfaceTextureHelper; - -@TargetApi(18) -public class VideoCameraCapturer { - - private static final int CAPTURE_WIDTH = Build.VERSION.SDK_INT <= 19 ? 480 : 1280; - private static final int CAPTURE_HEIGHT = Build.VERSION.SDK_INT <= 19 ? 
320 : 720; - private static final int CAPTURE_FPS = 30; - - public static EglBase eglBase; - - private CameraVideoCapturer videoCapturer; - private SurfaceTextureHelper videoCapturerSurfaceTextureHelper; - - private HandlerThread thread; - private Handler handler; - - private long nativePtr; - - private static VideoCameraCapturer instance; - private CapturerObserver nativeCapturerObserver; - - public static VideoCameraCapturer getInstance() { - return instance; - } - - public VideoCameraCapturer() { - if (Build.VERSION.SDK_INT < 18) { - return; - } - Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO); - Logging.d("VideoCameraCapturer", "device model = " + Build.MANUFACTURER + Build.MODEL); - AndroidUtilities.runOnUIThread(() -> { - instance = this; - thread = new HandlerThread("CallThread"); - thread.start(); - handler = new Handler(thread.getLooper()); - - if (eglBase == null) { - eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); - } - }); - } - - private void init(long ptr, boolean useFrontCamera) { - if (Build.VERSION.SDK_INT < 18) { - return; - } - AndroidUtilities.runOnUIThread(() -> { - if (eglBase == null) { - return; - } - nativePtr = ptr; - CameraEnumerator enumerator = Camera2Enumerator.isSupported(ApplicationLoader.applicationContext) ? 
new Camera2Enumerator(ApplicationLoader.applicationContext) : new Camera1Enumerator(); - int index = -1; - String[] names = enumerator.getDeviceNames(); - for (int a = 0; a < names.length; a++) { - boolean isFrontFace = enumerator.isFrontFacing(names[a]); - if (isFrontFace == useFrontCamera) { - index = a; - break; - } - } - if (index == -1) { - return; - } - String cameraName = names[index]; - if (videoCapturer == null) { - videoCapturer = enumerator.createCapturer(cameraName, null); - videoCapturerSurfaceTextureHelper = SurfaceTextureHelper.create("VideoCapturerThread", eglBase.getEglBaseContext()); - handler.post(() -> { - nativeCapturerObserver = nativeGetJavaVideoCapturerObserver(nativePtr); - videoCapturer.initialize(videoCapturerSurfaceTextureHelper, ApplicationLoader.applicationContext, nativeCapturerObserver); - videoCapturer.startCapture(CAPTURE_WIDTH, CAPTURE_HEIGHT, CAPTURE_FPS); - }); - } else { - handler.post(() -> videoCapturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() { - @Override - public void onCameraSwitchDone(boolean isFrontCamera) { - AndroidUtilities.runOnUIThread(() -> { - if (VoIPBaseService.getSharedInstance() != null) { - VoIPBaseService.getSharedInstance().setSwitchingCamera(false, isFrontCamera); - } - }); - } - - @Override - public void onCameraSwitchError(String errorDescription) { - - } - }, cameraName)); - } - }); - } - - private void onAspectRatioRequested(float aspectRatio) { - /*if (aspectRatio < 0.0001f) { - return; - } - handler.post(() -> { - if (nativeCapturerObserver instanceof NativeCapturerObserver) { - int w; - int h; - if (aspectRatio < 1.0f) { - h = CAPTURE_HEIGHT; - w = (int) (h / aspectRatio); - } else { - w = CAPTURE_WIDTH; - h = (int) (w * aspectRatio); - } - if (w <= 0 || h <= 0) { - return; - } - NativeCapturerObserver observer = (NativeCapturerObserver) nativeCapturerObserver; - NativeAndroidVideoTrackSource source = observer.getNativeAndroidVideoTrackSource(); - source.adaptOutputFormat(new 
VideoSource.AspectRatio(w, h), w * h, new VideoSource.AspectRatio(h, w), w * h, CAPTURE_FPS); - } - });*/ - } - - private void onStateChanged(long ptr, int state) { - if (Build.VERSION.SDK_INT < 18) { - return; - } - AndroidUtilities.runOnUIThread(() -> { - if (nativePtr != ptr) { - return; - } - handler.post(() -> { - if (videoCapturer == null) { - return; - } - if (state == Instance.VIDEO_STATE_ACTIVE) { - videoCapturer.startCapture(CAPTURE_WIDTH, CAPTURE_HEIGHT, CAPTURE_FPS); - } else { - try { - videoCapturer.stopCapture(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - }); - }); - } - - private void onDestroy() { - if (Build.VERSION.SDK_INT < 18) { - return; - } - AndroidUtilities.runOnUIThread(() -> { - if (eglBase != null) { - eglBase.release(); - eglBase = null; - } - if (instance == this) { - instance = null; - } - handler.post(() -> { - if (videoCapturer != null) { - try { - videoCapturer.stopCapture(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - videoCapturer.dispose(); - videoCapturer = null; - } - if (videoCapturerSurfaceTextureHelper != null) { - videoCapturerSurfaceTextureHelper.dispose(); - videoCapturerSurfaceTextureHelper = null; - } - }); - try { - thread.quitSafely(); - } catch (Exception e) { - FileLog.e(e); - } - }); - } - - private EglBase.Context getSharedEGLContext() { - if (eglBase == null) { - eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); - } - return eglBase != null ? 
eglBase.getEglBaseContext() : null; - } - - private static native CapturerObserver nativeGetJavaVideoCapturerObserver(long ptr); -} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCapturerDevice.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCapturerDevice.java new file mode 100644 index 000000000..93e403ffa --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCapturerDevice.java @@ -0,0 +1,321 @@ +package org.telegram.messenger.voip; + +import android.annotation.TargetApi; +import android.content.Context; +import android.content.Intent; +import android.graphics.Point; +import android.media.projection.MediaProjection; +import android.os.Build; +import android.os.Handler; +import android.os.HandlerThread; +import android.view.Display; +import android.view.WindowManager; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.FileLog; +import org.webrtc.Camera1Enumerator; +import org.webrtc.Camera2Enumerator; +import org.webrtc.CameraEnumerator; +import org.webrtc.CameraVideoCapturer; +import org.webrtc.CapturerObserver; +import org.webrtc.EglBase; +import org.webrtc.Logging; +import org.webrtc.ScreenCapturerAndroid; +import org.webrtc.SurfaceTextureHelper; +import org.webrtc.VideoCapturer; + +@TargetApi(18) +public class VideoCapturerDevice { + + private static final int CAPTURE_WIDTH = Build.VERSION.SDK_INT <= 19 ? 480 : 1280; + private static final int CAPTURE_HEIGHT = Build.VERSION.SDK_INT <= 19 ? 
320 : 720; + private static final int CAPTURE_FPS = 30; + + public static EglBase eglBase; + + public static Intent mediaProjectionPermissionResultData; + + private VideoCapturer videoCapturer; + private SurfaceTextureHelper videoCapturerSurfaceTextureHelper; + + private HandlerThread thread; + private Handler handler; + private int currentWidth; + private int currentHeight; + + private long nativePtr; + + private static VideoCapturerDevice[] instance = new VideoCapturerDevice[2]; + private CapturerObserver nativeCapturerObserver; + + public VideoCapturerDevice(boolean screencast) { + if (Build.VERSION.SDK_INT < 18) { + return; + } + Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO); + Logging.d("VideoCapturerDevice", "device model = " + Build.MANUFACTURER + Build.MODEL); + AndroidUtilities.runOnUIThread(() -> { + if (eglBase == null) { + eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); + } + instance[screencast ? 1 : 0] = this; + thread = new HandlerThread("CallThread"); + thread.start(); + handler = new Handler(thread.getLooper()); + }); + } + + public static void checkScreenCapturerSize() { + if (instance[1] == null) { + return; + } + Point size = getScreenCaptureSize(); + if (instance[1].currentWidth != size.x || instance[1].currentHeight != size.y) { + instance[1].currentWidth = size.x; + instance[1].currentHeight = size.y; + VideoCapturerDevice device = instance[1]; + instance[1].handler.post(() -> { + if (device.videoCapturer != null) { + device.videoCapturer.changeCaptureFormat(size.x, size.y, CAPTURE_FPS); + } + }); + } + } + + private static Point getScreenCaptureSize() { + WindowManager wm = (WindowManager) ApplicationLoader.applicationContext.getSystemService(Context.WINDOW_SERVICE); + Display display = wm.getDefaultDisplay(); + Point size = new Point(); + display.getRealSize(size); + + float aspect; + if (size.x > size.y) { + aspect = size.y / (float) size.x; + } else { + aspect = size.x / (float) size.y; + } + int dx = -1; + int dy = -1; + 
for (int a = 1; a <= 100; a++) { + float val = a * aspect; + if (val == (int) val) { + if (size.x > size.y) { + dx = a; + dy = (int) (a * aspect); + } else { + dy = a; + dx = (int) (a * aspect); + } + break; + } + } + if (dx != -1 && aspect != 1) { + while (size.x > 1000 || size.y > 1000 || size.x % 4 != 0 || size.y % 4 != 0) { + size.x -= dx; + size.y -= dy; + if (size.x < 800 && size.y < 800) { + dx = -1; + break; + } + } + } + if (dx == -1 || aspect == 1) { + float scale = Math.max(size.x / 970.0f, size.y / 970.0f); + size.x = (int) Math.ceil((size.x / scale) / 4.0f) * 4; + size.y = (int) Math.ceil((size.y / scale) / 4.0f) * 4; + } + return size; + } + + private void init(long ptr, String deviceName) { + if (Build.VERSION.SDK_INT < 18) { + return; + } + AndroidUtilities.runOnUIThread(() -> { + if (eglBase == null) { + return; + } + nativePtr = ptr; + if ("screen".equals(deviceName)) { + if (Build.VERSION.SDK_INT < 21) { + return; + } + if (videoCapturer == null) { + videoCapturer = new ScreenCapturerAndroid(mediaProjectionPermissionResultData, new MediaProjection.Callback() { + @Override + public void onStop() { + AndroidUtilities.runOnUIThread(() -> { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().stopScreenCapture(); + } + }); + } + }); + + + Point size = getScreenCaptureSize(); + currentWidth = size.x; + currentHeight = size.y; + videoCapturerSurfaceTextureHelper = SurfaceTextureHelper.create("ScreenCapturerThread", eglBase.getEglBaseContext()); + handler.post(() -> { + if (videoCapturerSurfaceTextureHelper == null || nativePtr == 0) { + return; + } + nativeCapturerObserver = nativeGetJavaVideoCapturerObserver(nativePtr); + videoCapturer.initialize(videoCapturerSurfaceTextureHelper, ApplicationLoader.applicationContext, nativeCapturerObserver); + videoCapturer.startCapture(size.x, size.y, CAPTURE_FPS); + }); + } + } else { + CameraEnumerator enumerator = Camera2Enumerator.isSupported(ApplicationLoader.applicationContext) ? 
new Camera2Enumerator(ApplicationLoader.applicationContext) : new Camera1Enumerator(); + int index = -1; + String[] names = enumerator.getDeviceNames(); + for (int a = 0; a < names.length; a++) { + boolean isFrontFace = enumerator.isFrontFacing(names[a]); + if (isFrontFace == "front".equals(deviceName)) { + index = a; + break; + } + } + if (index == -1) { + return; + } + String cameraName = names[index]; + if (videoCapturer == null) { + videoCapturer = enumerator.createCapturer(cameraName, null); + videoCapturerSurfaceTextureHelper = SurfaceTextureHelper.create("VideoCapturerThread", eglBase.getEglBaseContext()); + handler.post(() -> { + if (videoCapturerSurfaceTextureHelper == null) { + return; + } + nativeCapturerObserver = nativeGetJavaVideoCapturerObserver(nativePtr); + videoCapturer.initialize(videoCapturerSurfaceTextureHelper, ApplicationLoader.applicationContext, nativeCapturerObserver); + videoCapturer.startCapture(CAPTURE_WIDTH, CAPTURE_HEIGHT, CAPTURE_FPS); + }); + } else { + handler.post(() -> ((CameraVideoCapturer) videoCapturer).switchCamera(new CameraVideoCapturer.CameraSwitchHandler() { + @Override + public void onCameraSwitchDone(boolean isFrontCamera) { + AndroidUtilities.runOnUIThread(() -> { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setSwitchingCamera(false, isFrontCamera); + } + }); + } + + @Override + public void onCameraSwitchError(String errorDescription) { + + } + }, cameraName)); + } + } + }); + } + + private void onAspectRatioRequested(float aspectRatio) { + /*if (aspectRatio < 0.0001f) { + return; + } + handler.post(() -> { + if (nativeCapturerObserver instanceof NativeCapturerObserver) { + int w; + int h; + if (aspectRatio < 1.0f) { + h = CAPTURE_HEIGHT; + w = (int) (h / aspectRatio); + } else { + w = CAPTURE_WIDTH; + h = (int) (w * aspectRatio); + } + if (w <= 0 || h <= 0) { + return; + } + NativeCapturerObserver observer = (NativeCapturerObserver) nativeCapturerObserver; + 
NativeAndroidVideoTrackSource source = observer.getNativeAndroidVideoTrackSource(); + source.adaptOutputFormat(new VideoSource.AspectRatio(w, h), w * h, new VideoSource.AspectRatio(h, w), w * h, CAPTURE_FPS); + } + });*/ + } + + private void onStateChanged(long ptr, int state) { + if (Build.VERSION.SDK_INT < 18) { + return; + } + AndroidUtilities.runOnUIThread(() -> { + if (nativePtr != ptr) { + return; + } + handler.post(() -> { + if (videoCapturer == null) { + return; + } + if (state == Instance.VIDEO_STATE_ACTIVE) { + videoCapturer.startCapture(CAPTURE_WIDTH, CAPTURE_HEIGHT, CAPTURE_FPS); + } else { + try { + videoCapturer.stopCapture(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + }); + }); + } + + private void onDestroy() { + if (Build.VERSION.SDK_INT < 18) { + return; + } + nativePtr = 0; + AndroidUtilities.runOnUIThread(() -> { +// if (eglBase != null) { +// eglBase.release(); +// eglBase = null; +// } + for (int a = 0; a < instance.length; a++) { + if (instance[a] == this) { + instance[a] = null; + break; + } + } + handler.post(() -> { + if (videoCapturer != null) { + try { + videoCapturer.stopCapture(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + videoCapturer.dispose(); + videoCapturer = null; + } + if (videoCapturerSurfaceTextureHelper != null) { + videoCapturerSurfaceTextureHelper.dispose(); + videoCapturerSurfaceTextureHelper = null; + } + }); + try { + thread.quitSafely(); + } catch (Exception e) { + FileLog.e(e); + } + }); + } + + private EglBase.Context getSharedEGLContext() { + if (eglBase == null) { + eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); + } + return eglBase != null ? 
eglBase.getEglBaseContext() : null; + } + + public static EglBase getEglBase() { + if (eglBase == null) { + eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); + } + return eglBase; + } + + private static native CapturerObserver nativeGetJavaVideoCapturerObserver(long ptr); +} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPActionsReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPActionsReceiver.java index 36fae5271..cb5fcf264 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPActionsReceiver.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPActionsReceiver.java @@ -11,8 +11,8 @@ import android.content.Intent; public class VoIPActionsReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { - if (VoIPBaseService.getSharedInstance() != null) { - VoIPBaseService.getSharedInstance().handleNotificationAction(intent); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().handleNotificationAction(intent); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java deleted file mode 100644 index fc8f5edde..000000000 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java +++ /dev/null @@ -1,1893 +0,0 @@ -package org.telegram.messenger.voip; - -import android.Manifest; -import android.annotation.SuppressLint; -import android.annotation.TargetApi; -import android.app.Activity; -import android.app.Notification; -import android.app.NotificationChannel; -import android.app.NotificationManager; -import android.app.PendingIntent; -import android.app.Service; -import android.bluetooth.BluetoothAdapter; -import android.bluetooth.BluetoothDevice; -import android.bluetooth.BluetoothHeadset; -import android.bluetooth.BluetoothProfile; -import android.content.BroadcastReceiver; 
-import android.content.ComponentName; -import android.content.Context; -import android.content.Intent; -import android.content.IntentFilter; -import android.content.SharedPreferences; -import android.content.pm.PackageManager; -import android.graphics.Bitmap; -import android.graphics.BitmapFactory; -import android.graphics.Canvas; -import android.graphics.Paint; -import android.graphics.Path; -import android.graphics.PorterDuff; -import android.graphics.PorterDuffXfermode; -import android.graphics.drawable.BitmapDrawable; -import android.graphics.drawable.Icon; -import android.hardware.Sensor; -import android.hardware.SensorEvent; -import android.hardware.SensorEventListener; -import android.hardware.SensorManager; -import android.media.AudioAttributes; -import android.media.AudioFormat; -import android.media.AudioManager; -import android.media.AudioTrack; -import android.media.MediaPlayer; -import android.media.MediaRouter; -import android.media.RingtoneManager; -import android.media.SoundPool; -import android.net.ConnectivityManager; -import android.net.NetworkInfo; -import android.net.Uri; -import android.os.Build; -import android.os.Bundle; -import android.os.PowerManager; -import android.os.SystemClock; -import android.os.Vibrator; -import android.telecom.CallAudioState; -import android.telecom.Connection; -import android.telecom.DisconnectCause; -import android.telecom.PhoneAccount; -import android.telecom.PhoneAccountHandle; -import android.telecom.TelecomManager; -import android.telephony.TelephonyManager; -import android.text.SpannableString; -import android.text.TextUtils; -import android.text.style.ForegroundColorSpan; -import android.view.View; -import android.view.WindowManager; -import android.widget.RemoteViews; - -import org.telegram.messenger.AccountInstance; -import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.ApplicationLoader; -import org.telegram.messenger.BuildConfig; -import org.telegram.messenger.BuildVars; 
-import org.telegram.messenger.ChatObject; -import org.telegram.messenger.ContactsController; -import org.telegram.messenger.FileLoader; -import org.telegram.messenger.FileLog; -import org.telegram.messenger.ImageLoader; -import org.telegram.messenger.LocaleController; -import org.telegram.messenger.MessagesController; -import org.telegram.messenger.NotificationCenter; -import org.telegram.messenger.NotificationsController; -import org.telegram.messenger.R; -import org.telegram.messenger.SharedConfig; -import org.telegram.messenger.StatsController; -import org.telegram.messenger.UserConfig; -import org.telegram.messenger.Utilities; -import org.telegram.tgnet.ConnectionsManager; -import org.telegram.tgnet.TLObject; -import org.telegram.tgnet.TLRPC; -import org.telegram.ui.ActionBar.BottomSheet; -import org.telegram.ui.ActionBar.Theme; -import org.telegram.ui.Components.AvatarDrawable; -import org.telegram.ui.Components.voip.VoIPHelper; -import org.telegram.ui.LaunchActivity; -import org.telegram.ui.VoIPPermissionActivity; -import org.webrtc.voiceengine.WebRtcAudioTrack; - -import java.lang.reflect.Field; -import java.lang.reflect.Method; -import java.util.ArrayList; - -/** - * Created by grishka on 21.07.17. 
- */ - -@SuppressLint("NewApi") -public abstract class VoIPBaseService extends Service implements SensorEventListener, AudioManager.OnAudioFocusChangeListener, VoIPController.ConnectionStateListener, NotificationCenter.NotificationCenterDelegate { - - protected int currentAccount = -1; - public static final int STATE_WAIT_INIT = Instance.STATE_WAIT_INIT; - public static final int STATE_WAIT_INIT_ACK = Instance.STATE_WAIT_INIT_ACK; - public static final int STATE_ESTABLISHED = Instance.STATE_ESTABLISHED; - public static final int STATE_FAILED = Instance.STATE_FAILED; - public static final int STATE_RECONNECTING = Instance.STATE_RECONNECTING; - public static final int STATE_CREATING = 6; - public static final int STATE_ENDED = 11; - public static final String ACTION_HEADSET_PLUG = "android.intent.action.HEADSET_PLUG"; - - protected static final int ID_ONGOING_CALL_NOTIFICATION = 201; - protected static final int ID_INCOMING_CALL_NOTIFICATION = 202; - - public static final int DISCARD_REASON_HANGUP = 1; - public static final int DISCARD_REASON_DISCONNECT = 2; - public static final int DISCARD_REASON_MISSED = 3; - public static final int DISCARD_REASON_LINE_BUSY = 4; - - public static final int AUDIO_ROUTE_EARPIECE = 0; - public static final int AUDIO_ROUTE_SPEAKER = 1; - public static final int AUDIO_ROUTE_BLUETOOTH = 2; - - protected static final boolean USE_CONNECTION_SERVICE = isDeviceCompatibleWithConnectionServiceAPI(); - - protected static final int PROXIMITY_SCREEN_OFF_WAKE_LOCK = 32; - protected static VoIPBaseService sharedInstance; - protected static Runnable setModeRunnable; - protected static final Object sync = new Object(); - protected NetworkInfo lastNetInfo; - protected int currentState = 0; - protected Notification ongoingCallNotification; - protected NativeInstance tgVoip; - protected boolean wasConnected; - - protected int currentStreamRequestId; - - protected TLRPC.Chat chat; - - protected boolean isVideoAvailable; - protected boolean 
notificationsDisabled; - protected boolean switchingCamera; - protected boolean isFrontFaceCamera = true; - protected String lastError; - protected PowerManager.WakeLock proximityWakelock; - protected PowerManager.WakeLock cpuWakelock; - protected boolean isProximityNear; - protected boolean isHeadsetPlugged; - protected int previousAudioOutput = -1; - protected ArrayList stateListeners = new ArrayList<>(); - protected MediaPlayer ringtonePlayer; - protected Vibrator vibrator; - protected SoundPool soundPool; - protected int spRingbackID; - protected int spFailedID; - protected int spEndId; - protected int spVoiceChatEndId; - protected int spVoiceChatStartId; - protected int spVoiceChatConnecting; - protected int spBusyId; - protected int spConnectingId; - protected int spPlayId; - protected int spStartRecordId; - protected int spAllowTalkId; - protected boolean needPlayEndSound; - protected boolean hasAudioFocus; - protected boolean micMute; - protected boolean unmutedByHold; - protected BluetoothAdapter btAdapter; - protected Instance.TrafficStats prevTrafficStats; - protected boolean isBtHeadsetConnected; - protected boolean screenOn; - - protected Runnable updateNotificationRunnable; - - protected Runnable onDestroyRunnable; - - protected Runnable switchingStreamTimeoutRunnable; - - protected boolean playedConnectedSound; - protected boolean switchingStream; - - protected int videoState = Instance.VIDEO_STATE_INACTIVE; - - public TLRPC.PhoneCall privateCall; - public ChatObject.Call groupCall; - - public boolean currentGroupModeStreaming = false; - - protected int mySource; - protected String myJson; - protected boolean createGroupCall; - protected int scheduleDate; - protected TLRPC.InputPeer groupCallPeer; - public boolean hasFewPeers; - protected String joinHash; - - protected long callStartTime; - protected boolean playingSound; - protected boolean isOutgoing; - public boolean videoCall; - protected long videoCapturer; - protected Runnable timeoutRunnable; 
- - protected int currentStreamType; - - private Boolean mHasEarpiece; - private boolean wasEstablished; - protected int signalBarCount; - protected int currentAudioState = Instance.AUDIO_STATE_ACTIVE; - protected int currentVideoState = Instance.VIDEO_STATE_INACTIVE; - protected boolean audioConfigured; - protected int audioRouteToSet = AUDIO_ROUTE_BLUETOOTH; - protected boolean speakerphoneStateToSet; - protected CallConnection systemCallConnection; - protected int callDiscardReason; - protected boolean bluetoothScoActive; - protected boolean needSwitchToBluetoothAfterScoActivates; - protected boolean didDeleteConnectionServiceContact; - protected Runnable connectingSoundRunnable; - - private String currentBluetoothDeviceName; - - public final SharedUIParams sharedUIParams = new SharedUIParams(); - - protected Runnable afterSoundRunnable = new Runnable() { - @Override - public void run() { - - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - am.abandonAudioFocus(VoIPBaseService.this); - am.unregisterMediaButtonEventReceiver(new ComponentName(VoIPBaseService.this, VoIPMediaButtonReceiver.class)); - if (!USE_CONNECTION_SERVICE && sharedInstance == null) { - if (isBtHeadsetConnected) { - am.stopBluetoothSco(); - am.setBluetoothScoOn(false); - bluetoothScoActive = false; - } - am.setSpeakerphoneOn(false); - } - - Utilities.globalQueue.postRunnable(() -> soundPool.release()); - Utilities.globalQueue.postRunnable(setModeRunnable = () -> { - synchronized (sync) { - if (setModeRunnable == null) { - return; - } - setModeRunnable = null; - } - try { - am.setMode(AudioManager.MODE_NORMAL); - } catch (SecurityException x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error setting audio more to normal", x); - } - } - }); - } - }; - - boolean fetchingBluetoothDeviceName; - private BluetoothProfile.ServiceListener serviceListener = new BluetoothProfile.ServiceListener() { - @Override - public void onServiceDisconnected(int profile) { - - } - - @Override - 
public void onServiceConnected(int profile, BluetoothProfile proxy) { - for (BluetoothDevice device : proxy.getConnectedDevices()) { - if (proxy.getConnectionState(device) != BluetoothProfile.STATE_CONNECTED) { - continue; - } - currentBluetoothDeviceName = device.getName(); - break; - } - BluetoothAdapter.getDefaultAdapter().closeProfileProxy(profile, proxy); - fetchingBluetoothDeviceName = false; - } - }; - - protected BroadcastReceiver receiver = new BroadcastReceiver() { - - @Override - public void onReceive(Context context, Intent intent) { - if (ACTION_HEADSET_PLUG.equals(intent.getAction())) { - isHeadsetPlugged = intent.getIntExtra("state", 0) == 1; - if (isHeadsetPlugged && proximityWakelock != null && proximityWakelock.isHeld()) { - proximityWakelock.release(); - } - if (isHeadsetPlugged) { - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (am.isSpeakerphoneOn()) { - previousAudioOutput = 0; - } else if (am.isBluetoothScoOn()) { - previousAudioOutput = 2; - } else { - previousAudioOutput = 1; - } - setAudioOutput(1); - } else { - if (previousAudioOutput >= 0) { - setAudioOutput(previousAudioOutput); - previousAudioOutput = -1; - } - } - isProximityNear = false; - updateOutputGainControlState(); - } else if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) { - updateNetworkType(); - } else if (BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED.equals(intent.getAction())) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("bt headset state = " + intent.getIntExtra(BluetoothProfile.EXTRA_STATE, 0)); - } - updateBluetoothHeadsetState(intent.getIntExtra(BluetoothProfile.EXTRA_STATE, BluetoothProfile.STATE_DISCONNECTED) == BluetoothProfile.STATE_CONNECTED); - } else if (AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED.equals(intent.getAction())) { - int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, AudioManager.SCO_AUDIO_STATE_DISCONNECTED); - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Bluetooth SCO state updated: 
" + state); - } - if (state == AudioManager.SCO_AUDIO_STATE_DISCONNECTED && isBtHeadsetConnected) { - if (!btAdapter.isEnabled() || btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET) != BluetoothProfile.STATE_CONNECTED) { - updateBluetoothHeadsetState(false); - return; - } - } - bluetoothScoActive = state == AudioManager.SCO_AUDIO_STATE_CONNECTED; - if (bluetoothScoActive) { - fetchBluetoothDeviceName(); - if (needSwitchToBluetoothAfterScoActivates) { - needSwitchToBluetoothAfterScoActivates = false; - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - am.setSpeakerphoneOn(false); - am.setBluetoothScoOn(true); - } - } - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } - } else if (TelephonyManager.ACTION_PHONE_STATE_CHANGED.equals(intent.getAction())) { - String state = intent.getStringExtra(TelephonyManager.EXTRA_STATE); - if (TelephonyManager.EXTRA_STATE_OFFHOOK.equals(state)) { - hangUp(); - } - } else if (Intent.ACTION_SCREEN_ON.equals(intent.getAction())) { - screenOn = true; - for (int i = 0; i< stateListeners.size(); i++) { - stateListeners.get(i).onScreenOnChange(screenOn); - } - } else if (Intent.ACTION_SCREEN_OFF.equals(intent.getAction())) { - screenOn = false; - for (int i = 0; i< stateListeners.size(); i++) { - stateListeners.get(i).onScreenOnChange(screenOn); - } - } - } - }; - - public boolean hasEarpiece() { - if (USE_CONNECTION_SERVICE) { - if (systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { - int routeMask = systemCallConnection.getCallAudioState().getSupportedRouteMask(); - return (routeMask & (CallAudioState.ROUTE_EARPIECE | CallAudioState.ROUTE_WIRED_HEADSET)) != 0; - } - } - if (((TelephonyManager) getSystemService(TELEPHONY_SERVICE)).getPhoneType() != TelephonyManager.PHONE_TYPE_NONE) { - return true; - } - if (mHasEarpiece != null) { - return mHasEarpiece; - } - - // not calculated yet, do it now - try { - AudioManager am = (AudioManager) 
getSystemService(AUDIO_SERVICE); - Method method = AudioManager.class.getMethod("getDevicesForStream", Integer.TYPE); - Field field = AudioManager.class.getField("DEVICE_OUT_EARPIECE"); - int earpieceFlag = field.getInt(null); - int bitmaskResult = (int) method.invoke(am, AudioManager.STREAM_VOICE_CALL); - - // check if masked by the earpiece flag - if ((bitmaskResult & earpieceFlag) == earpieceFlag) { - mHasEarpiece = Boolean.TRUE; - } else { - mHasEarpiece = Boolean.FALSE; - } - } catch (Throwable error) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error while checking earpiece! ", error); - } - mHasEarpiece = Boolean.TRUE; - } - - return mHasEarpiece; - } - - protected int getStatsNetworkType() { - int netType = StatsController.TYPE_WIFI; - if (lastNetInfo != null) { - if (lastNetInfo.getType() == ConnectivityManager.TYPE_MOBILE) { - netType = lastNetInfo.isRoaming() ? StatsController.TYPE_ROAMING : StatsController.TYPE_MOBILE; - } - } - return netType; - } - - protected void setSwitchingCamera(boolean switching, boolean isFrontFace) { - switchingCamera = switching; - if (!switching) { - isFrontFaceCamera = isFrontFace; - for (int a = 0; a < stateListeners.size(); a++) { - StateListener l = stateListeners.get(a); - l.onCameraSwitch(isFrontFaceCamera); - } - } - } - - public void registerStateListener(StateListener l) { - if (stateListeners.contains(l)) { - return; - } - stateListeners.add(l); - if (currentState != 0) { - l.onStateChanged(currentState); - } - if (signalBarCount != 0) { - l.onSignalBarsCountChanged(signalBarCount); - } - } - - public void unregisterStateListener(StateListener l) { - stateListeners.remove(l); - } - - public void editCallMember(TLObject object, boolean mute, int volume, Boolean raiseHand) { - if (object == null || groupCall == null) { - return; - } - TLRPC.TL_phone_editGroupCallParticipant req = new TLRPC.TL_phone_editGroupCallParticipant(); - req.call = groupCall.getInputGroupCall(); - if (object instanceof TLRPC.User) { - 
TLRPC.User user = (TLRPC.User) object; - req.participant = MessagesController.getInputPeer(user); - if (BuildVars.LOGS_ENABLED) { - FileLog.d("edit group call part id = " + req.participant.user_id + " access_hash = " + req.participant.user_id); - } - } else if (object instanceof TLRPC.Chat) { - TLRPC.Chat chat = (TLRPC.Chat) object; - req.participant = MessagesController.getInputPeer(chat); - if (BuildVars.LOGS_ENABLED) { - FileLog.d("edit group call part id = " + (req.participant.chat_id != 0 ? req.participant.chat_id : req.participant.channel_id) + " access_hash = " + req.participant.access_hash); - } - } - req.muted = mute; - if (volume >= 0) { - req.volume = volume; - req.flags |= 2; - } - if (raiseHand != null) { - req.raise_hand = raiseHand; - req.flags |= 4; - } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("edit group call flags = " + req.flags); - } - int account = currentAccount; - AccountInstance.getInstance(account).getConnectionsManager().sendRequest(req, (response, error) -> { - if (response != null) { - AccountInstance.getInstance(account).getMessagesController().processUpdates((TLRPC.Updates) response, false); - } - }); - } - - public boolean isMicMute() { - return micMute; - } - - public void toggleSpeakerphoneOrShowRouteSheet(Context context, boolean fromOverlayWindow) { - if (isBluetoothHeadsetConnected() && hasEarpiece()) { - BottomSheet.Builder builder = new BottomSheet.Builder(context) - .setTitle(LocaleController.getString("VoipOutputDevices", R.string.VoipOutputDevices), true) - .setItems(new CharSequence[]{ - LocaleController.getString("VoipAudioRoutingSpeaker", R.string.VoipAudioRoutingSpeaker), - isHeadsetPlugged ? LocaleController.getString("VoipAudioRoutingHeadset", R.string.VoipAudioRoutingHeadset) : LocaleController.getString("VoipAudioRoutingEarpiece", R.string.VoipAudioRoutingEarpiece), - currentBluetoothDeviceName != null ? 
currentBluetoothDeviceName : LocaleController.getString("VoipAudioRoutingBluetooth", R.string.VoipAudioRoutingBluetooth)}, - new int[]{R.drawable.calls_menu_speaker, - isHeadsetPlugged ? R.drawable.calls_menu_headset : R.drawable.calls_menu_phone, - R.drawable.calls_menu_bluetooth}, (dialog, which) -> { - if (getSharedInstance() == null) { - return; - } - setAudioOutput(which); - }); - - BottomSheet bottomSheet = builder.create(); - if (fromOverlayWindow) { - if (Build.VERSION.SDK_INT >= 26) { - bottomSheet.getWindow().setType(WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY); - } else { - bottomSheet.getWindow().setType(WindowManager.LayoutParams.TYPE_SYSTEM_ALERT); - } - } - builder.show(); - return; - } - if (USE_CONNECTION_SERVICE && systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { - if (hasEarpiece()) { - systemCallConnection.setAudioRoute(systemCallConnection.getCallAudioState().getRoute() == CallAudioState.ROUTE_SPEAKER ? CallAudioState.ROUTE_WIRED_OR_EARPIECE : CallAudioState.ROUTE_SPEAKER); - } else { - systemCallConnection.setAudioRoute(systemCallConnection.getCallAudioState().getRoute() == CallAudioState.ROUTE_BLUETOOTH ? 
CallAudioState.ROUTE_WIRED_OR_EARPIECE : CallAudioState.ROUTE_BLUETOOTH); - } - } else if (audioConfigured && !USE_CONNECTION_SERVICE) { - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (hasEarpiece()) { - am.setSpeakerphoneOn(!am.isSpeakerphoneOn()); - } else { - am.setBluetoothScoOn(!am.isBluetoothScoOn()); - } - updateOutputGainControlState(); - } else { - speakerphoneStateToSet = !speakerphoneStateToSet; - } - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } - } - - protected void setAudioOutput(int which) { - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (USE_CONNECTION_SERVICE && systemCallConnection != null) { - switch (which) { - case 2: - systemCallConnection.setAudioRoute(CallAudioState.ROUTE_BLUETOOTH); - break; - case 1: - systemCallConnection.setAudioRoute(CallAudioState.ROUTE_WIRED_OR_EARPIECE); - break; - case 0: - systemCallConnection.setAudioRoute(CallAudioState.ROUTE_SPEAKER); - break; - } - } else if (audioConfigured && !USE_CONNECTION_SERVICE) { - switch (which) { - case 2: - if (!bluetoothScoActive) { - needSwitchToBluetoothAfterScoActivates = true; - try { - am.startBluetoothSco(); - } catch (Throwable ignore) { - - } - } else { - am.setBluetoothScoOn(true); - am.setSpeakerphoneOn(false); - } - break; - case 1: - if (bluetoothScoActive) { - am.stopBluetoothSco(); - bluetoothScoActive = false; - } - am.setSpeakerphoneOn(false); - am.setBluetoothScoOn(false); - break; - case 0: - if (bluetoothScoActive) { - am.stopBluetoothSco(); - bluetoothScoActive = false; - } - am.setBluetoothScoOn(false); - am.setSpeakerphoneOn(true); - break; - } - updateOutputGainControlState(); - } else { - switch (which) { - case 2: - audioRouteToSet = AUDIO_ROUTE_BLUETOOTH; - speakerphoneStateToSet = false; - break; - case 1: - audioRouteToSet = AUDIO_ROUTE_EARPIECE; - speakerphoneStateToSet = false; - break; - case 0: - audioRouteToSet = AUDIO_ROUTE_SPEAKER; - speakerphoneStateToSet = true; - 
break; - } - } - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } - } - - public boolean isSpeakerphoneOn() { - if (USE_CONNECTION_SERVICE && systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { - int route = systemCallConnection.getCallAudioState().getRoute(); - return hasEarpiece() ? route == CallAudioState.ROUTE_SPEAKER : route == CallAudioState.ROUTE_BLUETOOTH; - } else if (audioConfigured && !USE_CONNECTION_SERVICE) { - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - return hasEarpiece() ? am.isSpeakerphoneOn() : am.isBluetoothScoOn(); - } - return speakerphoneStateToSet; - } - - public int getCurrentAudioRoute() { - if (USE_CONNECTION_SERVICE) { - if (systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { - switch (systemCallConnection.getCallAudioState().getRoute()) { - case CallAudioState.ROUTE_BLUETOOTH: - return AUDIO_ROUTE_BLUETOOTH; - case CallAudioState.ROUTE_EARPIECE: - case CallAudioState.ROUTE_WIRED_HEADSET: - return AUDIO_ROUTE_EARPIECE; - case CallAudioState.ROUTE_SPEAKER: - return AUDIO_ROUTE_SPEAKER; - } - } - return audioRouteToSet; - } - if (audioConfigured) { - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (am.isBluetoothScoOn()) { - return AUDIO_ROUTE_BLUETOOTH; - } else if (am.isSpeakerphoneOn()) { - return AUDIO_ROUTE_SPEAKER; - } else { - return AUDIO_ROUTE_EARPIECE; - } - } - return audioRouteToSet; - } - - public String getDebugString() { - return tgVoip != null ? 
tgVoip.getDebugInfo() : ""; - } - - public long getCallDuration() { - if (callStartTime == 0) { - return 0; - } - return SystemClock.elapsedRealtime() - callStartTime; - } - - public static VoIPBaseService getSharedInstance() { - return sharedInstance; - } - - public void stopRinging() { - if (ringtonePlayer != null) { - ringtonePlayer.stop(); - ringtonePlayer.release(); - ringtonePlayer = null; - } - if (vibrator != null) { - vibrator.cancel(); - vibrator = null; - } - } - - protected void showNotification(String name, Bitmap photo) { - Intent intent = new Intent(this, LaunchActivity.class).setAction(groupCall != null ? "voip_chat" : "voip"); - if (groupCall != null) { - intent.putExtra("currentAccount", currentAccount); - } - Notification.Builder builder = new Notification.Builder(this) - .setContentTitle(groupCall != null ? LocaleController.getString("VoipVoiceChat", R.string.VoipVoiceChat) : LocaleController.getString("VoipOutgoingCall", R.string.VoipOutgoingCall)) - .setContentText(name) - .setContentIntent(PendingIntent.getActivity(this, 50, intent, 0)); - if (groupCall != null) { - builder.setSmallIcon(isMicMute() ? R.drawable.voicechat_muted : R.drawable.voicechat_active); - } else { - builder.setSmallIcon(R.drawable.notification); - } - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) { - Intent endIntent = new Intent(this, VoIPActionsReceiver.class); - endIntent.setAction(getPackageName() + ".END_CALL"); - builder.addAction(R.drawable.ic_call_end_white_24dp, groupCall != null ? 
LocaleController.getString("VoipGroupLeaveAlertTitle", R.string.VoipGroupLeaveAlertTitle) : LocaleController.getString("VoipEndCall", R.string.VoipEndCall), PendingIntent.getBroadcast(this, 0, endIntent, PendingIntent.FLAG_UPDATE_CURRENT)); - builder.setPriority(Notification.PRIORITY_MAX); - } - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { - builder.setShowWhen(false); - } - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { - builder.setColor(0xff282e31); - builder.setColorized(true); - } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - builder.setColor(0xff2ca5e0); - } - if (Build.VERSION.SDK_INT >= 26) { - NotificationsController.checkOtherNotificationsChannel(); - builder.setChannelId(NotificationsController.OTHER_NOTIFICATIONS_CHANNEL); - } - if (photo != null) { - builder.setLargeIcon(photo); - } - ongoingCallNotification = builder.getNotification(); - startForeground(ID_ONGOING_CALL_NOTIFICATION, ongoingCallNotification); - } - - protected void startRingtoneAndVibration(int chatID) { - SharedPreferences prefs = MessagesController.getNotificationsSettings(currentAccount); - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - boolean needRing = am.getRingerMode() != AudioManager.RINGER_MODE_SILENT; - if (needRing) { - ringtonePlayer = new MediaPlayer(); - ringtonePlayer.setOnPreparedListener(mediaPlayer -> { - try { - ringtonePlayer.start(); - } catch (Throwable e) { - FileLog.e(e); - } - }); - ringtonePlayer.setLooping(true); - if (isHeadsetPlugged) { - ringtonePlayer.setAudioStreamType(AudioManager.STREAM_VOICE_CALL); - } else { - ringtonePlayer.setAudioStreamType(AudioManager.STREAM_RING); - if (!USE_CONNECTION_SERVICE) { - am.requestAudioFocus(this, AudioManager.STREAM_RING, AudioManager.AUDIOFOCUS_GAIN); - } - } - try { - String notificationUri; - if (prefs.getBoolean("custom_" + chatID, false)) { - notificationUri = prefs.getString("ringtone_path_" + chatID, 
RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE).toString()); - } else { - notificationUri = prefs.getString("CallsRingtonePath", RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE).toString()); - } - ringtonePlayer.setDataSource(this, Uri.parse(notificationUri)); - ringtonePlayer.prepareAsync(); - } catch (Exception e) { - FileLog.e(e); - if (ringtonePlayer != null) { - ringtonePlayer.release(); - ringtonePlayer = null; - } - } - int vibrate; - if (prefs.getBoolean("custom_" + chatID, false)) { - vibrate = prefs.getInt("calls_vibrate_" + chatID, 0); - } else { - vibrate = prefs.getInt("vibrate_calls", 0); - } - if ((vibrate != 2 && vibrate != 4 && (am.getRingerMode() == AudioManager.RINGER_MODE_VIBRATE || am.getRingerMode() == AudioManager.RINGER_MODE_NORMAL)) || (vibrate == 4 && am.getRingerMode() == AudioManager.RINGER_MODE_VIBRATE)) { - vibrator = (Vibrator) getSystemService(VIBRATOR_SERVICE); - long duration = 700; - if (vibrate == 1) { - duration /= 2; - } else if (vibrate == 3) { - duration *= 2; - } - vibrator.vibrate(new long[]{0, duration, 500}, 0); - } - } - } - - @Override - public void onDestroy() { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("=============== VoIPService STOPPING ==============="); - } - stopForeground(true); - stopRinging(); - if (ApplicationLoader.mainInterfacePaused || !ApplicationLoader.isScreenOn) { - MessagesController.getInstance(currentAccount).ignoreSetOnline = false; - } - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.appDidLogout); - SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE); - Sensor proximity = sm.getDefaultSensor(Sensor.TYPE_PROXIMITY); - if (proximity != null) { - sm.unregisterListener(this); - } - if (proximityWakelock != null && proximityWakelock.isHeld()) { - proximityWakelock.release(); - } - if (updateNotificationRunnable != null) { - Utilities.globalQueue.cancelRunnable(updateNotificationRunnable); - updateNotificationRunnable = 
null; - } - if (switchingStreamTimeoutRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(switchingStreamTimeoutRunnable); - switchingStreamTimeoutRunnable = null; - } - unregisterReceiver(receiver); - if (timeoutRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(timeoutRunnable); - timeoutRunnable = null; - } - super.onDestroy(); - sharedInstance = null; - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didEndCall)); - if (tgVoip != null) { - StatsController.getInstance(currentAccount).incrementTotalCallsTime(getStatsNetworkType(), (int) (getCallDuration() / 1000) % 5); - onTgVoipPreStop(); - if (tgVoip.isGroup()) { - NativeInstance instance = tgVoip; - Utilities.globalQueue.postRunnable(instance::stopGroup); - AccountInstance.getInstance(currentAccount).getConnectionsManager().cancelRequest(currentStreamRequestId, true); - currentStreamRequestId = 0; - } else { - Instance.FinalState state = tgVoip.stop(); - updateTrafficStats(state.trafficStats); - onTgVoipStop(state); - } - prevTrafficStats = null; - callStartTime = 0; - tgVoip = null; - Instance.destroyInstance(); - } - if (videoCapturer != 0) { - NativeInstance.destroyVideoCapturer(videoCapturer); - videoCapturer = 0; - } - cpuWakelock.release(); - if (!playingSound) { - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (!USE_CONNECTION_SERVICE) { - if (isBtHeadsetConnected) { - am.stopBluetoothSco(); - am.setBluetoothScoOn(false); - am.setSpeakerphoneOn(false); - bluetoothScoActive = false; - } - if (onDestroyRunnable == null) { - Utilities.globalQueue.postRunnable(setModeRunnable = () -> { - synchronized (sync) { - if (setModeRunnable == null) { - return; - } - setModeRunnable = null; - } - try { - am.setMode(AudioManager.MODE_NORMAL); - } catch (SecurityException x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error setting audio more to normal", x); - } - } - }); - } - am.abandonAudioFocus(this); - } - 
am.unregisterMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); - if (hasAudioFocus) { - am.abandonAudioFocus(this); - } - Utilities.globalQueue.postRunnable(() -> soundPool.release()); - } - - if (USE_CONNECTION_SERVICE) { - if (!didDeleteConnectionServiceContact) { - ContactsController.getInstance(currentAccount).deleteConnectionServiceContact(); - } - if (systemCallConnection != null && !playingSound) { - systemCallConnection.destroy(); - } - } - - ConnectionsManager.getInstance(currentAccount).setAppPaused(true, false); - VoIPHelper.lastCallTime = SystemClock.elapsedRealtime(); - } - - public abstract long getCallID(); - public abstract void hangUp(); - public abstract void hangUp(Runnable onDone); - public abstract void acceptIncomingCall(); - public abstract void declineIncomingCall(int reason, Runnable onDone); - public abstract void declineIncomingCall(); - protected abstract Class getUIActivityClass(); - public abstract CallConnection getConnectionAndStartCall(); - protected abstract void startRinging(); - public abstract void startRingtoneAndVibration(); - protected abstract void updateServerConfig(); - protected abstract void showNotification(); - - protected void onTgVoipPreStop() { - - } - - protected void onTgVoipStop(Instance.FinalState finalState) { - - } - - protected void initializeAccountRelatedThings() { - updateServerConfig(); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.appDidLogout); - ConnectionsManager.getInstance(currentAccount).setAppPaused(false, false); - } - - @SuppressLint("InvalidWakeLockTag") - @Override - public void onCreate() { - super.onCreate(); - if (BuildVars.LOGS_ENABLED) { - FileLog.d("=============== VoIPService STARTING ==============="); - } - try { - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != 
null) { - int outFramesPerBuffer = Integer.parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER)); - Instance.setBufferSize(outFramesPerBuffer); - } else { - Instance.setBufferSize(AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT) / 2); - } - - cpuWakelock = ((PowerManager) getSystemService(POWER_SERVICE)).newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip"); - cpuWakelock.acquire(); - - btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null; - - IntentFilter filter = new IntentFilter(); - filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION); - if (!USE_CONNECTION_SERVICE) { - filter.addAction(ACTION_HEADSET_PLUG); - if (btAdapter != null) { - filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED); - filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED); - } - filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED); - filter.addAction(Intent.ACTION_SCREEN_ON); - filter.addAction(Intent.ACTION_SCREEN_OFF); - } - registerReceiver(receiver, filter); - fetchBluetoothDeviceName(); - - am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); - - if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) { - try { - MediaRouter mr = (MediaRouter) getSystemService(Context.MEDIA_ROUTER_SERVICE); - if (Build.VERSION.SDK_INT < 24) { - int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); - updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } - } else { - MediaRouter.RouteInfo ri = mr.getSelectedRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO); - if (ri.getDeviceType() == MediaRouter.RouteInfo.DEVICE_TYPE_BLUETOOTH) { - int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); - updateBluetoothHeadsetState(headsetState == 
BluetoothProfile.STATE_CONNECTED); - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } - } else { - updateBluetoothHeadsetState(false); - } - } - } catch (Throwable e) { - FileLog.e(e); - } - } - } catch (Exception x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("error initializing voip controller", x); - } - callFailed(); - } - } - - protected void loadResources() { - if (chat != null && SharedConfig.useMediaStream) { - currentStreamType = AudioManager.STREAM_MUSIC; - if (Build.VERSION.SDK_INT >= 21) { - WebRtcAudioTrack.setAudioTrackUsageAttribute(AudioAttributes.USAGE_MEDIA); - } - } else { - currentStreamType = AudioManager.STREAM_VOICE_CALL; - if (Build.VERSION.SDK_INT >= 21) { - WebRtcAudioTrack.setAudioTrackUsageAttribute(AudioAttributes.USAGE_VOICE_COMMUNICATION); - } - } - WebRtcAudioTrack.setAudioStreamType(currentStreamType); - Utilities.globalQueue.postRunnable(() -> { - soundPool = new SoundPool(1, currentStreamType, 0); - spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1); - spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1); - spFailedID = soundPool.load(this, R.raw.voip_failed, 1); - spEndId = soundPool.load(this, R.raw.voip_end, 1); - spBusyId = soundPool.load(this, R.raw.voip_busy, 1); - spVoiceChatEndId = soundPool.load(this, R.raw.voicechat_leave, 1); - spVoiceChatStartId = soundPool.load(this, R.raw.voicechat_join, 1); - spVoiceChatConnecting = soundPool.load(this, R.raw.voicechat_connecting, 1); - spAllowTalkId = soundPool.load(this, R.raw.voip_onallowtalk, 1); - spStartRecordId = soundPool.load(this, R.raw.voip_recordstart, 1); - }); - } - - protected void dispatchStateChanged(int state) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("== Call " + getCallID() + " state changed to " + state + " =="); - } - currentState = state; - if (USE_CONNECTION_SERVICE && state == STATE_ESTABLISHED /*&& !wasEstablished*/ && systemCallConnection != null) { - systemCallConnection.setActive(); - } - for (int a = 
0; a < stateListeners.size(); a++) { - StateListener l = stateListeners.get(a); - l.onStateChanged(state); - } - } - - protected void updateTrafficStats(Instance.TrafficStats trafficStats) { - if (trafficStats == null) { - trafficStats = tgVoip.getTrafficStats(); - } - final long wifiSentDiff = trafficStats.bytesSentWifi - (prevTrafficStats != null ? prevTrafficStats.bytesSentWifi : 0); - final long wifiRecvdDiff = trafficStats.bytesReceivedWifi - (prevTrafficStats != null ? prevTrafficStats.bytesReceivedWifi : 0); - final long mobileSentDiff = trafficStats.bytesSentMobile - (prevTrafficStats != null ? prevTrafficStats.bytesSentMobile : 0); - final long mobileRecvdDiff = trafficStats.bytesReceivedMobile - (prevTrafficStats != null ? prevTrafficStats.bytesReceivedMobile : 0); - prevTrafficStats = trafficStats; - if (wifiSentDiff > 0) { - StatsController.getInstance(currentAccount).incrementSentBytesCount(StatsController.TYPE_WIFI, StatsController.TYPE_CALLS, wifiSentDiff); - } - if (wifiRecvdDiff > 0) { - StatsController.getInstance(currentAccount).incrementReceivedBytesCount(StatsController.TYPE_WIFI, StatsController.TYPE_CALLS, wifiRecvdDiff); - } - if (mobileSentDiff > 0) { - StatsController.getInstance(currentAccount).incrementSentBytesCount(lastNetInfo != null && lastNetInfo.isRoaming() ? StatsController.TYPE_ROAMING : StatsController.TYPE_MOBILE, StatsController.TYPE_CALLS, mobileSentDiff); - } - if (mobileRecvdDiff > 0) { - StatsController.getInstance(currentAccount).incrementReceivedBytesCount(lastNetInfo != null && lastNetInfo.isRoaming() ? 
StatsController.TYPE_ROAMING : StatsController.TYPE_MOBILE, StatsController.TYPE_CALLS, mobileRecvdDiff); - } - } - - @SuppressLint("InvalidWakeLockTag") - protected void configureDeviceForCall() { - needPlayEndSound = true; - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (!USE_CONNECTION_SERVICE) { - if (currentStreamType == AudioManager.STREAM_VOICE_CALL) { - Utilities.globalQueue.postRunnable(() -> { - try { - am.setMode(AudioManager.MODE_IN_COMMUNICATION); - } catch (Exception e) { - FileLog.e(e); - } - }); - } - am.requestAudioFocus(this, currentStreamType, AudioManager.AUDIOFOCUS_GAIN); - if (isBluetoothHeadsetConnected() && hasEarpiece()) { - switch (audioRouteToSet) { - case AUDIO_ROUTE_BLUETOOTH: - if (!bluetoothScoActive) { - needSwitchToBluetoothAfterScoActivates = true; - try { - am.startBluetoothSco(); - } catch (Throwable ignore) { - - } - } else { - am.setBluetoothScoOn(true); - am.setSpeakerphoneOn(false); - } - break; - case AUDIO_ROUTE_EARPIECE: - am.setBluetoothScoOn(false); - am.setSpeakerphoneOn(false); - break; - case AUDIO_ROUTE_SPEAKER: - am.setBluetoothScoOn(false); - am.setSpeakerphoneOn(true); - break; - } - } else if (isBluetoothHeadsetConnected()) { - am.setBluetoothScoOn(speakerphoneStateToSet); - } else { - am.setSpeakerphoneOn(speakerphoneStateToSet); - } - } - updateOutputGainControlState(); - audioConfigured = true; - - SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE); - Sensor proximity = sm.getDefaultSensor(Sensor.TYPE_PROXIMITY); - try { - if (proximity != null) { - proximityWakelock = ((PowerManager) getSystemService(Context.POWER_SERVICE)).newWakeLock(PROXIMITY_SCREEN_OFF_WAKE_LOCK, "telegram-voip-prx"); - sm.registerListener(this, proximity, SensorManager.SENSOR_DELAY_NORMAL); - } - } catch (Exception x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error initializing proximity sensor", x); - } - } - } - - private void fetchBluetoothDeviceName() { - if (fetchingBluetoothDeviceName) 
{ - return; - } - try { - currentBluetoothDeviceName = null; - fetchingBluetoothDeviceName = true; - BluetoothAdapter.getDefaultAdapter().getProfileProxy(this, serviceListener, BluetoothProfile.HEADSET); - } catch (Throwable e) { - FileLog.e(e); - } - } - - @SuppressLint("NewApi") - @Override - public void onSensorChanged(SensorEvent event) { - if (unmutedByHold || currentVideoState == Instance.VIDEO_STATE_ACTIVE || videoState == Instance.VIDEO_STATE_ACTIVE) { - return; - } - if (event.sensor.getType() == Sensor.TYPE_PROXIMITY) { - AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (isHeadsetPlugged || am.isSpeakerphoneOn() || (isBluetoothHeadsetConnected() && am.isBluetoothScoOn())) { - return; - } - boolean newIsNear = event.values[0] < Math.min(event.sensor.getMaximumRange(), 3); - checkIsNear(newIsNear); - } - } - - protected void checkIsNear() { - if (currentVideoState == Instance.VIDEO_STATE_ACTIVE || videoState == Instance.VIDEO_STATE_ACTIVE) { - checkIsNear(false); - } - } - - private void checkIsNear(boolean newIsNear) { - if (newIsNear != isProximityNear) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("proximity " + newIsNear); - } - isProximityNear = newIsNear; - try { - if (isProximityNear) { - proximityWakelock.acquire(); - } else { - proximityWakelock.release(1); // this is non-public API before L - } - } catch (Exception x) { - FileLog.e(x); - } - } - } - - @Override - public void onAccuracyChanged(Sensor sensor, int accuracy) { - - } - - public boolean isBluetoothHeadsetConnected() { - if (USE_CONNECTION_SERVICE && systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { - return (systemCallConnection.getCallAudioState().getSupportedRouteMask() & CallAudioState.ROUTE_BLUETOOTH) != 0; - } - return isBtHeadsetConnected; - } - - public void onAudioFocusChange(int focusChange) { - if (focusChange == AudioManager.AUDIOFOCUS_GAIN) { - hasAudioFocus = true; - } else { - hasAudioFocus = false; - } - } - - 
protected void updateBluetoothHeadsetState(boolean connected) { - if (connected == isBtHeadsetConnected) { - return; - } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("updateBluetoothHeadsetState: " + connected); - } - isBtHeadsetConnected = connected; - final AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - if (connected && !isRinging() && currentState != 0) { - if (bluetoothScoActive) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("SCO already active, setting audio routing"); - } - am.setSpeakerphoneOn(false); - am.setBluetoothScoOn(true); - } else { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("startBluetoothSco"); - } - needSwitchToBluetoothAfterScoActivates = true; - // some devices ignore startBluetoothSco when called immediately after the headset is connected, so delay it - AndroidUtilities.runOnUIThread(() -> { - try { - am.startBluetoothSco(); - } catch (Throwable ignore) { - - } - }, 500); - } - } else { - bluetoothScoActive = false; - } - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } - } - - public String getLastError() { - return lastError; - } - - public int getCallState() { - return currentState; - } - - public TLRPC.InputPeer getGroupCallPeer() { - return groupCallPeer; - } - - protected void updateNetworkType() { - if (tgVoip != null) { - if (tgVoip.isGroup()) { - - } else { - tgVoip.setNetworkType(getNetworkType()); - } - } else { - lastNetInfo = getActiveNetworkInfo(); - } - } - - protected int getNetworkType() { - final NetworkInfo info = lastNetInfo = getActiveNetworkInfo(); - int type = Instance.NET_TYPE_UNKNOWN; - if (info != null) { - switch (info.getType()) { - case ConnectivityManager.TYPE_MOBILE: - switch (info.getSubtype()) { - case TelephonyManager.NETWORK_TYPE_GPRS: - type = Instance.NET_TYPE_GPRS; - break; - case TelephonyManager.NETWORK_TYPE_EDGE: - case TelephonyManager.NETWORK_TYPE_1xRTT: - type = Instance.NET_TYPE_EDGE; - break; - case TelephonyManager.NETWORK_TYPE_UMTS: - case 
TelephonyManager.NETWORK_TYPE_EVDO_0: - type = Instance.NET_TYPE_3G; - break; - case TelephonyManager.NETWORK_TYPE_HSDPA: - case TelephonyManager.NETWORK_TYPE_HSPA: - case TelephonyManager.NETWORK_TYPE_HSPAP: - case TelephonyManager.NETWORK_TYPE_HSUPA: - case TelephonyManager.NETWORK_TYPE_EVDO_A: - case TelephonyManager.NETWORK_TYPE_EVDO_B: - type = Instance.NET_TYPE_HSPA; - break; - case TelephonyManager.NETWORK_TYPE_LTE: - type = Instance.NET_TYPE_LTE; - break; - default: - type = Instance.NET_TYPE_OTHER_MOBILE; - break; - } - break; - case ConnectivityManager.TYPE_WIFI: - type = Instance.NET_TYPE_WIFI; - break; - case ConnectivityManager.TYPE_ETHERNET: - type = Instance.NET_TYPE_ETHERNET; - break; - } - } - return type; - } - - protected NetworkInfo getActiveNetworkInfo() { - return ((ConnectivityManager) getSystemService(CONNECTIVITY_SERVICE)).getActiveNetworkInfo(); - } - - protected void callFailed() { - callFailed(tgVoip != null ? tgVoip.getLastError() : Instance.ERROR_UNKNOWN); - } - - protected Bitmap getRoundAvatarBitmap(TLObject userOrChat) { - Bitmap bitmap = null; - try { - if (userOrChat instanceof TLRPC.User) { - TLRPC.User user = (TLRPC.User) userOrChat; - if (user.photo != null && user.photo.photo_small != null) { - BitmapDrawable img = ImageLoader.getInstance().getImageFromMemory(user.photo.photo_small, null, "50_50"); - if (img != null) { - bitmap = img.getBitmap().copy(Bitmap.Config.ARGB_8888, true); - } else { - try { - BitmapFactory.Options opts = new BitmapFactory.Options(); - opts.inMutable = true; - bitmap = BitmapFactory.decodeFile(FileLoader.getPathToAttach(user.photo.photo_small, true).toString(), opts); - } catch (Throwable e) { - FileLog.e(e); - } - } - } - } else { - TLRPC.Chat chat = (TLRPC.Chat) userOrChat; - if (chat.photo != null && chat.photo.photo_small != null) { - BitmapDrawable img = ImageLoader.getInstance().getImageFromMemory(chat.photo.photo_small, null, "50_50"); - if (img != null) { - bitmap = 
img.getBitmap().copy(Bitmap.Config.ARGB_8888, true); - } else { - try { - BitmapFactory.Options opts = new BitmapFactory.Options(); - opts.inMutable = true; - bitmap = BitmapFactory.decodeFile(FileLoader.getPathToAttach(chat.photo.photo_small, true).toString(), opts); - } catch (Throwable e) { - FileLog.e(e); - } - } - } - } - } catch (Throwable e) { - FileLog.e(e); - } - if (bitmap == null) { - Theme.createDialogsResources(this); - AvatarDrawable placeholder; - if (userOrChat instanceof TLRPC.User) { - placeholder = new AvatarDrawable((TLRPC.User) userOrChat); - } else { - placeholder = new AvatarDrawable((TLRPC.Chat) userOrChat); - } - bitmap = Bitmap.createBitmap(AndroidUtilities.dp(42), AndroidUtilities.dp(42), Bitmap.Config.ARGB_8888); - placeholder.setBounds(0, 0, bitmap.getWidth(), bitmap.getHeight()); - placeholder.draw(new Canvas(bitmap)); - } - - Canvas canvas = new Canvas(bitmap); - Path circlePath = new Path(); - circlePath.addCircle(bitmap.getWidth() / 2, bitmap.getHeight() / 2, bitmap.getWidth() / 2, Path.Direction.CW); - circlePath.toggleInverseFillType(); - Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); - paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); - canvas.drawPath(circlePath, paint); - return bitmap; - } - - protected void showIncomingNotification(String name, CharSequence subText, TLObject userOrChat, boolean video, int additionalMemberCount) { - Intent intent = new Intent(this, LaunchActivity.class); - intent.setAction("voip"); - Notification.Builder builder = new Notification.Builder(this) - .setContentTitle(video ? 
LocaleController.getString("VoipInVideoCallBranding", R.string.VoipInVideoCallBranding) : LocaleController.getString("VoipInCallBranding", R.string.VoipInCallBranding)) - .setContentText(name) - .setSmallIcon(R.drawable.notification) - .setSubText(subText) - .setContentIntent(PendingIntent.getActivity(this, 0, intent, 0)); - Uri soundProviderUri = Uri.parse("content://" + BuildConfig.APPLICATION_ID + ".call_sound_provider/start_ringing"); - if (Build.VERSION.SDK_INT >= 26) { - SharedPreferences nprefs = MessagesController.getGlobalNotificationsSettings(); - int chanIndex = nprefs.getInt("calls_notification_channel", 0); - NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); - NotificationChannel oldChannel = nm.getNotificationChannel("incoming_calls2" + chanIndex); - if (oldChannel != null) { - nm.deleteNotificationChannel(oldChannel.getId()); - } - NotificationChannel existingChannel = nm.getNotificationChannel("incoming_calls3" + chanIndex); - boolean needCreate = true; - if (existingChannel != null) { - if (existingChannel.getImportance() < NotificationManager.IMPORTANCE_HIGH || !soundProviderUri.equals(existingChannel.getSound()) || existingChannel.getVibrationPattern() != null || existingChannel.shouldVibrate()) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("User messed up the notification channel; deleting it and creating a proper one"); - } - nm.deleteNotificationChannel("incoming_calls3" + chanIndex); - chanIndex++; - nprefs.edit().putInt("calls_notification_channel", chanIndex).commit(); - } else { - needCreate = false; - } - } - if (needCreate) { - AudioAttributes attrs = new AudioAttributes.Builder() - .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION) - .setLegacyStreamType(AudioManager.STREAM_RING) - .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION) - .build(); - NotificationChannel chan = new NotificationChannel("incoming_calls3" + chanIndex, LocaleController.getString("IncomingCalls", 
R.string.IncomingCalls), NotificationManager.IMPORTANCE_HIGH); - chan.setSound(soundProviderUri, attrs); - chan.enableVibration(false); - chan.enableLights(false); - chan.setBypassDnd(true); - try { - nm.createNotificationChannel(chan); - } catch (Exception e) { - FileLog.e(e); - this.stopSelf(); - return; - } - } - builder.setChannelId("incoming_calls3" + chanIndex); - } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - builder.setSound(soundProviderUri, AudioManager.STREAM_RING); - } - Intent endIntent = new Intent(this, VoIPActionsReceiver.class); - endIntent.setAction(getPackageName() + ".DECLINE_CALL"); - endIntent.putExtra("call_id", getCallID()); - CharSequence endTitle = LocaleController.getString("VoipDeclineCall", R.string.VoipDeclineCall); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { - endTitle = new SpannableString(endTitle); - ((SpannableString) endTitle).setSpan(new ForegroundColorSpan(0xFFF44336), 0, endTitle.length(), 0); - } - PendingIntent endPendingIntent = PendingIntent.getBroadcast(this, 0, endIntent, PendingIntent.FLAG_CANCEL_CURRENT); - builder.addAction(R.drawable.ic_call_end_white_24dp, endTitle, endPendingIntent); - Intent answerIntent = new Intent(this, VoIPActionsReceiver.class); - answerIntent.setAction(getPackageName() + ".ANSWER_CALL"); - answerIntent.putExtra("call_id", getCallID()); - CharSequence answerTitle = LocaleController.getString("VoipAnswerCall", R.string.VoipAnswerCall); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { - answerTitle = new SpannableString(answerTitle); - ((SpannableString) answerTitle).setSpan(new ForegroundColorSpan(0xFF00AA00), 0, answerTitle.length(), 0); - } - PendingIntent answerPendingIntent = PendingIntent.getBroadcast(this, 0, answerIntent, PendingIntent.FLAG_CANCEL_CURRENT); - builder.addAction(R.drawable.ic_call, answerTitle, answerPendingIntent); - builder.setPriority(Notification.PRIORITY_MAX); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) 
{ - builder.setShowWhen(false); - } - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - builder.setColor(0xff2ca5e0); - builder.setVibrate(new long[0]); - builder.setCategory(Notification.CATEGORY_CALL); - builder.setFullScreenIntent(PendingIntent.getActivity(this, 0, intent, 0), true); - if (userOrChat instanceof TLRPC.User) { - TLRPC.User user = (TLRPC.User) userOrChat; - if (!TextUtils.isEmpty(user.phone)) { - builder.addPerson("tel:" + user.phone); - } - } - } - Notification incomingNotification = builder.getNotification(); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - RemoteViews customView = new RemoteViews(getPackageName(), LocaleController.isRTL ? R.layout.call_notification_rtl : R.layout.call_notification); - customView.setTextViewText(R.id.name, name); - boolean subtitleVisible = true; - if (TextUtils.isEmpty(subText)) { - customView.setViewVisibility(R.id.subtitle, View.GONE); - if (UserConfig.getActivatedAccountsCount() > 1) { - TLRPC.User self = UserConfig.getInstance(currentAccount).getCurrentUser(); - customView.setTextViewText(R.id.title, video ? LocaleController.formatString("VoipInVideoCallBrandingWithName", R.string.VoipInVideoCallBrandingWithName, ContactsController.formatName(self.first_name, self.last_name)) : LocaleController.formatString("VoipInCallBrandingWithName", R.string.VoipInCallBrandingWithName, ContactsController.formatName(self.first_name, self.last_name))); - } else { - customView.setTextViewText(R.id.title, video ? 
LocaleController.getString("VoipInVideoCallBranding", R.string.VoipInVideoCallBranding) : LocaleController.getString("VoipInCallBranding", R.string.VoipInCallBranding)); - } - } else { - if (UserConfig.getActivatedAccountsCount() > 1) { - TLRPC.User self = UserConfig.getInstance(currentAccount).getCurrentUser(); - customView.setTextViewText(R.id.subtitle, LocaleController.formatString("VoipAnsweringAsAccount", R.string.VoipAnsweringAsAccount, ContactsController.formatName(self.first_name, self.last_name))); - } else { - customView.setViewVisibility(R.id.subtitle, View.GONE); - } - customView.setTextViewText(R.id.title, subText); - } - Bitmap avatar = getRoundAvatarBitmap(userOrChat); - customView.setTextViewText(R.id.answer_text, LocaleController.getString("VoipAnswerCall", R.string.VoipAnswerCall)); - customView.setTextViewText(R.id.decline_text, LocaleController.getString("VoipDeclineCall", R.string.VoipDeclineCall)); - customView.setImageViewBitmap(R.id.photo, avatar); - customView.setOnClickPendingIntent(R.id.answer_btn, answerPendingIntent); - customView.setOnClickPendingIntent(R.id.decline_btn, endPendingIntent); - builder.setLargeIcon(avatar); - - incomingNotification.headsUpContentView = incomingNotification.bigContentView = customView; - } - startForeground(ID_INCOMING_CALL_NOTIFICATION, incomingNotification); - startRingtoneAndVibration(); - } - - protected void callFailed(String error) { - try { - throw new Exception("Call " + getCallID() + " failed with error: " + error); - } catch (Exception x) { - FileLog.e(x); - } - lastError = error; - AndroidUtilities.runOnUIThread(() -> dispatchStateChanged(STATE_FAILED)); - if (TextUtils.equals(error, Instance.ERROR_LOCALIZED) && soundPool != null) { - playingSound = true; - Utilities.globalQueue.postRunnable(() -> soundPool.play(spFailedID, 1, 1, 0, 0, 1)); - AndroidUtilities.runOnUIThread(afterSoundRunnable, 1000); - } - if (USE_CONNECTION_SERVICE && systemCallConnection != null) { - 
systemCallConnection.setDisconnected(new DisconnectCause(DisconnectCause.ERROR)); - systemCallConnection.destroy(); - systemCallConnection = null; - } - stopSelf(); - } - - void callFailedFromConnectionService() { - if (isOutgoing) { - callFailed(Instance.ERROR_CONNECTION_SERVICE); - } else { - hangUp(); - } - } - - @Override - public void onConnectionStateChanged(int newState, boolean inTransition) { - if (newState == STATE_FAILED) { - callFailed(); - return; - } - if (newState == STATE_ESTABLISHED) { - if (connectingSoundRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(connectingSoundRunnable); - connectingSoundRunnable = null; - } - Utilities.globalQueue.postRunnable(() -> { - if (spPlayId != 0) { - soundPool.stop(spPlayId); - spPlayId = 0; - } - }); - if (groupCall == null && !wasEstablished) { - wasEstablished = true; - if (!isProximityNear && !privateCall.video) { - Vibrator vibrator = (Vibrator) getSystemService(VIBRATOR_SERVICE); - if (vibrator.hasVibrator()) { - vibrator.vibrate(100); - } - } - AndroidUtilities.runOnUIThread(new Runnable() { - @Override - public void run() { - if (tgVoip != null) { - StatsController.getInstance(currentAccount).incrementTotalCallsTime(getStatsNetworkType(), 5); - AndroidUtilities.runOnUIThread(this, 5000); - } - } - }, 5000); - if (isOutgoing) { - StatsController.getInstance(currentAccount).incrementSentItemsCount(getStatsNetworkType(), StatsController.TYPE_CALLS, 1); - } else { - StatsController.getInstance(currentAccount).incrementReceivedItemsCount(getStatsNetworkType(), StatsController.TYPE_CALLS, 1); - } - } - } - if (newState == STATE_RECONNECTING) { - Utilities.globalQueue.postRunnable(() -> { - if (spPlayId != 0) { - soundPool.stop(spPlayId); - } - spPlayId = soundPool.play(groupCall != null ? 
spVoiceChatConnecting : spConnectingId, 1, 1, 0, -1, 1); - }); - } - dispatchStateChanged(newState); - } - - public void playStartRecordSound() { - Utilities.globalQueue.postRunnable(() -> soundPool.play(spStartRecordId, 0.5f, 0.5f, 0, 0, 1)); - } - - public void playAllowTalkSound() { - Utilities.globalQueue.postRunnable(() -> soundPool.play(spAllowTalkId, 0.5f, 0.5f, 0, 0, 1)); - } - - @Override - public void onSignalBarCountChanged(int newCount) { - AndroidUtilities.runOnUIThread(() -> { - signalBarCount = newCount; - for (int a = 0; a < stateListeners.size(); a++) { - StateListener l = stateListeners.get(a); - l.onSignalBarsCountChanged(newCount); - } - }); - } - - public boolean isBluetoothOn() { - final AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - return am.isBluetoothScoOn(); - } - - public boolean isBluetoothWillOn() { - return needSwitchToBluetoothAfterScoActivates; - } - - public boolean isHeadsetPlugged() { - return isHeadsetPlugged; - } - - public void onMediaStateUpdated(int audioState, int videoState) { - AndroidUtilities.runOnUIThread(() -> { - currentAudioState = audioState; - currentVideoState = videoState; - checkIsNear(); - - for (int a = 0; a < stateListeners.size(); a++) { - StateListener l = stateListeners.get(a); - l.onMediaStateUpdated(audioState, videoState); - } - }); - } - - protected void callEnded() { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("Call " + getCallID() + " ended"); - } - if (groupCall != null && (!playedConnectedSound || onDestroyRunnable != null)) { - needPlayEndSound = false; - } - AndroidUtilities.runOnUIThread(() -> dispatchStateChanged(STATE_ENDED)); - int delay = 700; - Utilities.globalQueue.postRunnable(() -> { - if (spPlayId != 0) { - soundPool.stop(spPlayId); - spPlayId = 0; - } - }); - - if (connectingSoundRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(connectingSoundRunnable); - connectingSoundRunnable = null; - } - if (needPlayEndSound) { - playingSound = true; - if (groupCall 
== null) { - Utilities.globalQueue.postRunnable(() -> soundPool.play(spEndId, 1, 1, 0, 0, 1)); - } else { - Utilities.globalQueue.postRunnable(() -> soundPool.play(spVoiceChatEndId, 1.0f, 1.0f, 0, 0, 1), 100); - delay = 500; - } - AndroidUtilities.runOnUIThread(afterSoundRunnable, delay); - } - if (timeoutRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(timeoutRunnable); - timeoutRunnable = null; - } - endConnectionServiceCall(needPlayEndSound ? delay : 0); - stopSelf(); - } - - protected void endConnectionServiceCall(long delay) { - if (USE_CONNECTION_SERVICE) { - Runnable r = () -> { - if (systemCallConnection != null) { - switch (callDiscardReason) { - case DISCARD_REASON_HANGUP: - systemCallConnection.setDisconnected(new DisconnectCause(isOutgoing ? DisconnectCause.LOCAL : DisconnectCause.REJECTED)); - break; - case DISCARD_REASON_DISCONNECT: - systemCallConnection.setDisconnected(new DisconnectCause(DisconnectCause.ERROR)); - break; - case DISCARD_REASON_LINE_BUSY: - systemCallConnection.setDisconnected(new DisconnectCause(DisconnectCause.BUSY)); - break; - case DISCARD_REASON_MISSED: - systemCallConnection.setDisconnected(new DisconnectCause(isOutgoing ? 
DisconnectCause.CANCELED : DisconnectCause.MISSED)); - break; - default: - systemCallConnection.setDisconnected(new DisconnectCause(DisconnectCause.REMOTE)); - break; - } - systemCallConnection.destroy(); - systemCallConnection = null; - } - }; - if (delay > 0) { - AndroidUtilities.runOnUIThread(r, delay); - } else { - r.run(); - } - } - } - - public boolean isOutgoing() { - return isOutgoing; - } - - public void handleNotificationAction(Intent intent) { - if ((getPackageName() + ".END_CALL").equals(intent.getAction())) { - stopForeground(true); - hangUp(); - } else if ((getPackageName() + ".DECLINE_CALL").equals(intent.getAction())) { - stopForeground(true); - declineIncomingCall(DISCARD_REASON_LINE_BUSY, null); - } else if ((getPackageName() + ".ANSWER_CALL").equals(intent.getAction())) { - acceptIncomingCallFromNotification(); - } - } - - private void acceptIncomingCallFromNotification() { - showNotification(); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && (checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED || privateCall.video && checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)) { - try { - PendingIntent.getActivity(VoIPBaseService.this, 0, new Intent(VoIPBaseService.this, VoIPPermissionActivity.class).addFlags(Intent.FLAG_ACTIVITY_NEW_TASK), 0).send(); - } catch (Exception x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error starting permission activity", x); - } - } - return; - } - acceptIncomingCall(); - try { - PendingIntent.getActivity(VoIPBaseService.this, 0, new Intent(VoIPBaseService.this, getUIActivityClass()).setAction("voip"), 0).send(); - } catch (Exception x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error starting incall activity", x); - } - } - } - - public void updateOutputGainControlState() { - if (tgVoip != null) { - if (!USE_CONNECTION_SERVICE) { - final AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); - 
tgVoip.setAudioOutputGainControlEnabled(hasEarpiece() && !am.isSpeakerphoneOn() && !am.isBluetoothScoOn() && !isHeadsetPlugged); - tgVoip.setEchoCancellationStrength(isHeadsetPlugged || (hasEarpiece() && !am.isSpeakerphoneOn() && !am.isBluetoothScoOn() && !isHeadsetPlugged) ? 0 : 1); - } else { - final boolean isEarpiece = systemCallConnection.getCallAudioState().getRoute() == CallAudioState.ROUTE_EARPIECE; - tgVoip.setAudioOutputGainControlEnabled(isEarpiece); - tgVoip.setEchoCancellationStrength(isEarpiece ? 0 : 1); - } - } - } - - public int getAccount() { - return currentAccount; - } - - @Override - public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.appDidLogout) { - callEnded(); - } - } - - public static boolean isAnyKindOfCallActive() { - if (VoIPService.getSharedInstance() != null) { - return VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING; - } - return false; - } - - protected boolean isFinished() { - return currentState == STATE_ENDED || currentState == STATE_FAILED; - } - - protected boolean isRinging() { - return false; - } - - public int getCurrentAudioState() { - return currentAudioState; - } - - public int getCurrentVideoState() { - return currentVideoState; - } - - @TargetApi(Build.VERSION_CODES.O) - protected PhoneAccountHandle addAccountToTelecomManager() { - TelecomManager tm = (TelecomManager) getSystemService(TELECOM_SERVICE); - TLRPC.User self = UserConfig.getInstance(currentAccount).getCurrentUser(); - PhoneAccountHandle handle = new PhoneAccountHandle(new ComponentName(this, TelegramConnectionService.class), "" + self.id); - PhoneAccount account = new PhoneAccount.Builder(handle, ContactsController.formatName(self.first_name, self.last_name)) - .setCapabilities(PhoneAccount.CAPABILITY_SELF_MANAGED) - .setIcon(Icon.createWithResource(this, R.drawable.ic_launcher_dr)) - .setHighlightColor(0xff2ca5e0) - .addSupportedUriScheme("sip") - .build(); - 
tm.registerPhoneAccount(account); - return handle; - } - - private static boolean isDeviceCompatibleWithConnectionServiceAPI() { - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { - return false; - } - // some non-Google devices don't implement the ConnectionService API correctly so, sadly, - // we'll have to whitelist only a handful of known-compatible devices for now - return false;/*"angler".equals(Build.PRODUCT) // Nexus 6P - || "bullhead".equals(Build.PRODUCT) // Nexus 5X - || "sailfish".equals(Build.PRODUCT) // Pixel - || "marlin".equals(Build.PRODUCT) // Pixel XL - || "walleye".equals(Build.PRODUCT) // Pixel 2 - || "taimen".equals(Build.PRODUCT) // Pixel 2 XL - || "blueline".equals(Build.PRODUCT) // Pixel 3 - || "crosshatch".equals(Build.PRODUCT) // Pixel 3 XL - || MessagesController.getGlobalMainSettings().getBoolean("dbg_force_connection_service", false);*/ - } - - public interface StateListener { - default void onStateChanged(int state) { - - } - - default void onSignalBarsCountChanged(int count) { - - } - - default void onAudioSettingsChanged() { - - } - - default void onMediaStateUpdated(int audioState, int videoState) { - - } - - default void onCameraSwitch(boolean isFrontFace) { - - } - - default void onVideoAvailableChange(boolean isAvailable) { - - } - - default void onScreenOnChange(boolean screenOn) { - - } - } - - public class CallConnection extends Connection { - public CallConnection() { - setConnectionProperties(PROPERTY_SELF_MANAGED); - setAudioModeIsVoip(true); - } - - @Override - public void onCallAudioStateChanged(CallAudioState state) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("ConnectionService call audio state changed: " + state); - } - for (StateListener l : stateListeners) { - l.onAudioSettingsChanged(); - } - } - - @Override - public void onDisconnect() { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("ConnectionService onDisconnect"); - } - setDisconnected(new DisconnectCause(DisconnectCause.LOCAL)); - destroy(); - 
systemCallConnection = null; - hangUp(); - } - - @Override - public void onAnswer() { - acceptIncomingCallFromNotification(); - } - - @Override - public void onReject() { - needPlayEndSound = false; - declineIncomingCall(DISCARD_REASON_HANGUP, null); - } - - @Override - public void onShowIncomingCallUi() { - startRinging(); - } - - @Override - public void onStateChanged(int state) { - super.onStateChanged(state); - if (BuildVars.LOGS_ENABLED) { - FileLog.d("ConnectionService onStateChanged " + stateToString(state)); - } - if (state == Connection.STATE_ACTIVE) { - ContactsController.getInstance(currentAccount).deleteConnectionServiceContact(); - didDeleteConnectionServiceContact = true; - } - } - - @Override - public void onCallEvent(String event, Bundle extras) { - super.onCallEvent(event, extras); - if (BuildVars.LOGS_ENABLED) - FileLog.d("ConnectionService onCallEvent " + event); - } - - //undocumented API - public void onSilence() { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("onSlience"); - } - stopRinging(); - } - } - - public static class SharedUIParams { - public boolean tapToVideoTooltipWasShowed; - public boolean cameraAlertWasShowed; - public boolean wasVideoCall; - } -} diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java index bd26486d9..b0da6a1e9 100755 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java @@ -14,34 +14,83 @@ import android.annotation.TargetApi; import android.app.Activity; import android.app.KeyguardManager; import android.app.Notification; +import android.app.NotificationChannel; +import android.app.NotificationManager; import android.app.PendingIntent; +import android.app.Service; +import android.bluetooth.BluetoothAdapter; +import android.bluetooth.BluetoothDevice; +import android.bluetooth.BluetoothHeadset; +import 
android.bluetooth.BluetoothProfile; +import android.content.BroadcastReceiver; +import android.content.ComponentName; +import android.content.Context; import android.content.Intent; +import android.content.IntentFilter; import android.content.SharedPreferences; import android.content.pm.PackageManager; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.Path; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffXfermode; +import android.graphics.drawable.BitmapDrawable; +import android.graphics.drawable.Icon; +import android.hardware.Sensor; +import android.hardware.SensorEvent; +import android.hardware.SensorEventListener; +import android.hardware.SensorManager; +import android.media.AudioAttributes; +import android.media.AudioFormat; import android.media.AudioManager; +import android.media.AudioTrack; +import android.media.MediaPlayer; +import android.media.MediaRouter; +import android.media.RingtoneManager; +import android.media.SoundPool; import android.media.audiofx.AcousticEchoCanceler; import android.media.audiofx.NoiseSuppressor; +import android.net.ConnectivityManager; +import android.net.NetworkInfo; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.IBinder; import androidx.annotation.Nullable; +import android.os.PowerManager; import android.os.SystemClock; +import android.os.Vibrator; +import android.telecom.CallAudioState; +import android.telecom.Connection; +import android.telecom.DisconnectCause; +import android.telecom.PhoneAccount; +import android.telecom.PhoneAccountHandle; import android.telecom.TelecomManager; +import android.telephony.TelephonyManager; +import android.text.SpannableString; import android.text.TextUtils; +import android.text.style.ForegroundColorSpan; +import android.util.LruCache; import android.view.KeyEvent; +import android.view.View; +import 
android.view.WindowManager; +import android.widget.RemoteViews; import android.widget.Toast; -import org.json.JSONArray; import org.json.JSONObject; import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.BuildConfig; import org.telegram.messenger.BuildVars; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; +import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; +import org.telegram.messenger.ImageLoader; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; @@ -49,30 +98,42 @@ import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.NotificationsController; import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.StatsController; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; import org.telegram.messenger.XiaomiUtilities; import org.telegram.tgnet.ConnectionsManager; +import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.BottomSheet; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.Components.JoinCallAlert; import org.telegram.ui.Components.voip.VoIPHelper; import org.telegram.ui.LaunchActivity; import org.telegram.ui.VoIPFeedbackActivity; +import org.telegram.ui.VoIPPermissionActivity; import org.webrtc.VideoFrame; import org.webrtc.VideoSink; +import org.webrtc.voiceengine.WebRtcAudioTrack; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; +import java.lang.reflect.Field; +import java.lang.reflect.Method; import java.math.BigInteger; import 
java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Set; @SuppressLint("NewApi") -public class VoIPService extends VoIPBaseService { +public class VoIPService extends Service implements SensorEventListener, AudioManager.OnAudioFocusChangeListener, VoIPController.ConnectionStateListener, NotificationCenter.NotificationCenterDelegate { public static final int CALL_MIN_LAYER = 65; @@ -84,6 +145,139 @@ public class VoIPService extends VoIPBaseService { public static final int STATE_RINGING = 16; public static final int STATE_BUSY = 17; + public static final int STATE_WAIT_INIT = Instance.STATE_WAIT_INIT; + public static final int STATE_WAIT_INIT_ACK = Instance.STATE_WAIT_INIT_ACK; + public static final int STATE_ESTABLISHED = Instance.STATE_ESTABLISHED; + public static final int STATE_FAILED = Instance.STATE_FAILED; + public static final int STATE_RECONNECTING = Instance.STATE_RECONNECTING; + public static final int STATE_CREATING = 6; + public static final int STATE_ENDED = 11; + public static final String ACTION_HEADSET_PLUG = "android.intent.action.HEADSET_PLUG"; + + private static final int ID_ONGOING_CALL_NOTIFICATION = 201; + private static final int ID_INCOMING_CALL_NOTIFICATION = 202; + + public static final int QUALITY_SMALL = 0; + public static final int QUALITY_MEDIUM = 1; + public static final int QUALITY_FULL = 2; + + public static final int CAPTURE_DEVICE_CAMERA = 0; + public static final int CAPTURE_DEVICE_SCREEN = 1; + + public static final int DISCARD_REASON_HANGUP = 1; + public static final int DISCARD_REASON_DISCONNECT = 2; + public static final int DISCARD_REASON_MISSED = 3; + public static final int DISCARD_REASON_LINE_BUSY = 4; + + public static final int AUDIO_ROUTE_EARPIECE = 0; + public static final int AUDIO_ROUTE_SPEAKER = 1; + public static final int AUDIO_ROUTE_BLUETOOTH = 2; + + private static final boolean USE_CONNECTION_SERVICE = 
isDeviceCompatibleWithConnectionServiceAPI(); + + private int currentAccount = -1; + private static final int PROXIMITY_SCREEN_OFF_WAKE_LOCK = 32; + private static VoIPService sharedInstance; + private static Runnable setModeRunnable; + private static final Object sync = new Object(); + private NetworkInfo lastNetInfo; + private int currentState = 0; + private boolean wasConnected; + + private boolean reconnectScreenCapture; + + private int currentStreamRequestId; + + private TLRPC.Chat chat; + + private boolean isVideoAvailable; + private boolean notificationsDisabled; + private boolean switchingCamera; + private boolean isFrontFaceCamera = true; + private String lastError; + private PowerManager.WakeLock proximityWakelock; + private PowerManager.WakeLock cpuWakelock; + private boolean isProximityNear; + private boolean isHeadsetPlugged; + private int previousAudioOutput = -1; + private ArrayList stateListeners = new ArrayList<>(); + private MediaPlayer ringtonePlayer; + private Vibrator vibrator; + private SoundPool soundPool; + private int spRingbackID; + private int spFailedID; + private int spEndId; + private int spVoiceChatEndId; + private int spVoiceChatStartId; + private int spVoiceChatConnecting; + private int spBusyId; + private int spConnectingId; + private int spPlayId; + private int spStartRecordId; + private int spAllowTalkId; + private boolean needPlayEndSound; + private boolean hasAudioFocus; + private boolean micMute; + private boolean unmutedByHold; + private BluetoothAdapter btAdapter; + private Instance.TrafficStats prevTrafficStats; + private boolean isBtHeadsetConnected; + + private Runnable updateNotificationRunnable; + + private Runnable onDestroyRunnable; + + private Runnable switchingStreamTimeoutRunnable; + + private boolean playedConnectedSound; + private boolean switchingStream; + private boolean switchingAccount; + + public TLRPC.PhoneCall privateCall; + public ChatObject.Call groupCall; + + public boolean currentGroupModeStreaming; + 
+ private boolean createGroupCall; + private int scheduleDate; + private TLRPC.InputPeer groupCallPeer; + public boolean hasFewPeers; + private String joinHash; + + private int remoteVideoState = Instance.VIDEO_STATE_INACTIVE; + private TLRPC.TL_dataJSON myParams; + + private int[] mySource = new int[2]; + private NativeInstance[] tgVoip = new NativeInstance[2]; + private long[] captureDevice = new long[2]; + private boolean[] destroyCaptureDevice = {true, true}; + private int[] videoState = {Instance.VIDEO_STATE_INACTIVE, Instance.VIDEO_STATE_INACTIVE}; + + private long callStartTime; + private boolean playingSound; + private boolean isOutgoing; + public boolean videoCall; + private Runnable timeoutRunnable; + + private Boolean mHasEarpiece; + private boolean wasEstablished; + private int signalBarCount; + private int remoteAudioState = Instance.AUDIO_STATE_ACTIVE; + private boolean audioConfigured; + private int audioRouteToSet = AUDIO_ROUTE_BLUETOOTH; + private boolean speakerphoneStateToSet; + private CallConnection systemCallConnection; + private int callDiscardReason; + private boolean bluetoothScoActive; + private boolean bluetoothScoConnecting; + private boolean needSwitchToBluetoothAfterScoActivates; + private boolean didDeleteConnectionServiceContact; + private Runnable connectingSoundRunnable; + + public String currentBluetoothDeviceName; + + public final SharedUIParams sharedUIParams = new SharedUIParams(); + private TLRPC.User user; private int callReqId; @@ -111,13 +305,147 @@ public class VoIPService extends VoIPBaseService { private int classGuid; private long currentStreamRequestTimestamp; + public boolean micSwitching; + + private Runnable afterSoundRunnable = new Runnable() { + @Override + public void run() { + + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + am.abandonAudioFocus(VoIPService.this); + am.unregisterMediaButtonEventReceiver(new ComponentName(VoIPService.this, VoIPMediaButtonReceiver.class)); + if 
(!USE_CONNECTION_SERVICE && sharedInstance == null) { + if (isBtHeadsetConnected) { + am.stopBluetoothSco(); + am.setBluetoothScoOn(false); + bluetoothScoActive = false; + bluetoothScoConnecting = false; + } + am.setSpeakerphoneOn(false); + } + + Utilities.globalQueue.postRunnable(() -> soundPool.release()); + Utilities.globalQueue.postRunnable(setModeRunnable = () -> { + synchronized (sync) { + if (setModeRunnable == null) { + return; + } + setModeRunnable = null; + } + try { + am.setMode(AudioManager.MODE_NORMAL); + } catch (SecurityException x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error setting audio more to normal", x); + } + } + }); + } + }; + + boolean fetchingBluetoothDeviceName; + private BluetoothProfile.ServiceListener serviceListener = new BluetoothProfile.ServiceListener() { + @Override + public void onServiceDisconnected(int profile) { + + } + + @Override + public void onServiceConnected(int profile, BluetoothProfile proxy) { + for (BluetoothDevice device : proxy.getConnectedDevices()) { + if (proxy.getConnectionState(device) != BluetoothProfile.STATE_CONNECTED) { + continue; + } + currentBluetoothDeviceName = device.getName(); + break; + } + BluetoothAdapter.getDefaultAdapter().closeProfileProxy(profile, proxy); + fetchingBluetoothDeviceName = false; + } + }; + + private BroadcastReceiver receiver = new BroadcastReceiver() { + + @Override + public void onReceive(Context context, Intent intent) { + if (ACTION_HEADSET_PLUG.equals(intent.getAction())) { + isHeadsetPlugged = intent.getIntExtra("state", 0) == 1; + if (isHeadsetPlugged && proximityWakelock != null && proximityWakelock.isHeld()) { + proximityWakelock.release(); + } + if (isHeadsetPlugged) { + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (am.isSpeakerphoneOn()) { + previousAudioOutput = 0; + } else if (am.isBluetoothScoOn()) { + previousAudioOutput = 2; + } else { + previousAudioOutput = 1; + } + setAudioOutput(1); + } else { + if (previousAudioOutput >= 0) 
{ + setAudioOutput(previousAudioOutput); + previousAudioOutput = -1; + } + } + isProximityNear = false; + updateOutputGainControlState(); + } else if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) { + updateNetworkType(); + } else if (BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED.equals(intent.getAction())) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("bt headset state = " + intent.getIntExtra(BluetoothProfile.EXTRA_STATE, 0)); + } + updateBluetoothHeadsetState(intent.getIntExtra(BluetoothProfile.EXTRA_STATE, BluetoothProfile.STATE_DISCONNECTED) == BluetoothProfile.STATE_CONNECTED); + } else if (AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED.equals(intent.getAction())) { + int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, AudioManager.SCO_AUDIO_STATE_DISCONNECTED); + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Bluetooth SCO state updated: " + state); + } + if (state == AudioManager.SCO_AUDIO_STATE_DISCONNECTED && isBtHeadsetConnected) { + if (!btAdapter.isEnabled() || btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET) != BluetoothProfile.STATE_CONNECTED) { + updateBluetoothHeadsetState(false); + return; + } + } + bluetoothScoConnecting = state == AudioManager.SCO_AUDIO_STATE_CONNECTING; + bluetoothScoActive = state == AudioManager.SCO_AUDIO_STATE_CONNECTED; + if (bluetoothScoActive) { + fetchBluetoothDeviceName(); + if (needSwitchToBluetoothAfterScoActivates) { + needSwitchToBluetoothAfterScoActivates = false; + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + am.setSpeakerphoneOn(false); + am.setBluetoothScoOn(true); + } + } + for (VoIPService.StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } else if (TelephonyManager.ACTION_PHONE_STATE_CHANGED.equals(intent.getAction())) { + String state = intent.getStringExtra(TelephonyManager.EXTRA_STATE); + if (TelephonyManager.EXTRA_STATE_OFFHOOK.equals(state)) { + hangUp(); + } + } else if 
(Intent.ACTION_SCREEN_ON.equals(intent.getAction())) { + for (int i = 0; i< stateListeners.size(); i++) { + stateListeners.get(i).onScreenOnChange(true); + } + } else if (Intent.ACTION_SCREEN_OFF.equals(intent.getAction())) { + for (int i = 0; i< stateListeners.size(); i++) { + stateListeners.get(i).onScreenOnChange(false); + } + } + } + }; public boolean isFrontFaceCamera() { return isFrontFaceCamera; } public void setMicMute(boolean mute, boolean hold, boolean send) { - if (micMute == mute) { + if (micMute == mute || micSwitching) { return; } micMute = mute; @@ -129,7 +457,7 @@ public class VoIPService extends VoIPBaseService { } } if (send) { - editCallMember(UserConfig.getInstance(currentAccount).getCurrentUser(), mute, -1, null); + editCallMember(UserConfig.getInstance(currentAccount).getCurrentUser(), mute, null, null, null, null); Utilities.globalQueue.postRunnable(updateNotificationRunnable = () -> { if (updateNotificationRunnable == null) { return; @@ -140,8 +468,10 @@ public class VoIPService extends VoIPBaseService { } } unmutedByHold = !micMute && hold; - if (tgVoip != null) { - tgVoip.setMuteMicrophone(mute); + for (int a = 0; a < tgVoip.length; a++) { + if (tgVoip[a] != null) { + tgVoip[a].setMuteMicrophone(mute); + } } for (StateListener l : stateListeners) { l.onAudioSettingsChanged(); @@ -160,28 +490,142 @@ public class VoIPService extends VoIPBaseService { return false; } - private static class ProxyVideoSink implements VideoSink { + private final HashMap waitingFrameParticipant = new HashMap<>(); + private final LruCache proxyVideoSinkLruCache = new LruCache(6) { + @Override + protected void entryRemoved(boolean evicted, String key, ProxyVideoSink oldValue, ProxyVideoSink newValue) { + super.entryRemoved(evicted, key, oldValue, newValue); + tgVoip[CAPTURE_DEVICE_CAMERA].removeIncomingVideoOutput(oldValue.nativeInstance); + } + }; + + public boolean hasVideoCapturer() { + return captureDevice[CAPTURE_DEVICE_CAMERA] != 0; + } + + public void 
checkVideoFrame(TLRPC.TL_groupCallParticipant participant, boolean screencast) { + String endpointId = screencast ? participant.presentationEndpoint : participant.videoEndpoint; + if (endpointId == null) { + return; + } + if ((screencast && participant.hasPresentationFrame != ChatObject.VIDEO_FRAME_NO_FRAME) || (!screencast && participant.hasCameraFrame != ChatObject.VIDEO_FRAME_NO_FRAME)) { + return; + } + + if (proxyVideoSinkLruCache.get(endpointId) != null || (remoteSinks.get(endpointId) != null && waitingFrameParticipant.get(endpointId) == null)) { + if (screencast) { + participant.hasPresentationFrame = ChatObject.VIDEO_FRAME_HAS_FRAME; + } else { + participant.hasCameraFrame = ChatObject.VIDEO_FRAME_HAS_FRAME; + } + return; + } + if (waitingFrameParticipant.containsKey(endpointId)) { + waitingFrameParticipant.put(endpointId, participant); + if (screencast) { + participant.hasPresentationFrame = ChatObject.VIDEO_FRAME_REQUESTING; + } else { + participant.hasCameraFrame = ChatObject.VIDEO_FRAME_REQUESTING; + } + return; + } + if (screencast) { + participant.hasPresentationFrame = ChatObject.VIDEO_FRAME_REQUESTING; + } else { + participant.hasCameraFrame = ChatObject.VIDEO_FRAME_REQUESTING; + } + waitingFrameParticipant.put(endpointId, participant); + addRemoteSink(participant, screencast, new VideoSink() { + @Override + public void onFrame(VideoFrame frame) { + VideoSink thisSink = this; + if (frame != null && frame.getBuffer().getHeight() != 0 && frame.getBuffer().getWidth() != 0) { + AndroidUtilities.runOnUIThread(() -> { + TLRPC.TL_groupCallParticipant currentParticipant = waitingFrameParticipant.remove(endpointId); + ProxyVideoSink proxyVideoSink = remoteSinks.get(endpointId); + if (proxyVideoSink != null && proxyVideoSink.target == thisSink) { + proxyVideoSinkLruCache.put(endpointId, proxyVideoSink); + remoteSinks.remove(endpointId); + proxyVideoSink.setTarget(null); + } + if (currentParticipant != null) { + if (screencast) { + 
currentParticipant.hasPresentationFrame = ChatObject.VIDEO_FRAME_HAS_FRAME; + } else { + currentParticipant.hasCameraFrame = ChatObject.VIDEO_FRAME_HAS_FRAME; + } + } + if (groupCall != null) { + groupCall.updateVisibleParticipants(); + } + }); + } + } + }, null); + } + + public void clearRemoteSinks() { + proxyVideoSinkLruCache.evictAll(); + } + + public void setAudioRoute(int route) { + if (route == AUDIO_ROUTE_SPEAKER) { + setAudioOutput(0); + } else if (route == AUDIO_ROUTE_EARPIECE) { + setAudioOutput(1); + } else if (route == AUDIO_ROUTE_BLUETOOTH) { + setAudioOutput(2); + } + } + + public static class ProxyVideoSink implements VideoSink { private VideoSink target; private VideoSink background; + private long nativeInstance; + @Override synchronized public void onFrame(VideoFrame frame) { - if (target == null) { - return; + if (target != null) { + target.onFrame(frame); } - - target.onFrame(frame); if (background != null) { background.onFrame(frame); } } - synchronized public void setTarget(VideoSink target) { - this.target = target; + synchronized public void setTarget(VideoSink newTarget) { + if (target != newTarget) { + if (target != null) { + target.setParentSink(null); + } + target = newTarget; + if (target != null) { + target.setParentSink(this); + } + } } - synchronized public void setBackground(VideoSink background) { - this.background = background; + synchronized public void setBackground(VideoSink newBackground) { + if (background != null) { + background.setParentSink(null); + } + background = newBackground; + if (background != null) { + background.setParentSink(this); + } + } + + synchronized public void removeTarget(VideoSink target) { + if (this.target == target) { + this.target = null; + } + } + + synchronized public void removeBackground(VideoSink background) { + if (this.background == background) { + this.background = null; + } } synchronized public void swap() { @@ -192,8 +636,12 @@ public class VoIPService extends VoIPBaseService { } } - 
private ProxyVideoSink localSink; - private ProxyVideoSink remoteSink; + private ProxyVideoSink[] localSink = new ProxyVideoSink[2]; + private ProxyVideoSink[] remoteSink = new ProxyVideoSink[2]; + private ProxyVideoSink[] currentBackgroundSink = new ProxyVideoSink[2]; + private String[] currentBackgroundEndpointId = new String[2]; + + private HashMap remoteSinks = new HashMap<>(); @Nullable @Override @@ -253,8 +701,10 @@ public class VoIPService extends VoIPBaseService { } } loadResources(); - localSink = new ProxyVideoSink(); - remoteSink = new ProxyVideoSink(); + for (int a = 0; a < localSink.length; a++) { + localSink[a] = new ProxyVideoSink(); + remoteSink[a] = new ProxyVideoSink(); + } try { AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); isHeadsetPlugged = am.isWiredHeadsetOn(); @@ -271,8 +721,16 @@ public class VoIPService extends VoIPBaseService { } if (videoCall) { - videoCapturer = NativeInstance.createVideoCapturer(localSink, isFrontFaceCamera); - videoState = Instance.VIDEO_STATE_ACTIVE; + if (Build.VERSION.SDK_INT < 23 || checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) { + captureDevice[CAPTURE_DEVICE_CAMERA] = NativeInstance.createVideoCapturer(localSink[CAPTURE_DEVICE_CAMERA], isFrontFaceCamera ? 
1 : 0); + if (chatID != 0) { + videoState[CAPTURE_DEVICE_CAMERA] = Instance.VIDEO_STATE_PAUSED; + } else { + videoState[CAPTURE_DEVICE_CAMERA] = Instance.VIDEO_STATE_ACTIVE; + } + } else { + videoState[CAPTURE_DEVICE_CAMERA] = Instance.VIDEO_STATE_PAUSED; + } if (!isBtHeadsetConnected && !isHeadsetPlugged) { setAudioOutput(0); } @@ -333,12 +791,6 @@ public class VoIPService extends VoIPBaseService { if (videoCall) { isVideoAvailable = true; } - if (videoCall && (Build.VERSION.SDK_INT < 23 || checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED)) { - videoCapturer = NativeInstance.createVideoCapturer(localSink, isFrontFaceCamera); - videoState = Instance.VIDEO_STATE_ACTIVE; - } else { - videoState = Instance.VIDEO_STATE_INACTIVE; - } if (videoCall && !isBtHeadsetConnected && !isHeadsetPlugged) { setAudioOutput(0); } @@ -355,84 +807,8 @@ public class VoIPService extends VoIPBaseService { return START_NOT_STICKY; } - @Override - public void onCreate() { - super.onCreate(); - if (callIShouldHavePutIntoIntent != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { - NotificationsController.checkOtherNotificationsChannel(); - Notification.Builder bldr = new Notification.Builder(this, NotificationsController.OTHER_NOTIFICATIONS_CHANNEL) - .setContentTitle(LocaleController.getString("VoipOutgoingCall", R.string.VoipOutgoingCall)) - .setShowWhen(false); - if (groupCall != null) { - bldr.setSmallIcon(isMicMute() ? 
R.drawable.voicechat_muted : R.drawable.voicechat_active); - } else { - bldr.setSmallIcon(R.drawable.notification); - } - startForeground(ID_ONGOING_CALL_NOTIFICATION, bldr.build()); - } - } - - @Override - protected void updateServerConfig() { - final SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); - Instance.setGlobalServerConfig(preferences.getString("voip_server_config", "{}")); - ConnectionsManager.getInstance(currentAccount).sendRequest(new TLRPC.TL_phone_getCallConfig(), (response, error) -> { - if (error == null) { - String data = ((TLRPC.TL_dataJSON) response).data; - Instance.setGlobalServerConfig(data); - preferences.edit().putString("voip_server_config", data).commit(); - } - }); - } - - @Override - protected void onTgVoipPreStop() { - /*if(BuildConfig.DEBUG){ - String debugLog=controller.getDebugLog(); - TLRPC.TL_phone_saveCallDebug req=new TLRPC.TL_phone_saveCallDebug(); - req.debug=new TLRPC.TL_dataJSON(); - req.debug.data=debugLog; - req.peer=new TLRPC.TL_inputPhoneCall(); - req.peer.access_hash=call.access_hash; - req.peer.id=call.id; - ConnectionsManager.getInstance(currentAccount).sendRequest(req, new RequestDelegate(){ - @Override - public void run(TLObject response, TLRPC.TL_error error){ - if (BuildVars.LOGS_ENABLED) { - FileLog.d("Sent debug logs, response=" + response); - } - } - }); - }*/ - } - - @Override - protected void onTgVoipStop(Instance.FinalState finalState) { - if (user == null) { - return; - } - if (needRateCall || forceRating || finalState.isRatingSuggested) { - startRatingActivity(); - needRateCall = false; - } - if (needSendDebugLog && finalState.debugLog != null) { - TLRPC.TL_phone_saveCallDebug req = new TLRPC.TL_phone_saveCallDebug(); - req.debug = new TLRPC.TL_dataJSON(); - req.debug.data = finalState.debugLog; - req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.access_hash = privateCall.access_hash; - req.peer.id = privateCall.id; - 
ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("Sent debug logs, response = " + response); - } - }); - needSendDebugLog = false; - } - } - public static VoIPService getSharedInstance() { - return sharedInstance instanceof VoIPService ? ((VoIPService) sharedInstance) : null; + return sharedInstance; } public TLRPC.User getUser() { @@ -443,12 +819,19 @@ public class VoIPService extends VoIPBaseService { return chat; } + public void setNoiseSupressionEnabled(boolean enabled) { + if (tgVoip[CAPTURE_DEVICE_CAMERA] == null) { + return; + } + tgVoip[CAPTURE_DEVICE_CAMERA].setNoiseSuppressionEnabled(enabled); + } + public void setGroupCallHash(String hash) { if (!currentGroupModeStreaming || TextUtils.isEmpty(hash) || hash.equals(joinHash)) { return; } joinHash = hash; - createGroupInstance(false); + createGroupInstance(CAPTURE_DEVICE_CAMERA, false); } public int getCallerId() { @@ -459,18 +842,6 @@ public class VoIPService extends VoIPBaseService { } } - public void hangUp() { - hangUp(0, null); - } - - public void hangUp(int discard) { - hangUp(discard, null); - } - - public void hangUp(Runnable onDone) { - hangUp(0, onDone); - } - public void hangUp(int discard, Runnable onDone) { declineIncomingCall(currentState == STATE_RINGING || (currentState == STATE_WAITING && isOutgoing) ? 
DISCARD_REASON_MISSED : DISCARD_REASON_HANGUP, onDone); if (groupCall != null) { @@ -495,7 +866,7 @@ public class VoIPService extends VoIPBaseService { } else { TLRPC.TL_phone_leaveGroupCall req = new TLRPC.TL_phone_leaveGroupCall(); req.call = groupCall.getInputGroupCall(); - req.source = mySource; + req.source = mySource[CAPTURE_DEVICE_CAMERA]; ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { if (response instanceof TLRPC.TL_updates) { TLRPC.TL_updates updates = (TLRPC.TL_updates) response; @@ -673,47 +1044,7 @@ public class VoIPService extends VoIPBaseService { }), ConnectionsManager.RequestFlagFailOnServerErrors); } - protected void startRinging() { - if (currentState == STATE_WAITING_INCOMING) { - return; - } - if (USE_CONNECTION_SERVICE && systemCallConnection != null) { - systemCallConnection.setRinging(); - } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("starting ringing for call " + privateCall.id); - } - dispatchStateChanged(STATE_WAITING_INCOMING); - if (!notificationsDisabled && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - showIncomingNotification(ContactsController.formatName(user.first_name, user.last_name), null, user, privateCall.video, 0); - if (BuildVars.LOGS_ENABLED) { - FileLog.d("Showing incoming call notification"); - } - } else { - startRingtoneAndVibration(user.id); - if (BuildVars.LOGS_ENABLED) { - FileLog.d("Starting incall activity for incoming call"); - } - try { - PendingIntent.getActivity(VoIPService.this, 12345, new Intent(VoIPService.this, LaunchActivity.class).setAction("voip"), 0).send(); - } catch (Exception x) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error starting incall activity", x); - } - } - } - } - - @Override - public void startRingtoneAndVibration() { - if (!startedRinging) { - startRingtoneAndVibration(user.id); - startedRinging = true; - } - } - - @Override - protected boolean isRinging() { + private boolean isRinging() { return currentState == 
STATE_WAITING_INCOMING; } @@ -721,245 +1052,254 @@ public class VoIPService extends VoIPBaseService { return currentState != STATE_WAIT_INIT && currentState != STATE_CREATING; } - public void acceptIncomingCall() { - MessagesController.getInstance(currentAccount).ignoreSetOnline = false; - stopRinging(); - showNotification(); - configureDeviceForCall(); - startConnectingSound(); - dispatchStateChanged(STATE_EXCHANGING_KEYS); - AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didStartedCall)); - final MessagesStorage messagesStorage = MessagesStorage.getInstance(currentAccount); - TLRPC.TL_messages_getDhConfig req = new TLRPC.TL_messages_getDhConfig(); - req.random_length = 256; - req.version = messagesStorage.getLastSecretVersion(); - ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { - if (error == null) { - TLRPC.messages_DhConfig res = (TLRPC.messages_DhConfig) response; - if (response instanceof TLRPC.TL_messages_dhConfig) { - if (!Utilities.isGoodPrime(res.p, res.g)) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("stopping VoIP service, bad prime"); - } - callFailed(); - return; - } - - messagesStorage.setSecretPBytes(res.p); - messagesStorage.setSecretG(res.g); - messagesStorage.setLastSecretVersion(res.version); - MessagesStorage.getInstance(currentAccount).saveSecretParams(messagesStorage.getLastSecretVersion(), messagesStorage.getSecretG(), messagesStorage.getSecretPBytes()); - } - byte[] salt = new byte[256]; - for (int a = 0; a < 256; a++) { - salt[a] = (byte) ((byte) (Utilities.random.nextDouble() * 256) ^ res.random[a]); - } - if (privateCall == null) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("call is null"); - } - callFailed(); - return; - } - a_or_b = salt; - BigInteger g_b = BigInteger.valueOf(messagesStorage.getSecretG()); - BigInteger p = new BigInteger(1, messagesStorage.getSecretPBytes()); - g_b = g_b.modPow(new BigInteger(1, salt), p); - 
g_a_hash = privateCall.g_a_hash; - - byte[] g_b_bytes = g_b.toByteArray(); - if (g_b_bytes.length > 256) { - byte[] correctedAuth = new byte[256]; - System.arraycopy(g_b_bytes, 1, correctedAuth, 0, 256); - g_b_bytes = correctedAuth; - } - - TLRPC.TL_phone_acceptCall req1 = new TLRPC.TL_phone_acceptCall(); - req1.g_b = g_b_bytes; - req1.peer = new TLRPC.TL_inputPhoneCall(); - req1.peer.id = privateCall.id; - req1.peer.access_hash = privateCall.access_hash; - req1.protocol = new TLRPC.TL_phoneCallProtocol(); - req1.protocol.udp_p2p = req1.protocol.udp_reflector = true; - req1.protocol.min_layer = CALL_MIN_LAYER; - req1.protocol.max_layer = Instance.getConnectionMaxLayer(); - req1.protocol.library_versions.addAll(Instance.AVAILABLE_VERSIONS); - ConnectionsManager.getInstance(currentAccount).sendRequest(req1, (response1, error1) -> AndroidUtilities.runOnUIThread(() -> { - if (error1 == null) { - if (BuildVars.LOGS_ENABLED) { - FileLog.w("accept call ok! " + response1); - } - privateCall = ((TLRPC.TL_phone_phoneCall) response1).phone_call; - if (privateCall instanceof TLRPC.TL_phoneCallDiscarded) { - onCallUpdated(privateCall); - } - } else { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("Error on phone.acceptCall: " + error1); - } - callFailed(); - } - }), ConnectionsManager.RequestFlagFailOnServerErrors); - } else { - callFailed(); - } - }); - } - - public void declineIncomingCall() { - declineIncomingCall(DISCARD_REASON_HANGUP, null); - } - public void requestVideoCall() { - if (tgVoip == null) { + if (tgVoip[CAPTURE_DEVICE_CAMERA] == null) { return; } - tgVoip.setupOutgoingVideo(localSink, isFrontFaceCamera); + tgVoip[CAPTURE_DEVICE_CAMERA].setupOutgoingVideo(localSink[CAPTURE_DEVICE_CAMERA], isFrontFaceCamera); } public void switchCamera() { - if (tgVoip == null || switchingCamera) { - if (videoCapturer != 0 && !switchingCamera) { - NativeInstance.switchCameraCapturer(videoCapturer, !isFrontFaceCamera); + if (tgVoip[CAPTURE_DEVICE_CAMERA] == null || 
!tgVoip[CAPTURE_DEVICE_CAMERA].hasVideoCapturer() || switchingCamera) { + if (captureDevice[CAPTURE_DEVICE_CAMERA] != 0 && !switchingCamera) { + NativeInstance.switchCameraCapturer(captureDevice[CAPTURE_DEVICE_CAMERA], !isFrontFaceCamera); } return; } switchingCamera = true; - tgVoip.switchCamera(!isFrontFaceCamera); + tgVoip[CAPTURE_DEVICE_CAMERA].switchCamera(!isFrontFaceCamera); } - public void setVideoState(int videoState) { - if (tgVoip == null) { - if (videoCapturer != 0) { - this.videoState = videoState; - NativeInstance.setVideoStateCapturer(videoCapturer, videoState); - } else if (videoState == Instance.VIDEO_STATE_ACTIVE && currentState != STATE_BUSY && currentState != STATE_ENDED) { - videoCapturer = NativeInstance.createVideoCapturer(localSink, isFrontFaceCamera); - this.videoState = Instance.VIDEO_STATE_ACTIVE; + public void createCaptureDevice(boolean screencast) { + int index = screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA; + int deviceType; + if (screencast) { + deviceType = 2; + } else { + deviceType = isFrontFaceCamera ? 1 : 0; + } + if (index == CAPTURE_DEVICE_SCREEN) { + if (captureDevice[index] != 0) { + return; + } + captureDevice[index] = NativeInstance.createVideoCapturer(localSink[index], deviceType); + createGroupInstance(CAPTURE_DEVICE_SCREEN, false); + setVideoState(true, Instance.VIDEO_STATE_ACTIVE); + AccountInstance.getInstance(currentAccount).getNotificationCenter().postNotificationName(NotificationCenter.groupCallScreencastStateChanged); + } else { + if (captureDevice[index] != 0 || tgVoip[index] == null) { + if (tgVoip[index] != null && captureDevice[index] != 0) { + tgVoip[index].activateVideoCapturer(captureDevice[index]); + } + return; + } + captureDevice[index] = NativeInstance.createVideoCapturer(localSink[index], deviceType); + } + } + + public void setupCaptureDevice(boolean screencast, boolean micEnabled) { + int index = screencast ? 
CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA; + if (captureDevice[index] == 0 || tgVoip[index] == null) { + return; + } + tgVoip[index].setupOutgoingVideoCreated(captureDevice[index]); + destroyCaptureDevice[index] = false; + videoState[index] = Instance.VIDEO_STATE_ACTIVE; + if (micMute == micEnabled) { + setMicMute(!micEnabled, false, false); + micSwitching = true; + } + if (!screencast && groupCall != null) { + editCallMember(UserConfig.getInstance(currentAccount).getCurrentUser(), !micEnabled, videoState[index] != Instance.VIDEO_STATE_ACTIVE, null, null, () -> micSwitching = false); + } + } + + public void setVideoState(boolean screencast, int state) { + int index = screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA; + if (tgVoip[index] == null) { + if (captureDevice[index] != 0) { + videoState[index] = state; + NativeInstance.setVideoStateCapturer(captureDevice[index], videoState[index]); + } else if (state == Instance.VIDEO_STATE_ACTIVE && currentState != STATE_BUSY && currentState != STATE_ENDED) { + captureDevice[index] = NativeInstance.createVideoCapturer(localSink[index], isFrontFaceCamera ? 
1 : 0); + videoState[index] = Instance.VIDEO_STATE_ACTIVE; } return; } - this.videoState = videoState; - tgVoip.setVideoState(videoState); - checkIsNear(); + videoState[index] = state; + tgVoip[index].setVideoState(videoState[index]); + if (captureDevice[index] != 0) { + NativeInstance.setVideoStateCapturer(captureDevice[index], videoState[index]); + } + if (!screencast && groupCall != null) { + editCallMember(UserConfig.getInstance(currentAccount).getCurrentUser(), null, videoState[CAPTURE_DEVICE_CAMERA] != Instance.VIDEO_STATE_ACTIVE, null, null, null); + } + if (!screencast) { + checkIsNear(); + } } - public int getVideoState() { - return videoState; + public void stopScreenCapture() { + if (groupCall == null || videoState[CAPTURE_DEVICE_SCREEN] != Instance.VIDEO_STATE_ACTIVE) { + return; + } + TLRPC.TL_phone_leaveGroupCallPresentation req = new TLRPC.TL_phone_leaveGroupCallPresentation(); + req.call = groupCall.getInputGroupCall(); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (response != null) { + TLRPC.Updates updates = (TLRPC.Updates) response; + MessagesController.getInstance(currentAccount).processUpdates(updates, false); + } + }); + NativeInstance instance = tgVoip[CAPTURE_DEVICE_SCREEN]; + if (instance != null) { + Utilities.globalQueue.postRunnable(instance::stopGroup); + } + mySource[CAPTURE_DEVICE_SCREEN] = 0; + tgVoip[CAPTURE_DEVICE_SCREEN] = null; + destroyCaptureDevice[CAPTURE_DEVICE_SCREEN] = true; + captureDevice[CAPTURE_DEVICE_SCREEN] = 0; + videoState[CAPTURE_DEVICE_SCREEN] = Instance.VIDEO_STATE_INACTIVE; + AccountInstance.getInstance(currentAccount).getNotificationCenter().postNotificationName(NotificationCenter.groupCallScreencastStateChanged); + } + + public int getVideoState(boolean screencast) { + return videoState[screencast ? 
CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA]; } public void setSinks(VideoSink local, VideoSink remote) { - localSink.setTarget(local); - remoteSink.setTarget(remote); + setSinks(local, false, remote); + } + + public void setSinks(VideoSink local, boolean screencast, VideoSink remote) { + localSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA].setTarget(local); + remoteSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA].setTarget(remote); + } + + public void setLocalSink(VideoSink local, boolean screencast) { + if (screencast) { + //localSink[CAPTURE_DEVICE_SCREEN].setTarget(local); + } else { + localSink[CAPTURE_DEVICE_CAMERA].setTarget(local); + } + } + + public void setRemoteSink(VideoSink remote, boolean screencast) { + remoteSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA].setTarget(remote); + } + + public ProxyVideoSink addRemoteSink(TLRPC.TL_groupCallParticipant participant, boolean screencast, VideoSink remote, VideoSink background) { + if (tgVoip[CAPTURE_DEVICE_CAMERA] == null) { + return null; + } + String endpointId = screencast ? participant.presentationEndpoint : participant.videoEndpoint; + if (endpointId == null) { + return null; + } + ProxyVideoSink sink = remoteSinks.get(endpointId); + if (sink != null && sink.target == remote) { + return sink; + } + if (sink == null) { + sink = proxyVideoSinkLruCache.remove(endpointId); + } + if (sink == null) { + sink = new ProxyVideoSink(); + } + if (remote != null) { + sink.setTarget(remote); + } + if (background != null) { + sink.setBackground(background); + } + remoteSinks.put(endpointId, sink); + sink.nativeInstance = tgVoip[CAPTURE_DEVICE_CAMERA].addIncomingVideoOutput(QUALITY_MEDIUM, endpointId, createSsrcGroups(screencast ? 
participant.presentation : participant.video), sink); + return sink; + } + + private NativeInstance.SsrcGroup[] createSsrcGroups(TLRPC.TL_groupCallParticipantVideo video) { + if (video.source_groups.isEmpty()) { + return null; + } + NativeInstance.SsrcGroup[] result = new NativeInstance.SsrcGroup[video.source_groups.size()]; + for (int a = 0; a < result.length; a++) { + result[a] = new NativeInstance.SsrcGroup(); + TLRPC.TL_groupCallParticipantVideoSourceGroup group = video.source_groups.get(a); + result[a].semantics = group.semantics; + result[a].ssrcs = new int[group.sources.size()]; + for (int b = 0; b < result[a].ssrcs.length; b++) { + result[a].ssrcs[b] = group.sources.get(b); + } + } + return result; + } + + public void requestFullScreen(TLRPC.TL_groupCallParticipant participant, boolean screencast) { + if (currentBackgroundSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA] != null) { + currentBackgroundSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA].setBackground(null); + } + if (participant == null) { + currentBackgroundSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA] = null; + currentBackgroundEndpointId[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA] = null; + return; + } + String endpointId = screencast ? participant.presentationEndpoint : participant.videoEndpoint; + if (endpointId == null) { + return; + } + ProxyVideoSink sink = remoteSinks.get(endpointId); + if (sink == null) { + sink = addRemoteSink(participant, screencast, null, null); + } + if (sink != null) { + sink.setBackground(remoteSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA]); + //tgVoip[CAPTURE_DEVICE_CAMERA].setVideoEndpointQuality(endpointId, QUALITY_FULL); TODO + currentBackgroundSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA] = sink; + currentBackgroundEndpointId[screencast ? 
CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA] = endpointId; + } else { + //tgVoip[CAPTURE_DEVICE_CAMERA].setVideoEndpointQuality(endpointId, QUALITY_MEDIUM); TODO + currentBackgroundSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA] = null; + currentBackgroundEndpointId[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA] = null; + } + } + + public void removeRemoteSink(TLRPC.TL_groupCallParticipant participant, boolean presentation) { + if (presentation) { + ProxyVideoSink sink = remoteSinks.remove(participant.presentationEndpoint); + if (sink != null) { + tgVoip[CAPTURE_DEVICE_CAMERA].removeIncomingVideoOutput(sink.nativeInstance); + } + } else { + ProxyVideoSink sink = remoteSinks.remove(participant.videoEndpoint); + if (sink != null) { + tgVoip[CAPTURE_DEVICE_CAMERA].removeIncomingVideoOutput(sink.nativeInstance); + } + } + } + + public boolean isFullscreen(TLRPC.TL_groupCallParticipant participant, boolean screencast) { + return currentBackgroundSink[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA] != null && TextUtils.equals(currentBackgroundEndpointId[screencast ? CAPTURE_DEVICE_SCREEN : CAPTURE_DEVICE_CAMERA], screencast ? 
participant.presentationEndpoint : participant.videoEndpoint); } public void setBackgroundSinks(VideoSink local, VideoSink remote) { - localSink.setBackground(local); - remoteSink.setBackground(remote); + localSink[CAPTURE_DEVICE_CAMERA].setBackground(local); + remoteSink[CAPTURE_DEVICE_CAMERA].setBackground(remote); } public void swapSinks() { - localSink.swap(); - remoteSink.swap(); - } - - @Override - public void onDestroy() { - super.onDestroy(); - setSinks(null, null); - if (onDestroyRunnable != null) { - onDestroyRunnable.run(); - } - if (ChatObject.isChannel(chat)) { - MessagesController.getInstance(currentAccount).startShortPoll(chat, classGuid, true); - } - } - - @Override - protected Class getUIActivityClass() { - return LaunchActivity.class; + localSink[CAPTURE_DEVICE_CAMERA].swap(); + remoteSink[CAPTURE_DEVICE_CAMERA].swap(); } public boolean isHangingUp() { return currentState == STATE_HANGING_UP; } - public void declineIncomingCall(int reason, final Runnable onDone) { - stopRinging(); - callDiscardReason = reason; - if (currentState == STATE_REQUESTING) { - if (delayedStartOutgoingCall != null) { - AndroidUtilities.cancelRunOnUIThread(delayedStartOutgoingCall); - callEnded(); - } else { - dispatchStateChanged(STATE_HANGING_UP); - endCallAfterRequest = true; - AndroidUtilities.runOnUIThread(() -> { - if (currentState == STATE_HANGING_UP) { - callEnded(); - } - }, 5000); - } - return; - } - if (currentState == STATE_HANGING_UP || currentState == STATE_ENDED) { - return; - } - dispatchStateChanged(STATE_HANGING_UP); - if (privateCall == null) { - onDestroyRunnable = onDone; - callEnded(); - if (callReqId != 0) { - ConnectionsManager.getInstance(currentAccount).cancelRequest(callReqId, false); - callReqId = 0; - } - return; - } - TLRPC.TL_phone_discardCall req = new TLRPC.TL_phone_discardCall(); - req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.access_hash = privateCall.access_hash; - req.peer.id = privateCall.id; - req.duration = (int) 
(getCallDuration() / 1000); - req.connection_id = tgVoip != null ? tgVoip.getPreferredRelayId() : 0; - switch (reason) { - case DISCARD_REASON_DISCONNECT: - req.reason = new TLRPC.TL_phoneCallDiscardReasonDisconnect(); - break; - case DISCARD_REASON_MISSED: - req.reason = new TLRPC.TL_phoneCallDiscardReasonMissed(); - break; - case DISCARD_REASON_LINE_BUSY: - req.reason = new TLRPC.TL_phoneCallDiscardReasonBusy(); - break; - case DISCARD_REASON_HANGUP: - default: - req.reason = new TLRPC.TL_phoneCallDiscardReasonHangup(); - break; - } - ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { - if (error != null) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("error on phone.discardCall: " + error); - } - } else { - if (response instanceof TLRPC.TL_updates) { - TLRPC.TL_updates updates = (TLRPC.TL_updates) response; - MessagesController.getInstance(currentAccount).processUpdates(updates, false); - } - if (BuildVars.LOGS_ENABLED) { - FileLog.d("phone.discardCall " + response); - } - } - }, ConnectionsManager.RequestFlagFailOnServerErrors); - onDestroyRunnable = onDone; - callEnded(); - } - public void onSignalingData(TLRPC.TL_updatePhoneCallSignalingData data) { - if (user == null || tgVoip == null || tgVoip.isGroup() || getCallID() != data.phone_call_id) { + if (user == null || tgVoip[CAPTURE_DEVICE_CAMERA] == null || tgVoip[CAPTURE_DEVICE_CAMERA].isGroup() || getCallID() != data.phone_call_id) { return; } - tgVoip.onSignalingDataReceive(data.data); + tgVoip[CAPTURE_DEVICE_CAMERA].onSignalingDataReceive(data.data); } public int getSelfId() { @@ -980,16 +1320,15 @@ public class VoIPService extends VoIPBaseService { return; } int selfId = getSelfId(); - NativeInstance instance = tgVoip; for (int a = 0, N = update.participants.size(); a < N; a++) { TLRPC.TL_groupCallParticipant participant = update.participants.get(a); if (participant.left) { if (participant.source != 0) { - if (participant.source == mySource) { + if (participant.source == 
mySource[CAPTURE_DEVICE_CAMERA]) { int selfCount = 0; for (int b = 0; b < N; b++) { TLRPC.TL_groupCallParticipant p = update.participants.get(b); - if (p.self || p.source == mySource) { + if (p.self || p.source == mySource[CAPTURE_DEVICE_CAMERA]) { selfCount++; } } @@ -1000,15 +1339,15 @@ public class VoIPService extends VoIPBaseService { } } } else if (MessageObject.getPeerId(participant.peer) == selfId) { - if (participant.source != mySource && mySource != 0 && participant.source != 0) { + if (participant.source != mySource[CAPTURE_DEVICE_CAMERA] && mySource[CAPTURE_DEVICE_CAMERA] != 0 && participant.source != 0) { if (BuildVars.LOGS_ENABLED) { - FileLog.d("source mismatch my = " + mySource + " psrc = " + participant.source); + FileLog.d("source mismatch my = " + mySource[CAPTURE_DEVICE_CAMERA] + " psrc = " + participant.source); } hangUp(2); return; } else if (ChatObject.isChannel(chat) && currentGroupModeStreaming && participant.can_self_unmute) { switchingStream = true; - createGroupInstance(false); + createGroupInstance(CAPTURE_DEVICE_CAMERA, false); } if (participant.muted) { setMicMute(true, false, false); @@ -1029,59 +1368,24 @@ public class VoIPService extends VoIPBaseService { return; } boolean newModeStreaming = false; - JSONObject object = null; - if (call.params != null) { + if (myParams != null) { try { - TLRPC.TL_dataJSON json = call.params; - object = new JSONObject(json.data); + JSONObject object = new JSONObject(myParams.data); newModeStreaming = object.optBoolean("stream"); } catch (Exception e) { FileLog.e(e); } } - if ((currentState == STATE_WAIT_INIT || newModeStreaming != currentGroupModeStreaming) && call.params != null) { + if ((currentState == STATE_WAIT_INIT || newModeStreaming != currentGroupModeStreaming) && myParams != null) { if (playedConnectedSound && newModeStreaming != currentGroupModeStreaming) { switchingStream = true; } currentGroupModeStreaming = newModeStreaming; try { if (newModeStreaming) { - tgVoip.prepareForStream(); + 
tgVoip[CAPTURE_DEVICE_CAMERA].prepareForStream(); } else { - object = object.getJSONObject("transport"); - String ufrag = object.getString("ufrag"); - String pwd = object.getString("pwd"); - JSONArray array = object.getJSONArray("fingerprints"); - Instance.Fingerprint[] fingerprints = new Instance.Fingerprint[array.length()]; - for (int a = 0; a < fingerprints.length; a++) { - JSONObject item = array.getJSONObject(a); - fingerprints[a] = new Instance.Fingerprint( - item.getString("hash"), - item.getString("setup"), - item.getString("fingerprint") - ); - } - array = object.getJSONArray("candidates"); - Instance.Candidate[] candidates = new Instance.Candidate[array.length()]; - for (int a = 0; a < candidates.length; a++) { - JSONObject item = array.getJSONObject(a); - candidates[a] = new Instance.Candidate( - item.optString("port", ""), - item.optString("protocol", ""), - item.optString("network", ""), - item.optString("generation", ""), - item.optString("id", ""), - item.optString("component", ""), - item.optString("foundation", ""), - item.optString("priority", ""), - item.optString("ip", ""), - item.optString("type", ""), - item.optString("tcpType", ""), - item.optString("relAddr", ""), - item.optString("relPort", "") - ); - } - tgVoip.setJoinResponsePayload(ufrag, pwd, fingerprints, candidates); + tgVoip[CAPTURE_DEVICE_CAMERA].setJoinResponsePayload(myParams.data); } dispatchStateChanged(STATE_WAIT_INIT_ACK); } catch (Exception e) { @@ -1308,7 +1612,10 @@ public class VoIPService extends VoIPBaseService { } groupCallPeer = peer; groupCall.setSelfPeer(groupCallPeer); - createGroupInstance(true); + createGroupInstance(CAPTURE_DEVICE_CAMERA, true); + if (videoState[CAPTURE_DEVICE_SCREEN] == Instance.VIDEO_STATE_ACTIVE) { + createGroupInstance(CAPTURE_DEVICE_SCREEN, true); + } } private void startGroupCall(int ssrc, String json, boolean create) { @@ -1320,10 +1627,12 @@ public class VoIPService extends VoIPBaseService { groupCall.call = new TLRPC.TL_groupCall(); 
groupCall.call.participants_count = 0; groupCall.call.version = 1; + groupCall.call.can_start_video = true; groupCall.call.can_change_join_muted = true; groupCall.chatId = chat.id; groupCall.currentAccount = AccountInstance.getInstance(currentAccount); groupCall.setSelfPeer(groupCallPeer); + groupCall.createNoVideoParticipant(); dispatchStateChanged(STATE_CREATING); TLRPC.TL_phone_createGroupCall req = new TLRPC.TL_phone_createGroupCall(); @@ -1374,19 +1683,18 @@ public class VoIPService extends VoIPBaseService { configureDeviceForCall(); showNotification(); AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didStartedCall)); - createGroupInstance(false); + createGroupInstance(CAPTURE_DEVICE_CAMERA, false); } else { if (getSharedInstance() == null || groupCall == null) { return; } dispatchStateChanged(STATE_WAIT_INIT); - mySource = ssrc; if (BuildVars.LOGS_ENABLED) { - FileLog.d("initital source = " + mySource); + FileLog.d("initital source = " + ssrc); } - myJson = json; TLRPC.TL_phone_joinGroupCall req = new TLRPC.TL_phone_joinGroupCall(); req.muted = true; + req.video_stopped = videoState[CAPTURE_DEVICE_CAMERA] != Instance.VIDEO_STATE_ACTIVE; req.call = groupCall.getInputGroupCall(); req.params = new TLRPC.TL_dataJSON(); req.params.data = json; @@ -1402,6 +1710,7 @@ public class VoIPService extends VoIPBaseService { } ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { if (response != null) { + AndroidUtilities.runOnUIThread(() -> mySource[CAPTURE_DEVICE_CAMERA] = ssrc); TLRPC.Updates updates = (TLRPC.Updates) response; int selfId = getSelfId(); for (int a = 0, N = updates.updates.size(); a < N; a++) { @@ -1411,14 +1720,18 @@ public class VoIPService extends VoIPBaseService { for (int b = 0, N2 = updateGroupCallParticipants.participants.size(); b < N2; b++) { TLRPC.TL_groupCallParticipant participant = updateGroupCallParticipants.participants.get(b); if 
(MessageObject.getPeerId(participant.peer) == selfId) { - mySource = participant.source; + AndroidUtilities.runOnUIThread(() -> mySource[CAPTURE_DEVICE_CAMERA] = participant.source); if (BuildVars.LOGS_ENABLED) { - FileLog.d("join source = " + mySource); + FileLog.d("join source = " + participant.source); } - a = N; break; } } + } else if (update instanceof TLRPC.TL_updateGroupCallConnection) { + TLRPC.TL_updateGroupCallConnection updateGroupCallConnection = (TLRPC.TL_updateGroupCallConnection) update; + if (!updateGroupCallConnection.presentation) { + myParams = updateGroupCallConnection.params; + } } } MessagesController.getInstance(currentAccount).processUpdates(updates, false); @@ -1439,7 +1752,7 @@ public class VoIPService extends VoIPBaseService { } hangUp(2); } else if ("GROUPCALL_SSRC_DUPLICATE_MUCH".equals(error.text)) { - createGroupInstance(false); + createGroupInstance(CAPTURE_DEVICE_CAMERA, false); } else { if ("GROUPCALL_INVALID".equals(error.text)) { MessagesController.getInstance(currentAccount).loadFullChat(chat.id, 0, true); @@ -1449,33 +1762,131 @@ public class VoIPService extends VoIPBaseService { } }); } - }, BuildVars.DEBUG_PRIVATE_VERSION ? 
ConnectionsManager.RequestFlagFailOnServerErrors : 0); + }); } } + private void startScreenCapture(String json) { + if (getSharedInstance() == null || groupCall == null) { + return; + } + mySource[CAPTURE_DEVICE_SCREEN] = 0; + TLRPC.TL_phone_joinGroupCallPresentation req = new TLRPC.TL_phone_joinGroupCallPresentation(); + req.call = groupCall.getInputGroupCall(); + req.params = new TLRPC.TL_dataJSON(); + req.params.data = json; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (response != null) { + TLRPC.Updates updates = (TLRPC.Updates) response; + AndroidUtilities.runOnUIThread(() -> { + if (tgVoip[CAPTURE_DEVICE_SCREEN] != null) { + int selfId = getSelfId(); + for (int a = 0, N = updates.updates.size(); a < N; a++) { + TLRPC.Update update = updates.updates.get(a); + if (update instanceof TLRPC.TL_updateGroupCallConnection) { + TLRPC.TL_updateGroupCallConnection updateGroupCallConnection = (TLRPC.TL_updateGroupCallConnection) update; + if (updateGroupCallConnection.presentation) { + tgVoip[CAPTURE_DEVICE_SCREEN].setJoinResponsePayload(updateGroupCallConnection.params.data); + } + } else if (update instanceof TLRPC.TL_updateGroupCallParticipants) { + TLRPC.TL_updateGroupCallParticipants updateGroupCallParticipants = (TLRPC.TL_updateGroupCallParticipants) update; + for (int b = 0, N2 = updateGroupCallParticipants.participants.size(); b < N2; b++) { + TLRPC.TL_groupCallParticipant participant = updateGroupCallParticipants.participants.get(b); + if (MessageObject.getPeerId(participant.peer) == selfId) { + if (participant.presentation != null) { + for (int c = 0, N3 = participant.presentation.source_groups.size(); c < N3; c++) { + TLRPC.TL_groupCallParticipantVideoSourceGroup sourceGroup = participant.presentation.source_groups.get(c); + if (sourceGroup.sources.size() > 0) { + mySource[CAPTURE_DEVICE_SCREEN] = sourceGroup.sources.get(0); + } + } + } + break; + } + } + } + } + } + }); + 
MessagesController.getInstance(currentAccount).processUpdates(updates, false); + startGroupCheckShortpoll(); + } else { + AndroidUtilities.runOnUIThread(() -> { + if ("JOIN_AS_PEER_INVALID".equals(error.text)) { + TLRPC.ChatFull chatFull = MessagesController.getInstance(currentAccount).getChatFull(chat.id); + if (chatFull != null) { + if (chatFull instanceof TLRPC.TL_chatFull) { + chatFull.flags &=~ 32768; + } else { + chatFull.flags &=~ 67108864; + } + chatFull.groupcall_default_join_as = null; + JoinCallAlert.resetCache(); + } + hangUp(2); + } else if ("GROUPCALL_SSRC_DUPLICATE_MUCH".equals(error.text)) { + createGroupInstance(CAPTURE_DEVICE_SCREEN, false); + } else { + if ("GROUPCALL_INVALID".equals(error.text)) { + MessagesController.getInstance(currentAccount).loadFullChat(chat.id, 0, true); + } + } + }); + } + }); + } + private Runnable shortPollRunnable; private int checkRequestId; private void startGroupCheckShortpoll() { - if (shortPollRunnable != null || sharedInstance == null || groupCall == null || mySource == 0) { + if (shortPollRunnable != null || sharedInstance == null || groupCall == null || mySource[CAPTURE_DEVICE_CAMERA] == 0 && mySource[CAPTURE_DEVICE_SCREEN] == 0) { return; } AndroidUtilities.runOnUIThread(shortPollRunnable = () -> { - if (shortPollRunnable == null || sharedInstance == null || groupCall == null || mySource == 0) { + if (shortPollRunnable == null || sharedInstance == null || groupCall == null || mySource[CAPTURE_DEVICE_CAMERA] == 0 && mySource[CAPTURE_DEVICE_SCREEN] == 0) { return; } TLRPC.TL_phone_checkGroupCall req = new TLRPC.TL_phone_checkGroupCall(); req.call = groupCall.getInputGroupCall(); - req.source = mySource; + for (int a = 0; a < mySource.length; a++) { + if (mySource[a] != 0) { + req.sources.add(mySource[a]); + } + } checkRequestId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { if (shortPollRunnable == null || sharedInstance == null || 
groupCall == null) { return; } shortPollRunnable = null; checkRequestId = 0; - if (response instanceof TLRPC.TL_boolFalse || error != null && error.code == 400) { - createGroupInstance(false); - } else { + boolean recreateCamera = false; + boolean recreateScreenCapture = false; + if (response instanceof TLRPC.Vector) { + TLRPC.Vector vector = (TLRPC.Vector) response; + if (mySource[CAPTURE_DEVICE_CAMERA] != 0 && req.sources.contains(mySource[CAPTURE_DEVICE_CAMERA])) { + if (!vector.objects.contains(mySource[CAPTURE_DEVICE_CAMERA])) { + recreateCamera = true; + } + } + if (mySource[CAPTURE_DEVICE_SCREEN] != 0 && req.sources.contains(mySource[CAPTURE_DEVICE_SCREEN])) { + if (!vector.objects.contains(mySource[CAPTURE_DEVICE_SCREEN])) { + recreateScreenCapture = true; + } + } + } else if (error != null && error.code == 400) { + recreateCamera = true; + if (mySource[CAPTURE_DEVICE_SCREEN] != 0 && req.sources.contains(mySource[CAPTURE_DEVICE_SCREEN])) { + recreateScreenCapture = true; + } + } + if (recreateCamera) { + createGroupInstance(CAPTURE_DEVICE_CAMERA, false); + } + if (recreateScreenCapture) { + createGroupInstance(CAPTURE_DEVICE_SCREEN, false); + } + if (mySource[CAPTURE_DEVICE_SCREEN] != 0 || mySource[CAPTURE_DEVICE_CAMERA] != 0) { startGroupCheckShortpoll(); } })); @@ -1483,6 +1894,9 @@ public class VoIPService extends VoIPBaseService { } private void cancelGroupCheckShortPoll() { + if (mySource[CAPTURE_DEVICE_SCREEN] != 0 || mySource[CAPTURE_DEVICE_CAMERA] != 0) { + return; + } if (checkRequestId != 0) { ConnectionsManager.getInstance(currentAccount).cancelRequest(checkRequestId, false); checkRequestId = 0; @@ -1493,63 +1907,83 @@ public class VoIPService extends VoIPBaseService { } } - private void broadcastUnknownParticipants(int[] unknown, ArrayList uids) { - if (groupCall == null || tgVoip == null) { + private static class RequestedParticipant { + public int audioSsrc; + public TLRPC.TL_groupCallParticipant participant; + + public 
RequestedParticipant(TLRPC.TL_groupCallParticipant p) { + participant = p; + audioSsrc = p.source; + } + } + + private void broadcastUnknownParticipants(long taskPtr, int[] unknown) { + if (groupCall == null || tgVoip[CAPTURE_DEVICE_CAMERA] == null) { return; } int selfId = getSelfId(); - ArrayList participants = null; - for (int a = 0, N = unknown != null ? unknown.length : uids.size(); a < N; a++) { - int ssrc; - if (unknown != null) { - ssrc = unknown[a]; - } else { - ssrc = uids.get(a); + ArrayList participants = null; + for (int a = 0, N = unknown.length; a < N; a++) { + TLRPC.TL_groupCallParticipant p = groupCall.participantsBySources.get(unknown[a]); + if (p == null) { + p = groupCall.participantsByVideoSources.get(unknown[a]); + if (p == null) { + p = groupCall.participantsByPresentationSources.get(unknown[a]); + } } - TLRPC.TL_groupCallParticipant p = groupCall.participantsBySources.get(ssrc); if (p == null || MessageObject.getPeerId(p.peer) == selfId || p.source == 0) { continue; } - //if (p.params != null && !TextUtils.isEmpty(p.params.data)) { if (participants == null) { participants = new ArrayList<>(); } - participants.add(p); - //} + participants.add(new RequestedParticipant(p)); } if (participants != null) { - String[] jsonArray = new String[participants.size()]; int[] ssrcs = new int[participants.size()]; for (int a = 0, N = participants.size(); a < N; a++) { - TLRPC.TL_groupCallParticipant p = participants.get(a); - jsonArray[a] = null;//p.params.data; - ssrcs[a] = p.source; + RequestedParticipant p = participants.get(a); + ssrcs[a] = p.audioSsrc; } - tgVoip.addParticipants(ssrcs, jsonArray); + tgVoip[CAPTURE_DEVICE_CAMERA].onMediaDescriptionAvailable(taskPtr, ssrcs); for (int a = 0, N = participants.size(); a < N; a++) { - TLRPC.TL_groupCallParticipant p = participants.get(a); - if (p.muted_by_you) { - tgVoip.setVolume(p.source, 0); + RequestedParticipant p = participants.get(a); + if (p.participant.muted_by_you) { + 
tgVoip[CAPTURE_DEVICE_CAMERA].setVolume(p.audioSsrc, 0); } else { - tgVoip.setVolume(p.source, ChatObject.getParticipantVolume(p) / 10000.0); + tgVoip[CAPTURE_DEVICE_CAMERA].setVolume(p.audioSsrc, ChatObject.getParticipantVolume(p.participant) / 10000.0); } } } } - private void createGroupInstance(boolean switchingAccount) { - cancelGroupCheckShortPoll(); - wasConnected = false; - if (switchingAccount) { - mySource = 0; - tgVoip.stopGroup(); - tgVoip = null; + private void createGroupInstance(int type, boolean switchAccount) { + if (switchAccount) { + mySource[type] = 0; + if (type == CAPTURE_DEVICE_CAMERA) { + switchingAccount = switchAccount; + } } - if (tgVoip == null) { - final String logFilePath = BuildVars.DEBUG_VERSION ? VoIPHelper.getLogFilePath("voip" + groupCall.call.id) : VoIPHelper.getLogFilePath(groupCall.call.id, false); - tgVoip = NativeInstance.makeGroup(logFilePath, (ssrc, json) -> startGroupCall(ssrc, json, true), (uids, levels, voice) -> { - if (sharedInstance == null || groupCall == null) { + cancelGroupCheckShortPoll(); + if (type == CAPTURE_DEVICE_CAMERA) { + wasConnected = false; + } else if (!wasConnected) { + reconnectScreenCapture = true; + return; + } + boolean created = false; + if (tgVoip[type] == null) { + created = true; + final String logFilePath = BuildVars.DEBUG_VERSION ? 
VoIPHelper.getLogFilePath("voip_" + type + "_" + groupCall.call.id) : VoIPHelper.getLogFilePath(groupCall.call.id, false); + tgVoip[type] = NativeInstance.makeGroup(logFilePath, captureDevice[type], type == CAPTURE_DEVICE_SCREEN, type == CAPTURE_DEVICE_CAMERA && SharedConfig.noiseSupression, (ssrc, json) -> { + if (type == CAPTURE_DEVICE_CAMERA) { + startGroupCall(ssrc, json, true); + } else { + startScreenCapture(json); + } + }, (uids, levels, voice) -> { + if (sharedInstance == null || groupCall == null || type != CAPTURE_DEVICE_CAMERA) { return; } groupCall.processVoiceLevelsUpdate(uids, levels, voice); @@ -1578,20 +2012,20 @@ public class VoIPService extends VoIPBaseService { audioLevelsCallback.run(uids, levels, voice); } } - }, ssrcs -> { - - }, unknown -> { - if (sharedInstance == null || groupCall == null) { + }, (taskPtr, unknown) -> { + if (sharedInstance == null || groupCall == null || type != CAPTURE_DEVICE_CAMERA) { return; } groupCall.processUnknownVideoParticipants(unknown, (ssrcs) -> { if (sharedInstance == null || groupCall == null) { return; } - broadcastUnknownParticipants(null, ssrcs); + broadcastUnknownParticipants(taskPtr, unknown); }); - broadcastUnknownParticipants(unknown, null); }, (timestamp, duration) -> { + if (type != CAPTURE_DEVICE_CAMERA) { + return; + } TLRPC.TL_upload_getFile req = new TLRPC.TL_upload_getFile(); req.limit = 128 * 1024; TLRPC.TL_inputGroupCallStream inputGroupCallStream = new TLRPC.TL_inputGroupCallStream(); @@ -1603,15 +2037,15 @@ public class VoIPService extends VoIPBaseService { req.location = inputGroupCallStream; currentStreamRequestTimestamp = timestamp; currentStreamRequestId = AccountInstance.getInstance(currentAccount).getConnectionsManager().sendRequest(req, (response, error, responseTime) -> { - if (tgVoip == null) { + if (tgVoip[type] == null) { return; } if (response != null) { TLRPC.TL_upload_file res = (TLRPC.TL_upload_file) response; - tgVoip.onStreamPartAvailable(timestamp, res.bytes.buffer, 
res.bytes.limit(), responseTime); + tgVoip[type].onStreamPartAvailable(timestamp, res.bytes.buffer, res.bytes.limit(), responseTime); } else { if ("GROUPCALL_JOIN_MISSING".equals(error.text)) { - AndroidUtilities.runOnUIThread(() -> createGroupInstance(false)); + AndroidUtilities.runOnUIThread(() -> createGroupInstance(type, false)); } else { int status; if ("TIME_TOO_BIG".equals(error.text) || error.text.startsWith("FLOOD_WAIT")) { @@ -1619,23 +2053,34 @@ public class VoIPService extends VoIPBaseService { } else { status = -1; } - tgVoip.onStreamPartAvailable(timestamp, null, status, responseTime); + tgVoip[type].onStreamPartAvailable(timestamp, null, status, responseTime); } } }, ConnectionsManager.RequestFlagFailOnServerErrors, ConnectionsManager.ConnectionTypeDownload, groupCall.call.stream_dc_id); }, (timestamp, duration) -> { + if (type != CAPTURE_DEVICE_CAMERA) { + return; + } if (currentStreamRequestTimestamp == timestamp) { AccountInstance.getInstance(currentAccount).getConnectionsManager().cancelRequest(currentStreamRequestId, true); currentStreamRequestId = 0; } }); - tgVoip.setOnStateUpdatedListener(this::updateConnectionState); + tgVoip[type].setOnStateUpdatedListener((state, inTransition) -> updateConnectionState(type, state, inTransition)); + } + tgVoip[type].resetGroupInstance(!created, false); + if (captureDevice[type] != 0) { + destroyCaptureDevice[type] = false; + } + if (type == CAPTURE_DEVICE_CAMERA) { + dispatchStateChanged(STATE_WAIT_INIT); } - tgVoip.resetGroupInstance(false); - dispatchStateChanged(STATE_WAIT_INIT); } - private void updateConnectionState(int state, boolean inTransition) { + private void updateConnectionState(int type, int state, boolean inTransition) { + if (type != CAPTURE_DEVICE_CAMERA) { + return; + } dispatchStateChanged(state == 1 || switchingStream ? 
STATE_ESTABLISHED : STATE_RECONNECTING); if (switchingStream && (state == 0 || state == 1 && inTransition)) { AndroidUtilities.runOnUIThread(switchingStreamTimeoutRunnable = () -> { @@ -1643,13 +2088,13 @@ public class VoIPService extends VoIPBaseService { return; } switchingStream = false; - updateConnectionState(0, true); + updateConnectionState(type, 0, true); switchingStreamTimeoutRunnable = null; }, 3000); } if (state == 0) { startGroupCheckShortpoll(); - if (playedConnectedSound && spPlayId == 0 && !switchingStream) { + if (playedConnectedSound && spPlayId == 0 && !switchingStream && !switchingAccount) { Utilities.globalQueue.postRunnable(() -> { if (spPlayId != 0) { soundPool.stop(spPlayId); @@ -1661,6 +2106,7 @@ public class VoIPService extends VoIPBaseService { cancelGroupCheckShortPoll(); if (!inTransition) { switchingStream = false; + switchingAccount = false; } if (switchingStreamTimeoutRunnable != null) { AndroidUtilities.cancelRunOnUIThread(switchingStreamTimeoutRunnable); @@ -1682,10 +2128,14 @@ public class VoIPService extends VoIPBaseService { } if (!wasConnected) { wasConnected = true; - NativeInstance instance = tgVoip; + if (reconnectScreenCapture) { + createGroupInstance(CAPTURE_DEVICE_SCREEN, false); + reconnectScreenCapture = false; + } + NativeInstance instance = tgVoip[CAPTURE_DEVICE_CAMERA]; if (instance != null) { if (!micMute) { - tgVoip.setMuteMicrophone(false); + instance.setMuteMicrophone(false); } } setParticipantsVolume(); @@ -1694,7 +2144,7 @@ public class VoIPService extends VoIPBaseService { } public void setParticipantsVolume() { - NativeInstance instance = tgVoip; + NativeInstance instance = tgVoip[CAPTURE_DEVICE_CAMERA]; if (instance != null) { for (int a = 0, N = groupCall.participants.size(); a < N; a++) { TLRPC.TL_groupCallParticipant participant = groupCall.participants.valueAt(a); @@ -1711,7 +2161,7 @@ public class VoIPService extends VoIPBaseService { } public void setParticipantVolume(int ssrc, int volume) { - 
tgVoip.setVolume(ssrc, volume / 10000.0); + tgVoip[CAPTURE_DEVICE_CAMERA].setVolume(ssrc, volume / 10000.0); } public boolean isSwitchingStream() { @@ -1818,23 +2268,40 @@ public class VoIPService extends VoIPBaseService { final Instance.EncryptionKey encryptionKey = new Instance.EncryptionKey(authKey, isOutgoing); boolean newAvailable = "2.7.7".compareTo(privateCall.protocol.library_versions.get(0)) <= 0; - if (videoCapturer != 0 && !newAvailable) { - NativeInstance.destroyVideoCapturer(videoCapturer); - videoCapturer = 0; - videoState = Instance.VIDEO_STATE_INACTIVE; + if (captureDevice[CAPTURE_DEVICE_CAMERA] != 0 && !newAvailable) { + NativeInstance.destroyVideoCapturer(captureDevice[CAPTURE_DEVICE_CAMERA]); + captureDevice[CAPTURE_DEVICE_CAMERA] = 0; + videoState[CAPTURE_DEVICE_CAMERA] = Instance.VIDEO_STATE_INACTIVE; + } + if (!isOutgoing) { + if (videoCall && (Build.VERSION.SDK_INT < 23 || checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED)) { + captureDevice[CAPTURE_DEVICE_CAMERA] = NativeInstance.createVideoCapturer(localSink[CAPTURE_DEVICE_CAMERA], isFrontFaceCamera ? 
1 : 0); + videoState[CAPTURE_DEVICE_CAMERA] = Instance.VIDEO_STATE_ACTIVE; + } else { + videoState[CAPTURE_DEVICE_CAMERA] = Instance.VIDEO_STATE_INACTIVE; + } } // init - tgVoip = Instance.makeInstance(privateCall.protocol.library_versions.get(0), config, persistentStateFilePath, endpoints, proxy, getNetworkType(), encryptionKey, remoteSink, videoCapturer, (uids, levels, voice) -> { + tgVoip[CAPTURE_DEVICE_CAMERA] = Instance.makeInstance(privateCall.protocol.library_versions.get(0), config, persistentStateFilePath, endpoints, proxy, getNetworkType(), encryptionKey, remoteSink[CAPTURE_DEVICE_CAMERA], captureDevice[CAPTURE_DEVICE_CAMERA], (uids, levels, voice) -> { if (sharedInstance == null || privateCall == null) { return; } NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.webRtcMicAmplitudeEvent, levels[0]); }); - tgVoip.setOnStateUpdatedListener(this::onConnectionStateChanged); - tgVoip.setOnSignalBarsUpdatedListener(this::onSignalBarCountChanged); - tgVoip.setOnSignalDataListener(this::onSignalingData); - tgVoip.setOnRemoteMediaStateUpdatedListener(this::onMediaStateUpdated); - tgVoip.setMuteMicrophone(micMute); + tgVoip[CAPTURE_DEVICE_CAMERA].setOnStateUpdatedListener(this::onConnectionStateChanged); + tgVoip[CAPTURE_DEVICE_CAMERA].setOnSignalBarsUpdatedListener(this::onSignalBarCountChanged); + tgVoip[CAPTURE_DEVICE_CAMERA].setOnSignalDataListener(this::onSignalingData); + tgVoip[CAPTURE_DEVICE_CAMERA].setOnRemoteMediaStateUpdatedListener((audioState, videoState) -> AndroidUtilities.runOnUIThread(() -> { + remoteAudioState = audioState; + remoteVideoState = videoState; + checkIsNear(); + + for (int a = 0; a < stateListeners.size(); a++) { + StateListener l = stateListeners.get(a); + l.onMediaStateUpdated(audioState, videoState); + } + })); + tgVoip[CAPTURE_DEVICE_CAMERA].setMuteMicrophone(micMute); if (newAvailable != isVideoAvailable) { isVideoAvailable = newAvailable; @@ -1843,13 +2310,13 @@ public class VoIPService extends 
VoIPBaseService { l.onVideoAvailableChange(isVideoAvailable); } } - videoCapturer = 0; + captureDevice[CAPTURE_DEVICE_CAMERA] = 0; AndroidUtilities.runOnUIThread(new Runnable() { @Override public void run() { - if (tgVoip != null) { - updateTrafficStats(null); + if (tgVoip[CAPTURE_DEVICE_CAMERA] != null) { + updateTrafficStats(tgVoip[CAPTURE_DEVICE_CAMERA], null); AndroidUtilities.runOnUIThread(this, 5000); } } @@ -1862,14 +2329,6 @@ public class VoIPService extends VoIPBaseService { } } - protected void showNotification() { - if (user != null) { - showNotification(ContactsController.formatName(user.first_name, user.last_name), getRoundAvatarBitmap(user)); - } else { - showNotification(chat.title, getRoundAvatarBitmap(chat)); - } - } - public void playConnectedSound() { Utilities.globalQueue.postRunnable(() -> soundPool.play(spVoiceChatStartId, 1.0f, 1.0f, 0, 0, 1)); playedConnectedSound = true; @@ -1918,43 +2377,14 @@ public class VoIPService extends VoIPBaseService { }); } - protected void callFailed(String error) { - if (privateCall != null) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("Discarding failed call"); - } - TLRPC.TL_phone_discardCall req = new TLRPC.TL_phone_discardCall(); - req.peer = new TLRPC.TL_inputPhoneCall(); - req.peer.access_hash = privateCall.access_hash; - req.peer.id = privateCall.id; - req.duration = (int) (getCallDuration() / 1000); - req.connection_id = tgVoip != null ? tgVoip.getPreferredRelayId() : 0; - req.reason = new TLRPC.TL_phoneCallDiscardReasonDisconnect(); - ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error1) -> { - if (error1 != null) { - if (BuildVars.LOGS_ENABLED) { - FileLog.e("error on phone.discardCall: " + error1); - } - } else { - if (BuildVars.LOGS_ENABLED) { - FileLog.d("phone.discardCall " + response); - } - } - }); - } - super.callFailed(error); - } - - @Override - public long getCallID() { - return privateCall != null ? 
privateCall.id : 0; - } - public boolean isVideoAvailable() { return isVideoAvailable; } void onMediaButtonEvent(KeyEvent ev) { + if (ev == null) { + return; + } if (ev.getKeyCode() == KeyEvent.KEYCODE_HEADSETHOOK || ev.getKeyCode() == KeyEvent.KEYCODE_MEDIA_PAUSE || ev.getKeyCode() == KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE) { if (ev.getAction() == KeyEvent.ACTION_UP) { if (currentState == STATE_WAITING_INCOMING) { @@ -1970,13 +2400,6 @@ public class VoIPService extends VoIPBaseService { return g_a; } - @Override - public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.appDidLogout) { - callEnded(); - } - } - public void forceRating() { forceRating = true; } @@ -1991,21 +2414,729 @@ public class VoIPService extends VoIPBaseService { return EncryptionKeyEmojifier.emojifyForCall(Utilities.computeSHA256(os.toByteArray(), 0, os.size())); } - @Override - public void onConnectionStateChanged(int newState, boolean inTransition) { - AndroidUtilities.runOnUIThread(() -> { - if (newState == STATE_ESTABLISHED) { - if (callStartTime == 0) { - callStartTime = SystemClock.elapsedRealtime(); - } - //peerCapabilities = tgVoip.getPeerCapabilities(); + public boolean hasEarpiece() { + if (USE_CONNECTION_SERVICE) { + if (systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { + int routeMask = systemCallConnection.getCallAudioState().getSupportedRouteMask(); + return (routeMask & (CallAudioState.ROUTE_EARPIECE | CallAudioState.ROUTE_WIRED_HEADSET)) != 0; + } + } + if (((TelephonyManager) getSystemService(TELEPHONY_SERVICE)).getPhoneType() != TelephonyManager.PHONE_TYPE_NONE) { + return true; + } + if (mHasEarpiece != null) { + return mHasEarpiece; + } + + // not calculated yet, do it now + try { + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + Method method = AudioManager.class.getMethod("getDevicesForStream", Integer.TYPE); + Field field = 
AudioManager.class.getField("DEVICE_OUT_EARPIECE"); + int earpieceFlag = field.getInt(null); + int bitmaskResult = (int) method.invoke(am, AudioManager.STREAM_VOICE_CALL); + + // check if masked by the earpiece flag + if ((bitmaskResult & earpieceFlag) == earpieceFlag) { + mHasEarpiece = Boolean.TRUE; + } else { + mHasEarpiece = Boolean.FALSE; + } + } catch (Throwable error) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error while checking earpiece! ", error); + } + mHasEarpiece = Boolean.TRUE; + } + + return mHasEarpiece; + } + + private int getStatsNetworkType() { + int netType = StatsController.TYPE_WIFI; + if (lastNetInfo != null) { + if (lastNetInfo.getType() == ConnectivityManager.TYPE_MOBILE) { + netType = lastNetInfo.isRoaming() ? StatsController.TYPE_ROAMING : StatsController.TYPE_MOBILE; + } + } + return netType; + } + + protected void setSwitchingCamera(boolean switching, boolean isFrontFace) { + switchingCamera = switching; + if (!switching) { + isFrontFaceCamera = isFrontFace; + for (int a = 0; a < stateListeners.size(); a++) { + StateListener l = stateListeners.get(a); + l.onCameraSwitch(isFrontFaceCamera); + } + } + } + + public void registerStateListener(StateListener l) { + if (stateListeners.contains(l)) { + return; + } + stateListeners.add(l); + if (currentState != 0) { + l.onStateChanged(currentState); + } + if (signalBarCount != 0) { + l.onSignalBarsCountChanged(signalBarCount); + } + } + + public void unregisterStateListener(StateListener l) { + stateListeners.remove(l); + } + + public void editCallMember(TLObject object, Boolean mute, Boolean muteVideo, Integer volume, Boolean raiseHand, Runnable onComplete) { + if (object == null || groupCall == null) { + return; + } + TLRPC.TL_phone_editGroupCallParticipant req = new TLRPC.TL_phone_editGroupCallParticipant(); + req.call = groupCall.getInputGroupCall(); + if (object instanceof TLRPC.User) { + TLRPC.User user = (TLRPC.User) object; + if (UserObject.isUserSelf(user) && groupCallPeer != null) 
{ + req.participant = groupCallPeer; + } else { + req.participant = MessagesController.getInputPeer(user); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("edit group call part id = " + req.participant.user_id + " access_hash = " + req.participant.user_id); + } + } + } else if (object instanceof TLRPC.Chat) { + TLRPC.Chat chat = (TLRPC.Chat) object; + req.participant = MessagesController.getInputPeer(chat); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("edit group call part id = " + (req.participant.chat_id != 0 ? req.participant.chat_id : req.participant.channel_id) + " access_hash = " + req.participant.access_hash); + } + } + if (mute != null) { + req.muted = mute; + req.flags |= 1; + } + if (volume != null) { + req.volume = volume; + req.flags |= 2; + } + if (raiseHand != null) { + req.raise_hand = raiseHand; + req.flags |= 4; + } + if (muteVideo != null) { + req.video_stopped = muteVideo; + req.flags |= 8; + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("edit group call flags = " + req.flags); + } + int account = currentAccount; + AccountInstance.getInstance(account).getConnectionsManager().sendRequest(req, (response, error) -> { + if (response != null) { + AccountInstance.getInstance(account).getMessagesController().processUpdates((TLRPC.Updates) response, false); + } + if (onComplete != null) { + AndroidUtilities.runOnUIThread(onComplete); } - super.onConnectionStateChanged(newState, inTransition); }); } - @TargetApi(Build.VERSION_CODES.O) + public boolean isMicMute() { + return micMute; + } + + public void toggleSpeakerphoneOrShowRouteSheet(Context context, boolean fromOverlayWindow) { + if (isBluetoothHeadsetConnected() && hasEarpiece()) { + BottomSheet.Builder builder = new BottomSheet.Builder(context) + .setTitle(LocaleController.getString("VoipOutputDevices", R.string.VoipOutputDevices), true) + .setItems(new CharSequence[]{ + LocaleController.getString("VoipAudioRoutingSpeaker", R.string.VoipAudioRoutingSpeaker), + isHeadsetPlugged ? 
LocaleController.getString("VoipAudioRoutingHeadset", R.string.VoipAudioRoutingHeadset) : LocaleController.getString("VoipAudioRoutingEarpiece", R.string.VoipAudioRoutingEarpiece), + currentBluetoothDeviceName != null ? currentBluetoothDeviceName : LocaleController.getString("VoipAudioRoutingBluetooth", R.string.VoipAudioRoutingBluetooth)}, + new int[]{R.drawable.calls_menu_speaker, + isHeadsetPlugged ? R.drawable.calls_menu_headset : R.drawable.calls_menu_phone, + R.drawable.calls_menu_bluetooth}, (dialog, which) -> { + if (getSharedInstance() == null) { + return; + } + setAudioOutput(which); + }); + + BottomSheet bottomSheet = builder.create(); + if (fromOverlayWindow) { + if (Build.VERSION.SDK_INT >= 26) { + bottomSheet.getWindow().setType(WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY); + } else { + bottomSheet.getWindow().setType(WindowManager.LayoutParams.TYPE_SYSTEM_ALERT); + } + } + builder.show(); + return; + } + if (USE_CONNECTION_SERVICE && systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { + if (hasEarpiece()) { + systemCallConnection.setAudioRoute(systemCallConnection.getCallAudioState().getRoute() == CallAudioState.ROUTE_SPEAKER ? CallAudioState.ROUTE_WIRED_OR_EARPIECE : CallAudioState.ROUTE_SPEAKER); + } else { + systemCallConnection.setAudioRoute(systemCallConnection.getCallAudioState().getRoute() == CallAudioState.ROUTE_BLUETOOTH ? 
CallAudioState.ROUTE_WIRED_OR_EARPIECE : CallAudioState.ROUTE_BLUETOOTH); + } + } else if (audioConfigured && !USE_CONNECTION_SERVICE) { + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (hasEarpiece()) { + am.setSpeakerphoneOn(!am.isSpeakerphoneOn()); + } else { + am.setBluetoothScoOn(!am.isBluetoothScoOn()); + } + updateOutputGainControlState(); + } else { + speakerphoneStateToSet = !speakerphoneStateToSet; + } + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } + + public void setAudioOutput(int which) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("setAudioOutput " + which); + } + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (USE_CONNECTION_SERVICE && systemCallConnection != null) { + switch (which) { + case 2: + systemCallConnection.setAudioRoute(CallAudioState.ROUTE_BLUETOOTH); + break; + case 1: + systemCallConnection.setAudioRoute(CallAudioState.ROUTE_WIRED_OR_EARPIECE); + break; + case 0: + systemCallConnection.setAudioRoute(CallAudioState.ROUTE_SPEAKER); + break; + } + } else if (audioConfigured && !USE_CONNECTION_SERVICE) { + switch (which) { + case 2: + if (!bluetoothScoActive) { + needSwitchToBluetoothAfterScoActivates = true; + try { + am.startBluetoothSco(); + } catch (Throwable e) { + FileLog.e(e); + } + } else { + am.setBluetoothScoOn(true); + am.setSpeakerphoneOn(false); + } + break; + case 1: + needSwitchToBluetoothAfterScoActivates = false; + if (bluetoothScoActive || bluetoothScoConnecting) { + am.stopBluetoothSco(); + bluetoothScoActive = false; + bluetoothScoConnecting = false; + } + am.setSpeakerphoneOn(false); + am.setBluetoothScoOn(false); + break; + case 0: + needSwitchToBluetoothAfterScoActivates = false; + if (bluetoothScoActive || bluetoothScoConnecting) { + am.stopBluetoothSco(); + bluetoothScoActive = false; + bluetoothScoConnecting = false; + } + am.setBluetoothScoOn(false); + am.setSpeakerphoneOn(true); + break; + } + updateOutputGainControlState(); + } 
else { + switch (which) { + case 2: + audioRouteToSet = AUDIO_ROUTE_BLUETOOTH; + speakerphoneStateToSet = false; + break; + case 1: + audioRouteToSet = AUDIO_ROUTE_EARPIECE; + speakerphoneStateToSet = false; + break; + case 0: + audioRouteToSet = AUDIO_ROUTE_SPEAKER; + speakerphoneStateToSet = true; + break; + } + } + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } + + public boolean isSpeakerphoneOn() { + if (USE_CONNECTION_SERVICE && systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { + int route = systemCallConnection.getCallAudioState().getRoute(); + return hasEarpiece() ? route == CallAudioState.ROUTE_SPEAKER : route == CallAudioState.ROUTE_BLUETOOTH; + } else if (audioConfigured && !USE_CONNECTION_SERVICE) { + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + return hasEarpiece() ? am.isSpeakerphoneOn() : am.isBluetoothScoOn(); + } + return speakerphoneStateToSet; + } + + public int getCurrentAudioRoute() { + if (USE_CONNECTION_SERVICE) { + if (systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { + switch (systemCallConnection.getCallAudioState().getRoute()) { + case CallAudioState.ROUTE_BLUETOOTH: + return AUDIO_ROUTE_BLUETOOTH; + case CallAudioState.ROUTE_EARPIECE: + case CallAudioState.ROUTE_WIRED_HEADSET: + return AUDIO_ROUTE_EARPIECE; + case CallAudioState.ROUTE_SPEAKER: + return AUDIO_ROUTE_SPEAKER; + } + } + return audioRouteToSet; + } + if (audioConfigured) { + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (am.isBluetoothScoOn()) { + return AUDIO_ROUTE_BLUETOOTH; + } else if (am.isSpeakerphoneOn()) { + return AUDIO_ROUTE_SPEAKER; + } else { + return AUDIO_ROUTE_EARPIECE; + } + } + return audioRouteToSet; + } + + public String getDebugString() { + return tgVoip[CAPTURE_DEVICE_CAMERA] != null ? 
tgVoip[CAPTURE_DEVICE_CAMERA].getDebugInfo() : ""; + } + + public long getCallDuration() { + if (callStartTime == 0) { + return 0; + } + return SystemClock.elapsedRealtime() - callStartTime; + } + + public void stopRinging() { + if (ringtonePlayer != null) { + ringtonePlayer.stop(); + ringtonePlayer.release(); + ringtonePlayer = null; + } + if (vibrator != null) { + vibrator.cancel(); + vibrator = null; + } + } + + private void showNotification(String name, Bitmap photo) { + Intent intent = new Intent(this, LaunchActivity.class).setAction(groupCall != null ? "voip_chat" : "voip"); + if (groupCall != null) { + intent.putExtra("currentAccount", currentAccount); + } + Notification.Builder builder = new Notification.Builder(this) + .setContentTitle(groupCall != null ? LocaleController.getString("VoipVoiceChat", R.string.VoipVoiceChat) : LocaleController.getString("VoipOutgoingCall", R.string.VoipOutgoingCall)) + .setContentText(name) + .setContentIntent(PendingIntent.getActivity(this, 50, intent, 0)); + if (groupCall != null) { + builder.setSmallIcon(isMicMute() ? R.drawable.voicechat_muted : R.drawable.voicechat_active); + } else { + builder.setSmallIcon(R.drawable.notification); + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) { + Intent endIntent = new Intent(this, VoIPActionsReceiver.class); + endIntent.setAction(getPackageName() + ".END_CALL"); + builder.addAction(R.drawable.ic_call_end_white_24dp, groupCall != null ? 
LocaleController.getString("VoipGroupLeaveAlertTitle", R.string.VoipGroupLeaveAlertTitle) : LocaleController.getString("VoipEndCall", R.string.VoipEndCall), PendingIntent.getBroadcast(this, 0, endIntent, PendingIntent.FLAG_UPDATE_CURRENT)); + builder.setPriority(Notification.PRIORITY_MAX); + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { + builder.setShowWhen(false); + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + builder.setColor(0xff282e31); + builder.setColorized(true); + } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + builder.setColor(0xff2ca5e0); + } + if (Build.VERSION.SDK_INT >= 26) { + NotificationsController.checkOtherNotificationsChannel(); + builder.setChannelId(NotificationsController.OTHER_NOTIFICATIONS_CHANNEL); + } + if (photo != null) { + builder.setLargeIcon(photo); + } + startForeground(ID_ONGOING_CALL_NOTIFICATION, builder.getNotification()); + } + + private void startRingtoneAndVibration(int chatID) { + SharedPreferences prefs = MessagesController.getNotificationsSettings(currentAccount); + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + boolean needRing = am.getRingerMode() != AudioManager.RINGER_MODE_SILENT; + if (needRing) { + ringtonePlayer = new MediaPlayer(); + ringtonePlayer.setOnPreparedListener(mediaPlayer -> { + try { + ringtonePlayer.start(); + } catch (Throwable e) { + FileLog.e(e); + } + }); + ringtonePlayer.setLooping(true); + if (isHeadsetPlugged) { + ringtonePlayer.setAudioStreamType(AudioManager.STREAM_VOICE_CALL); + } else { + ringtonePlayer.setAudioStreamType(AudioManager.STREAM_RING); + if (!USE_CONNECTION_SERVICE) { + am.requestAudioFocus(this, AudioManager.STREAM_RING, AudioManager.AUDIOFOCUS_GAIN); + } + } + try { + String notificationUri; + if (prefs.getBoolean("custom_" + chatID, false)) { + notificationUri = prefs.getString("ringtone_path_" + chatID, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE).toString()); + } else { + 
notificationUri = prefs.getString("CallsRingtonePath", RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE).toString()); + } + ringtonePlayer.setDataSource(this, Uri.parse(notificationUri)); + ringtonePlayer.prepareAsync(); + } catch (Exception e) { + FileLog.e(e); + if (ringtonePlayer != null) { + ringtonePlayer.release(); + ringtonePlayer = null; + } + } + int vibrate; + if (prefs.getBoolean("custom_" + chatID, false)) { + vibrate = prefs.getInt("calls_vibrate_" + chatID, 0); + } else { + vibrate = prefs.getInt("vibrate_calls", 0); + } + if ((vibrate != 2 && vibrate != 4 && (am.getRingerMode() == AudioManager.RINGER_MODE_VIBRATE || am.getRingerMode() == AudioManager.RINGER_MODE_NORMAL)) || (vibrate == 4 && am.getRingerMode() == AudioManager.RINGER_MODE_VIBRATE)) { + vibrator = (Vibrator) getSystemService(VIBRATOR_SERVICE); + long duration = 700; + if (vibrate == 1) { + duration /= 2; + } else if (vibrate == 3) { + duration *= 2; + } + vibrator.vibrate(new long[]{0, duration, 500}, 0); + } + } + } + @Override + public void onDestroy() { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("=============== VoIPService STOPPING ==============="); + } + stopForeground(true); + stopRinging(); + if (ApplicationLoader.mainInterfacePaused || !ApplicationLoader.isScreenOn) { + MessagesController.getInstance(currentAccount).ignoreSetOnline = false; + } + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.appDidLogout); + SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE); + Sensor proximity = sm.getDefaultSensor(Sensor.TYPE_PROXIMITY); + if (proximity != null) { + sm.unregisterListener(this); + } + if (proximityWakelock != null && proximityWakelock.isHeld()) { + proximityWakelock.release(); + } + if (updateNotificationRunnable != null) { + Utilities.globalQueue.cancelRunnable(updateNotificationRunnable); + updateNotificationRunnable = null; + } + if (switchingStreamTimeoutRunnable != null) { + 
AndroidUtilities.cancelRunOnUIThread(switchingStreamTimeoutRunnable); + switchingStreamTimeoutRunnable = null; + } + unregisterReceiver(receiver); + if (timeoutRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(timeoutRunnable); + timeoutRunnable = null; + } + super.onDestroy(); + sharedInstance = null; + Arrays.fill(mySource, 0); + cancelGroupCheckShortPoll(); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didEndCall)); + if (tgVoip[CAPTURE_DEVICE_CAMERA] != null) { + StatsController.getInstance(currentAccount).incrementTotalCallsTime(getStatsNetworkType(), (int) (getCallDuration() / 1000) % 5); + onTgVoipPreStop(); + if (tgVoip[CAPTURE_DEVICE_CAMERA].isGroup()) { + NativeInstance instance = tgVoip[CAPTURE_DEVICE_CAMERA]; + Utilities.globalQueue.postRunnable(instance::stopGroup); + AccountInstance.getInstance(currentAccount).getConnectionsManager().cancelRequest(currentStreamRequestId, true); + currentStreamRequestId = 0; + } else { + Instance.FinalState state = tgVoip[CAPTURE_DEVICE_CAMERA].stop(); + updateTrafficStats(tgVoip[CAPTURE_DEVICE_CAMERA], state.trafficStats); + onTgVoipStop(state); + } + prevTrafficStats = null; + callStartTime = 0; + tgVoip[CAPTURE_DEVICE_CAMERA] = null; + Instance.destroyInstance(); + } + if (tgVoip[CAPTURE_DEVICE_SCREEN] != null) { + NativeInstance instance = tgVoip[CAPTURE_DEVICE_SCREEN]; + Utilities.globalQueue.postRunnable(instance::stopGroup); + tgVoip[CAPTURE_DEVICE_SCREEN] = null; + } + for (int a = 0; a < captureDevice.length; a++) { + if (captureDevice[a] != 0) { + if (destroyCaptureDevice[a]) { + NativeInstance.destroyVideoCapturer(captureDevice[a]); + } + captureDevice[a] = 0; + } + } + cpuWakelock.release(); + if (!playingSound) { + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (!USE_CONNECTION_SERVICE) { + if (isBtHeadsetConnected || bluetoothScoActive || bluetoothScoConnecting) { + am.stopBluetoothSco(); + 
am.setBluetoothScoOn(false); + am.setSpeakerphoneOn(false); + bluetoothScoActive = false; + bluetoothScoConnecting = false; + } + if (onDestroyRunnable == null) { + Utilities.globalQueue.postRunnable(setModeRunnable = () -> { + synchronized (sync) { + if (setModeRunnable == null) { + return; + } + setModeRunnable = null; + } + try { + am.setMode(AudioManager.MODE_NORMAL); + } catch (SecurityException x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error setting audio more to normal", x); + } + } + }); + } + am.abandonAudioFocus(this); + } + am.unregisterMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); + if (hasAudioFocus) { + am.abandonAudioFocus(this); + } + Utilities.globalQueue.postRunnable(() -> soundPool.release()); + } + + if (USE_CONNECTION_SERVICE) { + if (!didDeleteConnectionServiceContact) { + ContactsController.getInstance(currentAccount).deleteConnectionServiceContact(); + } + if (systemCallConnection != null && !playingSound) { + systemCallConnection.destroy(); + } + } + + ConnectionsManager.getInstance(currentAccount).setAppPaused(true, false); + VoIPHelper.lastCallTime = SystemClock.elapsedRealtime(); + + setSinks(null, null); + if (onDestroyRunnable != null) { + onDestroyRunnable.run(); + } + if (ChatObject.isChannel(chat)) { + MessagesController.getInstance(currentAccount).startShortPoll(chat, classGuid, true); + } + } + + public long getCallID() { + return privateCall != null ? 
privateCall.id : 0; + } + + public void hangUp() { + hangUp(0, null); + } + + public void hangUp(int discard) { + hangUp(discard, null); + } + + public void hangUp(Runnable onDone) { + hangUp(0, onDone); + } + + public void acceptIncomingCall() { + MessagesController.getInstance(currentAccount).ignoreSetOnline = false; + stopRinging(); + showNotification(); + configureDeviceForCall(); + startConnectingSound(); + dispatchStateChanged(STATE_EXCHANGING_KEYS); + AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didStartedCall)); + final MessagesStorage messagesStorage = MessagesStorage.getInstance(currentAccount); + TLRPC.TL_messages_getDhConfig req = new TLRPC.TL_messages_getDhConfig(); + req.random_length = 256; + req.version = messagesStorage.getLastSecretVersion(); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (error == null) { + TLRPC.messages_DhConfig res = (TLRPC.messages_DhConfig) response; + if (response instanceof TLRPC.TL_messages_dhConfig) { + if (!Utilities.isGoodPrime(res.p, res.g)) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("stopping VoIP service, bad prime"); + } + callFailed(); + return; + } + + messagesStorage.setSecretPBytes(res.p); + messagesStorage.setSecretG(res.g); + messagesStorage.setLastSecretVersion(res.version); + MessagesStorage.getInstance(currentAccount).saveSecretParams(messagesStorage.getLastSecretVersion(), messagesStorage.getSecretG(), messagesStorage.getSecretPBytes()); + } + byte[] salt = new byte[256]; + for (int a = 0; a < 256; a++) { + salt[a] = (byte) ((byte) (Utilities.random.nextDouble() * 256) ^ res.random[a]); + } + if (privateCall == null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("call is null"); + } + callFailed(); + return; + } + a_or_b = salt; + BigInteger g_b = BigInteger.valueOf(messagesStorage.getSecretG()); + BigInteger p = new BigInteger(1, messagesStorage.getSecretPBytes()); + g_b = 
g_b.modPow(new BigInteger(1, salt), p); + g_a_hash = privateCall.g_a_hash; + + byte[] g_b_bytes = g_b.toByteArray(); + if (g_b_bytes.length > 256) { + byte[] correctedAuth = new byte[256]; + System.arraycopy(g_b_bytes, 1, correctedAuth, 0, 256); + g_b_bytes = correctedAuth; + } + + TLRPC.TL_phone_acceptCall req1 = new TLRPC.TL_phone_acceptCall(); + req1.g_b = g_b_bytes; + req1.peer = new TLRPC.TL_inputPhoneCall(); + req1.peer.id = privateCall.id; + req1.peer.access_hash = privateCall.access_hash; + req1.protocol = new TLRPC.TL_phoneCallProtocol(); + req1.protocol.udp_p2p = req1.protocol.udp_reflector = true; + req1.protocol.min_layer = CALL_MIN_LAYER; + req1.protocol.max_layer = Instance.getConnectionMaxLayer(); + req1.protocol.library_versions.addAll(Instance.AVAILABLE_VERSIONS); + ConnectionsManager.getInstance(currentAccount).sendRequest(req1, (response1, error1) -> AndroidUtilities.runOnUIThread(() -> { + if (error1 == null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.w("accept call ok! 
" + response1); + } + privateCall = ((TLRPC.TL_phone_phoneCall) response1).phone_call; + if (privateCall instanceof TLRPC.TL_phoneCallDiscarded) { + onCallUpdated(privateCall); + } + } else { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error on phone.acceptCall: " + error1); + } + callFailed(); + } + }), ConnectionsManager.RequestFlagFailOnServerErrors); + } else { + callFailed(); + } + }); + } + + public void declineIncomingCall(int reason, final Runnable onDone) { + stopRinging(); + callDiscardReason = reason; + if (currentState == STATE_REQUESTING) { + if (delayedStartOutgoingCall != null) { + AndroidUtilities.cancelRunOnUIThread(delayedStartOutgoingCall); + callEnded(); + } else { + dispatchStateChanged(STATE_HANGING_UP); + endCallAfterRequest = true; + AndroidUtilities.runOnUIThread(() -> { + if (currentState == STATE_HANGING_UP) { + callEnded(); + } + }, 5000); + } + return; + } + if (currentState == STATE_HANGING_UP || currentState == STATE_ENDED) { + return; + } + dispatchStateChanged(STATE_HANGING_UP); + if (privateCall == null) { + onDestroyRunnable = onDone; + callEnded(); + if (callReqId != 0) { + ConnectionsManager.getInstance(currentAccount).cancelRequest(callReqId, false); + callReqId = 0; + } + return; + } + TLRPC.TL_phone_discardCall req = new TLRPC.TL_phone_discardCall(); + req.peer = new TLRPC.TL_inputPhoneCall(); + req.peer.access_hash = privateCall.access_hash; + req.peer.id = privateCall.id; + req.duration = (int) (getCallDuration() / 1000); + req.connection_id = tgVoip[CAPTURE_DEVICE_CAMERA] != null ? 
tgVoip[CAPTURE_DEVICE_CAMERA].getPreferredRelayId() : 0; + switch (reason) { + case DISCARD_REASON_DISCONNECT: + req.reason = new TLRPC.TL_phoneCallDiscardReasonDisconnect(); + break; + case DISCARD_REASON_MISSED: + req.reason = new TLRPC.TL_phoneCallDiscardReasonMissed(); + break; + case DISCARD_REASON_LINE_BUSY: + req.reason = new TLRPC.TL_phoneCallDiscardReasonBusy(); + break; + case DISCARD_REASON_HANGUP: + default: + req.reason = new TLRPC.TL_phoneCallDiscardReasonHangup(); + break; + } + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (error != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("error on phone.discardCall: " + error); + } + } else { + if (response instanceof TLRPC.TL_updates) { + TLRPC.TL_updates updates = (TLRPC.TL_updates) response; + MessagesController.getInstance(currentAccount).processUpdates(updates, false); + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("phone.discardCall " + response); + } + } + }, ConnectionsManager.RequestFlagFailOnServerErrors); + onDestroyRunnable = onDone; + callEnded(); + } + + public void declineIncomingCall() { + declineIncomingCall(DISCARD_REASON_HANGUP, null); + } + + private Class getUIActivityClass() { + return LaunchActivity.class; + } + + @TargetApi(Build.VERSION_CODES.O) public CallConnection getConnectionAndStartCall() { if (systemCallConnection == null) { if (BuildVars.LOGS_ENABLED) { @@ -2025,4 +3156,1130 @@ public class VoIPService extends VoIPBaseService { } return systemCallConnection; } + + private void startRinging() { + if (currentState == STATE_WAITING_INCOMING) { + return; + } + if (USE_CONNECTION_SERVICE && systemCallConnection != null) { + systemCallConnection.setRinging(); + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("starting ringing for call " + privateCall.id); + } + dispatchStateChanged(STATE_WAITING_INCOMING); + if (!notificationsDisabled && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + 
showIncomingNotification(ContactsController.formatName(user.first_name, user.last_name), null, user, privateCall.video, 0); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("Showing incoming call notification"); + } + } else { + startRingtoneAndVibration(user.id); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("Starting incall activity for incoming call"); + } + try { + PendingIntent.getActivity(VoIPService.this, 12345, new Intent(VoIPService.this, LaunchActivity.class).setAction("voip"), 0).send(); + } catch (Exception x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error starting incall activity", x); + } + } + } + } + + public void startRingtoneAndVibration() { + if (!startedRinging) { + startRingtoneAndVibration(user.id); + startedRinging = true; + } + } + + private void updateServerConfig() { + final SharedPreferences preferences = MessagesController.getMainSettings(currentAccount); + Instance.setGlobalServerConfig(preferences.getString("voip_server_config", "{}")); + ConnectionsManager.getInstance(currentAccount).sendRequest(new TLRPC.TL_phone_getCallConfig(), (response, error) -> { + if (error == null) { + String data = ((TLRPC.TL_dataJSON) response).data; + Instance.setGlobalServerConfig(data); + preferences.edit().putString("voip_server_config", data).commit(); + } + }); + } + + private void showNotification() { + if (user != null) { + showNotification(ContactsController.formatName(user.first_name, user.last_name), getRoundAvatarBitmap(user)); + } else { + showNotification(chat.title, getRoundAvatarBitmap(chat)); + } + } + + private void onTgVoipPreStop() { + /*if(BuildConfig.DEBUG){ + String debugLog=controller.getDebugLog(); + TLRPC.TL_phone_saveCallDebug req=new TLRPC.TL_phone_saveCallDebug(); + req.debug=new TLRPC.TL_dataJSON(); + req.debug.data=debugLog; + req.peer=new TLRPC.TL_inputPhoneCall(); + req.peer.access_hash=call.access_hash; + req.peer.id=call.id; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, new RequestDelegate(){ + @Override 
+ public void run(TLObject response, TLRPC.TL_error error){ + if (BuildVars.LOGS_ENABLED) { + FileLog.d("Sent debug logs, response=" + response); + } + } + }); + }*/ + } + + private void onTgVoipStop(Instance.FinalState finalState) { + if (user == null) { + return; + } + if (needRateCall || forceRating || finalState.isRatingSuggested) { + startRatingActivity(); + needRateCall = false; + } + if (needSendDebugLog && finalState.debugLog != null) { + TLRPC.TL_phone_saveCallDebug req = new TLRPC.TL_phone_saveCallDebug(); + req.debug = new TLRPC.TL_dataJSON(); + req.debug.data = finalState.debugLog; + req.peer = new TLRPC.TL_inputPhoneCall(); + req.peer.access_hash = privateCall.access_hash; + req.peer.id = privateCall.id; + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("Sent debug logs, response = " + response); + } + }); + needSendDebugLog = false; + } + } + + private void initializeAccountRelatedThings() { + updateServerConfig(); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.appDidLogout); + ConnectionsManager.getInstance(currentAccount).setAppPaused(false, false); + } + + @SuppressLint("InvalidWakeLockTag") + @Override + public void onCreate() { + super.onCreate(); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("=============== VoIPService STARTING ==============="); + } + try { + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != null) { + int outFramesPerBuffer = Integer.parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER)); + Instance.setBufferSize(outFramesPerBuffer); + } else { + Instance.setBufferSize(AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT) / 2); + } + + cpuWakelock = ((PowerManager) 
getSystemService(POWER_SERVICE)).newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip"); + cpuWakelock.acquire(); + + btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null; + + IntentFilter filter = new IntentFilter(); + filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION); + if (!USE_CONNECTION_SERVICE) { + filter.addAction(ACTION_HEADSET_PLUG); + if (btAdapter != null) { + filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED); + filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED); + } + filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED); + filter.addAction(Intent.ACTION_SCREEN_ON); + filter.addAction(Intent.ACTION_SCREEN_OFF); + } + registerReceiver(receiver, filter); + fetchBluetoothDeviceName(); + + am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class)); + + if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) { + try { + MediaRouter mr = (MediaRouter) getSystemService(Context.MEDIA_ROUTER_SERVICE); + if (Build.VERSION.SDK_INT < 24) { + int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); + updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } else { + MediaRouter.RouteInfo ri = mr.getSelectedRoute(MediaRouter.ROUTE_TYPE_LIVE_AUDIO); + if (ri.getDeviceType() == MediaRouter.RouteInfo.DEVICE_TYPE_BLUETOOTH) { + int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET); + updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED); + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } else { + updateBluetoothHeadsetState(false); + } + } + } catch (Throwable e) { + FileLog.e(e); + } + } + } catch (Exception x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("error initializing voip controller", x); + } + callFailed(); + } + if 
(callIShouldHavePutIntoIntent != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + NotificationsController.checkOtherNotificationsChannel(); + Notification.Builder bldr = new Notification.Builder(this, NotificationsController.OTHER_NOTIFICATIONS_CHANNEL) + .setContentTitle(LocaleController.getString("VoipOutgoingCall", R.string.VoipOutgoingCall)) + .setShowWhen(false); + if (groupCall != null) { + bldr.setSmallIcon(isMicMute() ? R.drawable.voicechat_muted : R.drawable.voicechat_active); + } else { + bldr.setSmallIcon(R.drawable.notification); + } + startForeground(ID_ONGOING_CALL_NOTIFICATION, bldr.build()); + } + } + + private void loadResources() { + if (Build.VERSION.SDK_INT >= 21) { + WebRtcAudioTrack.setAudioTrackUsageAttribute(AudioAttributes.USAGE_VOICE_COMMUNICATION); + } + Utilities.globalQueue.postRunnable(() -> { + soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0); + spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1); + spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1); + spFailedID = soundPool.load(this, R.raw.voip_failed, 1); + spEndId = soundPool.load(this, R.raw.voip_end, 1); + spBusyId = soundPool.load(this, R.raw.voip_busy, 1); + spVoiceChatEndId = soundPool.load(this, R.raw.voicechat_leave, 1); + spVoiceChatStartId = soundPool.load(this, R.raw.voicechat_join, 1); + spVoiceChatConnecting = soundPool.load(this, R.raw.voicechat_connecting, 1); + spAllowTalkId = soundPool.load(this, R.raw.voip_onallowtalk, 1); + spStartRecordId = soundPool.load(this, R.raw.voip_recordstart, 1); + }); + } + + private void dispatchStateChanged(int state) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("== Call " + getCallID() + " state changed to " + state + " =="); + } + currentState = state; + if (USE_CONNECTION_SERVICE && state == STATE_ESTABLISHED /*&& !wasEstablished*/ && systemCallConnection != null) { + systemCallConnection.setActive(); + } + for (int a = 0; a < stateListeners.size(); a++) { + StateListener l = 
stateListeners.get(a); + l.onStateChanged(state); + } + } + + private void updateTrafficStats(NativeInstance instance, Instance.TrafficStats trafficStats) { + if (trafficStats == null) { + trafficStats = instance.getTrafficStats(); + } + final long wifiSentDiff = trafficStats.bytesSentWifi - (prevTrafficStats != null ? prevTrafficStats.bytesSentWifi : 0); + final long wifiRecvdDiff = trafficStats.bytesReceivedWifi - (prevTrafficStats != null ? prevTrafficStats.bytesReceivedWifi : 0); + final long mobileSentDiff = trafficStats.bytesSentMobile - (prevTrafficStats != null ? prevTrafficStats.bytesSentMobile : 0); + final long mobileRecvdDiff = trafficStats.bytesReceivedMobile - (prevTrafficStats != null ? prevTrafficStats.bytesReceivedMobile : 0); + prevTrafficStats = trafficStats; + if (wifiSentDiff > 0) { + StatsController.getInstance(currentAccount).incrementSentBytesCount(StatsController.TYPE_WIFI, StatsController.TYPE_CALLS, wifiSentDiff); + } + if (wifiRecvdDiff > 0) { + StatsController.getInstance(currentAccount).incrementReceivedBytesCount(StatsController.TYPE_WIFI, StatsController.TYPE_CALLS, wifiRecvdDiff); + } + if (mobileSentDiff > 0) { + StatsController.getInstance(currentAccount).incrementSentBytesCount(lastNetInfo != null && lastNetInfo.isRoaming() ? StatsController.TYPE_ROAMING : StatsController.TYPE_MOBILE, StatsController.TYPE_CALLS, mobileSentDiff); + } + if (mobileRecvdDiff > 0) { + StatsController.getInstance(currentAccount).incrementReceivedBytesCount(lastNetInfo != null && lastNetInfo.isRoaming() ? 
StatsController.TYPE_ROAMING : StatsController.TYPE_MOBILE, StatsController.TYPE_CALLS, mobileRecvdDiff); + } + } + + @SuppressLint("InvalidWakeLockTag") + private void configureDeviceForCall() { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("configureDeviceForCall, route to set = " + audioRouteToSet); + } + needPlayEndSound = true; + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (!USE_CONNECTION_SERVICE) { + Utilities.globalQueue.postRunnable(() -> { + try { + am.setMode(AudioManager.MODE_IN_COMMUNICATION); + } catch (Exception e) { + FileLog.e(e); + } + AndroidUtilities.runOnUIThread(() -> { + am.requestAudioFocus(VoIPService.this, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN); + if (isBluetoothHeadsetConnected() && hasEarpiece()) { + switch (audioRouteToSet) { + case AUDIO_ROUTE_BLUETOOTH: + if (!bluetoothScoActive) { + needSwitchToBluetoothAfterScoActivates = true; + try { + am.startBluetoothSco(); + } catch (Throwable e) { + FileLog.e(e); + } + } else { + am.setBluetoothScoOn(true); + am.setSpeakerphoneOn(false); + } + break; + case AUDIO_ROUTE_EARPIECE: + am.setBluetoothScoOn(false); + am.setSpeakerphoneOn(false); + break; + case AUDIO_ROUTE_SPEAKER: + am.setBluetoothScoOn(false); + am.setSpeakerphoneOn(true); + break; + } + } else if (isBluetoothHeadsetConnected()) { + am.setBluetoothScoOn(speakerphoneStateToSet); + } else { + am.setSpeakerphoneOn(speakerphoneStateToSet); + } + updateOutputGainControlState(); + audioConfigured = true; + }); + }); + } + + SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE); + Sensor proximity = sm.getDefaultSensor(Sensor.TYPE_PROXIMITY); + try { + if (proximity != null) { + proximityWakelock = ((PowerManager) getSystemService(Context.POWER_SERVICE)).newWakeLock(PROXIMITY_SCREEN_OFF_WAKE_LOCK, "telegram-voip-prx"); + sm.registerListener(this, proximity, SensorManager.SENSOR_DELAY_NORMAL); + } + } catch (Exception x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error 
initializing proximity sensor", x); + } + } + } + + private void fetchBluetoothDeviceName() { + if (fetchingBluetoothDeviceName) { + return; + } + try { + currentBluetoothDeviceName = null; + fetchingBluetoothDeviceName = true; + BluetoothAdapter.getDefaultAdapter().getProfileProxy(this, serviceListener, BluetoothProfile.HEADSET); + } catch (Throwable e) { + FileLog.e(e); + } + } + + @SuppressLint("NewApi") + @Override + public void onSensorChanged(SensorEvent event) { + if (unmutedByHold || remoteVideoState == Instance.VIDEO_STATE_ACTIVE || videoState[CAPTURE_DEVICE_CAMERA] == Instance.VIDEO_STATE_ACTIVE) { + return; + } + if (event.sensor.getType() == Sensor.TYPE_PROXIMITY) { + AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (isHeadsetPlugged || am.isSpeakerphoneOn() || (isBluetoothHeadsetConnected() && am.isBluetoothScoOn())) { + return; + } + boolean newIsNear = event.values[0] < Math.min(event.sensor.getMaximumRange(), 3); + checkIsNear(newIsNear); + } + } + + private void checkIsNear() { + if (remoteVideoState == Instance.VIDEO_STATE_ACTIVE || videoState[CAPTURE_DEVICE_CAMERA] == Instance.VIDEO_STATE_ACTIVE) { + checkIsNear(false); + } + } + + private void checkIsNear(boolean newIsNear) { + if (newIsNear != isProximityNear) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("proximity " + newIsNear); + } + isProximityNear = newIsNear; + try { + if (isProximityNear) { + proximityWakelock.acquire(); + } else { + proximityWakelock.release(1); // this is non-public API before L + } + } catch (Exception x) { + FileLog.e(x); + } + } + } + + @Override + public void onAccuracyChanged(Sensor sensor, int accuracy) { + + } + + public boolean isBluetoothHeadsetConnected() { + if (USE_CONNECTION_SERVICE && systemCallConnection != null && systemCallConnection.getCallAudioState() != null) { + return (systemCallConnection.getCallAudioState().getSupportedRouteMask() & CallAudioState.ROUTE_BLUETOOTH) != 0; + } + return isBtHeadsetConnected; + } + + public 
void onAudioFocusChange(int focusChange) { + if (focusChange == AudioManager.AUDIOFOCUS_GAIN) { + hasAudioFocus = true; + } else { + hasAudioFocus = false; + } + } + + private void updateBluetoothHeadsetState(boolean connected) { + if (connected == isBtHeadsetConnected) { + return; + } + if (BuildVars.LOGS_ENABLED) { + FileLog.d("updateBluetoothHeadsetState: " + connected); + } + isBtHeadsetConnected = connected; + final AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + if (connected && !isRinging() && currentState != 0) { + if (bluetoothScoActive) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("SCO already active, setting audio routing"); + } + am.setSpeakerphoneOn(false); + am.setBluetoothScoOn(true); + } else { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("startBluetoothSco"); + } + needSwitchToBluetoothAfterScoActivates = true; + AndroidUtilities.runOnUIThread(() -> { + try { + am.startBluetoothSco(); + } catch (Throwable ignore) { + + } + }, 500); + } + } else { + bluetoothScoActive = false; + bluetoothScoConnecting = false; + } + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } + + public String getLastError() { + return lastError; + } + + public int getCallState() { + return currentState; + } + + public TLRPC.InputPeer getGroupCallPeer() { + return groupCallPeer; + } + + private void updateNetworkType() { + if (tgVoip[CAPTURE_DEVICE_CAMERA] != null) { + if (tgVoip[CAPTURE_DEVICE_CAMERA].isGroup()) { + + } else { + tgVoip[CAPTURE_DEVICE_CAMERA].setNetworkType(getNetworkType()); + } + } else { + lastNetInfo = getActiveNetworkInfo(); + } + } + + private int getNetworkType() { + final NetworkInfo info = lastNetInfo = getActiveNetworkInfo(); + int type = Instance.NET_TYPE_UNKNOWN; + if (info != null) { + switch (info.getType()) { + case ConnectivityManager.TYPE_MOBILE: + switch (info.getSubtype()) { + case TelephonyManager.NETWORK_TYPE_GPRS: + type = Instance.NET_TYPE_GPRS; + break; + case 
TelephonyManager.NETWORK_TYPE_EDGE: + case TelephonyManager.NETWORK_TYPE_1xRTT: + type = Instance.NET_TYPE_EDGE; + break; + case TelephonyManager.NETWORK_TYPE_UMTS: + case TelephonyManager.NETWORK_TYPE_EVDO_0: + type = Instance.NET_TYPE_3G; + break; + case TelephonyManager.NETWORK_TYPE_HSDPA: + case TelephonyManager.NETWORK_TYPE_HSPA: + case TelephonyManager.NETWORK_TYPE_HSPAP: + case TelephonyManager.NETWORK_TYPE_HSUPA: + case TelephonyManager.NETWORK_TYPE_EVDO_A: + case TelephonyManager.NETWORK_TYPE_EVDO_B: + type = Instance.NET_TYPE_HSPA; + break; + case TelephonyManager.NETWORK_TYPE_LTE: + type = Instance.NET_TYPE_LTE; + break; + default: + type = Instance.NET_TYPE_OTHER_MOBILE; + break; + } + break; + case ConnectivityManager.TYPE_WIFI: + type = Instance.NET_TYPE_WIFI; + break; + case ConnectivityManager.TYPE_ETHERNET: + type = Instance.NET_TYPE_ETHERNET; + break; + } + } + return type; + } + + private NetworkInfo getActiveNetworkInfo() { + return ((ConnectivityManager) getSystemService(CONNECTIVITY_SERVICE)).getActiveNetworkInfo(); + } + + private void callFailed() { + callFailed(tgVoip[CAPTURE_DEVICE_CAMERA] != null ? 
tgVoip[CAPTURE_DEVICE_CAMERA].getLastError() : Instance.ERROR_UNKNOWN); + } + + private Bitmap getRoundAvatarBitmap(TLObject userOrChat) { + Bitmap bitmap = null; + try { + if (userOrChat instanceof TLRPC.User) { + TLRPC.User user = (TLRPC.User) userOrChat; + if (user.photo != null && user.photo.photo_small != null) { + BitmapDrawable img = ImageLoader.getInstance().getImageFromMemory(user.photo.photo_small, null, "50_50"); + if (img != null) { + bitmap = img.getBitmap().copy(Bitmap.Config.ARGB_8888, true); + } else { + try { + BitmapFactory.Options opts = new BitmapFactory.Options(); + opts.inMutable = true; + bitmap = BitmapFactory.decodeFile(FileLoader.getPathToAttach(user.photo.photo_small, true).toString(), opts); + } catch (Throwable e) { + FileLog.e(e); + } + } + } + } else { + TLRPC.Chat chat = (TLRPC.Chat) userOrChat; + if (chat.photo != null && chat.photo.photo_small != null) { + BitmapDrawable img = ImageLoader.getInstance().getImageFromMemory(chat.photo.photo_small, null, "50_50"); + if (img != null) { + bitmap = img.getBitmap().copy(Bitmap.Config.ARGB_8888, true); + } else { + try { + BitmapFactory.Options opts = new BitmapFactory.Options(); + opts.inMutable = true; + bitmap = BitmapFactory.decodeFile(FileLoader.getPathToAttach(chat.photo.photo_small, true).toString(), opts); + } catch (Throwable e) { + FileLog.e(e); + } + } + } + } + } catch (Throwable e) { + FileLog.e(e); + } + if (bitmap == null) { + Theme.createDialogsResources(this); + AvatarDrawable placeholder; + if (userOrChat instanceof TLRPC.User) { + placeholder = new AvatarDrawable((TLRPC.User) userOrChat); + } else { + placeholder = new AvatarDrawable((TLRPC.Chat) userOrChat); + } + bitmap = Bitmap.createBitmap(AndroidUtilities.dp(42), AndroidUtilities.dp(42), Bitmap.Config.ARGB_8888); + placeholder.setBounds(0, 0, bitmap.getWidth(), bitmap.getHeight()); + placeholder.draw(new Canvas(bitmap)); + } + + Canvas canvas = new Canvas(bitmap); + Path circlePath = new Path(); + 
circlePath.addCircle(bitmap.getWidth() / 2, bitmap.getHeight() / 2, bitmap.getWidth() / 2, Path.Direction.CW); + circlePath.toggleInverseFillType(); + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); + canvas.drawPath(circlePath, paint); + return bitmap; + } + + private void showIncomingNotification(String name, CharSequence subText, TLObject userOrChat, boolean video, int additionalMemberCount) { + Intent intent = new Intent(this, LaunchActivity.class); + intent.setAction("voip"); + Notification.Builder builder = new Notification.Builder(this) + .setContentTitle(video ? LocaleController.getString("VoipInVideoCallBranding", R.string.VoipInVideoCallBranding) : LocaleController.getString("VoipInCallBranding", R.string.VoipInCallBranding)) + .setContentText(name) + .setSmallIcon(R.drawable.notification) + .setSubText(subText) + .setContentIntent(PendingIntent.getActivity(this, 0, intent, 0)); + Uri soundProviderUri = Uri.parse("content://" + BuildConfig.APPLICATION_ID + ".call_sound_provider/start_ringing"); + if (Build.VERSION.SDK_INT >= 26) { + SharedPreferences nprefs = MessagesController.getGlobalNotificationsSettings(); + int chanIndex = nprefs.getInt("calls_notification_channel", 0); + NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); + NotificationChannel oldChannel = nm.getNotificationChannel("incoming_calls2" + chanIndex); + if (oldChannel != null) { + nm.deleteNotificationChannel(oldChannel.getId()); + } + NotificationChannel existingChannel = nm.getNotificationChannel("incoming_calls3" + chanIndex); + boolean needCreate = true; + if (existingChannel != null) { + if (existingChannel.getImportance() < NotificationManager.IMPORTANCE_HIGH || !soundProviderUri.equals(existingChannel.getSound()) || existingChannel.getVibrationPattern() != null || existingChannel.shouldVibrate()) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("User messed up the notification 
channel; deleting it and creating a proper one"); + } + nm.deleteNotificationChannel("incoming_calls3" + chanIndex); + chanIndex++; + nprefs.edit().putInt("calls_notification_channel", chanIndex).commit(); + } else { + needCreate = false; + } + } + if (needCreate) { + AudioAttributes attrs = new AudioAttributes.Builder() + .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION) + .setLegacyStreamType(AudioManager.STREAM_RING) + .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION) + .build(); + NotificationChannel chan = new NotificationChannel("incoming_calls3" + chanIndex, LocaleController.getString("IncomingCalls", R.string.IncomingCalls), NotificationManager.IMPORTANCE_HIGH); + chan.setSound(soundProviderUri, attrs); + chan.enableVibration(false); + chan.enableLights(false); + chan.setBypassDnd(true); + try { + nm.createNotificationChannel(chan); + } catch (Exception e) { + FileLog.e(e); + this.stopSelf(); + return; + } + } + builder.setChannelId("incoming_calls3" + chanIndex); + } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + builder.setSound(soundProviderUri, AudioManager.STREAM_RING); + } + Intent endIntent = new Intent(this, VoIPActionsReceiver.class); + endIntent.setAction(getPackageName() + ".DECLINE_CALL"); + endIntent.putExtra("call_id", getCallID()); + CharSequence endTitle = LocaleController.getString("VoipDeclineCall", R.string.VoipDeclineCall); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + endTitle = new SpannableString(endTitle); + ((SpannableString) endTitle).setSpan(new ForegroundColorSpan(0xFFF44336), 0, endTitle.length(), 0); + } + PendingIntent endPendingIntent = PendingIntent.getBroadcast(this, 0, endIntent, PendingIntent.FLAG_CANCEL_CURRENT); + builder.addAction(R.drawable.ic_call_end_white_24dp, endTitle, endPendingIntent); + Intent answerIntent = new Intent(this, VoIPActionsReceiver.class); + answerIntent.setAction(getPackageName() + ".ANSWER_CALL"); + answerIntent.putExtra("call_id", getCallID()); + 
CharSequence answerTitle = LocaleController.getString("VoipAnswerCall", R.string.VoipAnswerCall); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + answerTitle = new SpannableString(answerTitle); + ((SpannableString) answerTitle).setSpan(new ForegroundColorSpan(0xFF00AA00), 0, answerTitle.length(), 0); + } + PendingIntent answerPendingIntent = PendingIntent.getBroadcast(this, 0, answerIntent, PendingIntent.FLAG_CANCEL_CURRENT); + builder.addAction(R.drawable.ic_call, answerTitle, answerPendingIntent); + builder.setPriority(Notification.PRIORITY_MAX); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { + builder.setShowWhen(false); + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + builder.setColor(0xff2ca5e0); + builder.setVibrate(new long[0]); + builder.setCategory(Notification.CATEGORY_CALL); + builder.setFullScreenIntent(PendingIntent.getActivity(this, 0, intent, 0), true); + if (userOrChat instanceof TLRPC.User) { + TLRPC.User user = (TLRPC.User) userOrChat; + if (!TextUtils.isEmpty(user.phone)) { + builder.addPerson("tel:" + user.phone); + } + } + } + Notification incomingNotification = builder.getNotification(); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + RemoteViews customView = new RemoteViews(getPackageName(), LocaleController.isRTL ? R.layout.call_notification_rtl : R.layout.call_notification); + customView.setTextViewText(R.id.name, name); + boolean subtitleVisible = true; + if (TextUtils.isEmpty(subText)) { + customView.setViewVisibility(R.id.subtitle, View.GONE); + if (UserConfig.getActivatedAccountsCount() > 1) { + TLRPC.User self = UserConfig.getInstance(currentAccount).getCurrentUser(); + customView.setTextViewText(R.id.title, video ? 
LocaleController.formatString("VoipInVideoCallBrandingWithName", R.string.VoipInVideoCallBrandingWithName, ContactsController.formatName(self.first_name, self.last_name)) : LocaleController.formatString("VoipInCallBrandingWithName", R.string.VoipInCallBrandingWithName, ContactsController.formatName(self.first_name, self.last_name))); + } else { + customView.setTextViewText(R.id.title, video ? LocaleController.getString("VoipInVideoCallBranding", R.string.VoipInVideoCallBranding) : LocaleController.getString("VoipInCallBranding", R.string.VoipInCallBranding)); + } + } else { + if (UserConfig.getActivatedAccountsCount() > 1) { + TLRPC.User self = UserConfig.getInstance(currentAccount).getCurrentUser(); + customView.setTextViewText(R.id.subtitle, LocaleController.formatString("VoipAnsweringAsAccount", R.string.VoipAnsweringAsAccount, ContactsController.formatName(self.first_name, self.last_name))); + } else { + customView.setViewVisibility(R.id.subtitle, View.GONE); + } + customView.setTextViewText(R.id.title, subText); + } + Bitmap avatar = getRoundAvatarBitmap(userOrChat); + customView.setTextViewText(R.id.answer_text, LocaleController.getString("VoipAnswerCall", R.string.VoipAnswerCall)); + customView.setTextViewText(R.id.decline_text, LocaleController.getString("VoipDeclineCall", R.string.VoipDeclineCall)); + customView.setImageViewBitmap(R.id.photo, avatar); + customView.setOnClickPendingIntent(R.id.answer_btn, answerPendingIntent); + customView.setOnClickPendingIntent(R.id.decline_btn, endPendingIntent); + builder.setLargeIcon(avatar); + + incomingNotification.headsUpContentView = incomingNotification.bigContentView = customView; + } + startForeground(ID_INCOMING_CALL_NOTIFICATION, incomingNotification); + startRingtoneAndVibration(); + } + + private void callFailed(String error) { + if (privateCall != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("Discarding failed call"); + } + TLRPC.TL_phone_discardCall req = new TLRPC.TL_phone_discardCall(); + req.peer 
= new TLRPC.TL_inputPhoneCall(); + req.peer.access_hash = privateCall.access_hash; + req.peer.id = privateCall.id; + req.duration = (int) (getCallDuration() / 1000); + req.connection_id = tgVoip[CAPTURE_DEVICE_CAMERA] != null ? tgVoip[CAPTURE_DEVICE_CAMERA].getPreferredRelayId() : 0; + req.reason = new TLRPC.TL_phoneCallDiscardReasonDisconnect(); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error1) -> { + if (error1 != null) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("error on phone.discardCall: " + error1); + } + } else { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("phone.discardCall " + response); + } + } + }); + } + try { + throw new Exception("Call " + getCallID() + " failed with error: " + error); + } catch (Exception x) { + FileLog.e(x); + } + lastError = error; + AndroidUtilities.runOnUIThread(() -> dispatchStateChanged(STATE_FAILED)); + if (TextUtils.equals(error, Instance.ERROR_LOCALIZED) && soundPool != null) { + playingSound = true; + Utilities.globalQueue.postRunnable(() -> soundPool.play(spFailedID, 1, 1, 0, 0, 1)); + AndroidUtilities.runOnUIThread(afterSoundRunnable, 1000); + } + if (USE_CONNECTION_SERVICE && systemCallConnection != null) { + systemCallConnection.setDisconnected(new DisconnectCause(DisconnectCause.ERROR)); + systemCallConnection.destroy(); + systemCallConnection = null; + } + stopSelf(); + } + + void callFailedFromConnectionService() { + if (isOutgoing) { + callFailed(Instance.ERROR_CONNECTION_SERVICE); + } else { + hangUp(); + } + } + + @Override + public void onConnectionStateChanged(int newState, boolean inTransition) { + AndroidUtilities.runOnUIThread(() -> { + if (newState == STATE_ESTABLISHED) { + if (callStartTime == 0) { + callStartTime = SystemClock.elapsedRealtime(); + } + //peerCapabilities = tgVoip.getPeerCapabilities(); + } + if (newState == STATE_FAILED) { + callFailed(); + return; + } + if (newState == STATE_ESTABLISHED) { + if (connectingSoundRunnable != null) { + 
AndroidUtilities.cancelRunOnUIThread(connectingSoundRunnable); + connectingSoundRunnable = null; + } + Utilities.globalQueue.postRunnable(() -> { + if (spPlayId != 0) { + soundPool.stop(spPlayId); + spPlayId = 0; + } + }); + if (groupCall == null && !wasEstablished) { + wasEstablished = true; + if (!isProximityNear && !privateCall.video) { + Vibrator vibrator = (Vibrator) getSystemService(VIBRATOR_SERVICE); + if (vibrator.hasVibrator()) { + vibrator.vibrate(100); + } + } + AndroidUtilities.runOnUIThread(new Runnable() { + @Override + public void run() { + if (tgVoip[CAPTURE_DEVICE_CAMERA] != null) { + StatsController.getInstance(currentAccount).incrementTotalCallsTime(getStatsNetworkType(), 5); + AndroidUtilities.runOnUIThread(this, 5000); + } + } + }, 5000); + if (isOutgoing) { + StatsController.getInstance(currentAccount).incrementSentItemsCount(getStatsNetworkType(), StatsController.TYPE_CALLS, 1); + } else { + StatsController.getInstance(currentAccount).incrementReceivedItemsCount(getStatsNetworkType(), StatsController.TYPE_CALLS, 1); + } + } + } + if (newState == STATE_RECONNECTING) { + Utilities.globalQueue.postRunnable(() -> { + if (spPlayId != 0) { + soundPool.stop(spPlayId); + } + spPlayId = soundPool.play(groupCall != null ? 
spVoiceChatConnecting : spConnectingId, 1, 1, 0, -1, 1); + }); + } + dispatchStateChanged(newState); + }); + } + + public void playStartRecordSound() { + Utilities.globalQueue.postRunnable(() -> soundPool.play(spStartRecordId, 0.5f, 0.5f, 0, 0, 1)); + } + + public void playAllowTalkSound() { + Utilities.globalQueue.postRunnable(() -> soundPool.play(spAllowTalkId, 0.5f, 0.5f, 0, 0, 1)); + } + + @Override + public void onSignalBarCountChanged(int newCount) { + AndroidUtilities.runOnUIThread(() -> { + signalBarCount = newCount; + for (int a = 0; a < stateListeners.size(); a++) { + StateListener l = stateListeners.get(a); + l.onSignalBarsCountChanged(newCount); + } + }); + } + + public boolean isBluetoothOn() { + final AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + return am.isBluetoothScoOn(); + } + + public boolean isBluetoothWillOn() { + return needSwitchToBluetoothAfterScoActivates; + } + + public boolean isHeadsetPlugged() { + return isHeadsetPlugged; + } + + private void callEnded() { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("Call " + getCallID() + " ended"); + } + if (groupCall != null && (!playedConnectedSound || onDestroyRunnable != null)) { + needPlayEndSound = false; + } + AndroidUtilities.runOnUIThread(() -> dispatchStateChanged(STATE_ENDED)); + int delay = 700; + Utilities.globalQueue.postRunnable(() -> { + if (spPlayId != 0) { + soundPool.stop(spPlayId); + spPlayId = 0; + } + }); + + if (connectingSoundRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(connectingSoundRunnable); + connectingSoundRunnable = null; + } + if (needPlayEndSound) { + playingSound = true; + if (groupCall == null) { + Utilities.globalQueue.postRunnable(() -> soundPool.play(spEndId, 1, 1, 0, 0, 1)); + } else { + Utilities.globalQueue.postRunnable(() -> soundPool.play(spVoiceChatEndId, 1.0f, 1.0f, 0, 0, 1), 100); + delay = 500; + } + AndroidUtilities.runOnUIThread(afterSoundRunnable, delay); + } + if (timeoutRunnable != null) { + 
AndroidUtilities.cancelRunOnUIThread(timeoutRunnable); + timeoutRunnable = null; + } + endConnectionServiceCall(needPlayEndSound ? delay : 0); + stopSelf(); + } + + private void endConnectionServiceCall(long delay) { + if (USE_CONNECTION_SERVICE) { + Runnable r = () -> { + if (systemCallConnection != null) { + switch (callDiscardReason) { + case DISCARD_REASON_HANGUP: + systemCallConnection.setDisconnected(new DisconnectCause(isOutgoing ? DisconnectCause.LOCAL : DisconnectCause.REJECTED)); + break; + case DISCARD_REASON_DISCONNECT: + systemCallConnection.setDisconnected(new DisconnectCause(DisconnectCause.ERROR)); + break; + case DISCARD_REASON_LINE_BUSY: + systemCallConnection.setDisconnected(new DisconnectCause(DisconnectCause.BUSY)); + break; + case DISCARD_REASON_MISSED: + systemCallConnection.setDisconnected(new DisconnectCause(isOutgoing ? DisconnectCause.CANCELED : DisconnectCause.MISSED)); + break; + default: + systemCallConnection.setDisconnected(new DisconnectCause(DisconnectCause.REMOTE)); + break; + } + systemCallConnection.destroy(); + systemCallConnection = null; + } + }; + if (delay > 0) { + AndroidUtilities.runOnUIThread(r, delay); + } else { + r.run(); + } + } + } + + public boolean isOutgoing() { + return isOutgoing; + } + + public void handleNotificationAction(Intent intent) { + if ((getPackageName() + ".END_CALL").equals(intent.getAction())) { + stopForeground(true); + hangUp(); + } else if ((getPackageName() + ".DECLINE_CALL").equals(intent.getAction())) { + stopForeground(true); + declineIncomingCall(DISCARD_REASON_LINE_BUSY, null); + } else if ((getPackageName() + ".ANSWER_CALL").equals(intent.getAction())) { + acceptIncomingCallFromNotification(); + } + } + + private void acceptIncomingCallFromNotification() { + showNotification(); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && Build.VERSION.SDK_INT < Build.VERSION_CODES.R && (checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED || 
privateCall.video && checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED)) { + try { + //intent.addFlags(Intent.FLAG_ACTIVITY_NEW_DOCUMENT); + PendingIntent.getActivity(VoIPService.this, 0, new Intent(VoIPService.this, VoIPPermissionActivity.class).addFlags(Intent.FLAG_ACTIVITY_NEW_TASK), PendingIntent.FLAG_ONE_SHOT).send(); + } catch (Exception x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error starting permission activity", x); + } + } + return; + } + acceptIncomingCall(); + try { + PendingIntent.getActivity(VoIPService.this, 0, new Intent(VoIPService.this, getUIActivityClass()).setAction("voip"), 0).send(); + } catch (Exception x) { + if (BuildVars.LOGS_ENABLED) { + FileLog.e("Error starting incall activity", x); + } + } + } + + public void updateOutputGainControlState() { + if (tgVoip[CAPTURE_DEVICE_CAMERA] != null) { + if (!USE_CONNECTION_SERVICE) { + final AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); + tgVoip[CAPTURE_DEVICE_CAMERA].setAudioOutputGainControlEnabled(hasEarpiece() && !am.isSpeakerphoneOn() && !am.isBluetoothScoOn() && !isHeadsetPlugged); + tgVoip[CAPTURE_DEVICE_CAMERA].setEchoCancellationStrength(isHeadsetPlugged || (hasEarpiece() && !am.isSpeakerphoneOn() && !am.isBluetoothScoOn() && !isHeadsetPlugged) ? 0 : 1); + } else { + final boolean isEarpiece = systemCallConnection.getCallAudioState().getRoute() == CallAudioState.ROUTE_EARPIECE; + tgVoip[CAPTURE_DEVICE_CAMERA].setAudioOutputGainControlEnabled(isEarpiece); + tgVoip[CAPTURE_DEVICE_CAMERA].setEchoCancellationStrength(isEarpiece ? 0 : 1); + } + } + } + + public int getAccount() { + return currentAccount; + } + + @Override + public void didReceivedNotification(int id, int account, Object... 
args) { + if (id == NotificationCenter.appDidLogout) { + callEnded(); + } + } + + public static boolean isAnyKindOfCallActive() { + if (VoIPService.getSharedInstance() != null) { + return VoIPService.getSharedInstance().getCallState() != VoIPService.STATE_WAITING_INCOMING; + } + return false; + } + + private boolean isFinished() { + return currentState == STATE_ENDED || currentState == STATE_FAILED; + } + + public int getRemoteAudioState() { + return remoteAudioState; + } + + public int getRemoteVideoState() { + return remoteVideoState; + } + + @TargetApi(Build.VERSION_CODES.O) + private PhoneAccountHandle addAccountToTelecomManager() { + TelecomManager tm = (TelecomManager) getSystemService(TELECOM_SERVICE); + TLRPC.User self = UserConfig.getInstance(currentAccount).getCurrentUser(); + PhoneAccountHandle handle = new PhoneAccountHandle(new ComponentName(this, TelegramConnectionService.class), "" + self.id); + PhoneAccount account = new PhoneAccount.Builder(handle, ContactsController.formatName(self.first_name, self.last_name)) + .setCapabilities(PhoneAccount.CAPABILITY_SELF_MANAGED) + .setIcon(Icon.createWithResource(this, R.drawable.ic_launcher_dr)) + .setHighlightColor(0xff2ca5e0) + .addSupportedUriScheme("sip") + .build(); + tm.registerPhoneAccount(account); + return handle; + } + + private static boolean isDeviceCompatibleWithConnectionServiceAPI() { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { + return false; + } + // some non-Google devices don't implement the ConnectionService API correctly so, sadly, + // we'll have to whitelist only a handful of known-compatible devices for now + return false;/*"angler".equals(Build.PRODUCT) // Nexus 6P + || "bullhead".equals(Build.PRODUCT) // Nexus 5X + || "sailfish".equals(Build.PRODUCT) // Pixel + || "marlin".equals(Build.PRODUCT) // Pixel XL + || "walleye".equals(Build.PRODUCT) // Pixel 2 + || "taimen".equals(Build.PRODUCT) // Pixel 2 XL + || "blueline".equals(Build.PRODUCT) // Pixel 3 + || 
"crosshatch".equals(Build.PRODUCT) // Pixel 3 XL + || MessagesController.getGlobalMainSettings().getBoolean("dbg_force_connection_service", false);*/ + } + + public interface StateListener { + default void onStateChanged(int state) { + + } + + default void onSignalBarsCountChanged(int count) { + + } + + default void onAudioSettingsChanged() { + + } + + default void onMediaStateUpdated(int audioState, int videoState) { + + } + + default void onCameraSwitch(boolean isFrontFace) { + + } + + default void onVideoAvailableChange(boolean isAvailable) { + + } + + default void onScreenOnChange(boolean screenOn) { + + } + } + + public class CallConnection extends Connection { + public CallConnection() { + setConnectionProperties(PROPERTY_SELF_MANAGED); + setAudioModeIsVoip(true); + } + + @Override + public void onCallAudioStateChanged(CallAudioState state) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("ConnectionService call audio state changed: " + state); + } + for (StateListener l : stateListeners) { + l.onAudioSettingsChanged(); + } + } + + @Override + public void onDisconnect() { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("ConnectionService onDisconnect"); + } + setDisconnected(new DisconnectCause(DisconnectCause.LOCAL)); + destroy(); + systemCallConnection = null; + hangUp(); + } + + @Override + public void onAnswer() { + acceptIncomingCallFromNotification(); + } + + @Override + public void onReject() { + needPlayEndSound = false; + declineIncomingCall(DISCARD_REASON_HANGUP, null); + } + + @Override + public void onShowIncomingCallUi() { + startRinging(); + } + + @Override + public void onStateChanged(int state) { + super.onStateChanged(state); + if (BuildVars.LOGS_ENABLED) { + FileLog.d("ConnectionService onStateChanged " + stateToString(state)); + } + if (state == Connection.STATE_ACTIVE) { + ContactsController.getInstance(currentAccount).deleteConnectionServiceContact(); + didDeleteConnectionServiceContact = true; + } + } + + @Override + public void 
onCallEvent(String event, Bundle extras) { + super.onCallEvent(event, extras); + if (BuildVars.LOGS_ENABLED) + FileLog.d("ConnectionService onCallEvent " + event); + } + + //undocumented API + public void onSilence() { + if (BuildVars.LOGS_ENABLED) { + FileLog.d("onSlience"); + } + stopRinging(); + } + } + + public static class SharedUIParams { + public boolean tapToVideoTooltipWasShowed; + public boolean cameraAlertWasShowed; + public boolean wasVideoCall; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java b/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java index 316c2b480..2c3b4505f 100644 --- a/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java +++ b/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java @@ -12,7 +12,6 @@ import android.graphics.drawable.BitmapDrawable; import android.os.Build; import android.text.TextUtils; -import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; import org.telegram.messenger.Utilities; @@ -65,7 +64,7 @@ public class TLRPC { public static final int MESSAGE_FLAG_HAS_BOT_ID = 0x00000800; public static final int MESSAGE_FLAG_EDITED = 0x00008000; - public static final int LAYER = 127; + public static final int LAYER = 130; public static class TL_stats_megagroupStats extends TLObject { public static int constructor = 0xef7ff916; @@ -286,6 +285,34 @@ public class TLRPC { } } + public static class TL_stickers_suggestedShortName extends TLObject { + public static int constructor = 0x85fea03f; + + public String short_name; + + public static TL_stickers_suggestedShortName TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_stickers_suggestedShortName.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_stickers_suggestedShortName", constructor)); + } else { + return null; + } + } + TL_stickers_suggestedShortName result = 
new TL_stickers_suggestedShortName(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + short_name = stream.readString(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(short_name); + } + } + public static abstract class DraftMessage extends TLObject { public int flags; @@ -405,11 +432,15 @@ public class TLRPC { public FileLocation photo_big; public byte[] stripped_thumb; public int dc_id; + public long photo_id; public BitmapDrawable strippedBitmap; public static ChatPhoto TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { ChatPhoto result = null; switch (constructor) { + case 0x1c6e1c11: + result = new TL_chatPhoto(); + break; case 0x475cdbd5: result = new TL_chatPhoto_layer115(); break; @@ -423,7 +454,7 @@ public class TLRPC { result = new TL_chatPhoto_layer126(); break; case 0x4790ee05: - result = new TL_chatPhoto(); + result = new TL_chatPhoto_layer127(); break; } if (result == null && exception) { @@ -436,6 +467,38 @@ public class TLRPC { } } + public static class TL_chatPhoto extends ChatPhoto { + public static int constructor = 0x1c6e1c11; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + has_video = (flags & 1) != 0; + photo_id = stream.readInt64(exception); + if ((flags & 2) != 0) { + stripped_thumb = stream.readByteArray(exception); + } + dc_id = stream.readInt32(exception); + photo_small = new TL_fileLocationToBeDeprecated(); + photo_small.volume_id = -photo_id; + photo_small.local_id = 'a'; + photo_big = new TL_fileLocationToBeDeprecated(); + photo_big.volume_id = -photo_id; + photo_big.local_id = 'c'; + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = has_video ? 
(flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + stream.writeInt64(photo_id); + if ((flags & 2) != 0) { + stream.writeByteArray(stripped_thumb); + } + stream.writeInt32(dc_id); + } + } + public static class TL_chatPhoto_layer115 extends TL_chatPhoto { public static int constructor = 0x475cdbd5; @@ -501,7 +564,7 @@ public class TLRPC { } } - public static class TL_chatPhoto extends ChatPhoto { + public static class TL_chatPhoto_layer127 extends TL_chatPhoto { public static int constructor = 0x4790ee05; @@ -1940,10 +2003,10 @@ public class TLRPC { public boolean can_change_join_muted; public boolean join_date_asc; public boolean schedule_start_subscribed; + public boolean can_start_video; public long id; public long access_hash; public int participants_count; - public TL_dataJSON params; public String title; public int stream_dc_id; public int record_start_date; @@ -1957,7 +2020,7 @@ public class TLRPC { case 0x7780bcb4: result = new TL_groupCallDiscarded(); break; - case 0xc95c6654: + case 0x653dbaad: result = new TL_groupCall(); break; } @@ -1990,7 +2053,7 @@ public class TLRPC { } public static class TL_groupCall extends GroupCall { - public static int constructor = 0xc95c6654; + public static int constructor = 0x653dbaad; public void readParams(AbstractSerializedData stream, boolean exception) { @@ -1999,12 +2062,10 @@ public class TLRPC { can_change_join_muted = (flags & 4) != 0; join_date_asc = (flags & 64) != 0; schedule_start_subscribed = (flags & 256) != 0; + can_start_video = (flags & 512) != 0; id = stream.readInt64(exception); access_hash = stream.readInt64(exception); participants_count = stream.readInt32(exception); - if ((flags & 1) != 0) { - params = TL_dataJSON.TLdeserialize(stream, stream.readInt32(exception), exception); - } if ((flags & 8) != 0) { title = stream.readString(exception); } @@ -2026,13 +2087,11 @@ public class TLRPC { flags = can_change_join_muted ? (flags | 4) : (flags &~ 4); flags = join_date_asc ? 
(flags | 64) : (flags &~ 64); flags = schedule_start_subscribed ? (flags | 256) : (flags &~ 256); + flags = can_start_video ? (flags | 512) : (flags &~ 512); stream.writeInt32(flags); stream.writeInt64(id); stream.writeInt64(access_hash); stream.writeInt32(participants_count); - if ((flags & 1) != 0) { - params.serializeToStream(stream); - } if ((flags & 8) != 0) { stream.writeString(title); } @@ -2451,7 +2510,7 @@ public class TLRPC { case 0xe630b979: result = new TL_inputWallPaper(); break; - case 0x8427bbac: + case 0x967a462e: result = new TL_inputWallPaperNoFile(); break; case 0x72091c80: @@ -2487,11 +2546,17 @@ public class TLRPC { } public static class TL_inputWallPaperNoFile extends InputWallPaper { - public static int constructor = 0x8427bbac; + public static int constructor = 0x967a462e; + public long id; + + public void readParams(AbstractSerializedData stream, boolean exception) { + id = stream.readInt64(exception); + } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); + stream.writeInt64(id); } } @@ -3363,6 +3428,47 @@ public class TLRPC { } } + public static class TL_inputStickerSetItem extends TLObject { + public static int constructor = 0xffa0a496; + + public int flags; + public InputDocument document; + public String emoji; + public TL_maskCoords mask_coords; + + public static TL_inputStickerSetItem TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_inputStickerSetItem.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_inputStickerSetItem", constructor)); + } else { + return null; + } + } + TL_inputStickerSetItem result = new TL_inputStickerSetItem(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + document = InputDocument.TLdeserialize(stream, 
stream.readInt32(exception), exception); + emoji = stream.readString(exception); + if ((flags & 1) != 0) { + mask_coords = TL_maskCoords.TLdeserialize(stream, stream.readInt32(exception), exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(flags); + document.serializeToStream(stream); + stream.writeString(emoji); + if ((flags & 1) != 0) { + mask_coords.serializeToStream(stream); + } + } + } + public static class TL_langPackDifference extends TLObject { public static int constructor = 0xf385c1f6; @@ -5064,99 +5170,106 @@ public class TLRPC { } } - public static abstract class help_AppUpdate extends TLObject { + public static abstract class help_AppUpdate extends TLObject { - public static help_AppUpdate TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - help_AppUpdate result = null; - switch (constructor) { - case 0x1da7158f: - result = new TL_help_appUpdate(); - break; - case 0xc45a6536: - result = new TL_help_noAppUpdate(); - break; - } - if (result == null && exception) { - throw new RuntimeException(String.format("can't parse magic %x in help_AppUpdate", constructor)); - } - if (result != null) { - result.readParams(stream, exception); - } - return result; - } - } + public static help_AppUpdate TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + help_AppUpdate result = null; + switch (constructor) { + case 0xccbbce30: + result = new TL_help_appUpdate(); + break; + case 0xc45a6536: + result = new TL_help_noAppUpdate(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in help_AppUpdate", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + } - public static class TL_help_appUpdate extends help_AppUpdate { - public static int constructor = 0x1da7158f; + public static class TL_help_appUpdate 
extends help_AppUpdate { + public static int constructor = 0xccbbce30; - public int flags; - public boolean can_not_skip; - public int id; - public String version; - public String text; - public ArrayList entities = new ArrayList<>(); - public Document document; - public String url; + public int flags; + public boolean can_not_skip; + public int id; + public String version; + public String text; + public ArrayList entities = new ArrayList<>(); + public Document document; + public String url; + public Document sticker; - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); can_not_skip = (flags & 1) != 0; - id = stream.readInt32(exception); - version = stream.readString(exception); - text = stream.readString(exception); - int magic = stream.readInt32(exception); - if (magic != 0x1cb5c415) { - if (exception) { - throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); - } - return; - } - int count = stream.readInt32(exception); - for (int a = 0; a < count; a++) { - MessageEntity object = MessageEntity.TLdeserialize(stream, stream.readInt32(exception), exception); - if (object == null) { - return; - } - entities.add(object); - } - if ((flags & 2) != 0) { - document = Document.TLdeserialize(stream, stream.readInt32(exception), exception); - } - if ((flags & 4) != 0) { - url = stream.readString(exception); - } - } + id = stream.readInt32(exception); + version = stream.readString(exception); + text = stream.readString(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + MessageEntity object = MessageEntity.TLdeserialize(stream, 
stream.readInt32(exception), exception); + if (object == null) { + return; + } + entities.add(object); + } + if ((flags & 2) != 0) { + document = Document.TLdeserialize(stream, stream.readInt32(exception), exception); + } + if ((flags & 4) != 0) { + url = stream.readString(exception); + } + if ((flags & 8) != 0) { + sticker = Document.TLdeserialize(stream, stream.readInt32(exception), exception); + } + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - flags = can_not_skip ? (flags | 1) : (flags &~ 1); - stream.writeInt32(flags); - stream.writeInt32(id); - stream.writeString(version); - stream.writeString(text); - stream.writeInt32(0x1cb5c415); - int count = entities.size(); - stream.writeInt32(count); - for (int a = 0; a < count; a++) { - entities.get(a).serializeToStream(stream); - } - if ((flags & 2) != 0) { - document.serializeToStream(stream); - } - if ((flags & 4) != 0) { - stream.writeString(url); - } - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = can_not_skip ? 
(flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + stream.writeInt32(id); + stream.writeString(version); + stream.writeString(text); + stream.writeInt32(0x1cb5c415); + int count = entities.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + entities.get(a).serializeToStream(stream); + } + if ((flags & 2) != 0) { + document.serializeToStream(stream); + } + if ((flags & 4) != 0) { + stream.writeString(url); + } + if ((flags & 8) != 0) { + sticker.serializeToStream(stream); + } + } + } - public static class TL_help_noAppUpdate extends help_AppUpdate { - public static int constructor = 0xc45a6536; + public static class TL_help_noAppUpdate extends help_AppUpdate { + public static int constructor = 0xc45a6536; - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } public static class TL_messages_affectedFoundMessages extends TLObject { public static int constructor = 0xef8d3e6c; @@ -8836,146 +8949,225 @@ public class TLRPC { } } - public static abstract class ReplyMarkup extends TLObject { - public ArrayList rows = new ArrayList<>(); - public int flags; - public boolean selective; - public boolean single_use; - public boolean resize; - - public static ReplyMarkup TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { - ReplyMarkup result = null; - switch (constructor) { - case 0x48a30254: - result = new TL_replyInlineMarkup(); - break; - case 0xa03e5b85: - result = new TL_replyKeyboardHide(); - break; - case 0xf4108aa0: - result = new TL_replyKeyboardForceReply(); - break; - case 0x3502758c: - result = new TL_replyKeyboardMarkup(); - break; - } - if (result == null && exception) { - throw new RuntimeException(String.format("can't parse magic %x in ReplyMarkup", constructor)); - } - if (result != null) { - result.readParams(stream, exception); - } - return 
result; - } - } - - public static class TL_replyInlineMarkup extends ReplyMarkup { - public static int constructor = 0x48a30254; + public static abstract class ReplyMarkup extends TLObject { + public int flags; + public boolean resize; + public boolean single_use; + public boolean selective; + public String placeholder; + public ArrayList rows = new ArrayList<>(); - public void readParams(AbstractSerializedData stream, boolean exception) { - int magic = stream.readInt32(exception); - if (magic != 0x1cb5c415) { - if (exception) { - throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); - } - return; - } - int count = stream.readInt32(exception); - for (int a = 0; a < count; a++) { - TL_keyboardButtonRow object = TL_keyboardButtonRow.TLdeserialize(stream, stream.readInt32(exception), exception); - if (object == null) { - return; - } - rows.add(object); - } - } + public static ReplyMarkup TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + ReplyMarkup result = null; + switch (constructor) { + case 0x85dd99d1: + result = new TL_replyKeyboardMarkup(); + break; + case 0xa03e5b85: + result = new TL_replyKeyboardHide(); + break; + case 0x86b40b08: + result = new TL_replyKeyboardForceReply(); + break; + case 0x3502758c: + result = new TL_replyKeyboardMarkup_layer129(); + break; + case 0xf4108aa0: + result = new TL_replyKeyboardForceReply_layer129(); + break; + case 0x48a30254: + result = new TL_replyInlineMarkup(); + break; + } + if (result == null && exception) { + throw new RuntimeException(String.format("can't parse magic %x in ReplyMarkup", constructor)); + } + if (result != null) { + result.readParams(stream, exception); + } + return result; + } + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - stream.writeInt32(0x1cb5c415); - int count = rows.size(); - stream.writeInt32(count); - for (int a = 0; a < count; a++) { - rows.get(a).serializeToStream(stream); - } 
- } - } - - public static class TL_replyKeyboardHide extends ReplyMarkup { - public static int constructor = 0xa03e5b85; + public static class TL_replyKeyboardMarkup extends ReplyMarkup { + public static int constructor = 0x85dd99d1; - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - selective = (flags & 4) != 0; - } + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + resize = (flags & 1) != 0; + single_use = (flags & 2) != 0; + selective = (flags & 4) != 0; + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_keyboardButtonRow object = TL_keyboardButtonRow.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + rows.add(object); + } + if ((flags & 8) != 0) { + placeholder = stream.readString(exception); + } + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - flags = selective ? (flags | 4) : (flags &~ 4); - stream.writeInt32(flags); - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = resize ? (flags | 1) : (flags &~ 1); + flags = single_use ? (flags | 2) : (flags &~ 2); + flags = selective ? 
(flags | 4) : (flags &~ 4); + stream.writeInt32(flags); + stream.writeInt32(0x1cb5c415); + int count = rows.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + rows.get(a).serializeToStream(stream); + } + if ((flags & 8) != 0) { + stream.writeString(placeholder); + } + } + } - public static class TL_replyKeyboardForceReply extends ReplyMarkup { - public static int constructor = 0xf4108aa0; + public static class TL_replyKeyboardHide extends ReplyMarkup { + public static int constructor = 0xa03e5b85; - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - single_use = (flags & 2) != 0; - selective = (flags & 4) != 0; - } + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + selective = (flags & 4) != 0; + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - flags = single_use ? (flags | 2) : (flags &~ 2); - flags = selective ? (flags | 4) : (flags &~ 4); - stream.writeInt32(flags); - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = selective ? 
(flags | 4) : (flags &~ 4); + stream.writeInt32(flags); + } + } - public static class TL_replyKeyboardMarkup extends ReplyMarkup { - public static int constructor = 0x3502758c; + public static class TL_replyKeyboardForceReply extends ReplyMarkup { + public static int constructor = 0x86b40b08; - public void readParams(AbstractSerializedData stream, boolean exception) { - flags = stream.readInt32(exception); - resize = (flags & 1) != 0; - single_use = (flags & 2) != 0; - selective = (flags & 4) != 0; - int magic = stream.readInt32(exception); - if (magic != 0x1cb5c415) { - if (exception) { - throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); - } - return; - } - int count = stream.readInt32(exception); - for (int a = 0; a < count; a++) { - TL_keyboardButtonRow object = TL_keyboardButtonRow.TLdeserialize(stream, stream.readInt32(exception), exception); - if (object == null) { - return; - } - rows.add(object); - } - } + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + single_use = (flags & 2) != 0; + selective = (flags & 4) != 0; + if ((flags & 8) != 0) { + placeholder = stream.readString(exception); + } + } - public void serializeToStream(AbstractSerializedData stream) { - stream.writeInt32(constructor); - flags = resize ? (flags | 1) : (flags &~ 1); - flags = single_use ? (flags | 2) : (flags &~ 2); - flags = selective ? (flags | 4) : (flags &~ 4); - stream.writeInt32(flags); - stream.writeInt32(0x1cb5c415); - int count = rows.size(); - stream.writeInt32(count); - for (int a = 0; a < count; a++) { - rows.get(a).serializeToStream(stream); - } - } - } + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = single_use ? (flags | 2) : (flags &~ 2); + flags = selective ? 
(flags | 4) : (flags &~ 4); + stream.writeInt32(flags); + if ((flags & 8) != 0) { + stream.writeString(placeholder); + } + } + } + + public static class TL_replyKeyboardMarkup_layer129 extends TL_replyKeyboardMarkup { + public static int constructor = 0x3502758c; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + resize = (flags & 1) != 0; + single_use = (flags & 2) != 0; + selective = (flags & 4) != 0; + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_keyboardButtonRow object = TL_keyboardButtonRow.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + rows.add(object); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = resize ? (flags | 1) : (flags &~ 1); + flags = single_use ? (flags | 2) : (flags &~ 2); + flags = selective ? (flags | 4) : (flags &~ 4); + stream.writeInt32(flags); + stream.writeInt32(0x1cb5c415); + int count = rows.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + rows.get(a).serializeToStream(stream); + } + } + } + + public static class TL_replyKeyboardForceReply_layer129 extends TL_replyKeyboardForceReply { + public static int constructor = 0xf4108aa0; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + single_use = (flags & 2) != 0; + selective = (flags & 4) != 0; + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = single_use ? (flags | 2) : (flags &~ 2); + flags = selective ? 
(flags | 4) : (flags &~ 4); + stream.writeInt32(flags); + } + } + + public static class TL_replyInlineMarkup extends ReplyMarkup { + public static int constructor = 0x48a30254; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_keyboardButtonRow object = TL_keyboardButtonRow.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + rows.add(object); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(0x1cb5c415); + int count = rows.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + rows.get(a).serializeToStream(stream); + } + } + } public static class TL_webPageAttributeTheme extends TLObject { public static int constructor = 0x54b56617; @@ -13985,13 +14177,16 @@ public class TLRPC { public int size; public double video_start_ts; - public static VideoSize TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + public static VideoSize TLdeserialize(long photo_id, long document_id, AbstractSerializedData stream, int constructor, boolean exception) { VideoSize result = null; switch (constructor) { case 0x435bb987: result = new TL_videoSize_layer115(); break; case 0xe831c556: + result = new TL_videoSize_layer127(); + break; + case 0xde33b094: result = new TL_videoSize(); break; } @@ -14000,6 +14195,20 @@ public class TLRPC { } if (result != null) { result.readParams(stream, exception); + if (result.location == null) { + if (!TextUtils.isEmpty(result.type) && (photo_id != 0 || document_id != 0)) { + result.location = new TL_fileLocationToBeDeprecated(); + if (photo_id != 0) { + 
result.location.volume_id = -photo_id; + result.location.local_id = result.type.charAt(0); + } else { + result.location.volume_id = -document_id; + result.location.local_id = 1000 + result.type.charAt(0); + } + } else { + result.location = new TL_fileLocationUnavailable(); + } + } } return result; } @@ -14027,7 +14236,7 @@ public class TLRPC { } } - public static class TL_videoSize extends VideoSize { + public static class TL_videoSize_layer127 extends TL_videoSize { public static int constructor = 0xe831c556; @@ -14057,6 +14266,34 @@ public class TLRPC { } } + public static class TL_videoSize extends VideoSize { + public static int constructor = 0xde33b094; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + type = stream.readString(exception); + w = stream.readInt32(exception); + h = stream.readInt32(exception); + size = stream.readInt32(exception); + if ((flags & 1) != 0) { + video_start_ts = stream.readDouble(exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt32(flags); + stream.writeString(type); + stream.writeInt32(w); + stream.writeInt32(h); + stream.writeInt32(size); + if ((flags & 1) != 0) { + stream.writeDouble(video_start_ts); + } + } + } + public static abstract class BotInlineMessage extends TLObject { public int flags; public GeoPoint geo; @@ -16272,11 +16509,8 @@ public class TLRPC { public byte[] file_reference; public String thumb_size; public int flags; - public boolean big; - public InputPeer peer; public long volume_id; public int local_id; - public InputStickerSet stickerset; public long secret; public static InputFileLocation TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { @@ -16288,13 +16522,13 @@ public class TLRPC { case 0xbad07584: result = new TL_inputDocumentFileLocation(); break; - case 0x27d69997: + case 0x37257e99: result = new 
TL_inputPeerPhotoFileLocation(); break; case 0xf5235d55: result = new TL_inputEncryptedFileLocation(); break; - case 0xdbaeae9: + case 0x9d84f3db: result = new TL_inputStickerSetThumb(); break; case 0xbba51639: @@ -16358,15 +16592,17 @@ public class TLRPC { } public static class TL_inputPeerPhotoFileLocation extends InputFileLocation { - public static int constructor = 0x27d69997; + public static int constructor = 0x37257e99; + public boolean big; + public InputPeer peer; + public long photo_id; public void readParams(AbstractSerializedData stream, boolean exception) { flags = stream.readInt32(exception); big = (flags & 1) != 0; peer = InputPeer.TLdeserialize(stream, stream.readInt32(exception), exception); - volume_id = stream.readInt64(exception); - local_id = stream.readInt32(exception); + photo_id = stream.readInt64(exception); } public void serializeToStream(AbstractSerializedData stream) { @@ -16374,8 +16610,7 @@ public class TLRPC { flags = big ? (flags | 1) : (flags &~ 1); stream.writeInt32(flags); peer.serializeToStream(stream); - stream.writeInt64(volume_id); - stream.writeInt32(local_id); + stream.writeInt64(photo_id); } } @@ -16396,20 +16631,20 @@ public class TLRPC { } public static class TL_inputStickerSetThumb extends InputFileLocation { - public static int constructor = 0xdbaeae9; + public static int constructor = 0x9d84f3db; + public InputStickerSet stickerset; + public int thumb_version; public void readParams(AbstractSerializedData stream, boolean exception) { stickerset = InputStickerSet.TLdeserialize(stream, stream.readInt32(exception), exception); - volume_id = stream.readInt64(exception); - local_id = stream.readInt32(exception); + thumb_version = stream.readInt32(exception); } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); stickerset.serializeToStream(stream); - stream.writeInt64(volume_id); - stream.writeInt32(local_id); + stream.writeInt32(thumb_version); } } @@ -19896,7 +20131,7 @@ public 
class TLRPC { date = stream.readInt32(exception); duration = stream.readInt32(exception); size = stream.readInt32(exception); - thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + thumb = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); dc_id = stream.readInt32(exception); w = stream.readInt32(exception); h = stream.readInt32(exception); @@ -19928,7 +20163,7 @@ public class TLRPC { duration = stream.readInt32(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + thumb = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); dc_id = stream.readInt32(exception); w = stream.readInt32(exception); h = stream.readInt32(exception); @@ -19961,7 +20196,7 @@ public class TLRPC { caption = stream.readString(exception); duration = stream.readInt32(exception); size = stream.readInt32(exception); - thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + thumb = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); dc_id = stream.readInt32(exception); w = stream.readInt32(exception); h = stream.readInt32(exception); @@ -19999,7 +20234,7 @@ public class TLRPC { caption = stream.readString(exception); duration = stream.readInt32(exception); size = stream.readInt32(exception); - thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + thumb = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); dc_id = stream.readInt32(exception); w = stream.readInt32(exception); h = stream.readInt32(exception); @@ -20034,7 +20269,7 @@ public class TLRPC { duration = stream.readInt32(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + thumb = 
PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); dc_id = stream.readInt32(exception); w = stream.readInt32(exception); h = stream.readInt32(exception); @@ -20182,6 +20417,62 @@ public class TLRPC { } } + public static class TL_groupCallParticipantVideo extends TLObject { + public static int constructor = 0x78e41663; + + public int flags; + public boolean paused; + public String endpoint; + public ArrayList source_groups = new ArrayList<>(); + + public static TL_groupCallParticipantVideo TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_groupCallParticipantVideo.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_groupCallParticipantVideo", constructor)); + } else { + return null; + } + } + TL_groupCallParticipantVideo result = new TL_groupCallParticipantVideo(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + paused = (flags & 1) != 0; + endpoint = stream.readString(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + TL_groupCallParticipantVideoSourceGroup object = TL_groupCallParticipantVideoSourceGroup.TLdeserialize(stream, stream.readInt32(exception), exception); + if (object == null) { + return; + } + source_groups.add(object); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = paused ? 
(flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + stream.writeString(endpoint); + stream.writeInt32(0x1cb5c415); + int count = source_groups.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + source_groups.get(a).serializeToStream(stream); + } + } + } + public static abstract class TopPeerCategory extends TLObject { public static TopPeerCategory TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { @@ -20965,7 +21256,7 @@ public class TLRPC { date = stream.readInt32(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumbs.add(PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception)); + thumbs.add(PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception)); dc_id = stream.readInt32(exception); int magic = stream.readInt32(exception); if (magic != 0x1cb5c415) { @@ -21013,7 +21304,7 @@ public class TLRPC { date = stream.readInt32(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumbs.add(PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception)); + thumbs.add(PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception)); dc_id = stream.readInt32(exception); stream.readInt32(exception); int magic = stream.readInt32(exception); @@ -21064,7 +21355,7 @@ public class TLRPC { file_name = stream.readString(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumbs.add(PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception)); + thumbs.add(PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception)); dc_id = stream.readInt32(exception); key = stream.readByteArray(exception); iv = stream.readByteArray(exception); @@ -21108,7 +21399,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, 
stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, id, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -21125,7 +21416,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - VideoSize object = VideoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + VideoSize object = VideoSize.TLdeserialize(0, id, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -21207,7 +21498,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -21271,7 +21562,7 @@ public class TLRPC { file_name = stream.readString(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumbs.add(PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception)); + thumbs.add(PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception)); dc_id = stream.readInt32(exception); } @@ -21313,7 +21604,7 @@ public class TLRPC { date = stream.readInt32(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumbs.add(PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception)); + thumbs.add(PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception)); dc_id = stream.readInt32(exception); int magic = stream.readInt32(exception); if (magic != 0x1cb5c415) { @@ -21364,7 +21655,7 @@ public class TLRPC { date = stream.readInt32(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumbs.add(PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception)); + thumbs.add(PhotoSize.TLdeserialize(0, 0, 
0, stream, stream.readInt32(exception), exception)); dc_id = stream.readInt32(exception); int magic = stream.readInt32(exception); if (magic != 0x1cb5c415) { @@ -24051,6 +24342,9 @@ public class TLRPC { case 0x98a12b4b: result = new TL_updateChannelMessageViews(); break; + case 0xb783982: + result = new TL_updateGroupCallConnection(); + break; case 0x871fb939: result = new TL_updateGeoLiveViewed(); break; @@ -24668,6 +24962,27 @@ public class TLRPC { } } + public static class TL_updateGroupCallConnection extends Update { + public static int constructor = 0xb783982; + + public int flags; + public boolean presentation; + public TL_dataJSON params; + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + presentation = (flags & 1) != 0; + params = TL_dataJSON.TLdeserialize(stream, stream.readInt32(exception), exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = presentation ? 
(flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + params.serializeToStream(stream); + } + } + public static class TL_updateGeoLiveViewed extends Update { public static int constructor = 0x871fb939; @@ -26890,6 +27205,9 @@ public class TLRPC { result = new TL_userProfilePhoto_old(); break; case 0xcc656077: + result = new TL_userProfilePhoto_layer127(); + break; + case 0x82d1f706: result = new TL_userProfilePhoto(); break; } @@ -26927,7 +27245,7 @@ public class TLRPC { } } - public static class TL_userProfilePhoto extends UserProfilePhoto { + public static class TL_userProfilePhoto_layer127 extends TL_userProfilePhoto { public static int constructor = 0xcc656077; @@ -27027,6 +27345,38 @@ public class TLRPC { } } + public static class TL_userProfilePhoto extends UserProfilePhoto { + public static int constructor = 0x82d1f706; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + has_video = (flags & 1) != 0; + photo_id = stream.readInt64(exception); + if ((flags & 2) != 0) { + stripped_thumb = stream.readByteArray(exception); + } + dc_id = stream.readInt32(exception); + photo_small = new TL_fileLocationToBeDeprecated(); + photo_small.volume_id = -photo_id; + photo_small.local_id = 'a'; + photo_big = new TL_fileLocationToBeDeprecated(); + photo_big.volume_id = -photo_id; + photo_big.local_id = 'c'; + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = has_video ? 
(flags | 1) : (flags &~ 1); + stream.writeInt32(flags); + stream.writeInt64(photo_id); + if ((flags & 2) != 0) { + stream.writeByteArray(stripped_thumb); + } + stream.writeInt32(dc_id); + } + } + public static abstract class MessageEntity extends TLObject { public int offset; public int length; @@ -27538,7 +27888,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -27580,7 +27930,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -27621,7 +27971,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -27679,7 +28029,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -27724,7 +28074,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, 0, stream, 
stream.readInt32(exception), exception); if (object == null) { return; } @@ -27771,7 +28121,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(id, 0, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -27787,7 +28137,7 @@ public class TLRPC { } count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - VideoSize object = VideoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + VideoSize object = VideoSize.TLdeserialize(id, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -27842,7 +28192,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -28169,6 +28519,52 @@ public class TLRPC { } } + public static class TL_groupCallParticipantVideoSourceGroup extends TLObject { + public static int constructor = 0xdcb118b7; + + public String semantics; + public ArrayList sources = new ArrayList<>(); + + public static TL_groupCallParticipantVideoSourceGroup TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + if (TL_groupCallParticipantVideoSourceGroup.constructor != constructor) { + if (exception) { + throw new RuntimeException(String.format("can't parse magic %x in TL_groupCallParticipantVideoSourceGroup", constructor)); + } else { + return null; + } + } + TL_groupCallParticipantVideoSourceGroup result = new TL_groupCallParticipantVideoSourceGroup(); + result.readParams(stream, exception); + return result; + } + + public void readParams(AbstractSerializedData stream, boolean 
exception) { + semantics = stream.readString(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + sources.add(stream.readInt32(exception)); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(semantics); + stream.writeInt32(0x1cb5c415); + int count = sources.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(sources.get(a)); + } + } + } + public static class TL_help_countryCode extends TLObject { public static int constructor = 0x4203c5ef; @@ -31223,6 +31619,8 @@ public class TLRPC { public boolean motion; public int background_color; public int second_background_color; + public int third_background_color; + public int fourth_background_color; public int intensity; public int rotation; @@ -31233,6 +31631,9 @@ public class TLRPC { result = new TL_wallPaperSettings_layer106(); break; case 0x5086cf8: + result = new TL_wallPaperSettings_layer128(); + break; + case 0x1dc1bca4: result = new TL_wallPaperSettings(); break; } @@ -31276,7 +31677,7 @@ public class TLRPC { } } - public static class TL_wallPaperSettings extends WallPaperSettings { + public static class TL_wallPaperSettings_layer128 extends TL_wallPaperSettings { public static int constructor = 0x5086cf8; @@ -31318,6 +31719,60 @@ public class TLRPC { } } + public static class TL_wallPaperSettings extends WallPaperSettings { + public static int constructor = 0x1dc1bca4; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + flags = stream.readInt32(exception); + blur = (flags & 2) != 0; + motion = (flags & 4) != 0; + if ((flags & 1) != 0) { + background_color = stream.readInt32(exception); + } + if ((flags & 16) != 0) { + 
second_background_color = stream.readInt32(exception); + } + if ((flags & 32) != 0) { + third_background_color = stream.readInt32(exception); + } + if ((flags & 64) != 0) { + fourth_background_color = stream.readInt32(exception); + } + if ((flags & 8) != 0) { + intensity = stream.readInt32(exception); + } + if ((flags & 16) != 0) { + rotation = stream.readInt32(exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = blur ? (flags | 2) : (flags &~ 2); + flags = motion ? (flags | 4) : (flags &~ 4); + stream.writeInt32(flags); + if ((flags & 1) != 0) { + stream.writeInt32(background_color); + } + if ((flags & 16) != 0) { + stream.writeInt32(second_background_color); + } + if ((flags & 32) != 0) { + stream.writeInt32(third_background_color); + } + if ((flags & 64) != 0) { + stream.writeInt32(fourth_background_color); + } + if ((flags & 8) != 0) { + stream.writeInt32(intensity); + } + if ((flags & 16) != 0) { + stream.writeInt32(rotation); + } + } + } + public static class TL_contacts_found extends TLObject { public static int constructor = 0xb3134d9d; @@ -32010,7 +32465,7 @@ public class TLRPC { date = stream.readInt32(exception); mime_type = stream.readString(exception); size = stream.readInt32(exception); - thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + thumb = PhotoSize.TLdeserialize(0, id, 0, stream, stream.readInt32(exception), exception); dc_id = stream.readInt32(exception); int magic = stream.readInt32(exception); if (magic != 0x1cb5c415) { @@ -34657,7 +35112,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, id, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -34762,7 +35217,7 @@ public class TLRPC { title = 
stream.readString(exception); short_name = stream.readString(exception); if ((flags & 16) != 0) { - PhotoSize thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize thumb = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (thumb != null) { thumbs.add(thumb); } @@ -34819,7 +35274,7 @@ public class TLRPC { } int count = stream.readInt32(exception); for (int a = 0; a < count; a++) { - PhotoSize object = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize object = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (object == null) { return; } @@ -34881,7 +35336,7 @@ public class TLRPC { title = stream.readString(exception); short_name = stream.readString(exception); if ((flags & 16) != 0) { - PhotoSize thumb = PhotoSize.TLdeserialize(stream, stream.readInt32(exception), exception); + PhotoSize thumb = PhotoSize.TLdeserialize(0, 0, 0, stream, stream.readInt32(exception), exception); if (thumb != null) { thumbs.add(thumb); } @@ -36193,33 +36648,59 @@ public class TLRPC { public byte[] bytes; - public static PhotoSize TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { + public static PhotoSize TLdeserialize(long photo_id, long document_id, long sticker_set_id, AbstractSerializedData stream, int constructor, boolean exception) { PhotoSize result = null; switch (constructor) { case 0xd8214d41: result = new TL_photoPathSize(); break; case 0x77bfb61b: - result = new TL_photoSize(); + result = new TL_photoSize_layer127(); break; case 0xe17e23c: result = new TL_photoSizeEmpty(); break; case 0x5aa86a51: - result = new TL_photoSizeProgressive(); + result = new TL_photoSizeProgressive_layer127(); break; case 0xe0b0bc2e: result = new TL_photoStrippedSize(); break; case 0xe9a734fa: - result = new TL_photoCachedSize(); + result = new TL_photoCachedSize_layer127(); break; + case 0x75c78e60: + result = new 
TL_photoSize(); + break; + case 0x21e1ad6: + result = new TL_photoCachedSize(); + break; + case 0xfa3efb95: + result = new TL_photoSizeProgressive(); + break; } if (result == null && exception) { throw new RuntimeException(String.format("can't parse magic %x in PhotoSize", constructor)); } if (result != null) { result.readParams(stream, exception); + if (result.location == null) { + if (!TextUtils.isEmpty(result.type) && (photo_id != 0 || document_id != 0 || sticker_set_id != 0)) { + result.location = new TL_fileLocationToBeDeprecated(); + if (photo_id != 0) { + result.location.volume_id = -photo_id; + result.location.local_id = result.type.charAt(0); + } else if (document_id != 0) { + result.location.volume_id = -document_id; + result.location.local_id = 1000 + result.type.charAt(0); + } else if (sticker_set_id != 0) { + result.location.volume_id = -sticker_set_id; + result.location.local_id = 2000 + result.type.charAt(0); + } + } else { + result.location = new TL_fileLocationUnavailable(); + } + } } return result; } @@ -36242,10 +36723,9 @@ public class TLRPC { } } - public static class TL_photoSizeProgressive extends PhotoSize { + public static class TL_photoSizeProgressive_layer127 extends TL_photoSizeProgressive { public static int constructor = 0x5aa86a51; - public ArrayList sizes = new ArrayList<>(); public void readParams(AbstractSerializedData stream, boolean exception) { type = stream.readString(exception); @@ -36283,7 +36763,7 @@ public class TLRPC { } } - public static class TL_photoSize extends PhotoSize { + public static class TL_photoSize_layer127 extends TL_photoSize { public static int constructor = 0x77bfb61b; @@ -36319,7 +36799,7 @@ public class TLRPC { } } - public static class TL_photoCachedSize extends PhotoSize { + public static class TL_photoCachedSize_layer127 extends TL_photoCachedSize { public static int constructor = 0xe9a734fa; @@ -36341,6 +36821,85 @@ public class TLRPC { } } + public static class TL_photoSize extends PhotoSize { + 
public static int constructor = 0x75c78e60; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + type = stream.readString(exception); + w = stream.readInt32(exception); + h = stream.readInt32(exception); + size = stream.readInt32(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(type); + stream.writeInt32(w); + stream.writeInt32(h); + stream.writeInt32(size); + } + } + + public static class TL_photoCachedSize extends PhotoSize { + public static int constructor = 0x21e1ad6; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + type = stream.readString(exception); + w = stream.readInt32(exception); + h = stream.readInt32(exception); + bytes = stream.readByteArray(exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(type); + stream.writeInt32(w); + stream.writeInt32(h); + stream.writeByteArray(bytes); + } + } + + public static class TL_photoSizeProgressive extends PhotoSize { + public static int constructor = 0xfa3efb95; + + public ArrayList sizes = new ArrayList<>(); + + public void readParams(AbstractSerializedData stream, boolean exception) { + type = stream.readString(exception); + w = stream.readInt32(exception); + h = stream.readInt32(exception); + int magic = stream.readInt32(exception); + if (magic != 0x1cb5c415) { + if (exception) { + throw new RuntimeException(String.format("wrong Vector magic, got %x", magic)); + } + return; + } + int count = stream.readInt32(exception); + for (int a = 0; a < count; a++) { + sizes.add(stream.readInt32(exception)); + } + if (!sizes.isEmpty()) { + size = sizes.get(sizes.size() - 1); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(type); + stream.writeInt32(w); + stream.writeInt32(h); + stream.writeInt32(0x1cb5c415); + 
int count = sizes.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(sizes.get(a)); + } + } + } + public static class TL_contactFound extends TLObject { public static int constructor = 0xea879f95; @@ -37325,6 +37884,9 @@ public class TLRPC { result = new TL_wallPaper_layer94(); break; case 0x8af40b25: + result = new TL_wallPaperNoFile_layer128(); + break; + case 0xe0804116: result = new TL_wallPaperNoFile(); break; } @@ -37400,7 +37962,7 @@ public class TLRPC { } } - public static class TL_wallPaperNoFile extends WallPaper { + public static class TL_wallPaperNoFile_layer128 extends TL_wallPaperNoFile { public static int constructor = 0x8af40b25; @@ -37424,6 +37986,32 @@ public class TLRPC { } } + public static class TL_wallPaperNoFile extends WallPaper { + public static int constructor = 0xe0804116; + + + public void readParams(AbstractSerializedData stream, boolean exception) { + id = stream.readInt64(exception); + flags = stream.readInt32(exception); + isDefault = (flags & 2) != 0; + dark = (flags & 16) != 0; + if ((flags & 4) != 0) { + settings = WallPaperSettings.TLdeserialize(stream, stream.readInt32(exception), exception); + } + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeInt64(id); + flags = isDefault ? (flags | 2) : (flags &~ 2); + flags = dark ? 
(flags | 16) : (flags &~ 16); + stream.writeInt32(flags); + if ((flags & 4) != 0) { + settings.serializeToStream(stream); + } + } + } + public static class TL_paymentSavedCredentialsCard extends TLObject { public static int constructor = 0xcdc27a1f; @@ -38605,7 +39193,7 @@ public class TLRPC { } public static class TL_groupCallParticipant extends TLObject { - public static int constructor = 0xb96b25ee; + public static int constructor = 0xeba636fe; public int flags; public boolean muted; @@ -38617,6 +39205,7 @@ public class TLRPC { public boolean muted_by_you; public boolean volume_by_admin; public boolean self; + public boolean video_joined; public Peer peer; public int date; public int active_date; @@ -38624,7 +39213,8 @@ public class TLRPC { public int volume; public String about; public long raise_hand_rating; - public TL_dataJSON params; + public TL_groupCallParticipantVideo video; + public TL_groupCallParticipantVideo presentation; public long lastSpeakTime; //custom; public float amplitude; //custom; public boolean hasVoice; //custom; @@ -38634,6 +39224,11 @@ public class TLRPC { public long lastVisibleDate; //custom public int lastTypingDate; //custom public long lastRaiseHandDate; //custom + public String videoEndpoint; //custom + public String presentationEndpoint; //custom + public int videoIndex;//custom + public int hasPresentationFrame;//custom; + public int hasCameraFrame;//custom; public static TL_groupCallParticipant TLdeserialize(AbstractSerializedData stream, int constructor, boolean exception) { if (TL_groupCallParticipant.constructor != constructor) { @@ -38659,6 +39254,7 @@ public class TLRPC { muted_by_you = (flags & 512) != 0; volume_by_admin = (flags & 1024) != 0; self = (flags & 4096) != 0; + video_joined = (flags & 32768) != 0; peer = Peer.TLdeserialize(stream, stream.readInt32(exception), exception); date = stream.readInt32(exception); if ((flags & 8) != 0) { @@ -38675,7 +39271,10 @@ public class TLRPC { raise_hand_rating = 
stream.readInt64(exception); } if ((flags & 64) != 0) { - params = TL_dataJSON.TLdeserialize(stream, stream.readInt32(exception), exception); + video = TL_groupCallParticipantVideo.TLdeserialize(stream, stream.readInt32(exception), exception); + } + if ((flags & 16384) != 0) { + presentation = TL_groupCallParticipantVideo.TLdeserialize(stream, stream.readInt32(exception), exception); } } @@ -38690,6 +39289,7 @@ public class TLRPC { flags = muted_by_you ? (flags | 512) : (flags &~ 512); flags = volume_by_admin ? (flags | 1024) : (flags &~ 1024); flags = self ? (flags | 4096) : (flags &~ 4096); + flags = video_joined ? (flags | 32768) : (flags &~ 32768); stream.writeInt32(flags); peer.serializeToStream(stream); stream.writeInt32(date); @@ -38707,7 +39307,10 @@ public class TLRPC { stream.writeInt64(raise_hand_rating); } if ((flags & 64) != 0) { - params.serializeToStream(stream); + video.serializeToStream(stream); + } + if ((flags & 16384) != 0) { + presentation.serializeToStream(stream); } } } @@ -45291,6 +45894,76 @@ public class TLRPC { } } + public static class TL_stickers_createStickerSet extends TLObject { + public static int constructor = 0x9021ab67; + + public int flags; + public boolean masks; + public boolean animated; + public InputUser user_id; + public String title; + public String short_name; + public InputDocument thumb; + public ArrayList stickers = new ArrayList<>(); + public String software; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return TL_messages_stickerSet.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + flags = masks ? (flags | 1) : (flags &~ 1); + flags = animated ? 
(flags | 2) : (flags &~ 2); + stream.writeInt32(flags); + user_id.serializeToStream(stream); + stream.writeString(title); + stream.writeString(short_name); + if ((flags & 4) != 0) { + thumb.serializeToStream(stream); + } + stream.writeInt32(0x1cb5c415); + int count = stickers.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stickers.get(a).serializeToStream(stream); + } + if ((flags & 8) != 0) { + stream.writeString(software); + } + } + } + + public static class TL_stickers_checkShortName extends TLObject { + public static int constructor = 0x284b3639; + + public String short_name; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Bool.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(short_name); + } + } + + public static class TL_stickers_suggestShortName extends TLObject { + public static int constructor = 0x4dafc503; + + public String title; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return TL_stickers_suggestedShortName.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(title); + } + } + public static class TL_phone_getCallConfig extends TLObject { public static int constructor = 0x55451fa9; @@ -45498,6 +46171,7 @@ public class TLRPC { public int flags; public boolean muted; + public boolean video_stopped; public TL_inputGroupCall call; public InputPeer join_as; public String invite_hash; @@ -45510,6 +46184,7 @@ public class TLRPC { public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); flags = muted ? (flags | 1) : (flags &~ 1); + flags = video_stopped ? 
(flags | 4) : (flags &~ 4); stream.writeInt32(flags); call.serializeToStream(stream); join_as.serializeToStream(stream); @@ -45646,19 +46321,29 @@ public class TLRPC { } public static class TL_phone_checkGroupCall extends TLObject { - public static int constructor = 0xb74a7bea; + public static int constructor = 0xb59cf977; public TL_inputGroupCall call; - public int source; + public ArrayList sources = new ArrayList<>(); public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { - return Bool.TLdeserialize(stream, constructor, exception); + Vector vector = new Vector(); + int size = stream.readInt32(exception); + for (int a = 0; a < size; a++) { + vector.objects.add(stream.readInt32(exception)); + } + return vector; } public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); call.serializeToStream(stream); - stream.writeInt32(source); + stream.writeInt32(0x1cb5c415); + int count = sources.size(); + stream.writeInt32(count); + for (int a = 0; a < count; a++) { + stream.writeInt32(sources.get(a)); + } } } @@ -45686,14 +46371,17 @@ public class TLRPC { } public static class TL_phone_editGroupCallParticipant extends TLObject { - public static int constructor = 0xd975eb80; + public static int constructor = 0xa5273abf; public int flags; - public boolean muted; public TL_inputGroupCall call; public InputPeer participant; + public boolean muted; public int volume; public boolean raise_hand; + public boolean video_stopped; + public boolean video_paused; + public boolean presentation_paused; public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { return Updates.TLdeserialize(stream, constructor, exception); @@ -45701,16 +46389,27 @@ public class TLRPC { public void serializeToStream(AbstractSerializedData stream) { stream.writeInt32(constructor); - flags = muted ? 
(flags | 1) : (flags &~ 1); stream.writeInt32(flags); call.serializeToStream(stream); participant.serializeToStream(stream); + if ((flags & 1) != 0) { + stream.writeBool(muted); + } if ((flags & 2) != 0) { stream.writeInt32(volume); } if ((flags & 4) != 0) { stream.writeBool(raise_hand); } + if ((flags & 8) != 0) { + stream.writeBool(video_stopped); + } + if ((flags & 16) != 0) { + stream.writeBool(video_paused); + } + if ((flags & 32) != 0) { + stream.writeBool(presentation_paused); + } } } @@ -45814,6 +46513,38 @@ public class TLRPC { } } + public static class TL_phone_joinGroupCallPresentation extends TLObject { + public static int constructor = 0xcbea6bc4; + + public TL_inputGroupCall call; + public TL_dataJSON params; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + params.serializeToStream(stream); + } + } + + public static class TL_phone_leaveGroupCallPresentation extends TLObject { + public static int constructor = 0x1c50d144; + + public TL_inputGroupCall call; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return Updates.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + call.serializeToStream(stream); + } + } + public static class TL_payments_getPaymentForm extends TLObject { public static int constructor = 0x8a333c8d; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBar.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBar.java index a2f1bfa72..f6dd0056b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBar.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBar.java @@ -60,6 +60,7 @@ public class ActionBar extends FrameLayout { private ImageView backButtonImageView; private SimpleTextView[] titleTextView = new SimpleTextView[2]; private SimpleTextView subtitleTextView; + private SimpleTextView additionalSubtitleTextView; private View actionModeTop; private int actionModeColor; private int actionBarColor; @@ -160,6 +161,10 @@ public class ActionBar extends FrameLayout { backDrawable.setRotation(isActionModeShowed() ? 1 : 0, false); backDrawable.setRotatedColor(itemsActionModeColor); backDrawable.setColor(itemsColor); + } else if (drawable instanceof MenuDrawable) { + MenuDrawable menuDrawable = (MenuDrawable) drawable; + menuDrawable.setBackColor(actionBarColor); + menuDrawable.setIconColor(itemsColor); } } @@ -201,7 +206,7 @@ public class ActionBar extends FrameLayout { } protected boolean shouldClipChild(View child) { - return clipContent && (child == titleTextView[0] || child == titleTextView[1] || child == subtitleTextView || child == menu || child == backButtonImageView); + return clipContent && (child == titleTextView[0] || child == titleTextView[1] || child == subtitleTextView || child == menu || child == backButtonImageView || child == additionalSubtitleTextView); } @Override @@ -281,6 +286,21 @@ public class ActionBar extends FrameLayout { addView(subtitleTextView, 0, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP)); } + public void createAdditionalSubtitleTextView() { + if (additionalSubtitleTextView != null) { + return; + } + additionalSubtitleTextView = new SimpleTextView(getContext()); + additionalSubtitleTextView.setGravity(Gravity.LEFT); + additionalSubtitleTextView.setVisibility(GONE); + additionalSubtitleTextView.setTextColor(Theme.getColor(Theme.key_actionBarDefaultSubtitle)); + addView(additionalSubtitleTextView, 0, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 
LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP)); + } + + public SimpleTextView getAdditionalSubtitleTextView() { + return additionalSubtitleTextView; + } + public void setAddToContainer(boolean value) { addToContainer = value; } @@ -757,12 +777,22 @@ public class ActionBar extends FrameLayout { @Override public void setBackgroundColor(int color) { super.setBackgroundColor(actionBarColor = color); + if (backButtonImageView != null) { + Drawable drawable = backButtonImageView.getDrawable(); + if (drawable instanceof MenuDrawable) { + ((MenuDrawable) drawable).setBackColor(color); + } + } } public boolean isActionModeShowed() { return actionMode != null && actionModeVisible; } + public boolean isActionModeShowed(String tag) { + return actionMode != null && actionModeVisible && ((actionModeTag == null && tag == null) || (actionModeTag != null && actionModeTag.equals(tag))); + } + AnimatorSet searchVisibleAnimator; public void onSearchFieldVisibilityChanged(boolean visible) { @@ -808,6 +838,14 @@ public class ActionBar extends FrameLayout { } } + if (visible) { + if (titleTextView[0] != null) { + titleTextView[0].setVisibility(View.GONE); + } + if (titleTextView[1] != null) { + titleTextView[1].setVisibility(View.GONE); + } + } } }); @@ -947,6 +985,9 @@ public class ActionBar extends FrameLayout { titleTextView[i].setTextSize(AndroidUtilities.isTablet() ? 20 : 18); } subtitleTextView.setTextSize(AndroidUtilities.isTablet() ? 16 : 14); + if (additionalSubtitleTextView != null) { + additionalSubtitleTextView.setTextSize(AndroidUtilities.isTablet() ? 16 : 14); + } } else { if (titleTextView[i] != null && titleTextView[i].getVisibility() != GONE) { titleTextView[i].setTextSize(!AndroidUtilities.isTablet() && getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE ? 
18 : 20); @@ -954,6 +995,9 @@ public class ActionBar extends FrameLayout { if (subtitleTextView != null && subtitleTextView.getVisibility() != GONE) { subtitleTextView.setTextSize(!AndroidUtilities.isTablet() && getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE ? 14 : 16); } + if (additionalSubtitleTextView != null) { + additionalSubtitleTextView.setTextSize(!AndroidUtilities.isTablet() && getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE ? 14 : 16); + } } } @@ -966,13 +1010,16 @@ public class ActionBar extends FrameLayout { if (subtitleTextView != null && subtitleTextView.getVisibility() != GONE) { subtitleTextView.measure(MeasureSpec.makeMeasureSpec(availableWidth, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.AT_MOST)); } + if (additionalSubtitleTextView != null && additionalSubtitleTextView.getVisibility() != GONE) { + additionalSubtitleTextView.measure(MeasureSpec.makeMeasureSpec(availableWidth, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(20), MeasureSpec.AT_MOST)); + } } } int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { View child = getChildAt(i); - if (child.getVisibility() == GONE || child == titleTextView[0] || child == titleTextView[1] || child == subtitleTextView || child == menu || child == backButtonImageView) { + if (child.getVisibility() == GONE || child == titleTextView[0] || child == titleTextView[1] || child == subtitleTextView || child == menu || child == backButtonImageView || child == additionalSubtitleTextView) { continue; } measureChildWithMargins(child, widthMeasureSpec, 0, MeasureSpec.makeMeasureSpec(getMeasuredHeight(), MeasureSpec.EXACTLY), 0); @@ -1016,10 +1063,15 @@ public class ActionBar extends FrameLayout { subtitleTextView.layout(textLeft, additionalTop + textTop, textLeft + subtitleTextView.getMeasuredWidth(), additionalTop + textTop + subtitleTextView.getTextHeight()); } 
+ if (additionalSubtitleTextView != null && additionalSubtitleTextView.getVisibility() != GONE) { + int textTop = getCurrentActionBarHeight() / 2 + (getCurrentActionBarHeight() / 2 - additionalSubtitleTextView.getTextHeight()) / 2 - AndroidUtilities.dp(!AndroidUtilities.isTablet() && getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE ? 1 : 1); + additionalSubtitleTextView.layout(textLeft, additionalTop + textTop, textLeft + additionalSubtitleTextView.getMeasuredWidth(), additionalTop + textTop + additionalSubtitleTextView.getTextHeight()); + } + int childCount = getChildCount(); for (int i = 0; i < childCount; i++) { View child = getChildAt(i); - if (child.getVisibility() == GONE || child == titleTextView[0] || child == titleTextView[1] || child == subtitleTextView || child == menu || child == backButtonImageView) { + if (child.getVisibility() == GONE || child == titleTextView[0] || child == titleTextView[1] || child == subtitleTextView || child == menu || child == backButtonImageView || child == additionalSubtitleTextView) { continue; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenu.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenu.java index d3b548c6e..a422b0196 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenu.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenu.java @@ -326,4 +326,8 @@ public class ActionBarMenu extends LinearLayout { } } } + + public void clearSearchFilters() { + + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java index 8eb9d7bf8..dc73aea63 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuItem.java @@ -53,7 +53,6 @@ import android.widget.TextView; import 
androidx.core.graphics.ColorUtils; import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; @@ -713,7 +712,6 @@ public class ActionBarMenuItem extends FrameLayout { } return false; } else { - searchContainer.setTag(1); searchContainer.setVisibility(VISIBLE); searchContainer.setAlpha(0); if (searchContainerAnimator != null) { @@ -737,6 +735,7 @@ public class ActionBarMenuItem extends FrameLayout { }); searchContainerAnimator.start(); setVisibility(GONE); + clearSearchFilters(); searchField.setText(""); searchField.requestFocus(); if (openKeyboard) { @@ -745,6 +744,7 @@ public class ActionBarMenuItem extends FrameLayout { if (listener != null) { listener.onSearchExpand(); } + searchContainer.setTag(1); return true; } } @@ -762,7 +762,9 @@ public class ActionBarMenuItem extends FrameLayout { } public void addSearchFilter(FiltersView.MediaFilterData filter) { currentSearchFilters.add(filter); - selectedFilterIndex = currentSearchFilters.size() - 1; + if (searchContainer.getTag() != null) { + selectedFilterIndex = currentSearchFilters.size() - 1; + } onFiltersChanged(); } @@ -780,7 +782,7 @@ public class ActionBarMenuItem extends FrameLayout { boolean visible = !currentSearchFilters.isEmpty(); ArrayList localFilters = new ArrayList<>(currentSearchFilters); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && searchContainer.getTag() != null) { TransitionSet transition = new TransitionSet(); ChangeBounds changeBounds = new ChangeBounds(); changeBounds.setDuration(150); @@ -887,17 +889,19 @@ public class ActionBarMenuItem extends FrameLayout { searchFilterLayout.setTag(visible ? 
1 : null); float oldX = searchField.getX(); - searchField.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { - searchField.getViewTreeObserver().removeOnPreDrawListener(this); - if (searchField.getX() != oldX) { - searchField.setTranslationX(oldX - searchField.getX()); + if (searchContainer.getTag() != null) { + searchField.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + searchField.getViewTreeObserver().removeOnPreDrawListener(this); + if (searchField.getX() != oldX) { + searchField.setTranslationX(oldX - searchField.getX()); + } + searchField.animate().translationX(0).setDuration(250).setStartDelay(0).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + return true; } - searchField.animate().translationX(0).setDuration(250).setStartDelay(0).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); - return true; - } - }); + }); + } checkClearButton(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java index 015dab9d5..f94e2873a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarMenuSubItem.java @@ -31,6 +31,8 @@ public class ActionBarMenuSubItem extends FrameLayout { boolean top; boolean bottom; + private int itemHeight = 48; + public ActionBarMenuSubItem(Context context, boolean top, boolean bottom) { this(context, false, top, bottom); } @@ -69,9 +71,12 @@ public class ActionBarMenuSubItem extends FrameLayout { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(48), View.MeasureSpec.EXACTLY)); + super.onMeasure(widthMeasureSpec, 
View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(itemHeight), View.MeasureSpec.EXACTLY)); } + public void setItemHeight(int itemHeight) { + this.itemHeight = itemHeight; + } public void setChecked(boolean checked) { if (checkView == null) { return; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java index 4bdc1946b..a08d469ac 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ActionBarPopupWindow.java @@ -17,6 +17,7 @@ import android.graphics.Canvas; import android.graphics.Color; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; +import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Build; @@ -91,8 +92,9 @@ public class ActionBarPopupWindow extends PopupWindow { private boolean animationEnabled = allowAnimation; private ArrayList itemAnimators; private HashMap positions = new HashMap<>(); - private int gapStartY; - private int gapEndY; + private int gapStartY = Integer.MIN_VALUE; + private int gapEndY = Integer.MIN_VALUE; + private Rect bgPaddings = new Rect(); private ScrollView scrollView; protected LinearLayout linearLayout; @@ -106,6 +108,9 @@ public class ActionBarPopupWindow extends PopupWindow { super(context); backgroundDrawable = getResources().getDrawable(R.drawable.popup_fixed_alert2).mutate(); + if (backgroundDrawable != null) { + backgroundDrawable.getPadding(bgPaddings); + } setBackgroundColor(Theme.getColor(Theme.key_actionBarDefaultSubmenuBackground)); setPadding(AndroidUtilities.dp(8), AndroidUtilities.dp(8), AndroidUtilities.dp(8), AndroidUtilities.dp(8)); @@ -125,8 +130,8 @@ public class ActionBarPopupWindow extends PopupWindow { if (fitItems) { int maxWidth = 0; int fixWidth = 0; - gapStartY = -1; - gapEndY = -1; + gapStartY = Integer.MIN_VALUE; + 
gapEndY = Integer.MIN_VALUE; ArrayList viewsToFix = null; for (int a = 0, N = getChildCount(); a < N; a++) { View view = getChildAt(a); @@ -249,6 +254,9 @@ public class ActionBarPopupWindow extends PopupWindow { public void setBackgroundDrawable(Drawable drawable) { backgroundColor = Color.WHITE; backgroundDrawable = drawable; + if (backgroundDrawable != null) { + backgroundDrawable.getPadding(bgPaddings); + } } private void startChildAnimation(View child) { @@ -309,30 +317,42 @@ public class ActionBarPopupWindow extends PopupWindow { @Override protected void onDraw(Canvas canvas) { if (backgroundDrawable != null) { + int start = gapStartY - scrollView.getScrollY(); + int end = gapEndY - scrollView.getScrollY(); for (int a = 0; a < 2; a++) { - if (a == 1 && gapStartY < 0) { + if (a == 1 && start < -AndroidUtilities.dp(16)) { break; } + if (gapStartY != Integer.MIN_VALUE) { + canvas.save(); + canvas.clipRect(0, bgPaddings.top, getMeasuredWidth(), getMeasuredHeight()); + } backgroundDrawable.setAlpha(backAlpha); if (shownFromBotton) { final int height = getMeasuredHeight(); backgroundDrawable.setBounds(0, (int) (height * (1.0f - backScaleY)), (int) (getMeasuredWidth() * backScaleX), height); } else { - if (gapStartY > 0) { + if (start > -AndroidUtilities.dp(16)) { int h = (int) (getMeasuredHeight() * backScaleY); if (a == 0) { - backgroundDrawable.setBounds(0, 0, (int) (getMeasuredWidth() * backScaleX), Math.min(h, gapStartY + AndroidUtilities.dp(16))); + backgroundDrawable.setBounds(0, -scrollView.getScrollY(), (int) (getMeasuredWidth() * backScaleX), Math.min(h, start + AndroidUtilities.dp(16))); } else { - if (h < gapEndY) { + if (h < end) { + if (gapStartY != Integer.MIN_VALUE) { + canvas.restore(); + } continue; } - backgroundDrawable.setBounds(0, gapEndY, (int) (getMeasuredWidth() * backScaleX), h); + backgroundDrawable.setBounds(0, end, (int) (getMeasuredWidth() * backScaleX), h); } } else { - backgroundDrawable.setBounds(0, 0, (int) (getMeasuredWidth() * 
backScaleX), (int) (getMeasuredHeight() * backScaleY)); + backgroundDrawable.setBounds(0, gapStartY < 0 ? 0 : -AndroidUtilities.dp(16), (int) (getMeasuredWidth() * backScaleX), (int) (getMeasuredHeight() * backScaleY)); } } backgroundDrawable.draw(canvas); + if (gapStartY != Integer.MIN_VALUE) { + canvas.restore(); + } } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java index 448d01bac..9017dc7df 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java @@ -63,6 +63,7 @@ public abstract class BaseFragment { protected Bundle arguments; protected boolean hasOwnBackground = false; protected boolean isPaused = true; + protected Dialog parentDialog; public BaseFragment() { classGuid = ConnectionsManager.generateClassGuid(); @@ -233,6 +234,10 @@ public abstract class BaseFragment { } public void finishFragment() { + if (parentDialog != null) { + parentDialog.dismiss(); + return; + } finishFragment(true); } @@ -248,6 +253,10 @@ public abstract class BaseFragment { if (isFinished || parentLayout == null) { return; } + if (parentDialog != null) { + parentDialog.dismiss(); + return; + } parentLayout.removeFragmentFromStack(this); } @@ -618,4 +627,50 @@ public abstract class BaseFragment { public void setProgressToDrawerOpened(float v) { } + + public ActionBarLayout[] showAsSheet(BaseFragment fragment) { + if (getParentActivity() == null) { + return null; + } + ActionBarLayout[] actionBarLayout = new ActionBarLayout[]{new ActionBarLayout(getParentActivity())}; + BottomSheet bottomSheet = new BottomSheet(getParentActivity(), true) { + { + actionBarLayout[0].init(new ArrayList<>()); + actionBarLayout[0].addFragmentToStack(fragment); + actionBarLayout[0].showLastFragment(); + actionBarLayout[0].setPadding(backgroundPaddingLeft, 0, backgroundPaddingLeft, 0); + 
containerView = actionBarLayout[0]; + setApplyBottomPadding(false); + setApplyBottomPadding(false); + setOnDismissListener(dialog -> fragment.onFragmentDestroy()); + } + + @Override + protected boolean canDismissWithSwipe() { + return false; + } + + @Override + public void onBackPressed() { + if (actionBarLayout[0] == null || actionBarLayout[0].fragmentsStack.size() <= 1) { + super.onBackPressed(); + } else { + actionBarLayout[0].onBackPressed(); + } + } + + @Override + public void dismiss() { + super.dismiss(); + actionBarLayout[0] = null; + } + }; + fragment.setParentDialog(bottomSheet); + bottomSheet.show(); + return actionBarLayout; + } + + private void setParentDialog(Dialog dialog) { + parentDialog = dialog; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java index 7ee178602..c466f0181 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BottomSheet.java @@ -17,6 +17,7 @@ import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.Insets; import android.graphics.Paint; import android.graphics.PorterDuff; @@ -58,7 +59,6 @@ import org.telegram.ui.Components.AnimationProperties; import org.telegram.ui.Components.Bulletin; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; -import org.telegram.ui.LaunchActivity; import java.util.ArrayList; @@ -92,10 +92,18 @@ public class BottomSheet extends Dialog { private CharSequence title; private boolean bigTitle; private int bottomInset; + private int leftInset; + private int rightInset; protected boolean fullWidth; protected boolean isFullscreen; private boolean fullHeight; - protected ColorDrawable backDrawable = new ColorDrawable(0xff000000); 
+ protected ColorDrawable backDrawable = new ColorDrawable(0xff000000) { + @Override + public void setAlpha(int alpha) { + super.setAlpha(alpha); + container.invalidate(); + } + }; protected boolean useLightStatusBar = true; protected boolean useLightNavBar; @@ -216,6 +224,7 @@ public class BottomSheet extends Dialog { currentTranslation = 0; } containerView.setTranslationY(currentTranslation); + container.invalidate(); } } @@ -233,6 +242,7 @@ public class BottomSheet extends Dialog { currentTranslation = 0; } containerView.setTranslationY(currentTranslation); + container.invalidate(); } } @@ -326,6 +336,7 @@ public class BottomSheet extends Dialog { } containerView.setTranslationY(translationY); startedTrackingY = (int) ev.getY(); + container.invalidate(); } } else if (ev == null || ev.getPointerId(0) == startedTrackingPointerId && (ev.getAction() == MotionEvent.ACTION_CANCEL || ev.getAction() == MotionEvent.ACTION_UP || ev.getAction() == MotionEvent.ACTION_POINTER_UP)) { if (velocityTracker == null) { @@ -378,6 +389,8 @@ public class BottomSheet extends Dialog { keyboardVisible = keyboardHeight > AndroidUtilities.dp(20); if (lastInsets != null && Build.VERSION.SDK_INT >= 21) { bottomInset = lastInsets.getSystemWindowInsetBottom(); + leftInset = lastInsets.getSystemWindowInsetLeft(); + rightInset = lastInsets.getSystemWindowInsetRight(); if (Build.VERSION.SDK_INT >= 29) { bottomInset += getAdditionalMandatoryOffsets(); } @@ -426,6 +439,11 @@ public class BottomSheet extends Dialog { } } + @Override + public void requestLayout() { + super.requestLayout(); + } + @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { layoutCount--; @@ -453,12 +471,9 @@ public class BottomSheet extends Dialog { keyboardContentAnimator.cancel(); } keyboardContentAnimator = ValueAnimator.ofFloat(containerView.getTranslationY(), 0); - keyboardContentAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { - @Override - public void 
onAnimationUpdate(ValueAnimator valueAnimator) { - containerView.setTranslationY((Float) valueAnimator.getAnimatedValue()); - invalidate(); - } + keyboardContentAnimator.addUpdateListener(valueAnimator -> { + containerView.setTranslationY((Float) valueAnimator.getAnimatedValue()); + invalidate(); }); keyboardContentAnimator.addListener(new AnimatorListenerAdapter() { @Override @@ -556,16 +571,16 @@ public class BottomSheet extends Dialog { @Override protected void dispatchDraw(Canvas canvas) { super.dispatchDraw(canvas); - if ((drawNavigationBar && bottomInset != 0) || currentPanTranslationY != 0) { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { - if (navBarColorKey != null) { - backgroundPaint.setColor(Theme.getColor(navBarColorKey)); - } else { - backgroundPaint.setColor(navBarColor); - } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + if (navBarColorKey != null) { + backgroundPaint.setColor(Theme.getColor(navBarColorKey)); } else { - backgroundPaint.setColor(0xff000000); + backgroundPaint.setColor(navBarColor); } + } else { + backgroundPaint.setColor(0xff000000); + } + if ((drawNavigationBar && bottomInset != 0) || currentPanTranslationY != 0) { float translation = 0; if (scrollNavBar || Build.VERSION.SDK_INT >= 29 && getAdditionalMandatoryOffsets() > 0) { float dist = containerView.getMeasuredHeight() - containerView.getTranslationY(); @@ -573,6 +588,18 @@ public class BottomSheet extends Dialog { } int navBarHeight = drawNavigationBar ? 
bottomInset : 0; canvas.drawRect(containerView.getLeft() + backgroundPaddingLeft, getMeasuredHeight() - navBarHeight + translation - currentPanTranslationY, containerView.getRight() - backgroundPaddingLeft, getMeasuredHeight() + translation, backgroundPaint); + + if (overlayDrawNavBarColor != 0) { + backgroundPaint.setColor(overlayDrawNavBarColor); + canvas.drawRect(containerView.getLeft() + backgroundPaddingLeft, getMeasuredHeight() - navBarHeight + translation - currentPanTranslationY, containerView.getRight() - backgroundPaddingLeft, getMeasuredHeight() + translation, backgroundPaint); + } + } + if (drawNavigationBar && rightInset != 0 && rightInset > leftInset && fullWidth && AndroidUtilities.displaySize.x > AndroidUtilities.displaySize.y) { + canvas.drawRect(containerView.getRight() - backgroundPaddingLeft, containerView.getTranslationY(), containerView.getRight() + rightInset, getMeasuredHeight(), backgroundPaint); + } + + if (drawNavigationBar && leftInset != 0 && leftInset > rightInset && fullWidth && AndroidUtilities.displaySize.x > AndroidUtilities.displaySize.y) { + canvas.drawRect(0, containerView.getTranslationY(), containerView.getLeft() + backgroundPaddingLeft, getMeasuredHeight(), backgroundPaint); } if (containerView.getTranslationY() < 0) { @@ -685,6 +712,10 @@ public class BottomSheet extends Dialog { textView.setTextColor(color); } + public void setIconColor(int color) { + imageView.setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + } + public void setGravity(int gravity) { textView.setGravity(gravity); } @@ -1158,6 +1189,10 @@ public class BottomSheet extends Dialog { cell.imageView.setColorFilter(new PorterDuffColorFilter(icon, PorterDuff.Mode.MULTIPLY)); } + public ArrayList getItemViews() { + return itemViews; + } + public void setItems(CharSequence[] i, int[] icons, final OnClickListener listener) { items = i; itemIcons = icons; @@ -1450,4 +1485,29 @@ public class BottomSheet extends Dialog { 
this.currentPanTranslationY = currentPanTranslationY; container.invalidate(); } + + private int overlayDrawNavBarColor; + + public void setOverlayNavBarColor(int color) { + overlayDrawNavBarColor = color; + if (container != null) { + container.invalidate(); + } + + if (Color.alpha(color) > 120) { + AndroidUtilities.setLightStatusBar(getWindow(), false); + AndroidUtilities.setLightNavigationBar(getWindow(), false); + } else { + AndroidUtilities.setLightNavigationBar(getWindow(), !useLightNavBar); + AndroidUtilities.setLightStatusBar(getWindow(), !useLightStatusBar); + } + } + + public ViewGroup getContainerView() { + return containerView; + } + + public int getCurrentAccount() { + return currentAccount; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/DrawerLayoutContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/DrawerLayoutContainer.java index 38d0ce579..c2d855d13 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/DrawerLayoutContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/DrawerLayoutContainer.java @@ -193,13 +193,15 @@ public class DrawerLayoutContainer extends FrameLayout { if (drawerLayout.getVisibility() != newVisibility) { drawerLayout.setVisibility(newVisibility); } - BaseFragment currentFragment = parentActionBarLayout.fragmentsStack.get(0); - if (drawerPosition == drawerLayout.getMeasuredWidth()) { - currentFragment.setProgressToDrawerOpened(1f); - } else if (drawerPosition == 0){ - currentFragment.setProgressToDrawerOpened(0); - } else { - currentFragment.setProgressToDrawerOpened(drawerPosition / drawerLayout.getMeasuredWidth()); + if (!parentActionBarLayout.fragmentsStack.isEmpty()) { + BaseFragment currentFragment = parentActionBarLayout.fragmentsStack.get(0); + if (drawerPosition == drawerLayout.getMeasuredWidth()) { + currentFragment.setProgressToDrawerOpened(1f); + } else if (drawerPosition == 0) { + currentFragment.setProgressToDrawerOpened(0); + } else { + 
currentFragment.setProgressToDrawerOpened(drawerPosition / drawerLayout.getMeasuredWidth()); + } } setScrimOpacity(drawerPosition / (float) drawerLayout.getMeasuredWidth()); } @@ -317,6 +319,10 @@ public class DrawerLayoutContainer extends FrameLayout { } } + public boolean isAllowOpenDrawer() { + return allowOpenDrawer; + } + public void setAllowOpenDrawerBySwipe(boolean value) { allowOpenDrawerBySwipe = value; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/MenuDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/MenuDrawable.java index 020fc283c..5daf62ece 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/MenuDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/MenuDrawable.java @@ -12,15 +12,18 @@ import android.graphics.Canvas; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; +import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.os.SystemClock; import android.view.animation.DecelerateInterpolator; import org.telegram.messenger.AndroidUtilities; +import org.telegram.ui.Components.MediaActionDrawable; public class MenuDrawable extends Drawable { private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + private Paint backPaint = new Paint(Paint.ANTI_ALIAS_FLAG); private boolean reverseAngle; private long lastFrameTime; private boolean animationInProgress; @@ -30,10 +33,38 @@ public class MenuDrawable extends Drawable { private boolean rotateToBack = true; private DecelerateInterpolator interpolator = new DecelerateInterpolator(); private int iconColor; + private int backColor; + + private RectF rect = new RectF(); + + private int type; + private int previousType; + private float typeAnimationProgress; + + private float downloadRadOffset; + private float downloadProgress; + private float animatedDownloadProgress; + private float downloadProgressAnimationStart; + private float downloadProgressTime; + 
private boolean miniIcon; + + public static int TYPE_DEFAULT = 0; + public static int TYPE_UDPATE_AVAILABLE = 1; + public static int TYPE_UDPATE_DOWNLOADING = 2; public MenuDrawable() { + this(TYPE_DEFAULT); + } + + public MenuDrawable(int type) { super(); paint.setStrokeWidth(AndroidUtilities.dp(2)); + backPaint.setStrokeWidth(AndroidUtilities.density * 1.66f); + backPaint.setStrokeCap(Paint.Cap.ROUND); + backPaint.setStyle(Paint.Style.STROKE); + previousType = TYPE_DEFAULT; + this.type = type; + typeAnimationProgress = 1.0f; } public void setRotateToBack(boolean value) { @@ -62,13 +93,26 @@ public class MenuDrawable extends Drawable { invalidateSelf(); } + public void setType(int value, boolean animated) { + if (type == value) { + return; + } + previousType = type; + type = value; + if (animated) { + typeAnimationProgress = 0.0f; + } else { + typeAnimationProgress = 1.0f; + } + invalidateSelf(); + } + @Override public void draw(Canvas canvas) { + long newTime = SystemClock.elapsedRealtime(); + long dt = newTime - lastFrameTime; if (currentRotation != finalRotation) { - long newTime = SystemClock.elapsedRealtime(); if (lastFrameTime != 0) { - long dt = newTime - lastFrameTime; - currentAnimationTime += dt; if (currentAnimationTime >= 200) { currentRotation = finalRotation; @@ -80,40 +124,139 @@ public class MenuDrawable extends Drawable { } } } - lastFrameTime = newTime; invalidateSelf(); } + if (typeAnimationProgress < 1.0f) { + typeAnimationProgress += dt / 200.0f; + if (typeAnimationProgress > 1.0f) { + typeAnimationProgress = 1.0f; + } + invalidateSelf(); + } + lastFrameTime = newTime; canvas.save(); - canvas.translate(getIntrinsicWidth() / 2, getIntrinsicHeight() / 2); + canvas.translate(getIntrinsicWidth() / 2 - AndroidUtilities.dp(9), getIntrinsicHeight() / 2); float endYDiff; float endXDiff; float startYDiff; float startXDiff; int color1 = iconColor == 0 ? 
Theme.getColor(Theme.key_actionBarDefaultIcon) : iconColor; - if (rotateToBack) { - canvas.rotate(currentRotation * (reverseAngle ? -180 : 180)); - paint.setColor(color1); - canvas.drawLine(-AndroidUtilities.dp(9), 0, AndroidUtilities.dp(9) - AndroidUtilities.dp(3.0f) * currentRotation, 0, paint); - endYDiff = AndroidUtilities.dp(5) * (1 - Math.abs(currentRotation)) - AndroidUtilities.dp(0.5f) * Math.abs(currentRotation); - endXDiff = AndroidUtilities.dp(9) - AndroidUtilities.dp(2.5f) * Math.abs(currentRotation); - startYDiff = AndroidUtilities.dp(5) + AndroidUtilities.dp(2.0f) * Math.abs(currentRotation); - startXDiff = -AndroidUtilities.dp(9) + AndroidUtilities.dp(7.5f) * Math.abs(currentRotation); + int backColor1 = backColor == 0 ? Theme.getColor(Theme.key_actionBarDefault) : backColor; + + float diffUp = 0; + float diffMiddle = 0; + if (type == TYPE_DEFAULT) { + if (previousType != TYPE_DEFAULT) { + diffUp = AndroidUtilities.dp(9) * (1.0f - typeAnimationProgress); + diffMiddle = AndroidUtilities.dp(7) * (1.0f - typeAnimationProgress); + } } else { - canvas.rotate(currentRotation * (reverseAngle ? 
-225 : 135)); - int color2 = Theme.getColor(Theme.key_actionBarActionModeDefaultIcon); - paint.setColor(AndroidUtilities.getOffsetColor(color1, color2, currentRotation, 1.0f)); - canvas.drawLine(-AndroidUtilities.dp(9) + AndroidUtilities.dp(1) * currentRotation, 0, AndroidUtilities.dp(9) - AndroidUtilities.dp(1) * currentRotation, 0, paint); - endYDiff = AndroidUtilities.dp(5) * (1 - Math.abs(currentRotation)) - AndroidUtilities.dp(0.5f) * Math.abs(currentRotation); - endXDiff = AndroidUtilities.dp(9) - AndroidUtilities.dp(9) * Math.abs(currentRotation); - startYDiff = AndroidUtilities.dp(5) + AndroidUtilities.dp(3.0f) * Math.abs(currentRotation); - startXDiff = -AndroidUtilities.dp(9) + AndroidUtilities.dp(9) * Math.abs(currentRotation); + if (previousType == TYPE_DEFAULT) { + diffUp = AndroidUtilities.dp(9) * typeAnimationProgress * (1.0f - currentRotation); + diffMiddle = AndroidUtilities.dp(7) * typeAnimationProgress * (1.0f - currentRotation); + } else { + diffUp = AndroidUtilities.dp(9) * (1.0f - currentRotation); + diffMiddle = AndroidUtilities.dp(7) * (1.0f - currentRotation); + } } - canvas.drawLine(startXDiff, -startYDiff, endXDiff, -endYDiff, paint); + if (rotateToBack) { + canvas.rotate(currentRotation * (reverseAngle ? -180 : 180), AndroidUtilities.dp(9), 0); + paint.setColor(color1); + canvas.drawLine(0, 0, AndroidUtilities.dp(18) - AndroidUtilities.dp(3.0f) * currentRotation - diffMiddle, 0, paint); + endYDiff = AndroidUtilities.dp(5) * (1 - Math.abs(currentRotation)) - AndroidUtilities.dp(0.5f) * Math.abs(currentRotation); + endXDiff = AndroidUtilities.dp(18) - AndroidUtilities.dp(2.5f) * Math.abs(currentRotation); + startYDiff = AndroidUtilities.dp(5) + AndroidUtilities.dp(2.0f) * Math.abs(currentRotation); + startXDiff = AndroidUtilities.dp(7.5f) * Math.abs(currentRotation); + } else { + canvas.rotate(currentRotation * (reverseAngle ? 
-225 : 135), AndroidUtilities.dp(9), 0); + if (miniIcon) { + paint.setColor(color1); + canvas.drawLine(AndroidUtilities.dp(2), 0, AndroidUtilities.dp(16) - diffMiddle, 0, paint); + endYDiff = AndroidUtilities.dp(5) * (1 - Math.abs(currentRotation)) - AndroidUtilities.dp(0.25f) * Math.abs(currentRotation); + endXDiff = AndroidUtilities.dp(16) - AndroidUtilities.dp(6.25f) * Math.abs(currentRotation); + startYDiff = AndroidUtilities.dp(5) + AndroidUtilities.dp(1.25f) * Math.abs(currentRotation); + startXDiff = AndroidUtilities.dp(2) + AndroidUtilities.dp(7) * Math.abs(currentRotation); + } else { + int color2 = Theme.getColor(Theme.key_actionBarActionModeDefaultIcon); + int backColor2 = Theme.getColor(Theme.key_actionBarActionModeDefault); + backColor1 = AndroidUtilities.getOffsetColor(backColor1, backColor2, currentRotation, 1.0f); + paint.setColor(AndroidUtilities.getOffsetColor(color1, color2, currentRotation, 1.0f)); + canvas.drawLine(AndroidUtilities.dp(1) * currentRotation, 0, AndroidUtilities.dp(18) - AndroidUtilities.dp(1) * currentRotation - diffMiddle, 0, paint); + endYDiff = AndroidUtilities.dp(5) * (1 - Math.abs(currentRotation)) - AndroidUtilities.dp(0.5f) * Math.abs(currentRotation); + endXDiff = AndroidUtilities.dp(18) - AndroidUtilities.dp(9) * Math.abs(currentRotation); + startYDiff = AndroidUtilities.dp(5) + AndroidUtilities.dp(3.0f) * Math.abs(currentRotation); + startXDiff = AndroidUtilities.dp(9) * Math.abs(currentRotation); + } + } + canvas.drawLine(startXDiff, -startYDiff, endXDiff - diffUp, -endYDiff, paint); canvas.drawLine(startXDiff, startYDiff, endXDiff, endYDiff, paint); + if (type != TYPE_DEFAULT && currentRotation != 1.0f || previousType != TYPE_DEFAULT && typeAnimationProgress != 1.0f) { + float cx = AndroidUtilities.dp(9 + 8); + float cy = -AndroidUtilities.dp(4.5f); + float rad = AndroidUtilities.density * 5.5f; + canvas.scale(1.0f - currentRotation, 1.0f - currentRotation, cx, cy); + if (type == TYPE_DEFAULT) { + rad *= (1.0f - 
typeAnimationProgress); + } + backPaint.setColor(backColor1); + canvas.drawCircle(cx, cy, rad, paint); + if (type == TYPE_UDPATE_AVAILABLE || previousType == TYPE_UDPATE_AVAILABLE) { + backPaint.setStrokeWidth(AndroidUtilities.density * 1.66f); + if (previousType == TYPE_UDPATE_AVAILABLE) { + backPaint.setAlpha((int) (255 * (1.0f - typeAnimationProgress))); + } else { + backPaint.setAlpha(255); + } + canvas.drawLine(cx, cy - AndroidUtilities.dp(2), cx, cy, backPaint); + canvas.drawPoint(cx, cy + AndroidUtilities.dp(2.5f), backPaint); + } + if (type == TYPE_UDPATE_DOWNLOADING || previousType == TYPE_UDPATE_DOWNLOADING) { + backPaint.setStrokeWidth(AndroidUtilities.dp(2)); + if (previousType == TYPE_UDPATE_DOWNLOADING) { + backPaint.setAlpha((int) (255 * (1.0f - typeAnimationProgress))); + } else { + backPaint.setAlpha(255); + } + float arcRad = Math.max(4, 360 * animatedDownloadProgress); + rect.set(cx - AndroidUtilities.dp(3), cy - AndroidUtilities.dp(3), cx + AndroidUtilities.dp(3), cy + AndroidUtilities.dp(3)); + canvas.drawArc(rect, downloadRadOffset, arcRad, false, backPaint); + + downloadRadOffset += 360 * dt / 2500.0f; + downloadRadOffset = MediaActionDrawable.getCircleValue(downloadRadOffset); + + float progressDiff = downloadProgress - downloadProgressAnimationStart; + if (progressDiff > 0) { + downloadProgressTime += dt; + if (downloadProgressTime >= 200.0f) { + animatedDownloadProgress = downloadProgress; + downloadProgressAnimationStart = downloadProgress; + downloadProgressTime = 0; + } else { + animatedDownloadProgress = downloadProgressAnimationStart + progressDiff * interpolator.getInterpolation(downloadProgressTime / 200.0f); + } + } + invalidateSelf(); + } + } canvas.restore(); } + public void setUpdateDownloadProgress(float value, boolean animated) { + if (!animated) { + animatedDownloadProgress = value; + downloadProgressAnimationStart = value; + } else { + if (animatedDownloadProgress > value) { + animatedDownloadProgress = value; + } + 
downloadProgressAnimationStart = animatedDownloadProgress; + } + downloadProgress = value; + downloadProgressTime = 0; + invalidateSelf(); + } + @Override public void setAlpha(int alpha) { @@ -142,4 +285,16 @@ public class MenuDrawable extends Drawable { public void setIconColor(int iconColor) { this.iconColor = iconColor; } + + public void setBackColor(int backColor) { + this.backColor = backColor; + } + + public void setRoundCap() { + paint.setStrokeCap(Paint.Cap.ROUND); + } + + public void setMiniIcon(boolean miniIcon) { + this.miniIcon = miniIcon; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/SimpleTextView.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/SimpleTextView.java index a6387e11f..4a3a07165 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/SimpleTextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/SimpleTextView.java @@ -27,10 +27,7 @@ import android.view.Gravity; import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; -import com.google.android.exoplayer2.util.Log; - import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.LocaleController; import org.telegram.ui.Components.EmptyStubSpan; import org.telegram.ui.Components.StaticLayoutEx; @@ -83,6 +80,9 @@ public class SimpleTextView extends View implements Drawable.Callback { private int fullLayoutLeftOffset; private float fullLayoutLeftCharactersOffset; + private int minusWidth; + private int fullTextMaxLines = 3; + public SimpleTextView(Context context) { super(context); textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); @@ -228,9 +228,8 @@ public class SimpleTextView extends View implements Drawable.Callback { if (buildFullLayout) { CharSequence string = TextUtils.ellipsize(text, textPaint, width, TextUtils.TruncateAt.END); if (!string.equals(text)) { - fullLayout = StaticLayoutEx.createStaticLayout(text, 0, text.length(), textPaint, width, Layout.Alignment.ALIGN_NORMAL, 
1.0f, 0.0f, false, TextUtils.TruncateAt.END, width, 3, false); + fullLayout = StaticLayoutEx.createStaticLayout(text, 0, text.length(), textPaint, width, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false, TextUtils.TruncateAt.END, width, fullTextMaxLines, false); if (fullLayout != null) { - int end = fullLayout.getLineEnd(0); int start = fullLayout.getLineStart(1); CharSequence substr = text.subSequence(0, end); @@ -248,7 +247,7 @@ public class SimpleTextView extends View implements Drawable.Callback { part = "\u200F" + part; } partLayout = new StaticLayout(part, 0, part.length(), textPaint, scrollNonFitText ? AndroidUtilities.dp(2000) : width + AndroidUtilities.dp(8), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false); - fullLayout = StaticLayoutEx.createStaticLayout(full, 0, full.length(), textPaint, width + AndroidUtilities.dp(8) + fullLayoutAdditionalWidth, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false, TextUtils.TruncateAt.END, width + fullLayoutAdditionalWidth, 3, false); + fullLayout = StaticLayoutEx.createStaticLayout(full, 0, full.length(), textPaint, width + AndroidUtilities.dp(8) + fullLayoutAdditionalWidth, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false, TextUtils.TruncateAt.END, width + fullLayoutAdditionalWidth, fullTextMaxLines, false); } } else { layout = new StaticLayout(string, 0, string.length(), textPaint, scrollNonFitText ? 
AndroidUtilities.dp(2000) : width + AndroidUtilities.dp(8), Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false); @@ -293,7 +292,7 @@ public class SimpleTextView extends View implements Drawable.Callback { scrollingOffset = 0; currentScrollDelay = SCROLL_DELAY_MS; } - createLayout(width - getPaddingLeft() - getPaddingRight()); + createLayout(width - getPaddingLeft() - getPaddingRight() - minusWidth); int finalHeight; if (MeasureSpec.getMode(heightMeasureSpec) == MeasureSpec.EXACTLY) { @@ -376,6 +375,16 @@ public class SimpleTextView extends View implements Drawable.Callback { } } + public void setMinusWidth(int value) { + if (value == minusWidth) { + return; + } + minusWidth = value; + if (!recreateLayoutMaybe()) { + invalidate(); + } + } + public Drawable getRightDrawable() { return rightDrawable; } @@ -432,7 +441,7 @@ public class SimpleTextView extends View implements Drawable.Callback { private boolean recreateLayoutMaybe() { if (wasLayout && getMeasuredHeight() != 0 && !buildFullLayout) { - boolean result = createLayout(getMeasuredWidth() - getPaddingLeft() - getPaddingRight()); + boolean result = createLayout(getMeasuredWidth() - getPaddingLeft() - getPaddingRight() - minusWidth); if ((gravity & Gravity.VERTICAL_GRAVITY_MASK) == Gravity.CENTER_VERTICAL) { offsetY = (getMeasuredHeight() - textHeight) / 2 + getPaddingTop(); } else { @@ -568,10 +577,14 @@ public class SimpleTextView extends View implements Drawable.Callback { int prevAlpha = textPaint.getAlpha(); textPaint.setAlpha((int) (255 * (1.0f - fullAlpha))); canvas.save(); + float partOffset = 0; + if (partLayout.getText().length() == 1) { + partOffset = fullTextMaxLines == 1 ? AndroidUtilities.dp(0.5f) : AndroidUtilities.dp(4); + } if (layout.getLineLeft(0) != 0) { - canvas.translate(-layout.getLineWidth(0) + (partLayout.getText().length() == 1 ? 
AndroidUtilities.dp(4) : 0), 0); + canvas.translate(-layout.getLineWidth(0) + partOffset, 0); } else { - canvas.translate(layout.getLineWidth(0) - (partLayout.getText().length() == 1 ? AndroidUtilities.dp(4) : 0), 0); + canvas.translate(layout.getLineWidth(0) - partOffset, 0); } canvas.translate(-fullLayoutLeftOffset * fullAlpha + fullLayoutLeftCharactersOffset * fullAlpha, 0); partLayout.draw(canvas); @@ -684,10 +697,14 @@ public class SimpleTextView extends View implements Drawable.Callback { if (this.fullLayoutAdditionalWidth != fullLayoutAdditionalWidth || this.fullLayoutLeftOffset != fullLayoutLeftOffset) { this.fullLayoutAdditionalWidth = fullLayoutAdditionalWidth; this.fullLayoutLeftOffset = fullLayoutLeftOffset; - createLayout(getMeasuredWidth()); + createLayout(getMeasuredWidth() - minusWidth); } } + public void setFullTextMaxLines(int fullTextMaxLines) { + this.fullTextMaxLines = fullTextMaxLines; + } + public int getTextColor() { return textPaint.getColor(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java index 5ad981cf6..2a6e1d678 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/Theme.java @@ -18,9 +18,12 @@ import android.content.res.Configuration; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; +import android.graphics.BitmapShader; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.ColorFilter; +import android.graphics.ColorMatrix; +import android.graphics.ColorMatrixColorFilter; import android.graphics.LinearGradient; import android.graphics.Matrix; import android.graphics.Paint; @@ -84,6 +87,7 @@ import org.telegram.ui.Components.AudioVisualizerDrawable; import org.telegram.ui.Components.BackgroundGradientDrawable; import org.telegram.ui.Components.CombinedDrawable; 
import org.telegram.ui.Components.FragmentContextViewWavesDrawable; +import org.telegram.ui.Components.MotionBackgroundDrawable; import org.telegram.ui.Components.MsgClockDrawable; import org.telegram.ui.Components.PathAnimator; import org.telegram.ui.Components.PlayingGameDrawable; @@ -112,7 +116,10 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; +import java.util.concurrent.CountDownLatch; +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; import androidx.annotation.UiThread; import androidx.core.graphics.ColorUtils; @@ -763,8 +770,8 @@ public class Theme { } if (load) { if (watingForLoad != null) { - NotificationCenter.getInstance(account).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(account).addObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(account).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(account).addObserver(this, NotificationCenter.fileLoadFailed); for (HashMap.Entry entry : watingForLoad.entrySet()) { LoadingPattern loadingPattern = entry.getValue(); FileLoader.getInstance(account).loadFile(ImageLocation.getForDocument(loadingPattern.pattern.document), "wallpaper", null, 0, 1); @@ -772,8 +779,8 @@ public class Theme { } } else { if (watingForLoad == null || watingForLoad.isEmpty()) { - NotificationCenter.getInstance(account).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(account).removeObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(account).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(account).removeObserver(this, NotificationCenter.fileLoadFailed); } } } @@ -790,19 +797,36 @@ public class Theme { int backgroundAccent = accent.accentColor; int backgroundColor = (int) accent.backgroundOverrideColor; - int backgroundGradientColor = (int) 
accent.backgroundGradientOverrideColor; - if (backgroundGradientColor == 0 && accent.backgroundGradientOverrideColor == 0) { + + int backgroundGradientColor1 = (int) accent.backgroundGradientOverrideColor1; + if (backgroundGradientColor1 == 0 && accent.backgroundGradientOverrideColor1 == 0) { if (backgroundColor != 0) { backgroundAccent = backgroundColor; } - Integer color = values.get(key_chat_wallpaper_gradient_to); + Integer color = values.get(key_chat_wallpaper_gradient_to1); if (color != null) { - backgroundGradientColor = changeColorAccent(themeInfo, backgroundAccent, color); + backgroundGradientColor1 = changeColorAccent(themeInfo, backgroundAccent, color); } } else { backgroundAccent = 0; } + int backgroundGradientColor2 = (int) accent.backgroundGradientOverrideColor2; + if (backgroundGradientColor2 == 0 && accent.backgroundGradientOverrideColor2 == 0) { + Integer color = values.get(key_chat_wallpaper_gradient_to2); + if (color != null) { + backgroundGradientColor2 = changeColorAccent(themeInfo, backgroundAccent, color); + } + } + + int backgroundGradientColor3 = (int) accent.backgroundGradientOverrideColor3; + if (backgroundGradientColor3 == 0 && accent.backgroundGradientOverrideColor3 == 0) { + Integer color = values.get(key_chat_wallpaper_gradient_to3); + if (color != null) { + backgroundGradientColor3 = changeColorAccent(themeInfo, backgroundAccent, color); + } + } + if (backgroundColor == 0) { Integer color = values.get(key_chat_wallpaper); if (color != null) { @@ -812,10 +836,13 @@ public class Theme { Drawable background; int patternColor; - if (backgroundGradientColor != 0) { + if (backgroundGradientColor2 != 0) { + background = null; + patternColor = MotionBackgroundDrawable.getPatternColor(backgroundColor, backgroundGradientColor1, backgroundGradientColor2, backgroundGradientColor3); + } else if (backgroundGradientColor1 != 0) { BackgroundGradientDrawable.Orientation orientation = 
BackgroundGradientDrawable.getGradientOrientation(accent.backgroundRotation); - background = new BackgroundGradientDrawable(orientation, new int[]{backgroundColor, backgroundGradientColor}); - patternColor = AndroidUtilities.getPatternColor(AndroidUtilities.getAverageColor(backgroundColor, backgroundGradientColor)); + background = new BackgroundGradientDrawable(orientation, new int[]{backgroundColor, backgroundGradientColor1}); + patternColor = AndroidUtilities.getPatternColor(AndroidUtilities.getAverageColor(backgroundColor, backgroundGradientColor1)); } else { background = new ColorDrawable(backgroundColor); patternColor = AndroidUtilities.getPatternColor(backgroundColor); @@ -831,16 +858,18 @@ public class Theme { Bitmap dst = Bitmap.createBitmap(patternBitmap.getWidth(), patternBitmap.getHeight(), Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(dst); - background.setBounds(0, 0, patternBitmap.getWidth(), patternBitmap.getHeight()); - background.draw(canvas); + if (background != null) { + background.setBounds(0, 0, patternBitmap.getWidth(), patternBitmap.getHeight()); + background.draw(canvas); + } Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG); paint.setColorFilter(new PorterDuffColorFilter(patternColor, PorterDuff.Mode.SRC_IN)); - paint.setAlpha((int) (255 * accent.patternIntensity)); + paint.setAlpha((int) (255 * Math.abs(accent.patternIntensity))); canvas.drawBitmap(patternBitmap, 0, 0, paint); FileOutputStream stream = new FileOutputStream(toFile); - dst.compress(Bitmap.CompressFormat.JPEG, 87, stream); + dst.compress(background == null ? 
Bitmap.CompressFormat.PNG : Bitmap.CompressFormat.JPEG, 87, stream); stream.close(); } catch (Throwable e) { FileLog.e(e); @@ -853,7 +882,7 @@ public class Theme { if (watingForLoad == null) { return; } - if (id == NotificationCenter.fileDidLoad) { + if (id == NotificationCenter.fileLoaded) { String location = (String) args[0]; LoadingPattern loadingPattern = watingForLoad.remove(location); if (loadingPattern != null) { @@ -878,7 +907,7 @@ public class Theme { checkCurrentWallpaper(createdAccents, false); }); } - } else if (id == NotificationCenter.fileDidFailToLoad) { + } else if (id == NotificationCenter.fileLoadFailed) { String location = (String) args[0]; if (watingForLoad.remove(location) != null) { checkCurrentWallpaper(null, false); @@ -896,7 +925,9 @@ public class Theme { public int myMessagesAccentColor; public int myMessagesGradientAccentColor; public long backgroundOverrideColor; - public long backgroundGradientOverrideColor; + public long backgroundGradientOverrideColor1; + public long backgroundGradientOverrideColor2; + public long backgroundGradientOverrideColor3; public int backgroundRotation = 45; public String patternSlug = ""; public float patternIntensity; @@ -1088,11 +1119,23 @@ public class Theme { } else if (backgroundOverrideColor != 0) { currentColors.remove(key_chat_wallpaper); } - int backgroundGradientOverride = (int) backgroundGradientOverrideColor; - if (backgroundGradientOverride != 0) { - currentColors.put(key_chat_wallpaper_gradient_to, backgroundGradientOverride); - } else if (backgroundGradientOverrideColor != 0) { - currentColors.remove(key_chat_wallpaper_gradient_to); + int backgroundGradientOverride1 = (int) backgroundGradientOverrideColor1; + if (backgroundGradientOverride1 != 0) { + currentColors.put(key_chat_wallpaper_gradient_to1, backgroundGradientOverride1); + } else if (backgroundGradientOverrideColor1 != 0) { + currentColors.remove(key_chat_wallpaper_gradient_to1); + } + int backgroundGradientOverride2 = (int) 
backgroundGradientOverrideColor2; + if (backgroundGradientOverride2 != 0) { + currentColors.put(key_chat_wallpaper_gradient_to2, backgroundGradientOverride2); + } else if (backgroundGradientOverrideColor2 != 0) { + currentColors.remove(key_chat_wallpaper_gradient_to2); + } + int backgroundGradientOverride3 = (int) backgroundGradientOverrideColor3; + if (backgroundGradientOverride3 != 0) { + currentColors.put(key_chat_wallpaper_gradient_to3, backgroundGradientOverride3); + } else if (backgroundGradientOverrideColor3 != 0) { + currentColors.remove(key_chat_wallpaper_gradient_to3); } if (backgroundRotation != 45) { currentColors.put(key_chat_wallpaper_gradient_rotation, backgroundRotation); @@ -1101,7 +1144,11 @@ public class Theme { } public File getPathToWallpaper() { - return !TextUtils.isEmpty(patternSlug) ? new File(ApplicationLoader.getFilesDirFixed(), String.format(Locale.US, "%s_%d_%s.jpg", parentTheme.getKey(), id, patternSlug)) : null; + if (id < 100) { + return !TextUtils.isEmpty(patternSlug) ? new File(ApplicationLoader.getFilesDirFixed(), String.format(Locale.US, "%s_%d_%s_v3.jpg", parentTheme.getKey(), id, patternSlug)) : null; + } else { + return !TextUtils.isEmpty(patternSlug) ? 
new File(ApplicationLoader.getFilesDirFixed(), String.format(Locale.US, "%s_%d_%s.jpg", parentTheme.getKey(), id, patternSlug)) : null; + } } public File saveToFile() { @@ -1124,17 +1171,33 @@ public class Theme { if (selectedColor == null) { selectedColor = 0xffffffff; } - Integer selectedGradientColor = currentColors.get(key_chat_wallpaper_gradient_to); - if (selectedGradientColor == null) { - selectedGradientColor = 0; + Integer selectedGradientColor1 = currentColors.get(key_chat_wallpaper_gradient_to1); + if (selectedGradientColor1 == null) { + selectedGradientColor1 = 0; + } + Integer selectedGradientColor2 = currentColors.get(key_chat_wallpaper_gradient_to2); + if (selectedGradientColor2 == null) { + selectedGradientColor2 = 0; + } + Integer selectedGradientColor3 = currentColors.get(key_chat_wallpaper_gradient_to3); + if (selectedGradientColor3 == null) { + selectedGradientColor3 = 0; } Integer selectedGradientRotation = currentColors.get(key_chat_wallpaper_gradient_rotation); if (selectedGradientRotation == null) { selectedGradientRotation = 45; } String color = String.format("%02x%02x%02x", (byte) (selectedColor >> 16) & 0xff, (byte) (selectedColor >> 8) & 0xff, (byte) (selectedColor & 0xff)).toLowerCase(); - String color2 = selectedGradientColor != 0 ? String.format("%02x%02x%02x", (byte) (selectedGradientColor >> 16) & 0xff, (byte) (selectedGradientColor >> 8) & 0xff, (byte) (selectedGradientColor & 0xff)).toLowerCase() : null; - if (color2 != null) { + String color2 = selectedGradientColor1 != 0 ? String.format("%02x%02x%02x", (byte) (selectedGradientColor1 >> 16) & 0xff, (byte) (selectedGradientColor1 >> 8) & 0xff, (byte) (selectedGradientColor1 & 0xff)).toLowerCase() : null; + String color3 = selectedGradientColor2 != 0 ? String.format("%02x%02x%02x", (byte) (selectedGradientColor2 >> 16) & 0xff, (byte) (selectedGradientColor2 >> 8) & 0xff, (byte) (selectedGradientColor2 & 0xff)).toLowerCase() : null; + String color4 = selectedGradientColor3 != 0 ? 
String.format("%02x%02x%02x", (byte) (selectedGradientColor3 >> 16) & 0xff, (byte) (selectedGradientColor3 >> 8) & 0xff, (byte) (selectedGradientColor3 & 0xff)).toLowerCase() : null; + if (color2 != null && color3 != null) { + if (color4 != null) { + color += "~" + color2 + "~" + color3 + "~" + color4; + } else { + color += "~" + color2 + "~" + color3; + } + } else if (color2 != null) { color += "-" + color2; color += "&rotation=" + selectedGradientRotation; } @@ -1148,7 +1211,7 @@ public class Theme { for (HashMap.Entry entry : currentColors.entrySet()) { String key = entry.getKey(); if (wallpaperLink != null) { - if (key_chat_wallpaper.equals(key) || key_chat_wallpaper_gradient_to.equals(key)) { + if (key_chat_wallpaper.equals(key) || key_chat_wallpaper_gradient_to1.equals(key) || key_chat_wallpaper_gradient_to2.equals(key) || key_chat_wallpaper_gradient_to3.equals(key)) { continue; } } @@ -1181,11 +1244,15 @@ public class Theme { public String originalFileName = ""; public String slug = ""; public int color; - public int gradientColor; + public int gradientColor1; + public int gradientColor2; + public int gradientColor3; public int rotation; public boolean isBlurred; public boolean isMotion; public float intensity; + public long wallpaperId; + public long accessHash; public ThemeInfo parentTheme; public ThemeAccent parentAccent; @@ -1197,7 +1264,9 @@ public class Theme { public OverrideWallpaperInfo(OverrideWallpaperInfo info, ThemeInfo themeInfo, ThemeAccent accent) { slug = info.slug; color = info.color; - gradientColor = info.gradientColor; + gradientColor1 = info.gradientColor1; + gradientColor2 = info.gradientColor2; + gradientColor3 = info.gradientColor3; rotation = info.rotation; isBlurred = info.isBlurred; isMotion = info.isMotion; @@ -1270,7 +1339,9 @@ public class Theme { jsonObject.put("wall", fileName); jsonObject.put("owall", originalFileName); jsonObject.put("pColor", color); - jsonObject.put("pGrColor", gradientColor); + jsonObject.put("pGrColor", 
gradientColor1); + jsonObject.put("pGrColor2", gradientColor2); + jsonObject.put("pGrColor3", gradientColor3); jsonObject.put("pGrAngle", rotation); jsonObject.put("wallSlug", slug != null ? slug : ""); jsonObject.put("wBlur", isBlurred); @@ -1302,7 +1373,9 @@ public class Theme { public boolean isBlured; public boolean isMotion; public int patternBgColor; - public int patternBgGradientColor; + public int patternBgGradientColor1; + public int patternBgGradientColor2; + public int patternBgGradientColor3; public int patternBgGradientRotation = 45; public int patternIntensity; @@ -1317,7 +1390,9 @@ public class Theme { public TLRPC.InputFile uploadedFile; private int previewBackgroundColor; - public int previewBackgroundGradientColor; + public int previewBackgroundGradientColor1; + public int previewBackgroundGradientColor2; + public int previewBackgroundGradientColor3; public int previewWallpaperOffset; private int previewInColor; private int previewOutColor; @@ -1388,7 +1463,9 @@ public class Theme { wallpaperInfo.fileName = object.getString("wall"); wallpaperInfo.originalFileName = object.getString("owall"); wallpaperInfo.color = object.getInt("pColor"); - wallpaperInfo.gradientColor = object.getInt("pGrColor"); + wallpaperInfo.gradientColor1 = object.getInt("pGrColor"); + wallpaperInfo.gradientColor2 = object.optInt("pGrColor2"); + wallpaperInfo.gradientColor3 = object.optInt("pGrColor3"); wallpaperInfo.rotation = object.getInt("pGrAngle"); wallpaperInfo.slug = object.getString("wallSlug"); wallpaperInfo.isBlurred = object.getBoolean("wBlur"); @@ -1588,10 +1665,10 @@ public class Theme { } private void setAccentColorOptions(int[] options) { - setAccentColorOptions(options, null, null, null, null, null, null, null, null); + setAccentColorOptions(options, null, null, null, null, null, null, null, null, null, null); } - private void setAccentColorOptions(int[] accent, int[] myMessages, int[] myMessagesGradient, int[] background, int[] backgroundGradient, int[] ids, 
String[] patternSlugs, int[] patternRotations, int[] patternIntensities) { + private void setAccentColorOptions(int[] accent, int[] myMessages, int[] myMessagesGradient, int[] background, int[] backgroundGradient1, int[] backgroundGradient2, int[] backgroundGradient3, int[] ids, String[] patternSlugs, int[] patternRotations, int[] patternIntensities) { defaultAccentCount = accent.length; themeAccents = new ArrayList<>(); themeAccentsMap = new SparseArray<>(); @@ -1615,11 +1692,25 @@ public class Theme { themeAccent.backgroundOverrideColor = background[a]; } } - if (backgroundGradient != null) { + if (backgroundGradient1 != null) { if (firstAccentIsDefault && themeAccent.id == DEFALT_THEME_ACCENT_ID) { - themeAccent.backgroundGradientOverrideColor = 0x100000000L; + themeAccent.backgroundGradientOverrideColor1 = 0x100000000L; } else { - themeAccent.backgroundGradientOverrideColor = backgroundGradient[a]; + themeAccent.backgroundGradientOverrideColor1 = backgroundGradient1[a]; + } + } + if (backgroundGradient2 != null) { + if (firstAccentIsDefault && themeAccent.id == DEFALT_THEME_ACCENT_ID) { + themeAccent.backgroundGradientOverrideColor2 = 0x100000000L; + } else { + themeAccent.backgroundGradientOverrideColor2 = backgroundGradient2[a]; + } + } + if (backgroundGradient3 != null) { + if (firstAccentIsDefault && themeAccent.id == DEFALT_THEME_ACCENT_ID) { + themeAccent.backgroundGradientOverrideColor3 = 0x100000000L; + } else { + themeAccent.backgroundGradientOverrideColor3 = backgroundGradient3[a]; } } if (patternSlugs != null) { @@ -1643,15 +1734,15 @@ public class Theme { } private void addObservers() { - NotificationCenter.getInstance(account).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(account).addObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(account).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(account).addObserver(this, 
NotificationCenter.fileLoadFailed); } @UiThread private void removeObservers() { - NotificationCenter.getInstance(account).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(account).removeObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(account).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(account).removeObserver(this, NotificationCenter.fileLoadFailed); } private void onFinishLoadingRemoteTheme() { @@ -1669,16 +1760,28 @@ public class Theme { myMessagesGradientAccentColor = 0; } int backgroundOverrideColor = 0; - long backgroundGradientOverrideColor = 0; + long backgroundGradientOverrideColor1 = 0; + long backgroundGradientOverrideColor2 = 0; + long backgroundGradientOverrideColor3 = 0; int backgroundRotation = 0; String patternSlug = null; float patternIntensity = 0; if (settings.wallpaper != null && settings.wallpaper.settings != null) { - backgroundOverrideColor = settings.wallpaper.settings.background_color; + backgroundOverrideColor = getWallpaperColor(settings.wallpaper.settings.background_color); if (settings.wallpaper.settings.second_background_color == 0) { - backgroundGradientOverrideColor = 0x100000000L; + backgroundGradientOverrideColor1 = 0x100000000L; } else { - backgroundGradientOverrideColor = settings.wallpaper.settings.second_background_color; + backgroundGradientOverrideColor1 = getWallpaperColor(settings.wallpaper.settings.second_background_color); + } + if (settings.wallpaper.settings.third_background_color == 0) { + backgroundGradientOverrideColor2 = 0x100000000L; + } else { + backgroundGradientOverrideColor2 = getWallpaperColor(settings.wallpaper.settings.third_background_color); + } + if (settings.wallpaper.settings.fourth_background_color == 0) { + backgroundGradientOverrideColor3 = 0x100000000L; + } else { + backgroundGradientOverrideColor3 = getWallpaperColor(settings.wallpaper.settings.fourth_background_color); } 
backgroundRotation = AndroidUtilities.getWallpaperRotation(settings.wallpaper.settings.rotation, false); if (!(settings.wallpaper instanceof TLRPC.TL_wallPaperNoFile) && settings.wallpaper.pattern) { @@ -1690,7 +1793,9 @@ public class Theme { settings.message_bottom_color == accent.myMessagesAccentColor && myMessagesGradientAccentColor == accent.myMessagesGradientAccentColor && backgroundOverrideColor == accent.backgroundOverrideColor && - backgroundGradientOverrideColor == accent.backgroundGradientOverrideColor && + backgroundGradientOverrideColor1 == accent.backgroundGradientOverrideColor1 && + backgroundGradientOverrideColor2 == accent.backgroundGradientOverrideColor2 && + backgroundGradientOverrideColor3 == accent.backgroundGradientOverrideColor3 && backgroundRotation == accent.backgroundRotation && TextUtils.equals(patternSlug, accent.patternSlug) && Math.abs(patternIntensity - accent.patternIntensity) < 0.001; @@ -1704,11 +1809,21 @@ public class Theme { themeAccent.myMessagesGradientAccentColor = 0; } if (settings.wallpaper != null && settings.wallpaper.settings != null) { - themeAccent.backgroundOverrideColor = settings.wallpaper.settings.background_color; - if (settings.wallpaper.settings.second_background_color == 0) { - themeAccent.backgroundGradientOverrideColor = 0x100000000L; + themeAccent.backgroundOverrideColor = getWallpaperColor(settings.wallpaper.settings.background_color); + if ((settings.wallpaper.settings.flags & 16) != 0 && settings.wallpaper.settings.second_background_color == 0) { + themeAccent.backgroundGradientOverrideColor1 = 0x100000000L; } else { - themeAccent.backgroundGradientOverrideColor = settings.wallpaper.settings.second_background_color; + themeAccent.backgroundGradientOverrideColor1 = getWallpaperColor(settings.wallpaper.settings.second_background_color); + } + if ((settings.wallpaper.settings.flags & 32) != 0 && settings.wallpaper.settings.third_background_color == 0) { + themeAccent.backgroundGradientOverrideColor2 = 
0x100000000L; + } else { + themeAccent.backgroundGradientOverrideColor2 = getWallpaperColor(settings.wallpaper.settings.third_background_color); + } + if ((settings.wallpaper.settings.flags & 64) != 0 && settings.wallpaper.settings.fourth_background_color == 0) { + themeAccent.backgroundGradientOverrideColor3 = 0x100000000L; + } else { + themeAccent.backgroundGradientOverrideColor3 = getWallpaperColor(settings.wallpaper.settings.fourth_background_color); } themeAccent.backgroundRotation = AndroidUtilities.getWallpaperRotation(settings.wallpaper.settings.rotation, false); if (!(settings.wallpaper instanceof TLRPC.TL_wallPaperNoFile) && settings.wallpaper.pattern) { @@ -1757,7 +1872,9 @@ public class Theme { themeAccent.myMessagesAccentColor = accent.myMessagesAccentColor; themeAccent.myMessagesGradientAccentColor = accent.myMessagesGradientAccentColor; themeAccent.backgroundOverrideColor = accent.backgroundOverrideColor; - themeAccent.backgroundGradientOverrideColor = accent.backgroundGradientOverrideColor; + themeAccent.backgroundGradientOverrideColor1 = accent.backgroundGradientOverrideColor1; + themeAccent.backgroundGradientOverrideColor2 = accent.backgroundGradientOverrideColor2; + themeAccent.backgroundGradientOverrideColor3 = accent.backgroundGradientOverrideColor3; themeAccent.backgroundRotation = accent.backgroundRotation; themeAccent.patternSlug = accent.patternSlug; themeAccent.patternIntensity = accent.patternIntensity; @@ -1789,9 +1906,11 @@ public class Theme { Bitmap finalBitmap = Bitmap.createBitmap(bitmap.getWidth(), bitmap.getHeight(), bitmap.getConfig()); Canvas canvas = new Canvas(finalBitmap); int patternColor; - if (patternBgGradientColor != 0) { - patternColor = AndroidUtilities.getAverageColor(patternBgColor, patternBgGradientColor); - GradientDrawable gradientDrawable = new GradientDrawable(BackgroundGradientDrawable.getGradientOrientation(patternBgGradientRotation), new int[]{patternBgColor, patternBgGradientColor}); + if 
(patternBgGradientColor2 != 0) { + patternColor = MotionBackgroundDrawable.getPatternColor(patternBgColor, patternBgGradientColor1, patternBgGradientColor2, patternBgGradientColor3); + } else if (patternBgGradientColor1 != 0) { + patternColor = AndroidUtilities.getAverageColor(patternBgColor, patternBgGradientColor1); + GradientDrawable gradientDrawable = new GradientDrawable(BackgroundGradientDrawable.getGradientOrientation(patternBgGradientRotation), new int[]{patternBgColor, patternBgGradientColor1}); gradientDrawable.setBounds(0, 0, finalBitmap.getWidth(), finalBitmap.getHeight()); gradientDrawable.draw(canvas); } else { @@ -1809,7 +1928,7 @@ public class Theme { bitmap = Utilities.blurWallpaper(bitmap); } FileOutputStream stream = new FileOutputStream(toPath); - bitmap.compress(Bitmap.CompressFormat.JPEG, 87, stream); + bitmap.compress(patternBgGradientColor2 != 0 ? Bitmap.CompressFormat.PNG : Bitmap.CompressFormat.JPEG, 87, stream); stream.close(); return true; } catch (Throwable e) { @@ -1820,7 +1939,7 @@ public class Theme { @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.fileDidLoad || id == NotificationCenter.fileDidFailToLoad) { + if (id == NotificationCenter.fileLoaded || id == NotificationCenter.fileLoadFailed) { String location = (String) args[0]; if (info != null && info.document != null) { if (location.equals(loadingThemeWallpaperName)) { @@ -1834,14 +1953,16 @@ public class Theme { String name = FileLoader.getAttachFileName(info.document); if (location.equals(name)) { removeObservers(); - if (id == NotificationCenter.fileDidLoad) { + if (id == NotificationCenter.fileLoaded) { File locFile = new File(pathToFile); ThemeInfo themeInfo = fillThemeValues(locFile, info.title, info); if (themeInfo != null && themeInfo.pathToWallpaper != null) { File file = new File(themeInfo.pathToWallpaper); if (!file.exists()) { patternBgColor = themeInfo.patternBgColor; - patternBgGradientColor = themeInfo.patternBgGradientColor; + patternBgGradientColor1 = themeInfo.patternBgGradientColor1; + patternBgGradientColor2 = themeInfo.patternBgGradientColor2; + patternBgGradientColor3 = themeInfo.patternBgGradientColor3; patternBgGradientRotation = themeInfo.patternBgGradientRotation; isBlured = themeInfo.isBlured; patternIntensity = themeInfo.patternIntensity; @@ -1874,7 +1995,7 @@ public class Theme { } private static final Object sync = new Object(); - private static final Object wallpaperSync = new Object(); + private static Runnable wallpaperLoadTask; public static final int ACTION_BAR_PHOTO_VIEWER_COLOR = 0x7f000000; public static final int ACTION_BAR_MEDIA_PICKER_COLOR = 0xff333333; @@ -1944,33 +2065,34 @@ public class Theme { private static ThemeInfo currentDayTheme; private static ThemeInfo defaultTheme; private static ThemeInfo previousTheme; + private static boolean changingWallpaper; private static boolean hasPreviousTheme; private static boolean isApplyingAccent; private static boolean switchingNightTheme; private static boolean isInNigthMode; + private static int previousPhase; private static int 
switchNightThemeDelay; private static long lastDelayUpdateTime; private static BackgroundGradientDrawable.Disposable backgroundGradientDisposable; - public static PorterDuffColorFilter colorFilter; - public static PorterDuffColorFilter colorPressedFilter; - public static PorterDuffColorFilter colorFilter2; - public static PorterDuffColorFilter colorPressedFilter2; private static boolean isCustomTheme; private static int serviceMessageColor; + private static Bitmap serviceBitmap; + public static BitmapShader serviceBitmapShader; + private static Matrix serviceBitmapMatrix; private static int serviceSelectedMessageColor; public static int serviceMessageColorBackup; public static int serviceSelectedMessageColorBackup; private static int serviceMessage2Color; private static int serviceSelectedMessage2Color; public static int currentColor; - private static int currentSelectedColor; private static Drawable wallpaper; private static Drawable themedWallpaper; private static int themedWallpaperFileOffset; private static String themedWallpaperLink; private static boolean isWallpaperMotion; + private static int patternIntensity; private static boolean isPatternWallpaper; public static Paint dividerPaint; @@ -2063,6 +2185,10 @@ public class Theme { public static Paint chat_statusPaint; public static Paint chat_statusRecordPaint; public static Paint chat_actionBackgroundPaint; + public static Paint chat_actionBackgroundSelectedPaint; + public static Paint chat_actionBackgroundPaint2; + public static Paint chat_actionBackgroundSelectedPaint2; + public static Paint chat_actionBackgroundGradientDarkenPaint; public static Paint chat_timeBackgroundPaint; public static Paint chat_composeBackgroundPaint; public static Paint chat_radialProgressPaint; @@ -2167,14 +2293,12 @@ public class Theme { public static Drawable chat_msgBroadcastDrawable; public static Drawable chat_msgBroadcastMediaDrawable; public static Drawable chat_contextResult_shadowUnderSwitchDrawable; - public static 
Drawable chat_shareDrawable; public static Drawable chat_shareIconDrawable; public static Drawable chat_replyIconDrawable; public static Drawable chat_goIconDrawable; public static Drawable chat_botLinkDrawalbe; public static Drawable chat_botCardDrawalbe; public static Drawable chat_botInlineDrawable; - public static Drawable chat_systemDrawable; public static Drawable chat_commentDrawable; public static Drawable chat_commentStickerDrawable; public static Drawable chat_commentArrowDrawable; @@ -2196,8 +2320,6 @@ public class Theme { public static RLottieDrawable[] chat_attachButtonDrawables = new RLottieDrawable[6]; public static Drawable[] chat_locationDrawable = new Drawable[2]; public static Drawable[] chat_contactDrawable = new Drawable[2]; - public static Drawable[] chat_cornerOuter = new Drawable[4]; - public static Drawable[] chat_cornerInner = new Drawable[4]; public static Drawable[][] chat_fileStatesDrawable = new Drawable[10][2]; public static CombinedDrawable[][] chat_fileMiniStatesDrawable = new CombinedDrawable[6][2]; public static Drawable[][] chat_photoStatesDrawables = new Drawable[13][2]; @@ -2208,6 +2330,7 @@ public class Theme { public static Drawable calllog_msgCallDownGreenDrawable; public static Path[] chat_filePath = new Path[2]; + public static Path[] chat_updatePath = new Path[3]; public static Drawable chat_flameIcon; public static Drawable chat_gifIcon; @@ -2531,8 +2654,6 @@ public class Theme { public static final String key_chat_serviceIcon = "chat_serviceIcon"; public static final String key_chat_serviceBackground = "chat_serviceBackground"; public static final String key_chat_serviceBackgroundSelected = "chat_serviceBackgroundSelected"; - public static final String key_chat_shareBackground = "chat_shareBackground"; - public static final String key_chat_shareBackgroundSelected = "chat_shareBackgroundSelected"; public static final String key_chat_muteIcon = "chat_muteIcon"; public static final String key_chat_lockIcon = 
"chat_lockIcon"; public static final String key_chat_outSentCheck = "chat_outSentCheck"; @@ -2667,7 +2788,9 @@ public class Theme { public static final String key_chat_linkSelectBackground = "chat_linkSelectBackground"; public static final String key_chat_textSelectBackground = "chat_textSelectBackground"; public static final String key_chat_wallpaper = "chat_wallpaper"; - public static final String key_chat_wallpaper_gradient_to = "chat_wallpaper_gradient_to"; + public static final String key_chat_wallpaper_gradient_to1 = "chat_wallpaper_gradient_to"; + public static final String key_chat_wallpaper_gradient_to2 = "key_chat_wallpaper_gradient_to2"; + public static final String key_chat_wallpaper_gradient_to3 = "key_chat_wallpaper_gradient_to3"; public static final String key_chat_wallpaper_gradient_rotation = "chat_wallpaper_gradient_rotation"; public static final String key_chat_messagePanelBackground = "chat_messagePanelBackground"; public static final String key_chat_messagePanelShadow = "chat_messagePanelShadow"; @@ -3295,8 +3418,6 @@ public class Theme { defaultColors.put(key_chat_inGreenCall, 0xff00c853); defaultColors.put(key_chat_inRedCall, 0xffff4848); defaultColors.put(key_chat_outGreenCall, 0xff00c853); - defaultColors.put(key_chat_shareBackground, 0x66728fa6); - defaultColors.put(key_chat_shareBackgroundSelected, 0x99728fa6); defaultColors.put(key_chat_lockIcon, 0xffffffff); defaultColors.put(key_chat_muteIcon, 0xffb1cce3); defaultColors.put(key_chat_inBubble, 0xffffffff); @@ -3932,8 +4053,6 @@ public class Theme { themeAccentExclusionKeys.add(key_chat_attachGalleryBackground); themeAccentExclusionKeys.add(key_chat_attachFileText); themeAccentExclusionKeys.add(key_chat_attachGalleryText); - themeAccentExclusionKeys.add(key_chat_shareBackground); - themeAccentExclusionKeys.add(key_chat_shareBackgroundSelected); themeAccentExclusionKeys.add(key_statisticChartLine_blue); themeAccentExclusionKeys.add(key_statisticChartLine_green); 
themeAccentExclusionKeys.add(key_statisticChartLine_red); @@ -3955,11 +4074,16 @@ public class Theme { themeAccentExclusionKeys.add(key_voipgroup_blueText); themeAccentExclusionKeys.add(key_voipgroup_soundButton); themeAccentExclusionKeys.add(key_voipgroup_soundButtonActive); + themeAccentExclusionKeys.add(key_voipgroup_soundButtonActiveScrolled); themeAccentExclusionKeys.add(key_voipgroup_soundButton2); themeAccentExclusionKeys.add(key_voipgroup_soundButtonActive2); + themeAccentExclusionKeys.add(key_voipgroup_soundButtonActive2Scrolled); themeAccentExclusionKeys.add(key_voipgroup_leaveButton); + themeAccentExclusionKeys.add(key_voipgroup_leaveButtonScrolled); themeAccentExclusionKeys.add(key_voipgroup_connectingProgress); themeAccentExclusionKeys.add(key_voipgroup_disabledButton); + themeAccentExclusionKeys.add(key_voipgroup_disabledButtonActive); + themeAccentExclusionKeys.add(key_voipgroup_disabledButtonActiveScrolled); themeAccentExclusionKeys.add(key_voipgroup_unmuteButton); themeAccentExclusionKeys.add(key_voipgroup_unmuteButton2); themeAccentExclusionKeys.add(key_voipgroup_actionBarUnscrolled); @@ -3981,6 +4105,26 @@ public class Theme { themeAccentExclusionKeys.add(key_voipgroup_listSelector); themeAccentExclusionKeys.add(key_voipgroup_inviteMembersBackground); themeAccentExclusionKeys.add(key_voipgroup_dialogBackground); + themeAccentExclusionKeys.add(key_voipgroup_overlayGreen1); + themeAccentExclusionKeys.add(key_voipgroup_overlayGreen2); + themeAccentExclusionKeys.add(key_voipgroup_overlayBlue1); + themeAccentExclusionKeys.add(key_voipgroup_overlayBlue2); + themeAccentExclusionKeys.add(key_voipgroup_topPanelGreen1); + themeAccentExclusionKeys.add(key_voipgroup_topPanelGreen2); + themeAccentExclusionKeys.add(key_voipgroup_topPanelBlue1); + themeAccentExclusionKeys.add(key_voipgroup_topPanelBlue2); + themeAccentExclusionKeys.add(key_voipgroup_topPanelGray); + themeAccentExclusionKeys.add(key_voipgroup_overlayAlertGradientMuted); + 
themeAccentExclusionKeys.add(key_voipgroup_overlayAlertGradientMuted2); + themeAccentExclusionKeys.add(key_voipgroup_overlayAlertGradientUnmuted); + themeAccentExclusionKeys.add(key_voipgroup_overlayAlertGradientUnmuted2); + themeAccentExclusionKeys.add(key_voipgroup_overlayAlertMutedByAdmin); + themeAccentExclusionKeys.add(key_voipgroup_overlayAlertMutedByAdmin2); + themeAccentExclusionKeys.add(key_voipgroup_mutedByAdminGradient); + themeAccentExclusionKeys.add(key_voipgroup_mutedByAdminGradient2); + themeAccentExclusionKeys.add(key_voipgroup_mutedByAdminGradient3); + themeAccentExclusionKeys.add(key_voipgroup_mutedByAdminMuteButton); + themeAccentExclusionKeys.add(key_voipgroup_mutedByAdminMuteButtonDisabled); myMessagesColorKeys.add(key_chat_outGreenCall); myMessagesColorKeys.add(key_chat_outBubble); @@ -4074,15 +4218,17 @@ public class Theme { themeInfo.currentAccentId = DEFALT_THEME_ACCENT_ID; themeInfo.sortIndex = 1; themeInfo.setAccentColorOptions( - new int[] { 0xFF5890C5, 0xFF239853, 0xFFCE5E82, 0xFF7F63C3, 0xFF2491AD, 0xFF299C2F, 0xFF8854B4, 0xFF328ACF, 0xFF43ACC7, 0xFF52AC44, 0xFFCD5F93, 0xFFD28036, 0xFF8366CC, 0xFFCE4E57, 0xFFD3AE40, 0xFF7B88AB }, - new int[] { 0xFFB8E18D, 0xFFFAFBCC, 0xFFFFF9DC, 0xFFC14F6E, 0xFFD1BD1B, 0xFFFFFAC9, 0xFFFCF6D8, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0x00000000, 0xFFF2FBC9, 0xFFFBF4DF, 0, 0, 0xFFFDEDB4, 0xFFFCF7B6, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0x00000000, 0xFFDFE2A0, 0xFFE2B991, 0xFFD7C1E9, 0xFFDCD1C0, 0xFFEFB576, 0xFFC0A2D1, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0x00000000, 0xFFC1E1A3, 0xFFEBE2BA, 0xFFE8CDD6, 0xFFE0DFC6, 0xFFECE771, 0xFFDECCDE, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new 
int[] { 99, 9, 10, 11, 12, 13, 14, 0, 1, 2, 3, 4, 5, 6, 7, 8 }, - new String[] { "", "p-pXcflrmFIBAAAAvXYQk-mCwZU", "JqSUrO0-mFIBAAAAWwTvLzoWGQI", "O-wmAfBPSFADAAAA4zINVfD_bro", "RepJ5uE_SVABAAAAr4d0YhgB850", "-Xc-np9y2VMCAAAARKr0yNNPYW0", "dhf9pceaQVACAAAAbzdVo4SCiZA", "", "", "", "", "", "", "", "", "" }, - new int[] { 0, 180, 45, 0, 45, 180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, - new int[] { 0, 52, 46, 57, 45, 64, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0 } + new int[] { 0xFF5890C5, 0xFF239853, 0xFFCE5E82, 0xFF7F63C3, 0xFF2491AD, 0xFF299C2F, 0xFF8854B4, 0xFF328ACF, 0xFF43ACC7, 0xFF52AC44, 0xFFCD5F93, 0xFFD28036, 0xFF8366CC, 0xFFCE4E57, 0xFFD3AE40, 0xFF7B88AB }, + new int[] { 0xFFB8E18D, 0xFFFAFBCC, 0xFFFFF9DC, 0xFFC14F6E, 0xFFD1BD1B, 0xFFFFFAC9, 0xFFFCF6D8, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + new int[] { 0x00000000, 0xFFF2FBC9, 0xFFFBF4DF, 0, 0, 0xFFFDEDB4, 0xFFFCF7B6, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + new int[] { 0x00000000, 0xFFdfe2a0, 0xFFf1b290, 0xFFd7c1e9, 0xFFd7b89e, 0xFFec9e73, 0xFFcbb0e4, 0xff9bbce7, 0xff91c5ec, 0xff9bc982, 0xffe4a1c1, 0xffe3ae7d, 0xffb8aaea, 0xffeb9c79, 0xffd3bc74, 0xffa0aace }, + new int[] { 0x00000000, 0xFFbad89d, 0xFFeccf94, 0xFFe8bdd6, 0xFFe6dec2, 0xFFe8d085, 0xFFebc8e9, 0xffc0d9f3, 0xffbfdfec, 0xffe0dd93, 0xffe9bed6, 0xffecd5a2, 0xffc5c9ee, 0xfff0bd99, 0xffe9df9e, 0xffcacedd }, + new int[] { 0x00000000, 0xFFe2dea7, 0xFFe7b384, 0xFFd2aee9, 0xFFdac5ae, 0xFFeea677, 0xFFdfa8d1, 0xff95c3eb, 0xffb5e1d9, 0xffbed595, 0xffcca8e1, 0xffdfb076, 0xffb3b1e2, 0xffe79db4, 0xffe0c88b, 0xffa6add2 }, + new int[] { 0x00000000, 0xFF9ec790, 0xFFebdea8, 0xFFeccb88, 0xFFe5dcbf, 0xFFede4a9, 0xFFedc8a8, 0xffbbd5e8, 0xffbfdbe8, 0xffd1db97, 0xffefcbd7, 0xffecd694, 0xffdfbeed, 0xfff3b182, 0xffe5d397, 0xffcacee8 }, + new int[] { 99, 9, 10, 11, 12, 13, 14, 0, 1, 2, 3, 4, 5, 6, 7, 8 }, + new String[] { "", 
"p-pXcflrmFIBAAAAvXYQk-mCwZU", "JqSUrO0-mFIBAAAAWwTvLzoWGQI", "O-wmAfBPSFADAAAA4zINVfD_bro", "RepJ5uE_SVABAAAAr4d0YhgB850", "-Xc-np9y2VMCAAAARKr0yNNPYW0", "fqv01SQemVIBAAAApND8LDRUhRU", "fqv01SQemVIBAAAApND8LDRUhRU", "RepJ5uE_SVABAAAAr4d0YhgB850", "lp0prF8ISFAEAAAA_p385_CvG0w", "heptcj-hSVACAAAAC9RrMzOa-cs", "PllZ-bf_SFAEAAAA8crRfwZiDNg", "dhf9pceaQVACAAAAbzdVo4SCiZA", "Ujx2TFcJSVACAAAARJ4vLa50MkM", "p-pXcflrmFIBAAAAvXYQk-mCwZU", "dk_wwlghOFACAAAAfz9xrxi6euw" }, + new int[] { 0, 180, 45, 0, 45, 180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + new int[] { 0, 52, 46, 57, 45, 64, 52, 35, 36, 41, 50, 50, 35, 38, 37, 30 } ); themes.add(currentDayTheme = currentTheme = defaultTheme = themeInfo); themesDict.put("Blue", themeInfo); @@ -4095,15 +4241,17 @@ public class Theme { themeInfo.previewOutColor = 0xff82a8e3; themeInfo.sortIndex = 3; themeInfo.setAccentColorOptions( - new int[] { 0xFF927BD4, 0xFF698AFB, 0xFF23A7F0, 0xFF7B71D1, 0xFF69B955, 0xFF2990EA, 0xFF7082E9, 0xFF66BAED, 0xff3685fa, 0xff46c8ed, 0xff4ab841, 0xffeb7cb1, 0xffee902a, 0xffa281f0, 0xffd34324, 0xffeebd34, 0xff7f8fab, 0xff3581e3 }, - new int[] { 0xFF9D5C99, 0xFF635545, 0xFF31818B, 0xFFAD6426, 0xFF4A7034, 0xFF335D82, 0xFF36576F, 0xFF597563, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFF604DA8, 0xFF685D4C, 0xFF1B6080, 0xFF99354E, 0xFF275D3B, 0xFF317A98, 0xFF376E87, 0xFF5E7370, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFF28212E, 0xFF171A22, 0xFF071E1F, 0xFF100F13, 0xFF141D12, 0xFF07121C, 0xFF1E2029, 0xFF020403, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFF121013, 0xFF26262E, 0xFF141D26, 0xFF221E24, 0xFF1A2114, 0xFF1C2630, 0xFF141518, 0xFF151C1F, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 
0x00000000, 0x00000000, 0x00000000 }, - new int[] { 11, 12, 13, 14, 15, 16, 17, 18, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, - new String[] { "O-wmAfBPSFADAAAA4zINVfD_bro", "RepJ5uE_SVABAAAAr4d0YhgB850", "dk_wwlghOFACAAAAfz9xrxi6euw", "9LW_RcoOSVACAAAAFTk3DTyXN-M", "PllZ-bf_SFAEAAAA8crRfwZiDNg", "-Xc-np9y2VMCAAAARKr0yNNPYW0", "kO4jyq55SFABAAAA0WEpcLfahXk", "CJNyxPMgSVAEAAAAvW9sMwc51cw", "", "", "", "", "", "", "", "", "", "" }, - new int[] { 225, 45, 225, 135, 45, 225, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, - new int[] { 40, 40, 31, 50, 25, 34, 35, 50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 } + new int[] { 0xFF927BD4, 0xFF698AFB, 0xFF23A7F0, 0xFF7B71D1, 0xFF69B955, 0xFF2990EA, 0xFF7082E9, 0xFF66BAED, 0xff3685fa, 0xff46c8ed, 0xff64AC5F, 0xffeb7cb1, 0xffee902a, 0xffa281f0, 0xffd34324, 0xffeebd34, 0xff7f8fab, 0xff3581e3 }, + new int[] { 0xFF9D5C99, 0xFF635545, 0xFF31818B, 0xFFAD6426, 0xFF4A7034, 0xFF335D82, 0xFF36576F, 0xFF597563, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + new int[] { 0xFF604DA8, 0xFF685D4C, 0xFF1B6080, 0xFF99354E, 0xFF275D3B, 0xFF317A98, 0xFF376E87, 0xFF5E7370, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + new int[] { 0xFF271e2e, 0xFF171a22, 0xFF071e1f, 0xFF100f13, 0xFF0e1811, 0xFF0b131c, 0xFF1d2129, 0xFF202c2a, 0xff0e141a, 0xff162325, 0xff161d15, 0xff24191e, 0xff251b13, 0xff1f1d29, 0xff22160e, 0xff272115, 0xff171a1b, 0xff0e141a }, + new int[] { 0xFF110e13, 0xFF26262e, 0xFF141d26, 0xFF221a27, 0xFF1f2818, 0xFF192330, 0xFF12161a, 0xFF141a1e, 0xff172431, 0xff0e1718, 0xff172719, 0xff23171c, 0xff201408, 0xff14131c, 0xff2d1d16, 0xff1a160d, 0xff212328, 0xff172431 }, + new int[] { 0xFF2b1e2b, 0xFF15151b, 0xFF0c151a, 0xFF0e0f13, 0xFF0b170f, 0xFF131822, 0xFF17242d, 0xFF16202b, 0xff0f171e, 0xff1e2e2e, 0xff141e14, 0xff2b1929, 0xff2e1f15, 0xff292331, 0xff23140c, 0xff292414, 0xff181a1d, 0xff0f171e }, + new int[] { 0xFF161227, 
0xFF1a1916, 0xFF0d272c, 0xFF271d29, 0xFF171d19, 0xFF172331, 0xFF111521, 0xFF051717, 0xff141c2b, 0xff121f1f, 0xff1c261a, 0xff1f141d, 0xff1b130a, 0xff17131b, 0xff2d1924, 0xff1e170e, 0xff212228, 0xff141c2b }, + new int[] { 11, 12, 13, 14, 15, 16, 17, 18, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, + new String[] { "O-wmAfBPSFADAAAA4zINVfD_bro", "RepJ5uE_SVABAAAAr4d0YhgB850", "dk_wwlghOFACAAAAfz9xrxi6euw", "9LW_RcoOSVACAAAAFTk3DTyXN-M", "PllZ-bf_SFAEAAAA8crRfwZiDNg", "-Xc-np9y2VMCAAAARKr0yNNPYW0", "kO4jyq55SFABAAAA0WEpcLfahXk", "CJNyxPMgSVAEAAAAvW9sMwc51cw", "fqv01SQemVIBAAAApND8LDRUhRU", "RepJ5uE_SVABAAAAr4d0YhgB850", "CJNyxPMgSVAEAAAAvW9sMwc51cw", "9LW_RcoOSVACAAAAFTk3DTyXN-M", "9GcNVISdSVADAAAAUcw5BYjELW4", "F5oWoCs7QFACAAAAgf2bD_mg8Bw", "9ShF73d1MFIIAAAAjWnm8_ZMe8Q", "3rX-PaKbSFACAAAAEiHNvcEm6X4", "dk_wwlghOFACAAAAfz9xrxi6euw", "fqv01SQemVIBAAAApND8LDRUhRU" }, + new int[] { 225, 45, 225, 135, 45, 225, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + new int[] { 40, 40, 31, 50, 25, 34, 35, 35, 38, 29, 24, 34, 34, 31, 29, 37, 21, 38 } ); themes.add(themeInfo); themesDict.put("Dark Blue", currentNightTheme = themeInfo); @@ -4116,15 +4264,17 @@ public class Theme { themeInfo.previewOutColor = 0xff6ca1eb; themeInfo.sortIndex = 5; themeInfo.setAccentColorOptions( - new int[] { 0xFF40B1E2, 0xFF41B05D, 0xFFCE8C20, 0xFF57A3EB, 0xFFDE8534, 0xFFCC6189, 0xFF3490EB, 0xFF43ACC7, 0xFF52AC44, 0xFFCD5F93, 0xFFD28036, 0xFF8366CC, 0xFFCE4E57, 0xFFD3AE40, 0xFF7B88AB }, - new int[] { 0xFF319FCA, 0xFF28A359, 0xFF8C5A3F, 0xFF3085D3, 0xFFC95870, 0xFF7871CD, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFF4EBEE2, 0xFF6BBC59, 0xFF9E563C, 0xFF48C2D8, 0xFFD87047, 0xFFBE6EAF, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFFB4E3F0, 0xFFDDDEAA, 0xFFDACCA1, 0xFFE3F3F3, 0xFFEEE5B0, 0xFFE5DFEC, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 
0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFFF1FDFC, 0xFFC9E9B6, 0xFFE2E1BE, 0xFFC8E6EE, 0xFFEEBEAA, 0xFFE1C6EC, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 9, 10, 11, 12, 13, 14, 0, 1, 2, 3, 4, 5, 6, 7, 8 }, - new String[] { "MIo6r0qGSFAFAAAAtL8TsDzNX60", "dhf9pceaQVACAAAAbzdVo4SCiZA", "fqv01SQemVIBAAAApND8LDRUhRU", "p-pXcflrmFIBAAAAvXYQk-mCwZU", "JqSUrO0-mFIBAAAAWwTvLzoWGQI", "F5oWoCs7QFACAAAAgf2bD_mg8Bw", "", "", "", "", "", "", "", "", "" }, - new int[] { 315, 315, 225, 315, 0, 180 , 0, 0, 0, 0, 0, 0, 0, 0, 0 }, - new int[] { 50, 50, 58, 47, 46, 50, 0, 0, 0, 0, 0, 0, 0, 0, 0 } + new int[] { 0xFF40B1E2, 0xFF41B05D, 0xFFCE8C20, 0xFF57A3EB, 0xFFDE8534, 0xFFCC6189, 0xFF3490EB, 0xFF43ACC7, 0xFF52AC44, 0xFFCD5F93, 0xFFD28036, 0xFF8366CC, 0xFFCE4E57, 0xFFD3AE40, 0xFF7B88AB }, + new int[] { 0xFF319FCA, 0xFF28A359, 0xFF8C5A3F, 0xFF3085D3, 0xFFC95870, 0xFF7871CD, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + new int[] { 0xFF4EBEE2, 0xFF6BBC59, 0xFF9E563C, 0xFF48C2D8, 0xFFD87047, 0xFFBE6EAF, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + new int[] { 0xFFc5e2f0, 0xFFdadea9, 0xFFd6c9a5, 0xFFe3f3f3, 0xFFeee5b0, 0xFFe5dfec, 0xffe0e7ed, 0xffbfe0eb, 0xffc2e0af, 0xffefd9e4, 0xfff1dfbd, 0xffe1dbec, 0xffedd8d8, 0xffebe1cd, 0xffdcdee5 }, + new int[] { 0xFFe8f4f3, 0xFFbce3ac, 0xFFe6dbaf, 0xFFc8e6ee, 0xFFeebeaa, 0xFFe1c6ec, 0xffbed7f3, 0xffbfe0eb, 0xffcbe19a, 0xffecc6d9, 0xffe8c79b, 0xffbdc1ec, 0xffeecac0, 0xffebe2b5, 0xffc3cadf }, + new int[] { 0xFFb4daf0, 0xFFcde7a9, 0xFFe8c091, 0xFFd9eff3, 0xFFeecf92, 0xFFf6eaf6, 0xffe0e8f3, 0xffcaebec, 0xffb8de89, 0xfff1d8e6, 0xfff3d7a6, 0xffd6d8f5, 0xffedddcd, 0xffebdcc9, 0xffe7edf1 }, + new int[] { 0xFFcff0ef, 0xFFa8cf9b, 0xFFe1d09f, 0xFFb4d6e8, 0xFFeeaf87, 0xFFe5c5cf, 0xffc8dbf3, 0xffaedceb, 0xffcee5a2, 
0xfff0c0d9, 0xffdfb48e, 0xffbdbaf2, 0xfff1c9bb, 0xffe7d7ae, 0xffc5c6da }, + new int[] { 9, 10, 11, 12, 13, 14, 0, 1, 2, 3, 4, 5, 6, 7, 8 }, + new String[] { "MIo6r0qGSFAFAAAAtL8TsDzNX60", "dhf9pceaQVACAAAAbzdVo4SCiZA", "fqv01SQemVIBAAAApND8LDRUhRU", "p-pXcflrmFIBAAAAvXYQk-mCwZU", "JqSUrO0-mFIBAAAAWwTvLzoWGQI", "F5oWoCs7QFACAAAAgf2bD_mg8Bw", "fqv01SQemVIBAAAApND8LDRUhRU", "RepJ5uE_SVABAAAAr4d0YhgB850", "PllZ-bf_SFAEAAAA8crRfwZiDNg", "pgJfpFNRSFABAAAACDT8s5sEjfc", "ptuUd96JSFACAAAATobI23sPpz0", "dhf9pceaQVACAAAAbzdVo4SCiZA", "JqSUrO0-mFIBAAAAWwTvLzoWGQI", "9iklpvIPQVABAAAAORQXKur_Eyc", "F5oWoCs7QFACAAAAgf2bD_mg8Bw" }, + new int[] { 315, 315, 225, 315, 0, 180, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + new int[] { 50, 50, 58, 47, 46, 50, 49, 46, 51, 50, 49, 34, 54, 50, 40 } ); themes.add(themeInfo); themesDict.put("Arctic Blue", themeInfo); @@ -4142,6 +4292,8 @@ public class Theme { new int[] { 0xFF3EC1D6, 0xFFC86994, 0xFFDBA12F, 0xFFD08E3B, 0xFF51B5CB, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, new int[] { 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, new int[] { 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + null, + null, new int[] { 9, 10, 11, 12, 13, 0, 1, 2, 3, 4, 5, 6, 7, 8 }, new String[] { "", "", "", "", "", "", "", "", "", "", "", "", "", "" }, new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, @@ -4158,15 +4310,17 @@ public class Theme { themeInfo.previewOutColor = 0xff75A2E6; themeInfo.sortIndex = 4; themeInfo.setAccentColorOptions( - new int[] { 0xFF6ABE3F, 0xFF8D78E3, 0xFFDE5E7E, 0xFF5977E8, 0xFFDBC11A, 0xff3e88f7, 0xff4ab5d3, 0xff4ab841, 0xffd95576, 0xffe27d2b, 0xff936cda, 0xffd04336, 0xffe8ae1c, 0xff7988a3 }, - new int[] { 0xFF8A5294, 0xFFB46C1B, 
0xFFAF4F6F, 0xFF266E8D, 0xFF744EB7, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFF6855BB, 0xFFA53B4A, 0xFF62499C, 0xFF2F919D, 0xFF298B95, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFF020702, 0xFF111314, 0xFF040304, 0xFF0B0C0C, 0xFF060607, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 0xFF0F0E10, 0xFF080809, 0xFF050505, 0xFF0E0E10, 0xFF0D0D10, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, - new int[] { 9, 10, 11, 12, 13, 0, 1, 2, 3, 4, 5, 6, 7, 8 }, - new String[] { "YIxYGEALQVADAAAAA3QbEH0AowY", "9LW_RcoOSVACAAAAFTk3DTyXN-M", "O-wmAfBPSFADAAAA4zINVfD_bro", "F5oWoCs7QFACAAAAgf2bD_mg8Bw", "-Xc-np9y2VMCAAAARKr0yNNPYW0", "", "", "", "", "", "", "", "", "" }, - new int[] { 45, 135, 0, 180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, - new int[] { 34, 47, 52, 48, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0 } + new int[] { 0xFF6ABE3F, 0xFF8D78E3, 0xFFDE5E7E, 0xFF5977E8, 0xFFDBC11A, 0xff3e88f7, 0xff4ab5d3, 0xff4ab841, 0xffd95576, 0xffe27d2b, 0xff936cda, 0xffd04336, 0xffe8ae1c, 0xff7988a3 }, + new int[] { 0xFF8A5294, 0xFFB46C1B, 0xFFAF4F6F, 0xFF266E8D, 0xFF744EB7, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + new int[] { 0xFF6855BB, 0xFFA53B4A, 0xFF62499C, 0xFF2F919D, 0xFF298B95, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 }, + new int[] { 0xFF16131c, 0xFF1e1118, 0xFF0f0b10, 0xFF090c0c, 0xFF071519, 0xff0d0e17, 0xff111b1c, 0xff0c110c, 0xff0e0b0d, 0xff1d160f, 0xff09090a, 0xff1c1210, 0xff1d1b18, 0xff0e1012 }, + new int[] { 0xFF201827, 0xFF100f13, 0xFF1b151a, 0xFF141f22, 0xFF0c0c0f, 0xff090a0c, 0xff0a0e0e, 0xff080908, 0xff1a1618, 0xff13100d, 0xff1e1a21, 0xff0f0d0c, 
0xff0c0b08, 0xff070707 }, + new int[] { 0xFF0e0b13, 0xFF211623, 0xFF130e12, 0xFF0d0f11, 0xFF10191f, 0xff181c28, 0xff142121, 0xff121812, 0xff130e11, 0xff1a130f, 0xff0b0a0b, 0xff120d0b, 0xff15140f, 0xff101214 }, + new int[] { 0xFF1e192a, 0xFF111016, 0xFF21141a, 0xFF111a1b, 0xFF0a0d13, 0xff0e0f12, 0xff070c0b, 0xff0b0d0b, 0xff22121e, 0xff0f0c0c, 0xff110f17, 0xff070606, 0xff0c0a0a, 0xff09090b }, + new int[] { 9, 10, 11, 12, 13, 0, 1, 2, 3, 4, 5, 6, 7, 8 }, + new String[] { "YIxYGEALQVADAAAAA3QbEH0AowY", "9LW_RcoOSVACAAAAFTk3DTyXN-M", "O-wmAfBPSFADAAAA4zINVfD_bro", "F5oWoCs7QFACAAAAgf2bD_mg8Bw", "-Xc-np9y2VMCAAAARKr0yNNPYW0", "fqv01SQemVIBAAAApND8LDRUhRU", "F5oWoCs7QFACAAAAgf2bD_mg8Bw", "ptuUd96JSFACAAAATobI23sPpz0", "p-pXcflrmFIBAAAAvXYQk-mCwZU", "Nl8Pg2rBQVACAAAA25Lxtb8SDp0", "dhf9pceaQVACAAAAbzdVo4SCiZA", "9GcNVISdSVADAAAAUcw5BYjELW4", "9LW_RcoOSVACAAAAFTk3DTyXN-M", "dk_wwlghOFACAAAAfz9xrxi6euw" }, + new int[] { 45, 135, 0, 180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + new int[] { 34, 47, 52, 48, 54, 50, 37, 56, 48, 49, 40, 64, 38, 48 } ); themes.add(themeInfo); themesDict.put("Night", themeInfo); @@ -4277,9 +4431,13 @@ public class Theme { accent.backgroundOverrideColor = data.readInt32(true); } if (version >= 2) { - accent.backgroundGradientOverrideColor = data.readInt64(true); + accent.backgroundGradientOverrideColor1 = data.readInt64(true); } else { - accent.backgroundGradientOverrideColor = data.readInt32(true); + accent.backgroundGradientOverrideColor1 = data.readInt32(true); + } + if (version >= 6) { + accent.backgroundGradientOverrideColor2 = data.readInt64(true); + accent.backgroundGradientOverrideColor3 = data.readInt64(true); } if (version >= 1) { accent.backgroundRotation = data.readInt32(true); @@ -4337,7 +4495,7 @@ public class Theme { info.lastAccentId = 101; SerializedData data = new SerializedData(4 * (15 + 2)); - data.writeInt32(5); + data.writeInt32(6); data.writeInt32(1); data.writeInt32(accent.id); @@ -4345,7 +4503,9 @@ public class Theme { 
data.writeInt32(accent.myMessagesAccentColor); data.writeInt32(accent.myMessagesGradientAccentColor); data.writeInt64(accent.backgroundOverrideColor); - data.writeInt64(accent.backgroundGradientOverrideColor); + data.writeInt64(accent.backgroundGradientOverrideColor1); + data.writeInt64(accent.backgroundGradientOverrideColor2); + data.writeInt64(accent.backgroundGradientOverrideColor3); data.writeInt32(accent.backgroundRotation); data.writeInt64(0); data.writeDouble(accent.patternIntensity); @@ -4429,7 +4589,9 @@ public class Theme { overrideWallpaper.fileName = "wallpaper.jpg"; overrideWallpaper.originalFileName = "wallpaper_original.jpg"; } - overrideWallpaper.gradientColor = preferences.getInt("selectedGradientColor", 0); + overrideWallpaper.gradientColor1 = preferences.getInt("selectedGradientColor", 0); + overrideWallpaper.gradientColor2 = preferences.getInt("selectedGradientColor2", 0); + overrideWallpaper.gradientColor3 = preferences.getInt("selectedGradientColor3", 0); overrideWallpaper.rotation = preferences.getInt("selectedGradientRotation", 45); overrideWallpaper.isBlurred = preferences.getBoolean("selectedBackgroundBlurred", false); overrideWallpaper.isMotion = preferences.getBoolean("selectedBackgroundMotion", false); @@ -4707,6 +4869,20 @@ public class Theme { return new CombinedDrawable(defaultDrawable, drawable); } + public static int getWallpaperColor(int color) { + if (color == 0) { + return 0; + } + return color | 0xff000000; + } + + public static float getThemeIntensity(float value) { + if (value < 0 && !getActiveTheme().isDark()) { + return -value; + } + return value; + } + public static void setCombinedDrawableColor(Drawable combinedDrawable, int color, boolean isIcon) { if (!(combinedDrawable instanceof CombinedDrawable)) { return; @@ -4753,6 +4929,39 @@ public class Theme { return defaultDrawable; } + public static Drawable createServiceDrawable(int rad, View view, View containerView) { + return new Drawable() { + + private RectF rect = new 
RectF(); + + @Override + public void draw(@NonNull Canvas canvas) { + Rect bounds = getBounds(); + rect.set(bounds.left, bounds.top, bounds.right, bounds.bottom); + applyServiceShaderMatrixForView(view, containerView); + canvas.drawRoundRect(rect, rad, rad, chat_actionBackgroundPaint); + if (hasGradientService()) { + canvas.drawRoundRect(rect, rad, rad, Theme.chat_actionBackgroundGradientDarkenPaint); + } + } + + @Override + public void setAlpha(int alpha) { + + } + + @Override + public void setColorFilter(@Nullable ColorFilter colorFilter) { + + } + + @Override + public int getOpacity() { + return PixelFormat.TRANSPARENT; + } + }; + } + public static Drawable createSimpleSelectorRoundRectDrawable(int rad, int defaultColor, int pressedColor) { return createSimpleSelectorRoundRectDrawable(rad, defaultColor, pressedColor, pressedColor); } @@ -5098,6 +5307,10 @@ public class Theme { return isApplyingAccent && currentTheme.overrideWallpaper != null; } + public static boolean isCustomWallpaperColor() { + return hasCustomWallpaper() && currentTheme.overrideWallpaper.color != 0; + } + public static void resetCustomWallpaper(boolean temporary) { if (temporary) { isApplyingAccent = false; @@ -5119,10 +5332,10 @@ public class Theme { getThemeFileValues(new File(themeInfo.pathToFile), null, wallpaperLink); if (!TextUtils.isEmpty(wallpaperLink[0])) { - String ling = wallpaperLink[0]; - themeInfo.pathToWallpaper = new File(ApplicationLoader.getFilesDirFixed(), Utilities.MD5(ling) + ".wp").getAbsolutePath(); + String link = wallpaperLink[0]; + themeInfo.pathToWallpaper = new File(ApplicationLoader.getFilesDirFixed(), Utilities.MD5(link) + ".wp").getAbsolutePath(); try { - Uri data = Uri.parse(ling); + Uri data = Uri.parse(link); themeInfo.slug = data.getQueryParameter("slug"); String mode = data.getQueryParameter("mode"); if (mode != null) { @@ -5143,9 +5356,15 @@ public class Theme { try { String bgColor = data.getQueryParameter("bg_color"); if (!TextUtils.isEmpty(bgColor)) { - 
themeInfo.patternBgColor = Integer.parseInt(bgColor, 16) | 0xff000000; - if (bgColor.length() > 6) { - themeInfo.patternBgGradientColor = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; + themeInfo.patternBgColor = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; + if (bgColor.length() >= 13 && AndroidUtilities.isValidWallChar(bgColor.charAt(6))) { + themeInfo.patternBgGradientColor1 = Integer.parseInt(bgColor.substring(7, 13), 16) | 0xff000000; + } + if (bgColor.length() >= 20 && AndroidUtilities.isValidWallChar(bgColor.charAt(13))) { + themeInfo.patternBgGradientColor2 = Integer.parseInt(bgColor.substring(14, 20), 16) | 0xff000000; + } + if (bgColor.length() == 27 && AndroidUtilities.isValidWallChar(bgColor.charAt(20))) { + themeInfo.patternBgGradientColor3 = Integer.parseInt(bgColor.substring(21), 16) | 0xff000000; } } } catch (Exception ignore) { @@ -5310,8 +5529,14 @@ public class Theme { String bgColor = data.getQueryParameter("bg_color"); if (!TextUtils.isEmpty(bgColor)) { themeInfo.patternBgColor = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; - if (bgColor.length() > 6) { - themeInfo.patternBgGradientColor = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; + if (bgColor.length() >= 13 && AndroidUtilities.isValidWallChar(bgColor.charAt(6))) { + themeInfo.patternBgGradientColor1 = Integer.parseInt(bgColor.substring(7, 13), 16) | 0xff000000; + } + if (bgColor.length() >= 20 && AndroidUtilities.isValidWallChar(bgColor.charAt(13))) { + themeInfo.patternBgGradientColor2 = Integer.parseInt(bgColor.substring(14, 20), 16) | 0xff000000; + } + if (bgColor.length() == 27 && AndroidUtilities.isValidWallChar(bgColor.charAt(20))) { + themeInfo.patternBgGradientColor3 = Integer.parseInt(bgColor.substring(21), 16) | 0xff000000; } } } catch (Exception ignore) { @@ -5387,6 +5612,10 @@ public class Theme { } public static void refreshThemeColors() { + refreshThemeColors(false); + } + + public static void refreshThemeColors(boolean bg) 
{ currentColors.clear(); currentColors.putAll(currentColorsNoAccent); shouldDrawGradientIcons = true; @@ -5398,7 +5627,7 @@ public class Theme { applyCommonTheme(); applyDialogsTheme(); applyProfileTheme(); - applyChatTheme(false); + applyChatTheme(false, bg); AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewTheme, false)); } @@ -5553,7 +5782,7 @@ public class Theme { int N = theme.themeAccents.size(); int count = Math.max(0, N - theme.defaultAccentCount); SerializedData data = new SerializedData(4 * (count * 15 + 2)); - data.writeInt32(5); + data.writeInt32(6); data.writeInt32(count); for (int a = 0; a < N; a++) { ThemeAccent accent = theme.themeAccents.get(a); @@ -5565,7 +5794,9 @@ public class Theme { data.writeInt32(accent.myMessagesAccentColor); data.writeInt32(accent.myMessagesGradientAccentColor); data.writeInt64(accent.backgroundOverrideColor); - data.writeInt64(accent.backgroundGradientOverrideColor); + data.writeInt64(accent.backgroundGradientOverrideColor1); + data.writeInt64(accent.backgroundGradientOverrideColor2); + data.writeInt64(accent.backgroundGradientOverrideColor3); data.writeInt32(accent.backgroundRotation); data.writeInt64(0); data.writeDouble(accent.patternIntensity); @@ -5868,8 +6099,15 @@ public class Theme { return 0; } + public static void setChangingWallpaper(boolean value) { + changingWallpaper = value; + if (!changingWallpaper) { + checkAutoNightThemeConditions(false); + } + } + public static void checkAutoNightThemeConditions(boolean force) { - if (previousTheme != null) { + if (previousTheme != null || changingWallpaper) { return; } if (!force && switchNightThemeDelay > 0) { @@ -5980,8 +6218,16 @@ public class Theme { wallpaperLink = "https://attheme.org?slug=" + wallpaperInfo.slug; } else { String color = String.format("%02x%02x%02x", (byte) (wallpaperInfo.color >> 16) & 0xff, (byte) (wallpaperInfo.color >> 8) & 0xff, (byte) (wallpaperInfo.color & 
0xff)).toLowerCase(); - String color2 = wallpaperInfo.gradientColor != 0 ? String.format("%02x%02x%02x", (byte) (wallpaperInfo.gradientColor >> 16) & 0xff, (byte) (wallpaperInfo.gradientColor >> 8) & 0xff, (byte) (wallpaperInfo.gradientColor & 0xff)).toLowerCase() : null; - if (color2 != null) { + String color2 = wallpaperInfo.gradientColor1 != 0 ? String.format("%02x%02x%02x", (byte) (wallpaperInfo.gradientColor1 >> 16) & 0xff, (byte) (wallpaperInfo.gradientColor1 >> 8) & 0xff, (byte) (wallpaperInfo.gradientColor1 & 0xff)).toLowerCase() : null; + String color3 = wallpaperInfo.gradientColor2 != 0 ? String.format("%02x%02x%02x", (byte) (wallpaperInfo.gradientColor2 >> 16) & 0xff, (byte) (wallpaperInfo.gradientColor2 >> 8) & 0xff, (byte) (wallpaperInfo.gradientColor2 & 0xff)).toLowerCase() : null; + String color4 = wallpaperInfo.gradientColor3 != 0 ? String.format("%02x%02x%02x", (byte) (wallpaperInfo.gradientColor3 >> 16) & 0xff, (byte) (wallpaperInfo.gradientColor3 >> 8) & 0xff, (byte) (wallpaperInfo.gradientColor3 & 0xff)).toLowerCase() : null; + if (color2 != null && color3 != null) { + if (color4 != null) { + color += "~" + color2 + "~" + color3 + "~" + color4; + } else { + color += "~" + color2 + "~" + color3; + } + } else if (color2 != null) { color += "-" + color2; color += "&rotation=" + wallpaperInfo.rotation; } @@ -6021,7 +6267,7 @@ public class Theme { for (HashMap.Entry entry : colorsMap.entrySet()) { String key = entry.getKey(); if (wallpaperToSave instanceof BitmapDrawable || wallpaperLink != null) { - if (key_chat_wallpaper.equals(key) || key_chat_wallpaper_gradient_to.equals(key)) { + if (key_chat_wallpaper.equals(key) || key_chat_wallpaper_gradient_to1.equals(key) || key_chat_wallpaper_gradient_to2.equals(key) || key_chat_wallpaper_gradient_to3.equals(key)) { continue; } } @@ -6442,7 +6688,7 @@ public class Theme { return color; } - public static String createThemePreviewImage(String pathToFile, String wallpaperPath) { + public static String 
createThemePreviewImage(String pathToFile, String wallpaperPath, Theme.ThemeAccent accent) { try { String[] wallpaperLink = new String[1]; HashMap colors = getThemeFileValues(new File(pathToFile), null, wallpaperLink); @@ -6460,8 +6706,65 @@ public class Theme { int messageOutColor = getPreviewColor(colors, key_chat_outBubble); Integer messageOutGradientColor = colors.get(key_chat_outBubbleGradient); Integer backgroundColor = colors.get(key_chat_wallpaper); - Integer serviceColor = colors.get(key_chat_serviceBackground); - Integer gradientToColor = colors.get(key_chat_wallpaper_gradient_to); + Integer gradientToColor1 = colors.get(key_chat_wallpaper_gradient_to1); + Integer gradientToColor2 = colors.get(key_chat_wallpaper_gradient_to2); + Integer gradientToColor3 = colors.get(key_chat_wallpaper_gradient_to3); + + int defaultBackgroundColor = backgroundColor != null ? backgroundColor : 0; + int backgroundOverrideColor = (int) accent.backgroundOverrideColor; + int backColor; + if (backgroundOverrideColor == 0 && accent.backgroundOverrideColor != 0) { + backColor = 0; + } else { + backColor = backgroundOverrideColor != 0 ? backgroundOverrideColor : defaultBackgroundColor; + } + + int defaultBackgroundGradient1 = gradientToColor1 != null ? gradientToColor1 : 0; + int backgroundGradientOverrideColor1 = (int) accent.backgroundGradientOverrideColor1; + int color1; + if (backgroundGradientOverrideColor1 == 0 && accent.backgroundGradientOverrideColor1 != 0) { + color1 = 0; + } else { + color1 = backgroundGradientOverrideColor1 != 0 ? backgroundGradientOverrideColor1 : defaultBackgroundGradient1; + } + int defaultBackgroundGradient2 = gradientToColor2 != null ? gradientToColor2 : 0; + int backgroundGradientOverrideColor2 = (int) accent.backgroundGradientOverrideColor2; + int color2; + if (backgroundGradientOverrideColor2 == 0 && accent.backgroundGradientOverrideColor2 != 0) { + color2 = 0; + } else { + color2 = backgroundGradientOverrideColor2 != 0 ? 
backgroundGradientOverrideColor2 : defaultBackgroundGradient2; + } + int defaultBackgroundGradient3 = gradientToColor3 != null ? gradientToColor3 : 0; + int backgroundGradientOverrideColor3 = (int) accent.backgroundGradientOverrideColor3; + int color3; + if (backgroundGradientOverrideColor3 == 0 && accent.backgroundGradientOverrideColor3 != 0) { + color3 = 0; + } else { + color3 = backgroundGradientOverrideColor3 != 0 ? backgroundGradientOverrideColor3 : defaultBackgroundGradient3; + } + + if (!TextUtils.isEmpty(wallpaperLink[0])) { + try { + Uri data = Uri.parse(wallpaperLink[0]); + String bgColor = data.getQueryParameter("bg_color"); + if (!TextUtils.isEmpty(bgColor)) { + accent.backgroundOverrideColor = backColor = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; + if (bgColor.length() >= 13 && AndroidUtilities.isValidWallChar(bgColor.charAt(6))) { + accent.backgroundGradientOverrideColor1 = color1 = Integer.parseInt(bgColor.substring(7, 13), 16) | 0xff000000; + } + if (bgColor.length() >= 20 && AndroidUtilities.isValidWallChar(bgColor.charAt(13))) { + accent.backgroundGradientOverrideColor2 = color2 = Integer.parseInt(bgColor.substring(14, 20), 16) | 0xff000000; + } + if (bgColor.length() == 27 && AndroidUtilities.isValidWallChar(bgColor.charAt(20))) { + accent.backgroundGradientOverrideColor3 = color3 = Integer.parseInt(bgColor.substring(21), 16) | 0xff000000; + } + } + } catch (Exception e) { + FileLog.e(e); + } + } + Drawable backDrawable = ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.preview_back).mutate(); setDrawableColor(backDrawable, actionBarIconColor); @@ -6511,39 +6814,44 @@ public class Theme { options.inJustDecodeBounds = false; Bitmap wallpaper = BitmapFactory.decodeFile(wallpaperPath, options); if (wallpaper != null) { - Paint bitmapPaint = new Paint(); - bitmapPaint.setFilterBitmap(true); - scale = Math.min(wallpaper.getWidth() / 560.0f, wallpaper.getHeight() / 560.0f); - rect.set(0, 0, 
wallpaper.getWidth() / scale, wallpaper.getHeight() / scale); - rect.offset((bitmap.getWidth() - rect.width()) / 2, (bitmap.getHeight() - rect.height()) / 2); - canvas.drawBitmap(wallpaper, null, rect, bitmapPaint); - hasBackground = true; - if (serviceColor == null) { - serviceColor = AndroidUtilities.calcDrawableColor(new BitmapDrawable(wallpaper))[0]; + if (color2 != 0) { + MotionBackgroundDrawable wallpaperDrawable = new MotionBackgroundDrawable(backColor, color1, color2, color3, true); + wallpaperDrawable.setPatternBitmap((int) (accent.patternIntensity * 100), wallpaper); + wallpaperDrawable.setBounds(0, 0, bitmap.getWidth(), bitmap.getHeight()); + wallpaperDrawable.draw(canvas); + } else { + Paint bitmapPaint = new Paint(); + bitmapPaint.setFilterBitmap(true); + scale = Math.min(wallpaper.getWidth() / 560.0f, wallpaper.getHeight() / 560.0f); + rect.set(0, 0, wallpaper.getWidth() / scale, wallpaper.getHeight() / scale); + rect.offset((bitmap.getWidth() - rect.width()) / 2, (bitmap.getHeight() - rect.height()) / 2); + canvas.drawBitmap(wallpaper, null, rect, bitmapPaint); } + hasBackground = true; } } } catch (Throwable e) { FileLog.e(e); } - } else if (backgroundColor != null) { + } else if (backColor != 0) { Drawable wallpaperDrawable; - if (gradientToColor == null) { - wallpaperDrawable = new ColorDrawable(backgroundColor); + if (color1 == 0) { + wallpaperDrawable = new ColorDrawable(backColor); } else { - Integer gradientRotation = colors.get(key_chat_wallpaper_gradient_rotation); - if (gradientRotation == null) { - gradientRotation = 45; + if (color2 != 0) { + wallpaperDrawable = new MotionBackgroundDrawable(backColor, color1, color2, color3, true); + } else { + Integer gradientRotation = colors.get(key_chat_wallpaper_gradient_rotation); + if (gradientRotation == null) { + gradientRotation = 45; + } + final int[] gradientColors = {backColor, gradientToColor2}; + wallpaperDrawable = 
BackgroundGradientDrawable.createDitheredGradientBitmapDrawable(gradientRotation, gradientColors, bitmap.getWidth(), bitmap.getHeight() - 120); + quality = 90; } - final int[] gradientColors = {backgroundColor, gradientToColor}; - wallpaperDrawable = BackgroundGradientDrawable.createDitheredGradientBitmapDrawable(gradientRotation, gradientColors, bitmap.getWidth(), bitmap.getHeight() - 120); - quality = 90; } wallpaperDrawable.setBounds(0, 120, bitmap.getWidth(), bitmap.getHeight() - 120); wallpaperDrawable.draw(canvas); - if (serviceColor == null) { - serviceColor = AndroidUtilities.calcDrawableColor(new ColorDrawable(backgroundColor))[0]; - } hasBackground = true; } else if (wallpaperFileOffset != null && wallpaperFileOffset >= 0 || !TextUtils.isEmpty(wallpaperLink[0])) { FileInputStream stream = null; @@ -6583,9 +6891,6 @@ public class Theme { rect.offset((bitmap.getWidth() - rect.width()) / 2, (bitmap.getHeight() - rect.height()) / 2); canvas.drawBitmap(wallpaper, null, rect, bitmapPaint); hasBackground = true; - if (serviceColor == null) { - serviceColor = AndroidUtilities.calcDrawableColor(new BitmapDrawable(wallpaper))[0]; - } } } } catch (Throwable e) { @@ -6601,11 +6906,7 @@ public class Theme { } } if (!hasBackground) { - BitmapDrawable catsDrawable = (BitmapDrawable) ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.catstile).mutate(); - if (serviceColor == null) { - serviceColor = AndroidUtilities.calcDrawableColor(catsDrawable)[0]; - } - catsDrawable.setTileModeXY(Shader.TileMode.REPEAT, Shader.TileMode.REPEAT); + Drawable catsDrawable = createDefaultWallpaper(bitmap.getWidth(), bitmap.getHeight() - 120); catsDrawable.setBounds(0, 120, bitmap.getWidth(), bitmap.getHeight() - 120); catsDrawable.draw(canvas); } @@ -6637,14 +6938,6 @@ public class Theme { msgDrawable[0].setTop(323, 522, false, false); msgDrawable[0].draw(canvas); - if (serviceColor != null) { - int x = (bitmap.getWidth() - 126) / 2; - int y = 150; - rect.set(x, y, 
x + 126, y + 42); - paint.setColor(serviceColor); - canvas.drawRoundRect(rect, 21, 21, paint); - } - paint.setColor(messageFieldColor); canvas.drawRect(0, bitmap.getHeight() - 120, bitmap.getWidth(), bitmap.getHeight(), paint); if (emojiDrawable != null) { @@ -6941,6 +7234,25 @@ public class Theme { dialogs_pinnedDrawable = resources.getDrawable(R.drawable.list_pin); moveUpDrawable = resources.getDrawable(R.drawable.preview_open); + RectF rect = new RectF(); + chat_updatePath[0] = new Path(); + chat_updatePath[2] = new Path(); + float cx = AndroidUtilities.dp(12); + float cy = AndroidUtilities.dp(12); + rect.set(cx - AndroidUtilities.dp(5), cy - AndroidUtilities.dp(5), cx + AndroidUtilities.dp(5), cy + AndroidUtilities.dp(5)); + chat_updatePath[2].arcTo(rect, -160, -110, true); + chat_updatePath[2].arcTo(rect, 20, -110, true); + + chat_updatePath[0].moveTo(cx, cy + AndroidUtilities.dp(5 + 3)); + chat_updatePath[0].lineTo(cx, cy + AndroidUtilities.dp(5 - 3)); + chat_updatePath[0].lineTo(cx + AndroidUtilities.dp(3), cy + AndroidUtilities.dp(5)); + chat_updatePath[0].close(); + + chat_updatePath[0].moveTo(cx, cy - AndroidUtilities.dp(5 + 3)); + chat_updatePath[0].lineTo(cx, cy - AndroidUtilities.dp(5 - 3)); + chat_updatePath[0].lineTo(cx - AndroidUtilities.dp(3), cy - AndroidUtilities.dp(5)); + chat_updatePath[0].close(); + applyDialogsTheme(); } @@ -7010,7 +7322,6 @@ public class Theme { if (chat_msgInDrawable != null) { chat_msgInDrawable = null; currentColor = 0; - currentSelectedColor = 0; createChatResources(context, false); } if (dialogs_namePaint != null) { @@ -7099,6 +7410,11 @@ public class Theme { chat_actionTextPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); chat_actionTextPaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); chat_actionBackgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + chat_actionBackgroundSelectedPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + chat_actionBackgroundPaint2 = new Paint(Paint.ANTI_ALIAS_FLAG); + 
chat_actionBackgroundSelectedPaint2 = new Paint(Paint.ANTI_ALIAS_FLAG); + chat_actionBackgroundGradientDarkenPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + chat_actionBackgroundGradientDarkenPaint.setColor(0x2a000000); chat_timeBackgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); chat_contextResult_titleTextPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); chat_contextResult_titleTextPaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); @@ -7210,8 +7526,6 @@ public class Theme { chat_commentStickerDrawable = resources.getDrawable(R.drawable.msg_msgbubble2); chat_commentArrowDrawable = resources.getDrawable(R.drawable.msg_arrowright); - chat_systemDrawable = resources.getDrawable(R.drawable.system); - chat_contextResult_shadowUnderSwitchDrawable = resources.getDrawable(R.drawable.header_shadow).mutate(); chat_attachButtonDrawables[0] = new RLottieDrawable(R.raw.attach_gallery, "attach_gallery", AndroidUtilities.dp(26), AndroidUtilities.dp(26)); @@ -7222,17 +7536,6 @@ public class Theme { chat_attachButtonDrawables[5] = new RLottieDrawable(R.raw.attach_poll, "attach_poll", AndroidUtilities.dp(26), AndroidUtilities.dp(26)); chat_attachEmptyDrawable = resources.getDrawable(R.drawable.nophotos3); - chat_cornerOuter[0] = resources.getDrawable(R.drawable.corner_out_tl); - chat_cornerOuter[1] = resources.getDrawable(R.drawable.corner_out_tr); - chat_cornerOuter[2] = resources.getDrawable(R.drawable.corner_out_br); - chat_cornerOuter[3] = resources.getDrawable(R.drawable.corner_out_bl); - - chat_cornerInner[0] = resources.getDrawable(R.drawable.corner_in_tr); - chat_cornerInner[1] = resources.getDrawable(R.drawable.corner_in_tl); - chat_cornerInner[2] = resources.getDrawable(R.drawable.corner_in_br); - chat_cornerInner[3] = resources.getDrawable(R.drawable.corner_in_bl); - - chat_shareDrawable = createRoundRectDrawable(AndroidUtilities.dp(16), 0xffffffff); chat_shareIconDrawable = resources.getDrawable(R.drawable.share_arrow).mutate(); chat_replyIconDrawable = 
resources.getDrawable(R.drawable.fast_reply); chat_goIconDrawable = resources.getDrawable(R.drawable.message_arrow); @@ -7355,7 +7658,7 @@ public class Theme { } - applyChatTheme(fontsOnly); + applyChatTheme(fontsOnly, false); } chat_msgTextPaintOneEmoji.setTextSize(AndroidUtilities.dp(28)); @@ -7433,7 +7736,7 @@ public class Theme { } - public static void applyChatTheme(boolean fontsOnly) { + public static void applyChatTheme(boolean fontsOnly, boolean bg) { if (chat_msgTextPaint == null) { return; } @@ -7614,16 +7917,57 @@ public class Theme { setDrawableColor(chat_attachEmptyDrawable, getColor(key_chat_attachEmptyImage)); - applyChatServiceMessageColor(); + if (!bg) { + applyChatServiceMessageColor(); + } refreshAttachButtonsColors(); } } public static void applyChatServiceMessageColor() { - applyChatServiceMessageColor(null); + applyChatServiceMessageColor(null, null, wallpaper); } - public static void applyChatServiceMessageColor(int[] custom) { + public static boolean hasGradientService() { + return serviceBitmapShader != null; + } + + private static int[] viewPos = new int[2]; + public static void applyServiceShaderMatrixForView(View view, View background) { + if (view == null || background == null) { + return; + } + view.getLocationOnScreen(viewPos); + int x = viewPos[0]; + int y = viewPos[1]; + background.getLocationOnScreen(viewPos); + applyServiceShaderMatrix(background.getMeasuredWidth(), background.getMeasuredHeight(), x, y - viewPos[1]); + } + + public static void applyServiceShaderMatrix(int w, int h, float translationX, float translationY) { + if (serviceBitmapShader == null) { + return; + } + + float bitmapWidth = serviceBitmap.getWidth(); + float bitmapHeight = serviceBitmap.getHeight(); + float maxScale = Math.max(w / bitmapWidth, h / bitmapHeight); + float width = bitmapWidth * maxScale; + float height = bitmapHeight * maxScale; + float x = (w - width) / 2; + float y = (h - height) / 2; + + serviceBitmapMatrix.reset(); + 
serviceBitmapMatrix.setTranslate(x - translationX, y - translationY); + serviceBitmapMatrix.preScale(maxScale, maxScale); + serviceBitmapShader.setLocalMatrix(serviceBitmapMatrix); + } + + public static void applyChatServiceMessageColor(int[] custom, Drawable wallpaperOverride) { + applyChatServiceMessageColor(custom, wallpaperOverride, wallpaper); + } + + public static void applyChatServiceMessageColor(int[] custom, Drawable wallpaperOverride, Drawable currentWallpaper) { if (chat_actionBackgroundPaint == null) { return; } @@ -7651,22 +7995,76 @@ public class Theme { servicePressedColor = serviceSelectedMessageColor; servicePressedColor2 = serviceSelectedMessage2Color; } - if (currentColor != serviceColor) { - chat_actionBackgroundPaint.setColor(serviceColor); - colorFilter = new PorterDuffColorFilter(serviceColor, PorterDuff.Mode.MULTIPLY); - colorFilter2 = new PorterDuffColorFilter(serviceColor2, PorterDuff.Mode.MULTIPLY); - currentColor = serviceColor; - if (chat_cornerOuter[0] != null) { - for (int a = 0; a < 4; a++) { - chat_cornerOuter[a].setColorFilter(colorFilter); - chat_cornerInner[a].setColorFilter(colorFilter); + + Drawable drawable = wallpaperOverride != null ? 
wallpaperOverride : currentWallpaper; + if (drawable instanceof MotionBackgroundDrawable) { + Bitmap newBitmap = ((MotionBackgroundDrawable) drawable).getBitmap(); + if (serviceBitmap != newBitmap) { + serviceBitmap = newBitmap; + serviceBitmapShader = new BitmapShader(serviceBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); + if (serviceBitmapMatrix == null) { + serviceBitmapMatrix = new Matrix(); } } + setDrawableColor(chat_msgStickerPinnedDrawable, 0xffffffff); + setDrawableColor(chat_msgStickerCheckDrawable, 0xffffffff); + setDrawableColor(chat_msgStickerHalfCheckDrawable, 0xffffffff); + setDrawableColor(chat_msgStickerClockDrawable, 0xffffffff); + setDrawableColor(chat_msgStickerViewsDrawable, 0xffffffff); + setDrawableColor(chat_msgStickerRepliesDrawable, 0xffffffff); + chat_actionTextPaint.setColor(0xffffffff); + chat_actionTextPaint.setColor(0xffffffff); + chat_actionTextPaint.linkColor = 0xffffffff; + chat_botButtonPaint.setColor(0xffffffff); + setDrawableColor(chat_commentStickerDrawable, 0xffffffff); + setDrawableColor(chat_shareIconDrawable, 0xffffffff); + setDrawableColor(chat_replyIconDrawable, 0xffffffff); + setDrawableColor(chat_goIconDrawable, 0xffffffff); + setDrawableColor(chat_botInlineDrawable, 0xffffffff); + setDrawableColor(chat_botLinkDrawalbe, 0xffffffff); + } else { + serviceBitmap = null; + serviceBitmapShader = null; + + setDrawableColorByKey(chat_msgStickerPinnedDrawable, key_chat_serviceText); + setDrawableColorByKey(chat_msgStickerCheckDrawable, key_chat_serviceText); + setDrawableColorByKey(chat_msgStickerHalfCheckDrawable, key_chat_serviceText); + setDrawableColorByKey(chat_msgStickerClockDrawable, key_chat_serviceText); + setDrawableColorByKey(chat_msgStickerViewsDrawable, key_chat_serviceText); + setDrawableColorByKey(chat_msgStickerRepliesDrawable, key_chat_serviceText); + chat_actionTextPaint.setColor(getColor(key_chat_serviceText)); + chat_actionTextPaint.setColor(getColor(key_chat_serviceText)); + 
chat_actionTextPaint.linkColor = getColor(key_chat_serviceLink); + setDrawableColorByKey(chat_commentStickerDrawable, key_chat_serviceIcon); + setDrawableColorByKey(chat_shareIconDrawable, key_chat_serviceIcon); + setDrawableColorByKey(chat_replyIconDrawable, key_chat_serviceIcon); + setDrawableColorByKey(chat_goIconDrawable, key_chat_serviceIcon); + setDrawableColorByKey(chat_botInlineDrawable, key_chat_serviceIcon); + setDrawableColorByKey(chat_botLinkDrawalbe, key_chat_serviceIcon); + chat_botButtonPaint.setColor(getColor(key_chat_botButtonText)); } - if (currentSelectedColor != servicePressedColor) { - currentSelectedColor = servicePressedColor; - colorPressedFilter = new PorterDuffColorFilter(servicePressedColor, PorterDuff.Mode.MULTIPLY); - colorPressedFilter2 = new PorterDuffColorFilter(servicePressedColor2, PorterDuff.Mode.MULTIPLY); + + chat_actionBackgroundPaint.setColor(serviceColor); + chat_actionBackgroundSelectedPaint.setColor(servicePressedColor); + chat_actionBackgroundPaint2.setColor(serviceColor2); + chat_actionBackgroundSelectedPaint2.setColor(servicePressedColor2); + currentColor = serviceColor; + + if (serviceBitmapShader != null && (currentColors.get(key_chat_serviceBackground) == null || drawable instanceof MotionBackgroundDrawable)) { + chat_actionBackgroundPaint.setShader(serviceBitmapShader); + chat_actionBackgroundSelectedPaint.setShader(serviceBitmapShader); + ColorMatrix colorMatrix = new ColorMatrix(); + colorMatrix.setSaturation(1.8f); + chat_actionBackgroundPaint.setColorFilter(new ColorMatrixColorFilter(colorMatrix)); + chat_actionBackgroundPaint.setAlpha(127); + + chat_actionBackgroundSelectedPaint.setColorFilter(new ColorMatrixColorFilter(colorMatrix)); + chat_actionBackgroundSelectedPaint.setAlpha(200); + } else { + chat_actionBackgroundPaint.setColorFilter(null); + chat_actionBackgroundPaint.setShader(null); + chat_actionBackgroundSelectedPaint.setColorFilter(null); + chat_actionBackgroundSelectedPaint.setShader(null); } } @@ 
-7733,7 +8131,7 @@ public class Theme { public static int getDefaultColor(String key) { Integer value = defaultColors.get(key); if (value == null) { - if (key.equals(key_chats_menuTopShadow) || key.equals(key_chats_menuTopBackground) || key.equals(key_chats_menuTopShadowCats)) { + if (key.equals(key_chats_menuTopShadow) || key.equals(key_chats_menuTopBackground) || key.equals(key_chats_menuTopShadowCats) || key.equals(key_chat_wallpaper_gradient_to2) || key.equals(key_chat_wallpaper_gradient_to3)) { return 0; } return 0xffff0000; @@ -7812,11 +8210,14 @@ public class Theme { return color; } } + if (serviceBitmapShader != null && (key_chat_serviceText.equals(key) || key_chat_serviceLink.equals(key) || key_chat_serviceIcon.equals(key))) { + return 0xffffffff; + } if (currentTheme == defaultTheme) { boolean useDefault; if (myMessagesColorKeys.contains(key)) { useDefault = currentTheme.isDefaultMyMessages(); - } else if (key_chat_wallpaper.equals(key) || key_chat_wallpaper_gradient_to.equals(key)) { + } else if (key_chat_wallpaper.equals(key) || key_chat_wallpaper_gradient_to1.equals(key) || key_chat_wallpaper_gradient_to2.equals(key) || key_chat_wallpaper_gradient_to3.equals(key)) { useDefault = false; } else { useDefault = currentTheme.isDefaultMainAccent(); @@ -7855,7 +8256,7 @@ public class Theme { } public static void setColor(String key, int color, boolean useDefault) { - if (key.equals(key_chat_wallpaper) || key.equals(key_chat_wallpaper_gradient_to) || key.equals(key_windowBackgroundWhite) || key.equals(key_windowBackgroundGray) || key.equals(key_actionBarDefault) || key.equals(key_actionBarDefaultArchived)) { + if (key.equals(key_chat_wallpaper) || key.equals(key_chat_wallpaper_gradient_to1) || key.equals(key_chat_wallpaper_gradient_to2) || key.equals(key_chat_wallpaper_gradient_to3) || key.equals(key_windowBackgroundWhite) || key.equals(key_windowBackgroundGray) || key.equals(key_actionBarDefault) || key.equals(key_actionBarDefaultArchived)) { color = 
0xff000000 | color; } @@ -7871,7 +8272,9 @@ public class Theme { applyChatServiceMessageColor(); break; case key_chat_wallpaper: - case key_chat_wallpaper_gradient_to: + case key_chat_wallpaper_gradient_to1: + case key_chat_wallpaper_gradient_to2: + case key_chat_wallpaper_gradient_to3: case key_chat_wallpaper_gradient_rotation: reloadWallpaper(); break; @@ -7894,7 +8297,9 @@ public class Theme { public static void setThemeWallpaper(ThemeInfo themeInfo, Bitmap bitmap, File path) { currentColors.remove(key_chat_wallpaper); - currentColors.remove(key_chat_wallpaper_gradient_to); + currentColors.remove(key_chat_wallpaper_gradient_to1); + currentColors.remove(key_chat_wallpaper_gradient_to2); + currentColors.remove(key_chat_wallpaper_gradient_to3); currentColors.remove(key_chat_wallpaper_gradient_rotation); themedWallpaperLink = null; themeInfo.setOverrideWallpaper(null); @@ -8033,6 +8438,11 @@ public class Theme { backgroundGradientDisposable.dispose(); backgroundGradientDisposable = null; } + if (wallpaper instanceof MotionBackgroundDrawable) { + previousPhase = ((MotionBackgroundDrawable) wallpaper).getPhase(); + } else { + previousPhase = 0; + } wallpaper = null; themedWallpaper = null; loadWallpaper(); @@ -8061,152 +8471,205 @@ public class Theme { File wallpaperFile; boolean wallpaperMotion; ThemeAccent accent = currentTheme.getAccent(false); - if (accent != null && !hasPreviousTheme) { + if (accent != null) { wallpaperFile = accent.getPathToWallpaper(); wallpaperMotion = accent.patternMotion; } else { wallpaperFile = null; wallpaperMotion = false; } - + int intensity; OverrideWallpaperInfo overrideWallpaper = currentTheme.overrideWallpaper; - Utilities.searchQueue.postRunnable(() -> { - synchronized (wallpaperSync) { - boolean overrideTheme = (!hasPreviousTheme || isApplyingAccent) && overrideWallpaper != null; - if (overrideWallpaper != null) { - isWallpaperMotion = overrideWallpaper != null && overrideWallpaper.isMotion; - isPatternWallpaper = 
overrideWallpaper != null && overrideWallpaper.color != 0 && !overrideWallpaper.isDefault() && !overrideWallpaper.isColor(); - } else { - isWallpaperMotion = currentTheme.isMotion; - isPatternWallpaper = currentTheme.patternBgColor != 0; + if (overrideWallpaper != null) { + intensity = overrideWallpaper != null ? (int) (overrideWallpaper.intensity * 100) : 100; + } else { + intensity = (int) (accent != null ? (accent.patternIntensity * 100) : currentTheme.patternIntensity); + } + + Utilities.searchQueue.postRunnable(wallpaperLoadTask = () -> { + boolean overrideTheme = (!hasPreviousTheme || isApplyingAccent) && overrideWallpaper != null; + if (overrideWallpaper != null) { + isWallpaperMotion = overrideWallpaper != null && overrideWallpaper.isMotion; + isPatternWallpaper = overrideWallpaper != null && overrideWallpaper.color != 0 && !overrideWallpaper.isDefault() && !overrideWallpaper.isColor(); + } else { + isWallpaperMotion = currentTheme.isMotion; + isPatternWallpaper = currentTheme.patternBgColor != 0; + } + patternIntensity = intensity; + if (!overrideTheme) { + Integer backgroundColor = defaultTheme ? null : currentColors.get(key_chat_wallpaper); + Integer gradientToColor3 = currentColors.get(key_chat_wallpaper_gradient_to3); + if (gradientToColor3 == null) { + gradientToColor3 = 0; } - if (!overrideTheme) { - Integer backgroundColor = defaultTheme ? 
null : currentColors.get(key_chat_wallpaper); - if (wallpaperFile != null && wallpaperFile.exists()) { - try { - wallpaper = Drawable.createFromPath(wallpaperFile.getAbsolutePath()); - isWallpaperMotion = wallpaperMotion; - isCustomTheme = true; - isPatternWallpaper = true; - } catch (Throwable e) { - FileLog.e(e); - } - } else if (backgroundColor != null) { - Integer gradientToColor = currentColors.get(key_chat_wallpaper_gradient_to); - Integer rotation = currentColors.get(key_chat_wallpaper_gradient_rotation); - if (rotation == null) { - rotation = 45; - } - if (gradientToColor == null || gradientToColor.equals(backgroundColor)) { - wallpaper = new ColorDrawable(backgroundColor); + Integer gradientToColor2 = currentColors.get(key_chat_wallpaper_gradient_to2); + Integer gradientToColor1 = currentColors.get(key_chat_wallpaper_gradient_to1); + if (wallpaperFile != null && wallpaperFile.exists()) { + try { + if (backgroundColor != null && gradientToColor1 != null && gradientToColor2 != null) { + MotionBackgroundDrawable motionBackgroundDrawable = new MotionBackgroundDrawable(backgroundColor, gradientToColor1, gradientToColor2, gradientToColor3, false); + motionBackgroundDrawable.setPatternBitmap(patternIntensity, BitmapFactory.decodeFile(wallpaperFile.getAbsolutePath())); + wallpaper = motionBackgroundDrawable; } else { - final int[] colors = {backgroundColor, gradientToColor}; - final BackgroundGradientDrawable.Orientation orientation = BackgroundGradientDrawable.getGradientOrientation(rotation); - final BackgroundGradientDrawable backgroundGradientDrawable = new BackgroundGradientDrawable(orientation, colors); - final BackgroundGradientDrawable.Listener listener = new BackgroundGradientDrawable.ListenerAdapter() { - @Override - public void onSizeReady(int width, int height) { - final boolean isOrientationPortrait = AndroidUtilities.displaySize.x <= AndroidUtilities.displaySize.y; - final boolean isGradientPortrait = width <= height; - if (isOrientationPortrait == 
isGradientPortrait) { - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewWallpapper); - } - } - }; - backgroundGradientDisposable = backgroundGradientDrawable.startDithering(BackgroundGradientDrawable.Sizes.ofDeviceScreen(), listener, 100); - wallpaper = backgroundGradientDrawable; + wallpaper = Drawable.createFromPath(wallpaperFile.getAbsolutePath()); } + isWallpaperMotion = wallpaperMotion; isCustomTheme = true; - } else if (themedWallpaperLink != null) { - try { - File pathToWallpaper = new File(ApplicationLoader.getFilesDirFixed(), Utilities.MD5(themedWallpaperLink) + ".wp"); - Bitmap bitmap = loadScreenSizedBitmap(new FileInputStream(pathToWallpaper), 0); - if (bitmap != null) { - themedWallpaper = wallpaper = new BitmapDrawable(bitmap); - isCustomTheme = true; + isPatternWallpaper = true; + } catch (Throwable e) { + FileLog.e(e); + } + } else if (backgroundColor != null) { + Integer rotation = currentColors.get(key_chat_wallpaper_gradient_rotation); + if (rotation == null) { + rotation = 45; + } + if (gradientToColor1 != null && gradientToColor2 != null) { + MotionBackgroundDrawable motionBackgroundDrawable = new MotionBackgroundDrawable(backgroundColor, gradientToColor1, gradientToColor2, gradientToColor3, false); + motionBackgroundDrawable.setPhase(previousPhase); + wallpaper = motionBackgroundDrawable; + } else if (gradientToColor1 == null || gradientToColor1.equals(backgroundColor)) { + wallpaper = new ColorDrawable(backgroundColor); + } else { + final int[] colors = {backgroundColor, gradientToColor1}; + final BackgroundGradientDrawable.Orientation orientation = BackgroundGradientDrawable.getGradientOrientation(rotation); + final BackgroundGradientDrawable backgroundGradientDrawable = new BackgroundGradientDrawable(orientation, colors); + final BackgroundGradientDrawable.Listener listener = new BackgroundGradientDrawable.ListenerAdapter() { + @Override + public void onSizeReady(int width, int height) { + final 
boolean isOrientationPortrait = AndroidUtilities.displaySize.x <= AndroidUtilities.displaySize.y; + final boolean isGradientPortrait = width <= height; + if (isOrientationPortrait == isGradientPortrait) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewWallpapper); + } } - } catch (Exception e) { - FileLog.e(e); + }; + backgroundGradientDisposable = backgroundGradientDrawable.startDithering(BackgroundGradientDrawable.Sizes.ofDeviceScreen(), listener, 100); + wallpaper = backgroundGradientDrawable; + } + isCustomTheme = true; + } else if (themedWallpaperLink != null) { + try { + File pathToWallpaper = new File(ApplicationLoader.getFilesDirFixed(), Utilities.MD5(themedWallpaperLink) + ".wp"); + Bitmap bitmap = loadScreenSizedBitmap(new FileInputStream(pathToWallpaper), 0); + if (bitmap != null) { + themedWallpaper = wallpaper = new BitmapDrawable(bitmap); + isCustomTheme = true; } - } else if (themedWallpaperFileOffset > 0 && (currentTheme.pathToFile != null || currentTheme.assetName != null)) { - try { - File file; - if (currentTheme.assetName != null) { - file = getAssetFile(currentTheme.assetName); - } else { - file = new File(currentTheme.pathToFile); - } - Bitmap bitmap = loadScreenSizedBitmap(new FileInputStream(file), themedWallpaperFileOffset); - if (bitmap != null) { - themedWallpaper = wallpaper = new BitmapDrawable(bitmap); - isCustomTheme = true; - } - } catch (Throwable e) { - FileLog.e(e); + } catch (Exception e) { + FileLog.e(e); + } + } else if (themedWallpaperFileOffset > 0 && (currentTheme.pathToFile != null || currentTheme.assetName != null)) { + try { + File file; + if (currentTheme.assetName != null) { + file = getAssetFile(currentTheme.assetName); + } else { + file = new File(currentTheme.pathToFile); } + Bitmap bitmap = loadScreenSizedBitmap(new FileInputStream(file), themedWallpaperFileOffset); + if (bitmap != null) { + themedWallpaper = wallpaper = new BitmapDrawable(bitmap); + isCustomTheme = true; + 
} + } catch (Throwable e) { + FileLog.e(e); } } - if (wallpaper == null) { - int selectedColor = overrideWallpaper != null ? overrideWallpaper.color : 0; - try { - if (overrideWallpaper == null || overrideWallpaper.isDefault()) { - wallpaper = ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.background_hd); - isCustomTheme = false; - } else if (!overrideWallpaper.isColor() || overrideWallpaper.gradientColor != 0) { - if (selectedColor != 0 && !isPatternWallpaper) { - if (overrideWallpaper.gradientColor != 0) { - final int[] colors = {selectedColor, overrideWallpaper.gradientColor}; - final BackgroundGradientDrawable.Orientation orientation = BackgroundGradientDrawable.getGradientOrientation(overrideWallpaper.rotation); - final BackgroundGradientDrawable backgroundGradientDrawable = new BackgroundGradientDrawable(orientation, colors); - final BackgroundGradientDrawable.Listener listener = new BackgroundGradientDrawable.ListenerAdapter() { - @Override - public void onSizeReady(int width, int height) { - final boolean isOrientationPortrait = AndroidUtilities.displaySize.x <= AndroidUtilities.displaySize.y; - final boolean isGradientPortrait = width <= height; - if (isOrientationPortrait == isGradientPortrait) { - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewWallpapper); - } - } - }; - backgroundGradientDisposable = backgroundGradientDrawable.startDithering(BackgroundGradientDrawable.Sizes.ofDeviceScreen(), listener, 100); - wallpaper = backgroundGradientDrawable; - } else { - wallpaper = new ColorDrawable(selectedColor); - } - } else { - File toFile = new File(ApplicationLoader.getFilesDirFixed(), overrideWallpaper.fileName); - if (toFile.exists()) { - Bitmap bitmap = loadScreenSizedBitmap(new FileInputStream(toFile), 0); - if (bitmap != null) { - wallpaper = new BitmapDrawable(bitmap); + } + if (wallpaper == null) { + int selectedColor = overrideWallpaper != null ? 
overrideWallpaper.color : 0; + try { + if (overrideWallpaper == null || overrideWallpaper.isDefault()) { + wallpaper = createDefaultWallpaper(); + isCustomTheme = false; + } else if (!overrideWallpaper.isColor() || overrideWallpaper.gradientColor1 != 0) { + if (selectedColor != 0 && (!isPatternWallpaper || overrideWallpaper.gradientColor2 != 0)) { + if (overrideWallpaper.gradientColor1 != 0 && overrideWallpaper.gradientColor2 != 0) { + MotionBackgroundDrawable motionBackgroundDrawable = new MotionBackgroundDrawable(overrideWallpaper.color, overrideWallpaper.gradientColor1, overrideWallpaper.gradientColor2, overrideWallpaper.gradientColor3, false); + motionBackgroundDrawable.setPhase(previousPhase); + if (isPatternWallpaper) { + File toFile = new File(ApplicationLoader.getFilesDirFixed(), overrideWallpaper.fileName); + if (toFile.exists()) { + motionBackgroundDrawable.setPatternBitmap((int) (overrideWallpaper.intensity * 100), loadScreenSizedBitmap(new FileInputStream(toFile), 0)); isCustomTheme = true; } } - if (wallpaper == null) { - wallpaper = ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.background_hd); - isCustomTheme = false; + wallpaper = motionBackgroundDrawable; + } else if (overrideWallpaper.gradientColor1 != 0) { + final int[] colors = {selectedColor, overrideWallpaper.gradientColor1}; + final BackgroundGradientDrawable.Orientation orientation = BackgroundGradientDrawable.getGradientOrientation(overrideWallpaper.rotation); + final BackgroundGradientDrawable backgroundGradientDrawable = new BackgroundGradientDrawable(orientation, colors); + final BackgroundGradientDrawable.Listener listener = new BackgroundGradientDrawable.ListenerAdapter() { + @Override + public void onSizeReady(int width, int height) { + final boolean isOrientationPortrait = AndroidUtilities.displaySize.x <= AndroidUtilities.displaySize.y; + final boolean isGradientPortrait = width <= height; + if (isOrientationPortrait == isGradientPortrait) { + 
NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewWallpapper); + } + } + }; + backgroundGradientDisposable = backgroundGradientDrawable.startDithering(BackgroundGradientDrawable.Sizes.ofDeviceScreen(), listener, 100); + wallpaper = backgroundGradientDrawable; + } else { + wallpaper = new ColorDrawable(selectedColor); + } + } else { + File toFile = new File(ApplicationLoader.getFilesDirFixed(), overrideWallpaper.fileName); + if (toFile.exists()) { + Bitmap bitmap = loadScreenSizedBitmap(new FileInputStream(toFile), 0); + if (bitmap != null) { + wallpaper = new BitmapDrawable(bitmap); + isCustomTheme = true; } } + if (wallpaper == null) { + wallpaper = createDefaultWallpaper(); + isCustomTheme = false; + } } - } catch (Throwable throwable) { - //ignore - } - if (wallpaper == null) { - if (selectedColor == 0) { - selectedColor = -2693905; - } - wallpaper = new ColorDrawable(selectedColor); } + } catch (Throwable throwable) { + //ignore + } + if (wallpaper == null) { + if (selectedColor == 0) { + selectedColor = -2693905; + } + wallpaper = new ColorDrawable(selectedColor); } - calcBackgroundColor(wallpaper, 1); - AndroidUtilities.runOnUIThread(() -> { - applyChatServiceMessageColor(); - NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewWallpapper); - }); } + calcBackgroundColor(wallpaper, 1); + Drawable drawable = wallpaper; + AndroidUtilities.runOnUIThread(() -> { + wallpaperLoadTask = null; + applyChatServiceMessageColor(null, null, drawable); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewWallpapper); + }); }); } + public static Drawable createDefaultWallpaper() { + return createDefaultWallpaper(0, 0); + } + + public static Drawable createDefaultWallpaper(int w, int h) { + MotionBackgroundDrawable motionBackgroundDrawable = new MotionBackgroundDrawable(0xffdbddbb, 0xff6ba587, 0xffd5d88d, 0xff88b884, w != 0); + if (w <= 0 || h <= 0) { + w = 
Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y); + h = Math.max(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y); + } + int patternColor; + if (Build.VERSION.SDK_INT >= 29) { + patternColor = 0x57000000; + } else { + patternColor = motionBackgroundDrawable.getPatternColor(); + } + motionBackgroundDrawable.setPatternBitmap(34, SvgHelper.getBitmap(R.raw.default_pattern, w, h, patternColor)); + return motionBackgroundDrawable; + } + private static Bitmap loadScreenSizedBitmap(FileInputStream stream, int offset) { try { BitmapFactory.Options opts = new BitmapFactory.Options(); @@ -8256,14 +8719,17 @@ public class Theme { public static Drawable getThemedWallpaper(boolean thumb, View ownerView) { Integer backgroundColor = currentColors.get(key_chat_wallpaper); File file = null; + MotionBackgroundDrawable motionBackgroundDrawable = null; int offset = 0; if (backgroundColor != null) { - Integer gradientToColor = currentColors.get(key_chat_wallpaper_gradient_to); + Integer gradientToColor1 = currentColors.get(key_chat_wallpaper_gradient_to1); + Integer gradientToColor2 = currentColors.get(key_chat_wallpaper_gradient_to2); + Integer gradientToColor3 = currentColors.get(key_chat_wallpaper_gradient_to3); Integer rotation = currentColors.get(key_chat_wallpaper_gradient_rotation); if (rotation == null) { rotation = 45; } - if (gradientToColor == null) { + if (gradientToColor1 == null) { return new ColorDrawable(backgroundColor); } else { ThemeAccent accent = currentTheme.getAccent(false); @@ -8273,8 +8739,13 @@ public class Theme { file = wallpaperFile; } } - if (file == null) { - final int[] colors = {backgroundColor, gradientToColor}; + if (gradientToColor2 != null) { + motionBackgroundDrawable = new MotionBackgroundDrawable(backgroundColor, gradientToColor1, gradientToColor2, gradientToColor3 != null ? 
gradientToColor3 : 0, true); + if (file == null) { + return motionBackgroundDrawable; + } + } else if (file == null) { + final int[] colors = {backgroundColor, gradientToColor1}; final GradientDrawable.Orientation orientation = BackgroundGradientDrawable.getGradientOrientation(rotation); final BackgroundGradientDrawable backgroundGradientDrawable = new BackgroundGradientDrawable(orientation, colors); final BackgroundGradientDrawable.Sizes sizes; @@ -8336,6 +8807,17 @@ public class Theme { opts.inJustDecodeBounds = false; opts.inSampleSize = scaleFactor; Bitmap bitmap = BitmapFactory.decodeStream(stream, null, opts); + if (motionBackgroundDrawable != null) { + int intensity; + ThemeAccent accent = currentTheme.getAccent(false); + if (accent != null) { + intensity = (int) (accent.patternIntensity * 100); + } else { + intensity = 100; + } + motionBackgroundDrawable.setPatternBitmap(intensity, bitmap); + return motionBackgroundDrawable; + } if (bitmap != null) { return new BitmapDrawable(bitmap); } @@ -8365,13 +8847,27 @@ public class Theme { } public static Drawable getCachedWallpaper() { - synchronized (wallpaperSync) { + Drawable drawable; + if (themedWallpaper != null) { + drawable = themedWallpaper; + } else { + drawable = wallpaper; + } + if (drawable == null && wallpaperLoadTask != null) { + CountDownLatch countDownLatch = new CountDownLatch(1); + Utilities.searchQueue.postRunnable(countDownLatch::countDown); + try { + countDownLatch.await(); + } catch (Exception e) { + FileLog.e(e); + } if (themedWallpaper != null) { - return themedWallpaper; + drawable = themedWallpaper; } else { - return wallpaper; + drawable = wallpaper; } } + return drawable; } public static Drawable getCachedWallpaperNonBlocking() { @@ -8387,7 +8883,8 @@ public class Theme { } public static boolean isPatternWallpaper() { - return isPatternWallpaper; + String selectedBgSlug = getSelectedBackgroundSlug(); + return isPatternWallpaper || "CJz3BZ6YGEYBAAAABboWp6SAv04".equals(selectedBgSlug) || 
"qeZWES8rGVIEAAAARfWlK1lnfiI".equals(selectedBgSlug); } public static AudioVisualizerDrawable getCurrentAudiVisualizerDrawable() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ThemeDescription.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ThemeDescription.java index a13bbb3a6..a61dcad1c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ThemeDescription.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/ThemeDescription.java @@ -389,10 +389,7 @@ public class ThemeDescription { } } if (viewToInvalidate != null && (changeFlags & FLAG_SERVICEBACKGROUND) != 0) { - Drawable background = viewToInvalidate.getBackground(); - if (background != null) { - background.setColorFilter(Theme.colorFilter); - } + } if ((changeFlags & FLAG_IMAGECOLOR) != 0) { if ((changeFlags & FLAG_CHECKTAG) == 0 || checkTag(currentKey, viewToInvalidate)) { @@ -543,10 +540,7 @@ public class ThemeDescription { } } } else if ((changeFlags & FLAG_SERVICEBACKGROUND) != 0) { - Drawable background = child.getBackground(); - if (background != null) { - background.setColorFilter(Theme.colorFilter); - } + } else if ((changeFlags & FLAG_SELECTOR) != 0) { child.setBackgroundDrawable(Theme.getSelectorDrawable(false)); } else if ((changeFlags & FLAG_SELECTORWHITE) != 0) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java index 3b9d14b00..0198a518c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java @@ -13,6 +13,7 @@ import android.annotation.TargetApi; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; +import android.graphics.Canvas; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.drawable.Drawable; @@ -53,6 +54,7 @@ import 
org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.RLottieImageView; import org.telegram.ui.Components.ShareLocationDrawable; import org.telegram.ui.Components.URLSpanNoUnderline; +import org.telegram.ui.Components.voip.CellFlickerDrawable; import java.util.ArrayList; @@ -73,6 +75,7 @@ public class ActionIntroActivity extends BaseFragment implements LocationControl private int[] colors; private int currentType; + private boolean flickerButton; private String currentGroupCreateAddress; private String currentGroupCreateDisplayAddress; @@ -138,7 +141,7 @@ public class ActionIntroActivity extends BaseFragment implements LocationControl imageView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec((int) (height * 0.399f), MeasureSpec.EXACTLY)); titleTextView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.UNSPECIFIED)); descriptionText.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.UNSPECIFIED)); - buttonTextView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(42), MeasureSpec.EXACTLY)); + buttonTextView.measure(MeasureSpec.makeMeasureSpec(width - AndroidUtilities.dp(86), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(42), MeasureSpec.EXACTLY)); } break; } @@ -486,7 +489,26 @@ public class ActionIntroActivity extends BaseFragment implements LocationControl } viewGroup.addView(descriptionText2); - buttonTextView = new TextView(context); + buttonTextView = new TextView(context) { + CellFlickerDrawable cellFlickerDrawable; + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + if (flickerButton) { + if (cellFlickerDrawable == null) { + cellFlickerDrawable = new CellFlickerDrawable(); + cellFlickerDrawable.drawFrame = false; + cellFlickerDrawable.repeatProgress = 2f; + 
} + cellFlickerDrawable.setParentWidth(getMeasuredWidth()); + AndroidUtilities.rectTmp.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + cellFlickerDrawable.draw(canvas, AndroidUtilities.rectTmp, AndroidUtilities.dp(4)); + invalidate(); + } + } + }; + buttonTextView.setPadding(AndroidUtilities.dp(34), 0, AndroidUtilities.dp(34), 0); buttonTextView.setGravity(Gravity.CENTER); buttonTextView.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); @@ -562,6 +584,7 @@ public class ActionIntroActivity extends BaseFragment implements LocationControl descriptionText.setText(LocaleController.getString("ChannelAlertText", R.string.ChannelAlertText)); buttonTextView.setText(LocaleController.getString("ChannelAlertCreate2", R.string.ChannelAlertCreate2)); imageView.playAnimation(); + flickerButton = true; break; } case ACTION_TYPE_NEARBY_LOCATION_ACCESS: { @@ -627,6 +650,11 @@ public class ActionIntroActivity extends BaseFragment implements LocationControl } } + if (flickerButton) { + buttonTextView.setPadding(AndroidUtilities.dp(34), AndroidUtilities.dp(8), AndroidUtilities.dp(34), AndroidUtilities.dp(8)); + buttonTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + } + return fragmentView; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/ContactsAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/ContactsAdapter.java index 46ecf6092..b4122eeda 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/ContactsAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/ContactsAdapter.java @@ -198,7 +198,7 @@ public class ContactsAdapter extends RecyclerListView.SectionsAdapter { } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { HashMap> usersSectionsDict = onlyUsers == 2 ? 
ContactsController.getInstance(currentAccount).usersMutualSectionsDict : ContactsController.getInstance(currentAccount).usersSectionsDict; ArrayList sortedUsersSectionsArray = onlyUsers == 2 ? ContactsController.getInstance(currentAccount).sortedUsersMutualSectionsArray : ContactsController.getInstance(currentAccount).sortedUsersSectionsArray; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java index 28b4661cb..5d1466a4c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java @@ -32,7 +32,6 @@ import org.telegram.messenger.ContactsController; import org.telegram.messenger.DialogObject; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; -import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.R; import org.telegram.messenger.SharedConfig; @@ -171,7 +170,7 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { if (hasHints) { count += 2 + messagesController.hintDialogs.size(); } else if (dialogsType == 0 && messagesController.dialogs_dict.size() <= 10 && folderId == 0 && messagesController.isDialogsEndReached(folderId)) { - if (ContactsController.getInstance(currentAccount).contacts.isEmpty() && ContactsController.getInstance(currentAccount).isLoadingContacts()) { + if (ContactsController.getInstance(currentAccount).contacts.isEmpty() && !ContactsController.getInstance(currentAccount).doneLoadingContacts) { onlineContacts = null; return (currentCount = 0); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java index 04830f06f..ce8ef9051 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsSearchAdapter.java @@ -29,6 +29,7 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.MessagesStorage; +import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.UserConfig; import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; @@ -89,8 +90,8 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { private ArrayList recentSearchObjects = new ArrayList<>(); private LongSparseArray recentSearchObjectsById = new LongSparseArray<>(); - private ArrayList localTipUsers = new ArrayList<>(); private ArrayList localTipDates = new ArrayList<>(); + private boolean localTipArchive; private FilteredSearchView.Delegate filtersDelegate; private int folderId; private int currentItemCount; @@ -117,6 +118,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { void needRemoveHint(int did); void needClearList(); void runResultsEnterAnimation(); + boolean isSelected(long dialogId); } private class CategoryAdapterRecycler extends RecyclerListView.SelectionAdapter { @@ -264,6 +266,8 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { req.limit = 20; req.q = query; req.filter = new TLRPC.TL_inputMessagesFilterEmpty(); + req.flags |= 1; + req.folder_id = folderId; if (query.equals(lastMessagesSearchString) && !searchResultMessages.isEmpty()) { MessageObject lastMessage = searchResultMessages.get(searchResultMessages.size() - 1); req.offset_id = lastMessage.getId(); @@ -346,11 +350,11 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { searchAdapterHelper.clear(); } } - notifyDataSetChanged(); if (delegate != null) { delegate.searchStateChanged(waitingResponseCount > 
0, true); delegate.runResultsEnterAnimation(); } + notifyDataSetChanged(); } } reqId = 0; @@ -562,9 +566,13 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { MessagesStorage.getInstance(currentAccount).localSearch(dialogsType, q, resultArray, resultArrayNames, encUsers, -1); updateSearchResults(resultArray, resultArrayNames, encUsers, searchId); FiltersView.fillTipDates(q, localTipDates); + localTipArchive = false; + if (q.length() >= 3 && (LocaleController.getString("ArchiveSearchFilter", R.string.ArchiveSearchFilter).toLowerCase().startsWith(q) || "archive".startsWith(query))) { + localTipArchive = true; + } AndroidUtilities.runOnUIThread(() -> { if (filtersDelegate != null) { - filtersDelegate.updateFiltersView(false, null, localTipDates); + filtersDelegate.updateFiltersView(false, null, localTipDates, localTipArchive); } }); }); @@ -587,16 +595,41 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { searchWas = true; for (int a = 0; a < result.size(); a++) { Object obj = result.get(a); + int dialogId = 0; if (obj instanceof TLRPC.User) { TLRPC.User user = (TLRPC.User) obj; MessagesController.getInstance(currentAccount).putUser(user, true); + dialogId = user.id; } else if (obj instanceof TLRPC.Chat) { TLRPC.Chat chat = (TLRPC.Chat) obj; MessagesController.getInstance(currentAccount).putChat(chat, true); + dialogId = -chat.id; } else if (obj instanceof TLRPC.EncryptedChat) { TLRPC.EncryptedChat chat = (TLRPC.EncryptedChat) obj; MessagesController.getInstance(currentAccount).putEncryptedChat(chat, true); } + + if (dialogId != 0) { + TLRPC.Dialog dialog = MessagesController.getInstance(currentAccount).dialogs_dict.get(dialogId); + if (dialog == null) { + int finalDialogId = dialogId; + MessagesStorage.getInstance(currentAccount).getDialogFolderId(dialogId, param -> { + if (param != -1) { + TLRPC.Dialog newDialog = new TLRPC.TL_dialog(); + newDialog.id = finalDialogId; + if (param != 0) { + 
newDialog.folder_id = param; + } + if (obj instanceof TLRPC.Chat) { + newDialog.flags = ChatObject.isChannel((TLRPC.Chat) obj) ? 1 : 0; + } + MessagesController.getInstance(currentAccount).dialogs_dict.put(finalDialogId, newDialog); + MessagesController.getInstance(currentAccount).getAllDialogs().add(newDialog); + MessagesController.getInstance(currentAccount).sortDialogs(null); + } + }); + } + } } MessagesController.getInstance(currentAccount).putUsers(encUsers, true); searchResult = result; @@ -622,11 +655,12 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { int waitingResponseCount; - public void searchDialogs(String text) { - if (text != null && text.equals(lastSearchText)) { + public void searchDialogs(String text, int folderId) { + if (text != null && text.equals(lastSearchText) && (folderId == this.folderId || TextUtils.isEmpty(text))) { return; } lastSearchText = text; + this.folderId = folderId; if (searchRunnable != null) { Utilities.searchQueue.cancelRunnable(searchRunnable); searchRunnable = null; @@ -657,8 +691,9 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { searchMessagesInternal(null, 0); notifyDataSetChanged(); localTipDates.clear(); + localTipArchive = false; if (filtersDelegate != null) { - filtersDelegate.updateFiltersView(false, null, localTipDates); + filtersDelegate.updateFiltersView(false, null, localTipDates, localTipArchive); } } else { if (needMessagesSearch != 2 && (query.startsWith("#") && query.length() == 1)) { @@ -931,6 +966,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { switch (holder.getItemViewType()) { case 0: { ProfileSearchCell cell = (ProfileSearchCell) holder.itemView; + long oldDialogId = cell.getDialogId(); TLRPC.User user = null; TLRPC.Chat chat = null; @@ -1017,6 +1053,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { } } } + cell.setChecked(false, false); } boolean savedMessages = false; 
if (user != null && user.id == selfUserId) { @@ -1040,6 +1077,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { } } cell.setData(user != null ? user : chat, encryptedChat, name, username, isRecent, savedMessages); + cell.setChecked(delegate.isSelected(cell.getDialogId()), oldDialogId == cell.getDialogId()); break; } case 1: { @@ -1178,7 +1216,7 @@ public class DialogsSearchAdapter extends RecyclerListView.SelectionAdapter { public void setFiltersDelegate(FilteredSearchView.Delegate filtersDelegate, boolean update) { this.filtersDelegate = filtersDelegate; if (filtersDelegate != null && update) { - filtersDelegate.updateFiltersView(false, null, localTipDates); + filtersDelegate.updateFiltersView(false, null, localTipDates, localTipArchive); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/FiltersView.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/FiltersView.java index ee9f49c24..f526912ca 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/FiltersView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/FiltersView.java @@ -27,7 +27,6 @@ import androidx.recyclerview.widget.RecyclerView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ContactsController; -import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.R; import org.telegram.messenger.UserConfig; @@ -216,7 +215,7 @@ public class FiltersView extends RecyclerListView { return usersFilters.get(i); } - public void setUsersAndDates(ArrayList localUsers, ArrayList dates) { + public void setUsersAndDates(ArrayList localUsers, ArrayList dates, boolean archive) { oldItems.clear(); oldItems.addAll(usersFilters); usersFilters.clear(); @@ -254,6 +253,10 @@ public class FiltersView extends RecyclerListView { usersFilters.add(data); } } + if (archive) { + FiltersView.MediaFilterData filterData = new 
FiltersView.MediaFilterData(R.drawable.chats_archive, R.drawable.chats_archive, LocaleController.getString("ArchiveSearchFilter", R.string.ArchiveSearchFilter), null, FiltersView.FILTER_TYPE_ARCHIVE); + usersFilters.add(filterData); + } if (getAdapter() != null) { UpdateCallback updateCallback = new UpdateCallback(getAdapter()); DiffUtil.calculateDiff(diffUtilsCallback).dispatchUpdatesTo(updateCallback); @@ -672,9 +675,10 @@ public class FiltersView extends RecyclerListView { if (oldItem.chat instanceof TLRPC.Chat && newItem.chat instanceof TLRPC.Chat) { return ((TLRPC.Chat) oldItem.chat).id == ((TLRPC.Chat) newItem.chat).id; } - } - if (oldItem.filterType == FILTER_TYPE_DATE) { + } else if (oldItem.filterType == FILTER_TYPE_DATE) { return oldItem.title.equals(newItem.title); + } else if (oldItem.filterType == FILTER_TYPE_ARCHIVE) { + return true; } } return false; @@ -690,7 +694,8 @@ public class FiltersView extends RecyclerListView { BackupImageView avatarImageView; TextView titleView; - Drawable thumbDrawable; + CombinedDrawable thumbDrawable; + MediaFilterData data; public FilterView(Context context) { super(context); @@ -707,13 +712,28 @@ public class FiltersView extends RecyclerListView { setBackground(Theme.createRoundRectDrawable(AndroidUtilities.dp(28), Theme.getColor(Theme.key_groupcreate_spanBackground))); titleView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); if (thumbDrawable != null) { - Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_backgroundBlue), false); - Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_actionBarIconBlue), true); + if (data.filterType == FILTER_TYPE_ARCHIVE) { + Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_backgroundArchived), false); + Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_actionBarIconBlue), true); + } else { + Theme.setCombinedDrawableColor(thumbDrawable, 
Theme.getColor(Theme.key_avatar_backgroundBlue), false); + Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_actionBarIconBlue), true); + } } } public void setData(MediaFilterData data) { + this.data = data; avatarImageView.getImageReceiver().clearImage(); + if (data.filterType == FILTER_TYPE_ARCHIVE) { + thumbDrawable = Theme.createCircleDrawableWithIcon(AndroidUtilities.dp(32), R.drawable.chats_archive); + thumbDrawable.setIconSize(AndroidUtilities.dp(16), AndroidUtilities.dp(16)); + Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_backgroundArchived), false); + Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_actionBarIconBlue), true); + avatarImageView.setImageDrawable(thumbDrawable); + titleView.setText(data.title); + return; + } thumbDrawable = Theme.createCircleDrawableWithIcon(AndroidUtilities.dp(32), data.iconResFilled); Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_backgroundBlue), false); Theme.setCombinedDrawableColor(thumbDrawable, Theme.getColor(Theme.key_avatar_actionBarIconBlue), true); @@ -775,7 +795,6 @@ public class FiltersView extends RecyclerListView { this.chat = chat; } - public boolean isSameType(MediaFilterData filterData) { if (filterType == filterData.filterType) { return true; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersAdapter.java index 9a6806bd2..972116f28 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersAdapter.java @@ -74,19 +74,19 @@ public class StickersAdapter extends RecyclerListView.SelectionAdapter implement MediaDataController.getInstance(currentAccount).checkStickers(MediaDataController.TYPE_IMAGE); MediaDataController.getInstance(currentAccount).checkStickers(MediaDataController.TYPE_MASK); 
NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.newEmojiSuggestionsAvailable); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadFailed); } public void onDestroy() { NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.newEmojiSuggestionsAvailable); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadFailed); } @Override public void didReceivedNotification(int id, int account, final Object... 
args) { - if (id == NotificationCenter.fileDidLoad || id == NotificationCenter.fileDidFailToLoad) { + if (id == NotificationCenter.fileLoaded || id == NotificationCenter.fileLoadFailed) { if (stickers != null && !stickers.isEmpty() && !stickersToLoad.isEmpty() && visible) { String fileName = (String) args[0]; stickersToLoad.remove(fileName); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersSearchAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersSearchAdapter.java index b32361f22..120c5d91a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersSearchAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/StickersSearchAdapter.java @@ -403,7 +403,7 @@ public class StickersSearchAdapter extends RecyclerListView.SelectionAdapter { case 0: { TLRPC.Document sticker = (TLRPC.Document) cache.get(position); StickerEmojiCell cell = (StickerEmojiCell) holder.itemView; - cell.setSticker(sticker, cacheParent.get(position), positionToEmoji.get(position), false); + cell.setSticker(sticker, null, cacheParent.get(position), positionToEmoji.get(position), false); //cell.setRecent(recentStickers.contains(sticker) || favouriteStickers.contains(sticker)); break; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/AvatarPreviewer.java b/TMessagesProj/src/main/java/org/telegram/ui/AvatarPreviewer.java index 3d9eceb18..552bdf3ee 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/AvatarPreviewer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/AvatarPreviewer.java @@ -383,15 +383,15 @@ public class AvatarPreviewer { @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); - NotificationCenter.getInstance(UserConfig.selectedAccount).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(UserConfig.selectedAccount).addObserver(this, NotificationCenter.FileLoadProgressChanged); + 
NotificationCenter.getInstance(UserConfig.selectedAccount).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(UserConfig.selectedAccount).addObserver(this, NotificationCenter.fileLoadProgressChanged); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); - NotificationCenter.getInstance(UserConfig.selectedAccount).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(UserConfig.selectedAccount).removeObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(UserConfig.selectedAccount).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(UserConfig.selectedAccount).removeObserver(this, NotificationCenter.fileLoadProgressChanged); } @Override @@ -399,12 +399,12 @@ public class AvatarPreviewer { if (!showProgress || TextUtils.isEmpty(videoFileName)) { return; } - if (id == NotificationCenter.fileDidLoad) { + if (id == NotificationCenter.fileLoaded) { final String fileName = (String) args[0]; if (TextUtils.equals(fileName, videoFileName)) { radialProgress.setProgress(1f, true); } - } else if (id == NotificationCenter.FileLoadProgressChanged) { + } else if (id == NotificationCenter.fileLoadProgressChanged) { String fileName = (String) args[0]; if (TextUtils.equals(fileName, videoFileName)) { if (radialProgress != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CallLogActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CallLogActivity.java index 6c9578065..f2ee4b664 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CallLogActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CallLogActivity.java @@ -17,8 +17,10 @@ import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.text.SpannableString; +import android.text.TextUtils; import android.text.style.ImageSpan; import android.util.SparseArray; +import 
android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; @@ -30,12 +32,11 @@ import android.widget.ImageView; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; -import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; -import org.telegram.messenger.UserConfig; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.ActionBar; @@ -47,22 +48,26 @@ import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Cells.CheckBoxCell; +import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.LoadingCell; import org.telegram.ui.Cells.LocationCell; import org.telegram.ui.Cells.ProfileSearchCell; +import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Components.CheckBox2; import org.telegram.ui.Components.CombinedDrawable; -import org.telegram.ui.Components.EmptyTextProgressView; import org.telegram.ui.Components.FlickerLoadingView; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.NumberTextView; +import org.telegram.ui.Components.ProgressButton; +import org.telegram.ui.Components.RLottieImageView; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.voip.VoIPHelper; import java.util.ArrayList; import java.util.Iterator; +import androidx.annotation.Nullable; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; @@ -85,6 +90,10 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. 
private boolean firstLoaded; private boolean endReached; + private ProgressButton waitingForLoadButton; + + private ArrayList activeGroupCalls; + private ArrayList selectedIds = new ArrayList<>(); private int prevPosition; @@ -98,6 +107,9 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. private Drawable redDrawable; private ImageSpan iconOut, iconIn, iconMissed; private TLRPC.User lastCallUser; + private TLRPC.Chat lastCallChat; + + private Integer waitingForCallChatId; private boolean openTransitionStarted; @@ -108,10 +120,90 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. private static final int delete_all_calls = 1; private static final int delete = 2; + private static class EmptyTextProgressView extends FrameLayout { + + private TextView emptyTextView1; + private TextView emptyTextView2; + private View progressView; + private RLottieImageView imageView; + + public EmptyTextProgressView(Context context) { + this(context, null); + } + + public EmptyTextProgressView(Context context, View progressView) { + super(context); + + addView(progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + this.progressView = progressView; + + imageView = new RLottieImageView(context); + imageView.setAnimation(R.raw.utyan_call, 120, 120); + imageView.setAutoRepeat(false); + addView(imageView, LayoutHelper.createFrame(140, 140, Gravity.CENTER, 52, 4, 52, 60)); + imageView.setOnClickListener(v -> { + if (!imageView.isPlaying()) { + imageView.setProgress(0.0f); + imageView.playAnimation(); + } + }); + + emptyTextView1 = new TextView(context); + emptyTextView1.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + emptyTextView1.setText(LocaleController.getString("NoRecentCalls", R.string.NoRecentCalls)); + emptyTextView1.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); + emptyTextView1.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + 
emptyTextView1.setGravity(Gravity.CENTER); + addView(emptyTextView1, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 17, 40, 17, 0)); + + emptyTextView2 = new TextView(context); + String help = LocaleController.getString("NoRecentCallsInfo", R.string.NoRecentCallsInfo); + if (AndroidUtilities.isTablet() && !AndroidUtilities.isSmallTablet()) { + help = help.replace('\n', ' '); + } + emptyTextView2.setText(help); + emptyTextView2.setTextColor(Theme.getColor(Theme.key_emptyListPlaceholder)); + emptyTextView2.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + emptyTextView2.setGravity(Gravity.CENTER); + emptyTextView2.setLineSpacing(AndroidUtilities.dp(2), 1); + addView(emptyTextView2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 17, 80, 17, 0)); + + progressView.setAlpha(0f); + imageView.setAlpha(0f); + emptyTextView1.setAlpha(0f); + emptyTextView2.setAlpha(0f); + + setOnTouchListener((v, event) -> true); + } + + public void showProgress() { + imageView.animate().alpha(0f).setDuration(150).start(); + emptyTextView1.animate().alpha(0f).setDuration(150).start(); + emptyTextView2.animate().alpha(0f).setDuration(150).start(); + progressView.animate().alpha(1f).setDuration(150).start(); + } + + public void showTextView() { + imageView.animate().alpha(1f).setDuration(150).start(); + emptyTextView1.animate().alpha(1f).setDuration(150).start(); + emptyTextView2.animate().alpha(1f).setDuration(150).start(); + progressView.animate().alpha(0f).setDuration(150).start(); + imageView.playAnimation(); + } + + @Override + public boolean hasOverlappingRendering() { + return false; + } + } + @Override @SuppressWarnings("unchecked") public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.didReceiveNewMessages && firstLoaded) { + if (id == NotificationCenter.didReceiveNewMessages) { + if (!firstLoaded) { + return; + } boolean scheduled = (Boolean) args[2]; if (scheduled) { return; @@ -120,8 +212,8 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. for (MessageObject msg : arr) { if (msg.messageOwner.action instanceof TLRPC.TL_messageActionPhoneCall) { int fromId = msg.getFromChatId(); - int userID = fromId == UserConfig.getInstance(currentAccount).getClientUserId() ? msg.messageOwner.peer_id.user_id : fromId; - int callType = fromId == UserConfig.getInstance(currentAccount).getClientUserId() ? TYPE_OUT : TYPE_IN; + int userID = fromId == getUserConfig().getClientUserId() ? msg.messageOwner.peer_id.user_id : fromId; + int callType = fromId == getUserConfig().getClientUserId() ? TYPE_OUT : TYPE_IN; TLRPC.PhoneCallDiscardReason reason = msg.messageOwner.action.reason; if (callType == TYPE_IN && (reason instanceof TLRPC.TL_phoneCallDiscardReasonMissed || reason instanceof TLRPC.TL_phoneCallDiscardReasonBusy)) { callType = TYPE_MISSED; @@ -137,7 +229,7 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. CallLogRow row = new CallLogRow(); row.calls = new ArrayList<>(); row.calls.add(msg.messageOwner); - row.user = MessagesController.getInstance(currentAccount).getUser(userID); + row.user = getMessagesController().getUser(userID); row.type = callType; row.video = msg.isVideoCall(); calls.add(0, row); @@ -147,7 +239,10 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. if (otherItem != null) { otherItem.setVisibility(calls.isEmpty() ? 
View.GONE : View.VISIBLE); } - } else if (id == NotificationCenter.messagesDeleted && firstLoaded) { + } else if (id == NotificationCenter.messagesDeleted) { + if (!firstLoaded) { + return; + } boolean scheduled = (Boolean) args[2]; if (scheduled) { return; @@ -171,16 +266,48 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. if (didChange && listViewAdapter != null) { listViewAdapter.notifyDataSetChanged(); } + } else if (id == NotificationCenter.activeGroupCallsUpdated) { + activeGroupCalls = getMessagesController().getActiveGroupCalls(); + if (listViewAdapter != null) { + listViewAdapter.notifyDataSetChanged(); + } + } else if (id == NotificationCenter.chatInfoDidLoad) { + if (waitingForCallChatId == null) { + return; + } + TLRPC.ChatFull chatFull = (TLRPC.ChatFull) args[0]; + if (chatFull.id == waitingForCallChatId) { + ChatObject.Call groupCall = getMessagesController().getGroupCall(waitingForCallChatId, true); + if (groupCall != null) { + if (waitingForLoadButton != null) { + waitingForLoadButton.setDrawProgress(false, false); + } + VoIPHelper.startCall(lastCallChat, null, null, false, getParentActivity(), CallLogActivity.this, getAccountInstance()); + waitingForCallChatId = null; + } + } + } else if (id == NotificationCenter.groupCallUpdated) { + if (waitingForCallChatId == null) { + return; + } + Integer chatId = (Integer) args[0]; + if (waitingForCallChatId.equals(chatId)) { + if (waitingForLoadButton != null) { + waitingForLoadButton.setDrawProgress(false, false); + } + VoIPHelper.startCall(lastCallChat, null, null, false, getParentActivity(), CallLogActivity.this, getAccountInstance()); + waitingForCallChatId = null; + } } } - private class CustomCell extends FrameLayout { + private class CallCell extends FrameLayout { private ImageView imageView; private ProfileSearchCell profileSearchCell; private CheckBox2 checkBox; - public CustomCell(Context context) { + public CallCell(Context context) { super(context); 
setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); @@ -195,7 +322,11 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_featuredStickers_addButton), PorterDuff.Mode.MULTIPLY)); imageView.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector), 1)); imageView.setScaleType(ImageView.ScaleType.CENTER); - imageView.setOnClickListener(callBtnClickListener); + imageView.setOnClickListener(v -> { + CallLogRow row = (CallLogRow) v.getTag(); + TLRPC.UserFull userFull = getMessagesController().getUserFull(row.user.id); + VoIPHelper.startCall(lastCallUser = row.user, row.video, row.video || userFull != null && userFull.video_calls_available, getParentActivity(), null, getAccountInstance()); + }); imageView.setContentDescription(LocaleController.getString("Call", R.string.Call)); addView(imageView, LayoutHelper.createFrame(48, 48, (LocaleController.isRTL ? Gravity.LEFT : Gravity.RIGHT) | Gravity.CENTER_VERTICAL, 8, 0, 8, 0)); @@ -214,22 +345,60 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. 
} } - private View.OnClickListener callBtnClickListener = new View.OnClickListener() { - @Override - public void onClick(View v) { - CallLogRow row = (CallLogRow) v.getTag(); - TLRPC.UserFull userFull = getMessagesController().getUserFull(row.user.id); - VoIPHelper.startCall(lastCallUser = row.user, row.video, row.video || userFull != null && userFull.video_calls_available, getParentActivity(), null, getAccountInstance()); + private class GroupCallCell extends FrameLayout { + + private ProfileSearchCell profileSearchCell; + private ProgressButton button; + + public GroupCallCell(Context context) { + super(context); + + setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + + String text = LocaleController.getString("VoipChatJoin", R.string.VoipChatJoin); + button = new ProgressButton(context); + int width = (int) Math.ceil(button.getPaint().measureText(text)); + + profileSearchCell = new ProfileSearchCell(context); + profileSearchCell.setPadding(LocaleController.isRTL ? (AndroidUtilities.dp(28 + 16) + width) : 0, 0, LocaleController.isRTL ? 
0 : (AndroidUtilities.dp(28 + 16) + width), 0); + profileSearchCell.setSublabelOffset(0, -AndroidUtilities.dp(1)); + addView(profileSearchCell, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + + button.setText(text); + button.setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + button.setProgressColor(Theme.getColor(Theme.key_featuredStickers_buttonProgress)); + button.setBackgroundRoundRect(Theme.getColor(Theme.key_featuredStickers_addButton), Theme.getColor(Theme.key_featuredStickers_addButtonPressed)); + addView(button, LayoutHelper.createFrameRelatively(LayoutHelper.WRAP_CONTENT, 28, Gravity.TOP | Gravity.END, 0, 18, 14, 0)); + button.setOnClickListener(v -> { + if (waitingForLoadButton != null) { + waitingForLoadButton.setDrawProgress(false, true); + } + Integer tag = (Integer) v.getTag(); + ChatObject.Call call = getMessagesController().getGroupCall(tag, false); + lastCallChat = getMessagesController().getChat(tag); + if (call != null) { + VoIPHelper.startCall(lastCallChat, null, null, false, getParentActivity(), CallLogActivity.this, getAccountInstance()); + } else { + waitingForCallChatId = tag; + getMessagesController().loadFullChat(tag, 0, true); + button.setDrawProgress(true, true); + waitingForLoadButton = button; + } + }); } - }; + } @Override public boolean onFragmentCreate() { super.onFragmentCreate(); getCalls(0, 50); + activeGroupCalls = getMessagesController().getActiveGroupCalls(); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.didReceiveNewMessages); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messagesDeleted); + getNotificationCenter().addObserver(this, NotificationCenter.didReceiveNewMessages); + getNotificationCenter().addObserver(this, NotificationCenter.messagesDeleted); + getNotificationCenter().addObserver(this, NotificationCenter.activeGroupCallsUpdated); + getNotificationCenter().addObserver(this, 
NotificationCenter.chatInfoDidLoad); + getNotificationCenter().addObserver(this, NotificationCenter.groupCallUpdated); return true; } @@ -237,8 +406,11 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. @Override public void onFragmentDestroy() { super.onFragmentDestroy(); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.didReceiveNewMessages); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messagesDeleted); + getNotificationCenter().removeObserver(this, NotificationCenter.didReceiveNewMessages); + getNotificationCenter().removeObserver(this, NotificationCenter.messagesDeleted); + getNotificationCenter().removeObserver(this, NotificationCenter.activeGroupCallsUpdated); + getNotificationCenter().removeObserver(this, NotificationCenter.chatInfoDidLoad); + getNotificationCenter().removeObserver(this, NotificationCenter.groupCallUpdated); } @Override @@ -290,7 +462,6 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. flickerLoadingView.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); flickerLoadingView.showDate(false); emptyView = new EmptyTextProgressView(context, flickerLoadingView); - emptyView.setText(LocaleController.getString("NoCallLog", R.string.NoCallLog)); frameLayout.addView(emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); listView = new RecyclerListView(context); @@ -301,26 +472,31 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. 
frameLayout.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); listView.setOnItemClickListener((view, position) -> { - if (position < 0 || position >= calls.size()) { - return; - } - CallLogRow row = calls.get(position); - if (actionBar.isActionModeShowed()) { - addOrRemoveSelectedDialog(row.calls, (CustomCell) view); - } else { + if (view instanceof CallCell) { + CallLogRow row = calls.get(position - listViewAdapter.callsStartRow); + if (actionBar.isActionModeShowed()) { + addOrRemoveSelectedDialog(row.calls, (CallCell) view); + } else { + Bundle args = new Bundle(); + args.putInt("user_id", row.user.id); + args.putInt("message_id", row.calls.get(0).id); + getNotificationCenter().postNotificationName(NotificationCenter.closeChats); + presentFragment(new ChatActivity(args), true); + } + } else if (view instanceof GroupCallCell) { + Integer id = activeGroupCalls.get(position - listViewAdapter.activeStartRow); Bundle args = new Bundle(); - args.putInt("user_id", row.user.id); - args.putInt("message_id", row.calls.get(0).id); - NotificationCenter.getInstance(currentAccount).postNotificationName(NotificationCenter.closeChats); + args.putInt("chat_id", id); + getNotificationCenter().postNotificationName(NotificationCenter.closeChats); presentFragment(new ChatActivity(args), true); } }); listView.setOnItemLongClickListener((view, position) -> { - if (position < 0 || position >= calls.size()) { - return false; + if (view instanceof CallCell) { + addOrRemoveSelectedDialog(calls.get(position - listViewAdapter.callsStartRow).calls, (CallCell) view); + return true; } - addOrRemoveSelectedDialog(calls.get(position).calls, (CustomCell) view); - return true; + return false; }); listView.setOnScrollListener(new RecyclerView.OnScrollListener() { @Override @@ -482,8 +658,10 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. 
actionBar.hideActionMode(); selectedIds.clear(); for (int a = 0, N = listView.getChildCount(); a < N; a++) { - CustomCell cell = (CustomCell) listView.getChildAt(a); - cell.setChecked(false, animated); + View child = listView.getChildAt(a); + if (child instanceof CallCell) { + ((CallCell) child).setChecked(false, animated); + } } } @@ -512,7 +690,7 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. actionModeViews.add(actionMode.addItemWithWidth(delete, R.drawable.msg_delete, AndroidUtilities.dp(54), LocaleController.getString("Delete", R.string.Delete))); } - private boolean addOrRemoveSelectedDialog(ArrayList messages, CustomCell cell) { + private boolean addOrRemoveSelectedDialog(ArrayList messages, CallCell cell) { if (messages.isEmpty()) { return false; } @@ -591,8 +769,8 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. req.filter = new TLRPC.TL_inputMessagesFilterPhoneCalls(); req.q = ""; req.offset_id = max_id; - int reqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { - int oldCount = calls.size(); + int reqId = getConnectionsManager().sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + int oldCount = Math.max(listViewAdapter.callsStartRow, 0) + calls.size(); if (error == null) { SparseArray users = new SparseArray<>(); TLRPC.messages_Messages msgs = (TLRPC.messages_Messages) response; @@ -607,13 +785,13 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. if (msg.action == null || msg.action instanceof TLRPC.TL_messageActionHistoryClear) { continue; } - int callType = MessageObject.getFromChatId(msg) == UserConfig.getInstance(currentAccount).getClientUserId() ? TYPE_OUT : TYPE_IN; + int callType = MessageObject.getFromChatId(msg) == getUserConfig().getClientUserId() ? 
TYPE_OUT : TYPE_IN; TLRPC.PhoneCallDiscardReason reason = msg.action.reason; if (callType == TYPE_IN && (reason instanceof TLRPC.TL_phoneCallDiscardReasonMissed || reason instanceof TLRPC.TL_phoneCallDiscardReasonBusy)) { callType = TYPE_MISSED; } int fromId = MessageObject.getFromChatId(msg); - int userID = fromId == UserConfig.getInstance(currentAccount).getClientUserId() ? msg.peer_id.user_id : fromId; + int userID = fromId == getUserConfig().getClientUserId() ? msg.peer_id.user_id : fromId; if (currentRow == null || currentRow.user.id != userID || currentRow.type != callType) { if (currentRow != null && !calls.contains(currentRow)) { calls.add(currentRow); @@ -647,7 +825,7 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. listViewAdapter.notifyDataSetChanged(); } }), ConnectionsManager.RequestFlagFailOnServerErrors); - ConnectionsManager.getInstance(currentAccount).bindRequestToGuid(reqId, classGuid); + getConnectionsManager().bindRequestToGuid(reqId, classGuid); } @Override @@ -660,7 +838,7 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. @Override public void onRequestPermissionsResultFragment(int requestCode, String[] permissions, int[] grantResults) { - if (requestCode == 101 || requestCode == 102) { + if (requestCode == 101 || requestCode == 102 || requestCode == 103) { boolean allGranted = true; for (int a = 0; a < grantResults.length; a++) { if (grantResults[a] != PackageManager.PERMISSION_GRANTED) { @@ -669,8 +847,12 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. } } if (grantResults.length > 0 && allGranted) { - TLRPC.UserFull userFull = lastCallUser != null ? 
getMessagesController().getUserFull(lastCallUser.id) : null; - VoIPHelper.startCall(lastCallUser, requestCode == 102, requestCode == 102 || userFull != null && userFull.video_calls_available, getParentActivity(), null, getAccountInstance()); + if (requestCode == 103) { + VoIPHelper.startCall(lastCallChat, null, null, false, getParentActivity(), CallLogActivity.this, getAccountInstance()); + } else { + TLRPC.UserFull userFull = lastCallUser != null ? getMessagesController().getUserFull(lastCallUser.id) : null; + VoIPHelper.startCall(lastCallUser, requestCode == 102, requestCode == 102 || userFull != null && userFull.video_calls_available, getParentActivity(), null, getAccountInstance()); + } } else { VoIPHelper.permissionDenied(getParentActivity(), null, requestCode); } @@ -680,25 +862,120 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. private class ListAdapter extends RecyclerListView.SelectionAdapter { private Context mContext; + private int activeHeaderRow; + private int callsHeaderRow; + private int activeStartRow; + private int activeEndRow; + private int callsStartRow; + private int callsEndRow; + private int loadingCallsRow; + private int sectionRow; + private int rowsCount; public ListAdapter(Context context) { mContext = context; } + private void updateRows() { + activeHeaderRow = -1; + callsHeaderRow = -1; + activeStartRow = -1; + activeEndRow = -1; + callsStartRow = -1; + callsEndRow = -1; + loadingCallsRow = -1; + sectionRow = -1; + rowsCount = 0; + + if (!activeGroupCalls.isEmpty()) { + activeHeaderRow = rowsCount++; + activeStartRow = rowsCount; + rowsCount += activeGroupCalls.size(); + activeEndRow = rowsCount; + } + if (!calls.isEmpty()) { + if (activeHeaderRow != -1) { + sectionRow = rowsCount++; + callsHeaderRow = rowsCount++; + } + callsStartRow = rowsCount; + rowsCount += calls.size(); + callsEndRow = rowsCount; + if (!endReached) { + loadingCallsRow = rowsCount++; + } + } + } + + @Override + public void 
notifyDataSetChanged() { + updateRows(); + super.notifyDataSetChanged(); + } + + @Override + public void notifyItemChanged(int position) { + updateRows(); + super.notifyItemChanged(position); + } + + @Override + public void notifyItemChanged(int position, @Nullable Object payload) { + updateRows(); + super.notifyItemChanged(position, payload); + } + + @Override + public void notifyItemRangeChanged(int positionStart, int itemCount) { + updateRows(); + super.notifyItemRangeChanged(positionStart, itemCount); + } + + @Override + public void notifyItemRangeChanged(int positionStart, int itemCount, @Nullable Object payload) { + updateRows(); + super.notifyItemRangeChanged(positionStart, itemCount, payload); + } + + @Override + public void notifyItemInserted(int position) { + updateRows(); + super.notifyItemInserted(position); + } + + @Override + public void notifyItemMoved(int fromPosition, int toPosition) { + updateRows(); + super.notifyItemMoved(fromPosition, toPosition); + } + + @Override + public void notifyItemRangeInserted(int positionStart, int itemCount) { + updateRows(); + super.notifyItemRangeInserted(positionStart, itemCount); + } + + @Override + public void notifyItemRemoved(int position) { + updateRows(); + super.notifyItemRemoved(position); + } + + @Override + public void notifyItemRangeRemoved(int positionStart, int itemCount) { + updateRows(); + super.notifyItemRangeRemoved(positionStart, itemCount); + } + @Override public boolean isEnabled(RecyclerView.ViewHolder holder) { - return holder.getAdapterPosition() != calls.size(); + int type = holder.getItemViewType(); + return type == 0 || type == 4; } @Override public int getItemCount() { - int count = calls.size(); - if (!calls.isEmpty()) { - if (!endReached) { - count++; - } - } - return count; + return rowsCount; } @Override @@ -706,9 +983,7 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. 
View view; switch (viewType) { case 0: - CustomCell cell = new CustomCell(mContext); - view = cell; - view.setTag(new ViewItem(cell.imageView, cell.profileSearchCell)); + view = new CallCell(mContext); break; case 1: FlickerLoadingView flickerLoadingView = new FlickerLoadingView(mContext); @@ -719,79 +994,131 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. view = flickerLoadingView; break; case 2: - default: view = new TextInfoPrivacyCell(mContext); view.setBackgroundDrawable(Theme.getThemedDrawable(mContext, R.drawable.greydivider_bottom, Theme.key_windowBackgroundGrayShadow)); break; + case 3: + view = new HeaderCell(mContext); + view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + break; + case 4: + view = new GroupCallCell(mContext); + break; + case 5: + default: + view = new ShadowSectionCell(mContext); } return new RecyclerListView.Holder(view); } @Override public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { - if (holder.itemView instanceof CustomCell) { - CallLogRow row = calls.get(holder.getAdapterPosition()); - ((CustomCell) holder.itemView).setChecked(isSelected(row.calls), false); + if (holder.itemView instanceof CallCell) { + CallLogRow row = calls.get(holder.getAdapterPosition() - callsStartRow); + ((CallCell) holder.itemView).setChecked(isSelected(row.calls), false); + } else if (holder.itemView instanceof GroupCallCell) { + GroupCallCell cell = (GroupCallCell) holder.itemView; + TLRPC.Chat chat = cell.profileSearchCell.getChat(); + if (waitingForCallChatId != null && chat.id == waitingForCallChatId) { + waitingForLoadButton = cell.button; + cell.button.setDrawProgress(true, false); + } else { + cell.button.setDrawProgress(false, false); + } } } @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { - if (holder.getItemViewType() == 0) { - CustomCell customCell = (CustomCell) holder.itemView; - ViewItem viewItem = (ViewItem) customCell.getTag(); 
- CallLogRow row = calls.get(position); - customCell.imageView.setImageResource(row.video ? R.drawable.profile_video : R.drawable.profile_phone); - ProfileSearchCell cell = viewItem.cell; - TLRPC.Message last = row.calls.get(0); - SpannableString subtitle; - String ldir = LocaleController.isRTL ? "\u202b" : ""; - if (row.calls.size() == 1) { - subtitle = new SpannableString(ldir + " " + LocaleController.formatDateCallLog(last.date)); - } else { - subtitle = new SpannableString(String.format(ldir + " (%d) %s", row.calls.size(), LocaleController.formatDateCallLog(last.date))); + switch (holder.getItemViewType()) { + case 0: { + position -= callsStartRow; + CallLogRow row = calls.get(position); + + CallCell cell = (CallCell) holder.itemView; + cell.imageView.setImageResource(row.video ? R.drawable.profile_video : R.drawable.profile_phone); + TLRPC.Message last = row.calls.get(0); + SpannableString subtitle; + String ldir = LocaleController.isRTL ? "\u202b" : ""; + if (row.calls.size() == 1) { + subtitle = new SpannableString(ldir + " " + LocaleController.formatDateCallLog(last.date)); + } else { + subtitle = new SpannableString(String.format(ldir + " (%d) %s", row.calls.size(), LocaleController.formatDateCallLog(last.date))); + } + switch (row.type) { + case TYPE_OUT: + subtitle.setSpan(iconOut, ldir.length(), ldir.length() + 1, 0); + //cell.setContentDescription(LocaleController.getString("CallMessageOutgoing", R.string.CallMessageOutgoing)); + break; + case TYPE_IN: + subtitle.setSpan(iconIn, ldir.length(), ldir.length() + 1, 0); + //cell.setContentDescription(LocaleController.getString("CallMessageIncoming", R.string.CallMessageIncoming)); + break; + case TYPE_MISSED: + subtitle.setSpan(iconMissed, ldir.length(), ldir.length() + 1, 0); + //cell.setContentDescription(LocaleController.getString("CallMessageIncomingMissed", R.string.CallMessageIncomingMissed)); + break; + } + cell.profileSearchCell.setData(row.user, null, null, subtitle, false, false); + 
cell.profileSearchCell.useSeparator = position != calls.size() - 1 || !endReached; + cell.imageView.setTag(row); + break; } - switch (row.type) { - case TYPE_OUT: - subtitle.setSpan(iconOut, ldir.length(), ldir.length() + 1, 0); - //cell.setContentDescription(LocaleController.getString("CallMessageOutgoing", R.string.CallMessageOutgoing)); - break; - case TYPE_IN: - subtitle.setSpan(iconIn, ldir.length(), ldir.length() + 1, 0); - //cell.setContentDescription(LocaleController.getString("CallMessageIncoming", R.string.CallMessageIncoming)); - break; - case TYPE_MISSED: - subtitle.setSpan(iconMissed, ldir.length(), ldir.length() + 1, 0); - //cell.setContentDescription(LocaleController.getString("CallMessageIncomingMissed", R.string.CallMessageIncomingMissed)); - break; + case 3: { + HeaderCell cell = (HeaderCell) holder.itemView; + if (position == activeHeaderRow) { + cell.setText(LocaleController.getString("VoipChatActiveChats", R.string.VoipChatActiveChats)); + } else if (position == callsHeaderRow) { + cell.setText(LocaleController.getString("VoipChatRecentCalls", R.string.VoipChatRecentCalls)); + } + break; + } + case 4: { + position -= activeStartRow; + Integer chatId = activeGroupCalls.get(position); + TLRPC.Chat chat = getMessagesController().getChat(chatId); + GroupCallCell cell = (GroupCallCell) holder.itemView; + cell.button.setTag(chat.id); + String text; + if (ChatObject.isChannel(chat) && !chat.megagroup) { + if (TextUtils.isEmpty(chat.username)) { + text = LocaleController.getString("ChannelPrivate", R.string.ChannelPrivate).toLowerCase(); + } else { + text = LocaleController.getString("ChannelPublic", R.string.ChannelPublic).toLowerCase(); + } + } else { + if (chat.has_geo) { + text = LocaleController.getString("MegaLocation", R.string.MegaLocation); + } else if (TextUtils.isEmpty(chat.username)) { + text = LocaleController.getString("MegaPrivate", R.string.MegaPrivate).toLowerCase(); + } else { + text = LocaleController.getString("MegaPublic", 
R.string.MegaPublic).toLowerCase(); + } + } + cell.profileSearchCell.setData(chat, null, null, text, false, false); + cell.profileSearchCell.useSeparator = position != activeGroupCalls.size() - 1 || !endReached; + break; } - cell.setData(row.user, null, null, subtitle, false, false); - cell.useSeparator = position != calls.size() - 1 || !endReached; - viewItem.button.setTag(row); } } @Override public int getItemViewType(int i) { - if (i < calls.size()) { + if (i == activeHeaderRow || i == callsHeaderRow) { + return 3; + } else if (i >= callsStartRow && i < callsEndRow) { return 0; - } else if (!endReached && i == calls.size()) { + } else if (i >= activeStartRow && i < activeEndRow) { + return 4; + } else if (i == loadingCallsRow) { return 1; + } else if (i == sectionRow) { + return 5; } return 2; } } - private static class ViewItem { - public ProfileSearchCell cell; - public ImageView button; - - public ViewItem(ImageView button, ProfileSearchCell cell) { - this.button = button; - this.cell = cell; - } - } - private static class CallLogRow { public TLRPC.User user; public ArrayList calls; @@ -799,72 +1126,6 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. 
public boolean video; } - @Override - public ArrayList getThemeDescriptions() { - ArrayList themeDescriptions = new ArrayList<>(); - - ThemeDescription.ThemeDescriptionDelegate cellDelegate = () -> { - if (listView != null) { - int count = listView.getChildCount(); - for (int a = 0; a < count; a++) { - View child = listView.getChildAt(a); - if (child instanceof CustomCell) { - CustomCell cell = (CustomCell) child; - cell.profileSearchCell.update(0); - } - } - } - }; - - - themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_CELLBACKGROUNDCOLOR, new Class[]{LocationCell.class, CustomCell.class}, null, null, null, Theme.key_windowBackgroundWhite)); - themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_windowBackgroundGray)); - - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_actionBarDefault)); - themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_LISTGLOWCOLOR, null, null, null, null, Theme.key_actionBarDefault)); - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_ITEMSCOLOR, null, null, null, null, Theme.key_actionBarDefaultIcon)); - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_TITLECOLOR, null, null, null, null, Theme.key_actionBarDefaultTitle)); - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SELECTORCOLOR, null, null, null, null, Theme.key_actionBarDefaultSelector)); - - themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_SELECTOR, null, null, null, null, Theme.key_listSelector)); - - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{View.class}, Theme.dividerPaint, null, null, Theme.key_divider)); - - themeDescriptions.add(new ThemeDescription(emptyView, ThemeDescription.FLAG_TEXTCOLOR, null, null, null, null, 
Theme.key_emptyListPlaceholder)); - themeDescriptions.add(new ThemeDescription(emptyView, ThemeDescription.FLAG_PROGRESSBAR, null, null, null, null, Theme.key_progressCircle)); - - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{LoadingCell.class}, new String[]{"progressBar"}, null, null, null, Theme.key_progressCircle)); - - themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{TextInfoPrivacyCell.class}, null, null, null, Theme.key_windowBackgroundGrayShadow)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{TextInfoPrivacyCell.class}, new String[]{"textView"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText4)); - - themeDescriptions.add(new ThemeDescription(floatingButton, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_chats_actionIcon)); - themeDescriptions.add(new ThemeDescription(floatingButton, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_chats_actionBackground)); - themeDescriptions.add(new ThemeDescription(floatingButton, ThemeDescription.FLAG_BACKGROUNDFILTER | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, null, null, null, null, Theme.key_chats_actionPressedBackground)); - - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CustomCell.class}, new String[]{"imageView"}, null, null, null, Theme.key_featuredStickers_addButton)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CustomCell.class}, null, new Drawable[]{Theme.dialogs_verifiedCheckDrawable}, null, Theme.key_chats_verifiedCheck)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CustomCell.class}, null, new Drawable[]{Theme.dialogs_verifiedDrawable}, null, Theme.key_chats_verifiedBackground)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CustomCell.class}, Theme.dialogs_offlinePaint, null, null, Theme.key_windowBackgroundWhiteGrayText3)); - 
themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CustomCell.class}, Theme.dialogs_onlinePaint, null, null, Theme.key_windowBackgroundWhiteBlueText3)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CustomCell.class}, null, new Paint[]{Theme.dialogs_namePaint[0], Theme.dialogs_namePaint[1], Theme.dialogs_searchNamePaint}, null, null, Theme.key_chats_name)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CustomCell.class}, null, new Paint[]{Theme.dialogs_nameEncryptedPaint[0], Theme.dialogs_nameEncryptedPaint[1], Theme.dialogs_searchNameEncryptedPaint}, null, null, Theme.key_chats_secretName)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CustomCell.class}, null, Theme.avatarDrawables, null, Theme.key_avatar_text)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundRed)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundOrange)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundViolet)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundGreen)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundCyan)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundBlue)); - themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundPink)); - - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{View.class}, null, new Drawable[]{greenDrawable, greenDrawable2, Theme.calllog_msgCallUpRedDrawable, Theme.calllog_msgCallDownRedDrawable}, null, Theme.key_calls_callReceivedGreenIcon)); - themeDescriptions.add(new ThemeDescription(listView, 0, new 
Class[]{View.class}, null, new Drawable[]{redDrawable, Theme.calllog_msgCallUpGreenDrawable, Theme.calllog_msgCallDownGreenDrawable}, null, Theme.key_calls_callReceivedRedIcon)); - themeDescriptions.add(new ThemeDescription(flickerLoadingView, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_windowBackgroundWhite)); - - return themeDescriptions; - } - @Override protected void onTransitionAnimationStart(boolean isOpen, boolean backward) { super.onTransitionAnimationStart(isOpen, backward); @@ -902,7 +1163,8 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. AnimatorSet animatorSet = new AnimatorSet(); for (int i = 0; i < n; i++) { View child = listView.getChildAt(i); - if (child == finalProgressView || listView.getChildAdapterPosition(child) < from) { + RecyclerView.ViewHolder holder = listView.getChildViewHolder(child); + if (child == finalProgressView || listView.getChildAdapterPosition(child) < from || child instanceof GroupCallCell || child instanceof HeaderCell && holder.getAdapterPosition() == listViewAdapter.activeHeaderRow) { continue; } child.setAlpha(0); @@ -937,4 +1199,74 @@ public class CallLogActivity extends BaseFragment implements NotificationCenter. 
} }); } + + @Override + public ArrayList getThemeDescriptions() { + ArrayList themeDescriptions = new ArrayList<>(); + + ThemeDescription.ThemeDescriptionDelegate cellDelegate = () -> { + if (listView != null) { + int count = listView.getChildCount(); + for (int a = 0; a < count; a++) { + View child = listView.getChildAt(a); + if (child instanceof CallCell) { + CallCell cell = (CallCell) child; + cell.profileSearchCell.update(0); + } + } + } + }; + + + themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_CELLBACKGROUNDCOLOR, new Class[]{LocationCell.class, CallCell.class, HeaderCell.class, GroupCallCell.class}, null, null, null, Theme.key_windowBackgroundWhite)); + themeDescriptions.add(new ThemeDescription(fragmentView, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_windowBackgroundGray)); + + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_actionBarDefault)); + themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_LISTGLOWCOLOR, null, null, null, null, Theme.key_actionBarDefault)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_ITEMSCOLOR, null, null, null, null, Theme.key_actionBarDefaultIcon)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_TITLECOLOR, null, null, null, null, Theme.key_actionBarDefaultTitle)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SELECTORCOLOR, null, null, null, null, Theme.key_actionBarDefaultSelector)); + + themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_SELECTOR, null, null, null, null, Theme.key_listSelector)); + + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{View.class}, Theme.dividerPaint, null, null, Theme.key_divider)); + + themeDescriptions.add(new ThemeDescription(emptyView, ThemeDescription.FLAG_TEXTCOLOR, new 
Class[]{EmptyTextProgressView.class}, new String[]{"emptyTextView1"}, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); + themeDescriptions.add(new ThemeDescription(emptyView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{EmptyTextProgressView.class}, new String[]{"emptyTextView2"}, null, null, null, Theme.key_emptyListPlaceholder)); + + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{LoadingCell.class}, new String[]{"progressBar"}, null, null, null, Theme.key_progressCircle)); + + themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{TextInfoPrivacyCell.class}, null, null, null, Theme.key_windowBackgroundGrayShadow)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{TextInfoPrivacyCell.class}, new String[]{"textView"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText4)); + + themeDescriptions.add(new ThemeDescription(floatingButton, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_chats_actionIcon)); + themeDescriptions.add(new ThemeDescription(floatingButton, ThemeDescription.FLAG_BACKGROUNDFILTER, null, null, null, null, Theme.key_chats_actionBackground)); + themeDescriptions.add(new ThemeDescription(floatingButton, ThemeDescription.FLAG_BACKGROUNDFILTER | ThemeDescription.FLAG_DRAWABLESELECTEDSTATE, null, null, null, null, Theme.key_chats_actionPressedBackground)); + + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CallCell.class}, new String[]{"imageView"}, null, null, null, Theme.key_featuredStickers_addButton)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CallCell.class}, null, new Drawable[]{Theme.dialogs_verifiedCheckDrawable}, null, Theme.key_chats_verifiedCheck)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CallCell.class}, null, new Drawable[]{Theme.dialogs_verifiedDrawable}, null, Theme.key_chats_verifiedBackground)); + themeDescriptions.add(new 
ThemeDescription(listView, 0, new Class[]{CallCell.class}, Theme.dialogs_offlinePaint, null, null, Theme.key_windowBackgroundWhiteGrayText3)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CallCell.class}, Theme.dialogs_onlinePaint, null, null, Theme.key_windowBackgroundWhiteBlueText3)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CallCell.class}, null, new Paint[]{Theme.dialogs_namePaint[0], Theme.dialogs_namePaint[1], Theme.dialogs_searchNamePaint}, null, null, Theme.key_chats_name)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CallCell.class}, null, new Paint[]{Theme.dialogs_nameEncryptedPaint[0], Theme.dialogs_nameEncryptedPaint[1], Theme.dialogs_searchNameEncryptedPaint}, null, null, Theme.key_chats_secretName)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{CallCell.class}, null, Theme.avatarDrawables, null, Theme.key_avatar_text)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundRed)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundOrange)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundViolet)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundGreen)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundCyan)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundBlue)); + themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, cellDelegate, Theme.key_avatar_backgroundPink)); + + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{View.class}, null, new Drawable[]{greenDrawable, greenDrawable2, Theme.calllog_msgCallUpRedDrawable, 
Theme.calllog_msgCallDownRedDrawable}, null, Theme.key_calls_callReceivedGreenIcon)); + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{View.class}, null, new Drawable[]{redDrawable, Theme.calllog_msgCallUpGreenDrawable, Theme.calllog_msgCallDownGreenDrawable}, null, Theme.key_calls_callReceivedRedIcon)); + themeDescriptions.add(new ThemeDescription(flickerLoadingView, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_windowBackgroundWhite)); + + themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_BACKGROUNDFILTER, new Class[]{ShadowSectionCell.class}, null, null, null, Theme.key_windowBackgroundGrayShadow)); + + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{HeaderCell.class}, new String[]{"textView"}, null, null, null, Theme.key_windowBackgroundWhiteBlueHeader)); + + return themeDescriptions; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CameraScanActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CameraScanActivity.java index 2e09cb922..1b92f9417 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CameraScanActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CameraScanActivity.java @@ -166,9 +166,9 @@ public class CameraScanActivity extends BaseFragment implements Camera.PreviewCa public CameraScanActivity(int type) { super(); CameraController.getInstance().initCamera(() -> { - if (cameraView != null) { - cameraView.initCamera(); - } +// if (cameraView != null) { +// cameraView.initCamera(); +// } }); currentType = type; if (currentType == TYPE_QR) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/AboutLinkCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/AboutLinkCell.java index f54e44a98..8c0198691 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/AboutLinkCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/AboutLinkCell.java @@ -22,23 +22,25 @@ import android.text.style.URLSpan; 
import android.util.TypedValue; import android.view.Gravity; import android.view.MotionEvent; +import android.view.ViewConfiguration; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.FrameLayout; import android.widget.TextView; -import androidx.core.view.accessibility.AccessibilityNodeInfoCompat; - import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; +import org.telegram.messenger.R; import org.telegram.messenger.browser.Browser; import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.BottomSheet; +import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.AlertsCreator; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.LinkPath; -import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.StaticLayoutEx; import org.telegram.ui.Components.URLSpanNoUnderline; @@ -82,6 +84,7 @@ public class AboutLinkCell extends FrameLayout { if (pressedLink != null) { pressedLink = null; } + AndroidUtilities.cancelRunOnUIThread(longPressedRunnable); invalidate(); } @@ -106,6 +109,42 @@ public class AboutLinkCell extends FrameLayout { requestLayout(); } + Runnable longPressedRunnable = new Runnable() { + @Override + public void run() { + if (pressedLink != null) { + String url; + if (pressedLink instanceof URLSpanNoUnderline) { + url = ((URLSpanNoUnderline) pressedLink).getURL(); + } else if (pressedLink instanceof URLSpan) { + url = ((URLSpan) pressedLink).getURL(); + } else { + url = pressedLink.toString(); + } + + ClickableSpan pressedLinkFinal = pressedLink; + BottomSheet.Builder builder = new BottomSheet.Builder(parentFragment.getParentActivity()); + builder.setTitle(url); + builder.setItems(new CharSequence[]{LocaleController.getString("Open", 
R.string.Open), LocaleController.getString("Copy", R.string.Copy)}, (dialog, which) -> { + if (which == 0) { + onLinkClick(pressedLinkFinal); + } else if (which == 1) { + AndroidUtilities.addToClipboard(url); + if (url.startsWith("@")) { + BulletinFactory.of(parentFragment).createSimpleBulletin(R.raw.copy, LocaleController.getString("UsernameCopied", R.string.UsernameCopied)).show(); + } else if (url.startsWith("#") || url.startsWith("$")) { + BulletinFactory.of(parentFragment).createSimpleBulletin(R.raw.copy, LocaleController.getString("HashtagCopied", R.string.HashtagCopied)).show(); + } else { + BulletinFactory.of(parentFragment).createSimpleBulletin(R.raw.copy, LocaleController.getString("LinkCopied", R.string.LinkCopied)).show(); + } + } + }); + builder.show(); + resetPressedLink(); + } + } + }; + @Override public boolean onTouchEvent(MotionEvent event) { float x = event.getX(); @@ -137,6 +176,8 @@ public class AboutLinkCell extends FrameLayout { } catch (Exception e) { FileLog.e(e); } + + AndroidUtilities.runOnUIThread(longPressedRunnable, ViewConfiguration.getLongPressTimeout()); } else { resetPressedLink(); } @@ -149,23 +190,7 @@ public class AboutLinkCell extends FrameLayout { } } else if (pressedLink != null) { try { - if (pressedLink instanceof URLSpanNoUnderline) { - String url = ((URLSpanNoUnderline) pressedLink).getURL(); - if (url.startsWith("@") || url.startsWith("#") || url.startsWith("/")) { - didPressUrl(url); - } - } else { - if (pressedLink instanceof URLSpan) { - String url = ((URLSpan) pressedLink).getURL(); - if (AndroidUtilities.shouldShowUrlInAlert(url)) { - AlertsCreator.showOpenUrlAlert(parentFragment, url, true, true); - } else { - Browser.openUrl(getContext(), url); - } - } else { - pressedLink.onClick(this); - } - } + onLinkClick(pressedLink); } catch (Exception e) { FileLog.e(e); } @@ -179,6 +204,26 @@ public class AboutLinkCell extends FrameLayout { return result || super.onTouchEvent(event); } + private void 
onLinkClick(ClickableSpan pressedLink) { + if (pressedLink instanceof URLSpanNoUnderline) { + String url = ((URLSpanNoUnderline) pressedLink).getURL(); + if (url.startsWith("@") || url.startsWith("#") || url.startsWith("/")) { + didPressUrl(url); + } + } else { + if (pressedLink instanceof URLSpan) { + String url = ((URLSpan) pressedLink).getURL(); + if (AndroidUtilities.shouldShowUrlInAlert(url)) { + AlertsCreator.showOpenUrlAlert(parentFragment, url, true, true); + } else { + Browser.openUrl(getContext(), url); + } + } else { + pressedLink.onClick(this); + } + } + } + @SuppressLint("DrawAllocation") @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ArchivedStickerSetCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ArchivedStickerSetCell.java index 7806486e9..044d6e3d1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ArchivedStickerSetCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ArchivedStickerSetCell.java @@ -177,7 +177,7 @@ public class ArchivedStickerSetCell extends FrameLayout implements Checkable { imageLocation = ImageLocation.getForDocument(thumb, sticker); } else { TLRPC.PhotoSize thumb = (TLRPC.PhotoSize) object; - imageLocation = ImageLocation.getForSticker(thumb, sticker); + imageLocation = ImageLocation.getForSticker(thumb, sticker, set.set.thumb_version); } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/AudioPlayerCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/AudioPlayerCell.java index 9b465f83e..49d941a0e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/AudioPlayerCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/AudioPlayerCell.java @@ -11,21 +11,15 @@ package org.telegram.ui.Cells; import android.annotation.SuppressLint; import 
android.content.Context; import android.graphics.Canvas; -import android.graphics.Paint; import android.text.Layout; import android.text.SpannableStringBuilder; import android.text.StaticLayout; -import android.text.TextPaint; import android.text.TextUtils; -import android.text.style.ReplacementSpan; import android.view.MotionEvent; import android.view.SoundEffectConstants; import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; -import androidx.annotation.NonNull; -import androidx.annotation.Nullable; - import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.DownloadController; import org.telegram.messenger.FileLoader; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java index a7bc2ae51..728f42cd5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatActionCell.java @@ -12,8 +12,10 @@ import android.content.Context; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.ColorFilter; +import android.graphics.Path; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; +import android.graphics.RectF; import android.text.Layout; import android.text.Spannable; import android.text.StaticLayout; @@ -21,6 +23,7 @@ import android.text.TextUtils; import android.text.style.URLSpan; import android.view.MotionEvent; import android.view.SoundEffectConstants; +import android.view.ViewGroup; import android.view.accessibility.AccessibilityNodeInfo; import org.telegram.messenger.AndroidUtilities; @@ -43,6 +46,8 @@ import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.Components.URLSpanNoUnderline; import org.telegram.ui.PhotoViewer; +import java.util.ArrayList; + public class ChatActionCell extends BaseCell implements DownloadController.FileDownloadProgressListener { 
public interface ChatActionCellDelegate { @@ -81,6 +86,10 @@ public class ChatActionCell extends BaseCell implements DownloadController.FileD private int previousWidth; private boolean imagePressed; + private float viewTop; + private int backgroundHeight; + private boolean visiblePartSet; + private ImageLocation currentVideoLocation; private float lastTouchX; @@ -98,6 +107,11 @@ public class ChatActionCell extends BaseCell implements DownloadController.FileD private String overrideText; private ColorFilter overrideColorFilter; private int overrideColor; + private ArrayList lineWidths = new ArrayList<>(); + private ArrayList lineHeights = new ArrayList<>(); + private Path backgroundPath = new Path(); + private RectF rect = new RectF(); + private boolean invalidatePath = true; private ChatActionCellDelegate delegate; @@ -222,6 +236,15 @@ public class ChatActionCell extends BaseCell implements DownloadController.FileD return imageReceiver; } + public void setVisiblePart(float visibleTop, int parentH) { + visiblePartSet = true; + backgroundHeight = parentH; + viewTop = visibleTop; + if (Theme.hasGradientService()) { + invalidate(); + } + } + @Override protected void onLongPress() { if (delegate != null) { @@ -367,6 +390,7 @@ public class ChatActionCell extends BaseCell implements DownloadController.FileD private void createLayout(CharSequence text, int width) { int maxWidth = width - AndroidUtilities.dp(30); + invalidatePath = true; textLayout = new StaticLayout(text, Theme.chat_actionTextPaint, maxWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); textHeight = 0; textWidth = 0; @@ -437,36 +461,6 @@ public class ChatActionCell extends BaseCell implements DownloadController.FileD return customDate; } - private int findMaxWidthAroundLine(int line) { - int width = (int) Math.ceil(textLayout.getLineWidth(line)); - int count = textLayout.getLineCount(); - for (int a = line + 1; a < count; a++) { - int w = (int) Math.ceil(textLayout.getLineWidth(a)); - if (Math.abs(w 
- width) < AndroidUtilities.dp(10)) { - width = Math.max(w, width); - } else { - break; - } - } - for (int a = line - 1; a >= 0; a--) { - int w = (int) Math.ceil(textLayout.getLineWidth(a)); - if (Math.abs(w - width) < AndroidUtilities.dp(10)) { - width = Math.max(w, width); - } else { - break; - } - } - return width; - } - - private boolean isLineTop(int prevWidth, int currentWidth, int line, int count, int cornerRest) { - return line == 0 || !(line < 0 || line >= count) && findMaxWidthAroundLine(line - 1) + cornerRest * 3 < prevWidth; - } - - private boolean isLineBottom(int nextWidth, int currentWidth, int line, int count, int cornerRest) { - return line == count - 1 || !(line < 0 || line > count - 1) && findMaxWidthAroundLine(line + 1) + cornerRest * 3 < nextWidth; - } - @Override protected void onDraw(Canvas canvas) { if (currentMessageObject != null && currentMessageObject.type == 11) { @@ -483,219 +477,145 @@ public class ChatActionCell extends BaseCell implements DownloadController.FileD overrideColor = color; overrideColorFilter = new PorterDuffColorFilter(overrideColor, PorterDuff.Mode.MULTIPLY); } - for (int a = 0; a < 4; a++) { - Theme.chat_cornerOuter[a].setColorFilter(overrideColorFilter); - Theme.chat_cornerInner[a].setColorFilter(overrideColorFilter); - } Theme.chat_actionBackgroundPaint.setColor(overrideColor); Theme.chat_actionTextPaint.setColor(Theme.getColor(overrideText)); } + if (invalidatePath) { + invalidatePath = false; + lineWidths.clear(); + final int count = textLayout.getLineCount(); + final int corner = AndroidUtilities.dp(11); + final int cornerIn = AndroidUtilities.dp(8); - final int count = textLayout.getLineCount(); - final int corner = AndroidUtilities.dp(11); - final int cornerOffset = AndroidUtilities.dp(6); - final int cornerRest = corner - cornerOffset; - final int cornerIn = AndroidUtilities.dp(8); - int y = AndroidUtilities.dp(7); - int previousLineBottom = 0; - int previousLineHeight = 0; - int dx; - int dx2; - int dy; - 
int previousLineWidth = 0; - for (int a = 0; a < count; a++) { - int width = findMaxWidthAroundLine(a); - int nextWidth = a < count - 1 ? findMaxWidthAroundLine(a + 1) : 0; - int w1 = 0; - int w2 = 0; - if (previousLineWidth != 0) { - int dw = width - previousLineWidth; - if (dw > 0 && dw < AndroidUtilities.dp(15) * 2) { - width = w1 = previousLineWidth + AndroidUtilities.dp(15) * 2; - } - - } - if (nextWidth != 0) { - int dw = width - nextWidth; - if (dw > 0 && dw < AndroidUtilities.dp(15) * 2) { - width = w2 = nextWidth + AndroidUtilities.dp(15) * 2; - } - } - if (w1 != 0 && w2 != 0) { - width = Math.max(w1, w2); - } - previousLineWidth = width; - int x = (getMeasuredWidth() - width - cornerRest) / 2; - width += cornerRest; - int lineBottom = textLayout.getLineBottom(a); - int height = lineBottom - previousLineBottom; - int additionalHeight = 0; - previousLineBottom = lineBottom; - - boolean drawBottomCorners = a == count - 1; - boolean drawTopCorners = a == 0; - - if (drawTopCorners) { - y -= AndroidUtilities.dp(3); - height += AndroidUtilities.dp(3); - } - if (drawBottomCorners) { - height += AndroidUtilities.dp(3); - } - - int yOld = y; - int hOld = height; - - int drawInnerBottom = 0; - int drawInnerTop = 0; - int nextLineWidth = 0; int prevLineWidth = 0; - if (!drawBottomCorners && a + 1 < count) { - nextLineWidth = findMaxWidthAroundLine(a + 1) + cornerRest; - if (nextLineWidth + cornerRest * 2 < width) { - drawInnerBottom = 1; - drawBottomCorners = true; - } else if (width + cornerRest * 2 < nextLineWidth) { - drawInnerBottom = 2; - } else { - drawInnerBottom = 3; + for (int a = 0; a < count; a++) { + int lineWidth = (int) Math.ceil(textLayout.getLineWidth(a)); + if (a != 0) { + int diff = prevLineWidth - lineWidth; + if (diff > 0 && diff <= corner + cornerIn) { + lineWidth = prevLineWidth; + } } + lineWidths.add(lineWidth); + prevLineWidth = lineWidth; } - if (!drawTopCorners && a > 0) { - prevLineWidth = findMaxWidthAroundLine(a - 1) + cornerRest; - if 
(prevLineWidth + cornerRest * 2 < width) { - drawInnerTop = 1; - drawTopCorners = true; - } else if (width + cornerRest * 2 < prevLineWidth) { - drawInnerTop = 2; - } else { - drawInnerTop = 3; + for (int a = count - 2; a >= 0; a--) { + int lineWidth = lineWidths.get(a); + int diff = prevLineWidth - lineWidth; + if (diff > 0 && diff <= corner + cornerIn) { + lineWidth = prevLineWidth; } + lineWidths.set(a, lineWidth); + prevLineWidth = lineWidth; } - if (drawInnerBottom != 0) { - if (drawInnerBottom == 1) { - int nextX = (getMeasuredWidth() - nextLineWidth) / 2; - additionalHeight = AndroidUtilities.dp(3); + int y = AndroidUtilities.dp(4); + int x = getMeasuredWidth() / 2; + int previousLineBottom = 0; - if (isLineBottom(nextLineWidth, width, a + 1, count, cornerRest)) { - canvas.drawRect(x + cornerOffset, y + height, nextX - cornerRest, y + height + AndroidUtilities.dp(3), Theme.chat_actionBackgroundPaint); - canvas.drawRect(nextX + nextLineWidth + cornerRest, y + height, x + width - cornerOffset, y + height + AndroidUtilities.dp(3), Theme.chat_actionBackgroundPaint); - } else { - canvas.drawRect(x + cornerOffset, y + height, nextX, y + height + AndroidUtilities.dp(3), Theme.chat_actionBackgroundPaint); - canvas.drawRect(nextX + nextLineWidth, y + height, x + width - cornerOffset, y + height + AndroidUtilities.dp(3), Theme.chat_actionBackgroundPaint); - } - } else if (drawInnerBottom == 2) { - additionalHeight = AndroidUtilities.dp(3); + final int cornerOffset = AndroidUtilities.dp(3); + final int cornerInSmall = AndroidUtilities.dp(6); + final int cornerRest = corner - cornerOffset; - dy = y + height - AndroidUtilities.dp(11); + lineHeights.clear(); + backgroundPath.reset(); + backgroundPath.moveTo(x, y); - dx = x - cornerIn; - if (drawInnerTop != 2 && drawInnerTop != 3) { - dx -= cornerRest; - } - if (drawTopCorners || drawBottomCorners) { - canvas.drawRect(dx + cornerIn, dy + AndroidUtilities.dp(3), dx + cornerIn + corner, dy + corner, 
Theme.chat_actionBackgroundPaint); - } - Theme.chat_cornerInner[2].setBounds(dx, dy, dx + cornerIn, dy + cornerIn); - Theme.chat_cornerInner[2].draw(canvas); + for (int a = 0; a < count; a++) { + int lineWidth = lineWidths.get(a); + int lineBottom = textLayout.getLineBottom(a); + int nextLineWidth = a < count - 1 ? lineWidths.get(a + 1) : 0; - dx = x + width; - if (drawInnerTop != 2 && drawInnerTop != 3) { - dx += cornerRest; - } - if (drawTopCorners || drawBottomCorners) { - canvas.drawRect(dx - corner, dy + AndroidUtilities.dp(3), dx, dy + corner, Theme.chat_actionBackgroundPaint); - } - Theme.chat_cornerInner[3].setBounds(dx, dy, dx + cornerIn, dy + cornerIn); - Theme.chat_cornerInner[3].draw(canvas); - } else { - additionalHeight = AndroidUtilities.dp(6); - } - } - if (drawInnerTop != 0) { - if (drawInnerTop == 1) { - int prevX = (getMeasuredWidth() - prevLineWidth) / 2; - - y -= AndroidUtilities.dp(3); + int height = lineBottom - previousLineBottom; + if (a == 0 || lineWidth > prevLineWidth) { height += AndroidUtilities.dp(3); - - if (isLineTop(prevLineWidth, width, a - 1, count, cornerRest)) { - canvas.drawRect(x + cornerOffset, y, prevX - cornerRest, y + AndroidUtilities.dp(3), Theme.chat_actionBackgroundPaint); - canvas.drawRect(prevX + prevLineWidth + cornerRest, y, x + width - cornerOffset, y + AndroidUtilities.dp(3), Theme.chat_actionBackgroundPaint); - } else { - canvas.drawRect(x + cornerOffset, y, prevX, y + AndroidUtilities.dp(3), Theme.chat_actionBackgroundPaint); - canvas.drawRect(prevX + prevLineWidth, y, x + width - cornerOffset, y + AndroidUtilities.dp(3), Theme.chat_actionBackgroundPaint); - } - } else if (drawInnerTop == 2) { - y -= AndroidUtilities.dp(3); + } + if (a == count - 1 || lineWidth > nextLineWidth) { height += AndroidUtilities.dp(3); + } - dy = previousLineHeight; + previousLineBottom = lineBottom; - dx = x - cornerIn; - if (drawInnerBottom != 2 && drawInnerBottom != 3) { - dx -= cornerRest; - } - if (drawTopCorners || 
drawBottomCorners) { - canvas.drawRect(dx + cornerIn, y + AndroidUtilities.dp(3), dx + cornerIn + corner, y + AndroidUtilities.dp(11), Theme.chat_actionBackgroundPaint); - } - Theme.chat_cornerInner[0].setBounds(dx, dy, dx + cornerIn, dy + cornerIn); - Theme.chat_cornerInner[0].draw(canvas); + float startX = x + lineWidth / 2.0f; - dx = x + width; - if (drawInnerBottom != 2 && drawInnerBottom != 3) { - dx += cornerRest; - } - if (drawTopCorners || drawBottomCorners) { - canvas.drawRect(dx - corner, y + AndroidUtilities.dp(3), dx, y + AndroidUtilities.dp(11), Theme.chat_actionBackgroundPaint); - } - Theme.chat_cornerInner[1].setBounds(dx, dy, dx + cornerIn, dy + cornerIn); - Theme.chat_cornerInner[1].draw(canvas); + int innerCornerRad; + if (a != count - 1 && lineWidth < nextLineWidth && a != 0 && lineWidth < prevLineWidth) { + innerCornerRad = cornerInSmall; } else { - y -= AndroidUtilities.dp(6); - height += AndroidUtilities.dp(6); + innerCornerRad = cornerIn; + } + + if (a == 0 || lineWidth > prevLineWidth) { + rect.set(startX - cornerOffset - corner, y, startX + cornerRest, y + corner * 2); + backgroundPath.arcTo(rect, -90, 90); + } else if (lineWidth < prevLineWidth) { + rect.set(startX + cornerRest, y, startX + cornerRest + innerCornerRad * 2, y + innerCornerRad * 2); + backgroundPath.arcTo(rect, -90, -90); + } + y += height; + int yOffset = y; + if (a != count - 1 && lineWidth < nextLineWidth) { + y -= AndroidUtilities.dp(3); + height -= AndroidUtilities.dp(3); + } + if (a != 0 && lineWidth < prevLineWidth) { + y -= AndroidUtilities.dp(3); + height -= AndroidUtilities.dp(3); + } + lineHeights.add(height); + + if (a == count - 1 || lineWidth > nextLineWidth) { + rect.set(startX - cornerOffset - corner, y - corner * 2, startX + cornerRest, y); + backgroundPath.arcTo(rect, 0, 90); + } else if (lineWidth < nextLineWidth) { + rect.set(startX + cornerRest, y - innerCornerRad * 2, startX + cornerRest + innerCornerRad * 2, y); + backgroundPath.arcTo(rect, 180, -90); 
+ } + + prevLineWidth = lineWidth; + } + for (int a = count - 1; a >= 0; a--) { + prevLineWidth = a != 0 ? lineWidths.get(a - 1) : 0; + int lineWidth = lineWidths.get(a); + int nextLineWidth = a != count - 1 ? lineWidths.get(a + 1) : 0; + int lineBottom = textLayout.getLineBottom(a); + float startX = x - lineWidth / 2; + + int innerCornerRad; + if (a != count - 1 && lineWidth < nextLineWidth && a != 0 && lineWidth < prevLineWidth) { + innerCornerRad = cornerInSmall; + } else { + innerCornerRad = cornerIn; + } + + if (a == count - 1 || lineWidth > nextLineWidth) { + rect.set(startX - cornerRest, y - corner * 2, startX + cornerOffset + corner, y); + backgroundPath.arcTo(rect, 90, 90); + } else if (lineWidth < nextLineWidth) { + rect.set(startX - cornerRest - innerCornerRad * 2, y - innerCornerRad * 2, startX - cornerRest, y); + backgroundPath.arcTo(rect, 90, -90); + } + + y -= lineHeights.get(a); + + if (a == 0 || lineWidth > prevLineWidth) { + rect.set(startX - cornerRest, y, startX + cornerOffset + corner, y + corner * 2); + backgroundPath.arcTo(rect, 180, 90); + } else if (lineWidth < prevLineWidth) { + rect.set(startX - cornerRest - innerCornerRad * 2, y, startX - cornerRest, y + innerCornerRad * 2); + backgroundPath.arcTo(rect, 0, -90); } } - - if (drawTopCorners || drawBottomCorners) { - canvas.drawRect(x + cornerOffset, yOld, x + width - cornerOffset, yOld + hOld, Theme.chat_actionBackgroundPaint); - } else { - canvas.drawRect(x, yOld, x + width, yOld + hOld, Theme.chat_actionBackgroundPaint); - } - - dx = x - cornerRest; - dx2 = x + width - cornerOffset; - if (drawTopCorners && !drawBottomCorners && drawInnerBottom != 2) { - canvas.drawRect(dx, y + corner, dx + corner, y + height + additionalHeight - AndroidUtilities.dp(6), Theme.chat_actionBackgroundPaint); - canvas.drawRect(dx2, y + corner, dx2 + corner, y + height + additionalHeight - AndroidUtilities.dp(6), Theme.chat_actionBackgroundPaint); - } else if (drawBottomCorners && !drawTopCorners && 
drawInnerTop != 2) { - canvas.drawRect(dx, y + corner - AndroidUtilities.dp(5), dx + corner, y + height + additionalHeight - corner, Theme.chat_actionBackgroundPaint); - canvas.drawRect(dx2, y + corner - AndroidUtilities.dp(5), dx2 + corner, y + height + additionalHeight - corner, Theme.chat_actionBackgroundPaint); - } else if (drawTopCorners || drawBottomCorners) { - canvas.drawRect(dx, y + corner, dx + corner, y + height + additionalHeight - corner, Theme.chat_actionBackgroundPaint); - canvas.drawRect(dx2, y + corner, dx2 + corner, y + height + additionalHeight - corner, Theme.chat_actionBackgroundPaint); - } - - if (drawTopCorners) { - Theme.chat_cornerOuter[0].setBounds(dx, y, dx + corner, y + corner); - Theme.chat_cornerOuter[0].draw(canvas); - Theme.chat_cornerOuter[1].setBounds(dx2, y, dx2 + corner, y + corner); - Theme.chat_cornerOuter[1].draw(canvas); - } - - if (drawBottomCorners) { - dy = y + height + additionalHeight - corner; - - Theme.chat_cornerOuter[2].setBounds(dx2, dy, dx2 + corner, dy + corner); - Theme.chat_cornerOuter[2].draw(canvas); - Theme.chat_cornerOuter[3].setBounds(dx, dy, dx + corner, dy + corner); - Theme.chat_cornerOuter[3].draw(canvas); - } - - y += height; - - previousLineHeight = y + additionalHeight; + backgroundPath.close(); + } + if (!visiblePartSet) { + ViewGroup parent = (ViewGroup) getParent(); + backgroundHeight = parent.getMeasuredHeight(); + } + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, 0, viewTop + AndroidUtilities.dp(4)); + canvas.drawPath(backgroundPath, Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + canvas.drawPath(backgroundPath, Theme.chat_actionBackgroundGradientDarkenPaint); } canvas.save(); @@ -704,10 +624,6 @@ public class ChatActionCell extends BaseCell implements DownloadController.FileD canvas.restore(); if (overrideColorFilter != null) { - for (int a = 0; a < 4; a++) { - Theme.chat_cornerOuter[a].setColorFilter(Theme.colorFilter); - 
Theme.chat_cornerInner[a].setColorFilter(Theme.colorFilter); - } Theme.chat_actionBackgroundPaint.setColor(Theme.currentColor); Theme.chat_actionTextPaint.setColor(Theme.getColor(Theme.key_chat_serviceText)); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatLoadingCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatLoadingCell.java index b732f26ba..7658e72cf 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatLoadingCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatLoadingCell.java @@ -10,10 +10,10 @@ package org.telegram.ui.Cells; import android.content.Context; import android.view.Gravity; +import android.view.View; import android.widget.FrameLayout; import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.R; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.RadialProgressView; @@ -23,12 +23,11 @@ public class ChatLoadingCell extends FrameLayout { private FrameLayout frameLayout; private RadialProgressView progressBar; - public ChatLoadingCell(Context context) { + public ChatLoadingCell(Context context, View parent) { super(context); frameLayout = new FrameLayout(context); - frameLayout.setBackgroundResource(R.drawable.system_loader); - frameLayout.getBackground().setColorFilter(Theme.colorFilter); + frameLayout.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(18), frameLayout, parent)); addView(frameLayout, LayoutHelper.createFrame(36, 36, Gravity.CENTER)); progressBar = new RadialProgressView(context); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java index af764505c..0c50bd708 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ChatMessageCell.java @@ -109,6 +109,7 @@ import 
org.telegram.ui.Components.InfiniteProgress; import org.telegram.ui.Components.LinkPath; import org.telegram.ui.Components.MediaActionDrawable; import org.telegram.ui.Components.MessageBackgroundDrawable; +import org.telegram.ui.Components.MotionBackgroundDrawable; import org.telegram.ui.Components.Point; import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.RadialProgress2; @@ -404,9 +405,13 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate private boolean drawInstantView; private int drawInstantViewType; private int imageBackgroundColor; - private int imageBackgroundGradientColor; + private float imageBackgroundIntensity; + private int imageBackgroundGradientColor1; + private int imageBackgroundGradientColor2; + private int imageBackgroundGradientColor3; private int imageBackgroundGradientRotation = 45; private LinearGradient gradientShader; + private MotionBackgroundDrawable motionBackgroundDrawable; private int imageBackgroundSideColor; private int imageBackgroundSideWidth; private boolean drawJoinGroupView; @@ -738,6 +743,9 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate private int lastRepliesCount; private float selectedBackgroundProgress; + private float viewTop; + private int backgroundHeight; + private boolean scheduledInvalidate; private final boolean ALPHA_PROPERTY_WORKAROUND = Build.VERSION.SDK_INT == 28; @@ -2417,13 +2425,16 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate fullyDraw = draw; } - public void setVisiblePart(int position, int height, int parent, float parentOffset) { + public void setVisiblePart(int position, int height, int parent, float parentOffset, float visibleTop, int parentH) { + parentHeight = parentH; + backgroundHeight = parentH; + viewTop = visibleTop; if (parent != parentHeight || parentOffset != this.parentViewTopOffset) { this.parentViewTopOffset = parentOffset; parentHeight = parent; 
invalidate(); } - if (currentBackgroundDrawable != null && currentBackgroundDrawable.getGradientShader() != null) { + if (currentMessageObject != null && (Theme.hasGradientService() && currentMessageObject.shouldDrawWithoutBackground() || drawSideButton != 0 || !botButtons.isEmpty()) || currentBackgroundDrawable != null && currentBackgroundDrawable.getGradientShader() != null) { invalidate(); } @@ -2977,6 +2988,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate photoImage.setCrossfadeWithOldImage(false); photoImage.setCrossfadeDuration(ImageReceiver.DEFAULT_CROSSFADE_DURATION); + photoImage.setGradientBitmap(null); lastSendState = messageObject.messageOwner.send_state; lastDeleteDate = messageObject.messageOwner.destroyTime; lastViewsCount = messageObject.messageOwner.views; @@ -3088,8 +3100,13 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate drawCommentButton = false; photoImage.setSideClip(0); gradientShader = null; + motionBackgroundDrawable = null; + imageBackgroundColor = 0; - imageBackgroundGradientColor = 0; + imageBackgroundGradientColor1 = 0; + imageBackgroundGradientColor2 = 0; + imageBackgroundIntensity = 0; + imageBackgroundGradientColor3 = 0; imageBackgroundGradientRotation = 45; imageBackgroundSideColor = 0; mediaBackground = false; @@ -3304,7 +3321,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate drawInstantViewType = 6; try { Uri url = Uri.parse(messageObject.messageOwner.media.webpage.url); - int intensity = Utilities.parseInt(url.getQueryParameter("intensity")); + imageBackgroundIntensity = Utilities.parseInt(url.getQueryParameter("intensity")); String bgColor = url.getQueryParameter("bg_color"); String rotation = url.getQueryParameter("rotation"); if (rotation != null) { @@ -3315,26 +3332,42 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate if (document != null && "image/png".equals(document.mime_type)) { 
bgColor = "ffffff"; } - if (intensity == 0) { - intensity = 50; + if (imageBackgroundIntensity == 0) { + imageBackgroundIntensity = 50; } } if (bgColor != null) { imageBackgroundColor = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; int averageColor = imageBackgroundColor; - if (bgColor.length() > 6) { - imageBackgroundGradientColor = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; - averageColor = AndroidUtilities.getAverageColor(imageBackgroundColor, imageBackgroundGradientColor); + if (bgColor.length() >= 13 && AndroidUtilities.isValidWallChar(bgColor.charAt(6))) { + imageBackgroundGradientColor1 = Integer.parseInt(bgColor.substring(7, 13), 16) | 0xff000000; + averageColor = AndroidUtilities.getAverageColor(imageBackgroundColor, imageBackgroundGradientColor1); + } + if (bgColor.length() >= 20 && AndroidUtilities.isValidWallChar(bgColor.charAt(13))) { + imageBackgroundGradientColor2 = Integer.parseInt(bgColor.substring(14, 20), 16) | 0xff000000; + } + if (bgColor.length() == 27 && AndroidUtilities.isValidWallChar(bgColor.charAt(20))) { + imageBackgroundGradientColor3 = Integer.parseInt(bgColor.substring(21), 16) | 0xff000000; + } + if (imageBackgroundIntensity < 0) { + imageBackgroundSideColor = 0xff111111; + } else { + imageBackgroundSideColor = AndroidUtilities.getPatternSideColor(averageColor); } - imageBackgroundSideColor = AndroidUtilities.getPatternSideColor(averageColor); photoImage.setColorFilter(new PorterDuffColorFilter(AndroidUtilities.getPatternColor(averageColor), PorterDuff.Mode.SRC_IN)); - photoImage.setAlpha(intensity / 100.0f); + photoImage.setAlpha(Math.abs(imageBackgroundIntensity) / 100.0f); } else { String color = url.getLastPathSegment(); - if (color != null && (color.length() == 6 || color.length() == 13 && color.charAt(6) == '-')) { + if (color != null && color.length() >= 6) { imageBackgroundColor = Integer.parseInt(color.substring(0, 6), 16) | 0xff000000; - if (color.length() > 6) { - imageBackgroundGradientColor = 
Integer.parseInt(color.substring(7), 16) | 0xff000000; + if (color.length() >= 13 && AndroidUtilities.isValidWallChar(color.charAt(6))) { + imageBackgroundGradientColor1 = Integer.parseInt(color.substring(7, 13), 16) | 0xff000000; + } + if (color.length() >= 20 && AndroidUtilities.isValidWallChar(color.charAt(13))) { + imageBackgroundGradientColor2 = Integer.parseInt(color.substring(14, 20), 16) | 0xff000000; + } + if (color.length() == 27 && AndroidUtilities.isValidWallChar(color.charAt(20))) { + imageBackgroundGradientColor3 = Integer.parseInt(color.substring(21), 16) | 0xff000000; } currentPhotoObject = new TLRPC.TL_photoSizeEmpty(); currentPhotoObject.type = "s"; @@ -4952,21 +4985,19 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } else { currentPhotoObject = FileLoader.getClosestPhotoSizeWithSize(messageObject.photoThumbs, AndroidUtilities.getPhotoSize()); photoParentObject = messageObject.photoThumbsObject; - int maxPhotoWidth; boolean useFullWidth = false; if (messageObject.type == MessageObject.TYPE_ROUND_VIDEO) { - maxPhotoWidth = photoWidth = AndroidUtilities.roundMessageSize; documentAttach = messageObject.getDocument(); documentAttachType = DOCUMENT_ATTACH_TYPE_ROUND; } else { if (AndroidUtilities.isTablet()) { - maxPhotoWidth = photoWidth = (int) (AndroidUtilities.getMinTabletSide() * 0.7f); + photoWidth = (int) (AndroidUtilities.getMinTabletSide() * 0.7f); } else { if (currentPhotoObject != null && (messageObject.type == MessageObject.TYPE_PHOTO || messageObject.type == MessageObject.TYPE_VIDEO || messageObject.type == 8) && currentPhotoObject.w >= currentPhotoObject.h) { - maxPhotoWidth = photoWidth = Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y) - AndroidUtilities.dp(64 + (checkNeedDrawShareButton(messageObject) ? 10 : 0)); + photoWidth = Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y) - AndroidUtilities.dp(64 + (checkNeedDrawShareButton(messageObject) ? 
10 : 0)); useFullWidth = true; } else { - maxPhotoWidth = photoWidth = (int) (Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y) * 0.7f); + photoWidth = (int) (Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y) * 0.7f); } } } @@ -5592,6 +5623,8 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate linkPreviewHeight += height; totalHeight += height; + boolean hasNonRtl = false; + for (int a = 0; a < descriptionLayout.getLineCount(); a++) { int lineLeft = (int) Math.ceil(descriptionLayout.getLineLeft(a)); if (lineLeft != 0) { @@ -5600,12 +5633,21 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } else { descriptionX = Math.max(descriptionX, -lineLeft); } + } else { + hasNonRtl = true; } } + if (hasNonRtl) { + descriptionX = 0; + } } catch (Exception e) { FileLog.e(e); } + if (messageObject.type == MessageObject.TYPE_PHOTO || messageObject.type == MessageObject.TYPE_VIDEO) { + totalHeight += AndroidUtilities.dp(6); + } + totalHeight += AndroidUtilities.dp(17); if (captionNewLine != 0) { totalHeight -= AndroidUtilities.dp(14); @@ -6962,7 +7004,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate private void drawContent(Canvas canvas) { if (needNewVisiblePart && currentMessageObject.type == 0) { getLocalVisibleRect(scrollRect); - setVisiblePart(scrollRect.top, scrollRect.bottom - scrollRect.top, parentHeight, parentViewTopOffset); + setVisiblePart(scrollRect.top, scrollRect.bottom - scrollRect.top, parentHeight, parentViewTopOffset, viewTop, backgroundHeight); needNewVisiblePart = false; } @@ -7107,17 +7149,32 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } if (imageBackgroundColor != 0) { rect.set(photoImage.getImageX(), photoImage.getImageY(), photoImage.getImageX2(), photoImage.getImageY2()); - if (imageBackgroundGradientColor != 0) { - if (gradientShader == null) { - Rect r = 
BackgroundGradientDrawable.getGradientPoints(AndroidUtilities.getWallpaperRotation(imageBackgroundGradientRotation, false), (int) rect.width(), (int) rect.height()); - gradientShader = new LinearGradient(r.left, r.top, r.right, r.bottom, new int[]{imageBackgroundColor, imageBackgroundGradientColor}, null, Shader.TileMode.CLAMP); + if (imageBackgroundGradientColor1 != 0) { + if (imageBackgroundGradientColor2 != 0) { + if (motionBackgroundDrawable == null) { + motionBackgroundDrawable = new MotionBackgroundDrawable(imageBackgroundColor, imageBackgroundGradientColor1, imageBackgroundGradientColor2, imageBackgroundGradientColor3, true); + if (imageBackgroundIntensity < 0) { + photoImage.setGradientBitmap(motionBackgroundDrawable.getBitmap()); + } + if (!photoImage.hasImageSet()) { + motionBackgroundDrawable.setRoundRadius(AndroidUtilities.dp(4)); + } + } + } else { + if (gradientShader == null) { + Rect r = BackgroundGradientDrawable.getGradientPoints(AndroidUtilities.getWallpaperRotation(imageBackgroundGradientRotation, false), (int) rect.width(), (int) rect.height()); + gradientShader = new LinearGradient(r.left, r.top, r.right, r.bottom, new int[]{imageBackgroundColor, imageBackgroundGradientColor1}, null, Shader.TileMode.CLAMP); + } + Theme.chat_instantViewPaint.setShader(gradientShader); } - Theme.chat_instantViewPaint.setShader(gradientShader); } else { Theme.chat_instantViewPaint.setShader(null); Theme.chat_instantViewPaint.setColor(imageBackgroundColor); } - if (imageBackgroundSideColor != 0) { + if (motionBackgroundDrawable != null) { + motionBackgroundDrawable.setBounds((int) rect.left, (int) rect.top, (int) rect.right, (int) rect.bottom); + motionBackgroundDrawable.draw(canvas); + } else if (imageBackgroundSideColor != 0) { canvas.drawRect(photoImage.getImageX(), photoImage.getImageY(), photoImage.getImageX2(), photoImage.getImageY2(), Theme.chat_instantViewPaint); } else { canvas.drawRoundRect(rect, AndroidUtilities.dp(4), AndroidUtilities.dp(4), 
Theme.chat_instantViewPaint); @@ -7445,7 +7502,14 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate int oldAlpha = Theme.chat_actionBackgroundPaint.getAlpha(); Theme.chat_actionBackgroundPaint.setAlpha((int) (oldAlpha * timeAlpha)); - canvas.drawRoundRect(rect, AndroidUtilities.dp(4), AndroidUtilities.dp(4), Theme.chat_actionBackgroundPaint); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, getX(), viewTop); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + int oldAlpha2 = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (oldAlpha2 * timeAlpha)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundGradientDarkenPaint); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(oldAlpha2); + } Theme.chat_actionBackgroundPaint.setAlpha(oldAlpha); if (!playing && currentMessageObject.isContentUnread()) { @@ -7820,9 +7884,14 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate for (int a = 0; a < botButtons.size(); a++) { BotButton button = botButtons.get(a); int y = button.y + layoutHeight - AndroidUtilities.dp(2) + transitionParams.deltaBottom; - Theme.chat_systemDrawable.setColorFilter(a == pressedBotButton ? Theme.colorPressedFilter : Theme.colorFilter); - Theme.chat_systemDrawable.setBounds(button.x + addX, y, button.x + addX + button.width, y + button.height); - Theme.chat_systemDrawable.draw(canvas); + + rect.set(button.x + addX, y, button.x + addX + button.width, y + button.height); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, getX(), viewTop); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), a == pressedBotButton ? 
Theme.chat_actionBackgroundSelectedPaint : Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundGradientDarkenPaint); + } + canvas.save(); canvas.translate(button.x + addX + AndroidUtilities.dp(5), y + (AndroidUtilities.dp(44) - button.title.getLineBottom(button.title.getLineCount() - 1)) / 2); button.title.draw(canvas); @@ -9455,7 +9524,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate if (thumbPhotoSize == photoSize) { thumbPhotoSize = null; } - if (photoSize == null || messageObject.replyMessageObject.isAnyKindOfSticker() || messageObject.isAnyKindOfSticker() && !AndroidUtilities.isTablet() || messageObject.replyMessageObject.isSecretMedia()) { + if (photoSize == null || messageObject.replyMessageObject.isAnyKindOfSticker() || messageObject.isAnyKindOfSticker() && !AndroidUtilities.isTablet() || messageObject.replyMessageObject.isSecretMedia() || messageObject.replyMessageObject.isWebpageDocument()) { replyImageReceiver.setImageBitmap((Drawable) null); needReplyImage = false; } else { @@ -10127,19 +10196,6 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } if (drawSideButton != 0) { - if (sideButtonPressed) { - if (!Theme.isCustomTheme() || Theme.hasThemeKey(Theme.key_chat_shareBackgroundSelected)) { - Theme.chat_shareDrawable.setColorFilter(Theme.getShareColorFilter(Theme.getColor(Theme.key_chat_shareBackgroundSelected), true)); - } else { - Theme.chat_shareDrawable.setColorFilter(Theme.colorPressedFilter2); - } - } else { - if (!Theme.isCustomTheme() || Theme.hasThemeKey(Theme.key_chat_shareBackground)) { - Theme.chat_shareDrawable.setColorFilter(Theme.getShareColorFilter(Theme.getColor(Theme.key_chat_shareBackground), false)); - } else { - Theme.chat_shareDrawable.setColorFilter(Theme.colorFilter2); - } - } if (currentMessageObject.isOutOwner()) { sideStartX = 
getCurrentBackgroundLeft() - AndroidUtilities.dp(8 + 32); if (currentMessagesGroup != null) { @@ -10164,8 +10220,14 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate sideStartY -= AndroidUtilities.dp(18); height += AndroidUtilities.dp(18); } - setDrawableBounds(Theme.chat_shareDrawable, sideStartX, sideStartY, AndroidUtilities.dp(32), height); - Theme.chat_shareDrawable.draw(canvas); + + rect.set(sideStartX, sideStartY, sideStartX + AndroidUtilities.dp(32), sideStartY + height); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, getX(), viewTop); + canvas.drawRoundRect(rect, AndroidUtilities.dp(16), AndroidUtilities.dp(16), sideButtonPressed ? Theme.chat_actionBackgroundSelectedPaint : Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + canvas.drawRoundRect(rect, AndroidUtilities.dp(16), AndroidUtilities.dp(16), Theme.chat_actionBackgroundGradientDarkenPaint); + } + setDrawableBounds(Theme.chat_commentStickerDrawable, sideStartX + AndroidUtilities.dp(4), sideStartY + AndroidUtilities.dp(4)); Theme.chat_commentStickerDrawable.draw(canvas); @@ -10187,8 +10249,13 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate canvas.restore(); } } else { - setDrawableBounds(Theme.chat_shareDrawable, sideStartX, sideStartY, AndroidUtilities.dp(32), AndroidUtilities.dp(32)); - Theme.chat_shareDrawable.draw(canvas); + rect.set(sideStartX, sideStartY, sideStartX + AndroidUtilities.dp(32), sideStartY + AndroidUtilities.dp(32)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, getX(), viewTop); + canvas.drawRoundRect(rect, AndroidUtilities.dp(16), AndroidUtilities.dp(16), sideButtonPressed ? 
Theme.chat_actionBackgroundSelectedPaint : Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + canvas.drawRoundRect(rect, AndroidUtilities.dp(16), AndroidUtilities.dp(16), Theme.chat_actionBackgroundGradientDarkenPaint); + } + if (drawSideButton == 2) { if (currentMessageObject.isOutOwner()) { setDrawableBounds(Theme.chat_goIconDrawable, sideStartX + AndroidUtilities.dp(10), sideStartY + AndroidUtilities.dp(9)); @@ -10425,7 +10492,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate if (drawNameLayout && nameLayout != null) { canvas.save(); - int oldAlpha = 255; + int oldAlpha; if (currentMessageObject.shouldDrawWithoutBackground()) { Theme.chat_namePaint.setColor(Theme.getColor(Theme.key_chat_stickerNameText)); @@ -10436,13 +10503,19 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } nameY = layoutHeight - AndroidUtilities.dp(38); float alphaProgress = currentMessageObject.isOut() && (checkBoxVisible || checkBoxAnimationInProgress) ? 
(1.0f - checkBoxAnimationProgress) : 1.0f; - Theme.chat_systemDrawable.setAlpha((int) (alphaProgress * 255)); - Theme.chat_systemDrawable.setColorFilter(Theme.colorFilter); - Theme.chat_systemDrawable.setBounds((int) nameX - AndroidUtilities.dp(12), (int) nameY - AndroidUtilities.dp(5), (int) nameX + AndroidUtilities.dp(12) + nameWidth, (int) nameY + AndroidUtilities.dp(22)); - Theme.chat_systemDrawable.draw(canvas); - if (checkBoxVisible || checkBoxAnimationInProgress) { - Theme.chat_systemDrawable.setAlpha(oldAlpha); + + rect.set((int) nameX - AndroidUtilities.dp(12), (int) nameY - AndroidUtilities.dp(5), (int) nameX + AndroidUtilities.dp(12) + nameWidth, (int) nameY + AndroidUtilities.dp(22)); + oldAlpha = Theme.chat_actionBackgroundPaint.getAlpha(); + Theme.chat_actionBackgroundPaint.setAlpha((int) (alphaProgress * oldAlpha)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, getX(), viewTop); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + int oldAlpha2 = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (oldAlpha2 * timeAlpha)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundGradientDarkenPaint); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(oldAlpha2); } + nameX -= nameOffsetX; int color = Theme.getColor(Theme.key_chat_stickerViaBotNameText); color = (Theme.getColor(Theme.key_chat_stickerViaBotNameText) & 0x00ffffff) | ((int) (Color.alpha(color) * alphaProgress) << 24); @@ -10452,7 +10525,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate if (viaSpan2 != null) { viaSpan2.setColor(color); } - Theme.chat_systemDrawable.setAlpha(255); + Theme.chat_actionBackgroundPaint.setAlpha(oldAlpha); } else { if (mediaBackground || currentMessageObject.isOutOwner()) { nameX = 
backgroundDrawableLeft + transitionParams.deltaLeft + AndroidUtilities.dp(11) - nameOffsetX + getExtraTextX(); @@ -10550,9 +10623,13 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate forwardNameY = AndroidUtilities.dp(12); int backWidth = forwardedNameWidth + AndroidUtilities.dp(14); - Theme.chat_systemDrawable.setColorFilter(Theme.colorFilter); - Theme.chat_systemDrawable.setBounds((int) forwardNameX - AndroidUtilities.dp(7), forwardNameY - AndroidUtilities.dp(6), (int) forwardNameX - AndroidUtilities.dp(7) + backWidth, forwardNameY + AndroidUtilities.dp(38)); - Theme.chat_systemDrawable.draw(canvas); + + rect.set((int) forwardNameX - AndroidUtilities.dp(7), forwardNameY - AndroidUtilities.dp(6), (int) forwardNameX - AndroidUtilities.dp(7) + backWidth, forwardNameY + AndroidUtilities.dp(38)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, getX(), viewTop); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundGradientDarkenPaint); + } } else { forwardNameY = AndroidUtilities.dp(10 + (drawNameLayout ? 
19 : 0)); if (currentMessageObject.isOutOwner()) { @@ -10628,12 +10705,28 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } if (currentMessageObject.shouldDrawWithoutBackground()) { Theme.chat_replyLinePaint.setColor(Theme.getColor(Theme.key_chat_stickerReplyLine)); + int oldAlpha = Theme.chat_replyLinePaint.getAlpha(); + Theme.chat_replyLinePaint.setAlpha((int) (oldAlpha * timeAlpha)); Theme.chat_replyNamePaint.setColor(Theme.getColor(Theme.key_chat_stickerReplyNameText)); + oldAlpha = Theme.chat_replyNamePaint.getAlpha(); + Theme.chat_replyNamePaint.setAlpha((int) (oldAlpha * timeAlpha)); Theme.chat_replyTextPaint.setColor(Theme.getColor(Theme.key_chat_stickerReplyMessageText)); + oldAlpha = Theme.chat_replyTextPaint.getAlpha(); + Theme.chat_replyTextPaint.setAlpha((int) (oldAlpha * timeAlpha)); int backWidth = Math.max(replyNameWidth, replyTextWidth) + AndroidUtilities.dp(14); - Theme.chat_systemDrawable.setColorFilter(Theme.colorFilter); - Theme.chat_systemDrawable.setBounds((int) replyStartX - AndroidUtilities.dp(7), replyStartY - AndroidUtilities.dp(6), (int) replyStartX - AndroidUtilities.dp(7) + backWidth, replyStartY + AndroidUtilities.dp(41)); - Theme.chat_systemDrawable.draw(canvas); + + rect.set((int) replyStartX - AndroidUtilities.dp(7), replyStartY - AndroidUtilities.dp(6), (int) replyStartX - AndroidUtilities.dp(7) + backWidth, replyStartY + AndroidUtilities.dp(41)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, getX(), viewTop); + oldAlpha = Theme.chat_actionBackgroundPaint.getAlpha(); + Theme.chat_actionBackgroundPaint.setAlpha((int) (oldAlpha * timeAlpha)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundPaint); + Theme.chat_actionBackgroundPaint.setAlpha(oldAlpha); + if (Theme.hasGradientService()) { + oldAlpha = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); + 
Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (oldAlpha * timeAlpha)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), Theme.chat_actionBackgroundGradientDarkenPaint); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(oldAlpha); + } } else { if (currentMessageObject.isOutOwner()) { Theme.chat_replyLinePaint.setColor(Theme.getColor(Theme.key_chat_outReplyLine)); @@ -11196,7 +11289,14 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate float y1 = timeY - AndroidUtilities.dp(23); rect.set(x1, y1, x1 + timeWidth + AndroidUtilities.dp((bigRadius ? 12 : 8) + (currentMessageObject.isOutOwner() ? 20 : 0)), y1 + AndroidUtilities.dp(17)); + Theme.applyServiceShaderMatrix(getMeasuredWidth(), backgroundHeight, getX(), viewTop); canvas.drawRoundRect(rect, r, r, paint); + if (paint == Theme.chat_actionBackgroundPaint && Theme.hasGradientService()) { + int oldAlpha2 = Theme.chat_actionBackgroundGradientDarkenPaint.getAlpha(); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha((int) (oldAlpha2 * timeAlpha * alpha)); + canvas.drawRoundRect(rect, r, r, Theme.chat_actionBackgroundGradientDarkenPaint); + Theme.chat_actionBackgroundGradientDarkenPaint.setAlpha(oldAlpha2); + } paint.setAlpha(oldAlpha); float additionalX = -timeLayout.getLineLeft(0); @@ -11217,8 +11317,8 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate if (statusDrawableAnimationInProgress) { boolean outDrawClock = (animateFromStatusDrawableParams & 4) != 0; boolean outDrawError = (animateFromStatusDrawableParams & 8) != 0; - drawClokOrErroLayout(canvas, outDrawClock, outDrawError, layoutHeight, alpha, timeYOffset, timeX, 1f - statusDrawableProgress, drawSelectionBackground); - drawClokOrErroLayout(canvas, drawClock, drawError, layoutHeight, alpha, timeYOffset, timeX, statusDrawableProgress, drawSelectionBackground); + drawClockOrErrorLayout(canvas, outDrawClock, outDrawError, layoutHeight, 
alpha, timeYOffset, timeX, 1f - statusDrawableProgress, drawSelectionBackground); + drawClockOrErrorLayout(canvas, drawClock, drawError, layoutHeight, alpha, timeYOffset, timeX, statusDrawableProgress, drawSelectionBackground); if (!currentMessageObject.isOutOwner()) { if (!outDrawClock && !outDrawError) { @@ -11234,7 +11334,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate drawViewsAndRepliesLayout(canvas, layoutHeight, alpha, timeYOffset, timeX, 1f, drawSelectionBackground); } } - drawClokOrErroLayout(canvas, drawClock, drawError, layoutHeight, alpha, timeYOffset, timeX, 1f, drawSelectionBackground); + drawClockOrErrorLayout(canvas, drawClock, drawError, layoutHeight, alpha, timeYOffset, timeX, 1f, drawSelectionBackground); } if (currentMessageObject.isOutOwner()) { @@ -11273,8 +11373,8 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate if (statusDrawableAnimationInProgress) { boolean outDrawClock = (animateFromStatusDrawableParams & 4) != 0; boolean outDrawError = (animateFromStatusDrawableParams & 8) != 0; - drawClokOrErroLayout(canvas, outDrawClock, outDrawError, layoutHeight, alpha, timeYOffset, timeX, 1f - statusDrawableProgress, drawSelectionBackground); - drawClokOrErroLayout(canvas, drawClock, drawError, layoutHeight, alpha, timeYOffset, timeX, statusDrawableProgress, drawSelectionBackground); + drawClockOrErrorLayout(canvas, outDrawClock, outDrawError, layoutHeight, alpha, timeYOffset, timeX, 1f - statusDrawableProgress, drawSelectionBackground); + drawClockOrErrorLayout(canvas, drawClock, drawError, layoutHeight, alpha, timeYOffset, timeX, statusDrawableProgress, drawSelectionBackground); if (!currentMessageObject.isOutOwner()) { if (!outDrawClock && !outDrawError) { @@ -11290,7 +11390,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate drawViewsAndRepliesLayout(canvas, layoutHeight, alpha, timeYOffset, timeX, 1f, drawSelectionBackground); } } - 
drawClokOrErroLayout(canvas, drawClock, drawError, layoutHeight, alpha, timeYOffset, timeX, 1f, drawSelectionBackground); + drawClockOrErrorLayout(canvas, drawClock, drawError, layoutHeight, alpha, timeYOffset, timeX, 1f, drawSelectionBackground); } if (currentMessageObject.isOutOwner()) { @@ -11416,7 +11516,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } - private void drawClokOrErroLayout(Canvas canvas, boolean drawTime, boolean drawError, float layoutHeight, float alpha, float timeYOffset, float timeX, float progress, boolean drawSelectionBackground) { + private void drawClockOrErrorLayout(Canvas canvas, boolean drawTime, boolean drawError, float layoutHeight, float alpha, float timeYOffset, float timeX, float progress, boolean drawSelectionBackground) { boolean useScale = progress != 1f; float scale = 0.5f + 0.5f * progress; alpha *= progress; @@ -11671,10 +11771,6 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate pinnedDrawable.draw(canvas); pinnedDrawable.setAlpha(255); - if (isPinned) { - offsetX = pinnedDrawable.getIntrinsicWidth() + AndroidUtilities.dp(6); - } - if (useScale) { canvas.restore(); } @@ -11756,7 +11852,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate } else { setDrawableBounds(Theme.chat_msgStickerCheckDrawable, layoutWidth - AndroidUtilities.dp(bigRadius ? 
23.5f : 21.5f) - Theme.chat_msgStickerCheckDrawable.getIntrinsicWidth(), timeY - Theme.chat_msgStickerCheckDrawable.getIntrinsicHeight() + timeYOffset); } - Theme.chat_msgStickerCheckDrawable.setAlpha((int) (255 * alpha)); + Theme.chat_msgStickerCheckDrawable.setAlpha((int) (255 * timeAlpha * alpha)); drawable = Theme.chat_msgStickerCheckDrawable; } else { if (drawCheck1) { @@ -11817,7 +11913,7 @@ public class ChatMessageCell extends BaseCell implements SeekBar.SeekBarDelegate Drawable drawable; if (currentMessageObject.shouldDrawWithoutBackground()) { setDrawableBounds(Theme.chat_msgStickerHalfCheckDrawable, layoutWidth - AndroidUtilities.dp(bigRadius ? 23.5f : 21.5f) - Theme.chat_msgStickerHalfCheckDrawable.getIntrinsicWidth(), timeY - Theme.chat_msgStickerHalfCheckDrawable.getIntrinsicHeight() + timeYOffset); - Theme.chat_msgStickerHalfCheckDrawable.setAlpha((int) (255 * alpha)); + Theme.chat_msgStickerHalfCheckDrawable.setAlpha((int) (255 * timeAlpha * alpha)); drawable = Theme.chat_msgStickerHalfCheckDrawable; } else { setDrawableBounds(Theme.chat_msgMediaHalfCheckDrawable, layoutWidth - AndroidUtilities.dp(bigRadius ? 
23.5f : 21.5f) - Theme.chat_msgMediaHalfCheckDrawable.getIntrinsicWidth(), timeY - Theme.chat_msgMediaHalfCheckDrawable.getIntrinsicHeight() + timeYOffset); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java index 8aabe028a..68ad42d07 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DialogCell.java @@ -143,6 +143,7 @@ public class DialogCell extends BaseCell { private boolean lastUnreadState; private int lastSendState; private boolean dialogMuted; + private float dialogMutedProgress; private MessageObject message; private boolean clearingDialog; private CharSequence lastMessageString; @@ -1047,6 +1048,8 @@ public class DialogCell extends BaseCell { } else { innerMessage = String.format("\uD83C\uDFAE %s", message.messageOwner.media.game.title); } + } else if (message.messageOwner.media instanceof TLRPC.TL_messageMediaInvoice) { + innerMessage = message.messageOwner.media.title; } else if (message.type == 14) { if (Build.VERSION.SDK_INT >= 18) { innerMessage = String.format("\uD83C\uDFA7 \u2068%s - %s\u2069", message.getMusicAuthor(), message.getMusicTitle()); @@ -1161,6 +1164,8 @@ public class DialogCell extends BaseCell { messageString = "\uD83D\uDCCA " + mediaPoll.poll.question; } else if (message.messageOwner.media instanceof TLRPC.TL_messageMediaGame) { messageString = "\uD83C\uDFAE " + message.messageOwner.media.game.title; + } else if (message.messageOwner.media instanceof TLRPC.TL_messageMediaInvoice) { + messageString = message.messageOwner.media.title; } else if (message.type == 14) { messageString = String.format("\uD83C\uDFA7 %s - %s", message.getMusicAuthor(), message.getMusicTitle()); } else { @@ -2187,6 +2192,10 @@ public class DialogCell extends BaseCell { requestLayout(); } + if (!animated) { + dialogMutedProgress = dialogMuted ? 
1f : 0f; + } + invalidate(); } @@ -2596,9 +2605,34 @@ public class DialogCell extends BaseCell { lastStatusDrawableParams = (this.drawClock ? 1 : 0) + (this.drawCheck1 ? 2 : 0) + (this.drawCheck2 ? 4 : 0); } - if (dialogMuted && !drawVerified && drawScam == 0) { + if ((dialogMuted || dialogMutedProgress > 0) && !drawVerified && drawScam == 0) { + if (dialogMuted && dialogMutedProgress != 1f) { + dialogMutedProgress += 16 / 150f; + if (dialogMutedProgress > 1f) { + dialogMutedProgress = 1f; + } else { + invalidate(); + } + } else if (!dialogMuted && dialogMutedProgress != 0f) { + dialogMutedProgress -= 16 / 150f; + if (dialogMutedProgress < 0f) { + dialogMutedProgress = 0f; + } else { + invalidate(); + } + } setDrawableBounds(Theme.dialogs_muteDrawable, nameMuteLeft - AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 0 : 1), AndroidUtilities.dp(SharedConfig.useThreeLinesLayout ? 13.5f : 17.5f)); - Theme.dialogs_muteDrawable.draw(canvas); + if (dialogMutedProgress != 1f) { + canvas.save(); + canvas.scale(dialogMutedProgress, dialogMutedProgress, Theme.dialogs_muteDrawable.getBounds().centerX(), Theme.dialogs_muteDrawable.getBounds().centerY()); + Theme.dialogs_muteDrawable.setAlpha((int) (255 * dialogMutedProgress)); + Theme.dialogs_muteDrawable.draw(canvas); + Theme.dialogs_muteDrawable.setAlpha(255); + canvas.restore(); + } else { + Theme.dialogs_muteDrawable.draw(canvas); + } + } else if (drawVerified) { setDrawableBounds(Theme.dialogs_verifiedDrawable, nameMuteLeft, AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 12.5f : 16.5f)); setDrawableBounds(Theme.dialogs_verifiedCheckDrawable, nameMuteLeft, AndroidUtilities.dp(useForceThreeLines || SharedConfig.useThreeLinesLayout ? 
12.5f : 16.5f)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DividerCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DividerCell.java index 028298c1e..0c7ecf444 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DividerCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DividerCell.java @@ -10,13 +10,20 @@ package org.telegram.ui.Cells; import android.content.Context; import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; import android.view.View; +import androidx.core.graphics.ColorUtils; + import org.telegram.messenger.AndroidUtilities; import org.telegram.ui.ActionBar.Theme; public class DividerCell extends View { + boolean forceDarkTheme; + Paint paint; + public DividerCell(Context context) { super(context); setPadding(0, AndroidUtilities.dp(8), 0, AndroidUtilities.dp(8)); @@ -29,6 +36,19 @@ public class DividerCell extends View { @Override protected void onDraw(Canvas canvas) { - canvas.drawLine(getPaddingLeft(), getPaddingTop(), getWidth() - getPaddingRight(), getPaddingTop(), Theme.dividerPaint); + Paint localPaint = Theme.dividerPaint; + if (forceDarkTheme) { + if (paint == null) { + paint = new Paint(); + paint.setColor(ColorUtils.blendARGB(Color.BLACK, Theme.getColor(Theme.key_voipgroup_dialogBackground), 0.2f)); + } + localPaint = paint; + } + + canvas.drawLine(getPaddingLeft(), getPaddingTop(), getWidth() - getPaddingRight(), getPaddingTop(), localPaint); + } + + public void setForceDarkTheme(boolean forceDarkTheme) { + this.forceDarkTheme = forceDarkTheme; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerActionCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerActionCell.java index 78a01c4cb..3276eebbe 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerActionCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/DrawerActionCell.java @@ -11,7 +11,6 @@ package org.telegram.ui.Cells; 
import android.content.Context; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; -import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.util.TypedValue; import android.view.Gravity; @@ -26,7 +25,6 @@ import org.telegram.ui.Components.LayoutHelper; public class DrawerActionCell extends FrameLayout { private TextView textView; - private RectF rect = new RectF(); public DrawerActionCell(Context context) { super(context); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell.java index 7080b3237..3be259278 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell.java @@ -194,7 +194,7 @@ public class FeaturedStickerSetCell extends FrameLayout { imageLocation = ImageLocation.getForDocument(thumb, sticker); } else { TLRPC.PhotoSize thumb = (TLRPC.PhotoSize) object; - imageLocation = ImageLocation.getForSticker(thumb, sticker); + imageLocation = ImageLocation.getForSticker(thumb, sticker, set.set.thumb_version); } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell2.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell2.java index 0c3013158..be6f12af1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell2.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/FeaturedStickerSetCell2.java @@ -198,7 +198,7 @@ public class FeaturedStickerSetCell2 extends FrameLayout { imageLocation = ImageLocation.getForDocument(thumb, sticker); } else { // unique thumb TLRPC.PhotoSize thumb = (TLRPC.PhotoSize) object; - imageLocation = ImageLocation.getForSticker(thumb, sticker); + imageLocation = 
ImageLocation.getForSticker(thumb, sticker, set.set.thumb_version); } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java index 2e5d92d71..fe73aa500 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/GroupCallUserCell.java @@ -176,7 +176,6 @@ public class GroupCallUserCell extends FrameLayout { muteButton.setScaleX(0.6f + 0.4f * (1f - progressToAvatarPreview)); muteButton.setScaleY(0.6f + 0.4f * (1f - progressToAvatarPreview)); - invalidate(); } @@ -193,6 +192,12 @@ public class GroupCallUserCell extends FrameLayout { } } + public void setDrawAvatar(boolean draw) { + if (avatarImageView.getImageReceiver().getVisible() != draw) { + avatarImageView.getImageReceiver().setVisible(draw, true); + } + } + private static class VerifiedDrawable extends Drawable { private Drawable[] drawables = new Drawable[2]; @@ -441,7 +446,7 @@ public class GroupCallUserCell extends FrameLayout { return avatarImageView.getImageReceiver().hasNotThumb(); } - public void setData(AccountInstance account, TLRPC.TL_groupCallParticipant groupCallParticipant, ChatObject.Call call, int self, TLRPC.FileLocation uploadingAvatar) { + public void setData(AccountInstance account, TLRPC.TL_groupCallParticipant groupCallParticipant, ChatObject.Call call, int self, TLRPC.FileLocation uploadingAvatar, boolean animated) { currentCall = call; accountInstance = account; selfId = self; @@ -484,7 +489,7 @@ public class GroupCallUserCell extends FrameLayout { } } } - applyParticipantChanges(false); + applyParticipantChanges(animated); } public void setDrawDivider(boolean draw) { @@ -872,11 +877,17 @@ public class GroupCallUserCell extends FrameLayout { avatarImageView.setScaleX(avatarWavesDrawable.getAvatarScale()); 
avatarImageView.setScaleY(avatarWavesDrawable.getAvatarScale()); + avatarProgressView.setScaleX(avatarWavesDrawable.getAvatarScale()); avatarProgressView.setScaleY(avatarWavesDrawable.getAvatarScale()); + super.dispatchDraw(canvas); } + public void getAvatarPosition(int[] pos) { + avatarImageView.getLocationInWindow(pos); + } + public static class AvatarWavesDrawable { float amplitude; @@ -1029,4 +1040,11 @@ public class GroupCallUserCell extends FrameLayout { info.addAction(new AccessibilityNodeInfo.AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, participant.muted && !participant.can_self_unmute ? LocaleController.getString("VoipUnmute", R.string.VoipUnmute) : LocaleController.getString("VoipMute", R.string.VoipMute))); } } + + public int getPeerId() { + if (participant == null) { + return 0; + } + return MessageObject.getPeerId(participant.peer); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/HintDialogCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/HintDialogCell.java index d7b6cba1c..6e4cc382b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/HintDialogCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/HintDialogCell.java @@ -19,7 +19,6 @@ import android.widget.FrameLayout; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.ImageLocation; import org.telegram.messenger.MessagesController; import org.telegram.messenger.UserConfig; import org.telegram.messenger.UserObject; @@ -101,7 +100,6 @@ public class HintDialogCell extends FrameLayout { public void update() { int uid = (int) dialog_id; - TLRPC.FileLocation photo = null; if (uid > 0) { currentUser = MessagesController.getInstance(currentAccount).getUser(uid); avatarDrawable.setInfo(currentUser); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/PatternCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/PatternCell.java index fce5d1931..bdafa19d4 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Cells/PatternCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/PatternCell.java @@ -1,13 +1,21 @@ package org.telegram.ui.Cells; +import android.annotation.SuppressLint; import android.content.Context; +import android.graphics.BlendMode; import android.graphics.Canvas; import android.graphics.LinearGradient; +import android.graphics.Outline; import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; import android.graphics.Rect; import android.graphics.RectF; import android.graphics.Shader; +import android.os.Build; import android.text.TextUtils; +import android.view.View; +import android.view.ViewOutlineProvider; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.DownloadController; @@ -20,6 +28,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.Components.BackgroundGradientDrawable; import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.MediaActionDrawable; +import org.telegram.ui.Components.MotionBackgroundDrawable; import org.telegram.ui.Components.RadialProgress2; import java.io.File; @@ -32,11 +41,14 @@ public class PatternCell extends BackupImageView implements DownloadController.F private TLRPC.TL_wallPaper currentPattern; private int currentAccount = UserConfig.selectedAccount; private LinearGradient gradientShader; + private int currentBackgroundColor; private int currentGradientColor1; private int currentGradientColor2; + private int currentGradientColor3; private int currentGradientAngle; private Paint backgroundPaint; + private MotionBackgroundDrawable backgroundDrawable; private int TAG; @@ -45,10 +57,14 @@ public class PatternCell extends BackupImageView implements DownloadController.F public interface PatternCellDelegate { TLRPC.TL_wallPaper getSelectedPattern(); - int getPatternColor(); - int getBackgroundGradientColor(); + int getBackgroundGradientColor1(); + int 
getBackgroundGradientColor2(); + int getBackgroundGradientColor3(); int getBackgroundGradientAngle(); int getBackgroundColor(); + int getPatternColor(); + int getCheckColor(); + float getIntensity(); } public PatternCell(Context context, int maxSize, PatternCellDelegate patternCellDelegate) { @@ -63,6 +79,16 @@ public class PatternCell extends BackupImageView implements DownloadController.F backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); TAG = DownloadController.getInstance(currentAccount).generateObserverTag(); + + if (Build.VERSION.SDK_INT >= 21) { + setOutlineProvider(new ViewOutlineProvider() { + @Override + public void getOutline(View view, Outline outline) { + outline.setRoundRect(AndroidUtilities.dp(1), AndroidUtilities.dp(1), view.getMeasuredWidth() - AndroidUtilities.dp(1), view.getMeasuredHeight() - AndroidUtilities.dp(1), AndroidUtilities.dp(6)); + } + }); + setClipToOutline(true); + } } public void setPattern(TLRPC.TL_wallPaper wallPaper) { @@ -93,6 +119,11 @@ public class PatternCell extends BackupImageView implements DownloadController.F invalidate(); } + @Override + public void invalidate() { + super.invalidate(); + } + private void updateButtonState(Object image, boolean ifSame, boolean animated) { if (image instanceof TLRPC.TL_wallPaper || image instanceof MediaController.SearchImage) { File path; @@ -139,38 +170,77 @@ public class PatternCell extends BackupImageView implements DownloadController.F } } + @SuppressLint("DrawAllocation") @Override protected void onDraw(Canvas canvas) { - getImageReceiver().setAlpha(0.8f); + float intensity = delegate.getIntensity(); + imageReceiver.setAlpha(Math.abs(intensity)); + imageReceiver.setBlendMode(null); int backgroundColor = delegate.getBackgroundColor(); - int backgroundGradientColor = delegate.getBackgroundGradientColor(); + int backgroundGradientColor1 = delegate.getBackgroundGradientColor1(); + int backgroundGradientColor2 = delegate.getBackgroundGradientColor2(); + int backgroundGradientColor3 = 
delegate.getBackgroundGradientColor3(); int backgroundGradientAngle = delegate.getBackgroundGradientAngle(); - int patternColor = delegate.getPatternColor(); + int checkColor = delegate.getCheckColor(); - if (backgroundGradientColor != 0) { - if (gradientShader == null || backgroundColor != currentGradientColor1 || backgroundGradientColor != currentGradientColor2 || backgroundGradientAngle != currentGradientAngle) { - currentGradientColor1 = backgroundColor; - currentGradientColor2 = backgroundGradientColor; + if (backgroundGradientColor1 != 0) { + if (gradientShader == null || backgroundColor != currentBackgroundColor || backgroundGradientColor1 != currentGradientColor1 || backgroundGradientColor2 != currentGradientColor2 || backgroundGradientColor3 != currentGradientColor3 || backgroundGradientAngle != currentGradientAngle) { + currentBackgroundColor = backgroundColor; + currentGradientColor1 = backgroundGradientColor1; + currentGradientColor2 = backgroundGradientColor2; + currentGradientColor3 = backgroundGradientColor3; currentGradientAngle = backgroundGradientAngle; - final Rect r = BackgroundGradientDrawable.getGradientPoints(currentGradientAngle, getMeasuredWidth(), getMeasuredHeight()); - gradientShader = new LinearGradient(r.left, r.top, r.right, r.bottom, new int[]{backgroundColor, backgroundGradientColor}, null, Shader.TileMode.CLAMP); + if (backgroundGradientColor2 != 0) { + gradientShader = null; + if (backgroundDrawable != null) { + backgroundDrawable.setColors(backgroundColor, backgroundGradientColor1, backgroundGradientColor2, backgroundGradientColor3, false); + } else { + backgroundDrawable = new MotionBackgroundDrawable(backgroundColor, backgroundGradientColor1, backgroundGradientColor2, backgroundGradientColor3, true); + backgroundDrawable.setRoundRadius(AndroidUtilities.dp(6)); + backgroundDrawable.setParentView(this); + } + if (intensity < 0) { + imageReceiver.setGradientBitmap(backgroundDrawable.getBitmap()); + } else { + 
imageReceiver.setGradientBitmap(null); + if (Build.VERSION.SDK_INT >= 29) { + imageReceiver.setBlendMode(BlendMode.SOFT_LIGHT); + } else { + imageReceiver.setColorFilter(new PorterDuffColorFilter(delegate.getPatternColor(), PorterDuff.Mode.SRC_IN)); + } + } + } else { + final Rect r = BackgroundGradientDrawable.getGradientPoints(currentGradientAngle, getMeasuredWidth(), getMeasuredHeight()); + gradientShader = new LinearGradient(r.left, r.top, r.right, r.bottom, new int[]{backgroundColor, backgroundGradientColor1}, null, Shader.TileMode.CLAMP); + backgroundDrawable = null; + imageReceiver.setGradientBitmap(null); + } } } else { gradientShader = null; + backgroundDrawable = null; + imageReceiver.setGradientBitmap(null); } - backgroundPaint.setShader(gradientShader); - if (gradientShader == null) { - backgroundPaint.setColor(backgroundColor); + if (backgroundDrawable != null) { + backgroundDrawable.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight()); + backgroundDrawable.draw(canvas); + } else { + backgroundPaint.setShader(gradientShader); + if (gradientShader == null) { + backgroundPaint.setColor(backgroundColor); + } + rect.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), backgroundPaint); } - rect.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); - canvas.drawRoundRect(rect, AndroidUtilities.dp(6), AndroidUtilities.dp(6), backgroundPaint); super.onDraw(canvas); - radialProgress.setColors(patternColor, patternColor, 0xffffffff, 0xffffffff); - radialProgress.draw(canvas); + if (radialProgress.getIcon() != MediaActionDrawable.ICON_NONE) { + radialProgress.setColors(checkColor, checkColor, 0xffffffff, 0xffffffff); + radialProgress.draw(canvas); + } } @Override @@ -180,23 +250,33 @@ public class PatternCell extends BackupImageView implements DownloadController.F @Override public void onFailedDownload(String fileName, boolean canceled) { - if (canceled) { - 
radialProgress.setIcon(MediaActionDrawable.ICON_NONE, false, true); - } else { - updateButtonState(currentPattern, true, canceled); + TLRPC.TL_wallPaper selectedPattern = delegate.getSelectedPattern(); + boolean isSelected = currentPattern == null && selectedPattern == null || selectedPattern != null && currentPattern != null && currentPattern.id == selectedPattern.id; + if (isSelected) { + if (canceled) { + radialProgress.setIcon(MediaActionDrawable.ICON_NONE, false, true); + } else { + updateButtonState(currentPattern, true, canceled); + } } } @Override public void onSuccessDownload(String fileName) { radialProgress.setProgress(1, true); - updateButtonState(currentPattern, false, true); + TLRPC.TL_wallPaper selectedPattern = delegate.getSelectedPattern(); + boolean isSelected = currentPattern == null && selectedPattern == null || selectedPattern != null && currentPattern != null && currentPattern.id == selectedPattern.id; + if (isSelected) { + updateButtonState(currentPattern, false, true); + } } @Override public void onProgressDownload(String fileName, long downloadedSize, long totalSize) { radialProgress.setProgress(Math.min(1f, downloadedSize / (float) totalSize), true); - if (radialProgress.getIcon() != MediaActionDrawable.ICON_EMPTY) { + TLRPC.TL_wallPaper selectedPattern = delegate.getSelectedPattern(); + boolean isSelected = currentPattern == null && selectedPattern == null || selectedPattern != null && currentPattern != null && currentPattern.id == selectedPattern.id; + if (isSelected && radialProgress.getIcon() != MediaActionDrawable.ICON_EMPTY) { updateButtonState(currentPattern, false, true); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoEditRadioCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoEditRadioCell.java index bc0702a30..af1c03f67 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoEditRadioCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/PhotoEditRadioCell.java @@ 
-70,18 +70,15 @@ public class PhotoEditRadioCell extends FrameLayout { radioButton.setSize(AndroidUtilities.dp(20)); radioButton.setTag(a); tintButtonsContainer.addView(radioButton, LayoutHelper.createLinear(0, LayoutHelper.MATCH_PARENT, 1.0f / tintShadowColors.length)); - radioButton.setOnClickListener(new OnClickListener() { - @Override - public void onClick(View v) { - RadioButton radioButton = (RadioButton) v; - if (currentType == 0) { - currentColor = tintShadowColors[(Integer) radioButton.getTag()]; - } else { - currentColor = tintHighlighsColors[(Integer) radioButton.getTag()]; - } - updateSelectedTintButton(true); - onClickListener.onClick(PhotoEditRadioCell.this); + radioButton.setOnClickListener(v -> { + RadioButton radioButton1 = (RadioButton) v; + if (currentType == 0) { + currentColor = tintShadowColors[(Integer) radioButton1.getTag()]; + } else { + currentColor = tintHighlighsColors[(Integer) radioButton1.getTag()]; } + updateSelectedTintButton(true); + onClickListener.onClick(PhotoEditRadioCell.this); }); } addView(tintButtonsContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 40, Gravity.LEFT | Gravity.TOP, 96, 0, 24, 0)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ProfileSearchCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ProfileSearchCell.java index c1797d11f..e8d526ed9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ProfileSearchCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ProfileSearchCell.java @@ -33,6 +33,8 @@ import org.telegram.tgnet.TLRPC; import org.telegram.messenger.UserConfig; import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.CheckBox2; +import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.NotificationsSettingsActivity; public class ProfileSearchCell extends BaseCell { @@ -85,12 +87,20 @@ public class ProfileSearchCell extends BaseCell { private RectF rect = new 
RectF(); + CheckBox2 checkBox; + public ProfileSearchCell(Context context) { super(context); avatarImage = new ImageReceiver(this); avatarImage.setRoundRadius(AndroidUtilities.dp(23)); avatarDrawable = new AvatarDrawable(); + + checkBox = new CheckBox2(context, 21); + checkBox.setColor(null, Theme.key_windowBackgroundWhite, Theme.key_checkboxCheck); + checkBox.setDrawUnchecked(false); + checkBox.setDrawBackgroundAsArc(3); + addView(checkBox); } public void setData(TLObject object, TLRPC.EncryptedChat ec, CharSequence n, CharSequence s, boolean needCount, boolean saved) { @@ -191,6 +201,9 @@ public class ProfileSearchCell extends BaseCell { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (checkBox != null) { + checkBox.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(24), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(24), MeasureSpec.EXACTLY)); + } setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec), AndroidUtilities.dp(60) + (useSeparator ? 1 : 0)); } @@ -199,6 +212,11 @@ public class ProfileSearchCell extends BaseCell { if (user == null && chat == null && encryptedChat == null) { return; } + if (checkBox != null) { + int x = LocaleController.isRTL ? 
(right - left) - AndroidUtilities.dp(42) : AndroidUtilities.dp(42); + int y = AndroidUtilities.dp(36); + checkBox.layout(x, y, x + checkBox.getMeasuredWidth(), y + checkBox.getMeasuredHeight()); + } if (changed) { buildLayout(); } @@ -394,28 +412,26 @@ public class ProfileSearchCell extends BaseCell { nameTop = AndroidUtilities.dp(20); } } else { - if (chat != null) { - if (ChatObject.isChannel(chat) && !chat.megagroup) { - if (chat.participants_count != 0) { - statusString = LocaleController.formatPluralString("Subscribers", chat.participants_count); - } else { - if (TextUtils.isEmpty(chat.username)) { - statusString = LocaleController.getString("ChannelPrivate", R.string.ChannelPrivate).toLowerCase(); - } else { - statusString = LocaleController.getString("ChannelPublic", R.string.ChannelPublic).toLowerCase(); - } - } + if (ChatObject.isChannel(chat) && !chat.megagroup) { + if (chat.participants_count != 0) { + statusString = LocaleController.formatPluralString("Subscribers", chat.participants_count); } else { - if (chat.participants_count != 0) { - statusString = LocaleController.formatPluralString("Members", chat.participants_count); + if (TextUtils.isEmpty(chat.username)) { + statusString = LocaleController.getString("ChannelPrivate", R.string.ChannelPrivate).toLowerCase(); } else { - if (chat.has_geo) { - statusString = LocaleController.getString("MegaLocation", R.string.MegaLocation); - } else if (TextUtils.isEmpty(chat.username)) { - statusString = LocaleController.getString("MegaPrivate", R.string.MegaPrivate).toLowerCase(); - } else { - statusString = LocaleController.getString("MegaPublic", R.string.MegaPublic).toLowerCase(); - } + statusString = LocaleController.getString("ChannelPublic", R.string.ChannelPublic).toLowerCase(); + } + } + } else { + if (chat.participants_count != 0) { + statusString = LocaleController.formatPluralString("Members", chat.participants_count); + } else { + if (chat.has_geo) { + statusString = 
LocaleController.getString("MegaLocation", R.string.MegaLocation); + } else if (TextUtils.isEmpty(chat.username)) { + statusString = LocaleController.getString("MegaPrivate", R.string.MegaPrivate).toLowerCase(); + } else { + statusString = LocaleController.getString("MegaPublic", R.string.MegaPublic).toLowerCase(); } } } @@ -428,6 +444,7 @@ public class ProfileSearchCell extends BaseCell { nameTop = AndroidUtilities.dp(9); nameLockTop -= AndroidUtilities.dp(10); } else { + nameTop = AndroidUtilities.dp(20); statusLayout = null; } @@ -501,14 +518,14 @@ public class ProfileSearchCell extends BaseCell { if (user.photo != null) { photo = user.photo.photo_small; } - avatarImage.setForUserOrChat(user, avatarDrawable); + avatarImage.setImage(ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_SMALL), "50_50", ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_STRIPPED), "50_50", avatarDrawable, user, 0); } } else if (chat != null) { if (chat.photo != null) { photo = chat.photo.photo_small; } avatarDrawable.setInfo(chat); - avatarImage.setForUserOrChat(chat, avatarDrawable); + avatarImage.setImage(ImageLocation.getForUserOrChat(chat, ImageLocation.TYPE_SMALL), "50_50", ImageLocation.getForUserOrChat(chat, ImageLocation.TYPE_STRIPPED), "50_50", avatarDrawable, chat, 0); } else { avatarDrawable.setInfo(0, null, null); avatarImage.setImage(null, null, avatarDrawable, null, null, 0); @@ -661,4 +678,15 @@ public class ProfileSearchCell extends BaseCell { } info.setText(builder.toString()); } + + public long getDialogId() { + return dialog_id; + } + + public void setChecked(boolean checked, boolean animated) { + if (checkBox == null) { + return; + } + checkBox.setChecked(checked, animated); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedDocumentCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedDocumentCell.java index 6d4fc46a5..b45fbaa7d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedDocumentCell.java 
+++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedDocumentCell.java @@ -8,38 +8,27 @@ package org.telegram.ui.Cells; -import android.animation.Animator; -import android.animation.AnimatorSet; -import android.animation.ObjectAnimator; import android.content.Context; import android.graphics.Canvas; -import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.drawable.Drawable; import android.os.Build; import android.text.SpannableStringBuilder; import android.text.TextUtils; -import android.text.style.ReplacementSpan; import android.transition.ChangeBounds; import android.transition.Fade; import android.transition.TransitionManager; import android.transition.TransitionSet; -import android.transition.TransitionValues; -import android.transition.Visibility; import android.util.TypedValue; import android.view.Gravity; import android.view.View; -import android.view.ViewGroup; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; -import androidx.annotation.NonNull; -import androidx.annotation.Nullable; - import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.DownloadController; import org.telegram.messenger.ImageLoader; @@ -51,7 +40,6 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.R; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.TLRPC; -import org.telegram.ui.ActionBar.ActionBarMenuItem; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.CheckBox2; @@ -80,6 +68,8 @@ public class SharedDocumentCell extends FrameLayout implements DownloadControlle private TextView rightDateTextView; private TextView captionTextView; + private boolean drawDownloadIcon = true; + private boolean needDivider; private int 
currentAccount = UserConfig.selectedAccount; @@ -160,7 +150,7 @@ public class SharedDocumentCell extends FrameLayout implements DownloadControlle nameTextView.setEllipsize(TextUtils.TruncateAt.END); nameTextView.setGravity((LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT) | Gravity.CENTER_VERTICAL); - LinearLayout linearLayout = null; + LinearLayout linearLayout; if (viewType == VIEW_TYPE_PICKER) { nameTextView.setLines(1); nameTextView.setMaxLines(1); @@ -245,6 +235,10 @@ public class SharedDocumentCell extends FrameLayout implements DownloadControlle } } + public void setDrawDownloadIcon(boolean value) { + drawDownloadIcon = value; + } + public void setTextAndValueAndTypeAndThumb(String text, String value, String type, String thumb, int resId, boolean divider) { nameTextView.setText(text); dateTextView.setText(value); @@ -379,7 +373,7 @@ public class SharedDocumentCell extends FrameLayout implements DownloadControlle loading = false; TLRPC.Document document = messageObject.getDocument(); - if (messageObject != null && document != null) { + if (document != null) { int idx; String name = null; if (messageObject.isMusic()) { @@ -422,7 +416,11 @@ public class SharedDocumentCell extends FrameLayout implements DownloadControlle thumbImageView.getImageReceiver().setShouldGenerateQualityThumb(bigthumb == null); thumbImageView.setVisibility(VISIBLE); - thumbImageView.setImage(ImageLocation.getForDocument(bigthumb, document), "40_40", ImageLocation.getForDocument(thumb, document), "40_40_b", null, 0, 1, messageObject); + if (messageObject.strippedThumb != null) { + thumbImageView.setImage(ImageLocation.getForDocument(bigthumb, document), "40_40", null, null, messageObject.strippedThumb, null, null, 1, messageObject); + } else { + thumbImageView.setImage(ImageLocation.getForDocument(bigthumb, document), "40_40", ImageLocation.getForDocument(thumb, document), "40_40_b", null, 0, 1, messageObject); + } } long date = (long) messageObject.messageOwner.date * 1000; if 
(viewType == VIEW_TYPE_GLOBAL_SEARCH) { @@ -480,7 +478,7 @@ public class SharedDocumentCell extends FrameLayout implements DownloadControlle } if (message != null && message.messageOwner.media != null) { loaded = false; - if (message.attachPathExists || message.mediaExists) { + if (message.attachPathExists || message.mediaExists || !drawDownloadIcon) { statusImageView.setVisibility(INVISIBLE); progressView.setVisibility(INVISIBLE); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell.java index a3945b041..bc28bf16f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/SharedPhotoVideoCell.java @@ -192,7 +192,11 @@ public class SharedPhotoVideoCell extends FrameLayout { qualityThumb = null; } if (thumb != null) { - imageView.setImage(ImageLocation.getForDocument(qualityThumb, document), "100_100", ImageLocation.getForDocument(thumb, document), "b", ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.photo_placeholder_in), null, null, 0, messageObject); + if (messageObject.strippedThumb != null) { + imageView.setImage(ImageLocation.getForDocument(qualityThumb, document), "100_100", null, messageObject.strippedThumb, messageObject); + } else { + imageView.setImage(ImageLocation.getForDocument(qualityThumb, document), "100_100", ImageLocation.getForDocument(thumb, document), "b", ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.photo_placeholder_in), null, null, 0, messageObject); + } } else { imageView.setImageResource(R.drawable.photo_placeholder_in); } @@ -204,9 +208,17 @@ public class SharedPhotoVideoCell extends FrameLayout { if (currentPhotoObject == currentPhotoObjectThumb) { currentPhotoObjectThumb = null; } - imageView.getImageReceiver().setImage(ImageLocation.getForObject(currentPhotoObject, 
messageObject.photoThumbsObject), "100_100", ImageLocation.getForObject(currentPhotoObjectThumb, messageObject.photoThumbsObject), "b", currentPhotoObject != null ? currentPhotoObject.size : 0, null, messageObject, messageObject.shouldEncryptPhotoOrVideo() ? 2 : 1); + if (messageObject.strippedThumb != null) { + imageView.getImageReceiver().setImage(ImageLocation.getForObject(currentPhotoObject, messageObject.photoThumbsObject), "100_100", null, null, messageObject.strippedThumb, currentPhotoObject != null ? currentPhotoObject.size : 0, null, messageObject, messageObject.shouldEncryptPhotoOrVideo() ? 2 : 1); + } else { + imageView.getImageReceiver().setImage(ImageLocation.getForObject(currentPhotoObject, messageObject.photoThumbsObject), "100_100", ImageLocation.getForObject(currentPhotoObjectThumb, messageObject.photoThumbsObject), "b", currentPhotoObject != null ? currentPhotoObject.size : 0, null, messageObject, messageObject.shouldEncryptPhotoOrVideo() ? 2 : 1); + } } else { - imageView.setImage(null, null, ImageLocation.getForObject(currentPhotoObjectThumb, messageObject.photoThumbsObject), "b", ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.photo_placeholder_in), null, null, 0, messageObject); + if (messageObject.strippedThumb != null) { + imageView.setImage(null, null, null, null, messageObject.strippedThumb, null, null, 0, messageObject); + } else { + imageView.setImage(null, null, ImageLocation.getForObject(currentPhotoObjectThumb, messageObject.photoThumbsObject), "b", ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.photo_placeholder_in), null, null, 0, messageObject); + } } } else { videoInfoContainer.setVisibility(INVISIBLE); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerCell.java index d37ca70ce..96b2ced06 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerCell.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerCell.java @@ -23,6 +23,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.DocumentObject; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.ImageReceiver; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; @@ -140,6 +141,21 @@ public class StickerCell extends FrameLayout { return imageView.getImageReceiver().getBitmap() != null; } + public MessageObject.SendAnimationData getSendAnimationData() { + ImageReceiver imageReceiver = imageView.getImageReceiver(); + if (!imageReceiver.hasNotThumb()) { + return null; + } + MessageObject.SendAnimationData data = new MessageObject.SendAnimationData(); + int[] position = new int[2]; + imageView.getLocationInWindow(position); + data.x = imageReceiver.getCenterX() + position[0]; + data.y = imageReceiver.getCenterY() + position[1]; + data.width = imageReceiver.getImageWidth(); + data.height = imageReceiver.getImageHeight(); + return data; + } + @Override protected boolean drawChild(Canvas canvas, View child, long drawingTime) { boolean result = super.drawChild(canvas, child, drawingTime); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java index ca8fa8f2c..83073f715 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerEmojiCell.java @@ -20,6 +20,7 @@ import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.DocumentObject; +import org.telegram.messenger.ImageReceiver; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLoader; @@ -27,6 +28,7 @@ import 
org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; +import org.telegram.messenger.SendMessagesHelper; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; @@ -38,7 +40,9 @@ public class StickerEmojiCell extends FrameLayout { private BackupImageView imageView; private TLRPC.Document sticker; + private SendMessagesHelper.ImportingSticker stickerPath; private Object parentObject; + private String currentEmoji; private TextView emojiTextView; private float alpha = 1; private boolean changingAlpha; @@ -71,6 +75,14 @@ public class StickerEmojiCell extends FrameLayout { return sticker; } + public SendMessagesHelper.ImportingSticker getStickerPath() { + return stickerPath != null && stickerPath.validated ? stickerPath : null; + } + + public String getEmoji() { + return currentEmoji; + } + public Object getParentObject() { return parentObject; } @@ -84,11 +96,44 @@ public class StickerEmojiCell extends FrameLayout { } public void setSticker(TLRPC.Document document, Object parent, boolean showEmoji) { - setSticker(document, parent, null, showEmoji); + setSticker(document, null, parent, null, showEmoji); } - public void setSticker(TLRPC.Document document, Object parent, String emoji, boolean showEmoji) { - if (document != null) { + public void setSticker(SendMessagesHelper.ImportingSticker path) { + setSticker(null, path, null, path.emoji, path.emoji != null); + } + + public MessageObject.SendAnimationData getSendAnimationData() { + ImageReceiver imageReceiver = imageView.getImageReceiver(); + if (!imageReceiver.hasNotThumb()) { + return null; + } + MessageObject.SendAnimationData data = new MessageObject.SendAnimationData(); + int[] position = new int[2]; + imageView.getLocationInWindow(position); + data.x = imageReceiver.getCenterX() + position[0]; + data.y = imageReceiver.getCenterY() + position[1]; + 
data.width = imageReceiver.getImageWidth(); + data.height = imageReceiver.getImageHeight(); + return data; + } + + public void setSticker(TLRPC.Document document, SendMessagesHelper.ImportingSticker path, Object parent, String emoji, boolean showEmoji) { + currentEmoji = emoji; + if (path != null) { + stickerPath = path; + if (path.validated) { + imageView.setImage(ImageLocation.getForPath(path.path), "80_80", null, null, DocumentObject.getSvgRectThumb(Theme.key_dialogBackgroundGray, 1.0f), null, path.animated ? "tgs" : null, 0, null); + } else { + imageView.setImage(null, null, null, null, DocumentObject.getSvgRectThumb(Theme.key_dialogBackgroundGray, 1.0f), null, path.animated ? "tgs" : null, 0, null); + } + if (emoji != null) { + emojiTextView.setText(Emoji.replaceEmoji(emoji, emojiTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(16), false)); + emojiTextView.setVisibility(VISIBLE); + } else { + emojiTextView.setVisibility(INVISIBLE); + } + } else if (document != null) { sticker = document; parentObject = parent; TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 90); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java index 85e70feb7..9a928e678 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/StickerSetCell.java @@ -193,7 +193,7 @@ public class StickerSetCell extends FrameLayout { imageLocation = ImageLocation.getForDocument(thumb, sticker); } else { TLRPC.PhotoSize thumb = (TLRPC.PhotoSize) object; - imageLocation = ImageLocation.getForSticker(thumb, sticker); + imageLocation = ImageLocation.getForSticker(thumb, sticker, set.set.thumb_version); } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCell.java 
b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCell.java index 98298e579..c878fbfa8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextCell.java @@ -26,9 +26,9 @@ import org.telegram.ui.ActionBar.Theme; public class TextCell extends FrameLayout { - private SimpleTextView textView; - private SimpleTextView valueTextView; - private ImageView imageView; + public final SimpleTextView textView; + public final SimpleTextView valueTextView; + public final ImageView imageView; private ImageView valueImageView; private int leftPadding; private boolean needDivider; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSettingsCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSettingsCell.java index 5a8fdfb80..dcc5f6c16 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSettingsCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/TextSettingsCell.java @@ -12,11 +12,14 @@ import android.animation.Animator; import android.animation.ObjectAnimator; import android.content.Context; import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.text.TextUtils; import android.util.TypedValue; import android.view.Gravity; +import android.view.View; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.FrameLayout; import android.widget.ImageView; @@ -36,6 +39,17 @@ public class TextSettingsCell extends FrameLayout { private ImageView valueImageView; private boolean needDivider; private boolean canDisable; + private boolean drawLoading; + private int padding; + + private boolean incrementLoadingProgress; + private float loadingProgress; + private float drawLoadingProgress; + private int loadingSize; + private boolean measureDelay; + private int changeProgressStartDelay; + + Paint 
paint; public TextSettingsCell(Context context) { this(context, 21); @@ -43,6 +57,7 @@ public class TextSettingsCell extends FrameLayout { public TextSettingsCell(Context context, int padding) { super(context); + this.padding = padding; textView = new TextView(context); textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); @@ -89,6 +104,14 @@ public class TextSettingsCell extends FrameLayout { textView.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(getMeasuredHeight(), MeasureSpec.EXACTLY)); } + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + if (measureDelay && getParent() != null) { + changeProgressStartDelay = (int) ((getTop() / (float) ((View) getParent()).getMeasuredHeight()) * 150f); + } + } + public TextView getTextView() { return textView; } @@ -178,15 +201,76 @@ public class TextSettingsCell extends FrameLayout { } @Override - protected void onDraw(Canvas canvas) { + protected void dispatchDraw(Canvas canvas) { + if (drawLoading || drawLoadingProgress != 0) { + if (paint == null) { + paint = new Paint(Paint.ANTI_ALIAS_FLAG); + paint.setColor(Theme.getColor(Theme.key_dialogSearchBackground)); + } + //LocaleController.isRTL ? 
Gravity.LEFT : Gravity.RIGHT; + if (incrementLoadingProgress) { + loadingProgress += 16 / 1000f; + if (loadingProgress > 1f) { + loadingProgress = 1f; + incrementLoadingProgress = false; + } + } else { + loadingProgress -= 16 / 1000f; + if (loadingProgress < 0) { + loadingProgress = 0; + incrementLoadingProgress = true; + } + } + + if (changeProgressStartDelay > 0) { + changeProgressStartDelay -= 15; + } else if (drawLoading && drawLoadingProgress != 1f) { + drawLoadingProgress += 16 / 150f; + if (drawLoadingProgress > 1f) { + drawLoadingProgress = 1f; + } + } else if (!drawLoading && drawLoadingProgress != 0) { + drawLoadingProgress -= 16 / 150f; + if (drawLoadingProgress < 0) { + drawLoadingProgress = 0; + } + } + + float alpha = (0.6f + 0.4f * loadingProgress) * drawLoadingProgress; + paint.setAlpha((int) (255 * alpha)); + int cy = getMeasuredHeight() >> 1; + AndroidUtilities.rectTmp.set(getMeasuredWidth() - AndroidUtilities.dp(padding) - AndroidUtilities.dp(loadingSize), cy - AndroidUtilities.dp(3), getMeasuredWidth() - AndroidUtilities.dp(padding), cy + AndroidUtilities.dp(3)); + if (LocaleController.isRTL) { + AndroidUtilities.rectTmp.left = getMeasuredWidth() - AndroidUtilities.rectTmp.left; + AndroidUtilities.rectTmp.right = getMeasuredWidth() - AndroidUtilities.rectTmp.right; + } + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(3), AndroidUtilities.dp(3), paint); + invalidate(); + } + valueTextView.setAlpha(1f - drawLoadingProgress); + super.dispatchDraw(canvas); + if (needDivider) { canvas.drawLine(LocaleController.isRTL ? 0 : AndroidUtilities.dp(20), getMeasuredHeight() - 1, getMeasuredWidth() - (LocaleController.isRTL ? 
AndroidUtilities.dp(20) : 0), getMeasuredHeight() - 1, Theme.dividerPaint); } } + @Override public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); info.setEnabled(isEnabled()); } + + public void setDrawLoading(boolean drawLoading, int size, boolean animated) { + this.drawLoading = drawLoading; + this.loadingSize = size; + + if (!animated) { + drawLoadingProgress = drawLoading ? 1f : 0f; + } else { + measureDelay = true; + } + invalidate(); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemePreviewMessagesCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemePreviewMessagesCell.java index 2f0b5e998..2f5519620 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemePreviewMessagesCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemePreviewMessagesCell.java @@ -20,6 +20,7 @@ import org.telegram.ui.ActionBar.ActionBarLayout; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.BackgroundGradientDrawable; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MotionBackgroundDrawable; public class ThemePreviewMessagesCell extends LinearLayout { @@ -172,7 +173,7 @@ public class ThemePreviewMessagesCell extends LinearLayout { } else { drawable.setAlpha(255); } - if (drawable instanceof ColorDrawable || drawable instanceof GradientDrawable) { + if (drawable instanceof ColorDrawable || drawable instanceof GradientDrawable || drawable instanceof MotionBackgroundDrawable) { drawable.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight()); if (drawable instanceof BackgroundGradientDrawable) { final BackgroundGradientDrawable backgroundGradientDrawable = (BackgroundGradientDrawable) drawable; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemesHorizontalListCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemesHorizontalListCell.java index 6749b434e..302603e8b 100644 
--- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemesHorizontalListCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/ThemesHorizontalListCell.java @@ -47,6 +47,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MotionBackgroundDrawable; import org.telegram.ui.Components.RadioButton; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.ThemeActivity; @@ -237,6 +238,7 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi for (int b = 0; b < modes.length; b++) { if ("blur".equals(modes[b])) { themeInfo.isBlured = true; + break; } } } @@ -247,8 +249,14 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi String bgColor = uri.getQueryParameter("bg_color"); if (!TextUtils.isEmpty(bgColor)) { themeInfo.patternBgColor = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; - if (bgColor.length() > 6) { - themeInfo.patternBgGradientColor = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; + if (bgColor.length() >= 13 && AndroidUtilities.isValidWallChar(bgColor.charAt(6))) { + themeInfo.patternBgGradientColor1 = Integer.parseInt(bgColor.substring(7, 13), 16) | 0xff000000; + } + if (bgColor.length() >= 20 && AndroidUtilities.isValidWallChar(bgColor.charAt(13))) { + themeInfo.patternBgGradientColor2 = Integer.parseInt(bgColor.substring(14, 20), 16) | 0xff000000; + } + if (bgColor.length() == 27 && AndroidUtilities.isValidWallChar(bgColor.charAt(20))) { + themeInfo.patternBgGradientColor3 = Integer.parseInt(bgColor.substring(21), 16) | 0xff000000; } } } catch (Exception ignore) { @@ -277,7 +285,7 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi } else { if ((idx = line.indexOf('=')) != -1) { String key = line.substring(0, idx); - if (key.equals(Theme.key_chat_inBubble) 
|| key.equals(Theme.key_chat_outBubble) || key.equals(Theme.key_chat_wallpaper) || key.equals(Theme.key_chat_wallpaper_gradient_to)) { + if (key.equals(Theme.key_chat_inBubble) || key.equals(Theme.key_chat_outBubble) || key.equals(Theme.key_chat_wallpaper) || key.equals(Theme.key_chat_wallpaper_gradient_to1) || key.equals(Theme.key_chat_wallpaper_gradient_to2) || key.equals(Theme.key_chat_wallpaper_gradient_to3)) { String param = line.substring(idx + 1); int value; if (param.length() > 0 && param.charAt(0) == '#') { @@ -299,8 +307,14 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi case Theme.key_chat_wallpaper: themeInfo.setPreviewBackgroundColor(value); break; - case Theme.key_chat_wallpaper_gradient_to: - themeInfo.previewBackgroundGradientColor = value; + case Theme.key_chat_wallpaper_gradient_to1: + themeInfo.previewBackgroundGradientColor1 = value; + break; + case Theme.key_chat_wallpaper_gradient_to2: + themeInfo.previewBackgroundGradientColor2 = value; + break; + case Theme.key_chat_wallpaper_gradient_to3: + themeInfo.previewBackgroundGradientColor3 = value; break; } } @@ -330,7 +344,7 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi req.wallpaper = inputWallPaperSlug; ConnectionsManager.getInstance(themeInfo.account).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { if (response instanceof TLRPC.TL_wallPaper) { - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) response; + TLRPC.WallPaper wallPaper = (TLRPC.WallPaper) response; String name = FileLoader.getAttachFileName(wallPaper.document); if (!loadingThemes.containsKey(name)) { loadingThemes.put(name, themeInfo); @@ -362,8 +376,13 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi bitmapShader = null; backgroundDrawable = null; double[] hsv = null; - if (themeInfo.previewBackgroundGradientColor != 0) { - final GradientDrawable drawable = new 
GradientDrawable(GradientDrawable.Orientation.BL_TR, new int[]{themeInfo.getPreviewBackgroundColor(), themeInfo.previewBackgroundGradientColor}); + if (themeInfo.previewBackgroundGradientColor1 != 0 && themeInfo.previewBackgroundGradientColor2 != 0) { + final MotionBackgroundDrawable drawable = new MotionBackgroundDrawable(themeInfo.getPreviewBackgroundColor(), themeInfo.previewBackgroundGradientColor1, themeInfo.previewBackgroundGradientColor2, themeInfo.previewBackgroundGradientColor3, true); + drawable.setRoundRadius(AndroidUtilities.dp(6)); + backgroundDrawable = drawable; + hsv = AndroidUtilities.rgbToHsv(Color.red(themeInfo.getPreviewBackgroundColor()), Color.green(themeInfo.getPreviewBackgroundColor()), Color.blue(themeInfo.getPreviewBackgroundColor())); + } else if (themeInfo.previewBackgroundGradientColor1 != 0) { + final GradientDrawable drawable = new GradientDrawable(GradientDrawable.Orientation.BL_TR, new int[]{themeInfo.getPreviewBackgroundColor(), themeInfo.previewBackgroundGradientColor1}); drawable.setCornerRadius(AndroidUtilities.dp(6)); backgroundDrawable = drawable; hsv = AndroidUtilities.rgbToHsv(Color.red(themeInfo.getPreviewBackgroundColor()), Color.green(themeInfo.getPreviewBackgroundColor()), Color.blue(themeInfo.getPreviewBackgroundColor())); @@ -385,10 +404,10 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi hasWhiteBackground = false; } if (themeInfo.getPreviewBackgroundColor() == 0 && themeInfo.previewParsed && backgroundDrawable == null) { - BitmapDrawable drawable = (BitmapDrawable) getResources().getDrawable(R.drawable.catstile).mutate(); - bitmapShader = new BitmapShader(drawable.getBitmap(), Shader.TileMode.REPEAT, Shader.TileMode.REPEAT); - bitmapPaint.setShader(bitmapShader); - backgroundDrawable = drawable; + backgroundDrawable = Theme.createDefaultWallpaper(100, 200); + if (backgroundDrawable instanceof MotionBackgroundDrawable) { + ((MotionBackgroundDrawable) 
backgroundDrawable).setRoundRadius(AndroidUtilities.dp(6)); + } } invalidate(); } @@ -837,8 +856,8 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi protected void onAttachedToWindow() { super.onAttachedToWindow(); for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { - NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.fileLoadFailed); } } @@ -846,14 +865,14 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi protected void onDetachedFromWindow() { super.onDetachedFromWindow(); for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { - NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.fileLoadFailed); } } @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.fileDidLoad) { + if (id == NotificationCenter.fileLoaded) { String fileName = (String) args[0]; File file = (File) args[1]; Theme.ThemeInfo info = loadingThemes.get(fileName); @@ -868,7 +887,7 @@ public class ThemesHorizontalListCell extends RecyclerListView implements Notifi checkVisibleTheme(info); } } - } else if (id == NotificationCenter.fileDidFailToLoad) { + } else if (id == NotificationCenter.fileLoadFailed) { String fileName = (String) args[0]; loadingThemes.remove(fileName); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Cells/WallpaperCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Cells/WallpaperCell.java index e95c228bd..6f715a742 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Cells/WallpaperCell.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Cells/WallpaperCell.java @@ -13,6 +13,7 @@ import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; import android.content.Context; +import android.graphics.BlendMode; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.PorterDuff; @@ -31,11 +32,13 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.MediaController; import org.telegram.messenger.R; +import org.telegram.messenger.SvgHelper; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.BackupImageView; import org.telegram.ui.Components.CheckBox; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MotionBackgroundDrawable; import org.telegram.ui.WallpapersListActivity; public class WallpaperCell extends FrameLayout { @@ -61,7 +64,7 @@ public class WallpaperCell extends FrameLayout { @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); - if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { + 
if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper || currentWallpaper instanceof WallpapersListActivity.FileWallpaper) { canvas.drawLine(1, 0, getMeasuredWidth() - 1, 0, framePaint); canvas.drawLine(0, 0, 0, getMeasuredHeight(), framePaint); canvas.drawLine(getMeasuredWidth() - 1, 0, getMeasuredWidth() - 1, getMeasuredHeight(), framePaint); @@ -102,16 +105,18 @@ public class WallpaperCell extends FrameLayout { return super.onTouchEvent(event); } - public void setWallpaper(Object object, String selectedBackgroundSlug, Drawable themedWallpaper, boolean themed) { + public void setWallpaper(Object object, Object selectedWallpaper, Drawable themedWallpaper, boolean themed) { currentWallpaper = object; imageView.setVisibility(VISIBLE); imageView2.setVisibility(INVISIBLE); imageView.setBackgroundDrawable(null); imageView.getImageReceiver().setColorFilter(null); imageView.getImageReceiver().setAlpha(1.0f); + imageView.getImageReceiver().setBlendMode(null); + imageView.getImageReceiver().setGradientBitmap(null); + isSelected = object == selectedWallpaper; if (object instanceof TLRPC.TL_wallPaper) { TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) object; - isSelected = selectedBackgroundSlug.equals(wallPaper.slug); TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(wallPaper.document.thumbs, 100); TLRPC.PhotoSize image = FileLoader.getClosestPhotoSizeWithSize(wallPaper.document.thumbs, 320); if (image == thumb) { @@ -119,10 +124,31 @@ public class WallpaperCell extends FrameLayout { } int size = image != null ? 
image.size : wallPaper.document.size; if (wallPaper.pattern) { - imageView.setBackgroundColor(0xff000000 | wallPaper.settings.background_color); - imageView.setImage(ImageLocation.getForDocument(image, wallPaper.document), "100_100", ImageLocation.getForDocument(thumb, wallPaper.document), null, "jpg", size, 1, wallPaper); - imageView.getImageReceiver().setColorFilter(new PorterDuffColorFilter(AndroidUtilities.getPatternColor(wallPaper.settings.background_color), PorterDuff.Mode.SRC_IN)); - imageView.getImageReceiver().setAlpha(wallPaper.settings.intensity / 100.0f); + int patternColor; + if (wallPaper.settings.third_background_color != 0) { + MotionBackgroundDrawable motionBackgroundDrawable = new MotionBackgroundDrawable(wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.third_background_color, wallPaper.settings.fourth_background_color, true); + if (wallPaper.settings.intensity >= 0 || !Theme.getActiveTheme().isDark()) { + imageView.setBackground(motionBackgroundDrawable); + if (Build.VERSION.SDK_INT >= 29) { + imageView.getImageReceiver().setBlendMode(BlendMode.SOFT_LIGHT); + } + } else { + imageView.getImageReceiver().setGradientBitmap(motionBackgroundDrawable.getBitmap()); + } + patternColor = MotionBackgroundDrawable.getPatternColor(wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.third_background_color, wallPaper.settings.fourth_background_color); + } else { + imageView.setBackgroundColor(Theme.getWallpaperColor(wallPaper.settings.background_color)); + patternColor = AndroidUtilities.getPatternColor(wallPaper.settings.background_color); + } + if (Build.VERSION.SDK_INT < 29 || wallPaper.settings.third_background_color == 0) { + imageView.getImageReceiver().setColorFilter(new PorterDuffColorFilter(AndroidUtilities.getPatternColor(patternColor), PorterDuff.Mode.SRC_IN)); + } + if (image != null) { + imageView.setImage(ImageLocation.getForDocument(image, 
wallPaper.document), "100_100", ImageLocation.getForDocument(thumb, wallPaper.document), null, "jpg", size, 1, wallPaper); + } else { + imageView.setImage(ImageLocation.getForDocument(thumb, wallPaper.document), "100_100", null, null, "jpg", size, 1, wallPaper); + } + imageView.getImageReceiver().setAlpha(Math.abs(wallPaper.settings.intensity) / 100.0f); } else { if (image != null) { imageView.setImage(ImageLocation.getForDocument(image, wallPaper.document), "100_100", ImageLocation.getForDocument(thumb, wallPaper.document), "100_100_b", "jpg", size, 1, wallPaper); @@ -132,20 +158,51 @@ public class WallpaperCell extends FrameLayout { } } else if (object instanceof WallpapersListActivity.ColorWallpaper) { WallpapersListActivity.ColorWallpaper wallPaper = (WallpapersListActivity.ColorWallpaper) object; - if (wallPaper.path != null) { - imageView.setImage(wallPaper.path.getAbsolutePath(), "100_100", null); + if (wallPaper.path != null || wallPaper.pattern != null || Theme.DEFAULT_BACKGROUND_SLUG.equals(wallPaper.slug)) { + int patternColor; + if (wallPaper.gradientColor2 != 0) { + MotionBackgroundDrawable motionBackgroundDrawable = new MotionBackgroundDrawable(wallPaper.color, wallPaper.gradientColor1, wallPaper.gradientColor2, wallPaper.gradientColor3, true); + if (wallPaper.intensity >= 0) { + imageView.setBackground(new MotionBackgroundDrawable(wallPaper.color, wallPaper.gradientColor1, wallPaper.gradientColor2, wallPaper.gradientColor3, true)); + if (Build.VERSION.SDK_INT >= 29) { + imageView.getImageReceiver().setBlendMode(BlendMode.SOFT_LIGHT); + } + } else { + imageView.getImageReceiver().setGradientBitmap(motionBackgroundDrawable.getBitmap()); + } + patternColor = MotionBackgroundDrawable.getPatternColor(wallPaper.color, wallPaper.gradientColor1, wallPaper.gradientColor2, wallPaper.gradientColor3); + } else { + patternColor = AndroidUtilities.getPatternColor(wallPaper.color); + } + if (Theme.DEFAULT_BACKGROUND_SLUG.equals(wallPaper.slug)) { + if 
(wallPaper.defaultCache == null) { + wallPaper.defaultCache = SvgHelper.getBitmap(R.raw.default_pattern, 100, 180, patternColor); + } + imageView.setImageBitmap(wallPaper.defaultCache); + imageView.getImageReceiver().setAlpha(Math.abs(wallPaper.intensity)); + } else if (wallPaper.path != null) { + imageView.setImage(wallPaper.path.getAbsolutePath(), "100_100", null); + } else { + TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(wallPaper.pattern.document.thumbs, 100); + int size = thumb != null ? thumb.size : wallPaper.pattern.document.size; + imageView.setImage(ImageLocation.getForDocument(thumb, wallPaper.pattern.document), "100_100", null, null, "jpg", size, 1, wallPaper.pattern); + imageView.getImageReceiver().setAlpha(Math.abs(wallPaper.intensity)); + if (Build.VERSION.SDK_INT < 29 || wallPaper.gradientColor2 == 0) { + imageView.getImageReceiver().setColorFilter(new PorterDuffColorFilter(AndroidUtilities.getPatternColor(patternColor), PorterDuff.Mode.SRC_IN)); + } + } } else { imageView.setImageBitmap(null); - if (wallPaper.gradientColor != 0) { - imageView.setBackground(new GradientDrawable(GradientDrawable.Orientation.BL_TR, new int[]{0xff000000 | wallPaper.color, 0xff000000 | wallPaper.gradientColor})); + if (wallPaper.isGradient) { + imageView.setBackground(new MotionBackgroundDrawable(wallPaper.color, wallPaper.gradientColor1, wallPaper.gradientColor2, wallPaper.gradientColor3, true)); + } else if (wallPaper.gradientColor1 != 0) { + imageView.setBackground(new GradientDrawable(GradientDrawable.Orientation.BL_TR, new int[]{0xff000000 | wallPaper.color, 0xff000000 | wallPaper.gradientColor1})); } else { imageView.setBackgroundColor(0xff000000 | wallPaper.color); } } - isSelected = selectedBackgroundSlug.equals(wallPaper.slug); } else if (object instanceof WallpapersListActivity.FileWallpaper) { WallpapersListActivity.FileWallpaper wallPaper = (WallpapersListActivity.FileWallpaper) object; - isSelected = 
selectedBackgroundSlug.equals(wallPaper.slug); if (wallPaper.originalPath != null) { imageView.setImage(wallPaper.originalPath.getAbsolutePath(), "100_100", null); } else if (wallPaper.path != null) { @@ -312,14 +369,14 @@ public class WallpaperCell extends FrameLayout { } } - public void setWallpaper(int type, int index, Object wallpaper, String selectedBackgroundSlug, Drawable themedWallpaper, boolean themed) { + public void setWallpaper(int type, int index, Object wallpaper, Object selectedWallpaper, Drawable themedWallpaper, boolean themed) { currentType = type; if (wallpaper == null) { wallpaperViews[index].setVisibility(GONE); wallpaperViews[index].clearAnimation(); } else { wallpaperViews[index].setVisibility(VISIBLE); - wallpaperViews[index].setWallpaper(wallpaper, selectedBackgroundSlug, themedWallpaper, themed); + wallpaperViews[index].setWallpaper(wallpaper, selectedWallpaper, themedWallpaper, themed); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java index 085368ecf..ba68ee6bb 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChannelAdminLogActivity.java @@ -257,7 +257,7 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio @Override public boolean onFragmentCreate() { super.onFragmentCreate(); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messagePlayingDidStart); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messagePlayingPlayStateChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messagePlayingDidReset); @@ -271,7 +271,7 @@ 
public class ChannelAdminLogActivity extends BaseFragment implements Notificatio @Override public void onFragmentDestroy() { super.onFragmentDestroy(); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messagePlayingDidStart); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messagePlayingPlayStateChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messagePlayingDidReset); @@ -402,7 +402,7 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { if (chatListView != null) { chatListView.invalidateViews(); } @@ -482,9 +482,9 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio } else if (id == NotificationCenter.didSetNewWallpapper) { if (fragmentView != null) { contentView.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); - progressView2.getBackground().setColorFilter(Theme.colorFilter); + progressView2.invalidate(); if (emptyView != null) { - emptyView.getBackground().setColorFilter(Theme.colorFilter); + emptyView.invalidate(); } chatListView.invalidateViews(); } @@ -717,12 +717,11 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio contentView.addView(emptyViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); emptyViewContainer.setOnTouchListener((v, event) -> true); - emptyView = new TextView(context); emptyView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); emptyView.setGravity(Gravity.CENTER); 
emptyView.setTextColor(Theme.getColor(Theme.key_chat_serviceText)); - emptyView.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(10), Theme.getServiceMessageColor())); + emptyView.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(6), emptyView, contentView)); emptyView.setPadding(AndroidUtilities.dp(16), AndroidUtilities.dp(16), AndroidUtilities.dp(16), AndroidUtilities.dp(16)); emptyViewContainer.addView(emptyView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 16, 0, 16, 0)); @@ -967,8 +966,7 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio contentView.addView(progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); progressView2 = new View(context); - progressView2.setBackgroundResource(R.drawable.system_loader); - progressView2.getBackground().setColorFilter(Theme.colorFilter); + progressView2.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(18), progressView2, contentView)); progressView.addView(progressView2, LayoutHelper.createFrame(36, 36, Gravity.CENTER)); progressBar = new RadialProgressView(context); @@ -1393,7 +1391,7 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio path = FileLoader.getPathToMessage(selectedObject.messageOwner).toString(); } if (selectedObject.type == 3 || selectedObject.type == 1) { - if (Build.VERSION.SDK_INT >= 23 && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { getParentActivity().requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); selectedObject = null; return; @@ -1502,7 
+1500,7 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio if (path == null || path.length() == 0) { path = FileLoader.getPathToMessage(selectedObject.messageOwner).toString(); } - if (Build.VERSION.SDK_INT >= 23 && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { getParentActivity().requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); selectedObject = null; return; @@ -1515,7 +1513,7 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio break; } case 10: { - if (Build.VERSION.SDK_INT >= 23 && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { getParentActivity().requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); selectedObject = null; return; @@ -1767,7 +1765,7 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio if (viewBottom > height) { viewBottom = viewTop + height; } - messageCell.setVisiblePart(viewTop, viewBottom - viewTop, contentView.getHeightWithKeyboard() - AndroidUtilities.dp(48) - chatListView.getTop(), 0); + messageCell.setVisiblePart(viewTop, viewBottom - viewTop, contentView.getHeightWithKeyboard() - AndroidUtilities.dp(48) - chatListView.getTop(), 0, view.getY() + actionBar.getMeasuredHeight() - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); MessageObject messageObject = 
messageCell.getMessageObject(); if (roundVideoContainer != null && messageObject.isRoundVideo() && MediaController.getInstance().isPlayingMessage(messageObject)) { @@ -1778,6 +1776,9 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio roundVideoContainer.invalidate(); foundTextureViewMessage = true; } + } else if (view instanceof ChatActionCell) { + ChatActionCell cell = (ChatActionCell) view; + cell.setVisiblePart(view.getY() + actionBar.getMeasuredHeight() - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); } if (view.getBottom() <= chatListView.getPaddingTop()) { continue; @@ -2455,7 +2456,7 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio } else if (viewType == 2) { view = new ChatUnreadCell(mContext); } else { - view = new ChatLoadingCell(mContext); + view = new ChatLoadingCell(mContext, contentView); } view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT)); return new RecyclerListView.Holder(view); @@ -2510,6 +2511,34 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio @Override public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { + if (holder.itemView instanceof ChatMessageCell || holder.itemView instanceof ChatActionCell) { + View view = holder.itemView; + holder.itemView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + view.getViewTreeObserver().removeOnPreDrawListener(this); + + int height = chatListView.getMeasuredHeight(); + int top = view.getTop(); + int bottom = view.getBottom(); + int viewTop = top >= 0 ? 
0 : -top; + int viewBottom = view.getMeasuredHeight(); + if (viewBottom > height) { + viewBottom = viewTop + height; + } + + if (holder.itemView instanceof ChatMessageCell) { + ((ChatMessageCell) view).setVisiblePart(viewTop, viewBottom - viewTop, contentView.getHeightWithKeyboard() - AndroidUtilities.dp(48) - chatListView.getTop(), 0, view.getY() + actionBar.getMeasuredHeight() - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); + } else if (holder.itemView instanceof ChatActionCell) { + if (actionBar != null && contentView != null) { + ((ChatActionCell) view).setVisiblePart(view.getY() + actionBar.getMeasuredHeight() - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); + } + } + + return true; + } + }); + } if (holder.itemView instanceof ChatMessageCell) { final ChatMessageCell messageCell = (ChatMessageCell) holder.itemView; MessageObject message = messageCell.getMessageObject(); @@ -2517,24 +2546,6 @@ public class ChannelAdminLogActivity extends BaseFragment implements Notificatio messageCell.setBackgroundDrawable(null); messageCell.setCheckPressed(true, false); - messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { - messageCell.getViewTreeObserver().removeOnPreDrawListener(this); - - int height = chatListView.getMeasuredHeight(); - int top = messageCell.getTop(); - int bottom = messageCell.getBottom(); - int viewTop = top >= 0 ? 
0 : -top; - int viewBottom = messageCell.getMeasuredHeight(); - if (viewBottom > height) { - viewBottom = viewTop + height; - } - messageCell.setVisiblePart(viewTop, viewBottom - viewTop, contentView.getHeightWithKeyboard() - AndroidUtilities.dp(48) - chatListView.getTop(), 0); - - return true; - } - }); messageCell.setHighlighted(false); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Charts/BaseChartView.java b/TMessagesProj/src/main/java/org/telegram/ui/Charts/BaseChartView.java index 2b718acc1..1529283d9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Charts/BaseChartView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Charts/BaseChartView.java @@ -16,6 +16,8 @@ import android.graphics.Rect; import android.graphics.RectF; import android.os.Build; import android.os.Bundle; +import android.os.VibrationEffect; +import android.os.Vibrator; import android.text.TextPaint; import android.view.HapticFeedbackConstants; import android.view.MotionEvent; @@ -163,6 +165,8 @@ public abstract class BaseChartView public float chartFullWidth; public RectF chartArea = new RectF(); + VibrationEffect vibrationEffect; + private ValueAnimator.AnimatorUpdateListener pickerHeightUpdateListener = new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { @@ -1057,13 +1061,29 @@ public abstract class BaseChartView if (selectedIndex > endXIndex) selectedIndex = endXIndex; if (selectedIndex < startXIndex) selectedIndex = startXIndex; - legendShowing = true; - animateLegend(true); - moveLegend(offset); - if (dateSelectionListener != null) { - dateSelectionListener.onDateSelected(getSelectedDate()); + if (oldSelectedX != selectedIndex) { + legendShowing = true; + animateLegend(true); + moveLegend(offset); + if (dateSelectionListener != null) { + dateSelectionListener.onDateSelected(getSelectedDate()); + } + + runSmoothHaptic(); + invalidate(); + } + } + + protected void runSmoothHaptic() { + if 
(android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) { + final Vibrator vibrator = (Vibrator) getContext().getSystemService(Context.VIBRATOR_SERVICE); + if (vibrationEffect == null) { + long[] vibrationWaveFormDurationPattern = {0, 2}; + vibrationEffect = VibrationEffect.createWaveform(vibrationWaveFormDurationPattern, -1); + } + vibrator.cancel(); + vibrator.vibrate(vibrationEffect); } - invalidate(); } public boolean animateLegentTo = false; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Charts/StackBarChartView.java b/TMessagesProj/src/main/java/org/telegram/ui/Charts/StackBarChartView.java index 2cba72b64..d772343b0 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Charts/StackBarChartView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Charts/StackBarChartView.java @@ -153,6 +153,7 @@ public class StackBarChartView extends BaseChartView animateSendingViews = new ArrayList<>(); + private SparseArray[] messagesDict = new SparseArray[]{new SparseArray<>(), new SparseArray<>()}; private SparseArray repliesMessagesDict = new SparseArray<>(); private HashMap> messagesByDays = new HashMap<>(); @@ -554,7 +559,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private boolean[] forwardEndReached = new boolean[]{true, true}; private boolean loading; private boolean firstLoading = true; - private boolean firstUnreadSent = false; + private boolean chatWasReset; + private boolean firstUnreadSent; private int loadsCount; private int last_message_id = 0; private long mergeDialogId; @@ -689,6 +695,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private int scrollAnimationIndex; private int scrollCallbackAnimationIndex; + private boolean showSearchAsIcon; + private final static int[] allowedNotificationsDuringChatListAnimations = new int[]{ NotificationCenter.messagesRead, NotificationCenter.threadMessagesRead, @@ -789,6 +797,7 @@ public class ChatActivity extends 
BaseFragment implements NotificationCenter.Not int textColor; int panelBackgroundColor; int counterColor; + CharSequence lastText; public UnreadCounterTextView(Context context) { super(context); @@ -800,6 +809,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } public void setText(CharSequence text, boolean animatedFromBottom) { + if (lastText == text) { + return; + } + lastText = text; this.animatedFromBottom = animatedFromBottom; textLayoutOut = textLayout; layoutTextWidth = (int) Math.ceil(layoutPaint.measureText(text, 0, text.length())); @@ -1029,7 +1042,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { if (index < 0 || index >= botContextResults.size()) { return; } @@ -1297,7 +1310,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } getNotificationCenter().addObserver(this, NotificationCenter.messagesDidLoad); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); getNotificationCenter().addObserver(this, NotificationCenter.didUpdateConnectionState); getNotificationCenter().addObserver(this, NotificationCenter.updateInterfaces); if (chatMode != MODE_PINNED) { @@ -1443,13 +1456,13 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (chatMode == 0) { if (userId != 0 && currentUser.bot) { - getMediaDataController().loadBotInfo(userId, true, classGuid); + getMediaDataController().loadBotInfo(userId, userId, true, classGuid); } else if (chatInfo instanceof TLRPC.TL_chatFull) { for (int a = 0; a < chatInfo.participants.participants.size(); 
a++) { TLRPC.ChatParticipant participant = chatInfo.participants.participants.get(a); TLRPC.User user = getMessagesController().getUser(participant.user_id); if (user != null && user.bot) { - getMediaDataController().loadBotInfo(user.id, true, classGuid); + getMediaDataController().loadBotInfo(user.id, -chatInfo.id, true, classGuid); } } } @@ -1564,7 +1577,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not getNotificationCenter().removePostponeNotificationsCallback(postponeNotificationsWhileLoadingCallback); getMessagesController().setLastCreatedDialogId(dialog_id, chatMode == MODE_SCHEDULED, false); getNotificationCenter().removeObserver(this, NotificationCenter.messagesDidLoad); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); getNotificationCenter().removeObserver(this, NotificationCenter.didUpdateConnectionState); getNotificationCenter().removeObserver(this, NotificationCenter.updateInterfaces); getNotificationCenter().removeObserver(this, NotificationCenter.didReceiveNewMessages); @@ -1923,9 +1936,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } openAttachMenu(); } else if (id == bot_help) { - getSendMessagesHelper().sendMessage("/help", dialog_id, null, null, null, false, null, null, null, true, 0); + getSendMessagesHelper().sendMessage("/help", dialog_id, null, null, null, false, null, null, null, true, 0, null); } else if (id == bot_settings) { - getSendMessagesHelper().sendMessage("/settings", dialog_id, null, null, null, false, null, null, null, true, 0); + getSendMessagesHelper().sendMessage("/settings", dialog_id, null, null, null, false, null, null, null, true, 0, null); } else if (id == search) { openSearchWithText(null); } else if (id == call || id == video_call) { @@ -1997,6 +2010,7 @@ public class ChatActivity extends BaseFragment 
implements NotificationCenter.Not ActionBarMenu menu = actionBar.createMenu(); if (currentEncryptedChat == null && chatMode == 0 && reportType < 0) { + searchIconItem = menu.addItem(search, R.drawable.ic_ab_search); searchItem = menu.addItem(0, R.drawable.ic_ab_search).setIsSearchField(true).setActionBarMenuItemSearchListener(new ActionBarMenuItem.ActionBarMenuItemSearchListener() { boolean searchWas; @@ -2037,6 +2051,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (attachItem != null) { attachItem.setVisibility(View.GONE); } + if (searchIconItem != null && showSearchAsIcon) { + searchIconItem.setVisibility(View.GONE); + } } else if (chatActivityEnterView.hasText() && TextUtils.isEmpty(chatActivityEnterView.getSlowModeTimer())) { if (headerItem != null) { headerItem.setVisibility(View.GONE); @@ -2047,10 +2064,16 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (attachItem != null) { attachItem.setVisibility(View.VISIBLE); } + if (searchIconItem != null && showSearchAsIcon) { + searchIconItem.setVisibility(View.GONE); + } } else { if (headerItem != null) { headerItem.setVisibility(View.VISIBLE); } + if (searchIconItem != null && showSearchAsIcon) { + searchIconItem.setVisibility(View.VISIBLE); + } if (editTextItem != null) { editTextItem.setVisibility(View.GONE); } @@ -2298,6 +2321,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not int inputFieldHeight = 0; int lastHeight; + int lastWidth; + ArrayList drawTimeAfter = new ArrayList<>(); ArrayList drawNamesAfter = new ArrayList<>(); ArrayList drawCaptionAfter = new ArrayList<>(); @@ -2569,6 +2594,20 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not setFragmentPanTranslationOffset(chatActivityEnterView.getEmojiPadding()); } } + for (int a = 0, N = animateSendingViews.size(); a < N; a++) { + ChatMessageCell cell = animateSendingViews.get(a); + MessageObject.SendAnimationData 
data = cell.getMessageObject().sendAnimationData; + if (data != null) { + canvas.save(); + ImageReceiver imageReceiver = cell.getPhotoImage(); + canvas.translate(data.currentX, data.currentY); + canvas.scale(data.currentScale, data.currentScale); + canvas.translate(-imageReceiver.getCenterX(), -imageReceiver.getCenterY()); + cell.setTimeAlpha(data.timeAlpha); + animateSendingViews.get(a).draw(canvas); + canvas.restore(); + } + } if (scrimView != null) { canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); float listTop = chatListView.getY() + chatListViewPaddingTop - chatListViewPaddingVisibleOffset - AndroidUtilities.dp(4); @@ -2725,6 +2764,44 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not int widthSize = MeasureSpec.getSize(widthMeasureSpec); int heightSize = allHeight = MeasureSpec.getSize(heightMeasureSpec); + if (lastWidth != widthSize) { + globalIgnoreLayout = true; + lastWidth = widthMeasureSpec; + if (!inPreviewMode && currentUser != null && currentUser.self) { + SimpleTextView textView = avatarContainer.getTitleTextView(); + int textWidth = (int) textView.getPaint().measureText(textView.getText(), 0, textView.getText().length()); + if (widthSize - AndroidUtilities.dp(96 + 56) > textWidth + AndroidUtilities.dp(10)) { + showSearchAsIcon = true; + } else { + showSearchAsIcon = false; + } + } else { + showSearchAsIcon = false; + } + if (showSearchAsIcon) { + if (avatarContainer != null && avatarContainer.getLayoutParams() != null) { + ((MarginLayoutParams) avatarContainer.getLayoutParams()).rightMargin = AndroidUtilities.dp(96); + } + if (!actionBar.isSearchFieldVisible() && searchIconItem != null) { + searchIconItem.setVisibility(View.VISIBLE); + } + if (headerItem != null) { + headerItem.hideSubItem(search); + } + } else { + if (headerItem != null) { + headerItem.showSubItem(search); + } + if (avatarContainer != null && avatarContainer.getLayoutParams() != null) { + ((MarginLayoutParams) 
avatarContainer.getLayoutParams()).rightMargin = AndroidUtilities.dp(40); + } + if (searchIconItem != null) { + searchIconItem.setVisibility(View.GONE); + } + } + globalIgnoreLayout = false; + } + setMeasuredDimension(widthSize, heightSize); heightSize -= getPaddingTop(); @@ -2747,6 +2824,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatEmojiViewPadding = 0; } } + setEmojiKeyboardHeight(chatEmojiViewPadding); int childCount = getChildCount(); @@ -3065,12 +3143,13 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not greetingsViewContainer = new ChatGreetingsView(context, currentUser, distance, currentAccount, preloadedGreetingsSticker); greetingsViewContainer.setListener((sticker) -> { animatingDocuments.put(sticker, 0); - SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, null, true, 0); }); + greetingsViewContainer.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(10), greetingsViewContainer, contentView)); emptyViewContainer.addView(greetingsViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 68, 0, 68, 0)); } else if (currentEncryptedChat == null) { if (!isThreadChat() && chatMode == 0 && (currentUser != null && currentUser.self || currentChat != null && currentChat.creator)) { - bigEmptyView = new ChatBigEmptyView(context, currentChat != null ? ChatBigEmptyView.EMPTY_VIEW_TYPE_GROUP : ChatBigEmptyView.EMPTY_VIEW_TYPE_SAVED); + bigEmptyView = new ChatBigEmptyView(context, contentView, currentChat != null ? 
ChatBigEmptyView.EMPTY_VIEW_TYPE_GROUP : ChatBigEmptyView.EMPTY_VIEW_TYPE_SAVED); emptyViewContainer.addView(bigEmptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); if (currentChat != null) { bigEmptyView.setStatusText(AndroidUtilities.replaceTags(LocaleController.getString("GroupEmptyTitle1", R.string.GroupEmptyTitle1))); @@ -3094,8 +3173,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not greetingsViewContainer = new ChatGreetingsView(context, currentUser, distance, currentAccount, preloadedGreetingsSticker); greetingsViewContainer.setListener((sticker) -> { animatingDocuments.put(sticker, 0); - SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, null, dialog_id, null, null, null, null, true, 0); }); + greetingsViewContainer.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(10), greetingsViewContainer, contentView)); emptyViewContainer.addView(greetingsViewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 68, 0, 68, 0)); } else { emptyView = new TextView(context); @@ -3103,15 +3183,14 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not emptyView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); emptyView.setGravity(Gravity.CENTER); emptyView.setTextColor(Theme.getColor(Theme.key_chat_serviceText)); - emptyView.setBackgroundResource(R.drawable.system); - emptyView.getBackground().setColorFilter(Theme.colorFilter); + emptyView.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(6), emptyView, contentView)); emptyView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); emptyView.setPadding(AndroidUtilities.dp(10), AndroidUtilities.dp(2), AndroidUtilities.dp(10), AndroidUtilities.dp(3)); 
emptyViewContainer.addView(emptyView, new FrameLayout.LayoutParams(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); } } } else { - bigEmptyView = new ChatBigEmptyView(context, ChatBigEmptyView.EMPTY_VIEW_TYPE_SECRET); + bigEmptyView = new ChatBigEmptyView(context, contentView, ChatBigEmptyView.EMPTY_VIEW_TYPE_SECRET); if (currentEncryptedChat.admin_id == getUserConfig().getClientUserId()) { bigEmptyView.setStatusText(LocaleController.formatString("EncryptedPlaceholderTitleOutgoing", R.string.EncryptedPlaceholderTitleOutgoing, UserObject.getFirstName(currentUser))); } else { @@ -3255,6 +3334,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not super.setItemAnimator(animator); } + private RectF rect = new RectF(); private void drawReplyButton(Canvas canvas) { if (slidingView == null) { return; @@ -3284,6 +3364,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } int alpha; + int alpha2; + int oldAlpha = Theme.chat_actionBackgroundPaint.getAlpha(); float scale; if (showing) { if (replyButtonProgress <= 0.8f) { @@ -3292,24 +3374,28 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not scale = 1.2f - 0.2f * ((replyButtonProgress - 0.8f) / 0.2f); } alpha = (int) Math.min(255, 255 * (replyButtonProgress / 0.8f)); + alpha2 = (int) Math.min(oldAlpha, oldAlpha * (replyButtonProgress / 0.8f)); } else { scale = replyButtonProgress; alpha = (int) Math.min(255, 255 * replyButtonProgress); + alpha2 = (int) Math.min(oldAlpha, oldAlpha * replyButtonProgress); } - Theme.chat_shareDrawable.setAlpha(alpha); + + Theme.chat_actionBackgroundPaint.setAlpha(alpha2); Theme.chat_replyIconDrawable.setAlpha(alpha); float x = getMeasuredWidth() + slidingView.getNonAnimationTranslationX(false) / 2; float y = slidingView.getTop() + slidingView.getMeasuredHeight() / 2; - if (!Theme.isCustomTheme() || Theme.hasThemeKey(Theme.key_chat_shareBackground)) { - 
Theme.chat_shareDrawable.setColorFilter(Theme.getShareColorFilter(Theme.getColor(Theme.key_chat_shareBackground), false)); - } else { - Theme.chat_shareDrawable.setColorFilter(Theme.colorFilter2); + rect.set((int) (x - AndroidUtilities.dp(16) * scale), (int) (y - AndroidUtilities.dp(16) * scale), (int) (x + AndroidUtilities.dp(16) * scale), (int) (y + AndroidUtilities.dp(16) * scale)); + + Theme.applyServiceShaderMatrix(getMeasuredWidth(), AndroidUtilities.displaySize.y, 0, getY() + rect.top); + canvas.drawRoundRect(rect, AndroidUtilities.dp(16), AndroidUtilities.dp(16), Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + canvas.drawRoundRect(rect, AndroidUtilities.dp(16), AndroidUtilities.dp(16), Theme.chat_actionBackgroundGradientDarkenPaint); } - Theme.chat_shareDrawable.setBounds((int) (x - AndroidUtilities.dp(16) * scale), (int) (y - AndroidUtilities.dp(16) * scale), (int) (x + AndroidUtilities.dp(16) * scale), (int) (y + AndroidUtilities.dp(16) * scale)); - Theme.chat_shareDrawable.draw(canvas); + Theme.chat_replyIconDrawable.setBounds((int) (x - AndroidUtilities.dp(7) * scale), (int) (y - AndroidUtilities.dp(6) * scale), (int) (x + AndroidUtilities.dp(7) * scale), (int) (y + AndroidUtilities.dp(5) * scale)); Theme.chat_replyIconDrawable.draw(canvas); - Theme.chat_shareDrawable.setAlpha(255); + Theme.chat_actionBackgroundPaint.setAlpha(oldAlpha); Theme.chat_replyIconDrawable.setAlpha(255); } @@ -3326,7 +3412,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not slidingView = (ChatMessageCell) view; MessageObject message = slidingView.getMessageObject(); if (chatMode != 0 || threadMessageObjects != null && threadMessageObjects.contains(message) || - currentEncryptedChat != null && AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) < 46 || getMessageType(message) == 1 && (message.getDialogId() == mergeDialogId || message.needDrawBluredPreview()) || currentEncryptedChat == null && message.getId() 
< 0 || bottomOverlayChat != null && bottomOverlayChat.getVisibility() == View.VISIBLE || @@ -3712,6 +3797,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (child instanceof ChatMessageCell) { cell = (ChatMessageCell) child; + if (animateSendingViews.contains(cell)) { + skipDraw = true; + } MessageObject.GroupedMessagePosition position = cell.getCurrentPosition(); group = cell.getCurrentMessagesGroup(); if (position != null) { @@ -4582,8 +4670,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not contentView.addView(progressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT)); progressView2 = new View(context); - progressView2.setBackgroundResource(R.drawable.system_loader); - progressView2.getBackground().setColorFilter(Theme.colorFilter); + progressView2.setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(18), progressView2, contentView)); progressView.addView(progressView2, LayoutHelper.createFrame(36, 36, Gravity.CENTER)); progressBar = new RadialProgressView(context); @@ -5530,7 +5617,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not mentionsAdapter.setParentFragment(this); mentionsAdapter.setChatInfo(chatInfo); mentionsAdapter.setNeedUsernames(currentChat != null); - mentionsAdapter.setNeedBotContext(currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46); + mentionsAdapter.setNeedBotContext(true); mentionsAdapter.setBotsCount(currentChat != null ? 
botsCount : 1); mentionListView.setOnItemClickListener(mentionsOnItemClickListener = (view, position) -> { if (mentionsAdapter.isBannedInline()) { @@ -5566,7 +5653,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (mentionsAdapter.isBotCommands()) { if (chatMode == MODE_SCHEDULED) { AlertsCreator.createScheduleDatePickerDialog(getParentActivity(), dialog_id, (notify, scheduleDate) -> { - getSendMessagesHelper().sendMessage((String) object, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, notify, scheduleDate); + getSendMessagesHelper().sendMessage((String) object, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, notify, scheduleDate, null); chatActivityEnterView.setFieldText(""); hideFieldPanel(false); }); @@ -5574,7 +5661,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (checkSlowMode(view)) { return; } - getSendMessagesHelper().sendMessage((String) object, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0); + getSendMessagesHelper().sendMessage((String) object, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null); chatActivityEnterView.setFieldText(""); hideFieldPanel(false); } @@ -6144,7 +6231,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } if (!loading) { - mentionsAdapter.setNeedBotContext(currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46); + mentionsAdapter.setNeedBotContext(true); if (editingMessageObject != null) { AndroidUtilities.runOnUIThread(() -> hideFieldPanel(true), 30); } @@ -6153,7 +6240,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityEnterView.setFieldFocused(); waitingForKeyboard = true; } - chatActivityEnterView.setAllowStickersAndGifs(currentEncryptedChat == null || 
AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 23, currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46, waitingForKeyboard); + chatActivityEnterView.setAllowStickersAndGifs(true, true, waitingForKeyboard); if (editingMessageObjectReqId != 0) { getConnectionsManager().cancelRequest(editingMessageObjectReqId, true); editingMessageObjectReqId = 0; @@ -6293,6 +6380,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not bottomPanelTranslationY = chatActivityEnterView.pannelAniamationInProgress() ? chatActivityEnterView.getEmojiPadding() - translation : 0; bottomPanelTranslationYReverse = chatActivityEnterView.pannelAniamationInProgress() ? translation : 0; chatActivityEnterView.setTranslationY(translation); + contentView.setEmojiOffset(chatActivityEnterView.pannelAniamationInProgress(), bottomPanelTranslationY); translation += chatActivityEnterView.getTopViewTranslation(); chatListView.setTranslationY(translation); @@ -6330,12 +6418,15 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityEnterView.setChatInfo(chatInfo); } chatActivityEnterView.setId(id_chat_compose_panel); - chatActivityEnterView.setBotsCount(botsCount, hasBotsCommands); + chatActivityEnterView.setBotsCount(botsCount, hasBotsCommands, false); chatActivityEnterView.setMinimumHeight(AndroidUtilities.dp(51)); - chatActivityEnterView.setAllowStickersAndGifs(currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 23, currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46); + chatActivityEnterView.setAllowStickersAndGifs(true, currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46); if (inPreviewMode) { chatActivityEnterView.setVisibility(View.INVISIBLE); } + if (!ChatObject.isChannel(currentChat) || currentChat.megagroup) { + 
chatActivityEnterView.setBotInfo(botInfo); + } contentView.addView(chatActivityEnterView, contentView.getChildCount() - 1, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.BOTTOM)); chatActivityEnterTopView = new ChatActivityEnterTopView(context) { @@ -6586,7 +6677,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not final ContentPreviewViewer.ContentPreviewViewerDelegate contentPreviewViewerDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { @Override public void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { - chatActivityEnterView.onStickerSelected(sticker, query, parent, true, notify, scheduleDate); + chatActivityEnterView.onStickerSelected(sticker, query, parent, null, true, notify, scheduleDate); } @Override @@ -6850,7 +6941,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (botUserLast != null && botUserLast.length() != 0) { getMessagesController().sendBotStart(currentUser, botUserLast); } else { - getSendMessagesHelper().sendMessage("/start", dialog_id, null, null, null, false, null, null, null, true, 0); + getSendMessagesHelper().sendMessage("/start", dialog_id, null, null, null, false, null, null, null, true, 0, null); } } else { AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); @@ -6866,7 +6957,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (botUser.length() != 0) { getMessagesController().sendBotStart(currentUser, botUser); } else { - getSendMessagesHelper().sendMessage("/start", dialog_id, null, null, null, false, null, null, null, true, 0); + getSendMessagesHelper().sendMessage("/start", dialog_id, null, null, null, false, null, null, null, true, 0, null); } botUser = null; updateBottomOverlay(); @@ -7160,12 +7251,9 @@ public class ChatActivity extends BaseFragment implements 
NotificationCenter.Not } }); - pinchToZoomHelper.setClipBoundsListener(new PinchToZoomHelper.ClipBoundsListener() { - @Override - public void getClipTopBottom(float[] topBottom) { - topBottom[1] = chatListView.getBottom(); - topBottom[0] = chatListView.getTop() + chatListViewPaddingTop - AndroidUtilities.dp(4); - } + pinchToZoomHelper.setClipBoundsListener(topBottom -> { + topBottom[1] = chatListView.getBottom(); + topBottom[0] = chatListView.getTop() + chatListViewPaddingTop - AndroidUtilities.dp(4); }); return fragmentView; } @@ -7490,7 +7578,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } private void updateChatListViewTopPadding() { - if (!invalidateChatListViewTopPadding || chatListView == null) { + if (!invalidateChatListViewTopPadding || chatListView == null || fixedKeyboardHeight > 0) { return; } float topPanelViewH = Math.max(0, AndroidUtilities.dp(48) + topChatPanelViewOffset); @@ -7502,7 +7590,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatListViewPaddingTop = AndroidUtilities.dp(4) + contentPaddingTop + topPanelViewH + pinnedViewH; chatListViewPaddingVisibleOffset = 0; chatListViewPaddingTop += contentPanTranslation + bottomPanelTranslationY; - if (bottomPanelTranslationY == 0 && !chatActivityEnterView.pannelAniamationInProgress() && contentView.getLayoutParams().height < 0) { + + if (bottomPanelTranslationY == 0 && !chatActivityEnterView.pannelAniamationInProgress() && (contentView.getLayoutParams().height < 0 || (contentView.getKeyboardHeight() <= AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing()))) { chatListViewPaddingTop += contentView.getKeyboardHeight() <= AndroidUtilities.dp(20) && !AndroidUtilities.isInMultiwindow && !inBubbleMode ? 
chatActivityEnterView.getEmojiPadding() : contentView.getKeyboardHeight(); } if (!inPreviewMode && chatActivityEnterView != null) { @@ -7630,10 +7719,18 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } else if (mentionListView != null) { mentionListView.setTranslationY(bottomPanelTranslationYReverse); } - + if (chatActivityEnterView != null && chatActivityEnterView.botCommandsMenuContainer != null) { + chatActivityEnterView.botCommandsMenuContainer.setTranslationY(bottomPanelTranslationYReverse); + } if (alertView != null && alertView.getVisibility() == View.VISIBLE) { alertView.setTranslationY(contentPanTranslation + contentPaddingTop - AndroidUtilities.dp(50) * (1f - alertViewEnterProgress)); } + if (bottomOverlayChat != null) { + bottomOverlayChat.setTranslationY(bottomPanelTranslationYReverse); + } + if (bottomMessagesActionContainer != null) { + bottomMessagesActionContainer.setTranslationY(bottomPanelTranslationYReverse); + } } private TextureView createTextureView(boolean add) { @@ -8016,7 +8113,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not return; } if (chatAttachAlert == null) { - chatAttachAlert = new ChatAttachAlert(getParentActivity(), this, false) { + chatAttachAlert = new ChatAttachAlert(getParentActivity(), this, false, false) { @Override public void dismissInternal() { if (chatAttachAlert.isShowing()) { @@ -8031,7 +8128,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not }; chatAttachAlert.setDelegate(new ChatAttachAlert.ChatAttachViewDelegate() { @Override - public void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate) { + public void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate, boolean forceDocument) { if (getParentActivity() == null || chatAttachAlert == null) { return; } @@ -8065,7 +8162,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not 
photoEntry.reset(); } fillEditingMediaWithCaption(photos.get(0).caption, photos.get(0).entities); - SendMessagesHelper.prepareSendingMedia(getAccountInstance(), photos, dialog_id, replyingMessageObject, getThreadMessage(), null, button == 4, arg, editingMessageObject, notify, scheduleDate); + SendMessagesHelper.prepareSendingMedia(getAccountInstance(), photos, dialog_id, replyingMessageObject, getThreadMessage(), null, button == 4 || forceDocument, arg, editingMessageObject, notify, scheduleDate); afterMessageSend(); } if (scheduleDate != 0) { @@ -8154,7 +8251,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } getMessagesController().deleteDialog(dialog_id, 1, revoke); clearingHistory = false; - clearHistory(false); + clearHistory(false, null); chatAdapter.notifyDataSetChanged(); }, () -> { clearingHistory = false; @@ -8292,7 +8389,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } private void initStickers() { - if (chatActivityEnterView == null || getParentActivity() == null || stickersAdapter != null || currentEncryptedChat != null && AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) < 23) { + if (chatActivityEnterView == null || getParentActivity() == null || stickersAdapter != null) { return; } stickersListView.setPadding(AndroidUtilities.dp(18), 0, AndroidUtilities.dp(18), 0); @@ -8363,12 +8460,16 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (chatMode == 0 && checkSlowMode(view)) { return; } + MessageObject.SendAnimationData sendAnimationData = null; + if (view instanceof StickerCell) { + sendAnimationData = ((StickerCell) view).getSendAnimationData(); + } TLRPC.TL_document document = (TLRPC.TL_document) item; if (chatMode == MODE_SCHEDULED) { String query = stickersAdapter.getQuery(); - AlertsCreator.createScheduleDatePickerDialog(getParentActivity(), dialog_id, (notify, scheduleDate) -> 
SendMessagesHelper.getInstance(currentAccount).sendSticker(document, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, scheduleDate)); + AlertsCreator.createScheduleDatePickerDialog(getParentActivity(), dialog_id, (notify, scheduleDate) -> SendMessagesHelper.getInstance(currentAccount).sendSticker(document, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, null, notify, scheduleDate)); } else { - getSendMessagesHelper().sendSticker(document, stickersAdapter.getQuery(), dialog_id, replyingMessageObject, getThreadMessage(), parent, true, 0); + getSendMessagesHelper().sendSticker(document, stickersAdapter.getQuery(), dialog_id, replyingMessageObject, getThreadMessage(), parent, sendAnimationData, true, 0); } hideFieldPanel(false); chatActivityEnterView.addStickerToRecent(document); @@ -8377,7 +8478,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not String emoji = (String) item; SpannableString string = new SpannableString(emoji); Emoji.replaceEmoji(string, chatActivityEnterView.getEditField().getPaint().getFontMetricsInt(), AndroidUtilities.dp(20), false); - //stickersAdapter.loadStikersForEmoji("", false); chatActivityEnterView.setFieldText(string, false); } }); @@ -8444,7 +8544,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not voiceHintTextView.showForView(chatActivityEnterView.getAudioVideoButtonContainer(), true); } - private boolean checkSlowMode(View view) { + public boolean checkSlowMode(View view) { CharSequence time = chatActivityEnterView.getSlowModeTimer(); if (time != null) { showSlowModeHint(view, true, time); @@ -8551,14 +8651,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } if (mediaBanTooltip == null) { - mediaBanTooltip = new CorrectlyMeasuringTextView(getParentActivity()); - mediaBanTooltip.setBackgroundDrawable(Theme.createRoundRectDrawable(AndroidUtilities.dp(3), 
Theme.getColor(Theme.key_chat_gifSaveHintBackground))); - mediaBanTooltip.setTextColor(Theme.getColor(Theme.key_chat_gifSaveHintText)); - mediaBanTooltip.setPadding(AndroidUtilities.dp(8), AndroidUtilities.dp(7), AndroidUtilities.dp(8), AndroidUtilities.dp(7)); - mediaBanTooltip.setGravity(Gravity.CENTER_VERTICAL); - mediaBanTooltip.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + mediaBanTooltip = new HintView(getParentActivity(), 9); mediaBanTooltip.setVisibility(View.GONE); - frameLayout.addView(mediaBanTooltip, index + 1, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.RIGHT | Gravity.BOTTOM, 30, 0, 5, 3)); + frameLayout.addView(mediaBanTooltip, index + 1, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 10, 0, 10, 0)); } if (ChatObject.isActionBannedByDefault(currentChat, ChatObject.ACTION_SEND_MEDIA)) { @@ -8574,37 +8669,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } - mediaBanTooltip.setVisibility(View.VISIBLE); - AnimatorSet AnimatorSet = new AnimatorSet(); - AnimatorSet.playTogether( - ObjectAnimator.ofFloat(mediaBanTooltip, View.ALPHA, 0.0f, 1.0f) - ); - AnimatorSet.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - AndroidUtilities.runOnUIThread(() -> { - if (mediaBanTooltip == null) { - return; - } - AnimatorSet AnimatorSet = new AnimatorSet(); - AnimatorSet.playTogether( - ObjectAnimator.ofFloat(mediaBanTooltip, View.ALPHA, 0.0f) - ); - AnimatorSet.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - if (mediaBanTooltip != null) { - mediaBanTooltip.setVisibility(View.GONE); - } - } - }); - AnimatorSet.setDuration(300); - AnimatorSet.start(); - }, 5000); - } - }); - AnimatorSet.setDuration(300); - AnimatorSet.start(); + mediaBanTooltip.showForView(chatActivityEnterView.getSendButton(), true); } private 
void showNoSoundHint() { @@ -9087,6 +9152,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not ignoreAttachOnPause = value; } + public ChatActivityEnterView getChatActivityEnterViewForStickers() { + return bottomOverlayChat.getVisibility() != View.VISIBLE && (currentChat == null || ChatObject.canSendStickers(currentChat)) ? chatActivityEnterView : null; + } + public ChatActivityEnterView getChatActivityEnterView() { return chatActivityEnterView; } @@ -9172,7 +9241,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (ChatObject.isChannel(currentChat) && currentChat.banned_rights != null && currentChat.banned_rights.send_gifs) { allowGifs = false; } else { - allowGifs = currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46; + allowGifs = true; } PhotoAlbumPickerActivity fragment = new PhotoAlbumPickerActivity(PhotoAlbumPickerActivity.SELECT_TYPE_ALL, allowGifs, true, ChatActivity.this); if (currentChat != null && !ChatObject.hasAdminRights(currentChat) && currentChat.slowmode_enabled) { @@ -9240,7 +9309,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (ChatObject.isChannel(currentChat) && currentChat.banned_rights != null && currentChat.banned_rights.send_gifs) { return false; } else { - return currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46; + return true; } } @@ -9254,8 +9323,13 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } @Override - public void didSelectFiles(ArrayList files, String caption, boolean notify, int scheduleDate) { + public void didSelectFiles(ArrayList files, String caption, ArrayList fmessages, boolean notify, int scheduleDate) { fillEditingMediaWithCaption(caption, null); + if (!fmessages.isEmpty() && !TextUtils.isEmpty(caption)) { + 
SendMessagesHelper.getInstance(currentAccount).sendMessage(caption, dialog_id, null, null, null, true, null, null, null, true, 0, null); + caption = null; + } + getSendMessagesHelper().sendMessage(fmessages, dialog_id, true, 0); SendMessagesHelper.prepareSendingDocuments(getAccountInstance(), files, files, null, caption, null, dialog_id, replyingMessageObject, getThreadMessage(), null, editingMessageObject, notify, scheduleDate); afterMessageSend(); } @@ -9287,7 +9361,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } if (!hasNoGifs && !TextUtils.isEmpty(photos.get(0).caption)) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(photos.get(0).caption, dialog_id, replyingMessageObject, getThreadMessage(), null, false, photos.get(0).entities, null, null, notify, scheduleDate); + SendMessagesHelper.getInstance(currentAccount).sendMessage(photos.get(0).caption, dialog_id, replyingMessageObject, getThreadMessage(), null, false, photos.get(0).entities, null, null, notify, scheduleDate, null); } for (int a = 0; a < photos.size(); a++) { SendMessagesHelper.SendingMediaInfo info = photos.get(a); @@ -9332,7 +9406,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } private void searchLinks(final CharSequence charSequence, final boolean force) { - if (currentEncryptedChat != null && (getMessagesController().secretWebpagePreview == 0 || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) < 46) || editingMessageObject != null && !editingMessageObject.isWebpage()) { + if (currentEncryptedChat != null && getMessagesController().secretWebpagePreview == 0 || editingMessageObject != null && !editingMessageObject.isWebpage()) { return; } if (force && foundWebPage != null) { @@ -9910,7 +9984,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (photoSize == thumbPhotoSize) { thumbPhotoSize = null; } - if (photoSize == null || photoSize instanceof 
TLRPC.TL_photoSizeEmpty || photoSize.location instanceof TLRPC.TL_fileLocationUnavailable || thumbMediaMessageObject.isAnyKindOfSticker() || thumbMediaMessageObject != null && thumbMediaMessageObject.isSecretMedia()) { + if (photoSize == null || photoSize instanceof TLRPC.TL_photoSizeEmpty || photoSize.location instanceof TLRPC.TL_fileLocationUnavailable || thumbMediaMessageObject.isAnyKindOfSticker() || thumbMediaMessageObject.isSecretMedia() || thumbMediaMessageObject.isWebpageDocument()) { replyImageView.setImageBitmap(null); replyImageLocation = null; replyImageLocationObject = null; @@ -9955,7 +10029,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not forwardingMessages = null; forwardMessages(messagesToForward, false, notify, scheduleDate != 0 && scheduleDate != 0x7ffffffe ? scheduleDate + 1 : scheduleDate); } - chatActivityEnterView.setForceShowSendButton(false, false); + chatActivityEnterView.setForceShowSendButton(false, animated); if (!waitingForSendingMessageLoad) { chatActivityEnterView.hideTopView(animated); } @@ -10256,6 +10330,16 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not continue; } + int viewTop = top >= 0 ? 0 : -top; + int viewBottom = view.getMeasuredHeight(); + if (viewBottom > height) { + viewBottom = viewTop + height; + } + int keyboardOffset = contentView.getKeyboardHeight(); + if (keyboardOffset < AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing() || chatActivityEnterView.pannelAniamationInProgress()) { + keyboardOffset = chatActivityEnterView.getEmojiPadding(); + } + if (view instanceof ChatMessageCell) { ChatMessageCell messageCell = (ChatMessageCell) view; messageObject = messageCell.getMessageObject(); @@ -10264,16 +10348,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not maxVisibleMessageObject = messageObject; } - int viewTop = top >= 0 ? 
0 : -top; - int viewBottom = messageCell.getMeasuredHeight(); - if (viewBottom > height) { - viewBottom = viewTop + height; - } - int keyboardOffset = contentView.getKeyboardHeight(); - if (keyboardOffset < AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing() || chatActivityEnterView.pannelAniamationInProgress()) { - keyboardOffset = chatActivityEnterView.getEmojiPadding(); - } - messageCell.setVisiblePart(viewTop, viewBottom - viewTop, recyclerChatViewHeight, keyboardOffset); + messageCell.setVisiblePart(viewTop, viewBottom - viewTop, recyclerChatViewHeight, keyboardOffset, view.getY() + (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); if (!threadMessageVisible && threadMessageObject != null && messageObject == threadMessageObject && messageCell.getBottom() > chatListViewPaddingTop) { threadMessageVisible = true; @@ -10293,10 +10368,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } } else if (view instanceof ChatActionCell) { - messageObject = ((ChatActionCell) view).getMessageObject(); + ChatActionCell cell = (ChatActionCell) view; + messageObject = cell.getMessageObject(); if (messageObject != null && messageObject.getDialogId() == dialog_id && messageObject.getId() > maxVisibleId) { maxVisibleId = Math.max(maxVisibleId, messageObject.getId()); } + cell.setVisiblePart(view.getY() + (isKeyboardVisible() ? 
chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); } else if (view instanceof BotHelpCell) { view.invalidate(); } @@ -11115,7 +11192,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private void checkActionBarMenu(boolean animated) { if (currentEncryptedChat != null && !(currentEncryptedChat instanceof TLRPC.TL_encryptedChat) || - currentChat != null && (chatInfo == null || chatInfo.ttl_period == 0) || + currentChat != null && (chatMode != 0 || threadMessageId != 0 || chatInfo == null || chatInfo.ttl_period == 0) || currentUser != null && (UserObject.isDeleted(currentUser) || currentEncryptedChat == null && (userInfo == null || userInfo.ttl_period == 0))) { if (timeItem2 != null) { timeItem2.setVisibility(View.GONE); @@ -11467,8 +11544,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not final int newEditVisibility = canEditMessagesCount == 1 && selectedCount == 1 ? 
View.VISIBLE : View.GONE; if (replyButton != null) { boolean allowChatActions = true; - if (currentEncryptedChat != null && AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) < 46 || - bottomOverlayChat != null && bottomOverlayChat.getVisibility() == View.VISIBLE || + if (bottomOverlayChat != null && bottomOverlayChat.getVisibility() == View.VISIBLE || currentChat != null && (ChatObject.isNotInChat(currentChat) && !isThreadChat() || ChatObject.isChannel(currentChat) && !ChatObject.canPost(currentChat) && !currentChat.megagroup || !ChatObject.canSendMessages(currentChat))) { allowChatActions = false; } @@ -11771,8 +11847,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { - sendMedia((MediaController.PhotoEntry) cameraPhoto.get(0), videoEditedInfo, notify, scheduleDate); + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { + sendMedia((MediaController.PhotoEntry) cameraPhoto.get(0), videoEditedInfo, notify, scheduleDate, forceDocument); } @Override @@ -11782,7 +11858,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not }, this); } else { fillEditingMediaWithCaption(caption, null); - SendMessagesHelper.prepareSendingVideo(getAccountInstance(), videoPath, null, dialog_id, replyingMessageObject, getThreadMessage(), null, null, 0, editingMessageObject, true, 0); + SendMessagesHelper.prepareSendingVideo(getAccountInstance(), videoPath, null, dialog_id, replyingMessageObject, getThreadMessage(), null, null, 0, editingMessageObject, true, 0, false); afterMessageSend(); } } @@ -11820,12 +11896,12 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int 
scheduleDate) { + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { if (editingMessageObject != object) { return; } if (entry.isCropped || entry.isPainted || entry.isFiltered || videoEditedInfo != null) { - sendMedia(entry, videoEditedInfo, notify, scheduleDate); + sendMedia(entry, videoEditedInfo, notify, scheduleDate, forceDocument); } else { chatActivityEnterView.doneEditingMessage(); } @@ -12071,98 +12147,189 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not public void didReceivedNotification(int id, int account, final Object... args) { if (id == NotificationCenter.messagesDidLoad) { int guid = (Integer) args[10]; - if (guid == classGuid) { - int queryLoadIndex = (Integer) args[11]; - boolean doNotRemoveLoadIndex; - if (queryLoadIndex < 0) { - doNotRemoveLoadIndex = true; - queryLoadIndex = -queryLoadIndex; + if (guid != classGuid) { + return; + } + int queryLoadIndex = (Integer) args[11]; + boolean doNotRemoveLoadIndex; + if (queryLoadIndex < 0) { + doNotRemoveLoadIndex = true; + queryLoadIndex = -queryLoadIndex; + } else { + doNotRemoveLoadIndex = false; + } + if (!doNotRemoveLoadIndex && !fragmentBeginToShow && !paused) { + int[] alowedNotifications = new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.groupCallUpdated, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, + NotificationCenter.closeChats, NotificationCenter.botKeyboardDidLoad, NotificationCenter.userInfoDidLoad, NotificationCenter.pinnedInfoDidLoad, NotificationCenter.needDeleteDialog/*, NotificationCenter.botInfoDidLoad*/}; + if (transitionAnimationIndex == 0) { + transitionAnimationIndex = getNotificationCenter().setAnimationInProgress(transitionAnimationIndex, alowedNotifications); + AndroidUtilities.runOnUIThread(() -> getNotificationCenter().onAnimationFinish(transitionAnimationIndex), 800); } else { - doNotRemoveLoadIndex = 
false; + getNotificationCenter().updateAllowedNotifications(transitionAnimationIndex, alowedNotifications); } - if (!doNotRemoveLoadIndex && !fragmentBeginToShow && !paused) { - int[] alowedNotifications = new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.groupCallUpdated, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, - NotificationCenter.closeChats, NotificationCenter.botKeyboardDidLoad, NotificationCenter.userInfoDidLoad, NotificationCenter.pinnedInfoDidLoad, NotificationCenter.needDeleteDialog/*, NotificationCenter.botInfoDidLoad*/}; - if (transitionAnimationIndex == 0) { - transitionAnimationIndex = getNotificationCenter().setAnimationInProgress(transitionAnimationIndex, alowedNotifications); - AndroidUtilities.runOnUIThread(() -> getNotificationCenter().onAnimationFinish(transitionAnimationIndex), 800); - } else { - getNotificationCenter().updateAllowedNotifications(transitionAnimationIndex, alowedNotifications); - } + } + int index = waitingForLoad.indexOf(queryLoadIndex); + int currentUserId = getUserConfig().getClientUserId(); + int mode = (Integer) args[14]; + boolean isCache = (Boolean) args[3]; + boolean postponedScroll = postponedScrollToLastMessageQueryIndex > 0 && queryLoadIndex == postponedScrollToLastMessageQueryIndex; + if (postponedScroll) { + postponedScrollToLastMessageQueryIndex = 0; + } + + if (index == -1) { + if (chatMode == MODE_SCHEDULED && mode == MODE_SCHEDULED && !isCache) { + waitingForReplyMessageLoad = true; + waitingForLoad.add(lastLoadIndex); + getMessagesController().loadMessages(dialog_id, mergeDialogId, false, AndroidUtilities.isTablet() ? 
30 : 20, 0, 0, true, 0, classGuid, 2, 0, ChatObject.isChannel(currentChat), chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++); } - int index = waitingForLoad.indexOf(queryLoadIndex); - int currentUserId = getUserConfig().getClientUserId(); - int mode = (Integer) args[14]; - boolean isCache = (Boolean) args[3]; - boolean postponedScroll = postponedScrollToLastMessageQueryIndex > 0 && queryLoadIndex == postponedScrollToLastMessageQueryIndex; - if (postponedScroll) { - postponedScrollToLastMessageQueryIndex = 0; + return; + } else if (!doNotRemoveLoadIndex) { + waitingForLoad.remove(index); + } + ArrayList messArr = (ArrayList) args[2]; + if (messages.isEmpty() && messArr.size() == 1 && MessageObject.isSystemSignUp(messArr.get(0))) { + forceHistoryEmpty = true; + endReached[0] = endReached[1] = true; + forwardEndReached[0] = forwardEndReached[1] = true; + firstLoading = false; + showProgressView(false); + if (!fragmentOpened) { + chatListView.setAnimateEmptyView(false, 1); + chatListView.setEmptyView(emptyViewContainer); + chatListView.setAnimateEmptyView(true, 1); + } else { + chatListView.setEmptyView(emptyViewContainer); } - if (index == -1) { - if (chatMode == MODE_SCHEDULED && mode == MODE_SCHEDULED && !isCache) { - waitingForReplyMessageLoad = true; - waitingForLoad.add(lastLoadIndex); - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, AndroidUtilities.isTablet() ? 
30 : 20, 0, 0, true, 0, classGuid, 2, 0, ChatObject.isChannel(currentChat), chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++); - } - return; - } else if (!doNotRemoveLoadIndex) { - waitingForLoad.remove(index); - } - ArrayList messArr = (ArrayList) args[2]; - if (messages.isEmpty() && messArr.size() == 1 && MessageObject.isSystemSignUp(messArr.get(0))){ - forceHistoryEmpty = true; - endReached[0] = endReached[1] = true; - forwardEndReached[0] = forwardEndReached[1] = true; - firstLoading = false; - showProgressView(false); - if (!fragmentOpened) { - chatListView.setAnimateEmptyView(false, 1); - chatListView.setEmptyView(emptyViewContainer); - chatListView.setAnimateEmptyView(true, 1); - } else { - chatListView.setEmptyView(emptyViewContainer); - } + chatAdapter.notifyDataSetChanged(); + resumeDelayedFragmentAnimation(); + MessageObject messageObject = messArr.get(0); + getMessagesController().markDialogAsRead(dialog_id, messageObject.getId(), messageObject.getId(), messageObject.messageOwner.date, false, 0, 0, true, 0); - chatAdapter.notifyDataSetChanged(); - resumeDelayedFragmentAnimation(); - MessageObject messageObject = messArr.get(0); - if (messageObject.isUnread()) { - getMessagesController().markDialogAsRead(dialog_id, messageObject.getId(), messageObject.getId(), messageObject.messageOwner.date, false, 0, 0, true, 0); - } - return; + return; + } + if (chatMode != mode) { + if (chatMode != MODE_SCHEDULED) { + scheduledMessagesCount = messArr.size(); + updateScheduledInterface(true); } - if (chatMode != mode) { - if (chatMode != MODE_SCHEDULED) { - scheduledMessagesCount = messArr.size(); - updateScheduledInterface(true); - } - return; - } - boolean createUnreadLoading = false; - boolean showDateAfter = waitingForReplyMessageLoad; - if (waitingForReplyMessageLoad) { - if (chatMode != MODE_SCHEDULED && !createUnreadMessageAfterIdLoading) { - boolean found = false; - for (int a = 0; a < messArr.size(); a++) { - MessageObject obj = messArr.get(a); - if 
(obj.getId() == startLoadFromMessageId) { + return; + } + boolean createUnreadLoading = false; + boolean showDateAfter = waitingForReplyMessageLoad; + if (waitingForReplyMessageLoad) { + if (chatMode != MODE_SCHEDULED && !createUnreadMessageAfterIdLoading) { + boolean found = false; + for (int a = 0; a < messArr.size(); a++) { + MessageObject obj = messArr.get(a); + if (obj.getId() == startLoadFromMessageId) { + found = true; + break; + } + if (a + 1 < messArr.size()) { + MessageObject obj2 = messArr.get(a + 1); + if (obj.getId() >= startLoadFromMessageId && obj2.getId() < startLoadFromMessageId) { + startLoadFromMessageId = obj.getId(); found = true; break; } - if (a + 1 < messArr.size()) { - MessageObject obj2 = messArr.get(a + 1); - if (obj.getId() >= startLoadFromMessageId && obj2.getId() < startLoadFromMessageId) { - startLoadFromMessageId = obj.getId(); - found = true; - break; - } + } + } + if (!found) { + startLoadFromMessageId = 0; + return; + } + } + int startLoadFrom = startLoadFromMessageId; + boolean needSelect = needSelectFromMessageId; + int unreadAfterId = createUnreadMessageAfterId; + createUnreadLoading = createUnreadMessageAfterIdLoading; + clearChatData(); + if (chatMode == 0) { + createUnreadMessageAfterId = unreadAfterId; + startLoadFromMessageId = startLoadFrom; + needSelectFromMessageId = needSelect; + } + } + + loadsCount++; + long did = (Long) args[0]; + int loadIndex = did == dialog_id ? 0 : 1; + int count = (Integer) args[1]; + int fnid = (Integer) args[4]; + int last_unread_date = (Integer) args[7]; + int load_type = (Integer) args[8]; + boolean isEnd = (Boolean) args[9]; + int loaded_max_id = (Integer) args[12]; + int loaded_mentions_count = chatWasReset ? 
0 : (Integer) args[13]; + + if (loaded_mentions_count < 0) { + loaded_mentions_count *= -1; + hasAllMentionsLocal = false; + } else if (first) { + hasAllMentionsLocal = true; + } + if (load_type == 4) { + startLoadFromMessageId = loaded_max_id; + + for (int a = messArr.size() - 1; a > 0; a--) { + MessageObject obj = messArr.get(a); + if (obj.type < 0 && obj.getId() == startLoadFromMessageId) { + startLoadFromMessageId = messArr.get(a - 1).getId(); + break; + } + } + } + + if (postponedScroll) { + if (load_type == 0 && isCache && messArr.size() < count) { + postponedScrollToLastMessageQueryIndex = lastLoadIndex; + waitingForLoad.add(lastLoadIndex); + getMessagesController().loadMessages(dialog_id, mergeDialogId, false, count, 0, 0, false, 0, classGuid, 0, 0, ChatObject.isChannel(currentChat), chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++); + return; + } + + if (load_type == 4) { + postponedScrollMessageId = startLoadFromMessageId; + } + + if (progressDialog != null) { + progressDialog.dismiss(); + } + showPinnedProgress(false); + if (postponedScrollIsCanceled) { + return; + } + if (postponedScrollMessageId == 0) { + clearChatData(); + } else { + if (showScrollToMessageError) { + boolean found = false; + for (int k = 0; k < messArr.size(); k++) { + if (messArr.get(k).getId() == postponedScrollMessageId) { + found = true; + break; } } if (!found) { - startLoadFromMessageId = 0; + if (isThreadChat()) { + Bundle bundle = new Bundle(); + if (currentEncryptedChat != null) { + bundle.putInt("enc_id", currentEncryptedChat.id); + } else if (currentChat != null) { + bundle.putInt("chat_id", currentChat.id); + } else { + bundle.putInt("user_id", currentUser.id); + } + bundle.putInt("message_id", postponedScrollMessageId); + presentFragment(new ChatActivity(bundle), true); + } else { + BulletinFactory.of(this).createErrorBulletin(LocaleController.getString("MessageNotFound", R.string.MessageNotFound)).show(); + } return; } + showScrollToMessageError = false; } int 
startLoadFrom = startLoadFromMessageId; boolean needSelect = needSelectFromMessageId; @@ -12175,852 +12342,767 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not needSelectFromMessageId = needSelect; } } + } - loadsCount++; - long did = (Long) args[0]; - int loadIndex = did == dialog_id ? 0 : 1; - int count = (Integer) args[1]; - int fnid = (Integer) args[4]; - int last_unread_date = (Integer) args[7]; - int load_type = (Integer) args[8]; - boolean isEnd = (Boolean) args[9]; - int loaded_max_id = (Integer) args[12]; - int loaded_mentions_count = (Integer) args[13]; + if (chatListItemAnimator != null) { + chatListItemAnimator.setShouldAnimateEnterFromBottom(false); + } - if (loaded_mentions_count < 0) { - loaded_mentions_count *= -1; - hasAllMentionsLocal = false; - } else if (first) { - hasAllMentionsLocal = true; - } - if (load_type == 4) { - startLoadFromMessageId = loaded_max_id; - - for (int a = messArr.size() - 1; a > 0; a--) { - MessageObject obj = messArr.get(a); - if (obj.type < 0 && obj.getId() == startLoadFromMessageId) { - startLoadFromMessageId = messArr.get(a - 1).getId(); - break; - } - } - } - - if (postponedScroll) { - if (load_type == 0 && isCache && messArr.size() < count) { - postponedScrollToLastMessageQueryIndex = lastLoadIndex; - waitingForLoad.add(lastLoadIndex); - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, count, 0, 0, false, 0, classGuid, 0, 0, ChatObject.isChannel(currentChat), chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++); - return; - } - - if (load_type == 4) { - postponedScrollMessageId = startLoadFromMessageId; - } - - if (progressDialog != null) { - progressDialog.dismiss(); - } - showPinnedProgress(false); - if (postponedScrollIsCanceled) { - return; - } - if (postponedScrollMessageId == 0) { - clearChatData(); - } else { - if (showScrollToMessageError) { - boolean found = false; - for (int k = 0; k < messArr.size(); k++) { - if (messArr.get(k).getId() == 
postponedScrollMessageId) { - found = true; - break; - } - } - if (!found) { - if (isThreadChat()) { - Bundle bundle = new Bundle(); - if (currentEncryptedChat != null) { - bundle.putInt("enc_id", currentEncryptedChat.id); - } else if (currentChat != null) { - bundle.putInt("chat_id", currentChat.id); - } else { - bundle.putInt("user_id", currentUser.id); - } - bundle.putInt("message_id", postponedScrollMessageId); - presentFragment(new ChatActivity(bundle), true); - } else { - BulletinFactory.of(this).createErrorBulletin(LocaleController.getString("MessageNotFound", R.string.MessageNotFound)).show(); - } - return; - } - showScrollToMessageError = false; - } - int startLoadFrom = startLoadFromMessageId; - boolean needSelect = needSelectFromMessageId; - int unreadAfterId = createUnreadMessageAfterId; - createUnreadLoading = createUnreadMessageAfterIdLoading; - clearChatData(); - if (chatMode == 0) { - createUnreadMessageAfterId = unreadAfterId; - startLoadFromMessageId = startLoadFrom; - needSelectFromMessageId = needSelect; - } - } - } - - if (chatListItemAnimator != null) { - chatListItemAnimator.setShouldAnimateEnterFromBottom(false); - } - - int unread_to_load = 0; - if (fnid != 0) { + int unread_to_load = 0; + if (fnid != 0) { + if (!chatWasReset) { last_message_id = (Integer) args[5]; - if (load_type == 3) { - if (loadingFromOldPosition) { + } + if (load_type == 3) { + if (loadingFromOldPosition) { + if (!chatWasReset) { unread_to_load = (Integer) args[6]; if (unread_to_load != 0) { createUnreadMessageAfterId = fnid; } - loadingFromOldPosition = false; } - first_unread_id = 0; - } else { - first_unread_id = fnid; + loadingFromOldPosition = false; + } + first_unread_id = 0; + } else { + first_unread_id = fnid; + if (!chatWasReset) { unread_to_load = (Integer) args[6]; } - } else if (startLoadFromMessageId != 0 && (load_type == 3 || load_type == 4)) { - last_message_id = (Integer) args[5]; } - int newRowsCount = 0; + } else if (!chatWasReset && 
startLoadFromMessageId != 0 && (load_type == 3 || load_type == 4)) { + last_message_id = (Integer) args[5]; + } + int newRowsCount = 0; - if (load_type != 0 && (isThreadChat() && first_unread_id != 0 || startLoadFromMessageId != 0 || last_message_id != 0)) { - forwardEndReached[loadIndex] = false; - } - if ((load_type == 1 || load_type == 3) && loadIndex == 1) { - endReached[0] = cacheEndReached[0] = true; - forwardEndReached[0] = false; - minMessageId[0] = 0; - } - if (chatMode == MODE_SCHEDULED) { - endReached[0] = cacheEndReached[0] = true; - forwardEndReached[0] = forwardEndReached[0] = true; - } + if (load_type != 0 && (isThreadChat() && first_unread_id != 0 || startLoadFromMessageId != 0 || last_message_id != 0)) { + forwardEndReached[loadIndex] = false; + } + if ((load_type == 1 || load_type == 3) && loadIndex == 1) { + endReached[0] = cacheEndReached[0] = true; + forwardEndReached[0] = false; + minMessageId[0] = 0; + } + if (chatMode == MODE_SCHEDULED) { + endReached[0] = cacheEndReached[0] = true; + forwardEndReached[0] = forwardEndReached[0] = true; + } - if (loadsCount == 1 && messArr.size() > 20) { - loadsCount++; - } + if (loadsCount == 1 && messArr.size() > 20) { + loadsCount++; + } - boolean isFirstLoading = firstLoading; - if (firstLoading) { - if (!forwardEndReached[loadIndex]) { - messages.clear(); - messagesByDays.clear(); - groupedMessagesMap.clear(); - threadMessageAdded = false; - for (int a = 0; a < 2; a++) { - messagesDict[a].clear(); - if (currentEncryptedChat == null) { - maxMessageId[a] = Integer.MAX_VALUE; - minMessageId[a] = Integer.MIN_VALUE; - } else { - maxMessageId[a] = Integer.MIN_VALUE; - minMessageId[a] = Integer.MAX_VALUE; - } - maxDate[a] = Integer.MIN_VALUE; - minDate[a] = 0; - } - } - firstLoading = false; - AndroidUtilities.runOnUIThread(() -> { - getNotificationCenter().runDelayedNotifications(); - resumeDelayedFragmentAnimation(); - }); - } - - if (isThreadChat() && (load_type == 2 || load_type == 3) && !isCache) { - if 
(load_type == 3 && scrollToThreadMessage) { - startLoadFromMessageId = threadMessageId; - } - int beforMax = 0; - int afterMax = 0; - boolean hasMaxId = false; - for (int a = 0, N = messArr.size(); a < N; a++) { - MessageObject message = messArr.get(a); - int mid = message.getId(); - if (mid == loaded_max_id) { - hasMaxId = true; - } - if (mid > loaded_max_id) { - afterMax++; + if (firstLoading) { + if (!forwardEndReached[loadIndex]) { + messages.clear(); + messagesByDays.clear(); + groupedMessagesMap.clear(); + threadMessageAdded = false; + for (int a = 0; a < 2; a++) { + messagesDict[a].clear(); + if (currentEncryptedChat == null) { + maxMessageId[a] = Integer.MAX_VALUE; + minMessageId[a] = Integer.MIN_VALUE; } else { - beforMax++; + maxMessageId[a] = Integer.MIN_VALUE; + minMessageId[a] = Integer.MAX_VALUE; } + maxDate[a] = Integer.MIN_VALUE; + minDate[a] = 0; } - int num; - if (load_type == 2) { - num = 10; + } + firstLoading = false; + AndroidUtilities.runOnUIThread(() -> { + getNotificationCenter().runDelayedNotifications(); + resumeDelayedFragmentAnimation(); + }); + } + + if (isThreadChat() && (load_type == 2 || load_type == 3) && !isCache) { + if (load_type == 3 && scrollToThreadMessage) { + startLoadFromMessageId = threadMessageId; + } + int beforMax = 0; + int afterMax = 0; + boolean hasMaxId = false; + for (int a = 0, N = messArr.size(); a < N; a++) { + MessageObject message = messArr.get(a); + int mid = message.getId(); + if (mid == loaded_max_id) { + hasMaxId = true; + } + if (mid > loaded_max_id) { + afterMax++; } else { - num = count / 2; - } - if (hasMaxId) { - num++; - } - if (beforMax < num) { - endReached[0] = true; - } - if (afterMax < count - num) { - forwardEndReached[0] = true; + beforMax++; } } - if (chatMode == MODE_PINNED) { - endReached[loadIndex] = true; + int num; + if (load_type == 2) { + num = 10; + } else { + num = count / 2; } - - if (load_type == 0 && forwardEndReached[0] && !pendingSendMessages.isEmpty()) { - for (int a = 0, N = 
messArr.size(); a < N; a++) { - MessageObject existing = pendingSendMessagesDict.get(messArr.get(a).getId()); - if (existing != null) { - pendingSendMessagesDict.remove(existing.getId()); - pendingSendMessages.remove(existing); - } - } - if (!pendingSendMessages.isEmpty()) { - int pasteIndex = 0; - int date = pendingSendMessages.get(0).messageOwner.date; - if (!messArr.isEmpty()) { - if (date >= messArr.get(0).messageOwner.date) { - pasteIndex = 0; - } else if (date <= messArr.get(messArr.size() - 1).messageOwner.date) { - pasteIndex = messArr.size(); - } else { - for (int a = 0, N = messArr.size(); a < N - 1; a++) { - if (messArr.get(a).messageOwner.date >= date && messArr.get(a + 1).messageOwner.date <= date) { - pasteIndex = a + 1; - } - } - } - } - messArr = new ArrayList<>(messArr); - messArr.addAll(pasteIndex, pendingSendMessages); - pendingSendMessages.clear(); - pendingSendMessagesDict.clear(); - } + if (hasMaxId) { + num++; } - - if (!threadMessageAdded && isThreadChat() && (load_type == 0 && messArr.size() < count || (load_type == 2 || load_type == 3) && endReached[0])) { - TLRPC.Message msg = new TLRPC.TL_message(); - if (threadMessageObject.getRepliesCount() == 0) { - if (isComments) { - msg.message = LocaleController.getString("NoComments", R.string.NoComments); - } else { - msg.message = LocaleController.getString("NoReplies", R.string.NoReplies); - } - } else { - msg.message = LocaleController.getString("DiscussionStarted", R.string.DiscussionStarted); - } - msg.id = 0; - msg.date = threadMessageObject.messageOwner.date; - replyMessageHeaderObject = new MessageObject(currentAccount, msg, false, false); - replyMessageHeaderObject.type = 10; - replyMessageHeaderObject.contentType = 1; - replyMessageHeaderObject.isDateObject = true; - replyMessageHeaderObject.stableId = lastStableId++; - messArr.add(replyMessageHeaderObject); - updateReplyMessageHeader(false); - - messArr.addAll(threadMessageObjects); - - count += 2; - threadMessageAdded = true; + if 
(beforMax < num) { + endReached[0] = true; } - if (load_type == 1) { - Collections.reverse(messArr); - } - if (currentEncryptedChat == null) { - getMediaDataController().loadReplyMessagesForMessages(messArr, dialog_id, chatMode == MODE_SCHEDULED, null); - } - int approximateHeightSum = 0; - if ((load_type == 2 || load_type == 1) && messArr.isEmpty() && !isCache) { + if (!chatWasReset && afterMax < count - num) { forwardEndReached[0] = true; } - LongSparseArray newGroups = null; - LongSparseArray changedGroups = null; - MediaController mediaController = MediaController.getInstance(); - TLRPC.MessageAction dropPhotoAction = null; - boolean createdWas = false; - boolean moveCurrentDateObject = false; - boolean scrolledToUnread = false; + } + if (chatMode == MODE_PINNED) { + endReached[loadIndex] = true; + } + + if (load_type == 0 && forwardEndReached[0] && !pendingSendMessages.isEmpty()) { for (int a = 0, N = messArr.size(); a < N; a++) { - MessageObject obj = messArr.get(N - a - 1); - TLRPC.MessageAction action = obj.messageOwner.action; - if (a == 0 && action instanceof TLRPC.TL_messageActionChatCreate) { - createdWas = true; - } else if (!createdWas) { - break; - } else if (a < 2 && action instanceof TLRPC.TL_messageActionChatEditPhoto) { - dropPhotoAction = action; + MessageObject existing = pendingSendMessagesDict.get(messArr.get(a).getId()); + if (existing != null) { + pendingSendMessagesDict.remove(existing.getId()); + pendingSendMessages.remove(existing); } } - for (int a = 0; a < messArr.size(); a++) { - MessageObject obj = messArr.get(a); - if (obj.replyMessageObject != null) { - repliesMessagesDict.put(obj.replyMessageObject.getId(), obj.replyMessageObject); - } - int messageId = obj.getId(); - if (threadMessageId != 0) { - if (messageId <= (obj.isOut() ? 
threadMaxOutboxReadId : threadMaxInboxReadId)) { - obj.setIsRead(); - } - } - approximateHeightSum += obj.getApproximateHeight(); - if (currentUser != null) { - if (currentUser.self) { - obj.messageOwner.out = true; - } - if (chatMode != MODE_SCHEDULED && (currentUser.bot && obj.isOut() || currentUser.id == currentUserId)) { - obj.setIsRead(); - } - } - if (messagesDict[loadIndex].indexOfKey(messageId) >= 0) { - continue; - } - if (threadMessageId != 0 && obj.messageOwner instanceof TLRPC.TL_messageEmpty) { - continue; - } - if (currentEncryptedChat != null && obj.messageOwner.stickerVerified == 0) { - getMediaDataController().verifyAnimatedStickerMessage(obj.messageOwner); - } - addToPolls(obj, null); - if (isSecretChat()) { - checkSecretMessageForLocation(obj); - } - if (mediaController.isPlayingMessage(obj)) { - MessageObject player = mediaController.getPlayingMessageObject(); - obj.audioProgress = player.audioProgress; - obj.audioProgressSec = player.audioProgressSec; - obj.audioPlayerDuration = player.audioPlayerDuration; - } - if (loadIndex == 0 && ChatObject.isChannel(currentChat) && messageId == 1) { - endReached[loadIndex] = true; - cacheEndReached[loadIndex] = true; - } - if (messageId > 0) { - maxMessageId[loadIndex] = Math.min(messageId, maxMessageId[loadIndex]); - minMessageId[loadIndex] = Math.max(messageId, minMessageId[loadIndex]); - } else if (currentEncryptedChat != null) { - maxMessageId[loadIndex] = Math.max(messageId, maxMessageId[loadIndex]); - minMessageId[loadIndex] = Math.min(messageId, minMessageId[loadIndex]); - } - if (obj.messageOwner.date != 0) { - maxDate[loadIndex] = Math.max(maxDate[loadIndex], obj.messageOwner.date); - if (minDate[loadIndex] == 0 || obj.messageOwner.date < minDate[loadIndex]) { - minDate[loadIndex] = obj.messageOwner.date; - } - } - - if (messageId != 0 && messageId == last_message_id) { - forwardEndReached[loadIndex] = true; - } - - TLRPC.MessageAction action = obj.messageOwner.action; - if (obj.type < 0 || 
loadIndex == 1 && action instanceof TLRPC.TL_messageActionChatMigrateTo) { - continue; - } - - if (currentChat != null && currentChat.creator && (action instanceof TLRPC.TL_messageActionChatCreate || dropPhotoAction != null && action == dropPhotoAction)) { - continue; - } - if (obj.messageOwner.action instanceof TLRPC.TL_messageActionChannelMigrateFrom) { - continue; - } - - if (needAnimateToMessage != null && needAnimateToMessage.getId() == messageId && messageId < 0 && chatMode != MODE_SCHEDULED) { - obj = needAnimateToMessage; - animatingMessageObjects.add(obj); - needAnimateToMessage = null; - } - - messagesDict[loadIndex].put(messageId, obj); - ArrayList dayArray = messagesByDays.get(obj.dateKey); - - if (dayArray == null) { - dayArray = new ArrayList<>(); - messagesByDays.put(obj.dateKey, dayArray); - TLRPC.Message dateMsg = new TLRPC.TL_message(); - if (chatMode == MODE_SCHEDULED) { - if (obj.messageOwner.date == 0x7ffffffe) { - dateMsg.message = LocaleController.getString("MessageScheduledUntilOnline", R.string.MessageScheduledUntilOnline); - } else { - dateMsg.message = LocaleController.formatString("MessageScheduledOn", R.string.MessageScheduledOn, LocaleController.formatDateChat(obj.messageOwner.date, true)); + if (!pendingSendMessages.isEmpty()) { + int pasteIndex = 0; + int date = pendingSendMessages.get(0).messageOwner.date; + if (!messArr.isEmpty()) { + if (date >= messArr.get(0).messageOwner.date) { + pasteIndex = 0; + } else if (date <= messArr.get(messArr.size() - 1).messageOwner.date) { + pasteIndex = messArr.size(); + } else { + for (int a = 0, N = messArr.size(); a < N - 1; a++) { + if (messArr.get(a).messageOwner.date >= date && messArr.get(a + 1).messageOwner.date <= date) { + pasteIndex = a + 1; + } } - } else { - dateMsg.message = LocaleController.formatDateChat(obj.messageOwner.date); } - dateMsg.id = 0; - Calendar calendar = Calendar.getInstance(); - calendar.setTimeInMillis(((long) obj.messageOwner.date) * 1000); - 
calendar.set(Calendar.HOUR_OF_DAY, 0); - calendar.set(Calendar.MINUTE, 0); - dateMsg.date = (int) (calendar.getTimeInMillis() / 1000); - MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); - dateObj.type = 10; - dateObj.contentType = 1; - dateObj.isDateObject = true; - dateObj.stableId = lastStableId++; - if (load_type == 1) { - messages.add(0, dateObj); - } else { - messages.add(dateObj); - } - newRowsCount++; + } + messArr = new ArrayList<>(messArr); + messArr.addAll(pasteIndex, pendingSendMessages); + pendingSendMessages.clear(); + pendingSendMessagesDict.clear(); + } + } + + if (!threadMessageAdded && isThreadChat() && (load_type == 0 && messArr.size() < count || (load_type == 2 || load_type == 3) && endReached[0])) { + TLRPC.Message msg = new TLRPC.TL_message(); + if (threadMessageObject.getRepliesCount() == 0) { + if (isComments) { + msg.message = LocaleController.getString("NoComments", R.string.NoComments); } else { - if (!moveCurrentDateObject && !messages.isEmpty() && messages.get(messages.size() - 1).isDateObject) { - messages.get(messages.size() - 1).stableId = lastStableId++; - moveCurrentDateObject = true; - } + msg.message = LocaleController.getString("NoReplies", R.string.NoReplies); } + } else { + msg.message = LocaleController.getString("DiscussionStarted", R.string.DiscussionStarted); + } + msg.id = 0; + msg.date = threadMessageObject.messageOwner.date; + replyMessageHeaderObject = new MessageObject(currentAccount, msg, false, false); + replyMessageHeaderObject.type = 10; + replyMessageHeaderObject.contentType = 1; + replyMessageHeaderObject.isDateObject = true; + replyMessageHeaderObject.stableId = lastStableId++; + messArr.add(replyMessageHeaderObject); + updateReplyMessageHeader(false); - if (obj.hasValidGroupId()) { - MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(obj.getGroupIdForUse()); - if (groupedMessages != null) { - if (messages.size() > 1) { - MessageObject previous; - if 
(load_type == 1) { - previous = messages.get(0); - } else { - previous = messages.get(messages.size() - 2); - } - if (previous.getGroupIdForUse() == obj.getGroupIdForUse()) { - if (previous.localGroupId != 0) { - obj.localGroupId = previous.localGroupId; - groupedMessages = groupedMessagesMap.get(previous.localGroupId); - } - } else if (previous.getGroupIdForUse() != obj.getGroupIdForUse()) { - obj.localGroupId = Utilities.random.nextLong(); - groupedMessages = null; - } - } - } - if (groupedMessages == null) { - groupedMessages = new MessageObject.GroupedMessages(); - groupedMessages.groupId = obj.getGroupId(); - groupedMessagesMap.put(groupedMessages.groupId, groupedMessages); - } else if (newGroups == null || newGroups.indexOfKey(obj.getGroupId()) < 0) { - if (changedGroups == null) { - changedGroups = new LongSparseArray<>(); - } - changedGroups.put(obj.getGroupId(), groupedMessages); - } - if (newGroups == null) { - newGroups = new LongSparseArray<>(); - } - newGroups.put(groupedMessages.groupId, groupedMessages); - if (load_type == 1) { - groupedMessages.messages.add(obj); + messArr.addAll(threadMessageObjects); + + count += 2; + threadMessageAdded = true; + } + if (load_type == 1) { + Collections.reverse(messArr); + } + if (currentEncryptedChat == null) { + getMediaDataController().loadReplyMessagesForMessages(messArr, dialog_id, chatMode == MODE_SCHEDULED, null); + } + int approximateHeightSum = 0; + if (!chatWasReset && (load_type == 2 || load_type == 1) && messArr.isEmpty() && !isCache) { + forwardEndReached[0] = true; + } + LongSparseArray newGroups = null; + LongSparseArray changedGroups = null; + MediaController mediaController = MediaController.getInstance(); + TLRPC.MessageAction dropPhotoAction = null; + boolean createdWas = false; + boolean moveCurrentDateObject = false; + boolean scrolledToUnread = false; + for (int a = 0, N = messArr.size(); a < N; a++) { + MessageObject obj = messArr.get(N - a - 1); + TLRPC.MessageAction action = 
obj.messageOwner.action; + if (a == 0 && action instanceof TLRPC.TL_messageActionChatCreate) { + createdWas = true; + } else if (!createdWas) { + break; + } else if (a < 2 && action instanceof TLRPC.TL_messageActionChatEditPhoto) { + dropPhotoAction = action; + } + } + for (int a = 0; a < messArr.size(); a++) { + MessageObject obj = messArr.get(a); + if (obj.replyMessageObject != null) { + repliesMessagesDict.put(obj.replyMessageObject.getId(), obj.replyMessageObject); + } + int messageId = obj.getId(); + if (threadMessageId != 0) { + if (messageId <= (obj.isOut() ? threadMaxOutboxReadId : threadMaxInboxReadId)) { + obj.setIsRead(); + } + } + approximateHeightSum += obj.getApproximateHeight(); + if (currentUser != null) { + if (currentUser.self) { + obj.messageOwner.out = true; + } + if (chatMode != MODE_SCHEDULED && (currentUser.bot && obj.isOut() || currentUser.id == currentUserId)) { + obj.setIsRead(); + } + } + if (messagesDict[loadIndex].indexOfKey(messageId) >= 0) { + continue; + } + if (threadMessageId != 0 && obj.messageOwner instanceof TLRPC.TL_messageEmpty) { + continue; + } + if (currentEncryptedChat != null && obj.messageOwner.stickerVerified == 0) { + getMediaDataController().verifyAnimatedStickerMessage(obj.messageOwner); + } + addToPolls(obj, null); + if (isSecretChat()) { + checkSecretMessageForLocation(obj); + } + if (mediaController.isPlayingMessage(obj)) { + MessageObject player = mediaController.getPlayingMessageObject(); + obj.audioProgress = player.audioProgress; + obj.audioProgressSec = player.audioProgressSec; + obj.audioPlayerDuration = player.audioPlayerDuration; + } + if (loadIndex == 0 && ChatObject.isChannel(currentChat) && messageId == 1) { + endReached[loadIndex] = true; + cacheEndReached[loadIndex] = true; + } + if (messageId > 0) { + maxMessageId[loadIndex] = Math.min(messageId, maxMessageId[loadIndex]); + minMessageId[loadIndex] = Math.max(messageId, minMessageId[loadIndex]); + } else if (currentEncryptedChat != null) { + 
maxMessageId[loadIndex] = Math.max(messageId, maxMessageId[loadIndex]); + minMessageId[loadIndex] = Math.min(messageId, minMessageId[loadIndex]); + } + if (obj.messageOwner.date != 0) { + maxDate[loadIndex] = Math.max(maxDate[loadIndex], obj.messageOwner.date); + if (minDate[loadIndex] == 0 || obj.messageOwner.date < minDate[loadIndex]) { + minDate[loadIndex] = obj.messageOwner.date; + } + } + + if (!chatWasReset && messageId != 0 && messageId == last_message_id) { + forwardEndReached[loadIndex] = true; + } + + TLRPC.MessageAction action = obj.messageOwner.action; + if (obj.type < 0 || loadIndex == 1 && action instanceof TLRPC.TL_messageActionChatMigrateTo) { + continue; + } + + if (currentChat != null && currentChat.creator && (action instanceof TLRPC.TL_messageActionChatCreate || dropPhotoAction != null && action == dropPhotoAction)) { + continue; + } + if (obj.messageOwner.action instanceof TLRPC.TL_messageActionChannelMigrateFrom) { + continue; + } + + if (needAnimateToMessage != null && needAnimateToMessage.getId() == messageId && messageId < 0 && chatMode != MODE_SCHEDULED) { + obj = needAnimateToMessage; + animatingMessageObjects.add(obj); + needAnimateToMessage = null; + } + + messagesDict[loadIndex].put(messageId, obj); + ArrayList dayArray = messagesByDays.get(obj.dateKey); + + if (dayArray == null) { + dayArray = new ArrayList<>(); + messagesByDays.put(obj.dateKey, dayArray); + TLRPC.Message dateMsg = new TLRPC.TL_message(); + if (chatMode == MODE_SCHEDULED) { + if (obj.messageOwner.date == 0x7ffffffe) { + dateMsg.message = LocaleController.getString("MessageScheduledUntilOnline", R.string.MessageScheduledUntilOnline); } else { - groupedMessages.messages.add(0, obj); + dateMsg.message = LocaleController.formatString("MessageScheduledOn", R.string.MessageScheduledOn, LocaleController.formatDateChat(obj.messageOwner.date, true)); } - } else if (obj.getGroupIdForUse() != 0) { - obj.messageOwner.grouped_id = 0; - obj.localSentGroupId = 0; + } else { + 
dateMsg.message = LocaleController.formatDateChat(obj.messageOwner.date); } - - newRowsCount++; - dayArray.add(obj); - obj.stableId = lastStableId++; + dateMsg.id = 0; + Calendar calendar = Calendar.getInstance(); + calendar.setTimeInMillis(((long) obj.messageOwner.date) * 1000); + calendar.set(Calendar.HOUR_OF_DAY, 0); + calendar.set(Calendar.MINUTE, 0); + dateMsg.date = (int) (calendar.getTimeInMillis() / 1000); + MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); + dateObj.type = 10; + dateObj.contentType = 1; + dateObj.isDateObject = true; + dateObj.stableId = lastStableId++; if (load_type == 1) { - messages.add(0, obj); + messages.add(0, dateObj); } else { + messages.add(dateObj); + } + newRowsCount++; + } else { + if (!moveCurrentDateObject && !messages.isEmpty() && messages.get(messages.size() - 1).isDateObject) { messages.get(messages.size() - 1).stableId = lastStableId++; - messages.add(messages.size() - 1, obj); + moveCurrentDateObject = true; + } + } + + if (obj.hasValidGroupId()) { + MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(obj.getGroupIdForUse()); + if (groupedMessages != null) { + if (messages.size() > 1) { + MessageObject previous; + if (load_type == 1) { + previous = messages.get(0); + } else { + previous = messages.get(messages.size() - 2); + } + if (previous.getGroupIdForUse() == obj.getGroupIdForUse()) { + if (previous.localGroupId != 0) { + obj.localGroupId = previous.localGroupId; + groupedMessages = groupedMessagesMap.get(previous.localGroupId); + } + } else if (previous.getGroupIdForUse() != obj.getGroupIdForUse()) { + obj.localGroupId = Utilities.random.nextLong(); + groupedMessages = null; + } + } + } + if (groupedMessages == null) { + groupedMessages = new MessageObject.GroupedMessages(); + groupedMessages.groupId = obj.getGroupId(); + groupedMessagesMap.put(groupedMessages.groupId, groupedMessages); + } else if (newGroups == null || newGroups.indexOfKey(obj.getGroupId()) < 0) { 
+ if (changedGroups == null) { + changedGroups = new LongSparseArray<>(); + } + changedGroups.put(obj.getGroupId(), groupedMessages); + } + if (newGroups == null) { + newGroups = new LongSparseArray<>(); + } + newGroups.put(groupedMessages.groupId, groupedMessages); + if (load_type == 1) { + groupedMessages.messages.add(obj); + } else { + groupedMessages.messages.add(0, obj); + } + } else if (obj.getGroupIdForUse() != 0) { + obj.messageOwner.grouped_id = 0; + obj.localSentGroupId = 0; + } + + newRowsCount++; + dayArray.add(obj); + obj.stableId = lastStableId++; + if (load_type == 1) { + messages.add(0, obj); + } else { + messages.get(messages.size() - 1).stableId = lastStableId++; + messages.add(messages.size() - 1, obj); + } + + MessageObject prevObj; + if (currentEncryptedChat == null) { + if (createUnreadMessageAfterId != 0 && load_type != 1 && a + 1 < messArr.size()) { + prevObj = messArr.get(a + 1); + if (obj.isOut() && !obj.messageOwner.from_scheduled || prevObj.getId() >= createUnreadMessageAfterId) { + prevObj = null; + } + } else { + prevObj = null; + } + } else { + if (createUnreadMessageAfterId != 0 && load_type != 1 && a - 1 >= 0) { + prevObj = messArr.get(a - 1); + if (obj.isOut() && !obj.messageOwner.from_scheduled || prevObj.getId() >= createUnreadMessageAfterId) { + prevObj = null; + } + } else { + prevObj = null; + } + } + if (load_type == 2 && messageId != 0 && messageId == first_unread_id) { + if ((approximateHeightSum > AndroidUtilities.displaySize.y / 2 || isThreadChat()) || !forwardEndReached[0]) { + if (!isThreadChat() || threadMaxInboxReadId != 0) { + TLRPC.Message dateMsg = new TLRPC.TL_message(); + dateMsg.message = ""; + dateMsg.id = 0; + MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); + dateObj.type = 6; + dateObj.contentType = 2; + dateObj.stableId = lastStableId++; + messages.add(messages.size() - 1, dateObj); + unreadMessageObject = dateObj; + scrollToMessage = unreadMessageObject; + } else { + 
scrollToMessage = obj; + } + scrollToMessagePosition = -10000; + scrolledToUnread = true; + newRowsCount++; + } + } else if ((load_type == 3 || load_type == 4) && (startLoadFromMessageId < 0 && messageId == startLoadFromMessageId || startLoadFromMessageId > 0 && messageId > 0 && messageId <= startLoadFromMessageId)) { + removeSelectedMessageHighlight(); + if (needSelectFromMessageId && messageId == startLoadFromMessageId) { + highlightMessageId = messageId; + } + if (showScrollToMessageError && messageId != startLoadFromMessageId) { + BulletinFactory.of(this).createErrorBulletin(LocaleController.getString("MessageNotFound", R.string.MessageNotFound)).show(); + } + scrollToMessage = obj; + if (postponedScroll) { + postponedScrollMessageId = scrollToMessage.getId(); + } + startLoadFromMessageId = 0; + if (scrollToMessagePosition == -10000) { + scrollToMessagePosition = -9000; + } + } + if (load_type != 2 && unreadMessageObject == null && createUnreadMessageAfterId != 0 && + (currentEncryptedChat == null && (!obj.isOut() || obj.messageOwner.from_scheduled) && messageId >= createUnreadMessageAfterId || currentEncryptedChat != null && (!obj.isOut() || obj.messageOwner.from_scheduled) && messageId <= createUnreadMessageAfterId) && + (load_type == 1 || prevObj != null || prevObj == null && createUnreadLoading && a == messArr.size() - 1)) { + TLRPC.Message dateMsg = new TLRPC.TL_message(); + dateMsg.message = ""; + dateMsg.id = 0; + MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); + dateObj.type = 6; + dateObj.contentType = 2; + dateObj.stableId = lastStableId++; + if (load_type == 1) { + messages.add(1, dateObj); + } else { + messages.add(messages.size() - 1, dateObj); + } + unreadMessageObject = dateObj; + if (load_type == 3) { + scrollToMessage = unreadMessageObject; + startLoadFromMessageId = 0; + scrollToMessagePosition = -9000; + } + newRowsCount++; + } + } + if (createUnreadLoading) { + createUnreadMessageAfterId = 0; + } + if 
(load_type == 0 && newRowsCount == 0) { + loadsCount--; + } + + if (forwardEndReached[loadIndex] && loadIndex != 1) { + first_unread_id = 0; + last_message_id = 0; + createUnreadMessageAfterId = 0; + } + + if (load_type == 1) { + if (!chatWasReset && messArr.size() != count && (!isCache || currentEncryptedChat != null || forwardEndReached[loadIndex])) { + forwardEndReached[loadIndex] = true; + if (loadIndex != 1) { + first_unread_id = 0; + last_message_id = 0; + createUnreadMessageAfterId = 0; + chatAdapter.notifyItemRemoved(chatAdapter.loadingDownRow); + } + startLoadFromMessageId = 0; + } + if (newRowsCount > 0) { + int firstVisPos = chatLayoutManager.findFirstVisibleItemPosition(); + int lastVisPos = chatLayoutManager.findLastVisibleItemPosition(); + int top = 0; + MessageObject scrollToMessageObject = null; + if (firstVisPos != RecyclerView.NO_POSITION) { + for (int i = firstVisPos; i <= lastVisPos; i++) { + View v = chatLayoutManager.findViewByPosition(i); + if (v instanceof ChatMessageCell) { + scrollToMessageObject = ((ChatMessageCell) v).getMessageObject(); + top = chatListView.getMeasuredHeight() - v.getBottom() - chatListView.getPaddingBottom(); + break; + } else if (v instanceof ChatActionCell) { + scrollToMessageObject = ((ChatActionCell) v).getMessageObject(); + top = chatListView.getMeasuredHeight() - v.getBottom() - chatListView.getPaddingBottom(); + break; + } + } } - MessageObject prevObj; - if (currentEncryptedChat == null) { - if (createUnreadMessageAfterId != 0 && load_type != 1 && a + 1 < messArr.size()) { - prevObj = messArr.get(a + 1); - if (obj.isOut() && !obj.messageOwner.from_scheduled || prevObj.getId() >= createUnreadMessageAfterId) { - prevObj = null; + if (!postponedScroll) { + chatAdapter.notifyItemRangeInserted(1, newRowsCount); + if (scrollToMessageObject != null) { + int scrollToIndex = messages.indexOf(scrollToMessageObject); + if (scrollToIndex > 0) { + chatLayoutManager.scrollToPositionWithOffset(chatAdapter.messagesStartRow + 
scrollToIndex, top); } - } else { - prevObj = null; - } - } else { - if (createUnreadMessageAfterId != 0 && load_type != 1 && a - 1 >= 0) { - prevObj = messArr.get(a - 1); - if (obj.isOut() && !obj.messageOwner.from_scheduled || prevObj.getId() >= createUnreadMessageAfterId) { - prevObj = null; - } - } else { - prevObj = null; } } - if (load_type == 2 && messageId != 0 && messageId == first_unread_id) { - if ((approximateHeightSum > AndroidUtilities.displaySize.y / 2 || isThreadChat()) || !forwardEndReached[0]) { - if (!isThreadChat() || threadMaxInboxReadId != 0) { - TLRPC.Message dateMsg = new TLRPC.TL_message(); - dateMsg.message = ""; - dateMsg.id = 0; - MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); - dateObj.type = 6; - dateObj.contentType = 2; - dateObj.stableId = lastStableId++; - messages.add(messages.size() - 1, dateObj); - unreadMessageObject = dateObj; - scrollToMessage = unreadMessageObject; + } + loadingForward = false; + } else { + if (messArr.size() < count && load_type != 3 && load_type != 4) { + if (isCache) { + if (currentEncryptedChat != null || loadIndex == 1 && mergeDialogId != 0 && isEnd) { + endReached[loadIndex] = true; + } + if (load_type != 2) { + cacheEndReached[loadIndex] = true; + } + } else if (load_type != 2 || messArr.size() == 0 && messages.isEmpty()) { + endReached[loadIndex] = true; + } + } + loading = false; + + if (chatListView != null && chatScrollHelper != null) { + if (first || scrollToTopOnResume || forceScrollToTop) { + forceScrollToTop = false; + if (!postponedScroll) { + chatAdapter.notifyDataSetChanged(true); + } + if (scrollToMessage != null) { + int yOffset; + boolean bottom = true; + if (startLoadFromMessageOffset != Integer.MAX_VALUE) { + yOffset = -startLoadFromMessageOffset - chatListView.getPaddingBottom(); + startLoadFromMessageOffset = Integer.MAX_VALUE; + } else if (scrollToMessagePosition == -9000) { + yOffset = getScrollOffsetForMessage(scrollToMessage); + bottom = false; + 
} else if (scrollToMessagePosition == -10000) { + yOffset = -AndroidUtilities.dp(11); + if (scrolledToUnread && threadMessageId != 0) { + yOffset += AndroidUtilities.dp(48); + } + bottom = false; } else { - scrollToMessage = obj; + yOffset = scrollToMessagePosition; + } + if (!postponedScroll) { + if (!messages.isEmpty()) { + if (chatAdapter.loadingUpRow >= 0 && !messages.isEmpty() && (messages.get(messages.size() - 1) == scrollToMessage || messages.get(messages.size() - 2) == scrollToMessage)) { + chatLayoutManager.scrollToPositionWithOffset(chatAdapter.loadingUpRow, yOffset, bottom); + } else { + chatLayoutManager.scrollToPositionWithOffset(chatAdapter.messagesStartRow + messages.indexOf(scrollToMessage), yOffset, bottom); + } + } + } + chatListView.invalidate(); + if (scrollToMessagePosition == -10000 || scrollToMessagePosition == -9000) { + canShowPagedownButton = true; + updatePagedownButtonVisibility(true); + if (unread_to_load != 0) { + if (pagedownButtonCounter != null) { + if (prevSetUnreadCount != newUnreadMessageCount) { + pagedownButtonCounter.setCount(newUnreadMessageCount = unread_to_load, openAnimationEnded); + prevSetUnreadCount = newUnreadMessageCount; + } + } + } } scrollToMessagePosition = -10000; - scrolledToUnread = true; - newRowsCount++; - } - } else if ((load_type == 3 || load_type == 4) && (startLoadFromMessageId < 0 && messageId == startLoadFromMessageId || startLoadFromMessageId > 0 && messageId > 0 && messageId <= startLoadFromMessageId)) { - removeSelectedMessageHighlight(); - if (needSelectFromMessageId && messageId == startLoadFromMessageId) { - highlightMessageId = messageId; - } - if (showScrollToMessageError && messageId != startLoadFromMessageId) { - BulletinFactory.of(this).createErrorBulletin(LocaleController.getString("MessageNotFound", R.string.MessageNotFound)).show(); - } - scrollToMessage = obj; - if (postponedScroll) { - postponedScrollMessageId = scrollToMessage.getId(); - } - startLoadFromMessageId = 0; - if 
(scrollToMessagePosition == -10000) { - scrollToMessagePosition = -9000; - } - } - if (load_type != 2 && unreadMessageObject == null && createUnreadMessageAfterId != 0 && - (currentEncryptedChat == null && (!obj.isOut() || obj.messageOwner.from_scheduled) && messageId >= createUnreadMessageAfterId || currentEncryptedChat != null && (!obj.isOut() || obj.messageOwner.from_scheduled) && messageId <= createUnreadMessageAfterId) && - (load_type == 1 || prevObj != null || prevObj == null && createUnreadLoading && a == messArr.size() - 1)) { - TLRPC.Message dateMsg = new TLRPC.TL_message(); - dateMsg.message = ""; - dateMsg.id = 0; - MessageObject dateObj = new MessageObject(currentAccount, dateMsg, false, false); - dateObj.type = 6; - dateObj.contentType = 2; - dateObj.stableId = lastStableId++; - if (load_type == 1) { - messages.add(1, dateObj); + scrollToMessage = null; } else { - messages.add(messages.size() - 1, dateObj); + moveScrollToLastMessage(); } - unreadMessageObject = dateObj; - if (load_type == 3) { - scrollToMessage = unreadMessageObject; - startLoadFromMessageId = 0; - scrollToMessagePosition = -9000; - } - newRowsCount++; - } - } - if (createUnreadLoading) { - createUnreadMessageAfterId = 0; - } - if (load_type == 0 && newRowsCount == 0) { - loadsCount--; - } - - if (forwardEndReached[loadIndex] && loadIndex != 1) { - first_unread_id = 0; - last_message_id = 0; - createUnreadMessageAfterId = 0; - } - - if (load_type == 1) { - if (messArr.size() != count && (!isCache || currentEncryptedChat != null || forwardEndReached[loadIndex])) { - forwardEndReached[loadIndex] = true; - if (loadIndex != 1) { - first_unread_id = 0; - last_message_id = 0; - createUnreadMessageAfterId = 0; - chatAdapter.notifyItemRemoved(chatAdapter.loadingDownRow); - } - startLoadFromMessageId = 0; - } - if (newRowsCount > 0) { - int firstVisPos = chatLayoutManager.findFirstVisibleItemPosition(); - int lastVisPos = chatLayoutManager.findLastVisibleItemPosition(); - int top = 0; - 
MessageObject scrollToMessageObject = null; - if (firstVisPos != RecyclerView.NO_POSITION) { - for (int i = firstVisPos; i <= lastVisPos; i++) { - View v = chatLayoutManager.findViewByPosition(i); - if (v instanceof ChatMessageCell) { - scrollToMessageObject = ((ChatMessageCell) v).getMessageObject(); - top = chatListView.getMeasuredHeight() - v.getBottom() - chatListView.getPaddingBottom(); - break; - } else if (v instanceof ChatActionCell) { - scrollToMessageObject = ((ChatActionCell) v).getMessageObject(); - top = chatListView.getMeasuredHeight() - v.getBottom() - chatListView.getPaddingBottom(); - break; - } + if (loaded_mentions_count != 0) { + showMentionDownButton(true, true); + if (mentiondownButtonCounter != null) { + mentiondownButtonCounter.setVisibility(View.VISIBLE); + mentiondownButtonCounter.setText(String.format("%d", newMentionsCount = loaded_mentions_count)); } } - - if (!postponedScroll) { - chatAdapter.notifyItemRangeInserted(1, newRowsCount); - if (scrollToMessageObject != null) { + } else { + if (newRowsCount != 0) { + int firstVisPos = chatLayoutManager.findFirstVisibleItemPosition(); + int lastVisPos = chatLayoutManager.findLastVisibleItemPosition(); + int top = 0; + MessageObject scrollToMessageObject = null; + if (firstVisPos != RecyclerView.NO_POSITION) { + for (int i = firstVisPos; i <= lastVisPos; i++) { + View v = chatLayoutManager.findViewByPosition(i); + if (v instanceof ChatMessageCell) { + scrollToMessageObject = ((ChatMessageCell) v).getMessageObject(); + top = chatListView.getMeasuredHeight() - v.getBottom() - chatListView.getPaddingBottom(); + break; + } else if (v instanceof ChatActionCell) { + scrollToMessageObject = ((ChatActionCell) v).getMessageObject(); + top = chatListView.getMeasuredHeight() - v.getBottom() - chatListView.getPaddingBottom(); + break; + } + } + } + int insertStart = chatAdapter.messagesEndRow; + int loadingUpRow = chatAdapter.loadingUpRow; + chatAdapter.updateRowsInternal(); + if (loadingUpRow >= 0 && 
chatAdapter.loadingUpRow < 0) { + chatAdapter.notifyItemRemoved(loadingUpRow); + } + if (newRowsCount > 0) { + if (moveCurrentDateObject) { + chatAdapter.notifyItemRemoved(insertStart - 1); + chatAdapter.notifyItemRangeInserted(insertStart - 1, newRowsCount + 1); + } else { + chatAdapter.notifyItemChanged(insertStart - 1); + chatAdapter.notifyItemRangeInserted(insertStart, newRowsCount); + } + } + if (!postponedScroll && scrollToMessageObject != null) { int scrollToIndex = messages.indexOf(scrollToMessageObject); if (scrollToIndex > 0) { chatLayoutManager.scrollToPositionWithOffset(chatAdapter.messagesStartRow + scrollToIndex, top); } } - } - } - loadingForward = false; - } else { - if (messArr.size() < count && load_type != 3 && load_type != 4) { - if (isCache) { - if (currentEncryptedChat != null || loadIndex == 1 && mergeDialogId != 0 && isEnd) { - endReached[loadIndex] = true; - } - if (load_type != 2) { - cacheEndReached[loadIndex] = true; - } - } else if (load_type != 2 || messArr.size() == 0 && messages.isEmpty()) { - endReached[loadIndex] = true; - } - } - loading = false; - - if (chatListView != null && chatScrollHelper != null) { - if (first || scrollToTopOnResume || forceScrollToTop) { - forceScrollToTop = false; - if (!postponedScroll) { - chatAdapter.notifyDataSetChanged(true); - } - if (scrollToMessage != null) { - int yOffset; - boolean bottom = true; - if (startLoadFromMessageOffset != Integer.MAX_VALUE) { - yOffset = -startLoadFromMessageOffset - chatListView.getPaddingBottom(); - startLoadFromMessageOffset = Integer.MAX_VALUE; - } else if (scrollToMessagePosition == -9000) { - yOffset = getScrollOffsetForMessage(scrollToMessage); - bottom = false; - } else if (scrollToMessagePosition == -10000) { - yOffset = -AndroidUtilities.dp(11); - if (scrolledToUnread && threadMessageId != 0) { - yOffset += AndroidUtilities.dp(48); - } - bottom = false; - } else { - yOffset = scrollToMessagePosition; - } - if (!postponedScroll) { - if (!messages.isEmpty()) { 
- if (chatAdapter.loadingUpRow >= 0 && !messages.isEmpty() && (messages.get(messages.size() - 1) == scrollToMessage || messages.get(messages.size() - 2) == scrollToMessage)) { - chatLayoutManager.scrollToPositionWithOffset(chatAdapter.loadingUpRow, yOffset, bottom); - } else { - chatLayoutManager.scrollToPositionWithOffset(chatAdapter.messagesStartRow + messages.indexOf(scrollToMessage), yOffset, bottom); - } - } - } - chatListView.invalidate(); - if (scrollToMessagePosition == -10000 || scrollToMessagePosition == -9000) { - canShowPagedownButton = true; - updatePagedownButtonVisibility(true); - if (unread_to_load != 0) { - if (pagedownButtonCounter != null) { - if (prevSetUnreadCount != newUnreadMessageCount) { - pagedownButtonCounter.setCount(newUnreadMessageCount = unread_to_load, openAnimationEnded); - prevSetUnreadCount = newUnreadMessageCount; - } - } - } - } - scrollToMessagePosition = -10000; - scrollToMessage = null; - } else { - moveScrollToLastMessage(); - } - if (loaded_mentions_count != 0) { - showMentionDownButton(true, true); - if (mentiondownButtonCounter != null) { - mentiondownButtonCounter.setVisibility(View.VISIBLE); - mentiondownButtonCounter.setText(String.format("%d", newMentionsCount = loaded_mentions_count)); - } - } + } else if (chatAdapter.loadingUpRow >= 0 && endReached[loadIndex] && (loadIndex == 0 && mergeDialogId == 0 || loadIndex == 1)) { + chatAdapter.notifyItemRemoved(chatAdapter.loadingUpRow); } else { - if (newRowsCount != 0) { - int firstVisPos = chatLayoutManager.findFirstVisibleItemPosition(); - int lastVisPos = chatLayoutManager.findLastVisibleItemPosition(); - int top = 0; - MessageObject scrollToMessageObject = null; - if (firstVisPos != RecyclerView.NO_POSITION) { - for (int i = firstVisPos; i <= lastVisPos; i++) { - View v = chatLayoutManager.findViewByPosition(i); - if (v instanceof ChatMessageCell) { - scrollToMessageObject = ((ChatMessageCell) v).getMessageObject(); - top = chatListView.getMeasuredHeight() - 
v.getBottom() - chatListView.getPaddingBottom(); - break; - } else if (v instanceof ChatActionCell) { - scrollToMessageObject = ((ChatActionCell) v).getMessageObject(); - top = chatListView.getMeasuredHeight() - v.getBottom() - chatListView.getPaddingBottom(); - break; - } - } - } - int insertStart = chatAdapter.messagesEndRow; - int loadingUpRow = chatAdapter.loadingUpRow; - chatAdapter.updateRowsInternal(); - if (loadingUpRow >= 0 && chatAdapter.loadingUpRow < 0) { - chatAdapter.notifyItemRemoved(loadingUpRow); - } - if (newRowsCount > 0) { - if (moveCurrentDateObject) { - chatAdapter.notifyItemRemoved(insertStart - 1); - chatAdapter.notifyItemRangeInserted(insertStart - 1, newRowsCount + 1); - } else { - chatAdapter.notifyItemChanged(insertStart - 1); - chatAdapter.notifyItemRangeInserted(insertStart, newRowsCount); - } - } - if (!postponedScroll && scrollToMessageObject != null) { - int scrollToIndex = messages.indexOf(scrollToMessageObject); - if (scrollToIndex > 0) { - chatLayoutManager.scrollToPositionWithOffset(chatAdapter.messagesStartRow + scrollToIndex, top); - } - } - } else if (chatAdapter.loadingUpRow >= 0 && endReached[loadIndex] && (loadIndex == 0 && mergeDialogId == 0 || loadIndex == 1)) { - chatAdapter.notifyItemRemoved(chatAdapter.loadingUpRow); - } else { - chatAdapter.notifyDataSetChanged(true); - } + chatAdapter.notifyDataSetChanged(true); } + } - if (paused) { - scrollToTopOnResume = true; - if (scrollToMessage != null) { - scrollToTopUnReadOnResume = true; - } - } - - if (first) { - if (chatListView != null) { - if (!fragmentBeginToShow) { - chatListView.setAnimateEmptyView(false, 1); - chatListView.setEmptyView(emptyViewContainer); - chatListView.setAnimateEmptyView(true, 1); - } else { - chatListView.setEmptyView(emptyViewContainer); - } - } - } - } else { + if (paused) { scrollToTopOnResume = true; if (scrollToMessage != null) { scrollToTopUnReadOnResume = true; } } - } - if (newGroups != null) { - for (int a = 0; a < newGroups.size(); 
a++) { - MessageObject.GroupedMessages groupedMessages = newGroups.valueAt(a); - groupedMessages.calculate(); - if (chatAdapter != null && changedGroups != null && changedGroups.indexOfKey(newGroups.keyAt(a)) >= 0) { - MessageObject messageObject = groupedMessages.messages.get(groupedMessages.messages.size() - 1); - int idx = messages.indexOf(messageObject); - if (idx >= 0) { - if (chatListItemAnimator != null) { - chatListItemAnimator.groupWillChanged(groupedMessages); - } - chatAdapter.notifyItemRangeChanged(idx + chatAdapter.messagesStartRow, groupedMessages.messages.size()); + + if (first) { + if (chatListView != null) { + if (!fragmentBeginToShow) { + chatListView.setAnimateEmptyView(false, 1); + chatListView.setEmptyView(emptyViewContainer); + chatListView.setAnimateEmptyView(true, 1); + } else { + chatListView.setEmptyView(emptyViewContainer); } } } - } - - if (first && messages.size() > 0) { - first = false; - if (isThreadChat()) { - invalidateMessagesVisiblePart(); - } - } - if (messages.isEmpty() && currentEncryptedChat == null && currentUser != null && currentUser.bot && botUser == null) { - botUser = ""; - updateBottomOverlay(); - } - - if (newRowsCount == 0 && (mergeDialogId != 0 && loadIndex == 0 || currentEncryptedChat != null && !endReached[0])) { - first = true; - if (chatListView != null) { - chatListView.setEmptyView(null); - } - if (emptyViewContainer != null) { - emptyViewContainer.setVisibility(View.INVISIBLE); - } } else { - showProgressView(false); - } - if (newRowsCount == 0 && mergeDialogId != 0 && loadIndex == 0) { - getNotificationCenter().updateAllowedNotifications(transitionAnimationIndex, new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.groupCallUpdated, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, - NotificationCenter.closeChats, NotificationCenter.messagesDidLoad, NotificationCenter.botKeyboardDidLoad, NotificationCenter.userInfoDidLoad, NotificationCenter.pinnedInfoDidLoad, 
NotificationCenter.needDeleteDialog/*, NotificationCenter.botInfoDidLoad*/}); - } - if (showDateAfter) { - showFloatingDateView(false); - } - checkScrollForLoad(false); - - if (postponedScroll) { - chatAdapter.notifyDataSetChanged(); - if (progressDialog != null) { - progressDialog.dismiss(); + scrollToTopOnResume = true; + if (scrollToMessage != null) { + scrollToTopUnReadOnResume = true; } - updatePinnedListButton(false); - if (postponedScrollMessageId == 0) { - chatScrollHelperCallback.scrollTo = null; - chatScrollHelperCallback.lastBottom = true; - chatScrollHelperCallback.lastItemOffset = 0; - chatScrollHelperCallback.lastPadding = (int) chatListViewPaddingTop; - chatScrollHelper.scrollToPosition(0, 0, true, true); - } else { - MessageObject object = messagesDict[loadIndex].get(postponedScrollMessageId); - if (object != null) { - MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(object.getGroupId()); - if (object.getGroupId() != 0 && groupedMessages != null) { - MessageObject primary = groupedMessages.findPrimaryMessageObject(); - if (primary != null) { - object = primary; - } - } - } - - if (object != null) { - int k = messages.indexOf(object); - if (k >= 0) { - int fromPosition = chatLayoutManager.findFirstVisibleItemPosition(); - highlightMessageId = object.getId(); - int direction; - if (postponedScrollMinMessageId != 0) { - if (highlightMessageId < 0 && postponedScrollMinMessageId < 0) { - direction = highlightMessageId < postponedScrollMinMessageId ? RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; - } else { - direction = highlightMessageId > postponedScrollMinMessageId ? RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; - } - } else { - direction = fromPosition > k ? 
RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; - } - chatScrollHelper.setScrollDirection(direction); - - if (!needSelectFromMessageId) { - removeSelectedMessageHighlight(); - } - - int yOffset = getScrollOffsetForMessage(object); - chatScrollHelperCallback.scrollTo = object; - chatScrollHelperCallback.lastBottom = false; - chatScrollHelperCallback.lastItemOffset = yOffset; - chatScrollHelperCallback.lastPadding = (int) chatListViewPaddingTop; - chatScrollHelper.scrollToPosition(chatAdapter.messagesStartRow + k, yOffset, false, true); + } + } + if (newGroups != null) { + for (int a = 0; a < newGroups.size(); a++) { + MessageObject.GroupedMessages groupedMessages = newGroups.valueAt(a); + groupedMessages.calculate(); + if (chatAdapter != null && changedGroups != null && changedGroups.indexOfKey(newGroups.keyAt(a)) >= 0) { + MessageObject messageObject = groupedMessages.messages.get(groupedMessages.messages.size() - 1); + int idx = messages.indexOf(messageObject); + if (idx >= 0) { + if (chatListItemAnimator != null) { + chatListItemAnimator.groupWillChanged(groupedMessages); } + chatAdapter.notifyItemRangeChanged(idx + chatAdapter.messagesStartRow, groupedMessages.messages.size()); } } } } - } else if (id == NotificationCenter.emojiDidLoad) { + + if (first && messages.size() > 0) { + first = false; + if (isThreadChat()) { + invalidateMessagesVisiblePart(); + } + } + if (messages.isEmpty() && currentEncryptedChat == null && currentUser != null && currentUser.bot && botUser == null) { + botUser = ""; + updateBottomOverlay(); + } + + if (newRowsCount == 0 && (mergeDialogId != 0 && loadIndex == 0 || currentEncryptedChat != null && !endReached[0])) { + first = true; + if (chatListView != null) { + chatListView.setEmptyView(null); + } + if (emptyViewContainer != null) { + emptyViewContainer.setVisibility(View.INVISIBLE); + } + } else { + showProgressView(false); + } + if (newRowsCount == 0 && mergeDialogId != 0 && 
loadIndex == 0) { + getNotificationCenter().updateAllowedNotifications(transitionAnimationIndex, new int[]{NotificationCenter.chatInfoDidLoad, NotificationCenter.groupCallUpdated, NotificationCenter.dialogsNeedReload, NotificationCenter.scheduledMessagesUpdated, + NotificationCenter.closeChats, NotificationCenter.messagesDidLoad, NotificationCenter.botKeyboardDidLoad, NotificationCenter.userInfoDidLoad, NotificationCenter.pinnedInfoDidLoad, NotificationCenter.needDeleteDialog/*, NotificationCenter.botInfoDidLoad*/}); + } + if (showDateAfter) { + showFloatingDateView(false); + } + checkScrollForLoad(false); + + if (postponedScroll) { + chatAdapter.notifyDataSetChanged(); + if (progressDialog != null) { + progressDialog.dismiss(); + } + updatePinnedListButton(false); + if (postponedScrollMessageId == 0) { + chatScrollHelperCallback.scrollTo = null; + chatScrollHelperCallback.lastBottom = true; + chatScrollHelperCallback.lastItemOffset = 0; + chatScrollHelperCallback.lastPadding = (int) chatListViewPaddingTop; + chatScrollHelper.scrollToPosition(0, 0, true, true); + } else { + MessageObject object = messagesDict[loadIndex].get(postponedScrollMessageId); + if (object != null) { + MessageObject.GroupedMessages groupedMessages = groupedMessagesMap.get(object.getGroupId()); + if (object.getGroupId() != 0 && groupedMessages != null) { + MessageObject primary = groupedMessages.findPrimaryMessageObject(); + if (primary != null) { + object = primary; + } + } + } + + if (object != null) { + int k = messages.indexOf(object); + if (k >= 0) { + int fromPosition = chatLayoutManager.findFirstVisibleItemPosition(); + highlightMessageId = object.getId(); + int direction; + if (postponedScrollMinMessageId != 0) { + if (highlightMessageId < 0 && postponedScrollMinMessageId < 0) { + direction = highlightMessageId < postponedScrollMinMessageId ? 
RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; + } else { + direction = highlightMessageId > postponedScrollMinMessageId ? RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; + } + } else { + direction = fromPosition > k ? RecyclerAnimationScrollHelper.SCROLL_DIRECTION_DOWN : RecyclerAnimationScrollHelper.SCROLL_DIRECTION_UP; + } + chatScrollHelper.setScrollDirection(direction); + + if (!needSelectFromMessageId) { + removeSelectedMessageHighlight(); + } + + int yOffset = getScrollOffsetForMessage(object); + chatScrollHelperCallback.scrollTo = object; + chatScrollHelperCallback.lastBottom = false; + chatScrollHelperCallback.lastItemOffset = yOffset; + chatScrollHelperCallback.lastPadding = (int) chatListViewPaddingTop; + chatScrollHelper.scrollToPosition(chatAdapter.messagesStartRow + k, yOffset, false, true); + } + } + } + } + chatWasReset = false; + } else if (id == NotificationCenter.emojiLoaded) { if (chatListView != null) { chatListView.invalidateViews(); } @@ -13607,7 +13689,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (!isThreadChat()) { hasBotsCommands = true; } - getMediaDataController().loadBotInfo(user.id, true, classGuid); + getMediaDataController().loadBotInfo(user.id, -chatInfo.id, true, classGuid); } } if (chatListView != null) { @@ -13629,12 +13711,17 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (chatListView != null) { chatListView.invalidateViews(); } - if (mentionsAdapter != null && (!ChatObject.isChannel(currentChat) || currentChat != null && currentChat.megagroup)) { - mentionsAdapter.setBotInfo(botInfo); + if (!ChatObject.isChannel(currentChat) || currentChat != null && currentChat.megagroup) { + if (mentionsAdapter != null) { + mentionsAdapter.setBotInfo(botInfo); + } + if (chatActivityEnterView != null) { + chatActivityEnterView.setBotInfo(botInfo); 
+ } } } if (chatActivityEnterView != null) { - chatActivityEnterView.setBotsCount(botsCount, hasBotsCommands); + chatActivityEnterView.setBotsCount(botsCount, hasBotsCommands, true); } if (mentionsAdapter != null) { mentionsAdapter.setBotsCount(botsCount); @@ -13697,11 +13784,11 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not updateSecretStatus(); initStickers(); if (chatActivityEnterView != null) { - chatActivityEnterView.setAllowStickersAndGifs(currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 23, currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46); + chatActivityEnterView.setAllowStickersAndGifs(true, true); chatActivityEnterView.checkRoundVideo(); } if (mentionsAdapter != null) { - mentionsAdapter.setNeedBotContext(!chatActivityEnterView.isEditingMessage() && (currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46)); + mentionsAdapter.setNeedBotContext(!chatActivityEnterView.isEditingMessage()); } } } else if (id == NotificationCenter.messagesReadEncrypted) { @@ -13732,7 +13819,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } MessagesController.getInstance(currentAccount).addToViewsQueue(threadMessageObject); } else { - clearHistory((Boolean) args[1]); + clearHistory((Boolean) args[1], (TLRPC.TL_updates_channelDifferenceTooLong) args[2]); } } } else if (id == NotificationCenter.screenshotTook) { @@ -14305,11 +14392,17 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatAdapter.notifyItemChanged(chatAdapter.botInfoRow); } } - if (mentionsAdapter != null && (!ChatObject.isChannel(currentChat) || currentChat != null && currentChat.megagroup)) { - mentionsAdapter.setBotInfo(botInfo); + if (!ChatObject.isChannel(currentChat) || currentChat != null && currentChat.megagroup) { + if (mentionsAdapter != null) { + 
mentionsAdapter.setBotInfo(botInfo); + } + + if (chatActivityEnterView != null) { + chatActivityEnterView.setBotInfo(botInfo); + } } if (chatActivityEnterView != null) { - chatActivityEnterView.setBotsCount(botsCount, hasBotsCommands); + chatActivityEnterView.setBotsCount(botsCount, hasBotsCommands, true); } } updateBotButtons(); @@ -14561,12 +14654,15 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } else if (id == NotificationCenter.didSetNewWallpapper) { if (fragmentView != null) { contentView.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); - progressView2.getBackground().setColorFilter(Theme.colorFilter); + progressView2.invalidate(); if (emptyView != null) { - emptyView.getBackground().setColorFilter(Theme.colorFilter); + emptyView.invalidate(); } if (bigEmptyView != null) { - bigEmptyView.getBackground().setColorFilter(Theme.colorFilter); + bigEmptyView.invalidate(); + } + if (floatingDateView != null) { + floatingDateView.invalidate(); } chatListView.invalidateViews(); } @@ -14808,7 +14904,47 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } - private void clearHistory(boolean overwrite) { + private void clearHistory(boolean overwrite, TLRPC.TL_updates_channelDifferenceTooLong differenceTooLong) { + if (overwrite) { + if (firstLoading) { + chatWasReset = true; + last_message_id = differenceTooLong.dialog.top_message; + createUnreadMessageAfterId = 0; + } else { + if (differenceTooLong.dialog.top_message > minMessageId[0]) { + last_message_id = Math.max(last_message_id, differenceTooLong.dialog.top_message); + createUnreadMessageAfterId = Math.max(minMessageId[0] + 1, differenceTooLong.dialog.read_inbox_max_id); + } + } + forwardEndReached[0] = false; + if (chatAdapter != null && chatAdapter.loadingDownRow < 0) { + chatAdapter.notifyItemInserted(0); + } + newUnreadMessageCount = differenceTooLong.dialog.unread_count; + newMentionsCount = 
differenceTooLong.dialog.unread_mentions_count; + if (prevSetUnreadCount != newUnreadMessageCount) { + if (pagedownButtonCounter != null) { + pagedownButtonCounter.setCount(newUnreadMessageCount, openAnimationEnded); + } + prevSetUnreadCount = newUnreadMessageCount; + updatePagedownButtonVisibility(true); + } + if (newMentionsCount != differenceTooLong.dialog.unread_mentions_count) { + newMentionsCount = differenceTooLong.dialog.unread_mentions_count; + if (newMentionsCount <= 0) { + newMentionsCount = 0; + hasAllMentionsLocal = true; + showMentionDownButton(false, true); + } else { + if (mentiondownButtonCounter != null) { + mentiondownButtonCounter.setText(String.format("%d", newMentionsCount)); + } + showMentionDownButton(true, true); + } + } + checkScrollForLoad(false); + return; + } messages.clear(); waitingForLoad.clear(); messagesByDays.clear(); @@ -14838,34 +14974,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not chatActivityEnterView.setButtons(null, false); } } - if (overwrite) { - if (chatAdapter != null) { - showProgressView(chatAdapter.botInfoRow < 0); - chatListView.setEmptyView(null); - } - for (int a = 0; a < 2; a++) { - endReached[a] = false; - cacheEndReached[a] = false; - forwardEndReached[a] = true; - } - first = true; - firstLoading = true; - loading = true; - startLoadFromMessageId = 0; - needSelectFromMessageId = false; - waitingForLoad.add(lastLoadIndex); - if (startLoadFromMessageIdSaved != 0) { - startLoadFromMessageId = startLoadFromMessageIdSaved; - startLoadFromMessageIdSaved = 0; - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, AndroidUtilities.isTablet() ? 30 : 20, startLoadFromMessageId, 0, true, 0, classGuid, 3, 0, ChatObject.isChannel(currentChat), chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++); - } else { - getMessagesController().loadMessages(dialog_id, mergeDialogId, false, AndroidUtilities.isTablet() ? 
30 : 20, 0, 0, true, 0, classGuid, 2, 0, ChatObject.isChannel(currentChat), chatMode, threadMessageId, replyMaxReadId, lastLoadIndex++); - } - } else { - if (progressView != null) { - showProgressView(false); - chatListView.setEmptyView(emptyViewContainer); - } + + if (progressView != null) { + showProgressView(false); + chatListView.setEmptyView(emptyViewContainer); } if (chatAdapter != null) { @@ -14985,6 +15097,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not messageObject.localGroupId = localId; } if (messageObject.isOut()) { + Drawable wallpaper = Theme.getCachedWallpaperNonBlocking(); + if (wallpaper instanceof MotionBackgroundDrawable) { + ((MotionBackgroundDrawable) wallpaper).switchToNextPosition(); + } if (!notifiedSearch) { notifiedSearch = true; NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.closeSearchByActiveAction); @@ -15060,10 +15176,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not boolean reloadMegagroup = false; if (!forwardEndReached[0]) { int currentMaxDate = Integer.MIN_VALUE; - int currentMinMsgId = Integer.MIN_VALUE; - if (currentEncryptedChat != null) { - currentMinMsgId = Integer.MAX_VALUE; - } for (int a = 0; a < arr.size(); a++) { MessageObject obj = arr.get(a); @@ -15085,7 +15197,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } TLRPC.MessageAction action = obj.messageOwner.action; if (avatarContainer != null && currentEncryptedChat != null && action instanceof TLRPC.TL_messageEncryptedAction && action.encryptedAction instanceof TLRPC.TL_decryptedMessageActionSetMessageTTL) { - avatarContainer.setTime(((TLRPC.TL_decryptedMessageActionSetMessageTTL) action.encryptedAction).ttl_seconds); + avatarContainer.setTime(action.encryptedAction.ttl_seconds); } if (action instanceof TLRPC.TL_messageActionChatMigrateTo) { migrateToNewChat(obj); @@ -15093,7 +15205,7 @@ public class ChatActivity extends 
BaseFragment implements NotificationCenter.Not } else if (currentChat != null && currentChat.megagroup && (action instanceof TLRPC.TL_messageActionChatAddUser || action instanceof TLRPC.TL_messageActionChatDeleteUser)) { reloadMegagroup = true; } - if (a == 0 && obj.messageOwner.id < 0 && (obj.type == MessageObject.TYPE_ROUND_VIDEO || obj.isVoice()) && chatMode != MODE_SCHEDULED) { + if (a == 0 && obj.shouldAnimateSending() && chatMode != MODE_SCHEDULED) { needAnimateToMessage = obj; } if (obj.isOut() && obj.wasJustSent) { @@ -15122,10 +15234,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not obj.checkLayout(); currentMaxDate = Math.max(currentMaxDate, obj.messageOwner.date); if (messageId > 0) { - currentMinMsgId = Math.max(messageId, currentMinMsgId); last_message_id = Math.max(last_message_id, messageId); } else if (currentEncryptedChat != null) { - currentMinMsgId = Math.min(messageId, currentMinMsgId); last_message_id = Math.min(last_message_id, messageId); } @@ -15201,7 +15311,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } TLRPC.MessageAction action = obj.messageOwner.action; if (avatarContainer != null && currentEncryptedChat != null && action instanceof TLRPC.TL_messageEncryptedAction && action.encryptedAction instanceof TLRPC.TL_decryptedMessageActionSetMessageTTL) { - avatarContainer.setTime(((TLRPC.TL_decryptedMessageActionSetMessageTTL) action.encryptedAction).ttl_seconds); + avatarContainer.setTime(action.encryptedAction.ttl_seconds); } if (obj.type < 0 || messagesDict[0].indexOfKey(messageId) >= 0) { continue; @@ -15222,7 +15332,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } } addToPolls(obj, null); - if (a == 0 && obj.messageOwner.id < 0 && (obj.type == MessageObject.TYPE_ROUND_VIDEO || obj.isVoice()) && chatMode != MODE_SCHEDULED) { + if (a == 0 && obj.shouldAnimateSending() && chatMode != MODE_SCHEDULED) { 
animatingMessageObjects.add(obj); } @@ -16407,9 +16517,10 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not public void onAnimationEnd(Animator animation) { super.onAnimationEnd(animation); chatActivityEnterView.setVisibility(View.INVISIBLE); + bottomOverlayChat.setVisibility(View.INVISIBLE); } }).start(); - bottomOverlayChat.setVisibility(View.INVISIBLE); + chatActivityEnterView.setFieldFocused(false); if (chatActivityEnterView.isTopViewVisible()) { @@ -17583,7 +17694,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } activityResumeTime = System.currentTimeMillis(); if (openImport && getSendMessagesHelper().getImportingHistory(dialog_id) != null) { - ImportingAlert alert = new ImportingAlert(getParentActivity(), this); + ImportingAlert alert = new ImportingAlert(getParentActivity(), null, this); alert.setOnHideListener(dialog -> { if (fragmentContextView != null) { fragmentContextView.checkImport(false); @@ -18342,7 +18453,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } allowEdit = captionsCount < 2; } - if (chatMode == MODE_SCHEDULED || threadMessageObjects != null && threadMessageObjects.contains(message) || currentEncryptedChat != null && AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) < 46 || + if (chatMode == MODE_SCHEDULED || threadMessageObjects != null && threadMessageObjects.contains(message) || type == 1 && message.getDialogId() == mergeDialogId || message.messageOwner.action instanceof TLRPC.TL_messageActionSecureValuesSent || currentEncryptedChat == null && message.getId() < 0 || @@ -19032,6 +19143,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not selectedMessagesCountTextView.setNumber(selectedMessagesIds[0].size() + selectedMessagesIds[1].size(), false); updateVisibleRows(); + if (chatActivityEnterView != null) { + chatActivityEnterView.hideBotCommands(); + } } private void 
startEditingMessageObject(MessageObject messageObject) { @@ -19275,7 +19389,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not break; } case 4: { - if (Build.VERSION.SDK_INT >= 23 && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { getParentActivity().requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); selectedObject = null; selectedObjectGroup = null; @@ -19403,7 +19517,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (path == null || path.length() == 0) { path = FileLoader.getPathToMessage(selectedObject.messageOwner).toString(); } - if (Build.VERSION.SDK_INT >= 23 && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { getParentActivity().requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); selectedObject = null; selectedObjectGroup = null; @@ -19425,7 +19539,8 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not break; } case 10: { - if (Build.VERSION.SDK_INT >= 23 && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + //TODO scopped storage + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != 
PackageManager.PERMISSION_GRANTED) { getParentActivity().requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); selectedObject = null; selectedObjectGroup = null; @@ -19852,7 +19967,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not for (int a = 0; a < dids.size(); a++) { long did = dids.get(a); if (message != null) { - getSendMessagesHelper().sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0); + getSendMessagesHelper().sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0, null); } getSendMessagesHelper().sendMessage(fmessages, did, true, 0); } @@ -19950,6 +20065,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } else if (chatActivityEnterView != null && chatActivityEnterView.isPopupShowing()) { chatActivityEnterView.hidePopup(true); return false; + } else if (chatActivityEnterView != null && chatActivityEnterView.botCommandsMenuIsShowing()) { + chatActivityEnterView.hideBotCommands(); + return false; } return true; } @@ -20148,6 +20266,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (threadMessageId == 0 && searchItem != null) { searchItem.setVisibility(View.VISIBLE); } + if (searchIconItem != null && showSearchAsIcon) { + searchIconItem.setVisibility(View.GONE); + } searchItemVisible = true; updateSearchButtons(0, 0, -1); updateBottomOverlay(); @@ -20226,7 +20347,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } public boolean allowGroupPhotos() { - return !isEditingMessageMedia() && (currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 73); + return !isEditingMessageMedia(); } public TLRPC.EncryptedChat getCurrentEncryptedChat() { @@ -20261,24 +20382,24 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not afterMessageSend(); } - public void 
sendMedia(MediaController.PhotoEntry photoEntry, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + public void sendMedia(MediaController.PhotoEntry photoEntry, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { if (photoEntry == null) { return; } fillEditingMediaWithCaption(photoEntry.caption, photoEntry.entities); if (photoEntry.isVideo) { if (videoEditedInfo != null) { - SendMessagesHelper.prepareSendingVideo(getAccountInstance(), photoEntry.path, videoEditedInfo, dialog_id, replyingMessageObject, getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate); + SendMessagesHelper.prepareSendingVideo(getAccountInstance(), photoEntry.path, videoEditedInfo, dialog_id, replyingMessageObject, getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate, forceDocument); } else { - SendMessagesHelper.prepareSendingVideo(getAccountInstance(), photoEntry.path, null, dialog_id, replyingMessageObject, getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate); + SendMessagesHelper.prepareSendingVideo(getAccountInstance(), photoEntry.path, null, dialog_id, replyingMessageObject, getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate, forceDocument); } afterMessageSend(); } else { if (photoEntry.imagePath != null) { - SendMessagesHelper.prepareSendingPhoto(getAccountInstance(), photoEntry.imagePath, photoEntry.thumbPath, null, dialog_id, replyingMessageObject, getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate); + SendMessagesHelper.prepareSendingPhoto(getAccountInstance(), photoEntry.imagePath, photoEntry.thumbPath, null, dialog_id, replyingMessageObject, 
getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate, forceDocument); afterMessageSend(); } else if (photoEntry.path != null) { - SendMessagesHelper.prepareSendingPhoto(getAccountInstance(), photoEntry.path, photoEntry.thumbPath, null, dialog_id, replyingMessageObject, getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate); + SendMessagesHelper.prepareSendingPhoto(getAccountInstance(), photoEntry.path, photoEntry.thumbPath, null, dialog_id, replyingMessageObject, getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate, forceDocument); afterMessageSend(); } } @@ -21007,6 +21128,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not private boolean isBot; private int rowCount; private int botInfoRow = -5; + private int botInfoEmptyRow = -5; private int loadingUpRow = -5; private int loadingDownRow = -5; private int messagesStartRow; @@ -21072,14 +21194,27 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public int getItemCount() { + botInfoEmptyRow = -5; + if (clearingHistory) { + if (currentUser != null && currentUser.bot && chatMode == 0 && (botInfo.size() > 0 && botInfo.get(currentUser.id).description != null || UserObject.isReplyUser(currentUser))) { + botInfoEmptyRow = 0; + return 1; + } + return 0; + } return clearingHistory ? 
0 : rowCount; } @Override public long getItemId(int position) { + if (clearingHistory) { + if (position == botInfoEmptyRow) { + return 1; + } + } if (position >= messagesStartRow && position < messagesEndRow) { return messages.get(position - messagesStartRow).stableId; - } else if (position == botInfoRow) { + } else if (position == botInfoRow || position == botInfoEmptyRow) { return 1; } else if (position == loadingUpRow) { return 2; @@ -21560,7 +21695,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not return; } if (message.isDice()) { - undoView.showWithAction(0, chatActivityEnterView.getVisibility() == View.VISIBLE && bottomOverlay.getVisibility() != View.VISIBLE ? UndoView.ACTION_DICE_INFO : UndoView.ACTION_DICE_NO_SEND_INFO, message.getDiceEmoji(), null, () -> getSendMessagesHelper().sendMessage(message.getDiceEmoji(), dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0)); + undoView.showWithAction(0, chatActivityEnterView.getVisibility() == View.VISIBLE && bottomOverlay.getVisibility() != View.VISIBLE ? 
UndoView.ACTION_DICE_INFO : UndoView.ACTION_DICE_NO_SEND_INFO, message.getDiceEmoji(), null, () -> getSendMessagesHelper().sendMessage(message.getDiceEmoji(), dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null)); } else if (message.isAnimatedEmoji()) { restartSticker(cell); } else if (message.needDrawBluredPreview()) { @@ -21824,7 +21959,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } }); } else if (viewType == 4) { - view = new ChatLoadingCell(mContext); + view = new ChatLoadingCell(mContext, contentView); } view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT)); return new RecyclerListView.Holder(view); @@ -21832,7 +21967,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { - if (position == botInfoRow) { + if (position == botInfoRow || position == botInfoEmptyRow) { BotHelpCell helpView = (BotHelpCell) holder.itemView; if (UserObject.isReplyUser(currentUser)) { helpView.setText(false, LocaleController.getString("RepliesChatInfo", R.string.RepliesChatInfo)); @@ -21985,94 +22120,206 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not int index; if ((index = animatingMessageObjects.indexOf(message)) != -1) { animatingMessageObjects.remove(index); - if (message.type == MessageObject.TYPE_ROUND_VIDEO) { - if (instantCameraView.getTextureView() != null) { - messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { + if (message.type == MessageObject.TYPE_ROUND_VIDEO && instantCameraView.getTextureView() != null) { + messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { - PipRoundVideoView 
pipRoundVideoView = PipRoundVideoView.getInstance(); - if (pipRoundVideoView != null) { - pipRoundVideoView.showTemporary(true); + PipRoundVideoView pipRoundVideoView = PipRoundVideoView.getInstance(); + if (pipRoundVideoView != null) { + pipRoundVideoView.showTemporary(true); + } + + messageCell.getViewTreeObserver().removeOnPreDrawListener(this); + ImageReceiver imageReceiver = messageCell.getPhotoImage(); + float w = imageReceiver.getImageWidth(); + org.telegram.ui.Components.Rect rect = instantCameraView.getCameraRect(); + float scale = w / rect.width; + int[] position = new int[2]; + messageCell.getTransitionParams().ignoreAlpha = true; + messageCell.setAlpha(0.0f); + messageCell.setTimeAlpha(0.0f); + messageCell.getLocationOnScreen(position); + position[0] += imageReceiver.getImageX() - messageCell.getAnimationOffsetX(); + position[1] += imageReceiver.getImageY() - messageCell.getTranslationY(); + final InstantCameraView.InstantViewCameraContainer cameraContainer = instantCameraView.getCameraContainer(); + cameraContainer.setPivotX(0.0f); + cameraContainer.setPivotY(0.0f); + AnimatorSet animatorSet = new AnimatorSet(); + + cameraContainer.setImageReceiver(imageReceiver); + + instantCameraView.cancelBlur(); + + AnimatorSet allAnimators = new AnimatorSet(); + animatorSet.playTogether( + ObjectAnimator.ofFloat(cameraContainer, View.SCALE_X, scale), + ObjectAnimator.ofFloat(cameraContainer, View.SCALE_Y, scale), + ObjectAnimator.ofFloat(cameraContainer, View.TRANSLATION_Y, position[1] - rect.y), + ObjectAnimator.ofFloat(instantCameraView.getSwitchButtonView(), View.ALPHA, 0.0f), + ObjectAnimator.ofInt(instantCameraView.getPaint(), AnimationProperties.PAINT_ALPHA, 0), + ObjectAnimator.ofFloat(instantCameraView.getMuteImageView(), View.ALPHA, 0.0f) + ); + animatorSet.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + ObjectAnimator o = ObjectAnimator.ofFloat(cameraContainer, View.TRANSLATION_X, position[0] - rect.x); + 
o.setInterpolator(CubicBezierInterpolator.DEFAULT); + + allAnimators.playTogether(o, animatorSet); + allAnimators.setStartDelay(120); + allAnimators.setDuration(180); + + allAnimators.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + messageCell.setAlpha(1.0f); + messageCell.getTransitionParams().ignoreAlpha = false; + Property ALPHA = new AnimationProperties.FloatProperty("alpha") { + @Override + public void setValue(ChatMessageCell object, float value) { + object.setTimeAlpha(value); + } + + @Override + public Float get(ChatMessageCell object) { + return object.getTimeAlpha(); + } + }; + + AnimatorSet animatorSet = new AnimatorSet(); + animatorSet.playTogether( + ObjectAnimator.ofFloat(cameraContainer, View.ALPHA, 0.0f), + ObjectAnimator.ofFloat(messageCell, ALPHA, 1.0f) + ); + animatorSet.setDuration(100); + animatorSet.setInterpolator(new DecelerateInterpolator()); + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + instantCameraView.hideCamera(true); + instantCameraView.setVisibility(View.INVISIBLE); + } + }); + animatorSet.start(); } - - messageCell.getViewTreeObserver().removeOnPreDrawListener(this); - ImageReceiver imageReceiver = messageCell.getPhotoImage(); - float w = imageReceiver.getImageWidth(); - org.telegram.ui.Components.Rect rect = instantCameraView.getCameraRect(); - float scale = w / rect.width; - int[] position = new int[2]; - messageCell.getTransitionParams().ignoreAlpha = true; - messageCell.setAlpha(0.0f); - messageCell.setTimeAlpha(0.0f); - messageCell.getLocationOnScreen(position); - position[0] += imageReceiver.getImageX() - messageCell.getAnimationOffsetX(); - position[1] += imageReceiver.getImageY() - messageCell.getTranslationY(); - final InstantCameraView.InstantViewCameraContainer cameraContainer = instantCameraView.getCameraContainer(); - cameraContainer.setPivotX(0.0f); - cameraContainer.setPivotY(0.0f); 
- AnimatorSet animatorSet = new AnimatorSet(); - - cameraContainer.setImageReceiver(imageReceiver); - - instantCameraView.cancelBlur(); - - AnimatorSet allAnimators = new AnimatorSet(); - animatorSet.playTogether( - ObjectAnimator.ofFloat(cameraContainer, View.SCALE_X, scale), - ObjectAnimator.ofFloat(cameraContainer, View.SCALE_Y, scale), - ObjectAnimator.ofFloat(cameraContainer, View.TRANSLATION_Y, position[1] - rect.y), - ObjectAnimator.ofFloat(instantCameraView.getSwitchButtonView(), View.ALPHA, 0.0f), - ObjectAnimator.ofInt(instantCameraView.getPaint(), AnimationProperties.PAINT_ALPHA, 0), - ObjectAnimator.ofFloat(instantCameraView.getMuteImageView(), View.ALPHA, 0.0f) - ); - animatorSet.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); - ObjectAnimator o = ObjectAnimator.ofFloat(cameraContainer, View.TRANSLATION_X, position[0] - rect.x); - o.setInterpolator(CubicBezierInterpolator.DEFAULT); - - allAnimators.playTogether(o, animatorSet); - allAnimators.setStartDelay(120); - allAnimators.setDuration(180); - - allAnimators.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - messageCell.setAlpha(1.0f); - messageCell.getTransitionParams().ignoreAlpha = false; - Property ALPHA = new AnimationProperties.FloatProperty("alpha") { - @Override - public void setValue(ChatMessageCell object, float value) { - object.setTimeAlpha(value); - } - - @Override - public Float get(ChatMessageCell object) { - return object.getTimeAlpha(); - } - }; - - AnimatorSet animatorSet = new AnimatorSet(); - animatorSet.playTogether( - ObjectAnimator.ofFloat(cameraContainer, View.ALPHA, 0.0f), - ObjectAnimator.ofFloat(messageCell, ALPHA, 1.0f) - ); - animatorSet.setDuration(100); - animatorSet.setInterpolator(new DecelerateInterpolator()); - animatorSet.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - instantCameraView.hideCamera(true); - 
instantCameraView.setVisibility(View.INVISIBLE); - } - }); - animatorSet.start(); - } - }); - allAnimators.start(); + }); + allAnimators.start(); + return true; + } + }); + } else if (message.isAnyKindOfSticker()) { + messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + messageCell.getViewTreeObserver().removeOnPreDrawListener(this); + MessageObject.SendAnimationData sendAnimationData = messageCell.getMessageObject().sendAnimationData; + if (sendAnimationData == null) { return true; } - }); - } + animateSendingViews.add(messageCell); + ImageReceiver imageReceiver = messageCell.getPhotoImage(); + float w = imageReceiver.getImageWidth(); + float scale = sendAnimationData.width / w; + int[] position = new int[2]; + messageCell.getTransitionParams().ignoreAlpha = true; + messageCell.getLocationInWindow(position); + position[1] -= messageCell.getTranslationY(); + if (chatActivityEnterView.isTopViewVisible()) { + position[1] += AndroidUtilities.dp(48); + } + + AnimatorSet allAnimators = new AnimatorSet(); + + Property param1 = new AnimationProperties.FloatProperty("p1") { + @Override + public void setValue(MessageObject.SendAnimationData object, float value) { + object.currentScale = value; + } + + @Override + public Float get(MessageObject.SendAnimationData object) { + return object.currentScale; + } + }; + Property param2 = new AnimationProperties.FloatProperty("p2") { + @Override + public void setValue(MessageObject.SendAnimationData object, float value) { + object.currentX = value; + if (fragmentView != null) { + fragmentView.invalidate(); + } + } + + @Override + public Float get(MessageObject.SendAnimationData object) { + return object.currentX; + } + }; + Property param3 = new AnimationProperties.FloatProperty("p3") { + @Override + public void setValue(MessageObject.SendAnimationData object, float value) { + object.currentY = value; + if (fragmentView != null) { + 
fragmentView.invalidate(); + } + } + + @Override + public Float get(MessageObject.SendAnimationData object) { + return object.currentY; + } + }; + AnimatorSet animatorSet = new AnimatorSet(); + animatorSet.playTogether( + ObjectAnimator.ofFloat(sendAnimationData, param1, scale, 1.0f), + ObjectAnimator.ofFloat(sendAnimationData, param3, sendAnimationData.y, position[1] + imageReceiver.getCenterY()) + ); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); + ObjectAnimator o = ObjectAnimator.ofFloat(sendAnimationData, param2, sendAnimationData.x, position[0] + imageReceiver.getCenterX()); + o.setInterpolator(CubicBezierInterpolator.EASE_OUT_QUINT); + + allAnimators.playTogether(o, animatorSet); + allAnimators.setDuration(250); + + allAnimators.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + animateSendingViews.remove(messageCell); + if (fragmentView != null) { + fragmentView.invalidate(); + chatListView.invalidate(); + } + messageCell.setAlpha(1.0f); + messageCell.getTransitionParams().ignoreAlpha = false; + } + }); + allAnimators.start(); + + Property ALPHA = new AnimationProperties.FloatProperty("alpha") { + @Override + public void setValue(MessageObject.SendAnimationData object, float value) { + object.timeAlpha = value; + if (fragmentView != null) { + fragmentView.invalidate(); + } + } + + @Override + public Float get(MessageObject.SendAnimationData object) { + return object.timeAlpha; + } + }; + + AnimatorSet animatorSet2 = new AnimatorSet(); + animatorSet2.playTogether( + ObjectAnimator.ofFloat(sendAnimationData, ALPHA, 0.0f, 1.0f) + ); + animatorSet2.setDuration(100); + animatorSet2.setStartDelay(150); + animatorSet2.setInterpolator(new DecelerateInterpolator()); + animatorSet2.start(); + return true; + } + }); } else { if (chatActivityEnterView.canShowVoiceMessageTransition()) { messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { @@ -22113,6 
+22360,11 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public int getItemViewType(int position) { + if (clearingHistory) { + if (position == botInfoEmptyRow) { + return 3; + } + } if (position >= messagesStartRow && position < messagesEndRow) { return messages.get(position - messagesStartRow).contentType; } else if (position == botInfoRow) { @@ -22123,6 +22375,39 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not @Override public void onViewAttachedToWindow(RecyclerView.ViewHolder holder) { + if (holder.itemView instanceof ChatMessageCell || holder.itemView instanceof ChatActionCell) { + View view = holder.itemView; + holder.itemView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + view.getViewTreeObserver().removeOnPreDrawListener(this); + + int height = chatListView.getMeasuredHeight(); + int top = view.getTop(); + int bottom = view.getBottom(); + int viewTop = top >= 0 ? 0 : -top; + int viewBottom = view.getMeasuredHeight(); + if (viewBottom > height) { + viewBottom = viewTop + height; + } + int recyclerChatViewHeight = (contentView.getHeightWithKeyboard() - (inPreviewMode ? 0 : AndroidUtilities.dp(48)) - chatListView.getTop()); + int keyboardOffset = contentView.getKeyboardHeight(); + int parentHeight = viewBottom - viewTop; + if (keyboardOffset < AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing() || chatActivityEnterView.pannelAniamationInProgress()) { + keyboardOffset = chatActivityEnterView.getEmojiPadding(); + } + if (holder.itemView instanceof ChatMessageCell) { + ((ChatMessageCell) view).setVisiblePart(viewTop, viewBottom - viewTop, recyclerChatViewHeight, keyboardOffset, view.getY() + (isKeyboardVisible() ? 
chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); + } else if (holder.itemView instanceof ChatActionCell) { + if (actionBar != null && contentView != null) { + ((ChatActionCell) view).setVisiblePart(view.getY() + (isKeyboardVisible() ? chatListView.getTop() : actionBar.getMeasuredHeight()) - contentView.getBackgroundTranslationY(), contentView.getBackgroundSizeY()); + } + } + + return true; + } + }); + } if (holder.itemView instanceof ChatMessageCell) { final ChatMessageCell messageCell = (ChatMessageCell) holder.itemView; MessageObject message = messageCell.getMessageObject(); @@ -22166,30 +22451,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not messageCell.setHighlightedText(null); } - messageCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { - messageCell.getViewTreeObserver().removeOnPreDrawListener(this); - - int height = chatListView.getMeasuredHeight(); - int top = messageCell.getTop(); - int bottom = messageCell.getBottom(); - int viewTop = top >= 0 ? 0 : -top; - int viewBottom = messageCell.getMeasuredHeight(); - if (viewBottom > height) { - viewBottom = viewTop + height; - } - int recyclerChatViewHeight = (contentView.getHeightWithKeyboard() - (inPreviewMode ? 
0 : AndroidUtilities.dp(48)) - chatListView.getTop()); - int keyboardOffset = contentView.getKeyboardHeight(); - int parentHeight = viewBottom - viewTop; - if (keyboardOffset < AndroidUtilities.dp(20) && chatActivityEnterView.isPopupShowing() || chatActivityEnterView.pannelAniamationInProgress()) { - keyboardOffset = chatActivityEnterView.getEmojiPadding(); - } - messageCell.setVisiblePart(viewTop, viewBottom - viewTop, recyclerChatViewHeight, keyboardOffset); - - return true; - } - }); if (!inPreviewMode || !messageCell.isHighlighted()) { messageCell.setHighlighted(highlightMessageId != Integer.MAX_VALUE && messageCell.getMessageObject().getId() == highlightMessageId); if (highlightMessageId != Integer.MAX_VALUE) { @@ -22682,7 +22943,9 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not ArrayList themeDescriptions = new ArrayList<>(); themeDescriptions.add(new ThemeDescription(fragmentView, 0, null, null, null, null, Theme.key_chat_wallpaper)); - themeDescriptions.add(new ThemeDescription(fragmentView, 0, null, null, null, null, Theme.key_chat_wallpaper_gradient_to)); + themeDescriptions.add(new ThemeDescription(fragmentView, 0, null, null, null, null, Theme.key_chat_wallpaper_gradient_to1)); + themeDescriptions.add(new ThemeDescription(fragmentView, 0, null, null, null, null, Theme.key_chat_wallpaper_gradient_to2)); + themeDescriptions.add(new ThemeDescription(fragmentView, 0, null, null, null, null, Theme.key_chat_wallpaper_gradient_to3)); themeDescriptions.add(new ThemeDescription(messagesSearchListView, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_windowBackgroundWhite)); @@ -22930,8 +23193,6 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_pollHintDrawable[1]}, null, Theme.key_chat_outPreviewInstantText)); themeDescriptions.add(new 
ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_psaHelpDrawable[0]}, null, Theme.key_chat_inViews)); themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, new Drawable[]{Theme.chat_psaHelpDrawable[1]}, null, Theme.key_chat_outViews)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_shareBackground)); - themeDescriptions.add(new ThemeDescription(chatListView, 0, new Class[]{ChatMessageCell.class}, null, null, null, Theme.key_chat_shareBackgroundSelected)); themeDescriptions.add(new ThemeDescription(messagesSearchListView, 0, new Class[]{DialogCell.class}, null, Theme.avatarDrawables, null, Theme.key_avatar_text)); themeDescriptions.add(new ThemeDescription(messagesSearchListView, 0, new Class[]{DialogCell.class}, Theme.dialogs_countPaint, null, null, Theme.key_chats_unreadCounter)); @@ -23222,6 +23483,11 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertMutedByAdmin)); themeDescriptions.add(new ThemeDescription(null, 0, null, null, null, null, Theme.key_voipgroup_overlayAlertMutedByAdmin2)); + if (chatActivityEnterView != null) { + themeDescriptions.add(new ThemeDescription(chatActivityEnterView.botCommandsMenuContainer.listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{BotCommandsMenuView.BotCommandView.class}, new String[]{"description"}, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); + themeDescriptions.add(new ThemeDescription(chatActivityEnterView.botCommandsMenuContainer.listView, ThemeDescription.FLAG_TEXTCOLOR, new Class[]{BotCommandsMenuView.BotCommandView.class}, new String[]{"command"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText)); + } + return themeDescriptions; } } diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java index 1215b966d..82ebee4c1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java @@ -62,7 +62,6 @@ import org.telegram.messenger.BuildVars; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; import org.telegram.messenger.FileLog; -import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; @@ -2023,11 +2022,11 @@ public class AlertsCreator { int diff = (int) ((time - systemTime) / 1000); String t; if (diff > 24 * 60 * 60) { - t = LocaleController.formatPluralString("DaysSchedule", diff / (24 * 60 * 60)); + t = LocaleController.formatPluralString("DaysSchedule", Math.round(diff / (24 * 60 * 60.0f))); } else if (diff >= 60 * 60) { - t = LocaleController.formatPluralString("HoursSchedule", diff / (60 * 60)); + t = LocaleController.formatPluralString("HoursSchedule", Math.round(diff / (60 * 60.0f))); } else if (diff >= 60) { - t = LocaleController.formatPluralString("MinutesSchedule", diff / 60); + t = LocaleController.formatPluralString("MinutesSchedule", Math.round(diff / 60.0f)); } else { t = LocaleController.formatPluralString("SecondsSchedule", diff); } @@ -3430,7 +3429,11 @@ public class AlertsCreator { builder.setPositiveButton(LocaleController.getString("Enable", R.string.Enable), (dialogInterface, i) -> { if (activity != null) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - activity.startActivity(new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + activity.getPackageName()))); + try { + activity.startActivity(new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + 
activity.getPackageName()))); + } catch (Exception e) { + FileLog.e(e); + } } } }); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java index a2265d91e..d2d713047 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AudioPlayerAlert.java @@ -57,6 +57,7 @@ import com.google.android.exoplayer2.C; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BuildConfig; +import org.telegram.messenger.BuildVars; import org.telegram.messenger.ContactsController; import org.telegram.messenger.DownloadController; import org.telegram.messenger.FileLoader; @@ -234,8 +235,8 @@ public class AudioPlayerAlert extends BottomSheet implements NotificationCenter. NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messagePlayingPlayStateChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messagePlayingDidStart); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messagePlayingProgressDidChanged); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.musicDidLoad); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.moreMusicDidLoad); @@ -1300,7 +1301,7 @@ public class AudioPlayerAlert extends BottomSheet implements 
NotificationCenter. for (int a = 0; a < dids.size(); a++) { long did = dids.get(a); if (message != null) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0, null); } SendMessagesHelper.getInstance(currentAccount).sendMessage(fmessages, did, true, 0); } @@ -1403,7 +1404,7 @@ public class AudioPlayerAlert extends BottomSheet implements NotificationCenter. parentActivity.presentFragment(new ChatActivity(args), false, false); dismiss(); } else if (id == 5) { - if (Build.VERSION.SDK_INT >= 23 && parentActivity.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && parentActivity.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { parentActivity.requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); return; } @@ -1548,13 +1549,13 @@ public class AudioPlayerAlert extends BottomSheet implements NotificationCenter. layoutManager.scrollToPositionWithOffset(position + addedCount, offset); } } - } else if (id == NotificationCenter.fileDidLoad) { + } else if (id == NotificationCenter.fileLoaded) { String name = (String) args[0]; if (name.equals(currentFile)) { updateTitle(false); currentAudioFinishedLoading = true; } - } else if (id == NotificationCenter.FileLoadProgressChanged) { + } else if (id == NotificationCenter.fileLoadProgressChanged) { String name = (String) args[0]; if (name.equals(currentFile)) { MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); @@ -1640,8 +1641,8 @@ public class AudioPlayerAlert extends BottomSheet implements NotificationCenter. 
NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messagePlayingPlayStateChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messagePlayingDidStart); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messagePlayingProgressDidChanged); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.musicDidLoad); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.moreMusicDidLoad); DownloadController.getInstance(currentAccount).removeLoadingFileObserver(this); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsImageView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsImageView.java index fdb9c787d..757f84e17 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsImageView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/AvatarsImageView.java @@ -32,6 +32,9 @@ import java.util.Random; public class AvatarsImageView extends FrameLayout { + + public final static int STYLE_GROUP_CALL_TOOLTIP = 10; + DrawingState[] currentStates = new DrawingState[3]; DrawingState[] animatingStates = new DrawingState[3]; boolean wasDraw; @@ -217,7 +220,7 @@ public class AvatarsImageView extends FrameLayout { if (id == AccountInstance.getInstance(account).getUserConfig().getClientUserId()) { animatingStates[index].lastSpeakTime = 0; } else { - animatingStates[index].lastSpeakTime = participant.active_date; + 
animatingStates[index].lastSpeakTime = participant.lastActiveDate; } } else { animatingStates[index].lastSpeakTime = participant.active_date; @@ -237,8 +240,9 @@ public class AvatarsImageView extends FrameLayout { } else { animatingStates[index].imageReceiver.setForUserOrChat(currentChat, animatingStates[index].avatarDrawable); } - animatingStates[index].imageReceiver.setRoundRadius(AndroidUtilities.dp(currentStyle == 4 ? 16 : 12)); - int size = AndroidUtilities.dp(currentStyle == 4 ? 32 : 24); + boolean bigAvatars = currentStyle == 4 || currentStyle == STYLE_GROUP_CALL_TOOLTIP; + animatingStates[index].imageReceiver.setRoundRadius(AndroidUtilities.dp(bigAvatars ? 16 : 12)); + int size = AndroidUtilities.dp(bigAvatars ? 32 : 24); animatingStates[index].imageReceiver.setImageCoords(0, 0, size, size); invalidate(); } @@ -248,15 +252,17 @@ public class AvatarsImageView extends FrameLayout { protected void onDraw(Canvas canvas) { wasDraw = true; - int size = AndroidUtilities.dp(currentStyle == 4 ? 32 : 24); - int toAdd = AndroidUtilities.dp(currentStyle == 4 ? 24 : 20); + boolean bigAvatars = currentStyle == 4 || currentStyle == STYLE_GROUP_CALL_TOOLTIP; + int size = AndroidUtilities.dp(bigAvatars ? 32 : 24); + int toAdd = AndroidUtilities.dp(bigAvatars ? 24 : 20); int drawCount = 0; for (int i = 0; i < 3; i++) { if (currentStates[i].id != 0) { drawCount++; } } - int ax = centered ? (getMeasuredWidth() - drawCount * toAdd - AndroidUtilities.dp(currentStyle == 4 ? 8 : 4)) / 2 : (currentStyle == 0 ? 0 : AndroidUtilities.dp(10)); + int startPadding = (currentStyle == 0 || currentStyle == STYLE_GROUP_CALL_TOOLTIP) ? 0 : AndroidUtilities.dp(10); + int ax = centered ? (getMeasuredWidth() - drawCount * toAdd - AndroidUtilities.dp(bigAvatars ? 
8 : 4)) / 2 : startPadding; boolean isMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute(); if (currentStyle == 4) { paint.setColor(Theme.getColor(Theme.key_inappPlayerBackground)); @@ -270,9 +276,10 @@ public class AvatarsImageView extends FrameLayout { animateToDrawCount++; } } - boolean useAlphaLayer = currentStyle == 0 || currentStyle == 1 || currentStyle == 3 || currentStyle == 4 || currentStyle == 5; + boolean useAlphaLayer = currentStyle == 0 || currentStyle == 1 || currentStyle == 3 || currentStyle == 4 || currentStyle == 5 || currentStyle == STYLE_GROUP_CALL_TOOLTIP; if (useAlphaLayer) { - canvas.saveLayerAlpha(0, 0, getMeasuredWidth(), getMeasuredHeight(), 255, Canvas.ALL_SAVE_FLAG); + float padding = currentStyle == STYLE_GROUP_CALL_TOOLTIP ? AndroidUtilities.dp(16) : 0; + canvas.saveLayerAlpha(-padding, -padding, getMeasuredWidth() + padding, getMeasuredHeight() + padding, 255, Canvas.ALL_SAVE_FLAG); } for (int a = 2; a >= 0; a--) { for (int k = 0; k < 2; k++) { @@ -290,13 +297,13 @@ public class AvatarsImageView extends FrameLayout { continue; } if (k == 0) { - int toAx = centered ? (getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(currentStyle == 4 ? 8 : 4)) / 2 : AndroidUtilities.dp(10); + int toAx = centered ? (getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(bigAvatars ? 8 : 4)) / 2 : startPadding; imageReceiver.setImageX(toAx + toAdd * a); } else { imageReceiver.setImageX(ax + toAdd * a); } - if (currentStyle == 0) { + if (currentStyle == 0 || currentStyle == STYLE_GROUP_CALL_TOOLTIP) { imageReceiver.setImageY((getMeasuredHeight() - size) / 2f); } else { imageReceiver.setImageY(AndroidUtilities.dp(currentStyle == 4 ? 8 : 6)); @@ -316,12 +323,12 @@ public class AvatarsImageView extends FrameLayout { alpha = transitionProgress; needRestore = true; } else if (states[a].animationType == DrawingState.ANIMATION_TYPE_MOVE) { - int toAx = centered ? 
(getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(currentStyle == 4 ? 8 : 4)) / 2 : AndroidUtilities.dp(10); + int toAx = centered ? (getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(bigAvatars ? 8 : 4)) / 2 : startPadding; int toX = toAx + toAdd * a; int fromX = ax + toAdd * states[a].moveFromIndex; imageReceiver.setImageX((int) (toX * transitionProgress + fromX * (1f - transitionProgress))); } else if (states[a].animationType == DrawingState.ANIMATION_TYPE_NONE && centered) { - int toAx = (getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(currentStyle == 4 ? 8 : 4)) / 2; + int toAx = (getMeasuredWidth() - animateToDrawCount * toAdd - AndroidUtilities.dp(bigAvatars ? 8 : 4)) / 2; int toX = toAx + toAdd * a; int fromX = ax + toAdd * a; imageReceiver.setImageX((int) (toX * transitionProgress + fromX * (1f - transitionProgress))); @@ -358,21 +365,35 @@ public class AvatarsImageView extends FrameLayout { invalidate(); } avatarScale = states[a].wavesDrawable.getAvatarScale(); - } else if (currentStyle == 4) { + } else if (currentStyle == 4 || currentStyle == STYLE_GROUP_CALL_TOOLTIP) { canvas.drawCircle(imageReceiver.getCenterX(), imageReceiver.getCenterY(), AndroidUtilities.dp(17), xRefP); if (states[a].wavesDrawable == null) { states[a].wavesDrawable = new GroupCallUserCell.AvatarWavesDrawable(AndroidUtilities.dp(17), AndroidUtilities.dp(21)); } - states[a].wavesDrawable.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_listeningText), (int) (255 * 0.3f * alpha))); + if (currentStyle == STYLE_GROUP_CALL_TOOLTIP) { + states[a].wavesDrawable.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_speakingText), (int) (255 * 0.3f * alpha))); + } else { + states[a].wavesDrawable.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_listeningText), (int) (255 * 0.3f * alpha))); + } long currentTime = System.currentTimeMillis(); if (currentTime - 
states[a].lastUpdateTime > 100) { states[a].lastUpdateTime = currentTime; - if (ConnectionsManager.getInstance(UserConfig.selectedAccount).getCurrentTime() - states[a].lastSpeakTime <= 5) { - states[a].wavesDrawable.setShowWaves(true, this); - states[a].wavesDrawable.setAmplitude(random.nextInt() % 100); + if (currentStyle == STYLE_GROUP_CALL_TOOLTIP) { + if (states[a].participant != null && states[a].participant.amplitude > 0) { + states[a].wavesDrawable.setShowWaves(true, this); + float amplitude = states[a].participant.amplitude * 15f; + states[a].wavesDrawable.setAmplitude(amplitude); + } else { + states[a].wavesDrawable.setShowWaves(false, this); + } } else { - states[a].wavesDrawable.setShowWaves(false, this); - states[a].wavesDrawable.setAmplitude(0); + if (ConnectionsManager.getInstance(UserConfig.selectedAccount).getCurrentTime() - states[a].lastSpeakTime <= 5) { + states[a].wavesDrawable.setShowWaves(true, this); + states[a].wavesDrawable.setAmplitude(random.nextInt() % 100); + } else { + states[a].wavesDrawable.setShowWaves(false, this); + states[a].wavesDrawable.setAmplitude(0); + } } } states[a].wavesDrawable.update(); @@ -380,13 +401,13 @@ public class AvatarsImageView extends FrameLayout { avatarScale = states[a].wavesDrawable.getAvatarScale(); } else { if (useAlphaLayer) { - canvas.drawCircle(imageReceiver.getCenterX(), imageReceiver.getCenterY(), AndroidUtilities.dp(currentStyle == 4 ? 17 : 13), xRefP); + canvas.drawCircle(imageReceiver.getCenterX(), imageReceiver.getCenterY(), AndroidUtilities.dp(bigAvatars ? 17 : 13), xRefP); } else { int paintAlpha = paint.getAlpha(); if (alpha != 1f) { paint.setAlpha((int) (paintAlpha * alpha)); } - canvas.drawCircle(imageReceiver.getCenterX(), imageReceiver.getCenterY(), AndroidUtilities.dp(currentStyle == 4 ? 17 : 13), paint); + canvas.drawCircle(imageReceiver.getCenterX(), imageReceiver.getCenterY(), AndroidUtilities.dp(bigAvatars ? 
17 : 13), paint); if (alpha != 1f) { paint.setAlpha(paintAlpha); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlockingUpdateView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlockingUpdateView.java index 8c47089c3..6beacf395 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlockingUpdateView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlockingUpdateView.java @@ -31,7 +31,7 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; -import org.telegram.messenger.UserConfig; +import org.telegram.messenger.SharedConfig; import org.telegram.messenger.browser.Browser; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; @@ -75,8 +75,8 @@ public class BlockingUpdateView extends FrameLayout implements NotificationCente pressCount++; if (pressCount >= 10) { setVisibility(GONE); - UserConfig.getInstance(0).pendingAppUpdate = null; - UserConfig.getInstance(0).saveConfig(false); + SharedConfig.pendingAppUpdate = null; + SharedConfig.saveConfig(); } }); @@ -161,26 +161,26 @@ public class BlockingUpdateView extends FrameLayout implements NotificationCente public void setVisibility(int visibility) { super.setVisibility(visibility); if (visibility == GONE) { - NotificationCenter.getInstance(accountNum).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(accountNum).removeObserver(this, NotificationCenter.fileDidFailToLoad); - NotificationCenter.getInstance(accountNum).removeObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(accountNum).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(accountNum).removeObserver(this, NotificationCenter.fileLoadFailed); + NotificationCenter.getInstance(accountNum).removeObserver(this, 
NotificationCenter.fileLoadProgressChanged); } } @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.fileDidLoad) { + if (id == NotificationCenter.fileLoaded) { String location = (String) args[0]; if (fileName != null && fileName.equals(location)) { showProgress(false); openApkInstall((Activity) getContext(), appUpdate.document); } - } else if (id == NotificationCenter.fileDidFailToLoad) { + } else if (id == NotificationCenter.fileLoadFailed) { String location = (String) args[0]; if (fileName != null && fileName.equals(location)) { showProgress(false); } - } else if (id == NotificationCenter.FileLoadProgressChanged) { + } else if (id == NotificationCenter.fileLoadProgressChanged) { String location = (String) args[0]; if (fileName != null && fileName.equals(location)) { Long loadedSize = (Long) args[1]; @@ -304,9 +304,9 @@ public class BlockingUpdateView extends FrameLayout implements NotificationCente } else { acceptTextView.setText(LocaleController.getString("Update", R.string.Update)); } - NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.fileDidFailToLoad); - NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.fileLoadFailed); + NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.fileLoadProgressChanged); if (check) { TLRPC.TL_help_getAppUpdate req = new TLRPC.TL_help_getAppUpdate(); try { @@ -322,8 +322,8 @@ public class BlockingUpdateView extends FrameLayout implements NotificationCente final TLRPC.TL_help_appUpdate res = (TLRPC.TL_help_appUpdate) response; if (!res.can_not_skip) { setVisibility(GONE); - 
UserConfig.getInstance(0).pendingAppUpdate = null; - UserConfig.getInstance(0).saveConfig(false); + SharedConfig.pendingAppUpdate = null; + SharedConfig.saveConfig(); } } })); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlurBehindDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlurBehindDrawable.java index f650aaafe..3f9bce717 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BlurBehindDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BlurBehindDrawable.java @@ -119,7 +119,6 @@ public class BlurBehindDrawable { blurredBitmapTmp[i] = Bitmap.createBitmap((int) (lastW / DOWN_SCALE), (int) (h / DOWN_SCALE), Bitmap.Config.ARGB_8888); blurCanvas[i] = new Canvas(blurredBitmapTmp[i]); } catch (Exception e) { - e.printStackTrace(); FileLog.e(e); AndroidUtilities.runOnUIThread(() -> { error = true; @@ -184,13 +183,21 @@ public class BlurBehindDrawable { queue.cleanupQueue(); queue.postRunnable(() -> { if (renderingBitmap != null) { - renderingBitmap[0].recycle(); - renderingBitmap[1].recycle(); + if (renderingBitmap[0] != null) { + renderingBitmap[0].recycle(); + } + if (renderingBitmap[1] != null) { + renderingBitmap[1].recycle(); + } renderingBitmap = null; } if (backgroundBitmap != null) { - backgroundBitmap[0].recycle(); - backgroundBitmap[1].recycle(); + if (backgroundBitmap[0] != null) { + backgroundBitmap[0].recycle(); + } + if (backgroundBitmap[1] != null) { + backgroundBitmap[1].recycle(); + } backgroundBitmap = null; } renderingBitmapCanvas = null; @@ -307,13 +314,19 @@ public class BlurBehindDrawable { long t = System.currentTimeMillis(); if (backgroundBitmap[i] == null) { int w = width; - backgroundBitmap[i] = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); - backgroundBitmapCanvas[i] = new Canvas(backgroundBitmap[i]); - backgroundBitmapCanvas[i].scale(DOWN_SCALE, DOWN_SCALE); + try { + backgroundBitmap[i] = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); + 
backgroundBitmapCanvas[i] = new Canvas(backgroundBitmap[i]); + backgroundBitmapCanvas[i].scale(DOWN_SCALE, DOWN_SCALE); + } catch (Throwable e) { + FileLog.e(e); + } } emptyPaint.setAlpha(255); Utilities.stackBlurBitmap(blurredBitmapTmp[i], getBlurRadius()); - backgroundBitmapCanvas[i].drawBitmap(blurredBitmapTmp[i], 0, 0, emptyPaint); + if (backgroundBitmapCanvas[i] != null) { + backgroundBitmapCanvas[i].drawBitmap(blurredBitmapTmp[i], 0, 0, emptyPaint); + } if (canceled) { return; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuContainer.java new file mode 100644 index 000000000..b32c5f401 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuContainer.java @@ -0,0 +1,246 @@ +package org.telegram.ui.Components; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ObjectAnimator; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.Drawable; +import android.view.MotionEvent; +import android.view.View; +import android.view.animation.OvershootInterpolator; +import android.widget.FrameLayout; + +import androidx.core.view.NestedScrollingParent; +import androidx.core.view.NestedScrollingParentHelper; +import androidx.core.view.ViewCompat; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.R; +import org.telegram.ui.ActionBar.Theme; + +public class BotCommandsMenuContainer extends FrameLayout implements NestedScrollingParent { + + private ObjectAnimator currentAnimation = null; + private NestedScrollingParentHelper nestedScrollingParentHelper; + + public RecyclerListView listView; + Paint backgroundPaint = new Paint(); + Paint topBackground = new 
Paint(Paint.ANTI_ALIAS_FLAG); + + boolean dismissed = true; + + float scrollYOffset; + + Drawable shadowDrawable; + public BotCommandsMenuContainer(Context context) { + super(context); + + nestedScrollingParentHelper = new NestedScrollingParentHelper(this); + shadowDrawable = context.getResources().getDrawable(R.drawable.sheet_shadow_round).mutate(); + listView = new RecyclerListView(context) { + @Override + protected void dispatchDraw(Canvas canvas) { + if (listView.getLayoutManager() == null || listView.getAdapter() == null || listView.getAdapter().getItemCount() == 0) { + super.dispatchDraw(canvas); + return; + } + View firstView = listView.getLayoutManager().findViewByPosition(0); + float y = 0; + if (firstView != null) { + y = firstView.getY(); + } + if (y < 0) { + y = 0; + } + scrollYOffset = y; + y -= AndroidUtilities.dp(8); + if (y > 0) { + shadowDrawable.setBounds(-AndroidUtilities.dp(8), (int) y - AndroidUtilities.dp(24), getMeasuredWidth() + AndroidUtilities.dp(8), (int) y); + shadowDrawable.draw(canvas); + } + canvas.drawRect(0, y, getMeasuredWidth(), getMeasuredHeight() + AndroidUtilities.dp(16), backgroundPaint); + AndroidUtilities.rectTmp.set(getMeasuredWidth() / 2f - AndroidUtilities.dp(12), y - AndroidUtilities.dp(4),getMeasuredWidth() / 2f + AndroidUtilities.dp(12), y); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(4), AndroidUtilities.dp(4), topBackground); + super.dispatchDraw(canvas); + } + }; + listView.setClipToPadding(false); + addView(listView); + updateColors(); + setClipChildren(false); + } + + @Override + public boolean onStartNestedScroll(View child, View target, int nestedScrollAxes) { + return !dismissed && nestedScrollAxes == ViewCompat.SCROLL_AXIS_VERTICAL; + } + + @Override + public void onNestedScrollAccepted(View child, View target, int nestedScrollAxes) { + nestedScrollingParentHelper.onNestedScrollAccepted(child, target, nestedScrollAxes); + if (dismissed) { + return; + } + cancelCurrentAnimation(); + } + 
+ @Override + public void onStopNestedScroll(View target) { + nestedScrollingParentHelper.onStopNestedScroll(target); + if (dismissed) { + return; + } + checkDismiss(); + } + + private void checkDismiss() { + if (dismissed) { + return; + } + if (listView.getTranslationY() > AndroidUtilities.dp(16)) { + dismiss(); + } else { + playEnterAnim(false); + } + } + + @Override + public void onNestedScroll(View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed) { + if (dismissed) { + return; + } + cancelCurrentAnimation(); + if (dyUnconsumed != 0) { + float currentTranslation = listView.getTranslationY(); + currentTranslation -= dyUnconsumed; + if (currentTranslation < 0) { + currentTranslation = 0; + } + listView.setTranslationY(currentTranslation); + invalidate(); + } + } + + @Override + public void onNestedPreScroll(View target, int dx, int dy, int[] consumed) { + if (dismissed) { + return; + } + cancelCurrentAnimation(); + float currentTranslation = listView.getTranslationY(); + if (currentTranslation > 0 && dy > 0) { + currentTranslation -= dy; + consumed[1] = dy; + if (currentTranslation < 0) { + currentTranslation = 0; + } + listView.setTranslationY(currentTranslation); + invalidate(); + } + } + + @Override + public boolean onNestedFling(View target, float velocityX, float velocityY, boolean consumed) { + return false; + } + + @Override + public boolean onNestedPreFling(View target, float velocityX, float velocityY) { + return false; + } + + @Override + public int getNestedScrollAxes() { + return nestedScrollingParentHelper.getNestedScrollAxes(); + } + + private void cancelCurrentAnimation() { + if (currentAnimation != null) { + currentAnimation.removeAllListeners(); + currentAnimation.cancel(); + currentAnimation = null; + } + } + + private boolean entering; + + public void show() { + if (getVisibility() != View.VISIBLE) { + setVisibility(View.VISIBLE); + listView.scrollToPosition(0); + entering = true; + dismissed = false; + } else if 
(dismissed) { + dismissed = false; + cancelCurrentAnimation(); + playEnterAnim(false); + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + if (entering && !dismissed) { + listView.setTranslationY(listView.getMeasuredHeight() - listView.getPaddingTop() + AndroidUtilities.dp(16)); + playEnterAnim(true); + entering = false; + } + } + + private void playEnterAnim(boolean firstTime) { + if (dismissed) { + return; + } + currentAnimation = ObjectAnimator.ofFloat(listView, TRANSLATION_Y, listView.getTranslationY(), 0); + if (firstTime) { + currentAnimation.setDuration(320); + currentAnimation.setInterpolator(new OvershootInterpolator(0.8f)); + } else { + currentAnimation.setDuration(150); + currentAnimation.setInterpolator(CubicBezierInterpolator.DEFAULT); + } + currentAnimation.start(); + } + + public void dismiss() { + if (!dismissed) { + dismissed = true; + cancelCurrentAnimation(); + currentAnimation = ObjectAnimator.ofFloat(listView, TRANSLATION_Y, listView.getTranslationY(), getMeasuredHeight() - scrollYOffset + AndroidUtilities.dp(40)); + currentAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + setVisibility(View.GONE); + currentAnimation = null; + } + }); + currentAnimation.setDuration(150); + currentAnimation.setInterpolator(CubicBezierInterpolator.DEFAULT); + currentAnimation.start(); + onDismiss(); + } + } + + protected void onDismiss() { + + } + + @Override + public boolean dispatchTouchEvent(MotionEvent ev) { + if (ev.getAction() == MotionEvent.ACTION_DOWN && ev.getY() < scrollYOffset - AndroidUtilities.dp(24)) { + return false; + } + return super.dispatchTouchEvent(ev); + } + + public void updateColors() { + topBackground.setColor(Theme.getColor(Theme.key_dialogGrayLine)); + backgroundPaint.setColor(Theme.getColor(Theme.key_windowBackgroundWhite)); + shadowDrawable.setColorFilter(new 
PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhite), PorterDuff.Mode.MULTIPLY)); + invalidate(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuView.java new file mode 100644 index 000000000..216a47007 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotCommandsMenuView.java @@ -0,0 +1,281 @@ +package org.telegram.ui.Components; + +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.drawable.Drawable; +import android.text.Layout; +import android.text.StaticLayout; +import android.text.TextPaint; +import android.text.TextUtils; +import android.util.SparseArray; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.View; +import android.view.ViewGroup; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.MenuDrawable; +import org.telegram.ui.ActionBar.Theme; + +import java.util.ArrayList; + +public class BotCommandsMenuView extends View { + + final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + final TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); + final MenuDrawable backDrawable = new MenuDrawable() { + @Override + public void invalidateSelf() { + super.invalidateSelf(); + invalidate(); + } + }; + boolean expanded; + float expandProgress; + + StaticLayout menuText; + boolean isOpened; + + Drawable backgroundDrawable; + + public BotCommandsMenuView(Context context) { + super(context); + updateColors(); + backDrawable.setMiniIcon(true); + 
backDrawable.setRotateToBack(false); + backDrawable.setRotation(0f, false); + backDrawable.setCallback(this); + textPaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + backDrawable.setRoundCap(); + backgroundDrawable = Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(16), Color.TRANSPARENT, Theme.getColor(Theme.key_windowBackgroundWhite)); + backgroundDrawable.setCallback(this); + } + + private void updateColors() { + paint.setColor(Theme.getColor(Theme.key_chat_messagePanelVoiceBackground)); + int textColor = Theme.getColor(Theme.key_windowBackgroundWhite); + backDrawable.setBackColor(textColor); + backDrawable.setIconColor(textColor); + textPaint.setColor(textColor); + } + + int lastSize; + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int size = MeasureSpec.getSize(widthMeasureSpec) + MeasureSpec.getSize(heightMeasureSpec) << 16; + if (lastSize != size) { + backDrawable.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight()); + textPaint.setTextSize(AndroidUtilities.dp(15)); + lastSize = size; + String string = LocaleController.getString("BotsMenuTitle", R.string.BotsMenuTitle); + int w = (int) textPaint.measureText(string); + menuText = StaticLayoutEx.createStaticLayout(string, textPaint, w, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0f, false, TextUtils.TruncateAt.END, w, 1); + } + onTranslationChanged((menuText.getWidth() + AndroidUtilities.dp(4)) * expandProgress); + int width = AndroidUtilities.dp(40); + if (expanded) { + width += menuText.getWidth() + AndroidUtilities.dp(4); + } + + super.onMeasure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(32), MeasureSpec.EXACTLY)); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + boolean update = false; + if (expanded && expandProgress != 1f) { + expandProgress += 16f / 150f; + if (expandProgress > 1) { + expandProgress = 1f; + } else { + invalidate(); + } + update = 
true; + } else if (!expanded && expandProgress != 0) { + expandProgress -= 16f / 150f; + if (expandProgress < 0) { + expandProgress = 0; + } else { + invalidate(); + } + update = true; + } + + float expandProgress = CubicBezierInterpolator.DEFAULT.getInterpolation(this.expandProgress); + if (update && expandProgress > 0) { + textPaint.setAlpha((int) (255 * expandProgress)); + } + AndroidUtilities.rectTmp.set(0, 0, AndroidUtilities.dp(40) + (menuText.getWidth() + AndroidUtilities.dp(4)) * expandProgress, getMeasuredHeight()); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(16), AndroidUtilities.dp(16), paint); + backgroundDrawable.setBounds((int) AndroidUtilities.rectTmp.left, (int) AndroidUtilities.rectTmp.top, (int) AndroidUtilities.rectTmp.right, (int) AndroidUtilities.rectTmp.bottom); + backgroundDrawable.draw(canvas); + canvas.save(); + canvas.translate(AndroidUtilities.dp(8), AndroidUtilities.dp(4)); + backDrawable.draw(canvas); + canvas.restore(); + + + + if (expandProgress > 0) { + canvas.save(); + canvas.translate(AndroidUtilities.dp(34), (getMeasuredHeight() - menuText.getHeight()) / 2f); + menuText.draw(canvas); + canvas.restore(); + } + + if (update) { + onTranslationChanged((menuText.getWidth() + AndroidUtilities.dp(4)) * expandProgress); + } + super.dispatchDraw(canvas); + } + + protected void onTranslationChanged(float translationX) { + + } + + public void setExpanded(boolean expanded, boolean animated) { + if (this.expanded != expanded) { + this.expanded = expanded; + if (!animated) { + expandProgress = expanded ? 
1f : 0f; + } + requestLayout(); + invalidate(); + } + } + + public boolean isOpened() { + return isOpened; + } + + public static class BotCommandsAdapter extends RecyclerListView.SelectionAdapter { + + ArrayList newResult = new ArrayList<>(); + ArrayList newResultHelp = new ArrayList<>(); + + public BotCommandsAdapter() { + + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + return true; + } + + @NonNull + @Override + public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { +// FlickerLoadingView flickerLoadingView = new FlickerLoadingView(parent.getContext()); +// flickerLoadingView.setIsSingleCell(true); +// flickerLoadingView.setViewType(FlickerLoadingView.BOTS_MENU_TYPE); +// return new RecyclerListView.Holder(flickerLoadingView); + BotCommandView view = new BotCommandView(parent.getContext()); + view.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); + return new RecyclerListView.Holder(view); + } + + @Override + public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { + BotCommandView view = (BotCommandView) holder.itemView; + view.command.setText(newResult.get(position)); + view.description.setText(newResultHelp.get(position)); + view.commandStr = newResult.get(position); + } + + @Override + public int getItemCount() { + return newResult.size(); + } + + public void setBotInfo(SparseArray botInfo) { + newResult.clear(); + newResultHelp.clear(); + for (int b = 0; b < botInfo.size(); b++) { + TLRPC.BotInfo info = botInfo.valueAt(b); + for (int a = 0; a < info.commands.size(); a++) { + TLRPC.TL_botCommand botCommand = info.commands.get(a); + if (botCommand != null && botCommand.command != null) { + newResult.add("/" + botCommand.command); + if (botCommand.description != null && botCommand.description.length() > 1) { + newResultHelp.add(botCommand.description.substring(0, 1).toUpperCase() + 
botCommand.description.substring(1).toLowerCase()); + } else { + newResultHelp.add(botCommand.description); + } + } + } + } + notifyDataSetChanged(); + } + } + + public void setOpened(boolean opened) { + if (isOpened != opened) { + isOpened = opened; + } + backDrawable.setRotation(opened ? 1f : 0f, true); + } + + public static class BotCommandView extends LinearLayout { + + TextView command; + TextView description; + String commandStr; + + public BotCommandView(@NonNull Context context) { + super(context); + setOrientation(HORIZONTAL); + setPadding(AndroidUtilities.dp(16), 0, AndroidUtilities.dp(16), 0); + + description = new TextView(context); + description.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + description.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + description.setTag(Theme.key_windowBackgroundWhiteBlackText); + description.setLines(1); + description.setEllipsize(TextUtils.TruncateAt.END); + addView(description, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 1f, Gravity.CENTER_VERTICAL, 0, 0, AndroidUtilities.dp(8), 0)); + + command = new TextView(context); + command.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + command.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); + command.setTag(Theme.key_windowBackgroundWhiteGrayText); + addView(command, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, 0f, Gravity.CENTER_VERTICAL)); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(36), MeasureSpec.EXACTLY)); + } + + public String getCommand() { + return commandStr; + } + } + + @Override + protected boolean verifyDrawable(@NonNull Drawable who) { + return super.verifyDrawable(who) || backgroundDrawable == who; + } + + @Override + protected void drawableStateChanged() { + super.drawableStateChanged(); + 
backgroundDrawable.setState(getDrawableState()); + } + + @Override + public void jumpDrawablesToCurrentState() { + super.jumpDrawablesToCurrentState(); + backgroundDrawable.jumpToCurrentState(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotKeyboardView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotKeyboardView.java index c07df0486..e5002304e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/BotKeyboardView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/BotKeyboardView.java @@ -48,9 +48,17 @@ public class BotKeyboardView extends LinearLayout { container = new LinearLayout(context); container.setOrientation(VERTICAL); scrollView.addView(container); - AndroidUtilities.setScrollViewEdgeEffectColor(scrollView, Theme.getColor(Theme.key_chat_emojiPanelBackground)); + updateColors(); + } + public void updateColors() { + AndroidUtilities.setScrollViewEdgeEffectColor(scrollView, Theme.getColor(Theme.key_chat_emojiPanelBackground)); setBackgroundColor(Theme.getColor(Theme.key_chat_emojiPanelBackground)); + for (int i = 0; i < buttonViews.size(); i++) { + buttonViews.get(i).setTextColor(Theme.getColor(Theme.key_chat_botKeyboardButtonText)); + buttonViews.get(i).setBackgroundDrawable(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), Theme.getColor(Theme.key_chat_botKeyboardButtonBackground), Theme.getColor(Theme.key_chat_botKeyboardButtonBackgroundPressed))); + } + invalidate(); } public void setDelegate(BotKeyboardViewDelegate botKeyboardViewDelegate) { @@ -106,9 +114,10 @@ public class BotKeyboardView extends LinearLayout { TextView textView = new TextView(getContext()); textView.setTag(button); textView.setTextColor(Theme.getColor(Theme.key_chat_botKeyboardButtonText)); + textView.setBackgroundDrawable(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), Theme.getColor(Theme.key_chat_botKeyboardButtonBackground), 
Theme.getColor(Theme.key_chat_botKeyboardButtonBackgroundPressed))); textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); textView.setGravity(Gravity.CENTER); - textView.setBackgroundDrawable(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), Theme.getColor(Theme.key_chat_botKeyboardButtonBackground), Theme.getColor(Theme.key_chat_botKeyboardButtonBackgroundPressed))); + textView.setPadding(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); textView.setText(Emoji.replaceEmoji(button.text, textView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(16), false)); layout.addView(textView, LayoutHelper.createLinear(0, LayoutHelper.MATCH_PARENT, weight, 0, 0, b != row.buttons.size() - 1 ? 10 : 0, 0)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivityEnterTopView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterTopView.java similarity index 98% rename from TMessagesProj/src/main/java/org/telegram/ui/ChatActivityEnterTopView.java rename to TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterTopView.java index 41999a227..fd7765fd1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivityEnterTopView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterTopView.java @@ -1,4 +1,4 @@ -package org.telegram.ui; +package org.telegram.ui.Components; import android.content.Context; import android.view.View; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java index f45b25c31..8fc5ed592 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatActivityEnterView.java @@ -16,7 +16,9 @@ import android.animation.ObjectAnimator; import android.animation.ValueAnimator; import android.annotation.SuppressLint; import 
android.app.Activity; +import android.content.ClipData; import android.content.ClipDescription; +import android.content.ClipboardManager; import android.content.Context; import android.content.SharedPreferences; import android.content.pm.PackageManager; @@ -49,8 +51,8 @@ import android.text.TextPaint; import android.text.TextUtils; import android.text.TextWatcher; import android.text.style.ImageSpan; -import android.util.Log; import android.util.Property; +import android.util.SparseArray; import android.util.TypedValue; import android.view.ActionMode; import android.view.Gravity; @@ -86,6 +88,7 @@ import androidx.core.view.inputmethod.EditorInfoCompat; import androidx.core.view.inputmethod.InputConnectionCompat; import androidx.core.view.inputmethod.InputContentInfoCompat; import androidx.customview.widget.ExploreByTouchHelper; +import androidx.recyclerview.widget.LinearLayoutManager; import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; @@ -219,11 +222,17 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe private NumberTextView captionLimitView; private int currentLimit = -1; private int codePointCount; - CrossOutDrawable notifySilentDrawable; + private CrossOutDrawable notifySilentDrawable; private Runnable moveToSendStateRunnable; boolean messageTransitionIsRunning; + private BotCommandsMenuView botCommandsMenuButton; + public BotCommandsMenuContainer botCommandsMenuContainer; + private BotCommandsMenuView.BotCommandsAdapter botCommandsAdapter; + + private HashMap animationParamsX = new HashMap<>(); + private class SeekBarWaveformView extends View { public SeekBarWaveformView(Context context) { @@ -355,7 +364,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe protected float topViewEnterProgress; protected int animatedTop; public ValueAnimator currentTopViewAnimation; - private ReplaceableIconDrawable botButtonDrawablel; + private ReplaceableIconDrawable 
botButtonDrawable; private boolean isPaste; @@ -410,6 +419,8 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe private int lastSizeChangeValue1; private boolean lastSizeChangeValue2; + private int[] location = new int[2]; + private Activity parentActivity; private ChatActivity parentFragment; private long dialog_id; @@ -1394,7 +1405,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } drawingCircleRadius = radius; } - + public void drawIcon(Canvas canvas, int cx, int cy, float alpha) { Drawable drawable; Drawable replaceDrawable = null; @@ -1621,7 +1632,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messageReceivedByServer); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.sendingMessagesChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.audioRecordTooShort); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); parentActivity = context; parentFragment = fragment; @@ -1649,6 +1660,17 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe int x = getMeasuredWidth() - AndroidUtilities.dp(botButton != null && botButton.getVisibility() == VISIBLE ? 
96 : 48) - AndroidUtilities.dp(48); scheduledButton.layout(x, scheduledButton.getTop(), x + scheduledButton.getMeasuredWidth(), scheduledButton.getBottom()); } + if (!animationParamsX.isEmpty()) { + for (int i = 0; i < getChildCount(); i++) { + View child = getChildAt(i); + Float fromX = animationParamsX.get(child); + if (fromX != null) { + child.setTranslationX(fromX - child.getLeft()); + child.animate().translationX(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } + } + animationParamsX.clear(); + } } @Override @@ -1796,7 +1818,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } else { openKeyboardInternal(); } - + return false; } try { return super.onTouchEvent(event); @@ -1850,6 +1872,41 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe if (id == android.R.id.paste) { isPaste = true; } + + ClipboardManager clipboard = (ClipboardManager) getContext().getSystemService(Context.CLIPBOARD_SERVICE); + ClipData clipData = clipboard.getPrimaryClip(); + if (clipData != null) { + ClipDescription description = clipData.getDescription(); + if (clipData.getItemCount() == 1 && clipData.getDescription().hasMimeType("image/*")) { +// final File cameraFile = AndroidUtilities.generatePicturePath(fragment.isSecretChat(), null); +// try { +// InputStream fis = context.getContentResolver().openInputStream(clipData.getItemAt(0).getUri()); +// FileOutputStream fos = new FileOutputStream(cameraFile); +// FileUtils.copy(fis, fos); +// fis.close(); +// fos.close(); +// MediaController.PhotoEntry photoEntry = new MediaController.PhotoEntry(0, -1, 0, cameraFile.getAbsolutePath(), 0, false, 0, 0, 0); +// ArrayList entries = new ArrayList<>(); +// entries.add(photoEntry); +// PhotoViewer.getInstance().setParentActivity(parentActivity); +// PhotoViewer.getInstance().openPhotoForSelect(entries, 0, 2, false, new PhotoViewer.EmptyPhotoViewerProvider() { +// @Override +// public void 
sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { +// super.sendButtonPressed(index, videoEditedInfo, notify, scheduleDate, forceDocument); +// } +// }, parentFragment); +// } catch (FileNotFoundException e) { +// e.printStackTrace(); +// } catch (IOException e) { +// e.printStackTrace(); +// } + if (description.hasMimeType("image/gif")) { + SendMessagesHelper.prepareSendingDocument(accountInstance, null, null, clipData.getItemAt(0).getUri(), null, "image/gif", dialog_id, replyingMessageObject, getThreadMessage(), null, null, true, 0); + } else { + SendMessagesHelper.prepareSendingPhoto(accountInstance, null, clipData.getItemAt(0).getUri(), dialog_id, replyingMessageObject, getThreadMessage(), null, null, null, null, 0, null, true, 0); + } + } + } return super.onTextContextMenuItem(id); } }; @@ -2045,6 +2102,10 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe }); doneButtonColorAnimator.setDuration(150).start(); } + if (botCommandsMenuContainer != null) { + botCommandsMenuContainer.dismiss(); + } + checkBotMenu(); } }); @@ -2079,10 +2140,79 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe attachLayout.setClipChildren(false); frameLayout.addView(attachLayout, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 48, Gravity.BOTTOM | Gravity.RIGHT)); + + botCommandsMenuButton = new BotCommandsMenuView(getContext()); + botCommandsMenuButton.setOnClickListener(view -> { + boolean open = !botCommandsMenuButton.isOpened(); + botCommandsMenuButton.setOpened(open); + if (open) { + botCommandsMenuContainer.show(); + } else { + botCommandsMenuContainer.dismiss(); + } + }); + frameLayout.addView(botCommandsMenuButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 32, Gravity.BOTTOM | Gravity.LEFT, 10, 8, 10, 8)); + AndroidUtilities.updateViewVisibilityAnimated(botCommandsMenuButton, false, 1f, false); + 
botCommandsMenuButton.setExpanded(true, false); + + LinearLayoutManager layoutManager = new LinearLayoutManager(context); + + botCommandsMenuContainer = new BotCommandsMenuContainer(context) { + @Override + protected void onDismiss() { + super.onDismiss(); + botCommandsMenuButton.setOpened(false); + } + }; + botCommandsMenuContainer.listView.setLayoutManager(layoutManager); + botCommandsMenuContainer.listView.setAdapter(botCommandsAdapter = new BotCommandsMenuView.BotCommandsAdapter()); + botCommandsMenuContainer.listView.setOnItemClickListener(new RecyclerListView.OnItemClickListener() { + @Override + public void onItemClick(View view, int position) { + if (view instanceof BotCommandsMenuView.BotCommandView) { + String command = ((BotCommandsMenuView.BotCommandView) view).getCommand(); + if (TextUtils.isEmpty(command)) { + return; + } + if (isInScheduleMode()) { + AlertsCreator.createScheduleDatePickerDialog(parentActivity, dialog_id, (notify, scheduleDate) -> { + SendMessagesHelper.getInstance(currentAccount).sendMessage((String) command, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, notify, scheduleDate, null); + setFieldText(""); + botCommandsMenuContainer.dismiss(); + }); + } else { + if (fragment != null && fragment.checkSlowMode(view)) { + return; + } + SendMessagesHelper.getInstance(currentAccount).sendMessage(command, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null); + setFieldText(""); + botCommandsMenuContainer.dismiss(); + } + + } + } + }); + botCommandsMenuContainer.listView.setOnItemLongClickListener(new RecyclerListView.OnItemLongClickListener() { + @Override + public boolean onItemClick(View view, int position) { + if (view instanceof BotCommandsMenuView.BotCommandView) { + String command = ((BotCommandsMenuView.BotCommandView) view).getCommand(); + setFieldText(command + " "); + botCommandsMenuContainer.dismiss(); + return true; + } + return false; + } + }); + 
botCommandsMenuContainer.setClipToPadding(false); + sizeNotifierLayout.addView(botCommandsMenuContainer, 14, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.BOTTOM, 0, 0, 0, 47)); + botCommandsMenuContainer.setVisibility(View.GONE); + + botButton = new ImageView(context); - botButton.setImageDrawable(botButtonDrawablel = new ReplaceableIconDrawable(context)); - botButtonDrawablel.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.MULTIPLY)); - botButtonDrawablel.setIcon(R.drawable.input_bot2, false); + botButton.setImageDrawable(botButtonDrawable = new ReplaceableIconDrawable(context)); + botButtonDrawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_messagePanelIcons), PorterDuff.Mode.MULTIPLY)); + botButtonDrawable.setIcon(R.drawable.input_bot2, false); botButton.setScaleType(ImageView.ScaleType.CENTER); if (Build.VERSION.SDK_INT >= 21) { botButton.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_listSelector))); @@ -2749,6 +2879,13 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe checkChannelRights(); } + private void checkBotMenu() { + if (botCommandsMenuButton != null) { + botCommandsMenuButton.setExpanded(TextUtils.isEmpty(messageEditText.getText()) && !(keyboardVisible || waitingForKeyboardOpen || isPopupShowing()), true); + beginDelayedTransition(); + } + } + protected void onLineCountChanged(int oldLineCount, int newLineCount) { } @@ -2911,7 +3048,6 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe sendPopupLayout.measure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), MeasureSpec.AT_MOST)); sendPopupWindow.setFocusable(true); - int[] location = new int[2]; view.getLocationInWindow(location); int y; if (keyboardVisible && 
ChatActivityEnterView.this.getMeasuredHeight() > AndroidUtilities.dp(topView != null && topView.getVisibility() == VISIBLE ? 48 + 58 : 58)) { @@ -3409,7 +3545,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messageReceivedByServer); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.sendingMessagesChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.audioRecordTooShort); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); if (emojiView != null) { emojiView.onDestroy(); } @@ -3550,9 +3686,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe int high_id = (int) (dialog_id >> 32); if (lower_id == 0 && high_id != 0) { TLRPC.EncryptedChat encryptedChat = accountInstance.getMessagesController().getEncryptedChat(high_id); - if (AndroidUtilities.getPeerLayerVersion(encryptedChat.layer) >= 66) { - hasRecordVideo = true; - } + hasRecordVideo = true; } else { hasRecordVideo = true; } @@ -3588,8 +3722,12 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } private void updateFieldHint(boolean animated) { - if (editingMessageObject != null) { + if (replyingMessageObject != null && replyingMessageObject.messageOwner.reply_markup != null && !TextUtils.isEmpty(replyingMessageObject.messageOwner.reply_markup.placeholder)) { + messageEditText.setHintText(replyingMessageObject.messageOwner.reply_markup.placeholder, animated); + } else if (editingMessageObject != null) { messageEditText.setHintText(editingCaption ? 
LocaleController.getString("Caption", R.string.Caption) : LocaleController.getString("TypeMessage", R.string.TypeMessage)); + } else if (botKeyboardViewVisible && botButtonsMessageObject != null && botButtonsMessageObject.messageOwner.reply_markup != null && !TextUtils.isEmpty(botButtonsMessageObject.messageOwner.reply_markup.placeholder)) { + messageEditText.setHintText(botButtonsMessageObject.messageOwner.reply_markup.placeholder, animated); } else { boolean isChannel = false; boolean anonymously = false; @@ -3635,6 +3773,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe replyingMessageObject = null; } MediaController.getInstance().setReplyingMessage(messageObject, getThreadMessage()); + updateFieldHint(false); } public void setWebPage(TLRPC.WebPage webPage, boolean searchWebPages) { @@ -3695,6 +3834,18 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe ObjectAnimator.ofFloat(messageEditText, View.ALPHA, 1f), ObjectAnimator.ofFloat(messageEditText, View.TRANSLATION_X, 0) ); + + if (botCommandsMenuButton != null) { + botCommandsMenuButton.setAlpha(0f); + botCommandsMenuButton.setScaleY(0); + botCommandsMenuButton.setScaleX(0); + + recordPannelAnimation.playTogether( + ObjectAnimator.ofFloat(botCommandsMenuButton, View.ALPHA, 1.0f), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_X, 1.0f), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_Y, 1.0f) + ); + } recordPannelAnimation.setDuration(150); recordPannelAnimation.addListener(new AnimatorListenerAdapter() { @Override @@ -3773,6 +3924,19 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe ObjectAnimator.ofFloat(emojiButton[1], View.SCALE_X, 1.0f), ObjectAnimator.ofFloat(emojiButton[1], View.SCALE_Y, 1.0f) ); + + if (botCommandsMenuButton != null) { + botCommandsMenuButton.setAlpha(0f); + botCommandsMenuButton.setScaleY(0); + botCommandsMenuButton.setScaleX(0); + + iconsEndAnimator.playTogether( 
+ ObjectAnimator.ofFloat(botCommandsMenuButton, View.ALPHA, 1.0f), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_X, 1.0f), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_Y, 1.0f) + ); + } + iconsEndAnimator.setDuration(150); iconsEndAnimator.setStartDelay(600); @@ -3853,7 +4017,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe if (playing != null && playing == audioToSendMessageObject) { MediaController.getInstance().cleanupPlayer(true, true); } - SendMessagesHelper.getInstance(currentAccount).sendMessage(audioToSend, null, audioToSendPath, dialog_id, replyingMessageObject, getThreadMessage(), null, null, null, null, notify, scheduleDate, 0, null); + SendMessagesHelper.getInstance(currentAccount).sendMessage(audioToSend, null, audioToSendPath, dialog_id, replyingMessageObject, getThreadMessage(), null, null, null, null, notify, scheduleDate, 0, null, null); if (delegate != null) { delegate.onMessageSend(null, notify, scheduleDate); } @@ -3901,7 +4065,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } CharSequence[] message = new CharSequence[]{AndroidUtilities.getTrimmedString(messageEditText.getText())}; ArrayList entities = MediaDataController.getInstance(currentAccount).getEntities(message, supportsSendingNewEntities()); - if (!TextUtils.equals(message[0], editingMessageObject.messageText) || entities != null && !entities.isEmpty() || editingMessageObject.messageOwner.media instanceof TLRPC.TL_messageMediaWebPage) { + if (!TextUtils.equals(message[0], editingMessageObject.messageText) || entities != null && !entities.isEmpty() || entities == null && !editingMessageObject.messageOwner.entities.isEmpty() || editingMessageObject.messageOwner.media instanceof TLRPC.TL_messageMediaWebPage) { editingMessageObject.editingMessage = message[0]; editingMessageObject.editingMessageEntities = entities; editingMessageObject.editingMessageSearchWebPage = messageWebPageSearch; @@ 
-3957,7 +4121,17 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } CharSequence[] message = new CharSequence[]{AndroidUtilities.getTrimmedString(text.subSequence(start, end))}; ArrayList entities = MediaDataController.getInstance(currentAccount).getEntities(message, supportsNewEntities); - SendMessagesHelper.getInstance(currentAccount).sendMessage(message[0].toString(), dialog_id, replyingMessageObject, getThreadMessage(), messageWebPage, messageWebPageSearch, entities, null, null, notify, scheduleDate); + MessageObject.SendAnimationData sendAnimationData; + if (message[0].length() < 20) { + sendAnimationData = new MessageObject.SendAnimationData(); + sendAnimationData.width = sendAnimationData.height = AndroidUtilities.dp(22); + messageEditText.getLocationInWindow(location); + sendAnimationData.x = location[0] + AndroidUtilities.dp(11); + sendAnimationData.y = location[1] + AndroidUtilities.dp(8 + 11); + } else { + sendAnimationData = null; + } + SendMessagesHelper.getInstance(currentAccount).sendMessage(message[0].toString(), dialog_id, replyingMessageObject, getThreadMessage(), messageWebPage, messageWebPageSearch, entities, null, null, notify, scheduleDate, sendAnimationData); start = end + 1; } while (end != text.length()); return true; @@ -4505,7 +4679,11 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } if (attachLayout != null) { - attachLayout.setVisibility(VISIBLE); + if (attachLayout.getVisibility() != View.VISIBLE) { + attachLayout.setVisibility(VISIBLE); + attachLayout.setAlpha(0f); + attachLayout.setScaleX(0f); + } runningAnimation2 = new AnimatorSet(); ArrayList animators = new ArrayList<>(); animators.add(ObjectAnimator.ofFloat(attachLayout, View.ALPHA, 1.0f)); @@ -4786,7 +4964,14 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe if (videoSendButton != null) { iconChanges.playTogether(ObjectAnimator.ofFloat(videoSendButton, View.ALPHA, 0)); } - 
iconChanges.setStartDelay(150); + + if (botCommandsMenuButton != null) { + iconChanges.playTogether( + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_Y, 0), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_X, 0), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.ALPHA, 0) + ); + } AnimatorSet viewTransition = new AnimatorSet(); viewTransition.playTogether( @@ -4897,6 +5082,13 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe ObjectAnimator.ofFloat(messageEditText, View.TRANSLATION_X, 0), ObjectAnimator.ofFloat(recordCircle, "slideToCancelProgress", 1f) ); + if (botCommandsMenuButton != null) { + runningAnimationAudio.playTogether( + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_Y, 1), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_X, 1), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.ALPHA, 1) + ); + } if (audioSendButton != null) { audioSendButton.setScaleX(1f); audioSendButton.setScaleY(1f); @@ -5024,6 +5216,14 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe ); } + if (botCommandsMenuButton != null) { + iconsAnimator.playTogether( + ObjectAnimator.ofFloat(botCommandsMenuButton, View.ALPHA, 0), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_X, 0), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_Y, 0) + ); + } + iconsAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { @@ -5083,6 +5283,11 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe emojiButton[i].setScaleX(0f); emojiButton[i].setAlpha(0f); } + if (botCommandsMenuButton != null) { + botCommandsMenuButton.setAlpha(0f); + botCommandsMenuButton.setScaleX(0f); + botCommandsMenuButton.setScaleY(0f); + } } }); @@ -5105,6 +5310,12 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe ObjectAnimator.ofFloat(recordDot, View.SCALE_X, 0) ); + if 
(botCommandsMenuButton != null) { + iconsAnimator.playTogether( + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_Y, 1), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_X, 1), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.ALPHA, 1)); + } AnimatorSet recordTimer = new AnimatorSet(); recordTimer.playTogether( ObjectAnimator.ofFloat(recordTimerView, View.ALPHA, 0.0f), @@ -5251,12 +5462,16 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe ObjectAnimator.ofFloat(emojiButton[1], View.SCALE_Y, 1), ObjectAnimator.ofFloat(emojiButton[1], View.SCALE_X, 1), ObjectAnimator.ofFloat(emojiButton[1], View.ALPHA, 1), - ObjectAnimator.ofFloat(recordDot, View.SCALE_Y, 0), ObjectAnimator.ofFloat(recordDot, View.SCALE_X, 0), - ObjectAnimator.ofFloat(audioVideoButtonContainer, View.ALPHA, 1.0f) ); + if (botCommandsMenuButton != null) { + iconsAnimator.playTogether( + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_Y, 1), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.SCALE_X, 1), + ObjectAnimator.ofFloat(botCommandsMenuButton, View.ALPHA, 1)); + } if (audioSendButton != null) { audioSendButton.setScaleX(1f); audioSendButton.setScaleY(1f); @@ -5378,9 +5593,9 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } TLRPC.User user = messageObject != null && (int) dialog_id < 0 ? 
accountInstance.getMessagesController().getUser(messageObject.messageOwner.from_id.user_id) : null; if ((botCount != 1 || username) && user != null && user.bot && !command.contains("@")) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(String.format(Locale.US, "%s@%s", command, user.username), dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(String.format(Locale.US, "%s@%s", command, user.username), dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null); } else { - SendMessagesHelper.getInstance(currentAccount).sendMessage(command, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(command, dialog_id, replyingMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null); } } } @@ -5624,6 +5839,12 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe int color = Theme.getColor(Theme.key_chat_messagePanelVoicePressed); int defaultAlpha = Color.alpha(color); doneCheckDrawable.setColorFilter(new PorterDuffColorFilter(ColorUtils.setAlphaComponent(color, (int) (defaultAlpha * (0.58f + 0.42f * doneButtonEnabledProgress))), PorterDuff.Mode.MULTIPLY)); + if (botCommandsMenuContainer != null) { + botCommandsMenuContainer.updateColors(); + } + if (botKeyboardView != null) { + botKeyboardView.updateColors(); + } } private void updateRecordedDeleteIconColors() { @@ -5836,29 +6057,47 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } } - private void updateBotButton() { + private void updateBotButton(boolean animated) { if (botButton == null) { return; } + if (!parentFragment.openAnimationEnded) { + animated = false; + } + boolean canShowBotsMenu = hasBotCommands && dialog_id > 0; +// if (canShowBotsMenu && ) { +// TLRPC.Chat chat 
= accountInstance.getMessagesController().getChat(-(int) dialog_id); +// canShowBotsMenu = chat == null || !chat.megagroup; +// } + if (hasBotCommands || botReplyMarkup != null) { - if (botButton.getVisibility() != VISIBLE) { - botButton.setVisibility(VISIBLE); - } if (botReplyMarkup != null) { + if (botButton.getVisibility() != VISIBLE) { + botButton.setVisibility(VISIBLE); + } if (isPopupShowing() && currentPopupContentType == 1) { - botButtonDrawablel.setIcon(R.drawable.input_keyboard, true); + botButtonDrawable.setIcon(R.drawable.input_keyboard, true); botButton.setContentDescription(LocaleController.getString("AccDescrShowKeyboard", R.string.AccDescrShowKeyboard)); } else { - botButtonDrawablel.setIcon(R.drawable.input_bot2, true); + botButtonDrawable.setIcon(R.drawable.input_bot2, true); botButton.setContentDescription(LocaleController.getString("AccDescrBotKeyboard", R.string.AccDescrBotKeyboard)); } } else { - botButtonDrawablel.setIcon(R.drawable.input_bot1, true); - botButton.setContentDescription(LocaleController.getString("AccDescrBotCommands", R.string.AccDescrBotCommands)); + if (!canShowBotsMenu) { + botButtonDrawable.setIcon(R.drawable.input_bot1, true); + botButton.setContentDescription(LocaleController.getString("AccDescrBotCommands", R.string.AccDescrBotCommands)); + botButton.setVisibility(VISIBLE); + } else { + botButton.setVisibility(GONE); + } } } else { botButton.setVisibility(GONE); } + AndroidUtilities.updateViewVisibilityAnimated(botCommandsMenuButton, canShowBotsMenu && hasBotCommands, 0.5f, animated); + if (animated) { + beginDelayedTransition(); + } updateFieldRight(2); attachLayout.setPivotX(AndroidUtilities.dp((botButton == null || botButton.getVisibility() == GONE) && (notifyButton == null || notifyButton.getVisibility() == GONE) ? 
48 : 96)); } @@ -5872,11 +6111,11 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe return false; } - public void setBotsCount(int count, boolean hasCommands) { + public void setBotsCount(int count, boolean hasCommands, boolean animated) { botCount = count; if (hasBotCommands != hasCommands) { hasBotCommands = hasCommands; - updateBotButton(); + updateBotButton(animated); } } @@ -5953,7 +6192,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } } } - updateBotButton(); + updateBotButton(true); } public boolean didPressedBotButton(final TLRPC.KeyboardButton button, final MessageObject replyMessageObject, final MessageObject messageObject) { @@ -5961,7 +6200,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe return false; } if (button instanceof TLRPC.TL_keyboardButton) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(button.text, dialog_id, replyMessageObject, getThreadMessage(), null, false, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(button.text, dialog_id, replyMessageObject, getThreadMessage(), null, false, null, null, null, true, 0, null); } else if (button instanceof TLRPC.TL_keyboardButtonUrl) { AlertsCreator.showOpenUrlAlert(parentFragment, button.url, false, true); } else if (button instanceof TLRPC.TL_keyboardButtonRequestPhone) { @@ -6103,7 +6342,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } @Override - public void onStickerSelected(View view, TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { + public void onStickerSelected(View view, TLRPC.Document sticker, String query, Object parent, MessageObject.SendAnimationData sendAnimationData, boolean notify, int scheduleDate) { if (trendingStickersAlert != null) { trendingStickersAlert.dismiss(); trendingStickersAlert = null; @@ -6122,7 +6361,7 @@ public class 
ChatActivityEnterView extends FrameLayout implements NotificationCe } setStickersExpanded(false, true, false); } - ChatActivityEnterView.this.onStickerSelected(sticker, query, parent, false, notify, scheduleDate); + ChatActivityEnterView.this.onStickerSelected(sticker, query, parent, sendAnimationData, false, notify, scheduleDate); if ((int) dialog_id == 0 && MessageObject.isGifDocument(sticker)) { accountInstance.getMessagesController().saveGif(parent, sticker); } @@ -6154,7 +6393,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } if (gif instanceof TLRPC.Document) { TLRPC.Document document = (TLRPC.Document) gif; - SendMessagesHelper.getInstance(currentAccount).sendSticker(document, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, scheduleDate); + SendMessagesHelper.getInstance(currentAccount).sendSticker(document, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, null, notify, scheduleDate); MediaDataController.getInstance(currentAccount).addRecentGif(document, (int) (System.currentTimeMillis() / 1000)); if ((int) dialog_id == 0) { accountInstance.getMessagesController().saveGif(parent, document); @@ -6386,9 +6625,9 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe } @Override - public void onStickerSelected(TLRPC.Document sticker, String query, Object parent, boolean clearsInputField, boolean notify, int scheduleDate) { + public void onStickerSelected(TLRPC.Document sticker, String query, Object parent, MessageObject.SendAnimationData sendAnimationData, boolean clearsInputField, boolean notify, int scheduleDate) { if (isInScheduleMode() && scheduleDate == 0) { - AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (n, s) -> onStickerSelected(sticker, query, parent, clearsInputField, n, s)); + AlertsCreator.createScheduleDatePickerDialog(parentActivity, parentFragment.getDialogId(), (n, s) -> 
onStickerSelected(sticker, query, parent, sendAnimationData, clearsInputField, n, s)); } else { if (slowModeTimer > 0 && !isInScheduleMode()) { if (delegate != null) { @@ -6402,7 +6641,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe emojiView.hideSearchKeyboard(); } setStickersExpanded(false, true, false); - SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, notify, scheduleDate); + SendMessagesHelper.getInstance(currentAccount).sendSticker(sticker, query, dialog_id, replyingMessageObject, getThreadMessage(), parent, sendAnimationData, notify, scheduleDate); if (delegate != null) { delegate.onMessageSend(null, true, scheduleDate); } @@ -6512,7 +6751,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe emojiPadding = currentHeight; sizeNotifierLayout.requestLayout(); setEmojiButtonImage(true, true); - updateBotButton(); + updateBotButton(true); onWindowSizeChanged(); if (smoothKeyboard && !keyboardVisible && currentHeight != previusHeight) { panelAnimation = new AnimatorSet(); @@ -6639,7 +6878,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe onWindowSizeChanged(); } } - updateBotButton(); + updateBotButton(true); } if (stickersTabOpen || emojiTabOpen) { @@ -6648,6 +6887,8 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe if (stickersExpanded && show != 1) { setStickersExpanded(false, false, false); } + updateFieldHint(false); + checkBotMenu(); } private void setEmojiButtonImage(boolean byOpen, boolean animated) { @@ -6850,6 +7091,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe lastSizeChangeValue1 = height; lastSizeChangeValue2 = isWidthGreater; keyboardVisible = height > 0; + checkBotMenu(); return; } if (height > AndroidUtilities.dp(50) && keyboardVisible && !AndroidUtilities.isInMultiwindow) { @@ 
-6929,6 +7171,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe boolean oldValue = keyboardVisible; keyboardVisible = height > 0; + checkBotMenu(); if (keyboardVisible && isPopupShowing() && stickersExpansionAnim == null) { showPopup(0, currentPopupContentType); } @@ -6963,7 +7206,7 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe @SuppressWarnings("unchecked") @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { if (emojiView != null) { emojiView.invalidateViews(); } @@ -7926,4 +8169,81 @@ public class ChatActivityEnterView extends FrameLayout implements NotificationCe public RecordCircle getRecordCicle() { return recordCircle; } + + int botCommandLastPosition = -1; + int botCommandLastTop; + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (botCommandsMenuButton != null && botCommandsMenuButton.getTag() != null) { + botCommandsMenuButton.measure(widthMeasureSpec, heightMeasureSpec); + for (int i = 0; i < emojiButton.length; i++) { + ((MarginLayoutParams) emojiButton[i].getLayoutParams()).leftMargin = AndroidUtilities.dp(10) + botCommandsMenuButton.getMeasuredWidth(); + } + ((MarginLayoutParams) messageEditText.getLayoutParams()).leftMargin = AndroidUtilities.dp(57) + botCommandsMenuButton.getMeasuredWidth(); + } else { + for (int i = 0; i < emojiButton.length; i++) { + ((MarginLayoutParams) emojiButton[i].getLayoutParams()).leftMargin = AndroidUtilities.dp(3); + } + ((MarginLayoutParams) messageEditText.getLayoutParams()).leftMargin = AndroidUtilities.dp(50); + } + if (botCommandsMenuContainer != null) { + int padding; + if (botCommandsAdapter.getItemCount() > 4) { + padding = Math.max(0, sizeNotifierLayout.getMeasuredHeight() - AndroidUtilities.dp(58 + 36 * 4.3f)); + } else { + padding = Math.max(0, 
sizeNotifierLayout.getMeasuredHeight() - AndroidUtilities.dp(58 + 36 * Math.max(1, Math.min(4, botCommandsAdapter.getItemCount())))); + } + + if (botCommandsMenuContainer.listView.getPaddingTop() != padding) { + botCommandsMenuContainer.listView.setTopGlowOffset(padding); + if (botCommandLastPosition == -1 && botCommandsMenuContainer.getVisibility() == View.VISIBLE && botCommandsMenuContainer.listView.getLayoutManager() != null) { + LinearLayoutManager layoutManager = (LinearLayoutManager) botCommandsMenuContainer.listView.getLayoutManager(); + int p = layoutManager.findFirstVisibleItemPosition(); + if (p >= 0) { + View view = layoutManager.findViewByPosition(p); + if (view != null) { + botCommandLastPosition = p; + botCommandLastTop = view.getTop() - botCommandsMenuContainer.listView.getPaddingTop(); + } + } + } + botCommandsMenuContainer.listView.setPadding(0, padding, 0, AndroidUtilities.dp(8)); + } + } + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + if (botCommandLastPosition != -1) { + LinearLayoutManager layoutManager = (LinearLayoutManager) botCommandsMenuContainer.listView.getLayoutManager(); + if (layoutManager != null) { + layoutManager.scrollToPositionWithOffset(botCommandLastPosition, botCommandLastTop); + } + botCommandLastPosition = -1; + } + } + + private void beginDelayedTransition() { + animationParamsX.put(emojiButton[0], emojiButton[0].getX()); + animationParamsX.put(emojiButton[1], emojiButton[1].getX()); + animationParamsX.put(messageEditText, messageEditText.getX()); + } + + public void setBotInfo(SparseArray botInfo) { + if (botCommandsAdapter != null) { + botCommandsAdapter.setBotInfo(botInfo); + } + } + + public boolean botCommandsMenuIsShowing() { + return botCommandsMenuButton != null && botCommandsMenuButton.isOpened(); + } + + public void hideBotCommands() { + 
botCommandsMenuButton.setOpened(false); + botCommandsMenuContainer.dismiss(); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java index 751bfc9b7..b4995ee21 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlert.java @@ -54,7 +54,6 @@ import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; -import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; @@ -104,7 +103,7 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N } public interface ChatAttachViewDelegate { - void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate); + void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate, boolean forceDocument); View getRevealView(); void didSelectBot(TLRPC.User user); void onCameraOpened(); @@ -389,9 +388,11 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N protected boolean paused; - private Paint attachButtonPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private final Paint attachButtonPaint = new Paint(Paint.ANTI_ALIAS_FLAG); private float bottomPannelTranslation; - private boolean forceDarkTheme; + private final boolean forceDarkTheme; + private final boolean showingFromDialog; + private class AttachButton extends FrameLayout { @@ -564,9 +565,10 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N float currentPanTranslationY; @SuppressLint("ClickableViewAccessibility") - public ChatAttachAlert(Context context, final BaseFragment parentFragment, boolean 
forceDarkTheme) { + public ChatAttachAlert(Context context, final BaseFragment parentFragment, boolean forceDarkTheme, boolean showingFromDialog) { super(context, false); this.forceDarkTheme = forceDarkTheme; + this.showingFromDialog = showingFromDialog; drawNavigationBar = true; inBubbleMode = parentFragment instanceof ChatActivity && parentFragment.isInBubbleMode(); openInterpolator = new OvershootInterpolator(0.7f); @@ -1141,7 +1143,11 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N } }); fragment.setMaxSelectedPhotos(maxSelectedPhotos, allowOrder); - baseFragment.presentFragment(fragment); + if (showingFromDialog) { + baseFragment.showAsSheet(fragment); + } else { + baseFragment.presentFragment(fragment); + } dismiss(); }); @@ -1246,7 +1252,7 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N } showLayout(pollLayout); } else { - delegate.didPressedButton((Integer) view.getTag(), true, true, 0); + delegate.didPressedButton((Integer) view.getTag(), true, true, 0, false); } int left = view.getLeft(); int right = view.getRight(); @@ -1749,7 +1755,7 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N } applyCaption(); buttonPressed = true; - delegate.didPressedButton(7, true, notify, scheduleDate); + delegate.didPressedButton(7, true, notify, scheduleDate, false); } private void showLayout(AttachAlertLayout layout) { @@ -1880,9 +1886,9 @@ public class ChatAttachAlert extends BottomSheet implements NotificationCenter.N layouts[4] = documentLayout = new ChatAttachAlertDocumentLayout(this, getContext(), false); documentLayout.setDelegate(new ChatAttachAlertDocumentLayout.DocumentSelectActivityDelegate() { @Override - public void didSelectFiles(ArrayList files, String caption, boolean notify, int scheduleDate) { + public void didSelectFiles(ArrayList files, String caption, ArrayList fmessages, boolean notify, int scheduleDate) { if (baseFragment instanceof ChatActivity) 
{ - ((ChatActivity) baseFragment).didSelectFiles(files, caption, notify, scheduleDate); + ((ChatActivity) baseFragment).didSelectFiles(files, caption, fmessages, notify, scheduleDate); } else if (baseFragment instanceof PassportActivity) { ((PassportActivity) baseFragment).didSelectFiles(files, caption, notify, scheduleDate); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertContactsLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertContactsLayout.java index 8f496793d..f2d6937da 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertContactsLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertContactsLayout.java @@ -569,7 +569,7 @@ public class ChatAttachAlertContactsLayout extends ChatAttachAlert.AttachAlertLa } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { if (section == 0 || section == getSectionCount() - 1) { return false; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertDocumentLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertDocumentLayout.java index 7e3ae8c7a..5c5c4e818 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertDocumentLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertDocumentLayout.java @@ -8,27 +8,29 @@ package org.telegram.ui.Components; +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.AnimatorSet; +import android.animation.ObjectAnimator; import android.annotation.SuppressLint; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; -import android.graphics.PorterDuff; -import android.graphics.PorterDuffColorFilter; import 
android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.os.Environment; import android.os.StatFs; import android.text.TextUtils; -import android.util.TypedValue; +import android.util.SparseArray; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; +import android.view.ViewTreeObserver; import android.widget.EditText; -import android.widget.ImageView; -import android.widget.LinearLayout; -import android.widget.TextView; +import android.widget.FrameLayout; +import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BuildVars; @@ -36,20 +38,27 @@ import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; +import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; import org.telegram.messenger.SendMessagesHelper; import org.telegram.messenger.SharedConfig; +import org.telegram.messenger.UserConfig; import org.telegram.messenger.Utilities; +import org.telegram.tgnet.TLObject; +import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.ActionBar; import org.telegram.ui.ActionBar.ActionBarMenu; import org.telegram.ui.ActionBar.ActionBarMenuItem; import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; +import org.telegram.ui.Adapters.FiltersView; +import org.telegram.ui.Cells.GraySectionCell; import org.telegram.ui.Cells.HeaderCell; import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.SharedDocumentCell; import org.telegram.ui.ChatActivity; +import org.telegram.ui.FilteredSearchView; import org.telegram.ui.PhotoPickerActivity; import java.io.BufferedReader; @@ -59,6 +68,8 @@ import java.util.ArrayList; import java.util.Collections; import 
java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; +import java.util.Locale; import java.util.StringTokenizer; import androidx.recyclerview.widget.LinearLayoutManager; @@ -68,7 +79,7 @@ import androidx.recyclerview.widget.RecyclerView; public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLayout { public interface DocumentSelectActivityDelegate { - void didSelectFiles(ArrayList files, String caption, boolean notify, int scheduleDate); + void didSelectFiles(ArrayList files, String caption, ArrayList fmessages, boolean notify, int scheduleDate); void didSelectPhotos(ArrayList photos, boolean notify, int scheduleDate); void startDocumentSelectActivity(); @@ -84,14 +95,16 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa private ActionBarMenuItem searchItem; private ActionBarMenuItem sortItem; + private FiltersView filtersView; + private AnimatorSet filtersViewAnimator; + private FlickerLoadingView loadingView; + private boolean sendPressed; private boolean ignoreLayout; - private LinearLayout emptyView; - private ImageView emptyImageView; - private TextView emptyTitleTextView; - private TextView emptySubtitleTextView; + private StickerEmptyView emptyView; + private float additionalTranslationY; private boolean hasFiles; @@ -102,6 +115,7 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa private DocumentSelectActivityDelegate delegate; private HashMap selectedFiles = new HashMap<>(); private ArrayList selectedFilesOrder = new ArrayList<>(); + private HashMap selectedMessages = new HashMap<>(); private boolean scrolling; private ArrayList recentItems = new ArrayList<>(); private int maxSelectedFiles = -1; @@ -194,12 +208,19 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa listView.setAdapter(listAdapter); } listAdapter.notifyDataSetChanged(); - searchAdapter.search(null); + searchAdapter.search(null, true); } @Override 
public void onTextChanged(EditText editText) { - searchAdapter.search(editText.getText().toString()); + searchAdapter.search(editText.getText().toString(), false); + } + + @Override + public void onSearchFilterCleared(FiltersView.MediaFilterData filterData) { + searchAdapter.removeSearchFilter(filterData); + searchAdapter.search(searchItem.getSearchField().getText().toString(), false); + searchAdapter.updateFiltersView(true, null, null,true); } }); searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); @@ -212,34 +233,25 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa sortItem = menu.addItem(sort_button, sortByName ? R.drawable.contacts_sort_time : R.drawable.contacts_sort_name); sortItem.setContentDescription(LocaleController.getString("AccDescrContactSorting", R.string.AccDescrContactSorting)); - emptyView = new LinearLayout(context); - emptyView.setOrientation(LinearLayout.VERTICAL); - emptyView.setGravity(Gravity.CENTER); - emptyView.setVisibility(View.GONE); + addView(loadingView = new FlickerLoadingView(context)); + + emptyView = new StickerEmptyView(context, loadingView, StickerEmptyView.STICKER_TYPE_SEARCH) { + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY + additionalTranslationY); + } + + @Override + public float getTranslationY() { + return super.getTranslationY() - additionalTranslationY; + } + }; addView(emptyView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + emptyView.setVisibility(View.GONE); emptyView.setOnTouchListener((v, event) -> true); - emptyImageView = new ImageView(context); - emptyImageView.setImageResource(R.drawable.files_empty); - emptyImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_dialogEmptyImage), PorterDuff.Mode.MULTIPLY)); - emptyView.addView(emptyImageView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); - - 
emptyTitleTextView = new TextView(context); - emptyTitleTextView.setTextColor(Theme.getColor(Theme.key_dialogEmptyText)); - emptyTitleTextView.setGravity(Gravity.CENTER); - emptyTitleTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - emptyTitleTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 17); - emptyTitleTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), 0); - emptyView.addView(emptyTitleTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 11, 0, 0)); - - emptySubtitleTextView = new TextView(context); - emptySubtitleTextView.setTextColor(Theme.getColor(Theme.key_dialogEmptyText)); - emptySubtitleTextView.setGravity(Gravity.CENTER); - emptySubtitleTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); - emptySubtitleTextView.setPadding(AndroidUtilities.dp(40), 0, AndroidUtilities.dp(40), 0); - emptyView.addView(emptySubtitleTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 6, 0, 0)); - listView = new RecyclerListView(context); + listView.setSectionsType(2); listView.setVerticalScrollBarEnabled(false); listView.setLayoutManager(layoutManager = new FillLastLinearLayoutManager(context, LinearLayoutManager.VERTICAL, false, AndroidUtilities.dp(56), listView) { @Override @@ -261,7 +273,6 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa startSmoothScroll(linearSmoothScroller); } }); - listView.setEmptyView(emptyView); listView.setClipToPadding(false); listView.setAdapter(listAdapter = new ListAdapter(context)); listView.setPadding(0, 0, 0, AndroidUtilities.dp(48)); @@ -273,6 +284,16 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa public void onScrolled(RecyclerView recyclerView, int dx, int dy) { parentAlert.updateLayout(ChatAttachAlertDocumentLayout.this, true, dy); updateEmptyViewPosition(); + + if (listView.getAdapter() == searchAdapter) 
{ + int firstVisibleItem = layoutManager.findFirstVisibleItemPosition(); + int lastVisibleItem = layoutManager.findLastVisibleItemPosition(); + int visibleItemCount = Math.abs(lastVisibleItem - firstVisibleItem) + 1; + int totalItemCount = recyclerView.getAdapter().getItemCount(); + if (visibleItemCount > 0 && lastVisibleItem >= totalItemCount - 10) { + searchAdapter.loadMore(); + } + } } @Override @@ -296,100 +317,116 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa }); listView.setOnItemClickListener((view, position) -> { - ListItem item; + Object object; if (listView.getAdapter() == listAdapter) { - item = listAdapter.getItem(position); + object = listAdapter.getItem(position); } else { - item = searchAdapter.getItem(position); + object = searchAdapter.getItem(position); } - if (item == null) { - return; - } - File file = item.file; - if (file == null) { - if (item.icon == R.drawable.files_gallery) { - HashMap selectedPhotos = new HashMap<>(); - ArrayList selectedPhotosOrder = new ArrayList<>(); - ChatActivity chatActivity; - if (parentAlert.baseFragment instanceof ChatActivity) { - chatActivity = (ChatActivity) parentAlert.baseFragment; - } else { - chatActivity = null; - } - - PhotoPickerActivity fragment = new PhotoPickerActivity(0, MediaController.allMediaAlbumEntry, selectedPhotos, selectedPhotosOrder, 0, chatActivity != null, chatActivity); - fragment.setDocumentsPicker(true); - fragment.setDelegate(new PhotoPickerActivity.PhotoPickerActivityDelegate() { - @Override - public void selectedPhotosChanged() { - + if (object instanceof ListItem) { + ListItem item = (ListItem) object; + File file = item.file; + if (file == null) { + if (item.icon == R.drawable.files_gallery) { + HashMap selectedPhotos = new HashMap<>(); + ArrayList selectedPhotosOrder = new ArrayList<>(); + ChatActivity chatActivity; + if (parentAlert.baseFragment instanceof ChatActivity) { + chatActivity = (ChatActivity) parentAlert.baseFragment; + } else { + 
chatActivity = null; } - @Override - public void actionButtonPressed(boolean canceled, boolean notify, int scheduleDate) { - if (!canceled) { - sendSelectedPhotos(selectedPhotos, selectedPhotosOrder, notify, scheduleDate); + PhotoPickerActivity fragment = new PhotoPickerActivity(0, MediaController.allMediaAlbumEntry, selectedPhotos, selectedPhotosOrder, 0, chatActivity != null, chatActivity, false); + fragment.setDocumentsPicker(true); + fragment.setDelegate(new PhotoPickerActivity.PhotoPickerActivityDelegate() { + @Override + public void selectedPhotosChanged() { + } - } - @Override - public void onCaptionChanged(CharSequence text) { + @Override + public void actionButtonPressed(boolean canceled, boolean notify, int scheduleDate) { + if (!canceled) { + sendSelectedPhotos(selectedPhotos, selectedPhotosOrder, notify, scheduleDate); + } + } - } + @Override + public void onCaptionChanged(CharSequence text) { - @Override - public void onOpenInPressed() { - delegate.startDocumentSelectActivity(); + } + + @Override + public void onOpenInPressed() { + delegate.startDocumentSelectActivity(); + } + }); + fragment.setMaxSelectedPhotos(maxSelectedFiles, false); + parentAlert.baseFragment.presentFragment(fragment); + parentAlert.dismiss(); + } else if (item.icon == R.drawable.files_music) { + if (delegate != null) { + delegate.startMusicSelectActivity(); } - }); - fragment.setMaxSelectedPhotos(maxSelectedFiles, false); - parentAlert.baseFragment.presentFragment(fragment); - parentAlert.dismiss(); - } else if (item.icon == R.drawable.files_music) { - if (delegate != null) { - delegate.startMusicSelectActivity(); + } else if (!BuildVars.NO_SCOPED_STORAGE && item.icon == R.drawable.files_storage) { + delegate.startDocumentSelectActivity(); + } else { + int top = getTopForScroll(); + HistoryEntry he = history.remove(history.size() - 1); + parentAlert.actionBar.setTitle(he.title); + if (he.dir != null) { + listFiles(he.dir); + } else { + listRoots(); + } + updateSearchButton(); + 
layoutManager.scrollToPositionWithOffset(0, top); + } + } else if (file.isDirectory()) { + HistoryEntry he = new HistoryEntry(); + View child = listView.getChildAt(0); + RecyclerView.ViewHolder holder = listView.findContainingViewHolder(child); + if (holder != null) { + he.scrollItem = holder.getAdapterPosition(); + he.scrollOffset = child.getTop(); + he.dir = currentDir; + he.title = parentAlert.actionBar.getTitle(); + history.add(he); + if (!listFiles(file)) { + history.remove(he); + return; + } + parentAlert.actionBar.setTitle(item.title); } } else { - int top = getTopForScroll(); - HistoryEntry he = history.remove(history.size() - 1); - parentAlert.actionBar.setTitle(he.title); - if (he.dir != null) { - listFiles(he.dir); - } else { - listRoots(); - } - updateSearchButton(); - layoutManager.scrollToPositionWithOffset(0, top); + onItemClick(view, item); } - } else if (file.isDirectory()) { - HistoryEntry he = new HistoryEntry(); - View child = listView.getChildAt(0); - RecyclerView.ViewHolder holder = listView.findContainingViewHolder(child); - he.scrollItem = holder.getAdapterPosition(); - he.scrollOffset = child.getTop(); - he.dir = currentDir; - he.title = parentAlert.actionBar.getTitle(); - history.add(he); - if (!listFiles(file)) { - history.remove(he); - return; - } - parentAlert.actionBar.setTitle(item.title); } else { - onItemClick(view, item); + onItemClick(view, object); } }); listView.setOnItemLongClickListener((view, position) -> { - ListItem item; + Object object; if (listView.getAdapter() == listAdapter) { - item = listAdapter.getItem(position); + object = listAdapter.getItem(position); } else { - item = searchAdapter.getItem(position); + object = searchAdapter.getItem(position); } - return onItemClick(view, item); + return onItemClick(view, object); }); + filtersView = new FiltersView(context); + filtersView.setOnItemClickListener((view, position) -> { + filtersView.cancelClickRunnables(true); + 
searchAdapter.addSearchFilter(filtersView.getFilterAt(position)); + }); + filtersView.setBackgroundColor(Theme.getColor(Theme.key_dialogBackground)); + addView(filtersView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP)); + filtersView.setTranslationY(-AndroidUtilities.dp(44)); + filtersView.setVisibility(INVISIBLE); + listRoots(); updateSearchButton(); updateEmptyView(); @@ -481,6 +518,8 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa listView.setPadding(0, padding, 0, AndroidUtilities.dp(48)); ignoreLayout = false; } + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) filtersView.getLayoutParams(); + layoutParams.topMargin = ActionBar.getCurrentActionBarHeight(); } @Override @@ -503,55 +542,80 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa @Override int getSelectedItemsCount() { - return selectedFiles.size(); + return selectedFiles.size() + selectedMessages.size(); } @Override void sendSelectedItems(boolean notify, int scheduleDate) { - if (selectedFiles.size() == 0 || delegate == null || sendPressed) { + if (selectedFiles.size() == 0 && selectedMessages.size() == 0 || delegate == null || sendPressed) { return; } sendPressed = true; + ArrayList fmessages = new ArrayList<>(); + Iterator idIterator = selectedMessages.keySet().iterator(); + while (idIterator.hasNext()) { + FilteredSearchView.MessageHashId hashId = idIterator.next(); + fmessages.add(selectedMessages.get(hashId)); + } ArrayList files = new ArrayList<>(selectedFilesOrder); - delegate.didSelectFiles(files, parentAlert.commentTextView.getText().toString(), notify, scheduleDate); + delegate.didSelectFiles(files, parentAlert.commentTextView.getText().toString(), fmessages, notify, scheduleDate); + parentAlert.dismiss(); } - private boolean onItemClick(View view, ListItem item) { - if (item == null || item.file == null || item.file.isDirectory()) { + private boolean 
onItemClick(View view, Object object) { + boolean add; + if (object instanceof ListItem) { + ListItem item = (ListItem) object; + if (item.file == null || item.file.isDirectory()) { + return false; + } + String path = item.file.getAbsolutePath(); + if (selectedFiles.containsKey(path)) { + selectedFiles.remove(path); + selectedFilesOrder.remove(path); + add = false; + } else { + if (!item.file.canRead()) { + showErrorBox(LocaleController.getString("AccessError", R.string.AccessError)); + return false; + } + if (canSelectOnlyImageFiles && item.thumb == null) { + showErrorBox(LocaleController.formatString("PassportUploadNotImage", R.string.PassportUploadNotImage)); + return false; + } + if (item.file.length() > FileLoader.MAX_FILE_SIZE) { + showErrorBox(LocaleController.formatString("FileUploadLimit", R.string.FileUploadLimit, AndroidUtilities.formatFileSize(FileLoader.MAX_FILE_SIZE))); + return false; + } + if (maxSelectedFiles >= 0 && selectedFiles.size() >= maxSelectedFiles) { + showErrorBox(LocaleController.formatString("PassportUploadMaxReached", R.string.PassportUploadMaxReached, LocaleController.formatPluralString("Files", maxSelectedFiles))); + return false; + } + if (item.file.length() == 0) { + return false; + } + selectedFiles.put(path, item); + selectedFilesOrder.add(path); + add = true; + } + scrolling = false; + } else if (object instanceof MessageObject) { + MessageObject message = (MessageObject) object; + FilteredSearchView.MessageHashId hashId = new FilteredSearchView.MessageHashId(message.getId(), message.getDialogId()); + if (selectedMessages.containsKey(hashId)) { + selectedMessages.remove(hashId); + add = false; + } else { + if (selectedMessages.size() >= 100) { + return false; + } + selectedMessages.put(hashId, message); + add = true; + } + } else { return false; } - String path = item.file.getAbsolutePath(); - boolean add; - if (selectedFiles.containsKey(path)) { - selectedFiles.remove(path); - selectedFilesOrder.remove(path); - add = false; - 
} else { - if (!item.file.canRead()) { - showErrorBox(LocaleController.getString("AccessError", R.string.AccessError)); - return false; - } - if (canSelectOnlyImageFiles && item.thumb == null) { - showErrorBox(LocaleController.formatString("PassportUploadNotImage", R.string.PassportUploadNotImage)); - return false; - } - if (item.file.length() > FileLoader.MAX_FILE_SIZE) { - showErrorBox(LocaleController.formatString("FileUploadLimit", R.string.FileUploadLimit, AndroidUtilities.formatFileSize(FileLoader.MAX_FILE_SIZE))); - return false; - } - if (maxSelectedFiles >= 0 && selectedFiles.size() >= maxSelectedFiles) { - showErrorBox(LocaleController.formatString("PassportUploadMaxReached", R.string.PassportUploadMaxReached, LocaleController.formatPluralString("Files", maxSelectedFiles))); - return false; - } - if (item.file.length() == 0) { - return false; - } - selectedFiles.put(path, item); - selectedFilesOrder.add(path); - add = true; - } - scrolling = false; if (view instanceof SharedDocumentCell) { ((SharedDocumentCell) view).setChecked(add, true); } @@ -599,23 +663,25 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa public void loadRecentFiles() { try { File[] files = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS).listFiles(); - for (int a = 0; a < files.length; a++) { - File file = files[a]; - if (file.isDirectory()) { - continue; + if (files != null) { + for (int a = 0; a < files.length; a++) { + File file = files[a]; + if (file.isDirectory()) { + continue; + } + ListItem item = new ListItem(); + item.title = file.getName(); + item.file = file; + String fname = file.getName(); + String[] sp = fname.split("\\."); + item.ext = sp.length > 1 ? 
sp[sp.length - 1] : "?"; + item.subtitle = AndroidUtilities.formatFileSize(file.length()); + fname = fname.toLowerCase(); + if (fname.endsWith(".jpg") || fname.endsWith(".png") || fname.endsWith(".gif") || fname.endsWith(".jpeg")) { + item.thumb = file.getAbsolutePath(); + } + recentItems.add(item); } - ListItem item = new ListItem(); - item.title = file.getName(); - item.file = file; - String fname = file.getName(); - String[] sp = fname.split("\\."); - item.ext = sp.length > 1 ? sp[sp.length - 1] : "?"; - item.subtitle = AndroidUtilities.formatFileSize(file.length()); - fname = fname.toLowerCase(); - if (fname.endsWith(".jpg") || fname.endsWith(".png") || fname.endsWith(".gif") || fname.endsWith(".jpeg")) { - item.thumb = file.getAbsolutePath(); - } - recentItems.add(item); } sortRecentItems(); } catch (Exception e) { @@ -652,14 +718,12 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa return -1; } else if (rhs.file == null) { return 1; - } else if (lhs.file == null && rhs.file == null) { - return 0; } boolean isDir1 = lhs.file.isDirectory(); boolean isDir2 = rhs.file.isDirectory(); if (isDir1 != isDir2) { return isDir1 ? 
-1 : 1; - } else if (isDir1 && isDir2 || sortByName) { + } else if (isDir1 || sortByName) { return lhs.file.getName().compareToIgnoreCase(rhs.file.getName()); } else { long lm = lhs.file.lastModified(); @@ -689,6 +753,8 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa @Override void onShow() { selectedFiles.clear(); + selectedMessages.clear(); + searchAdapter.currentSearchFilters.clear(); selectedFilesOrder.clear(); history.clear(); listRoots(); @@ -713,19 +779,15 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa if (child == null) { return; } - emptyView.setTranslationY((emptyView.getMeasuredHeight() - getMeasuredHeight() + child.getTop()) / 2); + float oldTranslation = emptyView.getTranslationY(); + additionalTranslationY = (emptyView.getMeasuredHeight() - getMeasuredHeight() + child.getTop()) / 2; + emptyView.setTranslationY(oldTranslation); } private void updateEmptyView() { - if (searching) { - emptyTitleTextView.setText(LocaleController.getString("NoFilesFound", R.string.NoFilesFound)); - } else { - emptyTitleTextView.setText(LocaleController.getString("NoFilesFound", R.string.NoFilesFound)); - emptySubtitleTextView.setText(LocaleController.getString("NoFilesInfo", R.string.NoFilesInfo)); - } boolean visible; if (listView.getAdapter() == searchAdapter) { - visible = searchAdapter.searchResult.isEmpty(); + visible = searchAdapter.searchResult.isEmpty() && searchAdapter.sections.isEmpty(); } else { visible = listAdapter.getItemCount() == 1; } @@ -738,12 +800,7 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa return; } if (!searchItem.isSearchFieldVisible()) { - searchItem.setVisibility(hasFiles ? 
View.VISIBLE : View.GONE); - } - if (history.isEmpty()) { - searchItem.setSearchFieldHint(LocaleController.getString("SearchRecentFiles", R.string.SearchRecentFiles)); - } else { - searchItem.setSearchFieldHint(LocaleController.getString("Search", R.string.Search)); + searchItem.setVisibility(hasFiles || history.isEmpty() ? View.VISIBLE : View.GONE); } } @@ -885,87 +942,88 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa items.clear(); HashSet paths = new HashSet<>(); - String defaultPath = Environment.getExternalStorageDirectory().getPath(); - boolean isDefaultPathRemovable = Environment.isExternalStorageRemovable(); - String defaultPathState = Environment.getExternalStorageState(); - if (defaultPathState.equals(Environment.MEDIA_MOUNTED) || defaultPathState.equals(Environment.MEDIA_MOUNTED_READ_ONLY)) { + if (!BuildVars.NO_SCOPED_STORAGE) { ListItem ext = new ListItem(); - if (Environment.isExternalStorageRemovable()) { - ext.title = LocaleController.getString("SdCard", R.string.SdCard); - ext.icon = R.drawable.files_internal; - ext.subtitle = LocaleController.getString("ExternalFolderInfo", R.string.ExternalFolderInfo); - } else { - ext.title = LocaleController.getString("InternalStorage", R.string.InternalStorage); - ext.icon = R.drawable.files_storage; - ext.subtitle = LocaleController.getString("InternalFolderInfo", R.string.InternalFolderInfo); - } - ext.file = Environment.getExternalStorageDirectory(); + ext.title = LocaleController.getString("InternalStorage", R.string.InternalStorage); + ext.icon = R.drawable.files_storage; + ext.subtitle = LocaleController.getString("InternalFolderInfo", R.string.InternalFolderInfo); items.add(ext); - paths.add(defaultPath); - } + } else { + String defaultPath = Environment.getExternalStorageDirectory().getPath(); + String defaultPathState = Environment.getExternalStorageState(); + if (defaultPathState.equals(Environment.MEDIA_MOUNTED) || 
defaultPathState.equals(Environment.MEDIA_MOUNTED_READ_ONLY)) { + ListItem ext = new ListItem(); + if (Environment.isExternalStorageRemovable()) { + ext.title = LocaleController.getString("SdCard", R.string.SdCard); + ext.icon = R.drawable.files_internal; + ext.subtitle = LocaleController.getString("ExternalFolderInfo", R.string.ExternalFolderInfo); + } else { + ext.title = LocaleController.getString("InternalStorage", R.string.InternalStorage); + ext.icon = R.drawable.files_storage; + ext.subtitle = LocaleController.getString("InternalFolderInfo", R.string.InternalFolderInfo); + } + ext.file = Environment.getExternalStorageDirectory(); + items.add(ext); + paths.add(defaultPath); + } - BufferedReader bufferedReader = null; - try { - bufferedReader = new BufferedReader(new FileReader("/proc/mounts")); - String line; - while ((line = bufferedReader.readLine()) != null) { - if (line.contains("vfat") || line.contains("/mnt")) { - if (BuildVars.LOGS_ENABLED) { - FileLog.d(line); - } - StringTokenizer tokens = new StringTokenizer(line, " "); - String unused = tokens.nextToken(); - String path = tokens.nextToken(); - if (paths.contains(path)) { - continue; - } - if (line.contains("/dev/block/vold")) { - if (!line.contains("/mnt/secure") && !line.contains("/mnt/asec") && !line.contains("/mnt/obb") && !line.contains("/dev/mapper") && !line.contains("tmpfs")) { - if (!new File(path).isDirectory()) { - int index = path.lastIndexOf('/'); - if (index != -1) { - String newPath = "/storage/" + path.substring(index + 1); - if (new File(newPath).isDirectory()) { - path = newPath; + BufferedReader bufferedReader = null; + try { + bufferedReader = new BufferedReader(new FileReader("/proc/mounts")); + String line; + while ((line = bufferedReader.readLine()) != null) { + if (line.contains("vfat") || line.contains("/mnt")) { + if (BuildVars.LOGS_ENABLED) { + FileLog.d(line); + } + StringTokenizer tokens = new StringTokenizer(line, " "); + String unused = tokens.nextToken(); + String 
path = tokens.nextToken(); + if (paths.contains(path)) { + continue; + } + if (line.contains("/dev/block/vold")) { + if (!line.contains("/mnt/secure") && !line.contains("/mnt/asec") && !line.contains("/mnt/obb") && !line.contains("/dev/mapper") && !line.contains("tmpfs")) { + if (!new File(path).isDirectory()) { + int index = path.lastIndexOf('/'); + if (index != -1) { + String newPath = "/storage/" + path.substring(index + 1); + if (new File(newPath).isDirectory()) { + path = newPath; + } } } - } - paths.add(path); - try { - ListItem item = new ListItem(); - if (path.toLowerCase().contains("sd")) { - item.title = LocaleController.getString("SdCard", R.string.SdCard); - } else { - item.title = LocaleController.getString("ExternalStorage", R.string.ExternalStorage); + paths.add(path); + try { + ListItem item = new ListItem(); + if (path.toLowerCase().contains("sd")) { + item.title = LocaleController.getString("SdCard", R.string.SdCard); + } else { + item.title = LocaleController.getString("ExternalStorage", R.string.ExternalStorage); + } + item.subtitle = LocaleController.getString("ExternalFolderInfo", R.string.ExternalFolderInfo); + item.icon = R.drawable.files_internal; + item.file = new File(path); + items.add(item); + } catch (Exception e) { + FileLog.e(e); } - item.subtitle = LocaleController.getString("ExternalFolderInfo", R.string.ExternalFolderInfo); - item.icon = R.drawable.files_internal; - item.file = new File(path); - items.add(item); - } catch (Exception e) { - FileLog.e(e); } } } } - } - } catch (Exception e) { - FileLog.e(e); - } finally { - if (bufferedReader != null) { - try { - bufferedReader.close(); - } catch (Exception e) { - FileLog.e(e); + } catch (Exception e) { + FileLog.e(e); + } finally { + if (bufferedReader != null) { + try { + bufferedReader.close(); + } catch (Exception e) { + FileLog.e(e); + } } } } - /*ListItem fs = new ListItem(); - fs.title = "/"; - fs.subtitle = LocaleController.getString("SystemRoot", R.string.SystemRoot); - 
fs.icon = R.drawable.files_folder; - fs.file = new File("/"); - items.add(fs);*/ ListItem fs; try { @@ -1132,22 +1190,64 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa } } - public class SearchAdapter extends RecyclerListView.SelectionAdapter { + public class SearchAdapter extends RecyclerListView.SectionsAdapter { private Context mContext; + private ArrayList searchResult = new ArrayList<>(); private Runnable searchRunnable; - private int reqId = 0; - private int lastReqId; + private Runnable localSearchRunnable; + + private long currentSearchDialogId; + private FiltersView.MediaFilterData currentSearchFilter; + private long currentSearchMinDate; + private long currentSearchMaxDate; + + private int searchIndex; + private int nextSearchRate; + + private final FilteredSearchView.MessageHashId messageHashIdTmp = new FilteredSearchView.MessageHashId(0, 0); + + private String lastSearchFilterQueryString; + private String lastMessagesSearchString; + private String currentDataQuery; + + private ArrayList localTipChats = new ArrayList<>(); + private ArrayList localTipDates = new ArrayList<>(); + + public ArrayList messages = new ArrayList<>(); + public SparseArray messagesById = new SparseArray<>(); + public ArrayList sections = new ArrayList<>(); + public HashMap> sectionArrays = new HashMap<>(); + + private ArrayList currentSearchFilters = new ArrayList<>(); + + private boolean isLoading; + private int requestIndex; + private boolean firstLoading = true; + private int animationIndex = -1; + private boolean endReached; + + private Runnable clearCurrentResultsRunnable = new Runnable() { + @Override + public void run() { + if (isLoading) { + messages.clear(); + sections.clear(); + sectionArrays.clear(); + notifyDataSetChanged(); + } + } + }; public SearchAdapter(Context context) { mContext = context; } - public void search(final String query) { - if (searchRunnable != null) { - AndroidUtilities.cancelRunOnUIThread(searchRunnable); - 
searchRunnable = null; + public void search(final String query, boolean reset) { + if (localSearchRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(localSearchRunnable); + localSearchRunnable = null; } if (TextUtils.isEmpty(query)) { if (!searchResult.isEmpty()) { @@ -1158,11 +1258,12 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa } notifyDataSetChanged(); } else { - AndroidUtilities.runOnUIThread(searchRunnable = () -> { + AndroidUtilities.runOnUIThread(localSearchRunnable = () -> { final ArrayList copy = new ArrayList<>(items); if (history.isEmpty()) { copy.addAll(0, recentItems); } + boolean hasFilters = !currentSearchFilters.isEmpty(); Utilities.searchQueue.postRunnable(() -> { String search1 = query.trim().toLowerCase(); if (search1.length() == 0) { @@ -1181,21 +1282,23 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa ArrayList resultArray = new ArrayList<>(); - for (int a = 0; a < copy.size(); a++) { - ListItem entry = copy.get(a); - if (entry.file == null || entry.file.isDirectory()) { - continue; - } - for (int b = 0; b < search.length; b++) { - String q = search[b]; - - boolean ok = false; - if (entry.title != null) { - ok = entry.title.toLowerCase().contains(q); + if (!hasFilters) { + for (int a = 0; a < copy.size(); a++) { + ListItem entry = copy.get(a); + if (entry.file == null || entry.file.isDirectory()) { + continue; } - if (ok) { - resultArray.add(entry); - break; + for (int b = 0; b < search.length; b++) { + String q = search[b]; + + boolean ok = false; + if (entry.title != null) { + ok = entry.title.toLowerCase().contains(q); + } + if (ok) { + resultArray.add(entry); + break; + } } } } @@ -1204,6 +1307,408 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa }); }, 300); } + + if (!canSelectOnlyImageFiles && history.isEmpty()) { + int dialogId = 0; + long minDate = 0; + long maxDate = 0; + for (int i = 0; i < currentSearchFilters.size(); 
i++) { + FiltersView.MediaFilterData data = currentSearchFilters.get(i); + if (data.filterType == FiltersView.FILTER_TYPE_CHAT) { + if (data.chat instanceof TLRPC.User) { + dialogId = ((TLRPC.User) data.chat).id; + } else if (data.chat instanceof TLRPC.Chat) { + dialogId = -((TLRPC.Chat) data.chat).id; + } + } else if (data.filterType == FiltersView.FILTER_TYPE_DATE) { + minDate = data.dateData.minDate; + maxDate = data.dateData.maxDate; + } + } + + searchGlobal(dialogId, minDate, maxDate, FiltersView.filters[2], query, reset); + } + } + + public void loadMore() { + if (searchAdapter.isLoading || searchAdapter.endReached || currentSearchFilter == null) { + return; + } + searchGlobal(currentSearchDialogId, currentSearchMinDate, currentSearchMaxDate, currentSearchFilter, lastMessagesSearchString, false); + } + + public void removeSearchFilter(FiltersView.MediaFilterData filterData) { + currentSearchFilters.remove(filterData); + } + + public void clear() { + currentSearchFilters.clear(); + } + + private void addSearchFilter(FiltersView.MediaFilterData filter) { + if (!currentSearchFilters.isEmpty()) { + for (int i = 0; i < currentSearchFilters.size(); i++) { + if (filter.isSameType(currentSearchFilters.get(i))) { + return; + } + } + } + currentSearchFilters.add(filter); + parentAlert.actionBar.setSearchFilter(filter); + parentAlert.actionBar.setSearchFieldText(""); + updateFiltersView(true, null, null, true); + } + + private void updateFiltersView(boolean showMediaFilters, ArrayList users, ArrayList dates, boolean animated) { + boolean hasMediaFilter = false; + boolean hasUserFilter = false; + boolean hasDataFilter = false; + + for (int i = 0; i < currentSearchFilters.size(); i++) { + if (currentSearchFilters.get(i).isMedia()) { + hasMediaFilter = true; + } else if (currentSearchFilters.get(i).filterType == FiltersView.FILTER_TYPE_CHAT) { + hasUserFilter = true; + } else if (currentSearchFilters.get(i).filterType == FiltersView.FILTER_TYPE_DATE) { + hasDataFilter = 
true; + } + } + + boolean visible = false; + boolean hasUsersOrDates = (users != null && !users.isEmpty()) || (dates != null && !dates.isEmpty()); + if (!hasMediaFilter && !hasUsersOrDates && showMediaFilters) { + } else if (hasUsersOrDates) { + ArrayList finalUsers = (users != null && !users.isEmpty() && !hasUserFilter) ? users : null; + ArrayList finalDates = (dates != null && !dates.isEmpty() && !hasDataFilter) ? dates : null; + if (finalUsers != null || finalDates != null) { + visible = true; + filtersView.setUsersAndDates(finalUsers, finalDates, false); + } + } + if (!visible) { + filtersView.setUsersAndDates(null, null, false); + } + filtersView.setEnabled(visible); + if (visible && filtersView.getTag() != null || !visible && filtersView.getTag() == null) { + return; + } + filtersView.setTag(visible ? 1 : null); + if (filtersViewAnimator != null) { + filtersViewAnimator.cancel(); + } + if (animated) { + if (visible) { + filtersView.setVisibility(VISIBLE); + } + filtersViewAnimator = new AnimatorSet(); + filtersViewAnimator.playTogether( + ObjectAnimator.ofFloat(listView, View.TRANSLATION_Y, visible ? AndroidUtilities.dp(44) : 0), + ObjectAnimator.ofFloat(filtersView, View.TRANSLATION_Y, visible ? 0 : -AndroidUtilities.dp(44)), + ObjectAnimator.ofFloat(loadingView, View.TRANSLATION_Y, visible ? AndroidUtilities.dp(44) : 0), + ObjectAnimator.ofFloat(emptyView, View.TRANSLATION_Y, visible ? AndroidUtilities.dp(44) : 0)); + filtersViewAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (filtersView.getTag() == null) { + filtersView.setVisibility(INVISIBLE); + } + filtersViewAnimator = null; + } + }); + filtersViewAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT); + filtersViewAnimator.setDuration(180); + filtersViewAnimator.start(); + } else { + filtersView.getAdapter().notifyDataSetChanged(); + listView.setTranslationY(visible ? 
AndroidUtilities.dp(44) : 0); + filtersView.setTranslationY(visible ? 0 : -AndroidUtilities.dp(44)); + loadingView.setTranslationY(visible ? AndroidUtilities.dp(44) : 0); + emptyView.setTranslationY(visible ? AndroidUtilities.dp(44) : 0); + filtersView.setVisibility(visible ? VISIBLE : INVISIBLE); + } + } + + private void searchGlobal(long dialogId, long minDate, long maxDate, FiltersView.MediaFilterData searchFilter, String query, boolean clearOldResults) { + String currentSearchFilterQueryString = String.format(Locale.ENGLISH, "%d%d%d%d%s", dialogId, minDate, maxDate, searchFilter.filterType, query); + boolean filterAndQueryIsSame = lastSearchFilterQueryString != null && lastSearchFilterQueryString.equals(currentSearchFilterQueryString); + boolean forceClear = !filterAndQueryIsSame && clearOldResults; + boolean filterIsSame = dialogId == currentSearchDialogId && currentSearchMinDate == minDate && currentSearchMaxDate == maxDate; + currentSearchFilter = searchFilter; + this.currentSearchDialogId = dialogId; + this.currentSearchMinDate = minDate; + this.currentSearchMaxDate = maxDate; + if (searchRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(searchRunnable); + } + AndroidUtilities.cancelRunOnUIThread(clearCurrentResultsRunnable); + if (filterAndQueryIsSame && clearOldResults) { + return; + } + if (forceClear) { + messages.clear(); + sections.clear(); + sectionArrays.clear(); + isLoading = true; + emptyView.setVisibility(View.VISIBLE); + notifyDataSetChanged(); + requestIndex++; + firstLoading = true; + if (listView.getPinnedHeader() != null) { + listView.getPinnedHeader().setAlpha(0); + } + localTipChats.clear(); + localTipDates.clear(); + } + isLoading = true; + notifyDataSetChanged(); + + if (!filterAndQueryIsSame) { + clearCurrentResultsRunnable.run(); + emptyView.showProgress(true, !clearOldResults); + } + + if (TextUtils.isEmpty(query)) { + localTipDates.clear(); + localTipChats.clear(); + updateFiltersView(false, null, null, true); + } + 
requestIndex++; + final int requestId = requestIndex; + + AccountInstance accountInstance = AccountInstance.getInstance(UserConfig.selectedAccount); + + AndroidUtilities.runOnUIThread(searchRunnable = () -> { + TLObject request; + + ArrayList resultArray = null; + if (dialogId != 0) { + final TLRPC.TL_messages_search req = new TLRPC.TL_messages_search(); + req.q = query; + req.limit = 20; + req.filter = currentSearchFilter.filter; + req.peer = accountInstance.getMessagesController().getInputPeer((int) dialogId); + if (minDate > 0) { + req.min_date = (int) (minDate / 1000); + } + if (maxDate > 0) { + req.max_date = (int) (maxDate / 1000); + } + if (filterAndQueryIsSame && query.equals(lastMessagesSearchString) && !messages.isEmpty()) { + MessageObject lastMessage = messages.get(messages.size() - 1); + req.offset_id = lastMessage.getId(); + } else { + req.offset_id = 0; + } + request = req; + } else { + if (!TextUtils.isEmpty(query)) { + resultArray = new ArrayList<>(); + ArrayList resultArrayNames = new ArrayList<>(); + ArrayList encUsers = new ArrayList<>(); + accountInstance.getMessagesStorage().localSearch(0, query, resultArray, resultArrayNames, encUsers, -1); + } + + final TLRPC.TL_messages_searchGlobal req = new TLRPC.TL_messages_searchGlobal(); + req.limit = 20; + req.q = query; + req.filter = currentSearchFilter.filter; + if (minDate > 0) { + req.min_date = (int) (minDate / 1000); + } + if (maxDate > 0) { + req.max_date = (int) (maxDate / 1000); + } + if (filterAndQueryIsSame && query.equals(lastMessagesSearchString) && !messages.isEmpty()) { + MessageObject lastMessage = messages.get(messages.size() - 1); + req.offset_id = lastMessage.getId(); + req.offset_rate = nextSearchRate; + int id; + if (lastMessage.messageOwner.peer_id.channel_id != 0) { + id = -lastMessage.messageOwner.peer_id.channel_id; + } else if (lastMessage.messageOwner.peer_id.chat_id != 0) { + id = -lastMessage.messageOwner.peer_id.chat_id; + } else { + id = 
lastMessage.messageOwner.peer_id.user_id; + } + req.offset_peer = accountInstance.getMessagesController().getInputPeer(id); + } else { + req.offset_rate = 0; + req.offset_id = 0; + req.offset_peer = new TLRPC.TL_inputPeerEmpty(); + } + request = req; + } + + lastMessagesSearchString = query; + lastSearchFilterQueryString = currentSearchFilterQueryString; + + ArrayList finalResultArray = resultArray; + final ArrayList dateData = new ArrayList<>(); + FiltersView.fillTipDates(lastMessagesSearchString, dateData); + accountInstance.getConnectionsManager().sendRequest(request, (response, error) -> { + ArrayList messageObjects = new ArrayList<>(); + if (error == null) { + TLRPC.messages_Messages res = (TLRPC.messages_Messages) response; + int n = res.messages.size(); + for (int i = 0; i < n; i++) { + MessageObject messageObject = new MessageObject(accountInstance.getCurrentAccount(), res.messages.get(i), false, true); + messageObject.setQuery(query); + messageObjects.add(messageObject); + } + } + + AndroidUtilities.runOnUIThread(() -> { + if (requestId != requestIndex) { + return; + } + isLoading = false; + if (error != null) { + emptyView.title.setText(LocaleController.getString("SearchEmptyViewTitle2", R.string.SearchEmptyViewTitle2)); + emptyView.subtitle.setVisibility(View.VISIBLE); + emptyView.subtitle.setText(LocaleController.getString("SearchEmptyViewFilteredSubtitle2", R.string.SearchEmptyViewFilteredSubtitle2)); + emptyView.showProgress(false, true); + return; + } + + emptyView.showProgress(false); + + TLRPC.messages_Messages res = (TLRPC.messages_Messages) response; + nextSearchRate = res.next_rate; + accountInstance.getMessagesStorage().putUsersAndChats(res.users, res.chats, true, true); + accountInstance.getMessagesController().putUsers(res.users, false); + accountInstance.getMessagesController().putChats(res.chats, false); + if (!filterAndQueryIsSame) { + messages.clear(); + messagesById.clear(); + sections.clear(); + sectionArrays.clear(); + } + int 
totalCount = res.count; + currentDataQuery = query; + int n = messageObjects.size(); + for (int i = 0; i < n; i++) { + MessageObject messageObject = messageObjects.get(i); + ArrayList messageObjectsByDate = sectionArrays.get(messageObject.monthKey); + if (messageObjectsByDate == null) { + messageObjectsByDate = new ArrayList<>(); + sectionArrays.put(messageObject.monthKey, messageObjectsByDate); + sections.add(messageObject.monthKey); + } + messageObjectsByDate.add(messageObject); + messages.add(messageObject); + messagesById.put(messageObject.getId(), messageObject); + } + if (messages.size() > totalCount) { + totalCount = messages.size(); + } + endReached = messages.size() >= totalCount; + + if (messages.isEmpty()) { + if (TextUtils.isEmpty(currentDataQuery) && dialogId == 0 && minDate == 0) { + emptyView.title.setText(LocaleController.getString("SearchEmptyViewTitle", R.string.SearchEmptyViewTitle)); + emptyView.subtitle.setVisibility(View.VISIBLE); + emptyView.subtitle.setText(LocaleController.getString("SearchEmptyViewFilteredSubtitleFiles", R.string.SearchEmptyViewFilteredSubtitleFiles)); + } else { + emptyView.title.setText(LocaleController.getString("SearchEmptyViewTitle2", R.string.SearchEmptyViewTitle2)); + emptyView.subtitle.setVisibility(View.VISIBLE); + emptyView.subtitle.setText(LocaleController.getString("SearchEmptyViewFilteredSubtitle2", R.string.SearchEmptyViewFilteredSubtitle2)); + } + } + + if (!filterAndQueryIsSame) { + localTipChats.clear(); + if (finalResultArray != null) { + localTipChats.addAll(finalResultArray); + } + if (query.length() >= 3 && (LocaleController.getString("SavedMessages", R.string.SavedMessages).toLowerCase().startsWith(query) || "saved messages".startsWith(query))) { + boolean found = false; + for (int i = 0; i < localTipChats.size(); i++) { + if (localTipChats.get(i) instanceof TLRPC.User) + if (UserConfig.getInstance(UserConfig.selectedAccount).getCurrentUser().id == ((TLRPC.User) localTipChats.get(i)).id) { + found = 
true; + break; + } + } + if (!found) { + localTipChats.add(0, UserConfig.getInstance(UserConfig.selectedAccount).getCurrentUser()); + } + } + localTipDates.clear(); + localTipDates.addAll(dateData); + updateFiltersView(TextUtils.isEmpty(currentDataQuery), localTipChats, localTipDates, true); + } + firstLoading = false; + View progressView = null; + int progressViewPosition = -1; + for (int i = 0; i < n; i++) { + View child = listView.getChildAt(i); + if (child instanceof FlickerLoadingView) { + progressView = child; + progressViewPosition = listView.getChildAdapterPosition(child); + } + } + final View finalProgressView = progressView; + if (progressView != null) { + listView.removeView(progressView); + } + if (loadingView.getVisibility() == View.VISIBLE && listView.getChildCount() <= 1 || progressView != null) { + int finalProgressViewPosition = progressViewPosition; + getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + getViewTreeObserver().removeOnPreDrawListener(this); + int n = listView.getChildCount(); + AnimatorSet animatorSet = new AnimatorSet(); + for (int i = 0; i < n; i++) { + View child = listView.getChildAt(i); + if (finalProgressView != null) { + if (listView.getChildAdapterPosition(child) < finalProgressViewPosition) { + continue; + } + } + child.setAlpha(0); + int s = Math.min(listView.getMeasuredHeight(), Math.max(0, child.getTop())); + int delay = (int) ((s / (float) listView.getMeasuredHeight()) * 100); + ObjectAnimator a = ObjectAnimator.ofFloat(child, View.ALPHA, 0, 1f); + a.setStartDelay(delay); + a.setDuration(200); + animatorSet.playTogether(a); + } + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + accountInstance.getNotificationCenter().onAnimationFinish(animationIndex); + } + }); + animationIndex = accountInstance.getNotificationCenter().setAnimationInProgress(animationIndex, null); + 
animatorSet.start(); + + if (finalProgressView != null && finalProgressView.getParent() == null) { + listView.addView(finalProgressView); + RecyclerView.LayoutManager layoutManager = listView.getLayoutManager(); + if (layoutManager != null) { + layoutManager.ignoreView(finalProgressView); + Animator animator = ObjectAnimator.ofFloat(finalProgressView, ALPHA, finalProgressView.getAlpha(), 0); + animator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + finalProgressView.setAlpha(1f); + layoutManager.stopIgnoringView(finalProgressView); + listView.removeView(finalProgressView); + } + }); + animator.start(); + } + } + return true; + } + }); + } + notifyDataSetChanged(); + }); + }); + }, (filterAndQueryIsSame && !messages.isEmpty()) ? 0 : 350); + loadingView.setViewType(FlickerLoadingView.FILES_TYPE); } private void updateSearchResults(final ArrayList result, String query) { @@ -1212,7 +1717,6 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa if (listView.getAdapter() != searchAdapter) { listView.setAdapter(searchAdapter); } - emptySubtitleTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("NoFilesFoundInfo", R.string.NoFilesFoundInfo, query))); } searchResult = result; notifyDataSetChanged(); @@ -1220,63 +1724,202 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa } @Override - public boolean isEnabled(RecyclerView.ViewHolder holder) { - return holder.getItemViewType() == 0; + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { + int type = holder.getItemViewType(); + return type == 1 || type == 4; } @Override - public int getItemCount() { - return searchResult.size() + 1; + public int getSectionCount() { + int count = 2; + if (!sections.isEmpty()) { + count += sections.size() + (endReached ? 
0 : 1); + } + return count; } - public ListItem getItem(int position) { - if (position < searchResult.size()) { - return searchResult.get(position); + @Override + public Object getItem(int section, int position) { + if (section == 0) { + if (position < searchResult.size()) { + return searchResult.get(position); + } + } else { + section--; + if (section < sections.size()) { + ArrayList arrayList = sectionArrays.get(sections.get(section)); + if (arrayList != null) { + return arrayList.get(position - (section == 0 && searchResult.isEmpty() ? 0 : 1)); + } + } } return null; } + @Override + public int getCountForSection(int section) { + if (section == 0) { + return searchResult.size(); + } + section--; + if (section < sections.size()) { + ArrayList arrayList = sectionArrays.get(sections.get(section)); + if (arrayList != null) { + return arrayList.size() + (section == 0 && searchResult.isEmpty() ? 0 : 1); + } else { + return 0; + } + } + return 1; + } + + @Override + public View getSectionHeaderView(int section, View view) { + GraySectionCell sectionCell = (GraySectionCell) view; + if (sectionCell == null) { + sectionCell = new GraySectionCell(mContext); + sectionCell.setBackgroundColor(Theme.getColor(Theme.key_graySection) & 0xf2ffffff); + } + if (section == 0 || section == 1 && searchResult.isEmpty()) { + sectionCell.setAlpha(0f); + return sectionCell; + } + section--; + if (section < sections.size()) { + sectionCell.setAlpha(1.0f); + String name = sections.get(section); + ArrayList messageObjects = sectionArrays.get(name); + if (messageObjects != null) { + MessageObject messageObject = messageObjects.get(0); + String str; + if (section == 0 && !searchResult.isEmpty()) { + str = LocaleController.getString("GlobalSearch", R.string.GlobalSearch); + } else { + str = LocaleController.formatSectionDate(messageObject.messageOwner.date); + } + sectionCell.setText(str); + } + } + return view; + } + @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, 
int viewType) { View view; switch (viewType) { case 0: - view = new SharedDocumentCell(mContext, SharedDocumentCell.VIEW_TYPE_PICKER); + view = new GraySectionCell(mContext); break; case 1: + case 4: + SharedDocumentCell documentCell = new SharedDocumentCell(mContext, viewType == 1 ? SharedDocumentCell.VIEW_TYPE_PICKER : SharedDocumentCell.VIEW_TYPE_GLOBAL_SEARCH); + documentCell.setDrawDownloadIcon(false); + view = documentCell; + break; + case 2: + FlickerLoadingView flickerLoadingView = new FlickerLoadingView(mContext); + flickerLoadingView.setViewType(FlickerLoadingView.FILES_TYPE); + flickerLoadingView.setIsSingleCell(true); + view = flickerLoadingView; + break; + case 3: default: view = new View(mContext); break; } + view.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); return new RecyclerListView.Holder(view); } @Override - public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { - if (holder.getItemViewType() == 0) { - ListItem item = getItem(position); - SharedDocumentCell documentCell = (SharedDocumentCell) holder.itemView; - - if (item.icon != 0) { - documentCell.setTextAndValueAndTypeAndThumb(item.title, item.subtitle, null, null, item.icon, false); - } else { - String type = item.ext.toUpperCase().substring(0, Math.min(item.ext.length(), 4)); - documentCell.setTextAndValueAndTypeAndThumb(item.title, item.subtitle, type, item.thumb, 0, false); + public void onBindViewHolder(int section, int position, RecyclerView.ViewHolder holder) { + int viewType = holder.getItemViewType(); + if (viewType == 2 || viewType == 3) { + return; + } + switch (viewType) { + case 0: { + section--; + String name = sections.get(section); + ArrayList messageObjects = sectionArrays.get(name); + if (messageObjects == null) { + return; + } + MessageObject messageObject = messageObjects.get(0); + String str; + if (section == 0 && !searchResult.isEmpty()) { + str = 
LocaleController.getString("GlobalSearch", R.string.GlobalSearch); + } else { + str = LocaleController.formatSectionDate(messageObject.messageOwner.date); + } + ((GraySectionCell) holder.itemView).setText(str); + break; } - if (item.file != null) { - documentCell.setChecked(selectedFiles.containsKey(item.file.toString()), !scrolling); - } else { - documentCell.setChecked(false, !scrolling); + case 1: + case 4: { + SharedDocumentCell sharedDocumentCell = (SharedDocumentCell) holder.itemView; + if (section == 0) { + ListItem item = (ListItem) getItem(position); + SharedDocumentCell documentCell = (SharedDocumentCell) holder.itemView; + if (item.icon != 0) { + documentCell.setTextAndValueAndTypeAndThumb(item.title, item.subtitle, null, null, item.icon, false); + } else { + String type = item.ext.toUpperCase().substring(0, Math.min(item.ext.length(), 4)); + documentCell.setTextAndValueAndTypeAndThumb(item.title, item.subtitle, type, item.thumb, 0, false); + } + if (item.file != null) { + documentCell.setChecked(selectedFiles.containsKey(item.file.toString()), !scrolling); + } else { + documentCell.setChecked(false, !scrolling); + } + } else { + section--; + if (section != 0 || !searchResult.isEmpty()) { + position--; + } + String name = sections.get(section); + ArrayList messageObjects = sectionArrays.get(name); + if (messageObjects == null) { + return; + } + MessageObject messageObject = messageObjects.get(position); + boolean animated = sharedDocumentCell.getMessage() != null && sharedDocumentCell.getMessage().getId() == messageObject.getId(); + sharedDocumentCell.setDocument(messageObject, position != messageObjects.size() - 1 || section == sections.size() - 1 && isLoading); + sharedDocumentCell.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + sharedDocumentCell.getViewTreeObserver().removeOnPreDrawListener(this); + if (parentAlert.actionBar.isActionModeShowed()) { + 
messageHashIdTmp.set(messageObject.getId(), messageObject.getDialogId()); + sharedDocumentCell.setChecked(selectedMessages.containsKey(messageHashIdTmp), animated); + } else { + sharedDocumentCell.setChecked(false, animated); + } + return true; + } + }); + } + break; } } } @Override - public int getItemViewType(int i) { - if (i < searchResult.size()) { - return 0; + public int getItemViewType(int section, int position) { + if (section == 0) { + return 1; + } else if (section == getSectionCount() - 1) { + return 3; } - return 1; + section--; + if (section < sections.size()) { + if ((section != 0 || !searchResult.isEmpty()) && position == 0) { + return 0; + } else { + return 4; + } + } + return 2; } @Override @@ -1284,15 +1927,22 @@ public class ChatAttachAlertDocumentLayout extends ChatAttachAlert.AttachAlertLa super.notifyDataSetChanged(); updateEmptyView(); } + + @Override + public String getLetter(int position) { + return null; + } + + @Override + public int getPositionForScrollProgress(float progress) { + return 0; + } } @Override ArrayList getThemeDescriptions() { ArrayList themeDescriptions = new ArrayList<>(); themeDescriptions.add(new ThemeDescription(searchItem.getSearchField(), ThemeDescription.FLAG_CURSORCOLOR, null, null, null, null, Theme.key_dialogTextBlack)); - themeDescriptions.add(new ThemeDescription(emptyImageView, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_dialogEmptyImage)); - themeDescriptions.add(new ThemeDescription(emptyTitleTextView, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_dialogEmptyText)); - themeDescriptions.add(new ThemeDescription(emptySubtitleTextView, ThemeDescription.FLAG_IMAGECOLOR, null, null, null, null, Theme.key_dialogEmptyText)); themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_LISTGLOWCOLOR, null, null, null, null, Theme.key_dialogScrollGlow)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java index 4ee764590..94b5fd23d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAttachAlertPhotoLayout.java @@ -19,7 +19,10 @@ import android.content.Intent; import android.content.pm.PackageManager; import android.graphics.Bitmap; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.Outline; +import android.graphics.Paint; +import android.graphics.Path; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.Rect; @@ -82,6 +85,7 @@ import java.util.Collections; import java.util.HashMap; import androidx.annotation.Keep; +import androidx.core.graphics.ColorUtils; import androidx.exifinterface.media.ExifInterface; import androidx.recyclerview.widget.GridLayoutManager; import androidx.recyclerview.widget.LinearLayoutManager; @@ -191,6 +195,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou private final static int open_in = 2; boolean forceDarkTheme; + private int animationIndex = -1; private class BasePhotoProvider extends PhotoViewer.EmptyPhotoViewerProvider { @Override @@ -298,6 +303,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou cell.showCheck(false); return object; } + return null; } @@ -360,7 +366,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { MediaController.PhotoEntry photoEntry = getPhotoEntryAtPosition(index); if (photoEntry != null) { photoEntry.editedInfo = videoEditedInfo; @@ -369,7 +375,7 @@ public class 
ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou addToSelectedPhotos(photoEntry, -1); } parentAlert.applyCaption(); - parentAlert.delegate.didPressedButton(7, true, notify, scheduleDate); + parentAlert.delegate.didPressedButton(7, true, notify, scheduleDate, forceDocument); } }; @@ -494,6 +500,12 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } return super.onInterceptTouchEvent(e); } + + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + super.onLayout(changed, l, t, r, b); + PhotoViewer.getInstance().checkCurrentImageVisibility(); + } }; gridView.setAdapter(adapter = new PhotoAttachAdapter(context, true)); adapter.createCache(); @@ -617,7 +629,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou openCamera(true); } else { if (parentAlert.delegate != null) { - parentAlert.delegate.didPressedButton(0, false, true, 0); + parentAlert.delegate.didPressedButton(0, false, true, 0, false); } } } @@ -625,7 +637,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou gridView.setOnItemLongClickListener((view, position) -> { if (position == 0 && selectedAlbumEntry == galleryAlbumEntry) { if (parentAlert.delegate != null) { - parentAlert.delegate.didPressedButton(0, false, true, 0); + parentAlert.delegate.didPressedButton(0, false, true, 0, false); } return true; } else if (view instanceof PhotoAttachPhotoCell) { @@ -680,14 +692,44 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou progressView.showTextView(); } - recordTime = new TextView(context); + Paint recordPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + recordPaint.setColor(0xffda564d); + recordTime = new TextView(context) { + + float alpha = 0f; + boolean isIncr; + + @Override + protected void onDraw(Canvas canvas) { + + recordPaint.setAlpha((int) (125 + 130 * alpha)); + + if (!isIncr) { + alpha -= 16 / 600.0f; + if (alpha <= 0) { + alpha 
= 0; + isIncr = true; + } + } else { + alpha += 16 / 600.0f; + if (alpha >= 1) { + alpha = 1; + isIncr = false; + } + } + super.onDraw(canvas); + canvas.drawCircle(AndroidUtilities.dp(14), getMeasuredHeight() / 2, AndroidUtilities.dp(4), recordPaint); + invalidate(); + } + }; + AndroidUtilities.updateViewVisibilityAnimated(recordTime, false, 1f, false); recordTime.setBackgroundResource(R.drawable.system); recordTime.getBackground().setColorFilter(new PorterDuffColorFilter(0x66000000, PorterDuff.Mode.MULTIPLY)); recordTime.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); recordTime.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); recordTime.setAlpha(0.0f); recordTime.setTextColor(0xffffffff); - recordTime.setPadding(AndroidUtilities.dp(10), AndroidUtilities.dp(5), AndroidUtilities.dp(10), AndroidUtilities.dp(5)); + recordTime.setPadding(AndroidUtilities.dp(24), AndroidUtilities.dp(5), AndroidUtilities.dp(10), AndroidUtilities.dp(5)); container.addView(recordTime, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 0, 16, 0, 0)); cameraPanel = new FrameLayout(context) { @@ -726,6 +768,14 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou flashModeButton[a].layout(cx3 - flashModeButton[a].getMeasuredWidth() / 2, cy3 - flashModeButton[a].getMeasuredHeight() / 2, cx3 + flashModeButton[a].getMeasuredWidth() / 2, cy3 + flashModeButton[a].getMeasuredHeight() / 2); } } + + @Override + public void setAlpha(float alpha) { + super.setAlpha(alpha); + if (parentAlert != null) { + parentAlert.setOverlayNavBarColor(ColorUtils.setAlphaComponent(Color.BLACK, (int) (alpha * 255))); + } + } }; cameraPanel.setVisibility(View.GONE); cameraPanel.setAlpha(0.0f); @@ -782,12 +832,12 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } for (int a = 0; a < 2; a++) { - flashModeButton[a].setAlpha(0.0f); + 
flashModeButton[a].animate().alpha(0f).translationX(AndroidUtilities.dp(30)).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); } - switchCameraButton.setAlpha(0.0f); - tooltipTextView.setAlpha(0.0f); + switchCameraButton.animate().alpha(0f).translationX(-AndroidUtilities.dp(30)).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + tooltipTextView.animate().alpha(0f).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); outputFile = AndroidUtilities.generateVideoPath(parentAlert.baseFragment instanceof ChatActivity && ((ChatActivity) parentAlert.baseFragment).isSecretChat()); - recordTime.setAlpha(1.0f); + AndroidUtilities.updateViewVisibilityAnimated(recordTime, true); recordTime.setText(AndroidUtilities.formatLongDuration(0)); videoRecordTime = 0; videoRecordRunnable = () -> { @@ -814,8 +864,9 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou photoEntry.cropState.lockedAspectRatio = 1.0f; } openPhotoViewer(photoEntry, false, false); - }, () -> AndroidUtilities.runOnUIThread(videoRecordRunnable, 1000)); + }, () -> AndroidUtilities.runOnUIThread(videoRecordRunnable, 1000), cameraView); shutterButton.setState(ShutterButton.State.RECORDING, true); + cameraView.runHaptic(); return true; } @@ -871,8 +922,10 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou photoEntry.canDeleteAfter = true; openPhotoViewer(photoEntry, sameTakePictureOrientation, false); }); + cameraView.startTakePictureAnimation(); } + @Override public boolean onTranslationChanged(float x, float y) { boolean isPortrait = container.getWidth() < container.getHeight(); @@ -908,6 +961,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } canSaveCameraPreview = false; cameraView.switchCamera(); + cameraView.startSwitchingAnimation(); ObjectAnimator animator = ObjectAnimator.ofFloat(switchCameraButton, View.SCALE_X, 0.0f).setDuration(100); 
animator.addListener(new AnimatorListenerAdapter() { @Override @@ -917,6 +971,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } }); animator.start(); + }); switchCameraButton.setContentDescription(LocaleController.getString("AccDescrSwitchCamera", R.string.AccDescrSwitchCamera)); @@ -945,7 +1000,8 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou ObjectAnimator.ofFloat(nextImage, View.TRANSLATION_Y, -AndroidUtilities.dp(48), 0), ObjectAnimator.ofFloat(currentImage, View.ALPHA, 1.0f, 0.0f), ObjectAnimator.ofFloat(nextImage, View.ALPHA, 0.0f, 1.0f)); - animatorSet.setDuration(200); + animatorSet.setDuration(220); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animator) { @@ -1163,7 +1219,8 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou ObjectAnimator.ofFloat(flashModeButton[0], View.ALPHA, 0.0f), ObjectAnimator.ofFloat(flashModeButton[1], View.ALPHA, 0.0f), ObjectAnimator.ofFloat(cameraPhotoRecyclerView, View.ALPHA, 0.0f)); - animatorSet.setDuration(200); + animatorSet.setDuration(220); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); animatorSet.start(); } } @@ -1209,12 +1266,14 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou if (parentAlert.baseFragment == null) { return; } + for (int a = 0; a < 2; a++) { - flashModeButton[a].setAlpha(1.0f); + flashModeButton[a].animate().alpha(1f).translationX(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); } - switchCameraButton.setAlpha(1.0f); - tooltipTextView.setAlpha(1.0f); - recordTime.setAlpha(0.0f); + switchCameraButton.animate().alpha(1f).translationX(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + 
tooltipTextView.animate().alpha(1f).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + AndroidUtilities.updateViewVisibilityAnimated(recordTime, false); + AndroidUtilities.cancelRunOnUIThread(videoRecordRunnable); videoRecordRunnable = null; AndroidUtilities.unlockOrientation(parentAlert.baseFragment.getParentActivity()); @@ -1316,7 +1375,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou public void needAddMorePhotos() { cancelTakingPhotos = false; if (mediaFromExternalCamera) { - parentAlert.delegate.didPressedButton(0, true, true, 0); + parentAlert.delegate.didPressedButton(0, true, true, 0, false); return; } if (!cameraOpened) { @@ -1329,7 +1388,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { if (cameraPhotos.isEmpty() || parentAlert.baseFragment == null) { return; } @@ -1343,13 +1402,13 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } parentAlert.applyCaption(); - parentAlert.delegate.didPressedButton(8, true, notify, scheduleDate); + closeCamera(false); + parentAlert.delegate.didPressedButton(forceDocument ? 
4 : 8, true, notify, scheduleDate, forceDocument); cameraPhotos.clear(); selectedPhotosOrder.clear(); selectedPhotos.clear(); adapter.notifyDataSetChanged(); cameraAttachAdapter.notifyDataSetChanged(); - closeCamera(false); parentAlert.dismiss(); } @@ -1359,7 +1418,7 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou return false; } int locked = Settings.System.getInt(parentAlert.baseFragment.getParentActivity().getContentResolver(), Settings.System.ACCELEROMETER_ROTATION, 0); - return sameTakePictureOrientation || locked == 1; + return true;//sameTakePictureOrientation || locked == 1; } @Override @@ -1463,6 +1522,9 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou int count = gridView.getChildCount(); for (int a = 0; a < count; a++) { View view = gridView.getChildAt(a); + if (view.getTop() >= gridView.getMeasuredHeight() - parentAlert.getClipLayoutBottom()) { + continue; + } if (view instanceof PhotoAttachPhotoCell) { PhotoAttachPhotoCell cell = (PhotoAttachPhotoCell) view; if ((Integer) cell.getImageView().getTag() == index) { @@ -1530,8 +1592,9 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } + boolean cameraExpanded; private void openCamera(boolean animated) { - if (cameraView == null || cameraInitAnimation != null || !cameraView.isInitied()) { + if (cameraView == null || cameraInitAnimation != null || !cameraView.isInitied() || parentAlert.isDismissed()) { return; } if (parentAlert.avatarPicker == 2 || parentAlert.baseFragment instanceof ChatActivity) { @@ -1554,10 +1617,14 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou cameraPanel.setVisibility(View.VISIBLE); cameraPanel.setTag(null); animateCameraValues[0] = 0; - animateCameraValues[1] = (int) (itemSize - cameraViewOffsetX); - animateCameraValues[2] = (int) (itemSize - cameraViewOffsetY - cameraViewOffsetBottomY); + animateCameraValues[1] = itemSize; + 
animateCameraValues[2] = itemSize; + additionCloseCameraY = 0; + cameraExpanded = true; if (animated) { + setCameraOpenProgress(0); cameraAnimationInProgress = true; + animationIndex = NotificationCenter.getInstance(parentAlert.currentAccount).setAnimationInProgress(animationIndex, null); ArrayList animators = new ArrayList<>(); animators.add(ObjectAnimator.ofFloat(this, "cameraOpenProgress", 0.0f, 1.0f)); animators.add(ObjectAnimator.ofFloat(cameraPanel, View.ALPHA, 1.0f)); @@ -1571,17 +1638,24 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } AnimatorSet animatorSet = new AnimatorSet(); animatorSet.playTogether(animators); - animatorSet.setDuration(200); + animatorSet.setDuration(350); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animator) { + NotificationCenter.getInstance(parentAlert.currentAccount).onAnimationFinish(animationIndex); cameraAnimationInProgress = false; if (Build.VERSION.SDK_INT >= 21 && cameraView != null) { cameraView.invalidateOutline(); + } else if (cameraView != null) { + cameraView.invalidate(); } if (cameraOpened) { parentAlert.delegate.onCameraOpened(); } + if (Build.VERSION.SDK_INT >= 21 && cameraView != null) { + cameraView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_FULLSCREEN); + } } }); animatorSet.start(); @@ -1597,9 +1671,9 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } parentAlert.delegate.onCameraOpened(); - } - if (Build.VERSION.SDK_INT >= 21) { - cameraView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_FULLSCREEN); + if (Build.VERSION.SDK_INT >= 21) { + cameraView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_FULLSCREEN); + } } cameraOpened = true; cameraView.setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO); 
@@ -1625,18 +1699,41 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou return; } if (cameraView == null) { - cameraView = new CameraView(parentAlert.baseFragment.getParentActivity(), parentAlert.openWithFrontFaceCamera); + cameraView = new CameraView(parentAlert.baseFragment.getParentActivity(), parentAlert.openWithFrontFaceCamera) { + @Override + protected void dispatchDraw(Canvas canvas) { + if (Build.VERSION.SDK_INT >= 21) { + super.dispatchDraw(canvas); + } else { + if (cameraAnimationInProgress) { + AndroidUtilities.rectTmp.set(animationClipLeft + cameraViewOffsetX * (1f - cameraOpenProgress), animationClipTop + cameraViewOffsetY * (1f - cameraOpenProgress), animationClipRight, animationClipBottom); + } else if (!cameraAnimationInProgress && !cameraOpened) { + AndroidUtilities.rectTmp.set(cameraViewOffsetX, cameraViewOffsetY, getMeasuredWidth(), getMeasuredHeight()); + } else { + AndroidUtilities.rectTmp.set(0 , 0, getMeasuredWidth(), getMeasuredHeight()); + } + canvas.clipRect(AndroidUtilities.rectTmp); + super.dispatchDraw(canvas); + canvas.restore(); + } + + } + }; + cameraView.setRecordFile(AndroidUtilities.generateVideoPath(parentAlert.baseFragment instanceof ChatActivity && ((ChatActivity) parentAlert.baseFragment).isSecretChat())); cameraView.setFocusable(true); if (Build.VERSION.SDK_INT >= 21) { + Path path = new Path(); + float[] radii = new float[8]; + cameraView.setOutlineProvider(new ViewOutlineProvider() { @Override public void getOutline(View view, Outline outline) { if (cameraAnimationInProgress) { - int rad = AndroidUtilities.dp(8 * parentAlert.cornerRadius * cameraOpenProgress); - outline.setRoundRect(0, 0, view.getMeasuredWidth() + rad, view.getMeasuredHeight() + rad, rad); + AndroidUtilities.rectTmp.set(animationClipLeft + cameraViewOffsetX * (1f - cameraOpenProgress), animationClipTop + cameraViewOffsetY * (1f - cameraOpenProgress), animationClipRight, animationClipBottom); + outline.setRect((int) 
AndroidUtilities.rectTmp.left,(int) AndroidUtilities.rectTmp.top, (int) AndroidUtilities.rectTmp.right, (int) AndroidUtilities.rectTmp.bottom); } else if (!cameraAnimationInProgress && !cameraOpened) { int rad = AndroidUtilities.dp(8 * parentAlert.cornerRadius); - outline.setRoundRect(0, 0, view.getMeasuredWidth() + rad, view.getMeasuredHeight() + rad, rad); + outline.setRoundRect((int) cameraViewOffsetX, (int) cameraViewOffsetY, view.getMeasuredWidth() + rad, view.getMeasuredHeight() + rad, rad); } else { outline.setRect(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight()); } @@ -1743,9 +1840,9 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou cameraZoom = 0.0f; } cameraView.setTranslationX(cameraViewLocation[0]); - cameraView.setTranslationY(cameraViewLocation[1]); + cameraView.setTranslationY(cameraViewLocation[1] + currentPanTranslationY); cameraIcon.setTranslationX(cameraViewLocation[0]); - cameraIcon.setTranslationY(cameraViewLocation[1]); + cameraIcon.setTranslationY(cameraViewLocation[1] + cameraViewOffsetY + currentPanTranslationY); } public void hideCamera(boolean async) { @@ -1910,21 +2007,20 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } + float additionCloseCameraY; + public void closeCamera(boolean animated) { if (takingPhoto || cameraView == null) { return; } - animateCameraValues[1] = (int) (itemSize - cameraViewOffsetX); - animateCameraValues[2] = (int) (itemSize - cameraViewOffsetY - cameraViewOffsetBottomY); + animateCameraValues[1] = itemSize; + animateCameraValues[2] = itemSize; if (zoomControlHideRunnable != null) { AndroidUtilities.cancelRunOnUIThread(zoomControlHideRunnable); zoomControlHideRunnable = null; } if (animated) { - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) cameraView.getLayoutParams(); - animateCameraValues[0] = layoutParams.topMargin = (int) cameraView.getTranslationY(); - cameraView.setLayoutParams(layoutParams); - 
cameraView.setTranslationY(0); + additionCloseCameraY = cameraView.getTranslationY(); cameraAnimationInProgress = true; ArrayList animators = new ArrayList<>(); @@ -1939,15 +2035,23 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou break; } } + + animationIndex = NotificationCenter.getInstance(parentAlert.currentAccount).setAnimationInProgress(animationIndex, null); AnimatorSet animatorSet = new AnimatorSet(); animatorSet.playTogether(animators); - animatorSet.setDuration(200); + animatorSet.setDuration(220); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animator) { + NotificationCenter.getInstance(parentAlert.currentAccount).onAnimationFinish(animationIndex); + cameraExpanded = false; + setCameraOpenProgress(0f); cameraAnimationInProgress = false; if (Build.VERSION.SDK_INT >= 21 && cameraView != null) { cameraView.invalidateOutline(); + } else if (cameraView != null){ + cameraView.invalidate(); } cameraOpened = false; if (cameraPanel != null) { @@ -1967,6 +2071,8 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou }); animatorSet.start(); } else { + cameraExpanded = false; + setCameraOpenProgress(0f); animateCameraValues[0] = 0; setCameraOpenProgress(0); cameraPanel.setAlpha(0); @@ -1994,6 +2100,11 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } + float animationClipTop; + float animationClipBottom; + float animationClipRight; + float animationClipLeft; + @Keep public void setCameraOpenProgress(float value) { if (cameraView == null) { @@ -2005,37 +2116,80 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou boolean isPortrait = AndroidUtilities.displaySize.x < AndroidUtilities.displaySize.y; float endWidth = parentAlert.getContainer().getWidth() - parentAlert.getLeftInset() - parentAlert.getRightInset(); float 
endHeight = parentAlert.getContainer().getHeight() - parentAlert.getBottomInset(); + + float fromX = cameraViewLocation[0]; + float fromY = cameraViewLocation[1]; + float toX = 0; + float toY = additionCloseCameraY; + if (value == 0) { - cameraView.setClipTop((int) cameraViewOffsetY); - cameraView.setClipBottom((int) cameraViewOffsetBottomY); - cameraView.setTranslationX(cameraViewLocation[0]); - cameraView.setTranslationY(cameraViewLocation[1]); cameraIcon.setTranslationX(cameraViewLocation[0]); - cameraIcon.setTranslationY(cameraViewLocation[1]); - } else if (cameraView.getTranslationX() != 0 || cameraView.getTranslationY() != 0) { - cameraView.setTranslationX(0); - cameraView.setTranslationY(0); + cameraIcon.setTranslationY(cameraViewLocation[1] + cameraViewOffsetY); } + + + int cameraViewW, cameraViewH; FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) cameraView.getLayoutParams(); - layoutParams.width = (int) (startWidth + (endWidth - startWidth) * value); - layoutParams.height = (int) (startHeight + (endHeight - startHeight) * value); - if (value != 0) { - cameraView.setClipTop((int) (cameraViewOffsetY * (1.0f - value))); - cameraView.setClipBottom((int) (cameraViewOffsetBottomY * (1.0f - value))); - layoutParams.leftMargin = (int) (cameraViewLocation[0] * (1.0f - value)); - layoutParams.topMargin = (int) (animateCameraValues[0] + (cameraViewLocation[1] - animateCameraValues[0]) * (1.0f - value)); + + float textureStartHeight = cameraView.getTextureHeight(startWidth, startHeight); + float textureEndHeight = cameraView.getTextureHeight(endWidth, endHeight); + + float fromScale = textureStartHeight / textureEndHeight; + float fromScaleY = startHeight / endHeight; + float fromScaleX = startWidth/ endWidth; + + float scaleOffsetX = 0; + float scaleOffsetY = 0; + + if (cameraExpanded) { + cameraViewW = (int) endWidth; + cameraViewH = (int) endHeight; + float s = fromScale * (1f - value) + value; + cameraView.getTextureView().setScaleX(s); + 
cameraView.getTextureView().setScaleY(s); + + float sX = fromScaleX * (1f - value) + value; + float sY = fromScaleY * (1f - value) + value; + + scaleOffsetY = (1 - sY) * endHeight / 2; + scaleOffsetX = (1 - sX) * endWidth / 2; + + cameraView.setTranslationX(fromX * (1f - value) + toX * value - scaleOffsetX); + cameraView.setTranslationY(fromY * (1f - value) + toY * value - scaleOffsetY); + animationClipTop = fromY * (1f - value) - cameraView.getTranslationY(); + animationClipBottom = ((fromY + startHeight) * (1f - value) - cameraView.getTranslationY()) + endHeight * value; + + animationClipLeft = fromX * (1f - value) - cameraView.getTranslationX(); + animationClipRight = ((fromX + startWidth) * (1f - value) - cameraView.getTranslationX()) + endWidth * value; } else { - layoutParams.leftMargin = 0; - layoutParams.topMargin = 0; + cameraViewW = (int) startWidth; + cameraViewH = (int) startHeight; + cameraView.getTextureView().setScaleX(1f); + cameraView.getTextureView().setScaleY(1f); + animationClipTop = 0; + animationClipBottom = endHeight; + animationClipLeft = 0; + animationClipRight = endWidth; + + cameraView.setTranslationX(fromX); + cameraView.setTranslationY(fromY); } - cameraView.setLayoutParams(layoutParams); + if (value <= 0.5f) { cameraIcon.setAlpha(1.0f - value / 0.5f); } else { cameraIcon.setAlpha(0.0f); } + + if (layoutParams.width != cameraViewW || layoutParams.height != cameraViewH) { + layoutParams.width = cameraViewW; + layoutParams.height = cameraViewH; + cameraView.requestLayout(); + } if (Build.VERSION.SDK_INT >= 21) { cameraView.invalidateOutline(); + } else { + cameraView.invalidate(); } } @@ -2060,6 +2214,15 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } } + if (cameraView != null) { + cameraView.invalidate(); + } + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && recordTime != null) { + MarginLayoutParams params = (MarginLayoutParams) recordTime.getLayoutParams(); + params.topMargin = 
(getRootWindowInsets() == null ? AndroidUtilities.dp(16) : getRootWindowInsets().getSystemWindowInsetTop() + AndroidUtilities.dp(2)); + } + if (!deviceHasGoodCamera) { return; } @@ -2081,10 +2244,25 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } float maxY = (Build.VERSION.SDK_INT >= 21 && !parentAlert.inBubbleMode ? AndroidUtilities.statusBarHeight : 0) + ActionBar.getCurrentActionBarHeight(); + float newCameraViewOffsetY; if (topLocal < maxY) { - cameraViewOffsetY = maxY - topLocal; + newCameraViewOffsetY = maxY - topLocal; } else { - cameraViewOffsetY = 0; + newCameraViewOffsetY = 0; + } + + if (newCameraViewOffsetY != cameraViewOffsetY) { + cameraViewOffsetY = newCameraViewOffsetY; + if (cameraView != null) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + cameraView.invalidateOutline(); + } else { + cameraView.invalidate(); + } + } + if (cameraIcon != null) { + cameraIcon.invalidate(); + } } int containerHeight = parentAlert.getSheetContainer().getMeasuredHeight(); @@ -2097,15 +2275,31 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } cameraViewLocation[0] = left; - cameraViewLocation[1] = top + cameraViewOffsetY; + cameraViewLocation[1] = top; applyCameraViewPosition(); return; } } - cameraViewOffsetX = 0; - cameraViewOffsetY = 0; + + + if (cameraViewOffsetY != 0 || cameraViewOffsetX != 0) { + cameraViewOffsetX = 0; + cameraViewOffsetY = 0; + if (cameraView != null) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + cameraView.invalidateOutline(); + } else { + cameraView.invalidate(); + } + } + if (cameraIcon != null) { + cameraIcon.invalidate(); + } + } + cameraViewLocation[0] = AndroidUtilities.dp(-400); cameraViewLocation[1] = 0; + applyCameraViewPosition(); } @@ -2116,9 +2310,9 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou cameraView.setTranslationY(cameraViewLocation[1] + currentPanTranslationY); } 
cameraIcon.setTranslationX(cameraViewLocation[0]); - cameraIcon.setTranslationY(cameraViewLocation[1] + currentPanTranslationY); - int finalWidth = (int) (itemSize - cameraViewOffsetX); - int finalHeight = (int) (itemSize - cameraViewOffsetY - cameraViewOffsetBottomY); + cameraIcon.setTranslationY(cameraViewLocation[1] + cameraViewOffsetY + currentPanTranslationY); + int finalWidth = itemSize; + int finalHeight = itemSize; FrameLayout.LayoutParams layoutParams; if (!cameraOpened) { @@ -2138,6 +2332,9 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou } } + finalWidth = (int) (itemSize - cameraViewOffsetX); + finalHeight = (int) (itemSize - cameraViewOffsetY - cameraViewOffsetBottomY); + layoutParams = (FrameLayout.LayoutParams) cameraIcon.getLayoutParams(); if (layoutParams.height != finalHeight || layoutParams.width != finalWidth) { layoutParams.width = finalWidth; @@ -2198,21 +2395,21 @@ public class ChatAttachAlertPhotoLayout extends ChatAttachAlert.AttachAlertLayou if (parentAlert.editingMessageObject == null && parentAlert.baseFragment instanceof ChatActivity && ((ChatActivity) parentAlert.baseFragment).isInScheduleMode()) { AlertsCreator.createScheduleDatePickerDialog(getContext(), ((ChatActivity) parentAlert.baseFragment).getDialogId(), (notify, scheduleDate) -> { parentAlert.applyCaption(); - parentAlert.delegate.didPressedButton(7, false, notify, scheduleDate); + parentAlert.delegate.didPressedButton(7, false, notify, scheduleDate, false); }); } else { parentAlert.applyCaption(); - parentAlert.delegate.didPressedButton(7, false, true, 0); + parentAlert.delegate.didPressedButton(7, false, true, 0, false); } } else if (id == compress) { if (parentAlert.editingMessageObject == null && parentAlert.baseFragment instanceof ChatActivity && ((ChatActivity) parentAlert.baseFragment).isInScheduleMode()) { AlertsCreator.createScheduleDatePickerDialog(getContext(), ((ChatActivity) parentAlert.baseFragment).getDialogId(), (notify, 
scheduleDate) -> { parentAlert.applyCaption(); - parentAlert.delegate.didPressedButton(4, true, notify, scheduleDate); + parentAlert.delegate.didPressedButton(4, true, notify, scheduleDate, false); }); } else { parentAlert.applyCaption(); - parentAlert.delegate.didPressedButton(4, true, true, 0); + parentAlert.delegate.didPressedButton(4, true, true, 0, false); } } else if (id == open_in) { try { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java index f4b839abe..a3bd8adfa 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatAvatarContainer.java @@ -28,7 +28,6 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; -import org.telegram.messenger.ImageLocation; import org.telegram.messenger.ImageReceiver; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; @@ -598,7 +597,6 @@ public class ChatAvatarContainer extends FrameLayout implements NotificationCent } public void setUserAvatar(TLRPC.User user, boolean showSelf) { - TLRPC.FileLocation newPhoto = null; avatarDrawable.setInfo(user); if (UserObject.isReplyUser(user)) { avatarDrawable.setAvatarType(AvatarDrawable.AVATAR_TYPE_REPLIES); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatBigEmptyView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatBigEmptyView.java index 372d80a66..0a702d7a8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatBigEmptyView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatBigEmptyView.java @@ -13,6 +13,7 @@ import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.util.TypedValue; 
import android.view.Gravity; +import android.view.View; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; @@ -34,11 +35,10 @@ public class ChatBigEmptyView extends LinearLayout { public final static int EMPTY_VIEW_TYPE_GROUP = 1; public final static int EMPTY_VIEW_TYPE_SAVED = 2; - public ChatBigEmptyView(Context context, int type) { + public ChatBigEmptyView(Context context, View parent, int type) { super(context); - setBackgroundResource(R.drawable.system); - getBackground().setColorFilter(Theme.colorFilter); + setBackground(Theme.createServiceDrawable(AndroidUtilities.dp(18), this, parent)); setPadding(AndroidUtilities.dp(16), AndroidUtilities.dp(12), AndroidUtilities.dp(16), AndroidUtilities.dp(12)); setOrientation(LinearLayout.VERTICAL); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatGreetingsView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatGreetingsView.java index d4acc5cfe..3c61a4e11 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatGreetingsView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ChatGreetingsView.java @@ -131,7 +131,6 @@ public class ChatGreetingsView extends LinearLayout { private void updateColors() { titleView.setTextColor(Theme.getColor(Theme.key_chat_serviceText)); descriptionView.setTextColor(Theme.getColor(Theme.key_chat_serviceText)); - setBackground(Theme.createRoundRectDrawable(AndroidUtilities.dp(10), Theme.getColor(Theme.key_chat_serviceBackground))); } public void setListener(Listener listener) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ClearHistoryAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ClearHistoryAlert.java index 6c2fecd35..cacd9fd54 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ClearHistoryAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ClearHistoryAlert.java @@ -380,7 +380,7 @@ public class 
ClearHistoryAlert extends BottomSheet { int time; int action; if (newTimer == 2) { - time = BuildVars.DEBUG_PRIVATE_VERSION ? 5 : 7 * 24 * 60 * 60; + time = 7 * 24 * 60 * 60; action = UndoView.ACTION_AUTO_DELETE_ON; } else if (newTimer == 1) { time = 24 * 60 * 60; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ColorPicker.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ColorPicker.java index ca409322d..0f5b0dbac 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ColorPicker.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ColorPicker.java @@ -27,7 +27,9 @@ import android.util.TypedValue; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; +import android.view.accessibility.AccessibilityNodeInfo; import android.view.inputmethod.EditorInfo; +import android.widget.Button; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; @@ -43,6 +45,8 @@ import org.telegram.ui.ActionBar.ThemeDescription; import java.util.ArrayList; import java.util.List; +import androidx.annotation.Keep; + public class ColorPicker extends FrameLayout { private final ColorPickerDelegate delegate; @@ -51,7 +55,6 @@ public class ColorPicker extends FrameLayout { private Paint valueSliderPaint; private Paint circlePaint; private Paint linePaint; - private Paint editTextCirclePaint; private Drawable circleDrawable; private boolean myMessagesColor; @@ -62,10 +65,16 @@ public class ColorPicker extends FrameLayout { private Bitmap colorWheelBitmap; - private int selectedEditText; + private RadioButton[] radioButton = new RadioButton[4]; + private FrameLayout radioContainer; - private EditTextBoldCursor[] colorEditText = new EditTextBoldCursor[4]; + private LinearLayout linearLayout; + + private AnimatorSet colorsAnimator; + + private EditTextBoldCursor[] colorEditText; private ImageView clearButton; + private ImageView addButton; private ImageView 
exchangeButton; private TextView resetButton; private ActionBarMenuItem menuItem; @@ -73,6 +82,8 @@ public class ColorPicker extends FrameLayout { private int originalFirstColor; private int currentResetType; + private int colorsCount = 1; + private int colorWheelWidth; private float[] colorHSV = new float[] { 0.0f, 0.0f, 1.0f }; @@ -83,6 +94,8 @@ public class ColorPicker extends FrameLayout { private boolean circlePressed; private boolean colorPressed; + private int selectedColor; + private float pressedMoveProgress = 1.0f; private long lastUpdateTime; @@ -96,10 +109,102 @@ public class ColorPicker extends FrameLayout { private static final int item_share = 2; private static final int item_delete = 3; + private static class RadioButton extends View { + + private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + private ObjectAnimator checkAnimator; + private float checkedState; + private boolean checked; + private int currentColor; + + public RadioButton(Context context) { + super(context); + } + + void updateCheckedState(boolean animate) { + if (checkAnimator != null) { + checkAnimator.cancel(); + } + + if (animate) { + checkAnimator = ObjectAnimator.ofFloat(this, "checkedState", checked ? 1f : 0f); + checkAnimator.setDuration(200); + checkAnimator.start(); + } else { + setCheckedState(checked ? 
1f : 0f); + } + } + + public void setChecked(boolean value, boolean animated) { + checked = value; + updateCheckedState(animated); + } + + public void setColor(int color) { + currentColor = color; + invalidate(); + } + + public int getColor() { + return currentColor; + } + + @Keep + public void setCheckedState(float state) { + checkedState = state; + invalidate(); + } + + @Keep + public float getCheckedState() { + return checkedState; + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + updateCheckedState(false); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(30), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(30), MeasureSpec.EXACTLY)); + } + + @Override + protected void onDraw(Canvas canvas) { + float radius = AndroidUtilities.dp(15); + + float cx = 0.5f * getMeasuredWidth(); + float cy = 0.5f * getMeasuredHeight(); + + paint.setColor(currentColor); + paint.setStyle(Paint.Style.STROKE); + paint.setStrokeWidth(AndroidUtilities.dp(3)); + paint.setAlpha(Math.round(255f * checkedState)); + canvas.drawCircle(cx, cy, radius - 0.5f * paint.getStrokeWidth(), paint); + + paint.setAlpha(255); + paint.setStyle(Paint.Style.FILL); + canvas.drawCircle(cx, cy, radius - AndroidUtilities.dp(5) * checkedState, paint); + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + info.setText(LocaleController.getString("ColorPickerMainColor", R.string.ColorPickerMainColor)); + info.setClassName(Button.class.getName()); + info.setChecked(checked); + info.setCheckable(true); + info.setEnabled(true); + } + } + public ColorPicker(Context context, boolean hasMenu, ColorPickerDelegate colorPickerDelegate) { super(context); delegate = colorPickerDelegate; + colorEditText = new EditTextBoldCursor[2]; setWillNotDraw(false); @@ -108,20 
+213,58 @@ public class ColorPicker extends FrameLayout { circlePaint = new Paint(Paint.ANTI_ALIAS_FLAG); colorWheelPaint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG); valueSliderPaint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG); - editTextCirclePaint = new Paint(Paint.ANTI_ALIAS_FLAG); linePaint = new Paint(); linePaint.setColor(0x12000000); - LinearLayout linearLayout = new LinearLayout(context); + setClipChildren(false); + + linearLayout = new LinearLayout(context) { + + private RectF rect = new RectF(); + private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + { + paint.setColor(Theme.getColor(Theme.key_dialogBackgroundGray)); + } + + @Override + protected void onDraw(Canvas canvas) { + int left = colorEditText[0].getLeft() - AndroidUtilities.dp(13); + int width = (int) (AndroidUtilities.dp(91) + (clearButton.getVisibility() == VISIBLE ? AndroidUtilities.dp(25) * clearButton.getAlpha() : 0)); + rect.set(left, AndroidUtilities.dp(5), left + width, AndroidUtilities.dp(5 + 32)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(16), AndroidUtilities.dp(16), paint); + } + }; linearLayout.setOrientation(LinearLayout.HORIZONTAL); - addView(linearLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 54, Gravity.LEFT | Gravity.TOP, 22, 0, 22, 0)); + addView(linearLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 54, Gravity.LEFT | Gravity.TOP, 27, -6, 17, 0)); + linearLayout.setWillNotDraw(false); + + radioContainer = new FrameLayout(context); + radioContainer.setClipChildren(false); + addView(radioContainer, LayoutHelper.createFrame(174, 30, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 72, 1, 0, 0)); + for (int a = 0; a < 4; a++) { + radioButton[a] = new RadioButton(context); + radioButton[a].setChecked(selectedColor == a, false); + radioContainer.addView(radioButton[a], LayoutHelper.createFrame(30, 30, Gravity.TOP, 0, 0, 0, 0)); + radioButton[a].setOnClickListener(v -> { + RadioButton radioButton1 = (RadioButton) v; + for (int b = 0; 
b < radioButton.length; b++) { + boolean checked = radioButton[b] == radioButton1; + radioButton[b].setChecked(checked, true); + if (checked) { + selectedColor = b; + } + } + int color = radioButton1.getColor(); + setColorInner(color); + colorEditText[1].setText(String.format("%02x%02x%02x", (byte) Color.red(color), (byte) Color.green(color), (byte) Color.blue(color)).toUpperCase()); + }); + } + + for (int a = 0; a < colorEditText.length; a++) { final int num = a; - if (a == 0 || a == 2) { + if (a % 2 == 0) { colorEditText[a] = new EditTextBoldCursor(context) { - - private int lastColor = 0xffffffff; - @Override public boolean onTouchEvent(MotionEvent event) { if (getAlpha() != 1.0f) { @@ -136,22 +279,13 @@ public class ColorPicker extends FrameLayout { } return false; } - - @Override - protected void onDraw(Canvas canvas) { - super.onDraw(canvas); - - int color = lastColor = getFieldColor(num + 1, lastColor); - editTextCirclePaint.setColor(color); - canvas.drawCircle(AndroidUtilities.dp(10), AndroidUtilities.dp(21), AndroidUtilities.dp(10), editTextCirclePaint); - } }; colorEditText[a].setBackgroundDrawable(null); - colorEditText[a].setPadding(AndroidUtilities.dp(28), AndroidUtilities.dp(5), 0, AndroidUtilities.dp(18)); colorEditText[a].setText("#"); colorEditText[a].setEnabled(false); colorEditText[a].setFocusable(false); - linearLayout.addView(colorEditText[a], LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, a == 2 ? 
39 : 0, 0, 0, 0)); + colorEditText[a].setPadding(0, AndroidUtilities.dp(5), 0, AndroidUtilities.dp(16)); + linearLayout.addView(colorEditText[a], LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.MATCH_PARENT, 0, 0, 0, 0)); } else { colorEditText[a] = new EditTextBoldCursor(context) { @Override @@ -183,9 +317,9 @@ public class ColorPicker extends FrameLayout { }; colorEditText[a].setBackgroundDrawable(null); colorEditText[a].setFilters(new InputFilter[]{new InputFilter.LengthFilter(6)}); - colorEditText[a].setPadding(0, AndroidUtilities.dp(5), 0, AndroidUtilities.dp(18)); colorEditText[a].setHint("8BC6ED"); - linearLayout.addView(colorEditText[a], LayoutHelper.createLinear(71, LayoutHelper.MATCH_PARENT)); + colorEditText[a].setPadding(0, AndroidUtilities.dp(5), 0, AndroidUtilities.dp(16)); + linearLayout.addView(colorEditText[a], LayoutHelper.createLinear(71, LayoutHelper.MATCH_PARENT, 0, 0, 0, 0)); colorEditText[a].addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) { @@ -222,18 +356,12 @@ public class ColorPicker extends FrameLayout { editable.replace(0, editable.length(), String.format("%02x%02x%02x", (byte) Color.red(color), (byte) Color.green(color), (byte) Color.blue(color)).toUpperCase()); colorEditText[num].setSelection(editable.length()); } - delegate.setColor(color, num == 1 ? 0 : 1, true); + radioButton[selectedColor].setColor(color); + delegate.setColor(color, selectedColor, true); ignoreTextChange = false; } }); - colorEditText[a].setOnFocusChangeListener((v, hasFocus) -> { - if (colorEditText[3] == null) { - return; - } - selectedEditText = num == 1 ? 
0 : 1; - setColorInner(getFieldColor(num, 0xffffffff)); - }); colorEditText[a].setOnEditorActionListener((textView, i, keyEvent) -> { if (i == EditorInfo.IME_ACTION_DONE) { AndroidUtilities.hideKeyboard(textView); @@ -242,11 +370,11 @@ public class ColorPicker extends FrameLayout { return false; }); } - colorEditText[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); + colorEditText[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); colorEditText[a].setHintTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteHintText)); colorEditText[a].setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); colorEditText[a].setCursorColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); - colorEditText[a].setCursorSize(AndroidUtilities.dp(20)); + colorEditText[a].setCursorSize(AndroidUtilities.dp(18)); colorEditText[a].setCursorWidth(1.5f); colorEditText[a].setSingleLine(true); colorEditText[a].setGravity(Gravity.LEFT | Gravity.CENTER_VERTICAL); @@ -266,61 +394,198 @@ public class ColorPicker extends FrameLayout { exchangeButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); exchangeButton.setScaleType(ImageView.ScaleType.CENTER); exchangeButton.setVisibility(GONE); + exchangeButton.setImageResource(R.drawable.themes_swapcolor); exchangeButton.setOnClickListener(v -> { if (exchangeButton.getAlpha() != 1.0f) { return; } - if (myMessagesColor) { - String text1 = colorEditText[1].getText().toString(); - String text2 = colorEditText[3].getText().toString(); - colorEditText[1].setText(text2); - colorEditText[1].setSelection(text2.length()); - colorEditText[3].setText(text1); - colorEditText[3].setSelection(text1.length()); - } else { - delegate.rotateColors(); - exchangeButton.animate().rotation(exchangeButton.getRotation() + 45).setDuration(180).setInterpolator(CubicBezierInterpolator.EASE_OUT).start(); - } + int color = radioButton[0].getColor(); + 
radioButton[0].setColor(radioButton[1].getColor()); + radioButton[1].setColor(color); + delegate.setColor(radioButton[0].getColor(), 0, false); + delegate.setColor(radioButton[1].getColor(), 1, true); }); - addView(exchangeButton, LayoutHelper.createFrame(42, 42, Gravity.LEFT | Gravity.TOP, 126, 0, 0, 0)); + radioContainer.addView(exchangeButton, 1, LayoutHelper.createFrame(30, 30, Gravity.LEFT | Gravity.TOP, 0, 1, 0, 0)); - clearButton = new ImageView(getContext()); - clearButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_dialogButtonSelector), 1)); - clearButton.setImageDrawable(new CloseProgressDrawable2()); - clearButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); - clearButton.setScaleType(ImageView.ScaleType.CENTER); - clearButton.setVisibility(GONE); - clearButton.setOnClickListener(v -> { - boolean hide = clearButton.getTag() != null; - if (myMessagesColor && hide) { - colorEditText[1].setText(String.format("%02x%02x%02x", (byte) Color.red(originalFirstColor), (byte) Color.green(originalFirstColor), (byte) Color.blue(originalFirstColor)).toUpperCase()); - colorEditText[1].setSelection(colorEditText[1].length()); + addButton = new ImageView(getContext()); + addButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_dialogButtonSelector), 1)); + addButton.setImageResource(R.drawable.themes_addcolor); + addButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); + addButton.setScaleType(ImageView.ScaleType.CENTER); + addButton.setOnClickListener(v -> { + if (colorsAnimator != null) { + return; } - toggleSecondField(); - if (myMessagesColor && !hide) { - originalFirstColor = getFieldColor(1, 0xffffffff); - int color = Theme.getColor(Theme.key_chat_outBubble); - colorEditText[1].setText(String.format("%02x%02x%02x", (byte) Color.red(color), (byte) 
Color.green(color), (byte) Color.blue(color)).toUpperCase()); - colorEditText[1].setSelection(colorEditText[1].length()); - } - int color2 = getFieldColor(3, 0xff000000); - if (!hide) { - color2 = generateGradientColors(getFieldColor(1, 0)); - String text = String.format("%02x%02x%02x", (byte) Color.red(color2), (byte) Color.green(color2), (byte) Color.blue(color2)).toUpperCase(); - colorEditText[3].setText(text); - colorEditText[3].setSelection(text.length()); - } - delegate.setColor(hide ? 0 : color2, 1, true); - if (hide) { - if (colorEditText[3].isFocused()){ - colorEditText[1].requestFocus(); + ArrayList animators; + if (colorsCount == 1) { + if (radioButton[1].getColor() == 0) { + radioButton[1].setColor(generateGradientColors(radioButton[0].getColor())); } + delegate.setColor(radioButton[1].getColor(), 1, true); + colorsCount = 2; + clearButton.setVisibility(VISIBLE); + animators = new ArrayList<>(); + animators.add(ObjectAnimator.ofFloat(clearButton, View.ALPHA, 1.0f)); + animators.add(ObjectAnimator.ofFloat(clearButton, View.SCALE_X, 1.0f)); + animators.add(ObjectAnimator.ofFloat(clearButton, View.SCALE_Y, 1.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.TRANSLATION_X, AndroidUtilities.dp(30) + AndroidUtilities.dp(13))); + if (myMessagesColor) { + exchangeButton.setVisibility(VISIBLE); + animators.add(ObjectAnimator.ofFloat(addButton, View.ALPHA, 0.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.SCALE_X, 0.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.SCALE_Y, 0.0f)); + animators.add(ObjectAnimator.ofFloat(exchangeButton, View.ALPHA, 1.0f)); + animators.add(ObjectAnimator.ofFloat(exchangeButton, View.SCALE_X, 1.0f)); + animators.add(ObjectAnimator.ofFloat(exchangeButton, View.SCALE_Y, 1.0f)); + } + } else if (colorsCount == 2) { + if (myMessagesColor) { + return; + } + colorsCount = 3; + if (radioButton[2].getColor() == 0) { + int color = radioButton[0].getColor(); + float[] hsv = new float[3]; + 
Color.colorToHSV(color, hsv); + if (hsv[0] > 180) { + hsv[0] -= 60; + } else { + hsv[0] += 60; + } + radioButton[2].setColor(Color.HSVToColor(255, hsv)); + } + animators = new ArrayList<>(); + animators.add(ObjectAnimator.ofFloat(addButton, View.TRANSLATION_X, AndroidUtilities.dp(30) * 2 + AndroidUtilities.dp(13) * 2)); + delegate.setColor(radioButton[2].getColor(), 2, true); + } else if (colorsCount == 3) { + if (myMessagesColor) { + return; + } + colorsCount = 4; + if (radioButton[3].getColor() == 0) { + radioButton[3].setColor(generateGradientColors(radioButton[2].getColor())); + } + delegate.setColor(radioButton[3].getColor(), 3, true); + animators = new ArrayList<>(); + animators.add(ObjectAnimator.ofFloat(addButton, View.TRANSLATION_X, AndroidUtilities.dp(30) * 3 + AndroidUtilities.dp(13) * 3)); + animators.add(ObjectAnimator.ofFloat(addButton, View.ALPHA, 0.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.SCALE_X, 0.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.SCALE_Y, 0.0f)); } else { - colorEditText[3].requestFocus(); + return; } + radioButton[colorsCount - 1].callOnClick(); + colorsAnimator = new AnimatorSet(); + updateColorsPosition(animators, 0, false, getMeasuredWidth()); + colorsAnimator.playTogether(animators); + colorsAnimator.setDuration(180); + colorsAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT); + colorsAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (colorsCount == 4 || myMessagesColor && colorsCount == 2) { + addButton.setVisibility(INVISIBLE); + } + colorsAnimator = null; + } + }); + colorsAnimator.start(); + }); + addButton.setContentDescription(LocaleController.getString("Add", R.string.Add)); + addView(addButton, LayoutHelper.createFrame(30, 30, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 36, 1, 0, 0)); + + clearButton = new ImageView(getContext()) { + @Override + public void setAlpha(float alpha) { + super.setAlpha(alpha); + 
linearLayout.invalidate(); + } + }; + clearButton.setBackground(Theme.createSelectorDrawable(Theme.getColor(Theme.key_dialogButtonSelector), 1)); + clearButton.setImageResource(R.drawable.themes_deletecolor); + clearButton.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText), PorterDuff.Mode.MULTIPLY)); + clearButton.setAlpha(0.0f); + clearButton.setScaleX(0.0f); + clearButton.setScaleY(0.0f); + clearButton.setScaleType(ImageView.ScaleType.CENTER); + clearButton.setVisibility(INVISIBLE); + clearButton.setOnClickListener(v -> { + if (colorsAnimator != null) { + return; + } + ArrayList animators; + if (colorsCount == 2) { + colorsCount = 1; + animators = new ArrayList<>(); + animators.add(ObjectAnimator.ofFloat(clearButton, View.ALPHA, 0.0f)); + animators.add(ObjectAnimator.ofFloat(clearButton, View.SCALE_X, 0.0f)); + animators.add(ObjectAnimator.ofFloat(clearButton, View.SCALE_Y, 0.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.TRANSLATION_X, 0)); + if (myMessagesColor) { + addButton.setVisibility(VISIBLE); + animators.add(ObjectAnimator.ofFloat(exchangeButton, View.ALPHA, 0.0f)); + animators.add(ObjectAnimator.ofFloat(exchangeButton, View.SCALE_X, 0.0f)); + animators.add(ObjectAnimator.ofFloat(exchangeButton, View.SCALE_Y, 0.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.ALPHA, 1.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.SCALE_X, 1.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.SCALE_Y, 1.0f)); + } + } else if (colorsCount == 3) { + colorsCount = 2; + animators = new ArrayList<>(); + animators.add(ObjectAnimator.ofFloat(addButton, View.TRANSLATION_X, AndroidUtilities.dp(30) + AndroidUtilities.dp(13))); + } else if (colorsCount == 4) { + colorsCount = 3; + addButton.setVisibility(VISIBLE); + animators = new ArrayList<>(); + animators.add(ObjectAnimator.ofFloat(addButton, View.TRANSLATION_X, AndroidUtilities.dp(30) * 2 + AndroidUtilities.dp(13) * 2)); + 
animators.add(ObjectAnimator.ofFloat(addButton, View.ALPHA, 1.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.SCALE_X, 1.0f)); + animators.add(ObjectAnimator.ofFloat(addButton, View.SCALE_Y, 1.0f)); + } else { + return; + } + if (selectedColor != 3) { + RadioButton button = radioButton[selectedColor]; + for (int a = selectedColor + 1; a < radioButton.length; a++) { + radioButton[a - 1] = radioButton[a]; + } + radioButton[3] = button; + } + radioButton[0].callOnClick(); + for (int a = 0; a < radioButton.length; a++) { + if (a < colorsCount) { + delegate.setColor(radioButton[a].getColor(), a, a == radioButton.length - 1); + } else { + delegate.setColor(0, a, a == radioButton.length - 1); + } + } + colorsAnimator = new AnimatorSet(); + updateColorsPosition(animators, selectedColor, true, getMeasuredWidth()); + colorsAnimator.playTogether(animators); + colorsAnimator.setDuration(180); + colorsAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT); + colorsAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (colorsCount == 1) { + clearButton.setVisibility(INVISIBLE); + if (myMessagesColor) { + exchangeButton.setVisibility(INVISIBLE); + } + } + for (int a = 0; a < radioButton.length; a++) { + if (radioButton[a].getTag(R.id.index_tag) == null) { + radioButton[a].setVisibility(INVISIBLE); + } + } + colorsAnimator = null; + } + }); + colorsAnimator.start(); }); clearButton.setContentDescription(LocaleController.getString("ClearButton", R.string.ClearButton)); - addView(clearButton, LayoutHelper.createFrame(42, 42, Gravity.TOP | Gravity.RIGHT, 0, 0, 9, 0)); + addView(clearButton, LayoutHelper.createFrame(30, 30, Gravity.TOP | Gravity.LEFT, 97, 1, 0, 0)); resetButton = new TextView(context); resetButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); @@ -330,12 +595,12 @@ public class ColorPicker extends FrameLayout { 
resetButton.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); addView(resetButton, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 36, Gravity.TOP | Gravity.RIGHT, 0, 3, 14, 0)); resetButton.setOnClickListener(v -> { - if (resetButton.getAlpha() != 1.0f) { + /*if (resetButton.getAlpha() != 1.0f) { TODO return; } delegate.setColor(0, -1, true); resetButton.animate().alpha(0.0f).setDuration(180).start(); - resetButton.setTag(null); + resetButton.setTag(null);*/ }); if (hasMenu) { @@ -358,61 +623,113 @@ public class ColorPicker extends FrameLayout { menuItem.setAdditionalYOffset(AndroidUtilities.dp(72)); menuItem.setTranslationX(AndroidUtilities.dp(6)); menuItem.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_dialogButtonSelector), 1)); - addView(menuItem, LayoutHelper.createFrame(48, 48, Gravity.TOP | Gravity.RIGHT, 0, -3, 7, 0)); + addView(menuItem, LayoutHelper.createFrame(30, 30, Gravity.TOP | Gravity.RIGHT, 0, 2, 10, 0)); menuItem.setOnClickListener(v -> menuItem.toggleSubMenu()); } + updateColorsPosition(null, 0, false, getMeasuredWidth()); + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + updateColorsPosition(null, 0, false, getMeasuredWidth()); + } + + private void updateColorsPosition(ArrayList animators, int hidingIndex, boolean hiding, int width) { + int allX = 0; + int count = colorsCount; + if (myMessagesColor && colorsCount == 2) { + count++; + } + int visibleX = count * AndroidUtilities.dp(30) + (count - 1) * AndroidUtilities.dp(13); + int left = radioContainer.getLeft() + visibleX; + int w = width - AndroidUtilities.dp(currentResetType == 1 ? 
50 : 0); + float tr; + if (left > w) { + tr = left - w; + } else { + tr = 0; + } + if (animators != null) { + animators.add(ObjectAnimator.ofFloat(radioContainer, View.TRANSLATION_X, -tr)); + } else { + radioContainer.setTranslationX(-tr); + } + for (int a = 0; a < radioButton.length; a++) { + boolean wasVisible = radioButton[a].getTag(R.id.index_tag) != null; + if (a < colorsCount) { + if (a == 1 && myMessagesColor) { + exchangeButton.setTranslationX(allX); + allX += AndroidUtilities.dp(30) + AndroidUtilities.dp(13); + } + radioButton[a].setVisibility(VISIBLE); + if (animators != null) { + if (!wasVisible) { + animators.add(ObjectAnimator.ofFloat(radioButton[a], View.ALPHA, 1.0f)); + animators.add(ObjectAnimator.ofFloat(radioButton[a], View.SCALE_X, 1.0f)); + animators.add(ObjectAnimator.ofFloat(radioButton[a], View.SCALE_Y, 1.0f)); + } + if (hiding || !hiding && a != colorsCount - 1) { + animators.add(ObjectAnimator.ofFloat(radioButton[a], View.TRANSLATION_X, allX)); + } else { + radioButton[a].setTranslationX(allX); + } + } else { + radioButton[a].setVisibility(VISIBLE); + if (colorsAnimator == null) { + radioButton[a].setAlpha(1.0f); + radioButton[a].setScaleX(1.0f); + radioButton[a].setScaleY(1.0f); + } + radioButton[a].setTranslationX(allX); + } + radioButton[a].setTag(R.id.index_tag, 1); + } else { + if (animators != null) { + if (wasVisible) { + animators.add(ObjectAnimator.ofFloat(radioButton[a], View.ALPHA, 0.0f)); + animators.add(ObjectAnimator.ofFloat(radioButton[a], View.SCALE_X, 0.0f)); + animators.add(ObjectAnimator.ofFloat(radioButton[a], View.SCALE_Y, 0.0f)); + } + } else { + radioButton[a].setVisibility(INVISIBLE); + if (colorsAnimator == null) { + radioButton[a].setAlpha(0.0f); + radioButton[a].setScaleX(0.0f); + radioButton[a].setScaleY(0.0f); + } + } + if (!hiding) { + radioButton[a].setTranslationX(allX); + } + radioButton[a].setTag(R.id.index_tag, null); + } + allX += AndroidUtilities.dp(30) + AndroidUtilities.dp(13); + } } public void 
hideKeyboard() { - AndroidUtilities.hideKeyboard(colorEditText[selectedEditText == 0 ? 1 : 3]); + AndroidUtilities.hideKeyboard(colorEditText[1]); } - private void toggleSecondField() { - boolean hide = clearButton.getTag() != null; - clearButton.setTag(hide ? null : 1); - AnimatorSet animatorSet = new AnimatorSet(); - ArrayList animators = new ArrayList<>(); - animators.add(ObjectAnimator.ofFloat(clearButton, View.ROTATION, hide ? 45 : 0)); - animators.add(ObjectAnimator.ofFloat(colorEditText[2], View.ALPHA, hide ? 0.0f : 1.0f)); - animators.add(ObjectAnimator.ofFloat(colorEditText[3], View.ALPHA, hide ? 0.0f : 1.0f)); - animators.add(ObjectAnimator.ofFloat(exchangeButton, View.ALPHA, hide ? 0.0f : 1.0f)); - if (currentResetType == 2 && !hide) { - animators.add(ObjectAnimator.ofFloat(resetButton, View.ALPHA, 0.0f)); - } - animatorSet.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - if (currentResetType == 2 && !hide) { - resetButton.setVisibility(GONE); - resetButton.setTag(null); - } - } - }); - animatorSet.playTogether(animators); - animatorSet.setDuration(180); - animatorSet.start(); - - if (hide && !ignoreTextChange && (minBrightness > 0f || maxBrightness < 1f)) { - setColorInner(getFieldColor(1, 0xffffffff)); - int color = getColor(); - int red = Color.red(color); - int green = Color.green(color); - int blue = Color.blue(color); - ignoreTextChange = true; - String text = String.format("%02x%02x%02x", (byte) red, (byte) green, (byte) blue).toUpperCase(); - colorEditText[1].setText(text); - colorEditText[1].setSelection(text.length()); - ignoreTextChange = false; - delegate.setColor(color, 0, true); - invalidate(); + private int getIndex(int num) { + if (num == 1) { + return 0; + } else if (num == 3) { + return 1; + } else if (num == 5) { + return 2; + } else { + return 3; } } @Override protected void onDraw(Canvas canvas) { - canvas.drawBitmap(colorWheelBitmap, 0, AndroidUtilities.dp(54), null); - int 
y = AndroidUtilities.dp(54) + colorWheelBitmap.getHeight(); - canvas.drawRect(0, AndroidUtilities.dp(54), getMeasuredWidth(), AndroidUtilities.dp(54) + 1, linePaint); + int top = AndroidUtilities.dp(45); + canvas.drawBitmap(colorWheelBitmap, 0, top, null); + int y = top + colorWheelBitmap.getHeight(); + canvas.drawRect(0, top, getMeasuredWidth(), top + 1, linePaint); canvas.drawRect(0, y - 1, getMeasuredWidth(), y, linePaint); hsvTemp[0] = colorHSV[0]; @@ -420,7 +737,7 @@ public class ColorPicker extends FrameLayout { hsvTemp[2] = 1f; int colorPointX = (int) (colorHSV[0] * getMeasuredWidth() / 360); - int colorPointY = (int) (AndroidUtilities.dp(54) + (colorWheelBitmap.getHeight() * (1.0f - colorHSV[1]))); + int colorPointY = (int) (top + (colorWheelBitmap.getHeight() * (1.0f - colorHSV[1]))); if (!circlePressed) { int minD = AndroidUtilities.dp(16); float progress = CubicBezierInterpolator.EASE_OUT.getInterpolation(pressedMoveProgress); @@ -429,10 +746,10 @@ public class ColorPicker extends FrameLayout { } else if (colorPointX > getMeasuredWidth() - minD) { colorPointX -= progress * (colorPointX - (getMeasuredWidth() - minD)); } - if (colorPointY < AndroidUtilities.dp(54) + minD) { - colorPointY += progress * (AndroidUtilities.dp(54) + minD - colorPointY); - } else if (colorPointY > AndroidUtilities.dp(54) + colorWheelBitmap.getHeight() - minD) { - colorPointY -= progress * (colorPointY - (AndroidUtilities.dp(54) + colorWheelBitmap.getHeight() - minD)); + if (colorPointY < top + minD) { + colorPointY += progress * (top + minD - colorPointY); + } else if (colorPointY > top + colorWheelBitmap.getHeight() - minD) { + colorPointY -= progress * (colorPointY - (top+ colorWheelBitmap.getHeight() - minD)); } } drawPointerArrow(canvas, colorPointX, colorPointY, Color.HSVToColor(hsvTemp), false); @@ -514,8 +831,8 @@ public class ColorPicker extends FrameLayout { case MotionEvent.ACTION_MOVE: int x = (int) event.getX(); int y = (int) event.getY(); - - if (circlePressed || 
!colorPressed && y >= AndroidUtilities.dp(54) && y <= AndroidUtilities.dp(54) + colorWheelBitmap.getHeight()) { + int top = AndroidUtilities.dp(45); + if (circlePressed || !colorPressed && y >= top && y <= top + colorWheelBitmap.getHeight()) { if (!circlePressed) { getParent().requestDisallowInterceptTouchEvent(true); } @@ -524,11 +841,11 @@ public class ColorPicker extends FrameLayout { lastUpdateTime = SystemClock.elapsedRealtime(); x = Math.max(0, Math.min(x, colorWheelBitmap.getWidth())); - y = Math.max(AndroidUtilities.dp(54), Math.min(y, AndroidUtilities.dp(54) + colorWheelBitmap.getHeight())); + y = Math.max(top, Math.min(y, top + colorWheelBitmap.getHeight())); float oldBrightnessPos = minHsvBrightness == maxHsvBrightness ? 0.5f : (getBrightness() - minHsvBrightness) / (maxHsvBrightness - minHsvBrightness); colorHSV[0] = x * 360f / colorWheelBitmap.getWidth(); - colorHSV[1] = 1.0f - (1.0f / colorWheelBitmap.getHeight() * (y - AndroidUtilities.dp(54))); + colorHSV[1] = 1.0f - (1.0f / colorWheelBitmap.getHeight() * (y - top)); updateHsvMinMaxBrightness(); colorHSV[2] = minHsvBrightness * (1 - oldBrightnessPos) + maxHsvBrightness * oldBrightnessPos; colorGradient = null; @@ -554,11 +871,12 @@ public class ColorPicker extends FrameLayout { int blue = Color.blue(color); ignoreTextChange = true; String text = String.format("%02x%02x%02x", (byte) red, (byte) green, (byte) blue).toUpperCase(); - Editable editable = colorEditText[selectedEditText == 0 ? 
1 : 3].getText(); + Editable editable = colorEditText[1].getText(); editable.replace(0, editable.length(), text); + radioButton[selectedColor].setColor(color); ignoreTextChange = false; } - delegate.setColor(color, selectedEditText, false); + delegate.setColor(color, selectedColor, false); invalidate(); } return true; @@ -574,7 +892,7 @@ public class ColorPicker extends FrameLayout { private void setColorInner(int color) { Color.colorToHSV(color, colorHSV); - int defaultColor = delegate.getDefaultColor(selectedEditText); + int defaultColor = delegate.getDefaultColor(selectedColor); if (defaultColor == 0 || defaultColor != color) { updateHsvMinMaxBrightness(); } @@ -585,15 +903,15 @@ public class ColorPicker extends FrameLayout { public void setColor(int color, int num) { if (!ignoreTextChange) { ignoreTextChange = true; - String text = String.format("%02x%02x%02x", (byte) Color.red(color), (byte) Color.green(color), (byte) Color.blue(color)).toUpperCase(); - colorEditText[num == 0 ? 1 : 3].setText(text); - colorEditText[num == 0 ? 
1 : 3].setSelection(text.length()); + if (selectedColor == num) { + String text = String.format("%02x%02x%02x", (byte) Color.red(color), (byte) Color.green(color), (byte) Color.blue(color)).toUpperCase(); + colorEditText[1].setText(text); + colorEditText[1].setSelection(text.length()); + } + radioButton[num].setColor(color); ignoreTextChange = false; } setColorInner(color); - if (num == 1 && color != 0 && clearButton.getTag() == null) { - toggleSecondField(); - } } public void setHasChanges(boolean value) { @@ -620,23 +938,61 @@ public class ColorPicker extends FrameLayout { animatorSet.start(); } - public void setType(int resetType, boolean hasChanges, boolean twoColors, boolean hasSecondColor, boolean myMessages, int angle, boolean animated) { + public void setType(int resetType, boolean hasChanges, boolean fewColors, int newColorsCount, boolean myMessages, int angle, boolean animated) { currentResetType = resetType; myMessagesColor = myMessages; - if (myMessagesColor) { - exchangeButton.setImageResource(R.drawable.menu_switch); - exchangeButton.setRotation(0); + colorsCount = newColorsCount; + + if (newColorsCount == 1) { + addButton.setTranslationX(0); + } else if (newColorsCount == 2) { + addButton.setTranslationX(AndroidUtilities.dp(30) + AndroidUtilities.dp(13)); + } else if (newColorsCount == 3) { + addButton.setTranslationX(AndroidUtilities.dp(30) * 2 + AndroidUtilities.dp(13) * 2); } else { - exchangeButton.setImageResource(R.drawable.editor_rotate); - exchangeButton.setRotation(angle - 45); + addButton.setTranslationX(AndroidUtilities.dp(30) * 3 + AndroidUtilities.dp(13) * 3); } + if (menuItem != null) { if (resetType == 1) { menuItem.setVisibility(VISIBLE); + clearButton.setTranslationX(-AndroidUtilities.dp(40)); } else { menuItem.setVisibility(GONE); + clearButton.setTranslationX(0); } } + if (!fewColors) { + addButton.setVisibility(GONE); + clearButton.setVisibility(GONE); + } else { + if (newColorsCount < (myMessages ? 
2 : 4)) { + addButton.setVisibility(VISIBLE); + addButton.setScaleX(1.0f); + addButton.setScaleY(1.0f); + addButton.setAlpha(1.0f); + } else { + addButton.setVisibility(GONE); + } + if (newColorsCount > 1) { + clearButton.setVisibility(VISIBLE); + clearButton.setScaleX(1.0f); + clearButton.setScaleY(1.0f); + clearButton.setAlpha(1.0f); + } else { + clearButton.setVisibility(GONE); + } + } + if (myMessages) { + exchangeButton.setVisibility(newColorsCount == 2 ? VISIBLE : INVISIBLE); + exchangeButton.setAlpha(newColorsCount == 2 ? 1.0f : 0.0f); + exchangeButton.setScaleX(newColorsCount == 2 ? 1.0f : 0.0f); + exchangeButton.setScaleY(newColorsCount == 2 ? 1.0f : 0.0f); + } else { + exchangeButton.setVisibility(GONE); + } + linearLayout.invalidate(); + updateColorsPosition(null, 0, false, getMeasuredWidth()); ArrayList animators; if (animated) { @@ -645,10 +1001,10 @@ public class ColorPicker extends FrameLayout { animators = null; } - if (!twoColors || !hasSecondColor) { + /*if (!twoColors || !hasSecondColor) { colorEditText[1].requestFocus(); } - for (int a = 2; a < 4; a++) { + for (int a = 2; a < colorEditText.length; a++) { if (animated) { if (twoColors) { colorEditText[a].setVisibility(VISIBLE); @@ -659,8 +1015,9 @@ public class ColorPicker extends FrameLayout { colorEditText[a].setAlpha(twoColors && hasSecondColor ? 1.0f : 0.0f); } colorEditText[a].setTag(twoColors ? 1 : null); - } - if (animated) { + }*/ + + /*if (animated) { if (twoColors) { exchangeButton.setVisibility(VISIBLE); } @@ -681,9 +1038,9 @@ public class ColorPicker extends FrameLayout { } else { clearButton.setVisibility(twoColors ? VISIBLE : GONE); clearButton.setAlpha(twoColors ? 1.0f : 0.0f); - } + }*/ - resetButton.setTag(hasChanges ? 1 : null); + /*resetButton.setTag(hasChanges ? 1 : null); resetButton.setText(resetType == 1 ? 
LocaleController.getString("ColorPickerResetAll", R.string.ColorPickerResetAll) : LocaleController.getString("ColorPickerReset", R.string.ColorPickerReset)); FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) resetButton.getLayoutParams(); layoutParams.rightMargin = AndroidUtilities.dp(resetType == 1 ? 14 : (14 + 47)); @@ -697,7 +1054,7 @@ public class ColorPicker extends FrameLayout { } else { resetButton.setAlpha(!hasChanges || hasSecondColor ? 0.0f : 1.0f); resetButton.setVisibility(!hasChanges || hasSecondColor ? GONE : VISIBLE); - } + }*/ if (animators != null && !animators.isEmpty()) { AnimatorSet animatorSet = new AnimatorSet(); @@ -706,15 +1063,12 @@ public class ColorPicker extends FrameLayout { animatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - if (!hasChanges || hasSecondColor) { + /*if (!hasChanges || hasSecondColor) { resetButton.setVisibility(GONE); - } - if (!twoColors) { + }*/ + if (!fewColors) { clearButton.setVisibility(GONE); exchangeButton.setVisibility(GONE); - for (int a = 2; a < 4; a++) { - colorEditText[a].setVisibility(GONE); - } } } }); @@ -734,6 +1088,9 @@ public class ColorPicker extends FrameLayout { } private void updateHsvMinMaxBrightness() { + if (clearButton == null) { + return; + } float min = clearButton.getTag() != null ? 0f : minBrightness; float max = clearButton.getTag() != null ? 
1f : maxBrightness; float hsvBrightness = colorHSV[2]; @@ -817,7 +1174,7 @@ public class ColorPicker extends FrameLayout { } } - private int generateGradientColors(int color) { + public static int generateGradientColors(int color) { float[] hsv = new float[3]; Color.colorToHSV(color, hsv); if (hsv[1] > 0.5f) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/CrossOutDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/CrossOutDrawable.java index abc0f564b..9aed2dcb0 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/CrossOutDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/CrossOutDrawable.java @@ -2,13 +2,13 @@ package org.telegram.ui.Components; import android.content.Context; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.PorterDuffXfermode; -import android.graphics.Rect; import android.graphics.RectF; import android.graphics.drawable.Drawable; @@ -31,6 +31,10 @@ public class CrossOutDrawable extends Drawable { float progress; boolean cross; + private float xOffset; + private float lenOffsetTop; + private float lenOffsetBottom; + public CrossOutDrawable(Context context, int iconRes, String colorKey) { iconDrawable = ContextCompat.getDrawable(context, iconRes); this.colorKey = colorKey; @@ -44,13 +48,15 @@ public class CrossOutDrawable extends Drawable { } public void setCrossOut(boolean cross, boolean animated) { - this.cross = cross; - if (!animated) { - progress = cross ? 1f : 0f; - } else { - progress = cross ? 0f : 1f; + if (this.cross != cross) { + this.cross = cross; + if (!animated) { + progress = cross ? 1f : 0f; + } else { + progress = cross ? 
0f : 1f; + } + invalidateSelf(); } - invalidateSelf(); } @Override @@ -68,11 +74,11 @@ public class CrossOutDrawable extends Drawable { progress = 0; } } - int newColor = Theme.getColor(colorKey); + int newColor = colorKey == null ? Color.WHITE : Theme.getColor(colorKey); if (color != newColor) { color = newColor; paint.setColor(newColor); - iconDrawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(colorKey), PorterDuff.Mode.MULTIPLY)); + iconDrawable.setColorFilter(new PorterDuffColorFilter(newColor, PorterDuff.Mode.MULTIPLY)); } if (progress == 0) { iconDrawable.draw(canvas); @@ -82,10 +88,10 @@ public class CrossOutDrawable extends Drawable { canvas.saveLayerAlpha(rectF, 255, Canvas.ALL_SAVE_FLAG); iconDrawable.draw(canvas); - float startX = rectF.left + AndroidUtilities.dpf2(4.5f); - float startY = rectF.top + AndroidUtilities.dpf2(4.5f) - AndroidUtilities.dp(1); - float stopX = rectF.right - AndroidUtilities.dp(3); - float stopY = rectF.bottom - AndroidUtilities.dp(1) - AndroidUtilities.dp(3); + float startX = rectF.left + AndroidUtilities.dpf2(4.5f) + xOffset + lenOffsetTop; + float startY = rectF.top + AndroidUtilities.dpf2(4.5f) - AndroidUtilities.dp(1) + lenOffsetTop; + float stopX = rectF.right - AndroidUtilities.dp(3) + xOffset - lenOffsetBottom; + float stopY = rectF.bottom - AndroidUtilities.dp(1) - AndroidUtilities.dp(3) - lenOffsetBottom; if (cross) { stopX = startX + (stopX - startX) * progress; stopY = startY + (stopY - startY) * progress; @@ -104,7 +110,7 @@ public class CrossOutDrawable extends Drawable { @Override public void setColorFilter(@Nullable ColorFilter colorFilter) { - + } @Override @@ -132,4 +138,19 @@ public class CrossOutDrawable extends Drawable { this.colorKey = colorKey; } + public void setOffsets(float xOffset, float lenOffsetTop, float lenOffsetBottom) { + this.xOffset = xOffset; + this.lenOffsetTop = lenOffsetTop; + this.lenOffsetBottom = lenOffsetBottom; + invalidateSelf(); + } + + public void setStrokeWidth(float 
w) { + paint.setStrokeWidth(w); + xRefPaint.setStrokeWidth(w * 1.47f); + } + + public float getProgress() { + return progress; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/DialogsItemAnimator.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/DialogsItemAnimator.java index 4331545df..30967ed5b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/DialogsItemAnimator.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/DialogsItemAnimator.java @@ -266,9 +266,7 @@ public class DialogsItemAnimator extends SimpleItemAnimator { @Override public boolean animateAdd(final ViewHolder holder) { resetAnimation(holder); - if (holder.itemView instanceof DialogCell) { - // ((DialogCell) holder.itemView).setMoving(true); - } else { + if (!(holder.itemView instanceof DialogCell)) { holder.itemView.setAlpha(0); } @@ -313,7 +311,6 @@ public class DialogsItemAnimator extends SimpleItemAnimator { } } }).start(); - } @Override diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java index 542a0f6cb..2632da403 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextBoldCursor.java @@ -8,11 +8,8 @@ package org.telegram.ui.Components; -import android.animation.Animator; -import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; -import android.animation.ValueAnimator; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Context; @@ -31,7 +28,6 @@ import androidx.annotation.Nullable; import androidx.core.view.accessibility.AccessibilityNodeInfoCompat; import android.text.Layout; -import android.text.SpannableStringBuilder; import android.text.StaticLayout; import android.text.TextPaint; 
import android.text.TextUtils; @@ -46,7 +42,6 @@ import android.widget.EditText; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.R; @@ -433,7 +428,11 @@ public class EditTextBoldCursor extends EditText { @Override protected void onFocusChanged(boolean focused, int direction, Rect previouslyFocusedRect) { - super.onFocusChanged(focused, direction, previouslyFocusedRect); + try { + super.onFocusChanged(focused, direction, previouslyFocusedRect); + } catch (Exception e) { + FileLog.e(e); + } checkHeaderVisibility(true); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextCaption.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextCaption.java index c55c127ad..6b6eed16c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextCaption.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextCaption.java @@ -260,7 +260,11 @@ public class EditTextCaption extends EditTextBoldCursor { if (Build.VERSION.SDK_INT < 23 && !hasWindowFocus && copyPasteShowed) { return; } - super.onWindowFocusChanged(hasWindowFocus); + try { + super.onWindowFocusChanged(hasWindowFocus); + } catch (Throwable e) { + FileLog.e(e); + } } private ActionMode.Callback overrideCallback(final ActionMode.Callback callback) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextEmoji.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextEmoji.java index 65a58b8e6..b0cb3c48b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextEmoji.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EditTextEmoji.java @@ -99,7 +99,7 @@ public class EditTextEmoji extends FrameLayout implements NotificationCenter.Not super(context); currentStyle = style; - 
NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); parentFragment = fragment; sizeNotifierLayout = parent; sizeNotifierLayout.setDelegate(this); @@ -195,7 +195,7 @@ public class EditTextEmoji extends FrameLayout implements NotificationCenter.Not @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { if (emojiView != null) { emojiView.invalidateViews(); } @@ -250,7 +250,7 @@ public class EditTextEmoji extends FrameLayout implements NotificationCenter.Not public void onDestroy() { destroyed = true; - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); if (emojiView != null) { emojiView.onDestroy(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java index 3ffb51e68..22078204e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/EmojiView.java @@ -43,7 +43,6 @@ import android.text.Editable; import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.text.TextWatcher; -import android.util.Log; import android.util.LongSparseArray; import android.util.SparseArray; import android.util.SparseIntArray; @@ -75,6 +74,7 @@ import org.telegram.messenger.EmojiData; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageReceiver; import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.MessagesStorage; import 
org.telegram.messenger.NotificationCenter; @@ -254,7 +254,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific } - default void onStickerSelected(View view, TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { + default void onStickerSelected(View view, TLRPC.Document sticker, String query, Object parent, MessageObject.SendAnimationData sendAnimationData, boolean notify, int scheduleDate) { } @@ -329,7 +329,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific private ContentPreviewViewer.ContentPreviewViewerDelegate contentPreviewViewerDelegate = new ContentPreviewViewer.ContentPreviewViewerDelegate() { @Override public void sendSticker(TLRPC.Document sticker, String query, Object parent, boolean notify, int scheduleDate) { - delegate.onStickerSelected(null, sticker, query, parent, notify, scheduleDate); + delegate.onStickerSelected(null, sticker, query, parent, null, notify, scheduleDate); } @Override @@ -1637,7 +1637,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific return; } cell.disable(); - delegate.onStickerSelected(cell, cell.getSticker(), query, cell.getParentObject(), true, 0); + delegate.onStickerSelected(cell, cell.getSticker(), query, cell.getParentObject(), cell.getSendAnimationData(), true, 0); }; stickersGridView.setOnItemClickListener(stickersOnItemClickListener); stickersGridView.setGlowColor(Theme.getColor(Theme.key_chat_emojiPanelBackground)); @@ -1697,7 +1697,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific @Override public void onStickerSelected(TLRPC.Document sticker, Object parent, boolean clearsInputField, boolean notify, int scheduleDate) { - delegate.onStickerSelected(null, sticker, null, parent, notify, scheduleDate); + delegate.onStickerSelected(null, sticker, null, parent, null, notify, scheduleDate); } @Override @@ -3351,7 +3351,7 @@ public class EmojiView extends 
FrameLayout implements NotificationCenter.Notific @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.newEmojiSuggestionsAvailable); if (stickersGridAdapter != null) { NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.stickersDidLoad); @@ -3393,7 +3393,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific } public void onDestroy() { - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.newEmojiSuggestionsAvailable); if (stickersGridAdapter != null) { NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.stickersDidLoad); @@ -3619,7 +3619,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific if (info != null && info.stickerset != null && info.stickerset.id == (Long) args[0]) { updateStickerTabs(); } - } else if (id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.emojiLoaded) { if (stickersGridView != null) { int count = stickersGridView.getChildCount(); for (int a = 0; a < count; a++) { @@ -5289,7 +5289,7 @@ public class EmojiView extends FrameLayout implements NotificationCenter.Notific case 0: { TLRPC.Document sticker = (TLRPC.Document) cache.get(position); StickerEmojiCell cell = (StickerEmojiCell) holder.itemView; - cell.setSticker(sticker, cacheParent.get(position), positionToEmoji.get(position), false); + cell.setSticker(sticker, null, cacheParent.get(position), positionToEmoji.get(position), 
false); cell.setRecent(recentStickers.contains(sticker) || favouriteStickers.contains(sticker)); break; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FillLastGridLayoutManager.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FillLastGridLayoutManager.java index 66c7a9aa1..a0d3ab35a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FillLastGridLayoutManager.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FillLastGridLayoutManager.java @@ -10,11 +10,17 @@ import androidx.recyclerview.widget.RecyclerView; public class FillLastGridLayoutManager extends GridLayoutManager { private SparseArray heights = new SparseArray<>(); - private int lastItemHeight = -1; + protected int lastItemHeight = -1; private int listHeight; private int listWidth; private int additionalHeight; private RecyclerView listView; + private boolean bind = true; + private boolean canScrollVertically = true; + + public void setBind(boolean bind) { + this.bind = bind; + } public FillLastGridLayoutManager(Context context, int spanCount, int h, RecyclerView recyclerView) { super(context, spanCount); @@ -34,7 +40,7 @@ public class FillLastGridLayoutManager extends GridLayoutManager { } @SuppressWarnings("unchecked") - private void calcLastItemHeight() { + protected void calcLastItemHeight() { if (listHeight <= 0 || !shouldCalcLastItemHeight()) { return; } @@ -69,7 +75,10 @@ public class FillLastGridLayoutManager extends GridLayoutManager { holder.itemView.setLayoutParams(generateDefaultLayoutParams()); } } - adapter.onBindViewHolder(holder, a); + + if (bind) { + adapter.onBindViewHolder(holder, a); + } final RecyclerView.LayoutParams lp = (RecyclerView.LayoutParams) holder.itemView.getLayoutParams(); final int widthSpec = getChildMeasureSpec(listWidth, getWidthMode(), getPaddingLeft() + getPaddingRight() + lp.leftMargin + lp.rightMargin, lp.width, canScrollHorizontally()); @@ -152,4 +161,13 @@ public class FillLastGridLayoutManager 
extends GridLayoutManager { protected boolean shouldCalcLastItemHeight() { return true; } + + public void setCanScrollVertically(boolean value) { + canScrollVertically = value; + } + + @Override + public boolean canScrollVertically() { + return canScrollVertically; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterShaders.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterShaders.java index a117f9d3a..8e120b6c4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterShaders.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterShaders.java @@ -123,7 +123,7 @@ public class FilterShaders { "}"; private static String vertexShaderForOptimizedBlurOfRadius(int blurRadius, float sigma) { - float[] standardGaussianWeights = new float[blurRadius + 1]; + float[] standardGaussianWeights = new float[blurRadius * 2 + 1]; float sumOfWeights = 0.0f; for (int currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) { standardGaussianWeights[currentGaussianWeightIndex] = (float) ((1.0 / Math.sqrt(2.0 * Math.PI * Math.pow(sigma, 2.0))) * Math.exp(-Math.pow(currentGaussianWeightIndex, 2.0) / (2.0 * Math.pow(sigma, 2.0)))); @@ -165,7 +165,7 @@ public class FilterShaders { } private static String fragmentShaderForOptimizedBlurOfRadius(int blurRadius, float sigma) { - float[] standardGaussianWeights = new float[blurRadius + 1]; + float[] standardGaussianWeights = new float[blurRadius * 2 + 1]; float sumOfWeights = 0.0f; for (int currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) { standardGaussianWeights[currentGaussianWeightIndex] = (float) ((1.0 / Math.sqrt(2.0 * Math.PI * Math.pow(sigma, 2.0))) * Math.exp(-Math.pow(currentGaussianWeightIndex, 2.0) / (2.0 * Math.pow(sigma, 2.0)))); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterTabsView.java 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterTabsView.java index c2c649252..e534cc65b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterTabsView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FilterTabsView.java @@ -1208,7 +1208,10 @@ public class FilterTabsView extends FrameLayout { additionalTabWidth = trueTabsWidth < width ? (width - trueTabsWidth) / tabs.size() : 0; if (prevWidth != additionalTabWidth) { ignoreLayout = true; + RecyclerView.ItemAnimator animator = listView.getItemAnimator(); + listView.setItemAnimator(null); adapter.notifyDataSetChanged(); + listView.setItemAnimator(animator); ignoreLayout = false; } updateTabsWidths(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FiltersListBottomSheet.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FiltersListBottomSheet.java index 5b8885716..5d790bf6c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FiltersListBottomSheet.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FiltersListBottomSheet.java @@ -213,7 +213,7 @@ public class FiltersListBottomSheet extends BottomSheet implements NotificationC titleTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); containerView.addView(titleTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.LEFT | Gravity.TOP, 0, 0, 40, 0)); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); } @Override @@ -284,12 +284,12 @@ public class FiltersListBottomSheet extends BottomSheet implements NotificationC @Override public void dismiss() { super.dismiss(); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); } @Override public void 
didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { if (listView != null) { int count = listView.getChildCount(); for (int a = 0; a < count; a++) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FlatCheckBox.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FlatCheckBox.java index a02d3c42b..3e0ef0e45 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FlatCheckBox.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FlatCheckBox.java @@ -174,8 +174,6 @@ public class FlatCheckBox extends View { canvas.drawLine((int) AndroidUtilities.dpf2(7f), (int) AndroidUtilities.dpf2(13f), endX, endY, checkPaint); } canvas.restore(); - - } public void denied() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java index b3813ff43..4bfe6c082 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FlickerLoadingView.java @@ -5,7 +5,6 @@ import android.graphics.Canvas; import android.graphics.LinearGradient; import android.graphics.Matrix; import android.graphics.Paint; -import android.graphics.Path; import android.graphics.RectF; import android.graphics.Shader; import android.os.SystemClock; @@ -16,6 +15,8 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.SharedConfig; import org.telegram.ui.ActionBar.Theme; +import java.util.Random; + public class FlickerLoadingView extends View { public final static int DIALOG_TYPE = 1; @@ -28,6 +29,7 @@ public class FlickerLoadingView extends View { public final static int CALL_LOG_TYPE = 8; public final static int INVITE_LINKS_TYPE = 9; public final static int USERS2_TYPE = 10; + public final static int BOTS_MENU_TYPE = 11; private int 
gradientWidth; private LinearGradient gradient; @@ -54,8 +56,17 @@ public class FlickerLoadingView extends View { private String colorKey3; private int itemsCount = 1; + float[] randomParams; + public void setViewType(int type) { this.viewType = type; + if (viewType == BOTS_MENU_TYPE) { + Random random = new Random(); + randomParams = new float[2]; + for (int i = 0; i < 2; i++) { + randomParams[i] = Math.abs(random.nextInt() % 1000) / 1000f; + } + } invalidate(); } @@ -352,6 +363,27 @@ public class FlickerLoadingView extends View { canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); } + h += getCellHeight(getMeasuredWidth()); + k++; + if (isSingleCell && k >= itemsCount) { + break; + } + } + } else if (getViewType() == BOTS_MENU_TYPE) { + int k = 0; + while (h <= getMeasuredHeight()) { + rectF.set(AndroidUtilities.dp(18), AndroidUtilities.dp((36 - 8) / 2f), getMeasuredWidth() * 0.5f + AndroidUtilities.dp(40 * randomParams[0]), AndroidUtilities.dp((36 - 8) / 2f) + AndroidUtilities.dp(8)); + checkRtl(rectF); + canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); + + rectF.set(getMeasuredWidth() - AndroidUtilities.dp(18), AndroidUtilities.dp((36 - 8) / 2f), getMeasuredWidth() - getMeasuredWidth() * 0.2f -AndroidUtilities.dp(20 * randomParams[0]), AndroidUtilities.dp((36 - 8) / 2f) + AndroidUtilities.dp(8)); + checkRtl(rectF); + canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); + +// rectF.set(AndroidUtilities.dp(), AndroidUtilities.dp((36 - 8) / 2), AndroidUtilities.dp(268), AndroidUtilities.dp((36 - 8) / 2) + AndroidUtilities.dp(8)); +// checkRtl(rectF); +// canvas.drawRoundRect(rectF, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); + h += getCellHeight(getMeasuredWidth()); k++; if (isSingleCell && k >= itemsCount) { @@ -419,6 +451,8 @@ public class FlickerLoadingView extends View { return AndroidUtilities.dp(58); } else if (getViewType() == CALL_LOG_TYPE) { 
return AndroidUtilities.dp(61); + } else if (getViewType() == BOTS_MENU_TYPE) { + return AndroidUtilities.dp(36); } return 0; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java index a0514a6f9..5eea18534 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java @@ -63,7 +63,6 @@ import org.telegram.messenger.R; import org.telegram.messenger.SendMessagesHelper; import org.telegram.messenger.UserConfig; import org.telegram.messenger.UserObject; -import org.telegram.messenger.voip.VoIPBaseService; import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; @@ -79,7 +78,7 @@ import org.telegram.ui.LocationActivity; import java.util.ArrayList; -public class FragmentContextView extends FrameLayout implements NotificationCenter.NotificationCenterDelegate, VoIPBaseService.StateListener { +public class FragmentContextView extends FrameLayout implements NotificationCenter.NotificationCenterDelegate, VoIPService.StateListener { private ImageView playButton; private PlayPauseDrawable playPauseDrawable; @@ -290,7 +289,6 @@ public class FragmentContextView extends FrameLayout implements NotificationCent importingImageView.setBackground(Theme.createCircleDrawable(AndroidUtilities.dp(22), Theme.getColor(Theme.key_inappPlayerPlayPause))); addView(importingImageView, LayoutHelper.createFrame(22, 22, Gravity.TOP | Gravity.LEFT, 7, 7, 0, 0)); - titleTextView = new AudioPlayerAlert.ClippingTextViewSwitcher(context) { @Override protected TextView createTextView() { @@ -627,7 +625,7 @@ public class FragmentContextView extends FrameLayout implements NotificationCent if (importingHistory == null) { return; } - ImportingAlert importingAlert = new ImportingAlert(getContext(), 
(ChatActivity) fragment); + ImportingAlert importingAlert = new ImportingAlert(getContext(), null, (ChatActivity) fragment); importingAlert.setOnHideListener(dialog -> checkImport(false)); fragment.showDialog(importingAlert); checkImport(false); @@ -1911,8 +1909,8 @@ public class FragmentContextView extends FrameLayout implements NotificationCent titleTextView.setTranslationX(0); subtitleTextView.setTranslationX(0); } - titleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, Gravity.LEFT | Gravity.TOP, x, 5, 36, 0)); - subtitleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, Gravity.LEFT | Gravity.TOP, x, 25, 36, 0)); + titleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, Gravity.LEFT | Gravity.TOP, x, 5, call.isScheduled() ? 90 : 36, 0)); + subtitleTextView.setLayoutParams(LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 20, Gravity.LEFT | Gravity.TOP, x, 25, call.isScheduled() ? 90 : 36, 0)); } } else { avatars.updateAfterTransitionEnd(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallFullscreenAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallFullscreenAdapter.java new file mode 100644 index 000000000..914a3404a --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallFullscreenAdapter.java @@ -0,0 +1,645 @@ +package org.telegram.ui.Components; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Path; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.text.TextPaint; +import android.text.TextUtils; +import android.view.Gravity; +import android.view.View; +import android.view.ViewGroup; +import 
android.widget.FrameLayout; +import android.widget.ImageView; + +import androidx.annotation.NonNull; +import androidx.core.graphics.ColorUtils; +import androidx.recyclerview.widget.DiffUtil; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.MessageObject; +import org.telegram.messenger.UserObject; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.voip.GroupCallMiniTextureView; +import org.telegram.ui.Components.voip.GroupCallRenderersContainer; +import org.telegram.ui.Components.voip.GroupCallStatusIcon; +import org.telegram.ui.GroupCallActivity; + +import java.util.ArrayList; + +public class GroupCallFullscreenAdapter extends RecyclerListView.SelectionAdapter { + + private ChatObject.Call groupCall; + private final int currentAccount; + + private final ArrayList videoParticipants = new ArrayList<>(); + private final ArrayList participants = new ArrayList<>(); + + private ArrayList attachedRenderers; + private GroupCallRenderersContainer renderersContainer; + private final GroupCallActivity activity; + private boolean visible = false; + + public GroupCallFullscreenAdapter(ChatObject.Call groupCall, int currentAccount, GroupCallActivity activity) { + this.groupCall = groupCall; + this.currentAccount = currentAccount; + this.activity = activity; + } + + public void setRenderersPool(ArrayList attachedRenderers, GroupCallRenderersContainer renderersContainer) { + this.attachedRenderers = attachedRenderers; + this.renderersContainer = renderersContainer; + } + + public void setGroupCall(ChatObject.Call groupCall) { + this.groupCall = groupCall; + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + return false; 
+ } + + @NonNull + @Override + public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + return new RecyclerListView.Holder(new GroupCallUserCell(parent.getContext())); + } + + @Override + public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { + GroupCallUserCell view = (GroupCallUserCell) holder.itemView; + ChatObject.VideoParticipant oldVideoParticipant = view.videoParticipant; + + ChatObject.VideoParticipant videoParticipant; + TLRPC.TL_groupCallParticipant participant; + if (position < videoParticipants.size()) { + videoParticipant = videoParticipants.get(position); + participant = videoParticipants.get(position).participant; + } else if (position - videoParticipants.size() < participants.size()){ + videoParticipant = null; + participant = participants.get(position - videoParticipants.size()); + } else { + return; + } + view.setParticipant(videoParticipant, participant); + + if (oldVideoParticipant != null && !oldVideoParticipant.equals(videoParticipant) && view.attached && view.getRenderer() != null) { + view.attachRenderer(false); + if (videoParticipant != null) { + view.attachRenderer(true); + } + } else if (view.attached) { + if (view.getRenderer() == null && videoParticipant != null && visible) { + view.attachRenderer(true); + } else if (view.getRenderer() != null && videoParticipant == null) { + view.attachRenderer(false); + } + } + + } + + @Override + public int getItemCount() { + return videoParticipants.size() + participants.size(); + } + + public void setVisibility(RecyclerListView listView, boolean visibility) { + visible = visibility; + for (int i = 0; i < listView.getChildCount(); i++) { + View view = listView.getChildAt(i); + if (view instanceof GroupCallUserCell) { + GroupCallUserCell cell = (GroupCallUserCell) view; + if (cell.getVideoParticipant() != null) { + ((GroupCallUserCell) view).attachRenderer(visibility); + } + } + } + } + + + public void 
scrollTo(ChatObject.VideoParticipant videoParticipant, RecyclerListView fullscreenUsersListView) { + LinearLayoutManager layoutManager = (LinearLayoutManager)fullscreenUsersListView.getLayoutManager(); + if (layoutManager == null) { + return; + } + for (int i = 0; i < videoParticipants.size(); i++) { + if (videoParticipants.get(i).equals(videoParticipant)) { + layoutManager.scrollToPositionWithOffset(i, AndroidUtilities.dp(13)); + break; + } + } + } + + public class GroupCallUserCell extends FrameLayout implements GroupCallStatusIcon.Callback { + + AvatarDrawable avatarDrawable = new AvatarDrawable(); + + private TLRPC.User currentUser; + private TLRPC.Chat currentChat; + + private BackupImageView avatarImageView; + boolean hasAvatar; + int peerId; + + ChatObject.VideoParticipant videoParticipant; + TLRPC.TL_groupCallParticipant participant; + + Paint backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + Paint selectionPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + float progress = 1f; + + GroupCallMiniTextureView renderer; + + String drawingName; + String name; + int nameWidth; + + TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); + + RLottieImageView muteButton; + + float selectionProgress; + boolean selected; + private boolean lastRaisedHand; + private boolean lastMuted; + + GroupCallStatusIcon statusIcon; + + org.telegram.ui.Cells.GroupCallUserCell.AvatarWavesDrawable avatarWavesDrawable = new org.telegram.ui.Cells.GroupCallUserCell.AvatarWavesDrawable(AndroidUtilities.dp(26), AndroidUtilities.dp(29)); + + public GroupCallUserCell(@NonNull Context context) { + super(context); + avatarDrawable.setTextSize((int) (AndroidUtilities.dp(18) / 1.15f)); + avatarImageView = new BackupImageView(context); + avatarImageView.setRoundRadius(AndroidUtilities.dp(20)); + addView(avatarImageView, LayoutHelper.createFrame(40, 40, Gravity.CENTER_HORIZONTAL, 0, 9, 0, 9)); + setWillNotDraw(false); + + 
backgroundPaint.setColor(Theme.getColor(Theme.key_voipgroup_listViewBackground)); + selectionPaint.setColor(Theme.getColor(Theme.key_voipgroup_speakingText)); + selectionPaint.setStyle(Paint.Style.STROKE); + selectionPaint.setStrokeWidth(AndroidUtilities.dp(2)); + textPaint.setColor(Color.WHITE); + + muteButton = new RLottieImageView(context) { + @Override + public void invalidate() { + super.invalidate(); + GroupCallUserCell.this.invalidate(); + } + }; + muteButton.setScaleType(ImageView.ScaleType.CENTER); + addView(muteButton, LayoutHelper.createFrame(24, 24)); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + textPaint.setTextSize(AndroidUtilities.dp(12)); + if (name != null) { + float maxWidth = AndroidUtilities.dp(46); + float textWidth = textPaint.measureText(name); + nameWidth = (int) Math.min(maxWidth, textWidth); + drawingName = TextUtils.ellipsize(name, textPaint, nameWidth, TextUtils.TruncateAt.END).toString(); + } + + super.onMeasure(MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(80), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(80), MeasureSpec.EXACTLY)); + } + + public void setParticipant(ChatObject.VideoParticipant videoParticipant, TLRPC.TL_groupCallParticipant participant) { + this.videoParticipant = videoParticipant; + this.participant = participant; + int lastPeerId = peerId; + peerId = MessageObject.getPeerId(participant.peer); + if (peerId > 0) { + currentUser = AccountInstance.getInstance(currentAccount).getMessagesController().getUser(peerId); + currentChat = null; + avatarDrawable.setInfo(currentUser); + + name = UserObject.getFirstName(currentUser); + avatarImageView.getImageReceiver().setCurrentAccount(currentAccount); + + ImageLocation imageLocation = ImageLocation.getForUser(currentUser, ImageLocation.TYPE_SMALL); + hasAvatar = imageLocation != null; + avatarImageView.setImage(imageLocation, "50_50", avatarDrawable, currentUser); + } else { + currentChat = 
AccountInstance.getInstance(currentAccount).getMessagesController().getChat(-peerId); + currentUser = null; + avatarDrawable.setInfo(currentChat); + + if (currentChat != null) { + name = currentChat.title; + avatarImageView.getImageReceiver().setCurrentAccount(currentAccount); + + ImageLocation imageLocation = ImageLocation.getForChat(currentChat, ImageLocation.TYPE_SMALL); + hasAvatar = imageLocation != null; + avatarImageView.setImage(imageLocation, "50_50", avatarDrawable, currentChat); + } + } + boolean animated = lastPeerId == peerId; + if (videoParticipant == null) { + selected = renderersContainer.fullscreenPeerId == MessageObject.getPeerId(participant.peer); + } else if (renderersContainer.fullscreenParticipant != null) { + selected = renderersContainer.fullscreenParticipant.equals(videoParticipant); + } else { + selected = false; + } + if (!animated) { + setSelectedProgress(selected ? 1f : 0f); + } + if (statusIcon != null) { + statusIcon.setParticipant(participant, animated); + updateState(animated); + } + } + + @Override + public void setAlpha(float alpha) { + super.setAlpha(alpha); + } + + public void setProgressToFullscreen(float progress) { + if (this.progress == progress) { + return; + } + this.progress = progress; + if (progress == 1f) { + avatarImageView.setTranslationY(0); + avatarImageView.setScaleX(1f); + avatarImageView.setScaleY(1f); + backgroundPaint.setAlpha(255); + + invalidate(); + if (renderer != null) { + renderer.invalidate(); + } + return; + } + float moveToCenter = avatarImageView.getTop() + avatarImageView.getMeasuredHeight() / 2f - getMeasuredHeight() / 2f; + float scaleFrom = AndroidUtilities.dp(46) / (float) AndroidUtilities.dp(40); + float s = scaleFrom * (1f - progress) + 1f * progress; + avatarImageView.setTranslationY(-moveToCenter * (1f - progress)); + + avatarImageView.setScaleX(s); + avatarImageView.setScaleY(s); + backgroundPaint.setAlpha((int) (255 * progress)); + + invalidate(); + if (renderer != null) { + 
renderer.invalidate(); + } + } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (renderer != null && renderer.isFullyVisible() && !activity.drawingForBlur) { + drawSelection(canvas); + return; + } + if (progress > 0) { + float p = getMeasuredWidth() / 2f * (1f - progress); + AndroidUtilities.rectTmp.set(p, p, getMeasuredWidth() - p, getMeasuredHeight() - p); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(13), AndroidUtilities.dp(13), backgroundPaint); + drawSelection(canvas); + } + + float cx = avatarImageView.getX() + avatarImageView.getMeasuredWidth() / 2; + float cy = avatarImageView.getY() + avatarImageView.getMeasuredHeight() / 2; + + avatarWavesDrawable.update(); + avatarWavesDrawable.draw(canvas, cx, cy, this); + + + float scaleFrom = AndroidUtilities.dp(46) / (float) AndroidUtilities.dp(40); + float s = scaleFrom * (1f - progress) + 1f * progress; + + avatarImageView.setScaleX(avatarWavesDrawable.getAvatarScale() * s); + avatarImageView.setScaleY(avatarWavesDrawable.getAvatarScale() * s); + + super.dispatchDraw(canvas); + } + + private void drawSelection(Canvas canvas) { + if (selected && selectionProgress != 1f) { + float selectedProgressLocal = selectionProgress + 16 / 150f; + if (selectedProgressLocal > 1f) { + selectedProgressLocal = 1f; + } else { + invalidate(); + } + setSelectedProgress(selectedProgressLocal); + } else if (!selected && selectionProgress != 0f) { + float selectedProgressLocal = selectionProgress - 16 / 150f; + if (selectedProgressLocal < 0) { + selectedProgressLocal = 0; + } else { + invalidate(); + } + setSelectedProgress(selectedProgressLocal); + } + + if (selectionProgress > 0) { + float p = getMeasuredWidth() / 2f * (1f - progress); + AndroidUtilities.rectTmp.set(p, p, getMeasuredWidth() - p, getMeasuredHeight() - p); + AndroidUtilities.rectTmp.inset(selectionPaint.getStrokeWidth() / 2, selectionPaint.getStrokeWidth() / 2); + canvas.drawRoundRect(AndroidUtilities.rectTmp, 
AndroidUtilities.dp(12), AndroidUtilities.dp(12), selectionPaint); + } + } + + private void setSelectedProgress(float p) { + if (selectionProgress != p) { + selectionProgress = p; + selectionPaint.setAlpha((int) (255 * p)); + } + } + + public int getPeerId() { + return peerId; + } + + public BackupImageView getAvatarImageView() { + return avatarImageView; + } + + public TLRPC.TL_groupCallParticipant getParticipant() { + return participant; + } + + public ChatObject.VideoParticipant getVideoParticipant() { + return videoParticipant; + } + + boolean attached; + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + if (visible && videoParticipant != null) { + attachRenderer(true); + } + attached = true; + if (activity.statusIconPool.size() > 0) { + statusIcon = activity.statusIconPool.remove(activity.statusIconPool.size() - 1); + } else { + statusIcon = new GroupCallStatusIcon(); + } + statusIcon.setCallback(this); + statusIcon.setImageView(muteButton); + statusIcon.setParticipant(participant, false); + updateState(false); + avatarWavesDrawable.setShowWaves(statusIcon.isSpeaking(), this); + if (!statusIcon.isSpeaking()) { + avatarWavesDrawable.setAmplitude(0); + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + attachRenderer(false); + attached = false; + if (statusIcon != null) { + activity.statusIconPool.add(statusIcon); + statusIcon.setImageView(null); + statusIcon.setCallback(null); + } + statusIcon = null; + } + + public void attachRenderer(boolean attach) { + if (activity.isDismissed()) { + return; + } + if (attach && this.renderer == null) { + this.renderer = GroupCallMiniTextureView.getOrCreate(attachedRenderers, renderersContainer, null, this, null, videoParticipant, groupCall, activity); + } else if (!attach) { + if (renderer != null) { + renderer.setSecondaryView(null); + } + renderer = null; + } + } + + public void setRenderer(GroupCallMiniTextureView renderer) { + this.renderer = 
renderer; + } + + public void drawOverlays(Canvas canvas) { + if (drawingName != null) { + canvas.save(); + int paddingStart = (getMeasuredWidth() - nameWidth - AndroidUtilities.dp(24)) / 2; + textPaint.setAlpha((int) (255 * progress * getAlpha())); + canvas.drawText(drawingName, paddingStart + AndroidUtilities.dp(22), AndroidUtilities.dp(58 + 11), textPaint); + canvas.restore(); + canvas.save(); + canvas.translate(paddingStart, AndroidUtilities.dp(53)); + if (muteButton.getDrawable() != null) { + muteButton.getDrawable().setAlpha((int) (255 * progress * getAlpha())); + muteButton.draw(canvas); + muteButton.getDrawable().setAlpha(255); + } + canvas.restore(); + } + } + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == muteButton) { + return true; + } + return super.drawChild(canvas, child, drawingTime); + } + + public float getProgressToFullscreen() { + return progress; + } + + public GroupCallMiniTextureView getRenderer() { + return renderer; + } + + public void setAmplitude(double value) { + if (statusIcon != null) { + statusIcon.setAmplitude(value); + } + avatarWavesDrawable.setAmplitude(value); + } + + int lastColor; + int lastWavesColor; + ValueAnimator colorAnimator; + + + public void updateState(boolean animated) { + if (statusIcon == null) { + return; + } + statusIcon.updateIcon(animated); + int newColor; + int newWavesColor; + if (statusIcon.isMutedByMe()) { + newWavesColor = newColor = Theme.getColor(Theme.key_voipgroup_mutedByAdminIcon); + } else if (statusIcon.isSpeaking()) { + newWavesColor = newColor = Theme.getColor(Theme.key_voipgroup_speakingText); + } else { + newColor = Theme.getColor(Theme.key_voipgroup_nameText); + newWavesColor = Theme.getColor(Theme.key_voipgroup_listeningText); + } + + + if (!animated) { + if (colorAnimator != null) { + colorAnimator.removeAllListeners(); + colorAnimator.cancel(); + } + lastColor = newColor; + lastWavesColor = newWavesColor; + 
muteButton.setColorFilter(new PorterDuffColorFilter(newColor, PorterDuff.Mode.MULTIPLY)); + textPaint.setColor(lastColor); + selectionPaint.setColor(newWavesColor); + avatarWavesDrawable.setColor(ColorUtils.setAlphaComponent(newWavesColor, (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); + invalidate(); + } else { + int colorFrom = lastColor; + int colorWavesFrom = lastWavesColor; + colorAnimator = ValueAnimator.ofFloat(0, 1f); + colorAnimator.addUpdateListener(valueAnimator -> { + lastColor = ColorUtils.blendARGB(colorFrom, newColor, (float) valueAnimator.getAnimatedValue()); + lastWavesColor = ColorUtils.blendARGB(colorWavesFrom, newWavesColor, (float) valueAnimator.getAnimatedValue()); + muteButton.setColorFilter(new PorterDuffColorFilter(lastColor, PorterDuff.Mode.MULTIPLY)); + textPaint.setColor(lastColor); + selectionPaint.setColor(lastWavesColor); + avatarWavesDrawable.setColor(ColorUtils.setAlphaComponent(lastWavesColor, (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); + invalidate(); + }); + colorAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + lastColor = newColor; + lastWavesColor = newWavesColor; + muteButton.setColorFilter(new PorterDuffColorFilter(lastColor, PorterDuff.Mode.MULTIPLY)); + textPaint.setColor(lastColor); + selectionPaint.setColor(lastWavesColor); + avatarWavesDrawable.setColor(ColorUtils.setAlphaComponent(lastWavesColor, (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); + } + }); + colorAnimator.start(); + } + } + + boolean skipInvalidate; + + @Override + public void invalidate() { + if (skipInvalidate) { + return; + } + skipInvalidate = true; + super.invalidate(); + if (renderer != null) { + renderer.invalidate(); + } else { + renderersContainer.invalidate(); + } + skipInvalidate = false; + } + + public boolean hasImage() { + return renderer != null && renderer.hasImage(); + } + + @Override + public void onStatusChanged() { + 
avatarWavesDrawable.setShowWaves(statusIcon.isSpeaking(), this); + updateState(true); + } + + public boolean isRemoving(RecyclerListView listView) { + return listView.getChildAdapterPosition(this) == RecyclerView.NO_POSITION; + } + } + + public void update(boolean animated, RecyclerListView listView) { + if (groupCall == null) { + return; + } + if (animated) { + ArrayList oldParticipants = new ArrayList<>(participants); + ArrayList oldVideoParticipants = new ArrayList<>(videoParticipants); + + participants.clear(); + participants.addAll(groupCall.visibleParticipants); + + videoParticipants.clear(); + videoParticipants.addAll(groupCall.visibleVideoParticipants); + + DiffUtil.calculateDiff(new DiffUtil.Callback() { + @Override + public int getOldListSize() { + return oldVideoParticipants.size() + oldParticipants.size(); + } + + @Override + public int getNewListSize() { + return videoParticipants.size() + participants.size(); + } + + @Override + public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) { + if (oldItemPosition < oldVideoParticipants.size() && newItemPosition < videoParticipants.size()) { + return oldVideoParticipants.get(oldItemPosition).equals(videoParticipants.get(newItemPosition)); + } + int oldItemPosition2 = oldItemPosition - oldVideoParticipants.size(); + int newItemPosition2 = newItemPosition - videoParticipants.size(); + if (newItemPosition2 >= 0 && newItemPosition2 < participants.size() && oldItemPosition2 >= 0 && oldItemPosition2 < oldParticipants.size()) { + return MessageObject.getPeerId(oldParticipants.get(oldItemPosition2).peer) == MessageObject.getPeerId(participants.get(newItemPosition2).peer); + } + + TLRPC.TL_groupCallParticipant oldParticipant; + TLRPC.TL_groupCallParticipant newParticipant; + if (oldItemPosition < oldVideoParticipants.size()) { + oldParticipant = oldVideoParticipants.get(oldItemPosition).participant; + } else { + oldParticipant = oldParticipants.get(oldItemPosition2); + } + + if (newItemPosition < 
videoParticipants.size()) { + newParticipant = videoParticipants.get(newItemPosition).participant; + } else { + newParticipant = participants.get(newItemPosition2); + } + if (MessageObject.getPeerId(oldParticipant.peer) == MessageObject.getPeerId(newParticipant.peer)) { + return true; + } + return false; + } + + @Override + public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { + return true; + } + }).dispatchUpdatesTo(this); + AndroidUtilities.updateVisibleRows(listView); + } else { + participants.clear(); + participants.addAll(groupCall.visibleParticipants); + + videoParticipants.clear(); + videoParticipants.addAll(groupCall.visibleVideoParticipants); + notifyDataSetChanged(); + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipAlertView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipAlertView.java index 70b6581f0..2b34f808c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipAlertView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipAlertView.java @@ -27,7 +27,6 @@ import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; -import org.telegram.messenger.voip.VoIPBaseService; import org.telegram.messenger.voip.VoIPService; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.voip.VoIPButtonsLayout; @@ -35,7 +34,7 @@ import org.telegram.ui.Components.voip.VoIPToggleButton; import org.telegram.ui.GroupCallActivity; import org.telegram.ui.LaunchActivity; -public class GroupCallPipAlertView extends LinearLayout implements VoIPBaseService.StateListener, NotificationCenter.NotificationCenterDelegate { +public class GroupCallPipAlertView extends LinearLayout implements VoIPService.StateListener, NotificationCenter.NotificationCenterDelegate { public static final int POSITION_LEFT = 0; public 
static final int POSITION_RIGHT = 1; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipButton.java index 734167536..eafad93d0 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipButton.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupCallPipButton.java @@ -23,14 +23,13 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.Utilities; -import org.telegram.messenger.voip.VoIPBaseService; import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; import java.util.Random; -public class GroupCallPipButton extends FrameLayout implements NotificationCenter.NotificationCenterDelegate, VoIPBaseService.StateListener { +public class GroupCallPipButton extends FrameLayout implements NotificationCenter.NotificationCenterDelegate, VoIPService.StateListener { Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); BlobDrawable blobDrawable = new BlobDrawable(8); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java index e9e4cf4ec..2a5156efb 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/GroupVoipInviteAlert.java @@ -197,6 +197,10 @@ public class GroupVoipInviteAlert extends UsersAlertBase { contacts.addAll(ContactsController.getInstance(currentAccount).contacts); int selfId = UserConfig.getInstance(currentAccount).clientUserId; for (int a = 0, N = contacts.size(); a < N; a++) { + TLObject object = contacts.get(a); + if (!(object instanceof TLRPC.TL_contact)) { + continue; + } int userId = ((TLRPC.TL_contact) contacts.get(a)).user_id; 
if (userId == selfId || ignoredUsers.indexOfKey(userId) >= 0 || invitedUsers.contains(userId)) { contacts.remove(a); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ImageUpdater.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ImageUpdater.java index e400484c7..831332e4b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ImageUpdater.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ImageUpdater.java @@ -89,6 +89,7 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega private boolean canSelectVideo; private boolean forceDarkTheme; + private boolean showingFromDialog; private final static int attach_photo = 0; @@ -250,7 +251,7 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega } final HashMap photos = new HashMap<>(); final ArrayList order = new ArrayList<>(); - PhotoPickerActivity fragment = new PhotoPickerActivity(0, null, photos, order, 1, false, null); + PhotoPickerActivity fragment = new PhotoPickerActivity(0, null, photos, order, 1, false, null, forceDarkTheme); fragment.setDelegate(new PhotoPickerActivity.PhotoPickerActivityDelegate() { private boolean sendPressed; @@ -301,7 +302,11 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega }); fragment.setMaxSelectedPhotos(1, false); fragment.setInitialSearchString(delegate.getInitialSearchString()); - parentFragment.presentFragment(fragment); + if (showingFromDialog) { + parentFragment.showAsSheet(fragment); + } else { + parentFragment.presentFragment(fragment); + } } private void openAttachMenu(DialogInterface.OnDismissListener onDismissListener) { @@ -325,12 +330,12 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega return; } if (chatAttachAlert == null) { - chatAttachAlert = new ChatAttachAlert(parentFragment.getParentActivity(), parentFragment, forceDarkTheme); + chatAttachAlert = new 
ChatAttachAlert(parentFragment.getParentActivity(), parentFragment, forceDarkTheme, showingFromDialog); chatAttachAlert.setAvatarPicker(canSelectVideo ? 2 : 1, searchAvailable); chatAttachAlert.setDelegate(new ChatAttachAlert.ChatAttachViewDelegate() { @Override - public void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate) { + public void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate, boolean forceDocument) { if (parentFragment == null || parentFragment.getParentActivity() == null || chatAttachAlert == null) { return; } @@ -383,7 +388,7 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega } didSelectPhotos(media); return; - } else if (chatAttachAlert != null) { + } else { chatAttachAlert.dismissWithButtonClick(button); } processSelectedAttach(button); @@ -461,8 +466,8 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega if (path != null) { bitmap = ImageLoader.loadBitmap(path.getAbsolutePath(), null, 800, 800, true); } else { - NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.fileLoadFailed); uploadingImage = FileLoader.getAttachFileName(photoSize.location); imageReceiver.setImage(ImageLocation.getForPhoto(photoSize, info.searchImage.photo), null, null, "jpg", null, 1); } @@ -479,8 +484,6 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.httpFileDidFailedLoad); imageReceiver.setImage(info.searchImage.imageUrl, null, null, "jpg", 1); } 
- } else { - bitmap = null; } } processBitmap(bitmap, avatarObject); @@ -555,7 +558,7 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega if (parentFragment == null) { return; } - if (Build.VERSION.SDK_INT >= 23 && parentFragment != null && parentFragment.getParentActivity() != null) { + if (Build.VERSION.SDK_INT >= 23 && parentFragment.getParentActivity() != null) { if (parentFragment.getParentActivity().checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { parentFragment.getParentActivity().requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE}, 4); return; @@ -616,7 +619,7 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega PhotoViewer.getInstance().setParentActivity(parentFragment.getParentActivity()); PhotoViewer.getInstance().openPhotoForSelect(arrayList, 0, PhotoViewer.SELECT_TYPE_AVATAR, false, new PhotoViewer.EmptyPhotoViewerProvider() { @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { String path = null; MediaController.PhotoEntry photoEntry = (MediaController.PhotoEntry) arrayList.get(0); if (photoEntry.imagePath != null) { @@ -740,9 +743,9 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega delegate.didStartUpload(false); } } - NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.FileDidUpload); - NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.FileUploadProgressChanged); - NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.FileDidFailUpload); + NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, 
NotificationCenter.fileUploaded); + NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.fileUploadProgressChanged); + NotificationCenter.getInstance(currentAccount).addObserver(ImageUpdater.this, NotificationCenter.fileUploadFailed); if (uploadingImage != null) { FileLoader.getInstance(currentAccount).uploadFile(uploadingImage, false, true, ConnectionsManager.FileTypePhoto); } @@ -772,16 +775,16 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.FileDidUpload || id == NotificationCenter.FileDidFailUpload) { + if (id == NotificationCenter.fileUploaded || id == NotificationCenter.fileUploadFailed) { String location = (String) args[0]; if (location.equals(uploadingImage)) { uploadingImage = null; - if (id == NotificationCenter.FileDidUpload) { + if (id == NotificationCenter.fileUploaded) { uploadedPhoto = (TLRPC.InputFile) args[1]; } } else if (location.equals(uploadingVideo)) { uploadingVideo = null; - if (id == NotificationCenter.FileDidUpload) { + if (id == NotificationCenter.fileUploaded) { uploadedVideo = (TLRPC.InputFile) args[1]; } } else { @@ -789,17 +792,17 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega } if (uploadingImage == null && uploadingVideo == null && convertingVideo == null) { - NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.FileDidUpload); - NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.FileUploadProgressChanged); - NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.FileDidFailUpload); - if (id == NotificationCenter.FileDidUpload) { + NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.fileUploaded); + 
NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.fileUploadProgressChanged); + NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.fileUploadFailed); + if (id == NotificationCenter.fileUploaded) { if (delegate != null) { delegate.didUploadPhoto(uploadedPhoto, uploadedVideo, videoTimestamp, videoPath, bigPhoto, smallPhoto); } } cleanup(); } - } else if (id == NotificationCenter.FileUploadProgressChanged) { + } else if (id == NotificationCenter.fileUploadProgressChanged) { String location = (String) args[0]; String path = convertingVideo != null ? uploadingVideo : uploadingImage; if (delegate != null && location.equals(path)) { @@ -808,16 +811,16 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega float progress = Math.min(1f, loadedSize / (float) totalSize); delegate.onUploadProgressChanged(progress); } - } else if (id == NotificationCenter.fileDidLoad || id == NotificationCenter.fileDidFailToLoad || id == NotificationCenter.httpFileDidLoad || id == NotificationCenter.httpFileDidFailedLoad) { + } else if (id == NotificationCenter.fileLoaded || id == NotificationCenter.fileLoadFailed || id == NotificationCenter.httpFileDidLoad || id == NotificationCenter.httpFileDidFailedLoad) { String path = (String) args[0]; if (path.equals(uploadingImage)) { - NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.fileLoadFailed); NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.httpFileDidLoad); 
NotificationCenter.getInstance(currentAccount).removeObserver(ImageUpdater.this, NotificationCenter.httpFileDidFailedLoad); uploadingImage = null; - if (id == NotificationCenter.fileDidLoad || id == NotificationCenter.httpFileDidLoad) { + if (id == NotificationCenter.fileLoaded || id == NotificationCenter.httpFileDidLoad) { Bitmap bitmap = ImageLoader.loadBitmap(finalPath, null, 800, 800, true); processBitmap(bitmap, null); } else { @@ -892,4 +895,8 @@ public class ImageUpdater implements NotificationCenter.NotificationCenterDelega public void setForceDarkTheme(boolean forceDarkTheme) { this.forceDarkTheme = forceDarkTheme; } + + public void setShowingFromDialog(boolean b) { + showingFromDialog = b; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ImportingAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ImportingAlert.java index 2d04a076d..35af7569a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ImportingAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ImportingAlert.java @@ -44,6 +44,8 @@ public class ImportingAlert extends BottomSheet implements NotificationCenter.No private RLottieDrawable completedDrawable; private TextView[] infoTextView = new TextView[2]; + private String stickersShortName; + public static class BottomSheetCell extends FrameLayout { private View background; @@ -110,11 +112,12 @@ public class ImportingAlert extends BottomSheet implements NotificationCenter.No } }; - public ImportingAlert(final Context context, ChatActivity chatActivity) { + public ImportingAlert(final Context context, String shortName, ChatActivity chatActivity) { super(context, false); setApplyBottomPadding(false); setApplyTopPadding(false); parentFragment = chatActivity; + stickersShortName = shortName; FrameLayout frameLayout = new FrameLayout(context); setCustomView(frameLayout); @@ -123,7 +126,6 @@ public class ImportingAlert extends BottomSheet implements NotificationCenter.No 
textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); textView.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); - textView.setText(LocaleController.getString("ImportImportingTitle", R.string.ImportImportingTitle)); textView.setSingleLine(true); textView.setEllipsize(TextUtils.TruncateAt.END); frameLayout.addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 17, 20, 17, 0)); @@ -138,17 +140,13 @@ public class ImportingAlert extends BottomSheet implements NotificationCenter.No frameLayout.addView(imageView, LayoutHelper.createFrame(160, 160, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 17, 79, 17, 0)); imageView.getAnimatedDrawable().setOnFinishCallback(onFinishCallback, 178); - SendMessagesHelper.ImportingHistory importingHistory = parentFragment.getSendMessagesHelper().getImportingHistory(parentFragment.getDialogId()); - percentTextView = new TextView(context); percentTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); percentTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 24); percentTextView.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); - percentTextView.setText(String.format("%d%%", importingHistory.uploadProgress)); frameLayout.addView(percentTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 17, 262, 17, 0)); lineProgressView = new LineProgressView(getContext()); - lineProgressView.setProgress(importingHistory.uploadProgress / 100.0f, false); lineProgressView.setProgressColor(Theme.getColor(Theme.key_featuredStickers_addButton)); lineProgressView.setBackColor(Theme.getColor(Theme.key_dialogLineProgressBackground)); frameLayout.addView(lineProgressView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 4, Gravity.LEFT | Gravity.TOP, 50, 307, 50, 0)); @@ -177,11 +175,7 @@ public class ImportingAlert 
extends BottomSheet implements NotificationCenter.No if (a == 0) { infoTextView[a].setText(LocaleController.getString("ImportImportingInfo", R.string.ImportImportingInfo)); - importCountTextView[a].setText(LocaleController.formatString("ImportCount", R.string.ImportCount, AndroidUtilities.formatFileSize(importingHistory.getUploadedCount()), AndroidUtilities.formatFileSize(importingHistory.getTotalCount()))); } else { - infoTextView[a].setText(LocaleController.getString("ImportDoneInfo", R.string.ImportDoneInfo)); - importCountTextView[a].setText(LocaleController.getString("ImportDoneTitle", R.string.ImportDoneTitle)); - infoTextView[a].setAlpha(0.0f); infoTextView[a].setTranslationY(AndroidUtilities.dp(10)); importCountTextView[a].setAlpha(0.0f); @@ -189,7 +183,27 @@ public class ImportingAlert extends BottomSheet implements NotificationCenter.No } } - parentFragment.getNotificationCenter().addObserver(this, NotificationCenter.historyImportProgressChanged); + if (parentFragment != null) { + textView.setText(LocaleController.getString("ImportImportingTitle", R.string.ImportImportingTitle)); + SendMessagesHelper.ImportingHistory importingHistory = parentFragment.getSendMessagesHelper().getImportingHistory(parentFragment.getDialogId()); + percentTextView.setText(String.format("%d%%", importingHistory.uploadProgress)); + lineProgressView.setProgress(importingHistory.uploadProgress / 100.0f, false); + importCountTextView[0].setText(LocaleController.formatString("ImportCount", R.string.ImportCount, AndroidUtilities.formatFileSize(importingHistory.getUploadedCount()), AndroidUtilities.formatFileSize(importingHistory.getTotalCount()))); + infoTextView[1].setText(LocaleController.getString("ImportDoneInfo", R.string.ImportDoneInfo)); + importCountTextView[1].setText(LocaleController.getString("ImportDoneTitle", R.string.ImportDoneTitle)); + + parentFragment.getNotificationCenter().addObserver(this, NotificationCenter.historyImportProgressChanged); + } else { + 
textView.setText(LocaleController.getString("ImportStickersImportingTitle", R.string.ImportStickersImportingTitle)); + SendMessagesHelper.ImportingStickers importingStickers = SendMessagesHelper.getInstance(currentAccount).getImportingStickers(shortName); + percentTextView.setText(String.format("%d%%", importingStickers.uploadProgress)); + lineProgressView.setProgress(importingStickers.uploadProgress / 100.0f, false); + importCountTextView[0].setText(LocaleController.formatString("ImportCount", R.string.ImportCount, AndroidUtilities.formatFileSize(importingStickers.getUploadedCount()), AndroidUtilities.formatFileSize(importingStickers.getTotalCount()))); + infoTextView[1].setText(LocaleController.getString("ImportStickersDoneInfo", R.string.ImportStickersDoneInfo)); + importCountTextView[1].setText(LocaleController.getString("ImportStickersDoneTitle", R.string.ImportStickersDoneTitle)); + + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.stickersImportProgressChanged); + } } public void setCompleted() { @@ -243,12 +257,38 @@ public class ImportingAlert extends BottomSheet implements NotificationCenter.No percentTextView.setText(String.format("%d%%", importingHistory.uploadProgress)); importCountTextView[0].setText(LocaleController.formatString("ImportCount", R.string.ImportCount, AndroidUtilities.formatFileSize(importingHistory.getUploadedCount()), AndroidUtilities.formatFileSize(importingHistory.getTotalCount()))); lineProgressView.setProgress(importingHistory.uploadProgress / 100.0f, true); + } else if (id == NotificationCenter.stickersImportProgressChanged) { + if (args.length > 1) { + dismiss(); + return; + } + + SendMessagesHelper.ImportingStickers importingStickers = SendMessagesHelper.getInstance(currentAccount).getImportingStickers(stickersShortName); + if (importingStickers == null) { + setCompleted(); + return; + } + if (!completed) { + double timeToEndAnimation = (180 - 
imageView.getAnimatedDrawable().getCurrentFrame()) * 16.6 + 3000; + if (timeToEndAnimation >= importingStickers.timeUntilFinish) { + imageView.setAutoRepeat(false); + completed = true; + } + } + + percentTextView.setText(String.format("%d%%", importingStickers.uploadProgress)); + importCountTextView[0].setText(LocaleController.formatString("ImportCount", R.string.ImportCount, AndroidUtilities.formatFileSize(importingStickers.getUploadedCount()), AndroidUtilities.formatFileSize(importingStickers.getTotalCount()))); + lineProgressView.setProgress(importingStickers.uploadProgress / 100.0f, true); } } @Override public void dismissInternal() { super.dismissInternal(); - parentFragment.getNotificationCenter().removeObserver(this, NotificationCenter.historyImportProgressChanged); + if (parentFragment != null) { + parentFragment.getNotificationCenter().removeObserver(this, NotificationCenter.historyImportProgressChanged); + } else { + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.stickersImportProgressChanged); + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java index f8e96dcb0..41f4a7411 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/InstantCameraView.java @@ -369,18 +369,18 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.FileDidUpload); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileUploaded); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); - NotificationCenter.getInstance(currentAccount).removeObserver(this, 
NotificationCenter.FileDidUpload); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileUploaded); } @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.FileDidUpload) { + if (id == NotificationCenter.fileUploaded) { final String location = (String) args[0]; if (cameraFile != null && cameraFile.getAbsolutePath().equals(location)) { file = (TLRPC.InputFile) args[1]; @@ -675,7 +675,7 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter if (videoEditedInfo.endTime > 0) { videoEditedInfo.endTime *= 1000; } - FileLoader.getInstance(currentAccount).cancelUploadFile(cameraFile.getAbsolutePath(), false); + FileLoader.getInstance(currentAccount).cancelFileUpload(cameraFile.getAbsolutePath(), false); } else { videoEditedInfo.estimatedSize = Math.max(1, size); } @@ -683,7 +683,7 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter videoEditedInfo.encryptedFile = encryptedFile; videoEditedInfo.key = key; videoEditedInfo.iv = iv; - baseFragment.sendMedia(new MediaController.PhotoEntry(0, 0, 0, cameraFile.getAbsolutePath(), 0, true, 0, 0, 0), videoEditedInfo, notify, scheduleDate); + baseFragment.sendMedia(new MediaController.PhotoEntry(0, 0, 0, cameraFile.getAbsolutePath(), 0, true, 0, 0, 0), videoEditedInfo, notify, scheduleDate, false); if (scheduleDate != 0) { startAnimation(false); } @@ -1992,13 +1992,13 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter if (send == 1) { if (baseFragment.isInScheduleMode()) { AlertsCreator.createScheduleDatePickerDialog(baseFragment.getParentActivity(), baseFragment.getDialogId(), (notify, scheduleDate) -> { - baseFragment.sendMedia(new MediaController.PhotoEntry(0, 0, 0, videoFile.getAbsolutePath(), 0, true, 0, 0, 0), videoEditedInfo, notify, scheduleDate); + baseFragment.sendMedia(new MediaController.PhotoEntry(0, 0, 0, 
videoFile.getAbsolutePath(), 0, true, 0, 0, 0), videoEditedInfo, notify, scheduleDate, false); startAnimation(false); }, () -> { startAnimation(false); }); } else { - baseFragment.sendMedia(new MediaController.PhotoEntry(0, 0, 0, videoFile.getAbsolutePath(), 0, true, 0, 0, 0), videoEditedInfo, true, 0); + baseFragment.sendMedia(new MediaController.PhotoEntry(0, 0, 0, videoFile.getAbsolutePath(), 0, true, 0, 0, 0), videoEditedInfo, true, 0, false); } } else { videoPlayer = new VideoPlayer(); @@ -2059,7 +2059,7 @@ public class InstantCameraView extends FrameLayout implements NotificationCenter MediaController.getInstance().requestAudioFocus(false); }); } else { - FileLoader.getInstance(currentAccount).cancelUploadFile(videoFile.getAbsolutePath(), false); + FileLoader.getInstance(currentAccount).cancelFileUpload(videoFile.getAbsolutePath(), false); videoFile.delete(); } EGL14.eglDestroySurface(eglDisplay, eglSurface); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/JoinCallAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/JoinCallAlert.java index a3292eedb..babc40753 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/JoinCallAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/JoinCallAlert.java @@ -19,6 +19,7 @@ import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.LinearLayout; +import android.widget.ScrollView; import android.widget.TextView; import org.telegram.messenger.AccountInstance; @@ -43,6 +44,7 @@ import org.telegram.ui.Cells.ShareDialogCell; import java.util.ArrayList; +import androidx.core.widget.NestedScrollView; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; @@ -349,7 +351,9 @@ public class JoinCallAlert extends BottomSheet { } }; linearLayout.setOrientation(LinearLayout.VERTICAL); - setCustomView(internalLayout = linearLayout); + NestedScrollView scrollView = 
new NestedScrollView(context); + scrollView.addView(internalLayout = linearLayout); + setCustomView(scrollView); } else { containerView = new FrameLayout(context) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/MediaActionDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/MediaActionDrawable.java index 9195d43d4..41275ae30 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/MediaActionDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/MediaActionDrawable.java @@ -34,6 +34,7 @@ public class MediaActionDrawable extends Drawable { public static final int ICON_CANCEL_NOPROFRESS = 12; public static final int ICON_CANCEL_PERCENT = 13; public static final int ICON_CANCEL_FILL = 14; + public static final int ICON_UPDATE = 15; private TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); private Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); @@ -80,6 +81,8 @@ public class MediaActionDrawable extends Drawable { private MediaActionDrawableDelegate delegate; private Theme.MessageDrawable messageDrawable; + private LinearGradient gradientDrawable; + private Matrix gradientMatrix; private boolean hasOverlayImage; public interface MediaActionDrawableDelegate { @@ -221,7 +224,7 @@ public class MediaActionDrawable extends Drawable { return downloadProgress; } - private float getCircleValue(float value) { + public static float getCircleValue(float value) { while (value > 360) { value -= 360; } @@ -240,6 +243,11 @@ public class MediaActionDrawable extends Drawable { messageDrawable = drawable; } + public void setBackgroundGradientDrawable(LinearGradient drawable) { + gradientDrawable = drawable; + gradientMatrix = new Matrix(); + } + public void setHasOverlayImage(boolean value) { hasOverlayImage = value; } @@ -273,6 +281,13 @@ public class MediaActionDrawable extends Drawable { paint.setShader(shader); paint2.setShader(shader); paint3.setShader(shader); + } else if (gradientDrawable != null && 
!hasOverlayImage) { + gradientMatrix.reset(); + gradientMatrix.setTranslate(0, bounds.top); + gradientDrawable.setLocalMatrix(gradientMatrix); + paint.setShader(gradientDrawable); + paint2.setShader(gradientDrawable); + paint3.setShader(gradientDrawable); } else { paint.setShader(null); paint2.setShader(null); @@ -435,7 +450,7 @@ public class MediaActionDrawable extends Drawable { alpha = 0; } rotation = 0; - } else if (nextIcon == ICON_PLAY || nextIcon == ICON_PAUSE || nextIcon == ICON_FILE || nextIcon == ICON_GIF || nextIcon == ICON_SECRETCHECK || nextIcon == ICON_FIRE || nextIcon == ICON_CHECK) { + } else if (nextIcon == ICON_UPDATE || nextIcon == ICON_PLAY || nextIcon == ICON_PAUSE || nextIcon == ICON_FILE || nextIcon == ICON_GIF || nextIcon == ICON_SECRETCHECK || nextIcon == ICON_FIRE || nextIcon == ICON_CHECK) { float progress; float backProgress; if (nextIcon == ICON_CHECK) { @@ -560,6 +575,11 @@ public class MediaActionDrawable extends Drawable { previowsDrawableScale = Math.max(0.0f, 1.0f - transitionProgress / 0.5f); } + if (nextIcon == ICON_UPDATE) { + nextPath = Theme.chat_updatePath; + } else if (currentIcon == ICON_UPDATE) { + previousPath = Theme.chat_updatePath; + } if (nextIcon == ICON_FILE) { nextPath = Theme.chat_filePath; } else if (currentIcon == ICON_FILE) { @@ -759,7 +779,9 @@ public class MediaActionDrawable extends Drawable { paint2.setAlpha(currentIcon == nextIcon ? 
255 : (int) ((1.0f - transitionProgress) * 255)); canvas.save(); canvas.translate(cx - w / 2, cy - h / 2); - canvas.drawPath(previousPath[0], paint2); + if (previousPath[0] != null) { + canvas.drawPath(previousPath[0], paint2); + } if (previousPath[1] != null) { canvas.drawPath(previousPath[1], backPaint); } @@ -773,7 +795,12 @@ public class MediaActionDrawable extends Drawable { paint2.setAlpha(alpha); canvas.save(); canvas.translate(cx - w / 2, cy - h / 2); - canvas.drawPath(nextPath[0], paint2); + if (nextPath[0] != null) { + canvas.drawPath(nextPath[0], paint2); + } + if (nextPath.length >= 3 && nextPath[2] != null) { + canvas.drawPath(nextPath[2], paint); + } if (nextPath[1] != null) { if (alpha != 255) { int backgroundAlpha = backPaint.getAlpha(); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/MotionBackgroundDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/MotionBackgroundDrawable.java new file mode 100644 index 000000000..77a6c2680 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/MotionBackgroundDrawable.java @@ -0,0 +1,437 @@ +package org.telegram.ui.Components; + +import android.graphics.Bitmap; +import android.graphics.BitmapShader; +import android.graphics.BlendMode; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.ColorFilter; +import android.graphics.ComposeShader; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.PixelFormat; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffXfermode; +import android.graphics.Rect; +import android.graphics.RectF; +import android.graphics.Shader; +import android.graphics.drawable.Drawable; +import android.os.Build; +import android.os.SystemClock; +import android.view.View; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.Utilities; + +import java.lang.ref.WeakReference; + +public class MotionBackgroundDrawable 
extends Drawable { + + private int[] colors = new int[]{ + 0xff426D57, + 0xffF7E48B, + 0xff87A284, + 0xffFDF6CA + }; + + private long lastUpdateTime; + private WeakReference parentView; + + private CubicBezierInterpolator interpolator = new CubicBezierInterpolator(0.33, 0.0, 0.0, 1.0); + + private int translationY; + + private boolean isPreview; + + private float posAnimationProgress = 1.0f; + private int phase; + + private RectF rect = new RectF(); + private Bitmap currentBitmap; + private Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG); + private Paint paint2 = new Paint(Paint.FILTER_BITMAP_FLAG); + private int intensity = 100; + + private Bitmap patternBitmap; + private BitmapShader bitmapShader; + private BitmapShader gradientShader; + private Matrix matrix; + + private Canvas legacyCanvas; + private Bitmap legacyBitmap; + + private boolean rotatingPreview; + + private android.graphics.Rect patternBounds = new android.graphics.Rect(); + + private int roundRadius; + + public MotionBackgroundDrawable() { + super(); + currentBitmap = Bitmap.createBitmap(60, 80, Bitmap.Config.ARGB_8888); + Utilities.generateGradient(currentBitmap, true, phase, interpolator.getInterpolation(posAnimationProgress), currentBitmap.getWidth(), currentBitmap.getHeight(), currentBitmap.getRowBytes(), colors); + if (Build.VERSION.SDK_INT >= 29) { + paint2.setBlendMode(BlendMode.SOFT_LIGHT); + } + } + + public MotionBackgroundDrawable(int c1, int c2, int c3, int c4, boolean preview) { + super(); + colors[0] = c1; + colors[1] = c2; + colors[2] = c3; + colors[3] = c4; + isPreview = preview; + if (Build.VERSION.SDK_INT >= 29) { + paint2.setBlendMode(BlendMode.SOFT_LIGHT); + } + currentBitmap = Bitmap.createBitmap(60, 80, Bitmap.Config.ARGB_8888); + Utilities.generateGradient(currentBitmap, true, phase, interpolator.getInterpolation(posAnimationProgress), currentBitmap.getWidth(), currentBitmap.getHeight(), currentBitmap.getRowBytes(), colors); + } + + public void setRoundRadius(int rad) { + 
roundRadius = rad; + matrix = new Matrix(); + bitmapShader = new BitmapShader(currentBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); + paint.setShader(bitmapShader); + invalidateParent(); + } + + public Bitmap getBitmap() { + return currentBitmap; + } + + public static boolean isDark(int color1, int color2, int color3, int color4) { + int averageColor = AndroidUtilities.getAverageColor(color1, color2); + if (color3 != 0) { + averageColor = AndroidUtilities.getAverageColor(averageColor, color3); + } + if (color4 != 0) { + averageColor = AndroidUtilities.getAverageColor(averageColor, color4); + } + float[] hsb = AndroidUtilities.RGBtoHSB(Color.red(averageColor), Color.green(averageColor), Color.blue(averageColor)); + return hsb[2] < 0.3f; + } + + @Override + public void setBounds(Rect bounds) { + super.setBounds(bounds); + patternBounds.set(bounds); + } + + public void setPatternBounds(int left, int top, int right, int bottom) { + patternBounds.set(left, top, right, bottom); + } + + public static int getPatternColor(int color1, int color2, int color3, int color4) { + if (isDark(color1, color2, color3, color4)) { + return Build.VERSION.SDK_INT < 29 ? 
0x7fffffff : 0xffffffff; + } else { + if (Build.VERSION.SDK_INT < 29) { + int averageColor = AndroidUtilities.getAverageColor(color3, AndroidUtilities.getAverageColor(color1, color2)); + if (color4 != 0) { + averageColor = AndroidUtilities.getAverageColor(color4, averageColor); + } + return (AndroidUtilities.getPatternColor(averageColor, true) & 0x00ffffff) | 0x64000000; + } else { + return 0xff000000; + } + } + } + + public int getPatternColor() { + return getPatternColor(colors[0], colors[1], colors[2], colors[3]); + } + + public int getPhase() { + return phase; + } + + public void rotatePreview() { + if (posAnimationProgress < 1.0f) { + return; + } + rotatingPreview = true; + posAnimationProgress = 0.0f; + invalidateParent(); + } + + public void setPhase(int value) { + phase = value; + if (phase < 0) { + phase = 0; + } else if (phase > 7) { + phase = 7; + } + Utilities.generateGradient(currentBitmap, true, phase, interpolator.getInterpolation(posAnimationProgress), currentBitmap.getWidth(), currentBitmap.getHeight(), currentBitmap.getRowBytes(), colors); + } + + public void switchToNextPosition() { + if (posAnimationProgress < 1.0f) { + return; + } + rotatingPreview = false; + posAnimationProgress = 0.0f; + phase--; + if (phase < 0) { + phase = 7; + } + invalidateParent(); + } + + public int[] getColors() { + return colors; + } + + public void setParentView(View view) { + parentView = new WeakReference<>(view); + } + + public void setColors(int c1, int c2, int c3, int c4) { + setColors(c1, c2, c3, c4, true); + } + + public void setColors(int c1, int c2, int c3, int c4, boolean invalidate) { + colors[0] = c1; + colors[1] = c2; + colors[2] = c3; + colors[3] = c4; + Utilities.generateGradient(currentBitmap, true, phase, interpolator.getInterpolation(posAnimationProgress), currentBitmap.getWidth(), currentBitmap.getHeight(), currentBitmap.getRowBytes(), colors); + if (invalidate) { + invalidateParent(); + } + } + + private void invalidateParent() { + if (parentView 
!= null && parentView.get() != null) { + parentView.get().invalidate(); + } + } + + public boolean hasPattern() { + return patternBitmap != null; + } + + @Override + public int getIntrinsicWidth() { + if (patternBitmap != null) { + return patternBitmap.getWidth(); + } + return super.getIntrinsicWidth(); + } + + @Override + public int getIntrinsicHeight() { + if (patternBitmap != null) { + return patternBitmap.getHeight(); + } + return super.getIntrinsicHeight(); + } + + public void setTranslationY(int y) { + translationY = y; + } + + public void setPatternBitmap(int intensity, Bitmap bitmap) { + this.intensity = intensity; + patternBitmap = bitmap; + if (Build.VERSION.SDK_INT >= 29) { + if (intensity >= 0) { + paint2.setBlendMode(BlendMode.SOFT_LIGHT); + } else { + paint2.setBlendMode(null); + } + } + if (intensity < 0) { + if (Build.VERSION.SDK_INT >= 28) { + bitmapShader = new BitmapShader(currentBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); + gradientShader = new BitmapShader(patternBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); + paint2.setShader(new ComposeShader(bitmapShader, gradientShader, PorterDuff.Mode.DST_IN)); + matrix = new Matrix(); + } else { + paint2.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_IN)); + } + } + } + + @Override + public void setBounds(int left, int top, int right, int bottom) { + super.setBounds(left, top, right, bottom); + patternBounds.set(left, top, right, bottom); + if (Build.VERSION.SDK_INT < 28 && intensity < 0) { + int w = right - left; + int h = bottom - top; + if (legacyBitmap == null || legacyBitmap.getWidth() != w || legacyBitmap.getHeight() != h) { + if (legacyBitmap != null) { + legacyBitmap.recycle(); + } + legacyBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); + legacyCanvas = new Canvas(legacyBitmap); + } + } + } + + @Override + public void draw(Canvas canvas) { + android.graphics.Rect bounds = getBounds(); + canvas.save(); + float tr = patternBitmap != null ? 
bounds.top : translationY; + int bitmapWidth = currentBitmap.getWidth(); + int bitmapHeight = currentBitmap.getHeight(); + float w = bounds.width(); + float h = bounds.height(); + float maxScale = Math.max(w / bitmapWidth, h / bitmapHeight); + float width = bitmapWidth * maxScale; + float height = bitmapHeight * maxScale; + float x = (w - width) / 2; + float y = (h - height) / 2; + if (isPreview) { + x += bounds.left; + y += bounds.top; + canvas.clipRect(bounds.left, bounds.top, bounds.right, bounds.bottom); + } + if (patternBitmap != null && intensity < 0) { + canvas.drawColor(0xff000000); + if (legacyBitmap != null) { + rect.set(0, 0, legacyBitmap.getWidth(), legacyBitmap.getHeight()); + legacyCanvas.drawBitmap(currentBitmap, null, rect, paint); + + bitmapWidth = patternBitmap.getWidth(); + bitmapHeight = patternBitmap.getHeight(); + maxScale = Math.max(w / bitmapWidth, h / bitmapHeight); + width = bitmapWidth * maxScale; + height = bitmapHeight * maxScale; + x = (w - width) / 2; + y = (h - height) / 2; + rect.set(x, y, x + width, y + height); + legacyCanvas.drawBitmap(patternBitmap, null, rect, paint2); + + rect.set(bounds.left, bounds.top, bounds.right, bounds.bottom); + canvas.drawBitmap(legacyBitmap, null, rect, paint); + } else { + matrix.reset(); + matrix.setTranslate(x, y + tr); + float scaleW = (currentBitmap.getWidth() / (float) bounds.width()); + float scaleH = (currentBitmap.getHeight() / (float) bounds.height()); + float scale = 1.0f / Math.min(scaleW, scaleH); + matrix.preScale(scale, scale); + bitmapShader.setLocalMatrix(matrix); + + matrix.reset(); + bitmapWidth = patternBitmap.getWidth(); + bitmapHeight = patternBitmap.getHeight(); + maxScale = Math.max(w / bitmapWidth, h / bitmapHeight); + width = bitmapWidth * maxScale; + height = bitmapHeight * maxScale; + x = (w - width) / 2; + y = (h - height) / 2; + matrix.setTranslate(x, y + tr); + matrix.preScale(maxScale, maxScale); + gradientShader.setLocalMatrix(matrix); + + rect.set(bounds.left, 
bounds.top, bounds.right, bounds.bottom); + canvas.drawRoundRect(rect, roundRadius, roundRadius, paint2); + } + } else { + if (roundRadius != 0) { + matrix.reset(); + matrix.setTranslate(x, y); + float scaleW = (currentBitmap.getWidth() / (float) bounds.width()); + float scaleH = (currentBitmap.getHeight() / (float) bounds.height()); + float scale = 1.0f / Math.min(scaleW, scaleH); + matrix.preScale(scale, scale); + bitmapShader.setLocalMatrix(matrix); + + rect.set(bounds.left, bounds.top, bounds.right, bounds.bottom); + canvas.drawRoundRect(rect, roundRadius, roundRadius, paint); + } else { + canvas.translate(0, tr); + rect.set(x, y, x + width, y + height); + canvas.drawBitmap(currentBitmap, null, rect, paint); + } + + if (patternBitmap != null) { + bitmapWidth = patternBitmap.getWidth(); + bitmapHeight = patternBitmap.getHeight(); + maxScale = Math.max(w / bitmapWidth, h / bitmapHeight); + width = bitmapWidth * maxScale; + height = bitmapHeight * maxScale; + x = (w - width) / 2; + y = (h - height) / 2; + rect.set(x, y, x + width, y + height); + canvas.drawBitmap(patternBitmap, null, rect, paint2); + } + } + canvas.restore(); + + long newTime = SystemClock.elapsedRealtime(); + long dt = newTime - lastUpdateTime; + if (dt > 20) { + dt = 17; + } + lastUpdateTime = newTime; + + if (posAnimationProgress < 1.0f) { + float progress; + if (rotatingPreview) { + int stageBefore; + float progressBefore = interpolator.getInterpolation(posAnimationProgress); + if (progressBefore <= 0.25f) { + stageBefore = 0; + } else if (progressBefore <= 0.5f) { + stageBefore = 1; + } else if (progressBefore <= 0.75f) { + stageBefore = 2; + } else { + stageBefore = 3; + } + posAnimationProgress += dt / 2000.0f; + if (posAnimationProgress > 1.0f) { + posAnimationProgress = 1.0f; + } + progress = interpolator.getInterpolation(posAnimationProgress); + if (stageBefore == 0 && progress > 0.25f || + stageBefore == 1 && progress > 0.5f || + stageBefore == 2 && progress > 0.75f) { + phase--; + if 
(phase < 0) { + phase = 7; + } + } + if (progress <= 0.25f) { + progress /= 0.25f; + } else if (progress <= 0.5f) { + progress = (progress - 0.25f) / 0.25f; + } else if (progress <= 0.75f) { + progress = (progress - 0.5f) / 0.25f; + } else { + progress = (progress - 0.75f) / 0.25f; + } + } else { + posAnimationProgress += dt / 500.0f; + if (posAnimationProgress > 1.0f) { + posAnimationProgress = 1.0f; + } + progress = interpolator.getInterpolation(posAnimationProgress); + } + Utilities.generateGradient(currentBitmap, true, phase, progress, currentBitmap.getWidth(), currentBitmap.getHeight(), currentBitmap.getRowBytes(), colors); + invalidateParent(); + } + } + + @Override + public void setAlpha(int alpha) { + paint.setAlpha(alpha); + paint2.setAlpha(alpha); + } + + @Override + public void setColorFilter(ColorFilter colorFilter) { + + } + + @Override + public int getOpacity() { + return PixelFormat.TRANSPARENT; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PasscodeView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PasscodeView.java index f4aaa3231..4825964b8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PasscodeView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PasscodeView.java @@ -65,7 +65,7 @@ public class PasscodeView extends FrameLayout { void didAcceptedPassword(); } - private class AnimatingTextView extends FrameLayout { + private static class AnimatingTextView extends FrameLayout { private ArrayList characterTextViews; private ArrayList dotTextViews; @@ -1237,7 +1237,7 @@ public class PasscodeView extends FrameLayout { return; } if (backgroundDrawable != null) { - if (backgroundDrawable instanceof ColorDrawable || backgroundDrawable instanceof GradientDrawable) { + if (backgroundDrawable instanceof MotionBackgroundDrawable || backgroundDrawable instanceof ColorDrawable || backgroundDrawable instanceof GradientDrawable) { backgroundDrawable.setBounds(0, 0, 
getMeasuredWidth(), getMeasuredHeight()); backgroundDrawable.draw(canvas); } else { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java index a94a16a69..72715bc8b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhonebookShareAlert.java @@ -286,7 +286,12 @@ public class PhonebookShareAlert extends BottomSheet { } } if (!result.isEmpty()) { - vcard = result.get(0).restriction_reason; + TLRPC.User u = result.get(0); + vcard = u.restriction_reason; + if (TextUtils.isEmpty(firstName)) { + firstName = u.first_name; + lastName = u.last_name; + } } } currentUser = new TLRPC.TL_userContact_old2(); @@ -435,7 +440,7 @@ public class PhonebookShareAlert extends BottomSheet { @Override protected int computeScrollDeltaToGetChildRectOnScreen(Rect rect) { - if (linearLayout.getTop() != getPaddingTop()) { + if (focusingView == null || linearLayout.getTop() != getPaddingTop()) { return 0; } int delta = super.computeScrollDeltaToGetChildRectOnScreen(rect); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java index 0c65bc9ae..7696a5615 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PhotoViewerCaptionEnterView.java @@ -482,7 +482,7 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica } public void onCreate() { - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); sizeNotifierLayout.setDelegate(this); } @@ -492,7 +492,7 @@ public class 
PhotoViewerCaptionEnterView extends FrameLayout implements Notifica closeKeyboard(); } keyboardVisible = false; - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); if (sizeNotifierLayout != null) { sizeNotifierLayout.setDelegate(null); } @@ -809,7 +809,7 @@ public class PhotoViewerCaptionEnterView extends FrameLayout implements Notifica @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { if (emojiView != null) { emojiView.invalidateViews(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/PollVotesAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/PollVotesAlert.java index 6f433dd2a..bebefd6f6 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/PollVotesAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/PollVotesAlert.java @@ -42,7 +42,6 @@ import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ChatObject; import org.telegram.messenger.Emoji; -import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; @@ -1033,7 +1032,7 @@ public class PollVotesAlert extends BottomSheet { } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { if (section == 0 || row == 0 || queries != null && !queries.isEmpty()) { return false; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java index a716ee0fb..f4eb8457e 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ProfileGalleryView.java @@ -19,6 +19,9 @@ import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; +import androidx.annotation.Nullable; +import androidx.recyclerview.widget.RecyclerView; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.FileLoader; import org.telegram.messenger.ImageLoader; @@ -35,9 +38,6 @@ import org.telegram.ui.ProfileActivity; import java.util.ArrayList; -import androidx.annotation.Nullable; -import androidx.recyclerview.widget.RecyclerView; - public class ProfileGalleryView extends CircularViewPager implements NotificationCenter.NotificationCenterDelegate { private final PointF downPoint = new PointF(); @@ -82,8 +82,31 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio private boolean invalidateWithParent; PinchToZoomHelper pinchToZoomHelper; + private boolean hasActiveVideo; + private TLRPC.TL_groupCallParticipant participant; + + private int imagesLayerNum; + + public void setHasActiveVideo(boolean hasActiveVideo) { + this.hasActiveVideo = hasActiveVideo; + } + + public View findVideoActiveView() { + if (!hasActiveVideo) { + return null; + } + for (int i = 0; i < getChildCount(); i++) { + View view = getChildAt(i); + if (view instanceof TextureStubView) { + return view; + } + } + return null; + } private static class Item { + boolean isActiveVideo; + private View textureViewStubView; private AvatarImageView imageView; } @@ -116,6 +139,9 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { if (positionOffsetPixels == 0) { position = adapter.getRealPosition(position); + if (hasActiveVideo) { + position--; + } BackupImageView currentView = getCurrentItemView(); int count = 
getChildCount(); for (int a = 0; a < count; a++) { @@ -124,6 +150,9 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio continue; } int p = adapter.getRealPosition(adapter.imageViews.indexOf(child)); + if (hasActiveVideo) { + p--; + } BackupImageView imageView = (BackupImageView) child; ImageReceiver imageReceiver = imageView.getImageReceiver(); boolean currentAllow = imageReceiver.getAllowStartAnimation(); @@ -167,11 +196,15 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio setAdapter(adapter = new ViewPagerAdapter(getContext(), null, parentActionBar)); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.dialogPhotosLoaded); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.reloadDialogPhotos); } + public void setImagesLayerNum(int value) { + imagesLayerNum = value; + } + public ProfileGalleryView(Context context, long dialogId, ActionBar parentActionBar, RecyclerListView parentListView, ProfileActivity.AvatarImageView parentAvatarImageView, int parentClassGuid, Callback callback) { super(context); setVisibility(View.GONE); @@ -241,16 +274,16 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio }); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.dialogPhotosLoaded); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).addObserver(this, 
NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.reloadDialogPhotos); MessagesController.getInstance(currentAccount).loadDialogPhotos((int) dialogId, 80, 0, true, parentClassGuid); } public void onDestroy() { NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.dialogPhotosLoaded); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.reloadDialogPhotos); int count = getChildCount(); for (int a = 0; a < count; a++) { @@ -312,7 +345,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } final int action = ev.getAction(); - if (pinchToZoomHelper != null) { + if (pinchToZoomHelper != null && getCurrentItemView() != null) { if (action != MotionEvent.ACTION_DOWN && isDownReleased && !pinchToZoomHelper.isInOverlayMode()) { pinchToZoomHelper.checkPinchToZoom(MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0), this, getCurrentItemView().getImageReceiver(), null); } else if (pinchToZoomHelper.checkPinchToZoom(ev, this, getCurrentItemView().getImageReceiver(), null)) { @@ -514,7 +547,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } public boolean isLoadingCurrentVideo() { - if (videoLocations.get(getRealPosition()) 
== null) { + if (videoLocations.get(hasActiveVideo ? getRealPosition() - 1 : getRealPosition()) == null) { return false; } BackupImageView imageView = getCurrentItemView(); @@ -538,7 +571,14 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } public boolean isCurrentItemVideo() { - return videoLocations.get(getRealPosition()) != null; + int i = getRealPosition(); + if (hasActiveVideo) { + if (i == 0) { + return false; + } + i--; + } + return videoLocations.get(i) != null; } public ImageLocation getCurrentVideoLocation(ImageLocation thumbLocation, ImageLocation imageLocation) { @@ -567,7 +607,11 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } public int getRealCount() { - return photos.size(); + int size = photos.size(); + if (hasActiveVideo) { + size++; + } + return size; } public int getRealPosition(int position) { @@ -808,7 +852,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio addUploadingImage(currentUploadingImageLocation, curreantUploadingThumbLocation); } } - } else if (id == NotificationCenter.fileDidLoad) { + } else if (id == NotificationCenter.fileLoaded) { final String fileName = (String) args[0]; for (int i = 0; i < thumbsFileNames.size(); i++) { String fileName2 = videoFileNames.get(i); @@ -822,7 +866,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } } } - } else if (id == NotificationCenter.FileLoadProgressChanged) { + } else if (id == NotificationCenter.fileLoadProgressChanged) { String fileName = (String) args[0]; for (int i = 0; i < thumbsFileNames.size(); i++) { String fileName2 = videoFileNames.get(i); @@ -872,7 +916,11 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio @Override public boolean isViewFromObject(View view, Object object) { - return view == ((Item) object).imageView; + Item item = ((Item) object); + if (item.isActiveVideo) { + return view == 
item.textureViewStubView; + } + return view == item.imageView; } @Override @@ -884,7 +932,24 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio @Override public Item instantiateItem(ViewGroup container, int position) { final Item item = objects.get(position); + final int realPosition = getRealPosition(position); + if (hasActiveVideo && realPosition == 0) { + item.isActiveVideo = true; + if (item.textureViewStubView == null) { + item.textureViewStubView = new TextureStubView(context); + } + if (item.textureViewStubView.getParent() == null) { + container.addView(item.textureViewStubView); + } + return item; + } else { + item.isActiveVideo = false; + } + + if (item.textureViewStubView != null && item.textureViewStubView.getParent() != null) { + container.removeView(item.textureViewStubView); + } if (item.imageView == null) { item.imageView = new AvatarImageView(context, position, placeholderPaint); imageViews.set(position, item.imageView); @@ -895,9 +960,9 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } item.imageView.getImageReceiver().setAllowDecodeSingleFrame(true); - final int realPosition = getRealPosition(position); + int imageLocationPosition = hasActiveVideo ? realPosition - 1 : realPosition; boolean needProgress = false; - if (realPosition == 0) { + if (imageLocationPosition == 0) { Drawable drawable = parentAvatarImageView == null ? 
null : parentAvatarImageView.getImageReceiver().getDrawable(); if (drawable instanceof AnimatedFileDrawable && ((AnimatedFileDrawable) drawable).hasBitmap()) { AnimatedFileDrawable animatedFileDrawable = (AnimatedFileDrawable) drawable; @@ -905,7 +970,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio animatedFileDrawable.addSecondParentView(item.imageView); animatedFileDrawable.setInvalidateParentViewWithSecond(true); } else { - ImageLocation videoLocation = videoLocations.get(realPosition); + ImageLocation videoLocation = videoLocations.get(imageLocationPosition); item.imageView.isVideo = videoLocation != null; needProgress = true; String filter; @@ -914,36 +979,38 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } else { filter = null; } - ImageLocation location = thumbsLocations.get(realPosition); + ImageLocation location = thumbsLocations.get(imageLocationPosition); Bitmap thumb = (parentAvatarImageView == null || !createThumbFromParent) ? 
null : parentAvatarImageView.getImageReceiver().getBitmap(); + String parent = "avatar_" + dialogId; if (thumb != null) { - item.imageView.setImageMedia(videoLocations.get(realPosition), filter, imagesLocations.get(realPosition), null, thumb, imagesLocationsSizes.get(realPosition), 1, location); + item.imageView.setImageMedia(videoLocations.get(imageLocationPosition), filter, imagesLocations.get(imageLocationPosition), null, thumb, imagesLocationsSizes.get(imageLocationPosition), 1, parent); } else if (uploadingImageLocation != null) { - item.imageView.setImageMedia(videoLocations.get(realPosition), filter, imagesLocations.get(realPosition), null, uploadingImageLocation, null, null, imagesLocationsSizes.get(realPosition), 1, location); + item.imageView.setImageMedia(videoLocations.get(imageLocationPosition), filter, imagesLocations.get(imageLocationPosition), null, uploadingImageLocation, null, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); } else { String thumbFilter = location.photoSize instanceof TLRPC.TL_photoStrippedSize ? "b" : null; - item.imageView.setImageMedia(videoLocation, null, imagesLocations.get(realPosition), null, thumbsLocations.get(realPosition), thumbFilter, null, imagesLocationsSizes.get(realPosition), 1, location); + item.imageView.setImageMedia(videoLocation, null, imagesLocations.get(imageLocationPosition), null, thumbsLocations.get(imageLocationPosition), thumbFilter, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); } } } else { - final ImageLocation videoLocation = videoLocations.get(realPosition); + final ImageLocation videoLocation = videoLocations.get(imageLocationPosition); item.imageView.isVideo = videoLocation != null; needProgress = true; - ImageLocation location = thumbsLocations.get(realPosition); + ImageLocation location = thumbsLocations.get(imageLocationPosition); String filter = location.photoSize instanceof TLRPC.TL_photoStrippedSize ? 
"b" : null; - item.imageView.setImageMedia(videoLocation, null, imagesLocations.get(realPosition), null, thumbsLocations.get(realPosition), filter, null, imagesLocationsSizes.get(realPosition), 1, location); + String parent = "avatar_" + dialogId; + item.imageView.setImageMedia(videoLocation, null, imagesLocations.get(imageLocationPosition), null, thumbsLocations.get(imageLocationPosition), filter, null, imagesLocationsSizes.get(imageLocationPosition), 1, parent); } - if (imagesUploadProgress.get(realPosition) != null) { + if (imagesUploadProgress.get(imageLocationPosition) != null) { needProgress = true; } if (needProgress) { - item.imageView.radialProgress = radialProgresses.get(realPosition); + item.imageView.radialProgress = radialProgresses.get(imageLocationPosition); if (item.imageView.radialProgress == null) { item.imageView.radialProgress = new RadialProgress2(item.imageView); item.imageView.radialProgress.setOverrideAlpha(0.0f); item.imageView.radialProgress.setIcon(MediaActionDrawable.ICON_EMPTY, false, false); item.imageView.radialProgress.setColors(0x42000000, 0x42000000, Color.WHITE, Color.WHITE); - radialProgresses.append(realPosition, item.imageView.radialProgress); + radialProgresses.append(imageLocationPosition, item.imageView.radialProgress); } if (invalidateWithParent) { invalidate(); @@ -971,7 +1038,14 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio @Override public void destroyItem(ViewGroup container, int position, Object object) { - BackupImageView imageView = ((Item) object).imageView; + Item item = (Item) object; + if (item.textureViewStubView != null) { + container.removeView(item.textureViewStubView); + } + if (item.isActiveVideo) { + return; + } + BackupImageView imageView = item.imageView; if (imageView.getImageReceiver().hasStaticThumb()) { Drawable drawable = imageView.getImageReceiver().getDrawable(); if (drawable instanceof AnimatedFileDrawable) { @@ -998,7 +1072,11 @@ public class 
ProfileGalleryView extends CircularViewPager implements Notificatio } objects.clear(); imageViews.clear(); - for (int a = 0, N = imagesLocations.size() + getExtraCount() * 2; a < N; a++) { + int size = imagesLocations.size(); + if (hasActiveVideo) { + size++; + } + for (int a = 0, N = size + getExtraCount() * 2; a < N; a++) { objects.add(new Item()); imageViews.add(null); } @@ -1007,7 +1085,10 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio @Override public int getExtraCount() { - final int count = imagesLocations.size(); + int count = imagesLocations.size(); + if (hasActiveVideo) { + count++; + } if (count >= 2) { return getOffscreenPageLimit(); } else { @@ -1017,7 +1098,11 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio } public void setData(long dialogId) { - if (this.dialogId == dialogId) { + setData(dialogId, false); + } + + public void setData(long dialogId, boolean forceReset) { + if (this.dialogId == dialogId && !forceReset) { resetCurrentItem(); return; } @@ -1025,7 +1110,9 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio adapter.notifyDataSetChanged(); reset(); this.dialogId = dialogId; - MessagesController.getInstance(currentAccount).loadDialogPhotos((int) dialogId, 80, 0, true, parentClassGuid); + if (dialogId != 0) { + MessagesController.getInstance(currentAccount).loadDialogPhotos((int) dialogId, 80, 0, true, parentClassGuid); + } } private void reset() { @@ -1097,6 +1184,7 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio super(context); this.position = position; this.placeholderPaint = placeholderPaint; + setLayerNum(imagesLayerNum); } @Override @@ -1115,7 +1203,10 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio return; } if (radialProgress != null) { - final int realPosition = getRealPosition(position); + int realPosition = getRealPosition(position); + if (hasActiveVideo) { 
+ realPosition--; + } final Drawable drawable = getImageReceiver().getDrawable(); boolean hideProgress; if (realPosition < imagesUploadProgress.size() && imagesUploadProgress.get(realPosition) != null) { @@ -1136,11 +1227,12 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio radialProgressHideAnimator.setDuration((long) (radialProgressHideAnimatorStartValue * 250f)); radialProgressHideAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); radialProgressHideAnimator.addUpdateListener(anim -> radialProgress.setOverrideAlpha(AndroidUtilities.lerp(radialProgressHideAnimatorStartValue, 0f, anim.getAnimatedFraction()))); + int finalRealPosition = realPosition; radialProgressHideAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { radialProgress = null; - radialProgresses.delete(getRealPosition(position)); + radialProgresses.delete(finalRealPosition); } }); radialProgressHideAnimator.start(); @@ -1204,4 +1296,10 @@ public class ProfileGalleryView extends CircularViewPager implements Notificatio public void setInvalidateWithParent(boolean invalidateWithParent) { this.invalidateWithParent = invalidateWithParent; } + + private class TextureStubView extends View { + public TextureStubView(Context context) { + super(context); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java index 2f7e2e609..94161a8ed 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java @@ -42,7 +42,7 @@ import java.util.concurrent.TimeUnit; public class RLottieDrawable extends BitmapDrawable implements Animatable { - public static native long create(String src, int w, int h, int[] params, boolean precache, int[] colorReplacement, boolean limitFps); + public static native 
long create(String src, String json, int w, int h, int[] params, boolean precache, int[] colorReplacement, boolean limitFps); protected static native long createWithJson(String json, String name, int[] params, int[] colorReplacement); public static native void destroy(long ptr); private static native void setLayerColor(long ptr, String layer, int color); @@ -363,7 +363,26 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable { shouldLimitFps = limitFps; getPaint().setFlags(Paint.FILTER_BITMAP_FLAG); - nativePtr = create(file.getAbsolutePath(), w, h, metaData, precache, colorReplacement, shouldLimitFps); + nativePtr = create(file.getAbsolutePath(), null, w, h, metaData, precache, colorReplacement, shouldLimitFps); + if (precache && lottieCacheGenerateQueue == null) { + lottieCacheGenerateQueue = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>()); + } + if (nativePtr == 0) { + file.delete(); + } + if (shouldLimitFps && metaData[1] < 60) { + shouldLimitFps = false; + } + timeBetweenFrames = Math.max(shouldLimitFps ? 
33 : 16, (int) (1000.0f / metaData[1])); + } + + public RLottieDrawable(File file, String json, int w, int h, boolean precache, boolean limitFps, int[] colorReplacement) { + width = w; + height = h; + shouldLimitFps = limitFps; + getPaint().setFlags(Paint.FILTER_BITMAP_FLAG); + + nativePtr = create(file.getAbsolutePath(), json, w, h, metaData, precache, colorReplacement, shouldLimitFps); if (precache && lottieCacheGenerateQueue == null) { lottieCacheGenerateQueue = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>()); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgress2.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgress2.java index 2922fa3c7..88b70abb0 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgress2.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RadialProgress2.java @@ -11,6 +11,7 @@ package org.telegram.ui.Components; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; +import android.graphics.LinearGradient; import android.graphics.Paint; import android.graphics.RectF; import android.view.View; @@ -87,6 +88,10 @@ public class RadialProgress2 { overlayPaint.setColor(0x64000000); } + public void setAsMini() { + mediaActionDrawable.setMini(true); + } + public void setCircleRadius(int value) { circleRadius = value; overlayImageView.setRoundRadius(circleRadius); @@ -104,6 +109,11 @@ public class RadialProgress2 { miniMediaActionDrawable.setBackgroundDrawable(drawable); } + public void setBackgroundGradientDrawable(LinearGradient drawable) { + mediaActionDrawable.setBackgroundGradientDrawable(drawable); + miniMediaActionDrawable.setBackgroundGradientDrawable(drawable); + } + public void setImageOverlay(TLRPC.PhotoSize image, TLRPC.Document document, Object parentObject) { overlayImageView.setImage(ImageLocation.getForDocument(image, document), String.format(Locale.US, "%d_%d", 
circleRadius * 2, circleRadius * 2), null, null, parentObject, 1); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RadioButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RadioButton.java index fab2e3690..f54636982 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RadioButton.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RadioButton.java @@ -81,6 +81,10 @@ public class RadioButton extends View { size = value; } + public int getColor() { + return color; + } + public void setColor(int color1, int color2) { color = color1; checkedColor = color2; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecordStatusDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecordStatusDrawable.java index ce747f96b..217d336b7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecordStatusDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecordStatusDrawable.java @@ -23,6 +23,7 @@ public class RecordStatusDrawable extends StatusDrawable { private boolean started = false; private RectF rect = new RectF(); private float progress; + int alpha = 255; Paint currentPaint; @@ -77,11 +78,11 @@ public class RecordStatusDrawable extends StatusDrawable { canvas.translate(0, getIntrinsicHeight() / 2 + AndroidUtilities.dp(isChat ? 
1 : 2)); for (int a = 0; a < 4; a++) { if (a == 0) { - paint.setAlpha((int) (255 * progress)); + paint.setAlpha((int) (alpha * progress)); } else if (a == 3) { - paint.setAlpha((int) (255 * (1.0f - progress))); + paint.setAlpha((int) (alpha * (1.0f - progress))); } else { - paint.setAlpha(255); + paint.setAlpha(alpha); } float side = AndroidUtilities.dp(4) * a + AndroidUtilities.dp(4) * progress; rect.set(-side, -side, side, side); @@ -95,7 +96,7 @@ public class RecordStatusDrawable extends StatusDrawable { @Override public void setAlpha(int alpha) { - + this.alpha = alpha; } @Override diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerItemsEnterAnimator.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerItemsEnterAnimator.java index b26040d6a..e97242d51 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerItemsEnterAnimator.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerItemsEnterAnimator.java @@ -20,16 +20,18 @@ public class RecyclerItemsEnterAnimator { private final SparseArray listAlphaItems = new SparseArray<>(); HashSet ignoreView = new HashSet<>(); boolean invalidateAlpha; + boolean alwaysCheckItemsAlpha; ArrayList currentAnimations = new ArrayList<>(); ArrayList preDrawListeners = new ArrayList<>(); - public RecyclerItemsEnterAnimator(RecyclerListView listView) { + public RecyclerItemsEnterAnimator(RecyclerListView listView, boolean alwaysCheckItemsAlpha) { this.listView = listView; + this.alwaysCheckItemsAlpha = alwaysCheckItemsAlpha; } public void dispatchDraw() { - if (invalidateAlpha) { + if (invalidateAlpha || alwaysCheckItemsAlpha) { for (int i = 0; i < listView.getChildCount(); i++) { View child = listView.getChildAt(i); int position = listView.getChildAdapterPosition(child); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java index 
23657cdda..172073b5a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RecyclerListView.java @@ -48,7 +48,6 @@ import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.ArrayList; -import androidx.annotation.Nullable; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; @@ -205,7 +204,7 @@ public class RecyclerListView extends RecyclerView { @Override public boolean isEnabled(ViewHolder holder) { int position = holder.getAdapterPosition(); - return isEnabled(getSectionForPosition(position), getPositionInSectionForPosition(position)); + return isEnabled(holder, getSectionForPosition(position), getPositionInSectionForPosition(position)); } @Override @@ -290,7 +289,7 @@ public class RecyclerListView extends RecyclerView { public abstract int getSectionCount(); public abstract int getCountForSection(int section); - public abstract boolean isEnabled(int section, int row); + public abstract boolean isEnabled(ViewHolder holder, int section, int row); public abstract int getItemViewType(int section, int position); public abstract Object getItem(int section, int position); public abstract void onBindViewHolder(int section, int position, ViewHolder holder); @@ -592,7 +591,7 @@ public class RecyclerListView extends RecyclerView { child.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); child.sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_LONG_CLICKED); } - } else if (onItemLongClickListenerExtended != null) { + } else { if (onItemLongClickListenerExtended.onItemClick(currentChildView, currentChildPosition, event.getX() - currentChildView.getX(), event.getY() - currentChildView.getY())) { child.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); child.sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_LONG_CLICKED); @@ -668,9 +667,7 @@ public class RecyclerListView extends 
RecyclerView { if (currentChildView != null && !interceptedByChild) { try { - if (event != null) { - gestureDetector.onTouchEvent(event); - } + gestureDetector.onTouchEvent(event); } catch (Exception e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReportAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReportAlert.java index 195fd5541..d70b3973b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReportAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReportAlert.java @@ -17,6 +17,7 @@ import android.view.View; import android.view.inputmethod.EditorInfo; import android.widget.FrameLayout; import android.widget.LinearLayout; +import android.widget.ScrollView; import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; @@ -70,8 +71,12 @@ public class ReportAlert extends BottomSheet { setApplyBottomPadding(false); setApplyTopPadding(false); + ScrollView scrollView = new ScrollView(context); + scrollView.setFillViewport(true); + setCustomView(scrollView); + FrameLayout frameLayout = new FrameLayout(context); - setCustomView(frameLayout); + scrollView.addView(frameLayout, LayoutHelper.createScroll(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP)); RLottieImageView imageView = new RLottieImageView(context); imageView.setAnimation(R.raw.report_police, 120, 120); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java index de10b5ee5..bfa8d279d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ScrollSlidingTabStrip.java @@ -378,7 +378,11 @@ public class ScrollSlidingTabStrip extends HorizontalScrollView { imageLocation = ImageLocation.getForDocument(thumb, sticker); } else if (object instanceof 
TLRPC.PhotoSize) { TLRPC.PhotoSize thumb = (TLRPC.PhotoSize) object; - imageLocation = ImageLocation.getForSticker(thumb, sticker); + int thumbVersion = 0; + if (parentObject instanceof TLRPC.TL_messages_stickerSet) { + thumbVersion = ((TLRPC.TL_messages_stickerSet) parentObject).set.thumb_version; + } + imageLocation = ImageLocation.getForSticker(thumb, sticker, thumbVersion); } else { continue; } @@ -426,7 +430,11 @@ public class ScrollSlidingTabStrip extends HorizontalScrollView { imageLocation = ImageLocation.getForDocument(thumb, sticker); } else if (object instanceof TLRPC.PhotoSize) { TLRPC.PhotoSize thumb = (TLRPC.PhotoSize) object; - imageLocation = ImageLocation.getForSticker(thumb, sticker); + int thumbVersion = 0; + if (parentObject instanceof TLRPC.TL_messages_stickerSet) { + thumbVersion = ((TLRPC.TL_messages_stickerSet) parentObject).set.thumb_version; + } + imageLocation = ImageLocation.getForSticker(thumb, sticker, thumbVersion); } else { continue; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java index bfc0dcec8..ee28ef081 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SearchViewPager.java @@ -2,14 +2,10 @@ package org.telegram.ui.Components; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; -import android.animation.ObjectAnimator; -import android.animation.ValueAnimator; import android.content.Context; import android.graphics.Canvas; import android.os.Bundle; -import android.text.TextUtils; import android.view.View; -import android.view.ViewTreeObserver; import android.widget.FrameLayout; import androidx.recyclerview.widget.LinearLayoutManager; @@ -44,9 +40,7 @@ import org.telegram.ui.FilteredSearchView; import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import 
java.util.Iterator; -import java.util.Set; public class SearchViewPager extends ViewPagerFixed implements FilteredSearchView.UiCallback { @@ -54,7 +48,7 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie public RecyclerListView searchListView; public StickerEmptyView emptyView; public DialogsSearchAdapter dialogsSearchAdapter; - private LinearLayoutManager searchlayoutManager; + private LinearLayoutManager searchLayoutManager; private RecyclerItemsEnterAnimator itemsEnterAnimator; private boolean attached; @@ -127,7 +121,7 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie searchListView.setVerticalScrollBarEnabled(true); searchListView.setInstantClick(true); searchListView.setVerticalScrollbarPosition(LocaleController.isRTL ? RecyclerListView.SCROLLBAR_POSITION_LEFT : RecyclerListView.SCROLLBAR_POSITION_RIGHT); - searchListView.setLayoutManager(searchlayoutManager = new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false)); + searchListView.setLayoutManager(searchLayoutManager = new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false)); searchListView.setAnimateEmptyView(true, 0); searchListView.setOnScrollListener(new RecyclerView.OnScrollListener() { @Override @@ -139,10 +133,10 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie @Override public void onScrolled(RecyclerView recyclerView, int dx, int dy) { - int firstVisibleItem = searchlayoutManager.findFirstVisibleItemPosition(); - int visibleItemCount = Math.abs(searchlayoutManager.findLastVisibleItemPosition() - firstVisibleItem) + 1; + int firstVisibleItem = searchLayoutManager.findFirstVisibleItemPosition(); + int visibleItemCount = Math.abs(searchLayoutManager.findLastVisibleItemPosition() - firstVisibleItem) + 1; int totalItemCount = recyclerView.getAdapter().getItemCount(); - if (visibleItemCount > 0 && searchlayoutManager.findLastVisibleItemPosition() == totalItemCount - 1 && 
!dialogsSearchAdapter.isMessagesSearchEndReached()) { + if (visibleItemCount > 0 && searchLayoutManager.findLastVisibleItemPosition() == totalItemCount - 1 && !dialogsSearchAdapter.isMessagesSearchEndReached()) { dialogsSearchAdapter.loadMoreSearchMessages(); } } @@ -154,8 +148,7 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie noMediaFiltersSearchView.setChatPreviewDelegate(chatPreviewDelegate); searchContainer = new FrameLayout(context); - searchContainer.addView(searchListView); - searchContainer.addView(noMediaFiltersSearchView); + FlickerLoadingView loadingView = new FlickerLoadingView(context); loadingView.setViewType(1); @@ -176,9 +169,11 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie emptyView.showProgress(true, false); searchContainer.addView(emptyView); + searchContainer.addView(searchListView); + searchContainer.addView(noMediaFiltersSearchView); searchListView.setEmptyView(emptyView); - itemsEnterAnimator = new RecyclerItemsEnterAnimator(searchListView); + itemsEnterAnimator = new RecyclerItemsEnterAnimator(searchListView, true); setAdapter(new ViewPagerFixed.Adapter() { @@ -237,6 +232,7 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie int dialogId = 0; long minDate = 0; long maxDate = 0; + boolean includeFolder = false; for (int i = 0; i < currentSearchFilters.size(); i++) { FiltersView.MediaFilterData data = currentSearchFilters.get(i); if (data.filterType == FiltersView.FILTER_TYPE_CHAT) { @@ -248,13 +244,15 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie } else if (data.filterType == FiltersView.FILTER_TYPE_DATE) { minDate = data.dateData.minDate; maxDate = data.dateData.maxDate; + } else if (data.filterType == FiltersView.FILTER_TYPE_ARCHIVE) { + includeFolder = true; } } if (view == searchContainer) { if (dialogId == 0 && minDate == 0 && maxDate == 0) { lastSearchScrolledToTop = false; - 
dialogsSearchAdapter.searchDialogs(query); + dialogsSearchAdapter.searchDialogs(query, includeFolder ? 1 : 0); dialogsSearchAdapter.setFiltersDelegate(filteredSearchViewDelegate, false); noMediaFiltersSearchView.animate().setListener(null).cancel(); noMediaFiltersSearchView.setDelegate(null, false); @@ -294,14 +292,14 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie } noMediaFiltersSearchView.animate().alpha(1f).setDuration(150).start(); } - noMediaFiltersSearchView.search(dialogId, minDate, maxDate, null, query, reset); + noMediaFiltersSearchView.search(dialogId, minDate, maxDate, null, includeFolder, query, reset); emptyView.setVisibility(View.GONE); } emptyView.setKeyboardHeight(keyboardSize, false); noMediaFiltersSearchView.setKeyboardHeight(keyboardSize, false); } else { ((FilteredSearchView)view).setKeyboardHeight(keyboardSize, false); - ((FilteredSearchView)view).search(dialogId, minDate, maxDate, FiltersView.filters[position - 1], query, reset); + ((FilteredSearchView)view).search(dialogId, minDate, maxDate, FiltersView.filters[position - 1], includeFolder, query, reset); } } @@ -331,6 +329,9 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie if (isActionModeShowed == show) { return; } + if (show && parent.getActionBar().isActionModeShowed()) { + return; + } if (show && !parent.getActionBar().actionModeIsExist(actionModeTag)) { ActionBarMenu actionMode = parent.getActionBar().createActionMode(true, actionModeTag); @@ -402,7 +403,7 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie for (int a = 0; a < dids.size(); a++) { long did = dids.get(a); if (message != null) { - AccountInstance.getInstance(currentAccount).getSendMessagesHelper().sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0); + AccountInstance.getInstance(currentAccount).getSendMessagesHelper().sendMessage(message.toString(), did, null, null, null, true, null, 
null, null, true, 0, null); } AccountInstance.getInstance(currentAccount).getSendMessagesHelper().sendMessage(fmessages, did, true, 0); } @@ -607,7 +608,7 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie public void reset() { setPosition(0); if (dialogsSearchAdapter.getItemCount() > 0) { - searchlayoutManager.scrollToPositionWithOffset(0, 0); + searchLayoutManager.scrollToPositionWithOffset(0, 0); } viewsByType.clear(); } @@ -683,7 +684,7 @@ public class SearchViewPager extends ViewPagerFixed implements FilteredSearchVie } public void runResultsEnterAnimation() { - itemsEnterAnimator.showItemsAnimated(animateFromCount); + itemsEnterAnimator.showItemsAnimated(animateFromCount > 0 ? animateFromCount + 1 : 0); animateFromCount = dialogsSearchAdapter.getItemCount(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java index 070138408..0198100e5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SeekBarView.java @@ -35,7 +35,7 @@ public class SeekBarView extends FrameLayout { private int selectorWidth; private int thumbX; private int thumbDX; - private float progressToSet; + private float progressToSet = -100; private boolean pressed; public SeekBarViewDelegate delegate; private boolean reportChanges; @@ -47,6 +47,8 @@ public class SeekBarView extends FrameLayout { private float transitionProgress = 1f; private int transitionThumbX; + private boolean twoSided; + public interface SeekBarViewDelegate { void onSeekBarDrag(boolean stop, float progress); void onSeekBarPressed(boolean pressed); @@ -122,6 +124,14 @@ public class SeekBarView extends FrameLayout { } } + public void setTwoSided(boolean value) { + twoSided = value; + } + + public boolean isTwoSided() { + return twoSided; + } + public void setInnerColor(int color) { 
innerPaint1.setColor(color); } @@ -178,7 +188,16 @@ public class SeekBarView extends FrameLayout { } if (pressed) { if (ev.getAction() == MotionEvent.ACTION_UP) { - delegate.onSeekBarDrag(true, (float) thumbX / (float) (getMeasuredWidth() - selectorWidth)); + if (twoSided) { + float w = (getMeasuredWidth() - selectorWidth) / 2; + if (thumbX >= w) { + delegate.onSeekBarDrag(false, (thumbX - w) / w); + } else { + delegate.onSeekBarDrag(false, -Math.max(0.01f, 1.0f - (w - thumbX) / w)); + } + } else { + delegate.onSeekBarDrag(true, (float) thumbX / (float) (getMeasuredWidth() - selectorWidth)); + } } if (Build.VERSION.SDK_INT >= 21 && hoverDrawable != null) { hoverDrawable.setState(StateSet.NOTHING); @@ -227,7 +246,16 @@ public class SeekBarView extends FrameLayout { thumbX = getMeasuredWidth() - selectorWidth; } if (reportChanges) { - delegate.onSeekBarDrag(false, (float) thumbX / (float) (getMeasuredWidth() - selectorWidth)); + if (twoSided) { + float w = (getMeasuredWidth() - selectorWidth) / 2; + if (thumbX >= w) { + delegate.onSeekBarDrag(false, (thumbX - w) / w); + } else { + delegate.onSeekBarDrag(false, -Math.max(0.01f, 1.0f - (w - thumbX) / w)); + } + } else { + delegate.onSeekBarDrag(false, (float) thumbX / (float) (getMeasuredWidth() - selectorWidth)); + } } if (Build.VERSION.SDK_INT >= 21 && hoverDrawable != null) { hoverDrawable.setHotspot(ev.getX(), ev.getY()); @@ -256,8 +284,19 @@ public class SeekBarView extends FrameLayout { progressToSet = progress; return; } - progressToSet = -1; - int newThumbX = (int) Math.ceil((getMeasuredWidth() - selectorWidth) * progress); + progressToSet = -100; + int newThumbX; + if (twoSided) { + int w = getMeasuredWidth() - selectorWidth; + float cx = w / 2; + if (progress < 0) { + newThumbX = (int) Math.ceil(cx + w / 2 * -(1.0f + progress)); + } else { + newThumbX = (int) Math.ceil(cx + w / 2 * progress); + } + } else { + newThumbX = (int) Math.ceil((getMeasuredWidth() - selectorWidth) * progress); + } if (thumbX != 
newThumbX) { if (animated) { transitionThumbX = thumbX; @@ -281,9 +320,9 @@ public class SeekBarView extends FrameLayout { @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); - if (progressToSet >= 0 && getMeasuredWidth() > 0) { + if (progressToSet != -100 && getMeasuredWidth() > 0) { setProgress(progressToSet); - progressToSet = -1; + progressToSet = -100; } } @@ -305,7 +344,16 @@ public class SeekBarView extends FrameLayout { innerPaint1.setColor(Theme.getColor(Theme.key_player_progressCachedBackground)); canvas.drawRect(selectorWidth / 2, getMeasuredHeight() / 2 - AndroidUtilities.dp(1), selectorWidth / 2 + bufferedProgress * (getMeasuredWidth() - selectorWidth), getMeasuredHeight() / 2 + AndroidUtilities.dp(1), innerPaint1); } - canvas.drawRect(selectorWidth / 2, getMeasuredHeight() / 2 - AndroidUtilities.dp(1), selectorWidth / 2 + thumbX, getMeasuredHeight() / 2 + AndroidUtilities.dp(1), outerPaint1); + if (twoSided) { + canvas.drawRect(getMeasuredWidth() / 2 - AndroidUtilities.dp(1), getMeasuredHeight() / 2 - AndroidUtilities.dp(6), getMeasuredWidth() / 2 + AndroidUtilities.dp(1), getMeasuredHeight() / 2 + AndroidUtilities.dp(6), outerPaint1); + if (thumbX > (getMeasuredWidth() - selectorWidth) / 2) { + canvas.drawRect(getMeasuredWidth() / 2, getMeasuredHeight() / 2 - AndroidUtilities.dp(1), selectorWidth / 2 + thumbX, getMeasuredHeight() / 2 + AndroidUtilities.dp(1), outerPaint1); + } else { + canvas.drawRect(thumbX + selectorWidth / 2, getMeasuredHeight() / 2 - AndroidUtilities.dp(1), getMeasuredWidth() / 2, getMeasuredHeight() / 2 + AndroidUtilities.dp(1), outerPaint1); + } + } else { + canvas.drawRect(selectorWidth / 2, getMeasuredHeight() / 2 - AndroidUtilities.dp(1), selectorWidth / 2 + thumbX, getMeasuredHeight() / 2 + AndroidUtilities.dp(1), outerPaint1); + } if (hoverDrawable != null) { int dx = thumbX + selectorWidth / 2 - AndroidUtilities.dp(16); int dy = y + 
thumbSize / 2 - AndroidUtilities.dp(16); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java index ce6a19a2a..5a89664d1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ShareAlert.java @@ -1221,7 +1221,7 @@ public class ShareAlert extends BottomSheet implements NotificationCenter.Notifi for (int a = 0; a < selectedDialogs.size(); a++) { long key = selectedDialogs.keyAt(a); if (frameLayout2.getTag() != null && commentTextView.length() > 0) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(commentTextView.getText().toString(), key, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(commentTextView.getText().toString(), key, null, null, null, true, null, null, null, true, 0, null); } SendMessagesHelper.getInstance(currentAccount).sendMessage(sendingMessageObjects, key, true, 0); } @@ -1237,9 +1237,9 @@ public class ShareAlert extends BottomSheet implements NotificationCenter.Notifi for (int a = 0; a < selectedDialogs.size(); a++) { long key = selectedDialogs.keyAt(a); if (frameLayout2.getTag() != null && commentTextView.length() > 0) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(commentTextView.getText().toString(), key, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(commentTextView.getText().toString(), key, null, null, null, true, null, null, null, true, 0, null); } - SendMessagesHelper.getInstance(currentAccount).sendMessage(sendingText[num], key, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(sendingText[num], key, null, null, null, true, null, null, null, true, 0, null); } } onSend(selectedDialogs, 1); diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java index d0b28c3d2..836cff63d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SharedMediaLayout.java @@ -46,6 +46,8 @@ import androidx.recyclerview.widget.GridLayoutManager; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; +import com.google.android.exoplayer2.util.Log; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.ChatObject; @@ -1049,6 +1051,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter protected void onLayout(boolean changed, int l, int t, int r, int b) { super.onLayout(changed, l, t, r, b); checkLoadMoreScroll(mediaPage, mediaPage.listView, layoutManager); + if (mediaPage.selectedType == 0) { + PhotoViewer.getInstance().checkCurrentImageVisibility(); + } } @Override @@ -1673,7 +1678,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter for (int a = 0; a < dids.size(); a++) { long did = dids.get(a); if (message != null) { - profileActivity.getSendMessagesHelper().sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0); + profileActivity.getSendMessagesHelper().sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0, null); } profileActivity.getSendMessagesHelper().sendMessage(fmessages, did, true, 0); } @@ -2520,11 +2525,8 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter changed++; } } else { - TLRPC.EncryptedChat currentEncryptedChat = profileActivity.getMessagesController().getEncryptedChat((int) (dialog_id >> 32)); - if (currentEncryptedChat != null && 
AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46) { - if ((hasMedia[4] <= 0) == scrollSlidingTextTabStrip.hasTab(4)) { - changed++; - } + if ((hasMedia[4] <= 0) == scrollSlidingTextTabStrip.hasTab(4)) { + changed++; } } if ((hasMedia[2] <= 0) == scrollSlidingTextTabStrip.hasTab(2)) { @@ -2605,12 +2607,9 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } } } else { - TLRPC.EncryptedChat currentEncryptedChat = profileActivity.getMessagesController().getEncryptedChat((int) (dialog_id >> 32)); - if (currentEncryptedChat != null && AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46) { - if (hasMedia[4] > 0) { - if (!scrollSlidingTextTabStrip.hasTab(4)) { - scrollSlidingTextTabStrip.addTextTab(4, LocaleController.getString("SharedMusicTab2", R.string.SharedMusicTab2), idToView); - } + if (hasMedia[4] > 0) { + if (!scrollSlidingTextTabStrip.hasTab(4)) { + scrollSlidingTextTabStrip.addTextTab(4, LocaleController.getString("SharedMusicTab2", R.string.SharedMusicTab2), idToView); } } } @@ -3088,7 +3087,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { if (sharedMediaData[3].sections.size() == 0 && !sharedMediaData[3].loading) { return false; } @@ -3226,7 +3225,7 @@ public class SharedMediaLayout extends FrameLayout implements NotificationCenter } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { if (sharedMediaData[currentType].sections.size() == 0 && !sharedMediaData[currentType].loading) { return false; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ShutterButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ShutterButton.java index 581a2ff74..d9dd6becb 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/ShutterButton.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ShutterButton.java @@ -17,6 +17,7 @@ import android.graphics.drawable.Drawable; import android.os.Build; import android.view.MotionEvent; import android.view.View; +import android.view.ViewConfiguration; import android.view.accessibility.AccessibilityNodeInfo; import android.view.animation.DecelerateInterpolator; @@ -138,9 +139,9 @@ public class ShutterButton extends View { redProgress = interpolator.getInterpolation(totalTime / 120.0f); invalidate(); } - canvas.drawCircle(cx, cy, AndroidUtilities.dp(26) * scale * redProgress, redPaint); + canvas.drawCircle(cx, cy, AndroidUtilities.dp(26.5f) * scale * redProgress, redPaint); } else if (redProgress != 0) { - canvas.drawCircle(cx, cy, AndroidUtilities.dp(26) * scale, redPaint); + canvas.drawCircle(cx, cy, AndroidUtilities.dp(26.5f) * scale, redPaint); } } else if (redProgress != 0) { redProgress = 0; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java index aea817edc..cecd8dfb3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java @@ -37,12 +37,16 @@ public class SizeNotifierFrameLayout extends FrameLayout { private WallpaperParallaxEffect parallaxEffect; private float translationX; private float translationY; + private float bgAngle; private float parallaxScale = 1.0f; private int backgroundTranslationY; private boolean paused = true; private Drawable oldBackgroundDrawable; private ActionBarLayout parentLayout; protected AdjustPanLayoutHelper adjustPanLayoutHelper; + private int emojiHeight; + private float emojiOffset; + private boolean animationInProgress; public interface SizeNotifierFrameLayoutDelegate { void 
onSizeChanged(int keyboardHeight, boolean isWidthGreater); @@ -60,13 +64,21 @@ public class SizeNotifierFrameLayout extends FrameLayout { } public void setBackgroundImage(Drawable bitmap, boolean motion) { + if (backgroundDrawable == bitmap) { + return; + } + if (bitmap instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) bitmap; + motionBackgroundDrawable.setParentView(this); + } backgroundDrawable = bitmap; if (motion) { if (parallaxEffect == null) { parallaxEffect = new WallpaperParallaxEffect(getContext()); - parallaxEffect.setCallback((offsetX, offsetY) -> { + parallaxEffect.setCallback((offsetX, offsetY, angle) -> { translationX = offsetX; translationY = offsetY; + bgAngle = angle; invalidate(); }); if (getMeasuredWidth() != 0 && getMeasuredHeight() != 0) { @@ -155,10 +167,50 @@ public class SizeNotifierFrameLayout extends FrameLayout { backgroundTranslationY = translation; } + public int getBackgroundTranslationY() { + if (backgroundDrawable instanceof MotionBackgroundDrawable) { + if (animationInProgress) { + return (int) emojiOffset; + } else if (emojiHeight != 0) { + return emojiHeight; + } + return backgroundTranslationY; + } + return 0; + } + + public int getBackgroundSizeY() { + int offset = 0; + if (backgroundDrawable instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) backgroundDrawable; + if (!motionBackgroundDrawable.hasPattern()) { + if (animationInProgress) { + offset = (int) emojiOffset; + } else if (emojiHeight != 0) { + offset = emojiHeight; + } else { + offset = backgroundTranslationY; + } + } else { + offset = backgroundTranslationY != 0 ? 
0 : -keyboardHeight; + } + } + return getMeasuredHeight() - offset; + } + public int getHeightWithKeyboard() { return keyboardHeight + getMeasuredHeight(); } + public void setEmojiKeyboardHeight(int height) { + emojiHeight = height; + } + + public void setEmojiOffset(boolean animInProgress, float offset) { + emojiOffset = offset; + animationInProgress = animInProgress; + } + @Override protected void onDraw(Canvas canvas) { if (backgroundDrawable == null) { @@ -171,6 +223,10 @@ public class SizeNotifierFrameLayout extends FrameLayout { if (Theme.isAnimatingColor()) { oldBackgroundDrawable = backgroundDrawable; } + if (newDrawable instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) newDrawable; + motionBackgroundDrawable.setParentView(this); + } backgroundDrawable = newDrawable; } float themeAnimationValue = parentLayout != null ? parentLayout.getThemeAnimationValue() : 1.0f; @@ -184,7 +240,42 @@ public class SizeNotifierFrameLayout extends FrameLayout { } else { drawable.setAlpha(255); } - if (drawable instanceof ColorDrawable) { + if (drawable instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) drawable; + if (motionBackgroundDrawable.hasPattern()) { + int actionBarHeight = (isActionBarVisible() ? ActionBar.getCurrentActionBarHeight() : 0) + (Build.VERSION.SDK_INT >= 21 && occupyStatusBar ? 
AndroidUtilities.statusBarHeight : 0); + int viewHeight = getRootView().getMeasuredHeight() - actionBarHeight; + float scaleX = (float) getMeasuredWidth() / (float) drawable.getIntrinsicWidth(); + float scaleY = (float) (viewHeight) / (float) drawable.getIntrinsicHeight(); + float scale = Math.max(scaleX, scaleY); + int width = (int) Math.ceil(drawable.getIntrinsicWidth() * scale * parallaxScale); + int height = (int) Math.ceil(drawable.getIntrinsicHeight() * scale * parallaxScale); + int x = (getMeasuredWidth() - width) / 2 + (int) translationX; + int y = backgroundTranslationY + (viewHeight - height) / 2 + actionBarHeight + (int) translationY; + canvas.save(); + canvas.clipRect(0, actionBarHeight, width, getMeasuredHeight() - bottomClip); + drawable.setBounds(x, y, x + width, y + height); + drawable.draw(canvas); + canvas.restore(); + } else { + if (bottomClip != 0) { + canvas.save(); + canvas.clipRect(0, 0, getMeasuredWidth(), getRootView().getMeasuredHeight() - bottomClip); + } + motionBackgroundDrawable.setTranslationY(backgroundTranslationY); + int bottom = getMeasuredHeight() - backgroundTranslationY; + if (animationInProgress) { + bottom -= emojiOffset; + } else if (emojiHeight != 0) { + bottom -= emojiHeight; + } + drawable.setBounds(0, 0, getMeasuredWidth(), bottom); + drawable.draw(canvas); + if (bottomClip != 0) { + canvas.restore(); + } + } + } else if (drawable instanceof ColorDrawable) { if (bottomClip != 0) { canvas.save(); canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight() - bottomClip); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java index c1a7dd659..9fc3ad30e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerMasksAlert.java @@ -289,7 +289,7 @@ public class StickerMasksAlert extends BottomSheet implements 
NotificationCenter currentType = MediaDataController.TYPE_IMAGE; - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.stickersDidLoad); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.recentDocumentsDidLoad); MediaDataController.getInstance(currentAccount).loadRecents(MediaDataController.TYPE_IMAGE, false, true, false); @@ -901,7 +901,7 @@ public class StickerMasksAlert extends BottomSheet implements NotificationCenter @Override public void dismissInternal() { super.dismissInternal(); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.stickersDidLoad); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.recentDocumentsDidLoad); } @@ -948,7 +948,7 @@ public class StickerMasksAlert extends BottomSheet implements NotificationCenter if (!isGif && (type == currentType || type == MediaDataController.TYPE_FAVE)) { checkDocuments(false); } - } else if (id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.emojiLoaded) { if (gridView != null) { int count = gridView.getChildCount(); for (int a = 0; a < count; a++) { @@ -1506,7 +1506,7 @@ public class StickerMasksAlert extends BottomSheet implements NotificationCenter case 0: { TLRPC.Document sticker = (TLRPC.Document) cache.get(position); StickerEmojiCell cell = (StickerEmojiCell) holder.itemView; - cell.setSticker(sticker, cacheParent.get(position), positionToEmoji.get(position), false); + cell.setSticker(sticker, null, cacheParent.get(position), positionToEmoji.get(position), false); 
cell.setRecent(recentStickers[currentType].contains(sticker) || favouriteStickers.contains(sticker)); break; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java index 60ddd5d6f..ddc7b8026 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickerSetBulletinLayout.java @@ -78,7 +78,13 @@ public class StickerSetBulletinLayout extends Bulletin.TwoLineLayout { imageLocation = ImageLocation.getForDocument(thumb, sticker); } else { TLRPC.PhotoSize thumb = (TLRPC.PhotoSize) object; - imageLocation = ImageLocation.getForSticker(thumb, sticker); + int thumbVersion = 0; + if (setObject instanceof TLRPC.StickerSetCovered) { + thumbVersion = ((TLRPC.StickerSetCovered) setObject).set.thumb_version; + } else if (setObject instanceof TLRPC.TL_messages_stickerSet) { + thumbVersion = ((TLRPC.TL_messages_stickerSet) setObject).set.thumb_version; + } + imageLocation = ImageLocation.getForSticker(thumb, sticker, thumbVersion); } if (object instanceof TLRPC.Document && MessageObject.isAnimatedStickerDocument(sticker, true)) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java index 4df4a631f..13ff32582 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/StickersAlert.java @@ -19,11 +19,16 @@ import android.content.Context; import android.graphics.*; import android.graphics.Rect; import android.graphics.drawable.Drawable; +import android.net.Uri; import android.os.Build; +import android.os.Parcelable; +import android.text.Editable; +import android.text.InputType; import android.text.Selection; import android.text.Spannable; import 
android.text.SpannableStringBuilder; import android.text.TextUtils; +import android.text.TextWatcher; import android.text.method.LinkMovementMethod; import android.transition.Transition; import android.transition.TransitionManager; @@ -31,14 +36,18 @@ import android.transition.TransitionValues; import android.util.SparseArray; import android.util.TypedValue; import android.view.Gravity; +import android.view.HapticFeedbackConstants; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; +import android.view.inputmethod.EditorInfo; import android.widget.FrameLayout; +import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.MediaController; import org.telegram.messenger.MediaDataController; import org.telegram.messenger.Emoji; import org.telegram.messenger.FileLoader; @@ -46,14 +55,18 @@ import org.telegram.messenger.FileLog; import org.telegram.messenger.FileRefController; import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.SendMessagesHelper; +import org.telegram.messenger.Utilities; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.RequestDelegate; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.ActionBarMenuItem; +import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.BottomSheet; import org.telegram.ui.ActionBar.Theme; @@ -64,7 +77,9 @@ import org.telegram.ui.Cells.StickerEmojiCell; import org.telegram.ui.ChatActivity; import org.telegram.ui.ContentPreviewViewer; +import java.io.File; import java.util.ArrayList; +import 
java.util.HashMap; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -75,7 +90,7 @@ import androidx.recyclerview.widget.RecyclerView; public class StickersAlert extends BottomSheet implements NotificationCenter.NotificationCenterDelegate { public interface StickersAlertDelegate { - void onStickerSelected(TLRPC.Document sticker, String query, Object parent, boolean clearsInputField, boolean notify, int scheduleDate); + void onStickerSelected(TLRPC.Document sticker, String query, Object parent, MessageObject.SendAnimationData sendAnimationData, boolean clearsInputField, boolean notify, int scheduleDate); boolean canSchedule(); boolean isInScheduleMode(); } @@ -123,8 +138,13 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not private TLRPC.TL_messages_stickerSet stickerSet; private TLRPC.Document selectedSticker; + private SendMessagesHelper.ImportingSticker selectedStickerPath; private TLRPC.InputStickerSet inputStickerSet; private ArrayList stickerSetCovereds; + private ArrayList importingStickers; + private ArrayList importingStickersPaths; + private HashMap uploadImportStickers; + private String importingSoftware; private StickersAlertDelegate delegate; private StickersAlertInstallDelegate installDelegate; @@ -145,7 +165,7 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not if (delegate == null) { return; } - delegate.onStickerSelected(sticker, query, parent, clearsInputField, notify, scheduleDate); + delegate.onStickerSelected(sticker, query, parent, null, clearsInputField, notify, scheduleDate); dismiss(); } @@ -164,9 +184,19 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not } + @Override + public boolean needRemove() { + return importingStickers != null; + } + + @Override + public void remove(SendMessagesHelper.ImportingSticker importingSticker) { + removeSticker(importingSticker); + } + @Override public boolean needSend() { 
- return previewSendButton.getVisibility() == View.VISIBLE; + return previewSendButton.getVisibility() == View.VISIBLE && importingStickers == null; } @Override @@ -247,6 +277,82 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not init(context); } + public StickersAlert(Context context, String software, ArrayList uris, ArrayList emoji) { + super(context, false); + parentActivity = (Activity) context; + importingStickers = uris; + importingSoftware = software; + Utilities.globalQueue.postRunnable(() -> { + ArrayList stickers = new ArrayList<>(); + BitmapFactory.Options opts = new BitmapFactory.Options(); + opts.inJustDecodeBounds = true; + Boolean isAnimated = null; + for (int a = 0, N = uris.size(); a < N; a++) { + Object obj = uris.get(a); + if (obj instanceof Uri) { + Uri uri = (Uri) obj; + String ext = MediaController.getStickerExt(uri); + if (ext == null) { + continue; + } + boolean animated = "tgs".equals(ext); + if (isAnimated == null) { + isAnimated = animated; + } else if (isAnimated != animated) { + continue; + } + if (isDismissed()) { + return; + } + SendMessagesHelper.ImportingSticker importingSticker = new SendMessagesHelper.ImportingSticker(); + importingSticker.animated = animated; + importingSticker.path = MediaController.copyFileToCache(uri, ext, (animated ? 
64 : 512) * 1024); + if (importingSticker.path == null) { + continue; + } + if (!animated) { + BitmapFactory.decodeFile(importingSticker.path, opts); + if ((opts.outWidth != 512 || opts.outHeight <= 0 || opts.outHeight > 512) && (opts.outHeight != 512 || opts.outWidth <= 0 || opts.outWidth > 512)) { + continue; + } + importingSticker.mimeType = "image/" + ext; + importingSticker.validated = true; + } else { + importingSticker.mimeType = "application/x-tgsticker"; + } + if (emoji != null && emoji.size() == N && emoji.get(a) instanceof String) { + importingSticker.emoji = emoji.get(a); + } else { + importingSticker.emoji = "#️⃣"; + } + stickers.add(importingSticker); + if (stickers.size() >= 200) { + break; + } + } + } + Boolean isAnimatedFinal = isAnimated; + AndroidUtilities.runOnUIThread(() -> { + importingStickersPaths = stickers; + if (importingStickersPaths.isEmpty()) { + dismiss(); + } else { + adapter.notifyDataSetChanged(); + if (isAnimatedFinal) { + uploadImportStickers = new HashMap<>(); + for (int a = 0, N = importingStickersPaths.size(); a < N; a++) { + SendMessagesHelper.ImportingSticker sticker = importingStickersPaths.get(a); + uploadImportStickers.put(sticker.path, sticker); + FileLoader.getInstance(currentAccount).uploadFile(sticker.path, false, true, ConnectionsManager.FileTypeFile); + } + } + updateFields(); + } + }); + }); + init(context); + } + public StickersAlert(Context context, BaseFragment baseFragment, TLRPC.InputStickerSet set, TLRPC.TL_messages_stickerSet loadedSet, StickersAlertDelegate stickersAlertDelegate) { super(context, false); delegate = stickersAlertDelegate; @@ -374,7 +480,9 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not } itemSize = (MeasureSpec.getSize(widthMeasureSpec) - AndroidUtilities.dp(36)) / 5; int contentSize; - if (stickerSetCovereds != null) { + if (importingStickers != null) { + contentSize = AndroidUtilities.dp(48 + 48) + Math.max(3, (int) Math.ceil(importingStickers.size() / 
5.0f)) * AndroidUtilities.dp(82) + backgroundPaddingTop + AndroidUtilities.statusBarHeight; + } else if (stickerSetCovereds != null) { contentSize = AndroidUtilities.dp(48 + 8) + AndroidUtilities.dp(60) * stickerSetCovereds.size() + adapter.stickersRowCount * AndroidUtilities.dp(82) + backgroundPaddingTop + AndroidUtilities.dp(24); } else { contentSize = AndroidUtilities.dp(48 + 48) + Math.max(3, (stickerSet != null ? (int) Math.ceil(stickerSet.documents.size() / 5.0f) : 0)) * AndroidUtilities.dp(82) + backgroundPaddingTop + AndroidUtilities.statusBarHeight; @@ -542,6 +650,24 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not StickersAlert alert = new StickersAlert(parentActivity, parentFragment, inputStickerSetID, null, null); alert.show(); } + } else if (importingStickersPaths != null) { + if (position < 0 || position >= importingStickersPaths.size()) { + return; + } + selectedStickerPath = importingStickersPaths.get(position); + if (!selectedStickerPath.validated) { + return; + } + stickerEmojiTextView.setText(Emoji.replaceEmoji(selectedStickerPath.emoji, stickerEmojiTextView.getPaint().getFontMetricsInt(), AndroidUtilities.dp(30), false)); + stickerImageView.setImage(ImageLocation.getForPath(selectedStickerPath.path), null, null, null, null, null, selectedStickerPath.animated ? 
"tgs" : null, 0, null); + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) stickerPreviewLayout.getLayoutParams(); + layoutParams.topMargin = scrollOffsetY; + stickerPreviewLayout.setLayoutParams(layoutParams); + stickerPreviewLayout.setVisibility(View.VISIBLE); + AnimatorSet animatorSet = new AnimatorSet(); + animatorSet.playTogether(ObjectAnimator.ofFloat(stickerPreviewLayout, View.ALPHA, 0.0f, 1.0f)); + animatorSet.setDuration(200); + animatorSet.start(); } else { if (stickerSet == null || position < 0 || position >= stickerSet.documents.size()) { return; @@ -659,8 +785,14 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not previewSendButton.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); stickerPreviewLayout.addView(previewSendButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.BOTTOM | Gravity.LEFT)); previewSendButton.setOnClickListener(v -> { - delegate.onStickerSelected(selectedSticker, null, stickerSet, clearsInputField, true, 0); - dismiss(); + if (importingStickersPaths != null) { + removeSticker(selectedStickerPath); + hidePreview(); + selectedStickerPath = null; + } else { + delegate.onStickerSelected(selectedSticker, null, stickerSet, null, clearsInputField, true, 0); + dismiss(); + } }); frameLayoutParams = new FrameLayout.LayoutParams(LayoutHelper.MATCH_PARENT, AndroidUtilities.getShadowHeight(), Gravity.BOTTOM | Gravity.LEFT); @@ -669,7 +801,11 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not previewSendButtonShadow.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); stickerPreviewLayout.addView(previewSendButtonShadow, frameLayoutParams); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); + if (importingStickers != null) { + 
NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileUploaded); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileUploadFailed); + } updateFields(); updateSendButton(); @@ -679,7 +815,14 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not private void updateSendButton() { int size = (int) (Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y) / 2 / AndroidUtilities.density); - if (delegate != null && (stickerSet == null || !stickerSet.set.masks)) { + if (importingStickers != null) { + previewSendButton.setText(LocaleController.getString("ImportStickersRemove", R.string.ImportStickersRemove).toUpperCase()); + previewSendButton.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); + stickerImageView.setLayoutParams(LayoutHelper.createFrame(size, size, Gravity.CENTER, 0, 0, 0, 30)); + stickerEmojiTextView.setLayoutParams(LayoutHelper.createFrame(size, size, Gravity.CENTER, 0, 0, 0, 30)); + previewSendButton.setVisibility(View.VISIBLE); + previewSendButtonShadow.setVisibility(View.VISIBLE); + } else if (delegate != null && (stickerSet == null || !stickerSet.set.masks)) { previewSendButton.setText(LocaleController.getString("SendSticker", R.string.SendSticker).toUpperCase()); stickerImageView.setLayoutParams(LayoutHelper.createFrame(size, size, Gravity.CENTER, 0, 0, 0, 30)); stickerEmojiTextView.setLayoutParams(LayoutHelper.createFrame(size, size, Gravity.CENTER, 0, 0, 0, 30)); @@ -694,6 +837,19 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not } } + private void removeSticker(SendMessagesHelper.ImportingSticker sticker) { + int idx = importingStickersPaths.indexOf(sticker); + if (idx >= 0) { + importingStickersPaths.remove(idx); + adapter.notifyItemRemoved(idx); + if (importingStickersPaths.isEmpty()) { + dismiss(); + return; + } + updateFields(); + } + } + public void 
setInstallDelegate(StickersAlertInstallDelegate stickersAlertInstallDelegate) { installDelegate = stickersAlertInstallDelegate; } @@ -824,12 +980,233 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not } } adapter.notifyDataSetChanged(); + } else if (importingStickers != null) { + titleTextView.setText(LocaleController.formatPluralString("Stickers", importingStickersPaths != null ? importingStickersPaths.size() : importingStickers.size())); + if (uploadImportStickers == null || uploadImportStickers.isEmpty()) { + setButton(v -> showNameEnterAlert(), LocaleController.formatString("ImportStickers", R.string.ImportStickers, LocaleController.formatPluralString("Stickers", importingStickersPaths != null ? importingStickersPaths.size() : importingStickers.size())).toUpperCase(), Theme.key_dialogTextBlue2); + pickerBottomLayout.setEnabled(true); + } else { + setButton(null, LocaleController.getString("ImportStickersProcessing", R.string.ImportStickersProcessing).toUpperCase(), Theme.key_dialogTextGray2); + pickerBottomLayout.setEnabled(false); + } } else { String text = LocaleController.getString("Close", R.string.Close).toUpperCase(); setButton((v) -> dismiss(), text, Theme.key_dialogTextBlue2); } } + private void showNameEnterAlert() { + Context context = getContext(); + + int[] state = new int[]{0}; + FrameLayout fieldLayout = new FrameLayout(context); + + AlertDialog.Builder builder = new AlertDialog.Builder(context); + builder.setTitle(LocaleController.getString("ImportStickersEnterName", R.string.ImportStickersEnterName)); + builder.setPositiveButton(LocaleController.getString("Next", R.string.Next), (dialog, which) -> { + + }); + + LinearLayout linearLayout = new LinearLayout(context); + linearLayout.setOrientation(LinearLayout.VERTICAL); + builder.setView(linearLayout); + + linearLayout.addView(fieldLayout, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 36, Gravity.TOP | Gravity.LEFT, 24, 6, 24, 0)); + + TextView message = new 
TextView(context); + + TextView textView = new TextView(context); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + textView.setTextColor(Theme.getColor(Theme.key_dialogTextHint)); + textView.setMaxLines(1); + textView.setLines(1); + textView.setText("t.me/addstickers/"); + textView.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_CAP_SENTENCES); + textView.setGravity(Gravity.LEFT | Gravity.TOP); + textView.setSingleLine(true); + textView.setVisibility(View.INVISIBLE); + textView.setImeOptions(EditorInfo.IME_ACTION_DONE); + textView.setPadding(0, AndroidUtilities.dp(4), 0, 0); + fieldLayout.addView(textView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 36, Gravity.TOP | Gravity.LEFT)); + + EditTextBoldCursor editText = new EditTextBoldCursor(context); + editText.setBackgroundDrawable(Theme.createEditTextDrawable(context, true)); + editText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 16); + editText.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); + editText.setMaxLines(1); + editText.setLines(1); + editText.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_CAP_SENTENCES); + editText.setGravity(Gravity.LEFT | Gravity.TOP); + editText.setSingleLine(true); + editText.setImeOptions(EditorInfo.IME_ACTION_NEXT); + editText.setCursorColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText)); + editText.setCursorSize(AndroidUtilities.dp(20)); + editText.setCursorWidth(1.5f); + editText.setPadding(0, AndroidUtilities.dp(4), 0, 0); + editText.addTextChangedListener(new TextWatcher() { + @Override + public void beforeTextChanged(CharSequence s, int start, int count, int after) { + + } + + @Override + public void onTextChanged(CharSequence s, int start, int before, int count) { + if (state[0] != 2) { + return; + } + checkUrlAvailable(message, editText.getText().toString(), false); + } + + @Override + public void afterTextChanged(Editable s) { + + } + }); + fieldLayout.addView(editText, 
LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 36, Gravity.TOP | Gravity.LEFT)); + editText.setOnEditorActionListener((view, i, keyEvent) -> { + if (i == EditorInfo.IME_ACTION_NEXT) { + builder.create().getButton(AlertDialog.BUTTON_POSITIVE).callOnClick(); + return true; + } + return false; + }); + editText.setSelection(editText.length()); + + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), (dialog, which) -> AndroidUtilities.hideKeyboard(editText)); + + message.setText(AndroidUtilities.replaceTags(LocaleController.getString("ImportStickersEnterNameInfo", R.string.ImportStickersEnterNameInfo))); + message.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + message.setPadding(AndroidUtilities.dp(23), AndroidUtilities.dp(12), AndroidUtilities.dp(23), AndroidUtilities.dp(6)); + message.setTextColor(Theme.getColor(Theme.key_dialogTextGray2)); + linearLayout.addView(message, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); + + final AlertDialog alertDialog = builder.create(); + alertDialog.setOnShowListener(dialog -> AndroidUtilities.runOnUIThread(() -> { + editText.requestFocus(); + AndroidUtilities.showKeyboard(editText); + })); + alertDialog.show(); + editText.requestFocus(); + alertDialog.getButton(AlertDialog.BUTTON_POSITIVE).setOnClickListener(v -> { + if (state[0] == 1) { + return; + } + if (state[0] == 0) { + state[0] = 1; + TLRPC.TL_stickers_suggestShortName req = new TLRPC.TL_stickers_suggestShortName(); + req.title = setTitle = editText.getText().toString(); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + boolean set = false; + if (response instanceof TLRPC.TL_stickers_suggestedShortName) { + TLRPC.TL_stickers_suggestedShortName res = (TLRPC.TL_stickers_suggestedShortName) response; + if (res.short_name != null) { + editText.setText(res.short_name); + editText.setSelection(0, editText.length()); + 
checkUrlAvailable(message, editText.getText().toString(), true); + set = true; + } + } + textView.setVisibility(View.VISIBLE); + editText.setPadding(textView.getMeasuredWidth(), AndroidUtilities.dp(4), 0, 0); + if (!set) { + editText.setText(""); + } + state[0] = 2; + })); + } else if (state[0] == 2) { + state[0] = 3; + if (!lastNameAvailable) { + AndroidUtilities.shakeView(editText, 2, 0); + editText.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + AndroidUtilities.hideKeyboard(editText); + SendMessagesHelper.getInstance(currentAccount).prepareImportStickers(setTitle, lastCheckName, importingSoftware, importingStickersPaths, (param) -> { + ImportingAlert importingAlert = new ImportingAlert(getContext(), lastCheckName, null); + importingAlert.show(); + }); + builder.getDismissRunnable().run(); + dismiss(); + } + }); + } + + private Runnable checkRunnable; + private String lastCheckName; + private int checkReqId; + private boolean lastNameAvailable; + private String setTitle; + private void checkUrlAvailable(TextView message, String text, boolean forceAvailable) { + if (forceAvailable) { + message.setText(LocaleController.getString("ImportStickersLinkAvailable", R.string.ImportStickersLinkAvailable)); + message.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGreenText)); + lastNameAvailable = true; + lastCheckName = text; + return; + } + if (checkRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(checkRunnable); + checkRunnable = null; + lastCheckName = null; + if (checkReqId != 0) { + ConnectionsManager.getInstance(currentAccount).cancelRequest(checkReqId, true); + } + } + if (TextUtils.isEmpty(text)) { + message.setText(LocaleController.getString("ImportStickersEnterUrlInfo", R.string.ImportStickersEnterUrlInfo)); + message.setTextColor(Theme.getColor(Theme.key_dialogTextGray2)); + return; + } + lastNameAvailable = false; + if (text != null) { + if (text.startsWith("_") || 
text.endsWith("_")) { + message.setText(LocaleController.getString("ImportStickersLinkInvalid", R.string.ImportStickersLinkInvalid)); + message.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText4)); + return; + } + for (int a = 0, N = text.length(); a < N; a++) { + char ch = text.charAt(a); + if (!(ch >= '0' && ch <= '9' || ch >= 'a' && ch <= 'z' || ch >= 'A' && ch <= 'Z' || ch == '_')) { + message.setText(LocaleController.getString("ImportStickersEnterUrlInfo", R.string.ImportStickersEnterUrlInfo)); + message.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText4)); + return; + } + } + } + if (text == null || text.length() < 5) { + message.setText(LocaleController.getString("ImportStickersLinkInvalidShort", R.string.ImportStickersLinkInvalidShort)); + message.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText4)); + return; + } + if (text.length() > 32) { + message.setText(LocaleController.getString("ImportStickersLinkInvalidLong", R.string.ImportStickersLinkInvalidLong)); + message.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText4)); + return; + } + + message.setText(LocaleController.getString("ImportStickersLinkChecking", R.string.ImportStickersLinkChecking)); + message.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText8)); + lastCheckName = text; + checkRunnable = () -> { + TLRPC.TL_stickers_checkShortName req = new TLRPC.TL_stickers_checkShortName(); + req.short_name = text; + checkReqId = ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { + checkReqId = 0; + if (lastCheckName != null && lastCheckName.equals(text)) { + if (error == null && response instanceof TLRPC.TL_boolTrue) { + message.setText(LocaleController.getString("ImportStickersLinkAvailable", R.string.ImportStickersLinkAvailable)); + message.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGreenText)); + lastNameAvailable = true; + } else 
{ + message.setText(LocaleController.getString("ImportStickersLinkTaken", R.string.ImportStickersLinkTaken)); + message.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteRedText4)); + lastNameAvailable = false; + } + } + }), ConnectionsManager.RequestFlagFailOnServerErrors); + }; + AndroidUtilities.runOnUIThread(checkRunnable, 300); + } + @Override protected boolean canDismissWithSwipe() { return false; @@ -861,7 +1238,9 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not gridView.setTopGlowOffset(newOffset); if (stickerSetCovereds == null) { titleTextView.setTranslationY(newOffset); - optionsButton.setTranslationY(newOffset); + if (importingStickers == null) { + optionsButton.setTranslationY(newOffset); + } shadow[0].setTranslationY(newOffset); } containerView.invalidate(); @@ -931,7 +1310,22 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not ConnectionsManager.getInstance(currentAccount).cancelRequest(reqId, true); reqId = 0; } - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); + if (importingStickers != null) { + if (importingStickersPaths != null) { + for (int a = 0, N = importingStickersPaths.size(); a < N; a++) { + SendMessagesHelper.ImportingSticker sticker = importingStickersPaths.get(a); + if (!sticker.validated) { + FileLoader.getInstance(currentAccount).cancelFileUpload(sticker.path, false); + } + if (sticker.animated) { + new File(sticker.path).delete(); + } + } + } + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileUploaded); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileUploadFailed); + } NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 4); } @@ -954,7 +1348,7 @@ public class StickersAlert extends 
BottomSheet implements NotificationCenter.Not @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { if (gridView != null) { int count = gridView.getChildCount(); for (int a = 0; a < count; a++) { @@ -965,6 +1359,49 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not ContentPreviewViewer.getInstance().close(); } ContentPreviewViewer.getInstance().reset(); + } else if (id == NotificationCenter.fileUploaded) { + if (uploadImportStickers == null) { + return; + } + String location = (String) args[0]; + SendMessagesHelper.ImportingSticker sticker = uploadImportStickers.get(location); + if (sticker != null) { + sticker.uploadMedia(currentAccount, (TLRPC.InputFile) args[1], () -> { + if (isDismissed()) { + return; + } + uploadImportStickers.remove(location); + if (!"application/x-tgsticker".equals(sticker.mimeType)) { + removeSticker(sticker); + } else { + sticker.validated = true; + int idx = importingStickersPaths.indexOf(sticker); + if (idx >= 0) { + RecyclerView.ViewHolder holder = gridView.findViewHolderForAdapterPosition(idx); + if (holder != null) { + ((StickerEmojiCell) holder.itemView).setSticker(sticker); + } + } else { + adapter.notifyDataSetChanged(); + } + } + if (uploadImportStickers.isEmpty()) { + updateFields(); + } + }); + } + } else if (id == NotificationCenter.fileUploadFailed) { + if (uploadImportStickers == null) { + return; + } + String location = (String) args[0]; + SendMessagesHelper.ImportingSticker sticker = uploadImportStickers.remove(location); + if (sticker != null) { + removeSticker(sticker); + } + if (uploadImportStickers.isEmpty()) { + updateFields(); + } } } @@ -1132,20 +1569,10 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not TLRPC.StickerSetCovered stickerSetCovered = stickerSetCovereds.get((Integer) cache.get(position)); FeaturedStickerSetInfoCell 
cell = (FeaturedStickerSetInfoCell) holder.itemView; cell.setStickerSet(stickerSetCovered, false); - /*boolean installing = installingStickerSets.containsKey(stickerSetCovered.set.id); - boolean removing = removingStickerSets.containsKey(stickerSetCovered.set.id); - if (installing || removing) { - if (installing && cell.isInstalled()) { - installingStickerSets.remove(stickerSetCovered.set.id); - installing = false; - } else if (removing && !cell.isInstalled()) { - removingStickerSets.remove(stickerSetCovered.set.id); - removing = false; - } - } - cell.setDrawProgress(installing || removing);*/ break; } + } else if (importingStickers != null) { + ((StickerEmojiCell) holder.itemView).setSticker(importingStickersPaths.get(position)); } else { ((StickerEmojiCell) holder.itemView).setSticker(stickerSet.documents.get(position), stickerSet, showEmoji); } @@ -1188,12 +1615,22 @@ public class StickersAlert extends BottomSheet implements NotificationCenter.Not } totalItems += count * stickersPerRow; } + } else if (importingStickersPaths != null) { + totalItems = importingStickersPaths.size(); } else { totalItems = stickerSet != null ? 
stickerSet.documents.size() : 0; } super.notifyDataSetChanged(); } + @Override + public void notifyItemRemoved(int position) { + if (importingStickersPaths != null) { + totalItems = importingStickersPaths.size(); + } + super.notifyItemRemoved(position); + } + public void updateColors() { if (stickerSetCovereds != null) { for (int i = 0, size = gridView.getChildCount(); i < size; i++) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeEditorView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeEditorView.java index 25be40619..cf381b303 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeEditorView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemeEditorView.java @@ -573,7 +573,7 @@ public class ThemeEditorView { for (int a = 0; a < currentThemeDesription.size(); a++) { ThemeDescription description = currentThemeDesription.get(a); String key = description.getCurrentKey(); - if (a == 0 && key.equals(Theme.key_chat_wallpaper) || key.equals(Theme.key_chat_wallpaper_gradient_to) || key.equals(Theme.key_windowBackgroundWhite) || key.equals(Theme.key_windowBackgroundGray)) { + if (a == 0 && key.equals(Theme.key_chat_wallpaper) || key.equals(Theme.key_chat_wallpaper_gradient_to1) || key.equals(Theme.key_chat_wallpaper_gradient_to2) || key.equals(Theme.key_chat_wallpaper_gradient_to3) || key.equals(Theme.key_windowBackgroundWhite) || key.equals(Theme.key_windowBackgroundGray)) { color = 0xff000000 | color; } currentThemeDesription.get(a).setColor(color, false); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemePreviewDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemePreviewDrawable.java index 52cedb8d3..9bfc4be3e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemePreviewDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ThemePreviewDrawable.java @@ -7,7 +7,6 @@ import android.graphics.Paint; import 
android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.RectF; -import android.graphics.Shader; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; @@ -56,8 +55,9 @@ public class ThemePreviewDrawable extends BitmapDrawable { int messageOutColor = Theme.getPreviewColor(colors, Theme.key_chat_outBubble); Integer messageOutGradientColor = colors.get(Theme.key_chat_outBubbleGradient); Integer backgroundColor = colors.get(Theme.key_chat_wallpaper); - Integer serviceColor = colors.get(Theme.key_chat_serviceBackground); - Integer gradientToColor = colors.get(Theme.key_chat_wallpaper_gradient_to); + Integer gradientToColor1 = colors.get(Theme.key_chat_wallpaper_gradient_to1); + Integer gradientToColor2 = colors.get(Theme.key_chat_wallpaper_gradient_to2); + Integer gradientToColor3 = colors.get(Theme.key_chat_wallpaper_gradient_to3); Integer gradientRotation = colors.get(Theme.key_chat_wallpaper_gradient_rotation); if (gradientRotation == null) { @@ -97,19 +97,20 @@ public class ThemePreviewDrawable extends BitmapDrawable { if (backgroundColor != null) { Drawable wallpaperDrawable; int patternColor; - if (gradientToColor == null) { + if (gradientToColor1 == null) { wallpaperDrawable = new ColorDrawable(backgroundColor); patternColor = AndroidUtilities.getPatternColor(backgroundColor); } else { - final int[] gradientColors = {backgroundColor, gradientToColor}; - wallpaperDrawable = BackgroundGradientDrawable.createDitheredGradientBitmapDrawable(gradientRotation, gradientColors, bitmap.getWidth(), bitmap.getHeight() - 120); - patternColor = AndroidUtilities.getPatternColor(AndroidUtilities.getAverageColor(backgroundColor, gradientToColor)); + if (gradientToColor2 != 0) { + wallpaperDrawable = new MotionBackgroundDrawable(backgroundColor, gradientToColor1, gradientToColor2, gradientToColor3, true); + } else { + final int[] gradientColors = 
{backgroundColor, gradientToColor1}; + wallpaperDrawable = BackgroundGradientDrawable.createDitheredGradientBitmapDrawable(gradientRotation, gradientColors, bitmap.getWidth(), bitmap.getHeight() - 120); + } + patternColor = AndroidUtilities.getPatternColor(AndroidUtilities.getAverageColor(backgroundColor, gradientToColor1)); } wallpaperDrawable.setBounds(0, 120, bitmap.getWidth(), bitmap.getHeight() - 120); wallpaperDrawable.draw(canvas); - if (serviceColor == null) { - serviceColor = AndroidUtilities.calcDrawableColor(new ColorDrawable(backgroundColor))[0]; - } if (pattern != null) { Bitmap patternBitmap; @@ -147,8 +148,10 @@ public class ThemePreviewDrawable extends BitmapDrawable { } if (patternBitmap != null) { Paint backgroundPaint = new Paint(Paint.FILTER_BITMAP_FLAG); - backgroundPaint.setColorFilter(new PorterDuffColorFilter(patternColor, PorterDuff.Mode.SRC_IN)); - backgroundPaint.setAlpha((int) (255 * themeDocument.accent.patternIntensity)); + if (themeDocument.accent.patternIntensity >= 0) { + backgroundPaint.setColorFilter(new PorterDuffColorFilter(patternColor, PorterDuff.Mode.SRC_IN)); + } + backgroundPaint.setAlpha((int) (255 * Math.abs(themeDocument.accent.patternIntensity))); float scale = Math.max(560.0f / patternBitmap.getWidth(), 678.0f / patternBitmap.getHeight()); int w = (int) (patternBitmap.getWidth() * scale); int h = (int) (patternBitmap.getHeight() * scale); @@ -165,11 +168,7 @@ public class ThemePreviewDrawable extends BitmapDrawable { hasBackground = true; } if (!hasBackground) { - BitmapDrawable catsDrawable = (BitmapDrawable) ApplicationLoader.applicationContext.getResources().getDrawable(R.drawable.catstile).mutate(); - if (serviceColor == null) { - serviceColor = AndroidUtilities.calcDrawableColor(catsDrawable)[0]; - } - catsDrawable.setTileModeXY(Shader.TileMode.REPEAT, Shader.TileMode.REPEAT); + Drawable catsDrawable = Theme.createDefaultWallpaper(bitmap.getWidth(), bitmap.getHeight() - 120); catsDrawable.setBounds(0, 120, 
bitmap.getWidth(), bitmap.getHeight() - 120); catsDrawable.draw(canvas); } @@ -202,14 +201,6 @@ public class ThemePreviewDrawable extends BitmapDrawable { messageDrawable[0].setTop(323, 522, false, false); messageDrawable[0].draw(canvas); - if (serviceColor != null) { - int x = (bitmap.getWidth() - 126) / 2; - int y = 150; - rect.set(x, y, x + 126, y + 42); - paint.setColor(serviceColor); - canvas.drawRoundRect(rect, 21, 21, paint); - } - paint.setColor(messageFieldColor); canvas.drawRect(0, bitmap.getHeight() - 120, bitmap.getWidth(), bitmap.getHeight(), paint); if (emojiDrawable != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/TrendingStickersLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/TrendingStickersLayout.java index 60ecef8eb..d053a5ac5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/TrendingStickersLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/TrendingStickersLayout.java @@ -20,6 +20,7 @@ import androidx.recyclerview.widget.RecyclerView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaDataController; +import org.telegram.messenger.MessageObject; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.UserConfig; @@ -387,7 +388,7 @@ public class TrendingStickersLayout extends FrameLayout implements NotificationC if (delegate.canSendSticker()) { stickersAlertDelegate = new StickersAlert.StickersAlertDelegate() { @Override - public void onStickerSelected(TLRPC.Document sticker, String query, Object parent, boolean clearsInputField, boolean notify, int scheduleDate) { + public void onStickerSelected(TLRPC.Document sticker, String query, Object parent, MessageObject.SendAnimationData sendAnimationData, boolean clearsInputField, boolean notify, int scheduleDate) { delegate.onStickerSelected(sticker, parent, clearsInputField, 
notify, scheduleDate); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java index ad0ac0975..22939f034 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/UndoView.java @@ -24,6 +24,7 @@ import android.text.method.LinkMovementMethod; import android.text.style.CharacterStyle; import android.util.TypedValue; import android.view.Gravity; +import android.view.HapticFeedbackConstants; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; @@ -44,6 +45,7 @@ import org.telegram.messenger.R; import org.telegram.messenger.UserConfig; import org.telegram.messenger.UserObject; import org.telegram.tgnet.ConnectionsManager; +import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; @@ -130,6 +132,7 @@ public class UndoView extends FrameLayout { public final static int ACTION_VOIP_INVITE_LINK_SENT = 41; public final static int ACTION_VOIP_SOUND_MUTED = 42; public final static int ACTION_VOIP_SOUND_UNMUTED = 43; + public final static int ACTION_VOIP_USER_JOINED = 44; public final static int ACTION_IMPORT_NOT_MUTUAL = 45; public final static int ACTION_IMPORT_GROUP_NOT_ADMIN = 46; @@ -145,7 +148,8 @@ public class UndoView extends FrameLayout { public final static int ACTION_HASHTAG_COPIED = 57; public final static int ACTION_TEXT_COPIED = 58; public final static int ACTION_LINK_COPIED = 59; - public static final int ACTION_PHONE_COPIED = 60; + public final static int ACTION_PHONE_COPIED = 60; + public final static int ACTION_SHARE_BACKGROUND = 61; public final static int ACTION_AUTO_DELETE_ON = 70; public final static int ACTION_AUTO_DELETE_OFF = 71; @@ -154,8 +158,11 @@ public class UndoView extends FrameLayout { public final static int 
ACTION_GIGAGROUP_SUCCESS = 76; public final static int ACTION_PAYMENT_SUCCESS = 77; + public final static int ACTION_PIN_DIALOGS = 78; + public final static int ACTION_UNPIN_DIALOGS = 79; private CharSequence infoText; + private int hideAnimationType = 1; public class LinkMovementMethodMy extends LinkMovementMethod { @Override @@ -291,7 +298,8 @@ public class UndoView extends FrameLayout { currentAction == ACTION_CHAT_UNARCHIVED || currentAction == ACTION_VOIP_MUTED || currentAction == ACTION_VOIP_UNMUTED || currentAction == ACTION_VOIP_REMOVED || currentAction == ACTION_VOIP_LINK_COPIED || currentAction == ACTION_VOIP_INVITED || currentAction == ACTION_VOIP_MUTED_FOR_YOU || currentAction == ACTION_VOIP_UNMUTED_FOR_YOU || currentAction == ACTION_REPORT_SENT || currentAction == ACTION_VOIP_USER_CHANGED || currentAction == ACTION_VOIP_CAN_NOW_SPEAK || currentAction == ACTION_VOIP_RECORDING_STARTED || - currentAction == ACTION_VOIP_RECORDING_FINISHED || currentAction == ACTION_VOIP_SOUND_MUTED || currentAction == ACTION_VOIP_SOUND_UNMUTED || currentAction == ACTION_PAYMENT_SUCCESS; + currentAction == ACTION_VOIP_RECORDING_FINISHED || currentAction == ACTION_VOIP_SOUND_MUTED || currentAction == ACTION_VOIP_SOUND_UNMUTED || currentAction == ACTION_PAYMENT_SUCCESS || + currentAction == ACTION_VOIP_USER_JOINED || currentAction == ACTION_PIN_DIALOGS || currentAction == ACTION_UNPIN_DIALOGS; } private boolean hasSubInfo() { @@ -451,6 +459,22 @@ public class UndoView extends FrameLayout { avatarImageView.setForUserOrChat(user, avatarDrawable); avatarImageView.setVisibility(VISIBLE); timeLeft = 3000; + } else if (action == ACTION_VOIP_USER_JOINED) { + if (infoObject instanceof TLRPC.User) { + TLRPC.User user = (TLRPC.User) infoObject; + infoText = AndroidUtilities.replaceTags(LocaleController.formatString("VoipChatUserJoined", R.string.VoipChatUserJoined, UserObject.getFirstName(user))); + } else { + TLRPC.Chat chat = (TLRPC.Chat) infoObject; + infoText = 
AndroidUtilities.replaceTags(LocaleController.formatString("VoipChatChatJoined", R.string.VoipChatChatJoined, chat.title)); + } + subInfoText = null; + icon = 0; + AvatarDrawable avatarDrawable = new AvatarDrawable(); + avatarDrawable.setTextSize(AndroidUtilities.dp(12)); + avatarDrawable.setInfo((TLObject) infoObject); + avatarImageView.setForUserOrChat((TLObject) infoObject, avatarDrawable); + avatarImageView.setVisibility(VISIBLE); + timeLeft = 3000; } else if (action == ACTION_VOIP_USER_CHANGED) { AvatarDrawable avatarDrawable = new AvatarDrawable(); avatarDrawable.setTextSize(AndroidUtilities.dp(12)); @@ -710,6 +734,15 @@ public class UndoView extends FrameLayout { infoText = this.infoText; subInfoText = null; icon = R.raw.chats_infotip; + } else if (action == ACTION_PIN_DIALOGS || action == ACTION_UNPIN_DIALOGS) { + int count = (Integer) infoObject; + if (action == ACTION_PIN_DIALOGS) { + infoText = LocaleController.formatPluralString("PinnedDialogsCount", count); + } else { + infoText = LocaleController.formatPluralString("UnpinnedDialogsCount", count); + } + subInfoText = null; + icon = currentAction == ACTION_PIN_DIALOGS ? 
R.raw.ic_pin : R.raw.ic_unpin; } else { if (action == ACTION_ARCHIVE_HINT) { infoText = LocaleController.getString("ChatArchived", R.string.ChatArchived); @@ -768,7 +801,7 @@ public class UndoView extends FrameLayout { currentAction == ACTION_FWD_MESSAGES || currentAction == ACTION_NOTIFY_ON || currentAction == ACTION_NOTIFY_OFF || currentAction == ACTION_USERNAME_COPIED || currentAction == ACTION_HASHTAG_COPIED || currentAction == ACTION_TEXT_COPIED || currentAction == ACTION_LINK_COPIED || currentAction == ACTION_PHONE_COPIED || currentAction == ACTION_AUTO_DELETE_OFF || currentAction == ACTION_AUTO_DELETE_ON || currentAction == ACTION_GIGAGROUP_CANCEL || currentAction == ACTION_GIGAGROUP_SUCCESS || - currentAction == ACTION_VOIP_INVITE_LINK_SENT) { + currentAction == ACTION_VOIP_INVITE_LINK_SENT || currentAction == ACTION_PIN_DIALOGS || currentAction == ACTION_UNPIN_DIALOGS || currentAction == ACTION_SHARE_BACKGROUND) { undoImageView.setVisibility(GONE); leftImageView.setVisibility(VISIBLE); @@ -929,6 +962,29 @@ public class UndoView extends FrameLayout { leftImageView.setAnimation(R.raw.forward, 30, 30); } timeLeft = 3000; + } else if (currentAction == ACTION_SHARE_BACKGROUND) { + Integer count = (Integer) infoObject; + if (infoObject2 == null) { + if (did == UserConfig.getInstance(currentAccount).clientUserId) { + infoTextView.setText(AndroidUtilities.replaceTags(LocaleController.getString("BackgroundToSavedMessages", R.string.BackgroundToSavedMessages))); + leftImageView.setAnimation(R.raw.saved_messages, 30, 30); + } else { + int lowerId = (int) did; + if (lowerId < 0) { + TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(-lowerId); + infoTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("BackgroundToGroup", R.string.BackgroundToGroup, chat.title))); + } else { + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(lowerId); + 
infoTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("BackgroundToUser", R.string.BackgroundToUser, UserObject.getFirstName(user)))); + } + leftImageView.setAnimation(R.raw.forward, 30, 30); + } + } else { + int amount = (Integer) infoObject2; + infoTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("BackgroundToChats", R.string.BackgroundToChats, LocaleController.formatPluralString("Chats", amount)))); + leftImageView.setAnimation(R.raw.forward, 30, 30); + } + timeLeft = 3000; } subinfoTextView.setVisibility(GONE); undoTextView.setTextColor(Theme.getColor(Theme.key_undo_cancelColor)); @@ -939,6 +995,9 @@ public class UndoView extends FrameLayout { leftImageView.setProgress(0); leftImageView.playAnimation(); + leftImageView.postDelayed(() -> { + leftImageView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + }, 300); } else if (currentAction == ACTION_PROXIMITY_SET || currentAction == ACTION_PROXIMITY_REMOVED) { int radius = (Integer) infoObject; TLRPC.User user = (TLRPC.User) infoObject2; @@ -1310,7 +1369,7 @@ public class UndoView extends FrameLayout { timeLeft -= dt; lastUpdateTime = newTime; if (timeLeft <= 0) { - hide(true, 1); + hide(true, hideAnimationType); } invalidate(); @@ -1326,4 +1385,8 @@ public class UndoView extends FrameLayout { public void setInfoText(CharSequence text) { infoText = text; } + + public void setHideAnimationType(int hideAnimationType) { + this.hideAnimationType = hideAnimationType; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/UpdateAppAlertDialog.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/UpdateAppAlertDialog.java index 703ed9943..c677e30a5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/UpdateAppAlertDialog.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/UpdateAppAlertDialog.java @@ -4,187 +4,355 @@ import 
android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; -import android.app.Activity; +import android.content.Context; import android.graphics.Canvas; -import android.os.Bundle; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.drawable.Drawable; +import android.text.SpannableStringBuilder; +import android.text.TextUtils; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.MotionEvent; import android.view.View; import android.widget.FrameLayout; +import android.widget.LinearLayout; +import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.DocumentObject; import org.telegram.messenger.FileLoader; +import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; -import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.MessageObject; import org.telegram.messenger.R; -import org.telegram.messenger.browser.Browser; +import org.telegram.messenger.SvgHelper; import org.telegram.tgnet.TLRPC; -import org.telegram.ui.ActionBar.AlertDialog; +import org.telegram.ui.ActionBar.BottomSheet; import org.telegram.ui.ActionBar.Theme; -public class UpdateAppAlertDialog extends AlertDialog implements NotificationCenter.NotificationCenterDelegate { +import androidx.core.widget.NestedScrollView; + +public class UpdateAppAlertDialog extends BottomSheet { private TLRPC.TL_help_appUpdate appUpdate; private int accountNum; - private String fileName; private RadialProgress radialProgress; private FrameLayout radialProgressView; private AnimatorSet progressAnimation; - private Activity parentActivity; - public UpdateAppAlertDialog(final Activity activity, TLRPC.TL_help_appUpdate update, int account) { - super(activity, 0); + private Drawable shadowDrawable; + private TextView textView; + private 
TextView messageTextView; + private NestedScrollView scrollView; + + private AnimatorSet shadowAnimation; + + private View shadow; + + private boolean ignoreLayout; + + private LinearLayout linearLayout; + + private int scrollOffsetY; + + private int[] location = new int[2]; + + private boolean animationInProgress; + + public class BottomSheetCell extends FrameLayout { + + private View background; + private TextView[] textView = new TextView[2]; + private boolean hasBackground; + + public BottomSheetCell(Context context, boolean withoutBackground) { + super(context); + + hasBackground = !withoutBackground; + setBackground(null); + + background = new View(context); + if (hasBackground) { + background.setBackground(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(4), Theme.getColor(Theme.key_featuredStickers_addButton), Theme.getColor(Theme.key_featuredStickers_addButtonPressed))); + } + addView(background, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 16, withoutBackground ? 0 : 16, 16, 16)); + + for (int a = 0; a < 2; a++) { + textView[a] = new TextView(context); + textView[a].setLines(1); + textView[a].setSingleLine(true); + textView[a].setGravity(Gravity.CENTER_HORIZONTAL); + textView[a].setEllipsize(TextUtils.TruncateAt.END); + textView[a].setGravity(Gravity.CENTER); + if (hasBackground) { + textView[a].setTextColor(Theme.getColor(Theme.key_featuredStickers_buttonText)); + textView[a].setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + } else { + textView[a].setTextColor(Theme.getColor(Theme.key_featuredStickers_addButton)); + } + textView[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + textView[a].setPadding(0, 0, 0, hasBackground ? 
0 : AndroidUtilities.dp(13)); + addView(textView[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + if (a == 1) { + textView[a].setAlpha(0.0f); + } + } + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(hasBackground ? 80 : 50), MeasureSpec.EXACTLY)); + } + + public void setText(CharSequence text, boolean animated) { + if (!animated) { + textView[0].setText(text); + } else { + textView[1].setText(text); + animationInProgress = true; + AnimatorSet animatorSet = new AnimatorSet(); + animatorSet.setDuration(180); + animatorSet.setInterpolator(CubicBezierInterpolator.EASE_OUT); + animatorSet.playTogether( + ObjectAnimator.ofFloat(textView[0], View.ALPHA, 1.0f, 0.0f), + ObjectAnimator.ofFloat(textView[0], View.TRANSLATION_Y, 0, -AndroidUtilities.dp(10)), + ObjectAnimator.ofFloat(textView[1], View.ALPHA, 0.0f, 1.0f), + ObjectAnimator.ofFloat(textView[1], View.TRANSLATION_Y, AndroidUtilities.dp(10), 0) + ); + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + animationInProgress = false; + TextView temp = textView[0]; + textView[0] = textView[1]; + textView[1] = temp; + } + }); + animatorSet.start(); + } + } + } + + public UpdateAppAlertDialog(Context context, TLRPC.TL_help_appUpdate update, int account) { + super(context, false); appUpdate = update; accountNum = account; - if (update.document instanceof TLRPC.TL_document) { - fileName = FileLoader.getAttachFileName(update.document); - } - parentActivity = activity; + setCanceledOnTouchOutside(false); - setTopImage(R.drawable.update, Theme.getColor(Theme.key_dialogTopBackground)); - setTopHeight(175); - setMessage(appUpdate.text); - if (appUpdate.document instanceof TLRPC.TL_document) { - setSecondTitle(AndroidUtilities.formatFileSize(appUpdate.document.size)); - } - 
setDismissDialogByButtons(false); - setTitle(LocaleController.getString("UpdateTelegram", R.string.UpdateTelegram)); - setPositiveButton(LocaleController.getString("UpdateNow", R.string.UpdateNow), (dialog, which) -> { - if (!BlockingUpdateView.checkApkInstallPermissions(getContext())) { - return; - } - if (appUpdate.document instanceof TLRPC.TL_document) { - if (!BlockingUpdateView.openApkInstall(parentActivity, appUpdate.document)) { - FileLoader.getInstance(accountNum).loadFile(appUpdate.document, "update", 1, 1); - showProgress(true); - } - } else if (appUpdate.url != null) { - Browser.openUrl(getContext(), appUpdate.url); - dialog.dismiss(); - } - }); - setNeutralButton(LocaleController.getString("Later", R.string.Later), (dialog, which) -> { - if (appUpdate.document instanceof TLRPC.TL_document) { - FileLoader.getInstance(accountNum).cancelLoadFile(appUpdate.document); - } - dialog.dismiss(); - }); + setApplyTopPadding(false); + setApplyBottomPadding(false); - radialProgressView = new FrameLayout(parentActivity) { + shadowDrawable = context.getResources().getDrawable(R.drawable.sheet_shadow_round).mutate(); + shadowDrawable.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_dialogBackground), PorterDuff.Mode.MULTIPLY)); + + FrameLayout container = new FrameLayout(context) { @Override - protected void onLayout(boolean changed, int left, int top, int right, int bottom) { - super.onLayout(changed, left, top, right, bottom); - int width = right - left; - int height = bottom - top; - int w = AndroidUtilities.dp(24); - int l = (width - w) / 2; - int t = (height - w) / 2 + AndroidUtilities.dp(2); - radialProgress.setProgressRect(l, t, l + w, t + w); + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + updateLayout(); + } + + @Override + public boolean onInterceptTouchEvent(MotionEvent ev) { + if (ev.getAction() == MotionEvent.ACTION_DOWN && scrollOffsetY != 0 && ev.getY() < scrollOffsetY) { + dismiss(); + return 
true; + } + return super.onInterceptTouchEvent(ev); + } + + @Override + public boolean onTouchEvent(MotionEvent e) { + return !isDismissed() && super.onTouchEvent(e); } @Override protected void onDraw(Canvas canvas) { - radialProgress.draw(canvas); + int top = (int) (scrollOffsetY - backgroundPaddingTop - getTranslationY()); + shadowDrawable.setBounds(0, top, getMeasuredWidth(), getMeasuredHeight()); + shadowDrawable.draw(canvas); } }; - radialProgressView.setWillNotDraw(false); - radialProgressView.setAlpha(0.0f); - radialProgressView.setScaleX(0.1f); - radialProgressView.setScaleY(0.1f); - radialProgressView.setVisibility(View.INVISIBLE); - radialProgress = new RadialProgress(radialProgressView); - radialProgress.setStrokeWidth(AndroidUtilities.dp(2)); - radialProgress.setBackground(null, true, false); - radialProgress.setProgressColor(Theme.getColor(Theme.key_dialogButton)); - } + container.setWillNotDraw(false); + containerView = container; - @Override - public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.fileDidLoad) { - String location = (String) args[0]; - if (fileName != null && fileName.equals(location)) { - showProgress(false); - BlockingUpdateView.openApkInstall(parentActivity, appUpdate.document); - } - } else if (id == NotificationCenter.fileDidFailToLoad) { - String location = (String) args[0]; - if (fileName != null && fileName.equals(location)) { - showProgress(false); - } - } else if (id == NotificationCenter.FileLoadProgressChanged) { - String location = (String) args[0]; - if (fileName != null && fileName.equals(location)) { - Long loadedSize = (Long) args[1]; - Long totalSize = (Long) args[2]; - float loadProgress = Math.min(1f, loadedSize / (float) totalSize); - radialProgress.setProgress(loadProgress, true); - } - } - } + scrollView = new NestedScrollView(context) { - @Override - protected void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.fileDidFailToLoad); - NotificationCenter.getInstance(accountNum).addObserver(this, NotificationCenter.FileLoadProgressChanged); - buttonsLayout.addView(radialProgressView, LayoutHelper.createFrame(36, 36)); - } + private boolean ignoreLayout; - @Override - public void dismiss() { - super.dismiss(); - NotificationCenter.getInstance(accountNum).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(accountNum).removeObserver(this, NotificationCenter.fileDidFailToLoad); - NotificationCenter.getInstance(accountNum).removeObserver(this, NotificationCenter.FileLoadProgressChanged); - } - - private void showProgress(final boolean show) { - if (progressAnimation != null) { - progressAnimation.cancel(); - } - progressAnimation = new AnimatorSet(); - final View textButton = buttonsLayout.findViewWithTag(BUTTON_POSITIVE); - if (show) { - 
radialProgressView.setVisibility(View.VISIBLE); - textButton.setEnabled(false); - progressAnimation.playTogether( - ObjectAnimator.ofFloat(textButton, "scaleX", 0.1f), - ObjectAnimator.ofFloat(textButton, "scaleY", 0.1f), - ObjectAnimator.ofFloat(textButton, "alpha", 0.0f), - ObjectAnimator.ofFloat(radialProgressView, "scaleX", 1.0f), - ObjectAnimator.ofFloat(radialProgressView, "scaleY", 1.0f), - ObjectAnimator.ofFloat(radialProgressView, "alpha", 1.0f)); - } else { - textButton.setVisibility(View.VISIBLE); - textButton.setEnabled(true); - progressAnimation.playTogether( - ObjectAnimator.ofFloat(radialProgressView, "scaleX", 0.1f), - ObjectAnimator.ofFloat(radialProgressView, "scaleY", 0.1f), - ObjectAnimator.ofFloat(radialProgressView, "alpha", 0.0f), - ObjectAnimator.ofFloat(textButton, "scaleX", 1.0f), - ObjectAnimator.ofFloat(textButton, "scaleY", 1.0f), - ObjectAnimator.ofFloat(textButton, "alpha", 1.0f)); - - } - progressAnimation.addListener(new AnimatorListenerAdapter() { @Override - public void onAnimationEnd(Animator animation) { - if (progressAnimation != null && progressAnimation.equals(animation)) { - if (!show) { - radialProgressView.setVisibility(View.INVISIBLE); - } else { - textButton.setVisibility(View.INVISIBLE); + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int height = MeasureSpec.getSize(heightMeasureSpec); + measureChildWithMargins(linearLayout, widthMeasureSpec, 0, heightMeasureSpec, 0); + int contentHeight = linearLayout.getMeasuredHeight(); + int padding = (height / 5 * 2); + int visiblePart = height - padding; + if (contentHeight - visiblePart < AndroidUtilities.dp(90) || contentHeight < height / 2 + AndroidUtilities.dp(90)) { + padding = height - contentHeight; + } + if (padding < 0) { + padding = 0; + } + if (getPaddingTop() != padding) { + ignoreLayout = true; + setPadding(0, padding, 0, 0); + ignoreLayout = false; + } + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(height, 
MeasureSpec.EXACTLY)); + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + updateLayout(); + } + + @Override + public void requestLayout() { + if (ignoreLayout) { + return; + } + super.requestLayout(); + } + + @Override + protected void onScrollChanged(int l, int t, int oldl, int oldt) { + super.onScrollChanged(l, t, oldl, oldt); + updateLayout(); + } + }; + scrollView.setFillViewport(true); + scrollView.setWillNotDraw(false); + scrollView.setClipToPadding(false); + scrollView.setVerticalScrollBarEnabled(false); + container.addView(scrollView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 130)); + + linearLayout = new LinearLayout(context); + linearLayout.setOrientation(LinearLayout.VERTICAL); + scrollView.addView(linearLayout, LayoutHelper.createScroll(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP)); + + if (appUpdate.sticker != null) { + BackupImageView imageView = new BackupImageView(context); + SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(appUpdate.sticker.thumbs, Theme.key_windowBackgroundGray, 1.0f); + TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(appUpdate.sticker.thumbs, 90); + ImageLocation imageLocation = ImageLocation.getForDocument(thumb, appUpdate.sticker); + + if (svgThumb != null) { + imageView.setImage(ImageLocation.getForDocument(appUpdate.sticker), "50_50", svgThumb, 0, "update"); + } else { + imageView.setImage(ImageLocation.getForDocument(appUpdate.sticker), "50_50", imageLocation, null, 0, "update"); + } + linearLayout.addView(imageView, LayoutHelper.createLinear(160, 160, Gravity.CENTER_HORIZONTAL | Gravity.TOP, 17, 8, 17, 0)); + } + + TextView textView = new TextView(context); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 
20); + textView.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); + textView.setSingleLine(true); + textView.setEllipsize(TextUtils.TruncateAt.END); + textView.setText(LocaleController.getString("AppUpdate", R.string.AppUpdate)); + linearLayout.addView(textView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 23, 16, 23, 0)); + + TextView messageTextView = new TextView(getContext()); + messageTextView.setTextColor(Theme.getColor(Theme.key_dialogTextGray3)); + messageTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + messageTextView.setMovementMethod(new AndroidUtilities.LinkMovementMethodMy()); + messageTextView.setLinkTextColor(Theme.getColor(Theme.key_dialogTextLink)); + messageTextView.setText(LocaleController.formatString("AppUpdateVersionAndSize", R.string.AppUpdateVersionAndSize, appUpdate.version, AndroidUtilities.formatFileSize(appUpdate.document.size))); + messageTextView.setGravity(Gravity.CENTER_HORIZONTAL | Gravity.TOP); + linearLayout.addView(messageTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 23, 0, 23, 5)); + + TextView changelogTextView = new TextView(getContext()); + changelogTextView.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); + changelogTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + changelogTextView.setMovementMethod(new AndroidUtilities.LinkMovementMethodMy()); + changelogTextView.setLinkTextColor(Theme.getColor(Theme.key_dialogTextLink)); + if (TextUtils.isEmpty(appUpdate.text)) { + changelogTextView.setText(AndroidUtilities.replaceTags(LocaleController.getString("AppUpdateChangelogEmpty", R.string.AppUpdateChangelogEmpty))); + } else { + SpannableStringBuilder builder = new SpannableStringBuilder(appUpdate.text); + MessageObject.addEntitiesToText(builder, update.entities, false, false, false, false); + changelogTextView.setText(builder); + } + 
changelogTextView.setGravity(Gravity.LEFT | Gravity.TOP); + linearLayout.addView(changelogTextView, LayoutHelper.createLinear(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 23, 15, 23, 0)); + + FrameLayout.LayoutParams frameLayoutParams = new FrameLayout.LayoutParams(LayoutHelper.MATCH_PARENT, AndroidUtilities.getShadowHeight(), Gravity.BOTTOM | Gravity.LEFT); + frameLayoutParams.bottomMargin = AndroidUtilities.dp(130); + shadow = new View(context); + shadow.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); + shadow.setAlpha(0.0f); + shadow.setTag(1); + container.addView(shadow, frameLayoutParams); + + BottomSheetCell doneButton = new BottomSheetCell(context, false); + doneButton.setText(LocaleController.formatString("AppUpdateDownloadNow", R.string.AppUpdateDownloadNow), false); + doneButton.background.setOnClickListener(v -> { + FileLoader.getInstance(accountNum).loadFile(appUpdate.document, "update", 1, 1); + dismiss(); + }); + container.addView(doneButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.LEFT | Gravity.BOTTOM, 0, 0, 0, 50)); + + BottomSheetCell scheduleButton = new BottomSheetCell(context, true); + scheduleButton.setText(LocaleController.getString("AppUpdateRemindMeLater", R.string.AppUpdateRemindMeLater), false); + scheduleButton.background.setOnClickListener(v -> dismiss()); + container.addView(scheduleButton, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 50, Gravity.LEFT | Gravity.BOTTOM, 0, 0, 0, 0)); + } + + private void runShadowAnimation(final int num, final boolean show) { + if (show && shadow.getTag() != null || !show && shadow.getTag() == null) { + shadow.setTag(show ? null : 1); + if (show) { + shadow.setVisibility(View.VISIBLE); + } + if (shadowAnimation != null) { + shadowAnimation.cancel(); + } + shadowAnimation = new AnimatorSet(); + shadowAnimation.playTogether(ObjectAnimator.ofFloat(shadow, View.ALPHA, show ? 
1.0f : 0.0f)); + shadowAnimation.setDuration(150); + shadowAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (shadowAnimation != null && shadowAnimation.equals(animation)) { + if (!show) { + shadow.setVisibility(View.INVISIBLE); + } + shadowAnimation = null; } } - } - @Override - public void onAnimationCancel(Animator animation) { - if (progressAnimation != null && progressAnimation.equals(animation)) { - progressAnimation = null; + @Override + public void onAnimationCancel(Animator animation) { + if (shadowAnimation != null && shadowAnimation.equals(animation)) { + shadowAnimation = null; + } } - } - }); - progressAnimation.setDuration(150); - progressAnimation.start(); + }); + shadowAnimation.start(); + } + } + + private void updateLayout() { + View child = linearLayout.getChildAt(0); + child.getLocationInWindow(location); + int top = location[1] - AndroidUtilities.dp(24); + int newOffset = Math.max(top, 0); + if (location[1] + linearLayout.getMeasuredHeight() <= container.getMeasuredHeight() - AndroidUtilities.dp(113) + containerView.getTranslationY()) { + runShadowAnimation(0, false); + } else { + runShadowAnimation(0, true); + } + if (scrollOffsetY != newOffset) { + scrollOffsetY = newOffset; + scrollView.invalidate(); + } + } + + @Override + protected boolean canDismissWithSwipe() { + return false; } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoTimelinePlayView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoTimelinePlayView.java index 954f4e18b..2456e2a0f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoTimelinePlayView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/VideoTimelinePlayView.java @@ -43,7 +43,7 @@ public class VideoTimelinePlayView extends View { private float pressDx; private MediaMetadataRetriever mediaMetadataRetriever; private VideoTimelineViewDelegate delegate; - private 
ArrayList frames = new ArrayList<>(); + private ArrayList frames = new ArrayList<>(); private AsyncTask currentTask; private static final Object sync = new Object(); private long frameTimeOffset; @@ -58,6 +58,8 @@ public class VideoTimelinePlayView extends View { private int lastWidth; private int currentMode = MODE_VIDEO; + Paint bitmapPaint = new Paint(); + private ArrayList exclusionRects = new ArrayList<>(); private android.graphics.Rect exclustionRect = new Rect(); @@ -359,7 +361,7 @@ public class VideoTimelinePlayView extends View { @Override protected void onPostExecute(Bitmap bitmap) { if (!isCancelled()) { - frames.add(bitmap); + frames.add(new BitmapFrame(bitmap)); invalidate(); if (frameNum < framesToLoad) { reloadFrames(frameNum + 1); @@ -382,9 +384,9 @@ public class VideoTimelinePlayView extends View { } } for (int a = 0; a < frames.size(); a++) { - Bitmap bitmap = frames.get(a); - if (bitmap != null) { - bitmap.recycle(); + BitmapFrame bitmap = frames.get(a); + if (bitmap != null && bitmap.bitmap != null) { + bitmap.bitmap.recycle(); } } frames.clear(); @@ -405,9 +407,9 @@ public class VideoTimelinePlayView extends View { public void clearFrames() { for (int a = 0; a < frames.size(); a++) { - Bitmap bitmap = frames.get(a); - if (bitmap != null) { - bitmap.recycle(); + BitmapFrame frame = frames.get(a); + if (frame != null) { + frame.bitmap.recycle(); } } frames.clear(); @@ -441,11 +443,22 @@ public class VideoTimelinePlayView extends View { } else { int offset = 0; for (int a = 0; a < frames.size(); a++) { - Bitmap bitmap = frames.get(a); - if (bitmap != null) { + BitmapFrame bitmap = frames.get(a); + if (bitmap.bitmap != null) { int x = AndroidUtilities.dp(16) + offset * frameWidth; int y = AndroidUtilities.dp(2 + 4); - canvas.drawBitmap(bitmap, x, y, null); + if (bitmap.alpha != 1f) { + bitmap.alpha += 16f / 100f; + if (bitmap.alpha > 1f) { + bitmap.alpha = 1f; + } else { + invalidate(); + } + bitmapPaint.setAlpha((int) (255 * bitmap.alpha)); + 
canvas.drawBitmap(bitmap.bitmap, x, y, bitmapPaint); + } else { + canvas.drawBitmap(bitmap.bitmap, x, y, null); + } } offset++; } @@ -482,4 +495,13 @@ public class VideoTimelinePlayView extends View { canvas.drawRoundRect(rect3, AndroidUtilities.dp(1), AndroidUtilities.dp(1), paint); canvas.drawCircle(cx, AndroidUtilities.dp(52), AndroidUtilities.dp(3), paint); } + + private static class BitmapFrame { + Bitmap bitmap; + float alpha; + + public BitmapFrame(Bitmap bitmap) { + this.bitmap = bitmap; + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/WallpaperCheckBoxView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/WallpaperCheckBoxView.java index db4251544..682f25ab8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/WallpaperCheckBoxView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/WallpaperCheckBoxView.java @@ -4,12 +4,10 @@ import android.animation.ObjectAnimator; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Canvas; -import android.graphics.LinearGradient; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffXfermode; import android.graphics.RectF; -import android.graphics.Shader; import android.text.TextPaint; import android.util.Property; import android.view.View; @@ -35,9 +33,9 @@ public class WallpaperCheckBoxView extends View { private float progress; private ObjectAnimator checkAnimator; - private int backgroundColor; - private int backgroundGradientColor; - private LinearGradient colorGradient; + private View parentView; + + private int[] colors = new int[4]; private final static float progressBounceDiff = 0.2f; @@ -54,7 +52,7 @@ public class WallpaperCheckBoxView extends View { } }; - public WallpaperCheckBoxView(Context context, boolean check) { + public WallpaperCheckBoxView(Context context, boolean check, View parent) { super(context); rect = new RectF(); @@ -63,6 +61,8 @@ public 
class WallpaperCheckBoxView extends View { drawCanvas = new Canvas(drawBitmap); } + parentView = parent; + textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); textPaint.setTextSize(AndroidUtilities.dp(14)); textPaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); @@ -87,15 +87,11 @@ public class WallpaperCheckBoxView extends View { maxTextSize = max; } - public void setBackgroundColor(int color) { - colorGradient = null; - backgroundColor = color; - invalidate(); - } - - public void setBackgroundGradientColor(int color) { - colorGradient = null; - backgroundGradientColor = color; + public void setColor(int index, int color) { + if (colors == null) { + colors = new int[4]; + } + colors[index] = color; invalidate(); } @@ -111,7 +107,12 @@ public class WallpaperCheckBoxView extends View { @Override protected void onDraw(Canvas canvas) { rect.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); - canvas.drawRoundRect(rect, AndroidUtilities.dp(4), AndroidUtilities.dp(4), Theme.chat_actionBackgroundPaint); + Theme.applyServiceShaderMatrixForView(this, parentView); + canvas.drawRoundRect(rect, getMeasuredHeight() / 2, getMeasuredHeight() / 2, Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + canvas.drawRoundRect(rect, getMeasuredHeight() / 2, getMeasuredHeight() / 2, Theme.chat_actionBackgroundGradientDarkenPaint); + } + textPaint.setColor(Theme.getColor(Theme.key_chat_serviceText)); int x = (getMeasuredWidth() - currentTextSize - AndroidUtilities.dp(28)) / 2; @@ -153,17 +154,25 @@ public class WallpaperCheckBoxView extends View { canvas.drawBitmap(drawBitmap, 0, 0, null); } else { rect.set(0, 0, AndroidUtilities.dp(18), AndroidUtilities.dp(18)); - if (backgroundGradientColor != 0) { - if (colorGradient == null) { - colorGradient = new LinearGradient(rect.left, rect.bottom, rect.left, rect.top, new int[]{backgroundColor, backgroundGradientColor}, null, Shader.TileMode.CLAMP); - backgroundPaint.setShader(colorGradient); + if (colors[3] 
!= 0) { + for (int a = 0; a < 4; a++) { + backgroundPaint.setColor(colors[a]); + canvas.drawArc(rect, -90 + 90 * a, 90, true, backgroundPaint); + } + } else if (colors[2] != 0) { + for (int a = 0; a < 3; a++) { + backgroundPaint.setColor(colors[a]); + canvas.drawArc(rect, -90 + 120 * a, 120, true, backgroundPaint); + } + } else if (colors[1] != 0) { + for (int a = 0; a < 2; a++) { + backgroundPaint.setColor(colors[a]); + canvas.drawArc(rect, -90 + 180 * a, 180, true, backgroundPaint); } - backgroundPaint.setColor(backgroundColor); } else { - backgroundPaint.setColor(backgroundColor); - backgroundPaint.setShader(null); + backgroundPaint.setColor(colors[0]); + canvas.drawRoundRect(rect, rect.width() / 2, rect.height() / 2, backgroundPaint); } - canvas.drawRoundRect(rect, rect.width() / 2, rect.height() / 2, backgroundPaint); } canvas.restore(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/WallpaperParallaxEffect.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/WallpaperParallaxEffect.java index 19c0866db..c47876f36 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/WallpaperParallaxEffect.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/WallpaperParallaxEffect.java @@ -65,8 +65,8 @@ public class WallpaperParallaxEffect implements SensorEventListener { float z = event.values[2] / SensorManager.GRAVITY_EARTH; - float pitch=(float)(Math.atan2(x, Math.sqrt(y*y+z*z))/Math.PI*2.0); - float roll=(float)(Math.atan2(y, Math.sqrt(x*x+z*z))/Math.PI*2.0); + float pitch = (float) (Math.atan2(x, Math.sqrt(y * y + z * z)) / Math.PI * 2.0); + float roll = (float) (Math.atan2(y, Math.sqrt(x * x + z * z)) / Math.PI * 2.0); switch (rotation) { case Surface.ROTATION_0: @@ -105,8 +105,20 @@ public class WallpaperParallaxEffect implements SensorEventListener { } int offsetX = Math.round(pitch * AndroidUtilities.dpf2(16)); int offsetY = Math.round(roll * AndroidUtilities.dpf2(16)); - if (callback != null) - 
callback.onOffsetsChanged(offsetX, offsetY); + float vx = Math.max(-1.0f, Math.min(1.0f, -pitch / 0.45f)); + float vy = Math.max(-1.0f, Math.min(1.0f, -roll / 0.45f)); + float len = (float) Math.sqrt(vx * vx + vy * vy); + vx /= len; + vy /= len; + float y2 = -1; + float x2 = 0; + float angle = (float) (Math.atan2(vx * y2 - vy * x2, vx * x2 + vy * y2) / (Math.PI / 180.0f)); + if (angle < 0) { + angle += 360; + } + if (callback != null) { + callback.onOffsetsChanged(offsetX, offsetY, angle); + } } @Override @@ -115,6 +127,6 @@ public class WallpaperParallaxEffect implements SensorEventListener { } public interface Callback { - void onOffsetsChanged(int offsetX, int offsetY); + void onOffsetsChanged(int offsetX, int offsetY, float angle); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/CellFlickerDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/CellFlickerDrawable.java new file mode 100644 index 000000000..1c8341858 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/CellFlickerDrawable.java @@ -0,0 +1,108 @@ +package org.telegram.ui.Components.voip; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Matrix; +import android.graphics.Paint; +import android.graphics.RectF; +import android.graphics.Shader; + +import androidx.core.graphics.ColorUtils; + +import org.telegram.messenger.AndroidUtilities; + +public class CellFlickerDrawable { + + private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + private final Shader gradientShader; + + private final Paint paintOutline = new Paint(Paint.ANTI_ALIAS_FLAG); + private final Shader gradientShader2; + int size; + + int parentWidth; + float progress; + long lastUpdateTime; + + Matrix matrix = new Matrix(); + + public boolean drawFrame = true; + public float repeatProgress = 1.2f; + + public CellFlickerDrawable() { + size = AndroidUtilities.dp(160); + 
gradientShader = new LinearGradient(0, 0, size, 0, new int[]{Color.TRANSPARENT, ColorUtils.setAlphaComponent(Color.WHITE, 64), Color.TRANSPARENT}, null, Shader.TileMode.CLAMP); + gradientShader2 = new LinearGradient(0, 0, size, 0, new int[]{Color.TRANSPARENT, ColorUtils.setAlphaComponent(Color.WHITE, 204), Color.TRANSPARENT}, null, Shader.TileMode.CLAMP); + paint.setShader(gradientShader); + paintOutline.setShader(gradientShader2); + paintOutline.setStyle(Paint.Style.STROKE); + paintOutline.setStrokeWidth(AndroidUtilities.dp(2)); + } + + public void draw(Canvas canvas, RectF rectF, float rad) { + long currentTime = System.currentTimeMillis(); + if (lastUpdateTime != 0) { + long dt = currentTime - lastUpdateTime; + if (dt > 10) { + progress += dt / 1200f; + if (progress > repeatProgress) { + progress = 0; + } + lastUpdateTime = currentTime; + } + } else { + lastUpdateTime = currentTime; + } + + if (progress > 1f) { + return; + } + + float x = (parentWidth + size * 2) * progress - size; + matrix.setTranslate(x, 0); + gradientShader.setLocalMatrix(matrix); + gradientShader2.setLocalMatrix(matrix); + + canvas.drawRoundRect(rectF, rad, rad, paint); + if (drawFrame) { + canvas.drawRoundRect(rectF, rad, rad, paintOutline); + } + } + + + public void draw(Canvas canvas, GroupCallMiniTextureView view) { + long currentTime = System.currentTimeMillis(); + if (lastUpdateTime != 0) { + long dt = currentTime - lastUpdateTime; + if (dt > 10) { + progress += dt / 500f; + if (progress > 4f) { + progress = 0; + } + lastUpdateTime = currentTime; + } + } else { + lastUpdateTime = currentTime; + } + + if (progress > 1f) { + return; + } + + float x = (parentWidth + size * 2) * progress - size - view.getX(); + matrix.setTranslate(x, 0); + gradientShader.setLocalMatrix(matrix); + gradientShader2.setLocalMatrix(matrix); + + AndroidUtilities.rectTmp.set(view.textureView.currentClipHorizontal, view.textureView.currentClipVertical, view.textureView.getMeasuredWidth() - 
view.textureView.currentClipHorizontal, view.textureView.getMeasuredHeight() - view.textureView.currentClipVertical); + canvas.drawRect(AndroidUtilities.rectTmp, paint); + if (drawFrame) { + canvas.drawRoundRect(AndroidUtilities.rectTmp, view.textureView.roundRadius, view.textureView.roundRadius, paintOutline); + } + } + + public void setParentWidth(int parentWidth) { + this.parentWidth = parentWidth; + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallGridCell.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallGridCell.java new file mode 100644 index 000000000..199b29c3b --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallGridCell.java @@ -0,0 +1,93 @@ +package org.telegram.ui.Components.voip; + +import android.content.Context; +import android.view.View; +import android.widget.FrameLayout; + +import androidx.annotation.NonNull; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.ui.GroupCallActivity; +import org.telegram.ui.GroupCallTabletGridAdapter; + +public class GroupCallGridCell extends FrameLayout { + + public final static int CELL_HEIGHT = 165; + public int spanCount; + public int position; + public GroupCallTabletGridAdapter gridAdapter; + + GroupCallMiniTextureView renderer; + + ChatObject.VideoParticipant participant; + public boolean attached; + private final boolean isTabletGrid; + + public GroupCallGridCell(@NonNull Context context, boolean isTabletGrid) { + super(context); + this.isTabletGrid = isTabletGrid; + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (isTabletGrid) { + float totalSpans = 6; + float w = ((View) getParent()).getMeasuredWidth() / totalSpans * spanCount; + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(gridAdapter.getItemHeight(position), MeasureSpec.EXACTLY)); + 
} else { + float spanCount = GroupCallActivity.isLandscapeMode ? 3f : 2f; + float parentWidth; + float h; + if (getParent() != null) { + parentWidth = ((View) getParent()).getMeasuredWidth(); + } else { + parentWidth = MeasureSpec.getSize(widthMeasureSpec); + } + if (GroupCallActivity.isTabletMode) { + h = parentWidth / 2f; + } else { + h = parentWidth / spanCount; + } + + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec((int) (h + AndroidUtilities.dp(4)), MeasureSpec.EXACTLY)); + } + } + + public void setData(AccountInstance accountInstance, ChatObject.VideoParticipant participant, ChatObject.Call call, int selfPeerId) { + this.participant = participant; + } + + public ChatObject.VideoParticipant getParticipant() { + return participant; + } + + public void setRenderer(GroupCallMiniTextureView renderer) { + this.renderer = renderer; + } + + public GroupCallMiniTextureView getRenderer() { + return renderer; + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + attached = true; + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + attached = false; + } + + public float getItemHeight() { + if (gridAdapter != null) { + return gridAdapter.getItemHeight(position); + } else { + return getMeasuredHeight(); + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallMiniTextureView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallMiniTextureView.java new file mode 100644 index 000000000..99989c749 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallMiniTextureView.java @@ -0,0 +1,1787 @@ +package org.telegram.ui.Components.voip; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; +import android.annotation.SuppressLint; +import android.content.Context; +import android.graphics.Bitmap; +import 
android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Paint; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.graphics.RadialGradient; +import android.graphics.Rect; +import android.graphics.Shader; +import android.graphics.drawable.BitmapDrawable; +import android.graphics.drawable.ColorDrawable; +import android.graphics.drawable.Drawable; +import android.graphics.drawable.GradientDrawable; +import android.text.Layout; +import android.text.StaticLayout; +import android.text.TextPaint; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.core.content.ContextCompat; +import androidx.core.graphics.ColorUtils; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.BuildVars; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.ImageLoader; +import org.telegram.messenger.ImageLocation; +import org.telegram.messenger.ImageReceiver; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessageObject; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.R; +import org.telegram.messenger.UserConfig; +import org.telegram.messenger.UserObject; +import org.telegram.messenger.Utilities; +import org.telegram.messenger.voip.Instance; +import org.telegram.messenger.voip.VideoCapturerDevice; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.SimpleTextView; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AvatarDrawable; +import 
org.telegram.ui.Components.BlobDrawable; +import org.telegram.ui.Components.CrossOutDrawable; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.GroupCallFullscreenAdapter; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MotionBackgroundDrawable; +import org.telegram.ui.Components.RLottieImageView; +import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.GroupCallActivity; +import org.webrtc.GlGenericDrawer; +import org.webrtc.RendererCommon; + +import java.util.ArrayList; + +@SuppressLint("ViewConstructor") +public class GroupCallMiniTextureView extends FrameLayout implements GroupCallStatusIcon.Callback { + + public final VoIPTextureView textureView; + + public boolean showingInFullscreen; + public GroupCallGridCell primaryView; + public GroupCallFullscreenAdapter.GroupCallUserCell secondaryView; + public GroupCallGridCell tabletGridView; + public boolean animateToScrimView; + private boolean showingAsScrimView; + boolean isFullscreenMode; + public boolean animateToFullscreen; + private boolean updateNextLayoutAnimated; + + boolean attached; + public ChatObject.VideoParticipant participant; + GroupCallRenderersContainer parentContainer; + + ArrayList attachedRenderers; + + Paint gradientPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + LinearGradient gradientShader; + boolean animateEnter; + ChatObject.Call call; + GroupCallActivity activity; + + boolean useSpanSize; + float spanCount; + int gridItemsCount; + + FrameLayout infoContainer; + + int currentAccount; + private final SimpleTextView nameView; + private int lastSize; + + private TextView stopSharingTextView; + + public boolean forceDetached; + + private boolean invalidateFromChild; + private boolean checkScale; + float progressToSpeaking; + + Paint speakingPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private final RLottieImageView micIconView; + private final ImageView screencastIcon; + + public boolean hasVideo; 
+ public float progressToNoVideoStub = 1f; + private NoVideoStubLayout noVideoStubLayout; + ValueAnimator noVideoStubAnimator; + private boolean lastLandscapeMode; + + float pinchScale; + float pinchCenterX; + float pinchCenterY; + float pinchTranslationX; + float pinchTranslationY; + boolean inPinchToZoom; + + private float progressToBackground; + ImageReceiver imageReceiver = new ImageReceiver(); + + ArrayList onFirstFrameRunnables = new ArrayList<>(); + + private GroupCallStatusIcon statusIcon; + private boolean swipeToBack; + private float swipeToBackDy; + + Bitmap thumb; + Paint thumbPaint; + private boolean videoIsPaused; + private float videoIsPausedProgress; + private CrossOutDrawable pausedVideoDrawable; + private Drawable castingScreenDrawable; + float overlayIconAlpha; + + ImageView blurredFlippingStub; + + public boolean drawFirst; + + public GroupCallMiniTextureView(GroupCallRenderersContainer parentContainer, ArrayList attachedRenderers, ChatObject.Call call, GroupCallActivity activity) { + super(parentContainer.getContext()); + this.call = call; + this.currentAccount = activity.getCurrentAccount(); + pausedVideoDrawable = new CrossOutDrawable(parentContainer.getContext(), R.drawable.calls_video, null); + pausedVideoDrawable.setCrossOut(true, false); + pausedVideoDrawable.setOffsets(-AndroidUtilities.dp(4), AndroidUtilities.dp(6), AndroidUtilities.dp(6)); + pausedVideoDrawable.setStrokeWidth(AndroidUtilities.dpf2(3.4f)); + + castingScreenDrawable = parentContainer.getContext().getResources().getDrawable(R.drawable.screencast_big).mutate(); + + TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); + textPaint.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textPaint.setTextSize(AndroidUtilities.dp(13)); + textPaint.setColor(Color.WHITE); + + TextPaint textPaint2 = new TextPaint(Paint.ANTI_ALIAS_FLAG); + textPaint2.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textPaint2.setTextSize(AndroidUtilities.dp(15)); + 
textPaint2.setColor(Color.WHITE); + + String videoOnPauseString = LocaleController.getString("VoipVideoOnPause", R.string.VoipVideoOnPause); + StaticLayout staticLayout = new StaticLayout(LocaleController.getString("VoipVideoScreenSharingTwoLines", R.string.VoipVideoScreenSharingTwoLines), textPaint, AndroidUtilities.dp(400), Layout.Alignment.ALIGN_CENTER, 1.0f, 0, false); + TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(call.chatId); + String text = LocaleController.formatPluralString("Participants", MessagesController.getInstance(currentAccount).groipCallVideoMaxParticipants); + StaticLayout noVideoLayout = new StaticLayout(LocaleController.formatString("VoipVideoNotAvailable", R.string.VoipVideoNotAvailable, text), textPaint, AndroidUtilities.dp(400), Layout.Alignment.ALIGN_CENTER, 1.0f, 0, false); + String sharingScreenString = LocaleController.getString("VoipVideoScreenSharing", R.string.VoipVideoScreenSharing); + + float textW = textPaint.measureText(videoOnPauseString); + float textW3 = textPaint2.measureText(sharingScreenString); + + this.textureView = new VoIPTextureView(parentContainer.getContext(), false, false, true, true) { + + float overlayIconAlphaFrom; + + @Override + public void animateToLayout() { + super.animateToLayout(); + overlayIconAlphaFrom = overlayIconAlpha; + } + + @Override + protected void updateRendererSize() { + super.updateRendererSize(); + if (blurredFlippingStub != null && blurredFlippingStub.getParent() != null) { + blurredFlippingStub.getLayoutParams().width = textureView.renderer.getMeasuredWidth(); + blurredFlippingStub.getLayoutParams().height = textureView.renderer.getMeasuredHeight(); + } + } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (!renderer.isFirstFrameRendered() || (renderer.getAlpha() != 1f && blurRenderer.getAlpha() != 1f) || videoIsPaused) { + if (progressToBackground != 1f) { + progressToBackground += 16f / 150f; + if (progressToBackground > 1f) { + 
progressToBackground = 1f; + } else { + invalidate(); + } + } + if (thumb != null) { + canvas.save(); + canvas.scale(currentThumbScale, currentThumbScale, getMeasuredWidth() / 2f, getMeasuredHeight() / 2f); + if (thumbPaint == null) { + thumbPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + thumbPaint.setFilterBitmap(true); + } + canvas.drawBitmap(thumb, (getMeasuredWidth() - thumb.getWidth()) / 2f, (getMeasuredHeight() - thumb.getHeight()) / 2f, thumbPaint); + canvas.restore(); + } else { + imageReceiver.setImageCoords(currentClipHorizontal, currentClipVertical, getMeasuredWidth() - currentClipHorizontal * 2, getMeasuredHeight() - currentClipVertical * 2); + imageReceiver.setAlpha(progressToBackground); + imageReceiver.draw(canvas); + } + if (participant == call.videoNotAvailableParticipant) { + if (showingInFullscreen || !parentContainer.inFullscreenMode) { + float iconSize = AndroidUtilities.dp(48); + float x = (getMeasuredWidth() - iconSize) / 2f; + float y = getMeasuredHeight() / 2 - iconSize; + textPaint.setAlpha(255); + + canvas.save(); + canvas.translate(x - AndroidUtilities.dp(400) / 2f + iconSize / 2f, y + iconSize + AndroidUtilities.dp(10)); + noVideoLayout.draw(canvas); + canvas.restore(); + } + if (stopSharingTextView.getVisibility() != INVISIBLE) { + stopSharingTextView.setVisibility(INVISIBLE); + } + } else if (participant.presentation && participant.participant.self) { + if (stopSharingTextView.getVisibility() != VISIBLE) { + stopSharingTextView.setVisibility(VISIBLE); + stopSharingTextView.setScaleX(1.0f); + stopSharingTextView.setScaleY(1.0f); + } + float progressToFullscreen = drawFirst ? 
0 : parentContainer.progressToFullscreenMode; + int size = AndroidUtilities.dp(33); + if (animateToFullscreen || showingInFullscreen) { + size += (AndroidUtilities.dp(10) + AndroidUtilities.dp(39) * parentContainer.progressToFullscreenMode); + } else { + size += AndroidUtilities.dp(10) * (1.0f - progressToFullscreen); + } + + int x = (getMeasuredWidth() - size) / 2; + float smallProgress; + float scrimProgress = (showingAsScrimView || animateToScrimView ? parentContainer.progressToScrimView : 0); + + smallProgress = (showingAsScrimView || animateToScrimView) ? scrimProgress : progressToFullscreen; + + int y = (int) ((getMeasuredHeight() - size) / 2 - AndroidUtilities.dp(11) - (AndroidUtilities.dp(17) + AndroidUtilities.dp(74) * progressToFullscreen) * smallProgress); + castingScreenDrawable.setBounds(x, y, x + size, y + size); + castingScreenDrawable.draw(canvas); + + if (parentContainer.progressToFullscreenMode > 0 || scrimProgress > 0) { + float alpha = Math.max(progressToFullscreen, scrimProgress) * smallProgress; + textPaint2.setAlpha((int) (255 * alpha)); + if (animateToFullscreen || showingInFullscreen) { + stopSharingTextView.setAlpha(alpha * (1.0f - scrimProgress)); + } else { + stopSharingTextView.setAlpha(0.0f); + } + canvas.drawText(sharingScreenString, x - textW3 / 2f + size / 2f, y + size + AndroidUtilities.dp(32), textPaint2); + } else { + stopSharingTextView.setAlpha(0.0f); + } + stopSharingTextView.setTranslationY(y + size + AndroidUtilities.dp(72) + swipeToBackDy - currentClipVertical); + stopSharingTextView.setTranslationX((getMeasuredWidth() - stopSharingTextView.getMeasuredWidth()) / 2 - currentClipHorizontal); + if (progressToFullscreen < 1 && scrimProgress < 1) { + textPaint.setAlpha((int) (255 * (1.0 - Math.max(progressToFullscreen, scrimProgress)))); + canvas.save(); + canvas.translate(x - AndroidUtilities.dp(400) / 2f + size / 2f, y + size + AndroidUtilities.dp(10)); + staticLayout.draw(canvas); + canvas.restore(); + } + } else { + if 
(stopSharingTextView.getVisibility() != INVISIBLE) { + stopSharingTextView.setVisibility(INVISIBLE); + } + activity.cellFlickerDrawable.draw(canvas, GroupCallMiniTextureView.this); + } + invalidate(); + } + + if (blurredFlippingStub != null && blurredFlippingStub.getParent() != null) { + blurredFlippingStub.setScaleX(textureView.renderer.getScaleX()); + blurredFlippingStub.setScaleY(textureView.renderer.getScaleY()); + } + super.dispatchDraw(canvas); + + float y = getMeasuredHeight() - currentClipVertical - AndroidUtilities.dp(80); + + if (participant != call.videoNotAvailableParticipant) { + canvas.save(); + if ((showingInFullscreen || animateToFullscreen) && !GroupCallActivity.isLandscapeMode && !GroupCallActivity.isTabletMode) { + y -= AndroidUtilities.dp(90) * parentContainer.progressToFullscreenMode * (1f - parentContainer.progressToHideUi); + } + canvas.translate(0, y); + canvas.drawPaint(gradientPaint); + canvas.restore(); + } + + + if (videoIsPaused || videoIsPausedProgress != 0) { + if (videoIsPaused && videoIsPausedProgress != 1f) { + videoIsPausedProgress += 16 / 250f; + if (videoIsPausedProgress > 1f) { + videoIsPausedProgress = 1f; + } else { + invalidate(); + } + } else if (!videoIsPaused && videoIsPausedProgress != 0f) { + videoIsPausedProgress -= 16 / 250f; + if (videoIsPausedProgress < 0f) { + videoIsPausedProgress = 0f; + } else { + invalidate(); + } + } + + float a = videoIsPausedProgress; + a *= (isInAnimation() ? 
(overlayIconAlphaFrom * (1f - animationProgress) + overlayIconAlpha * animationProgress) : overlayIconAlpha); + + if (a > 0) { + float iconSize = AndroidUtilities.dp(48); + float x = (getMeasuredWidth() - iconSize) / 2f; + y = (getMeasuredHeight() - iconSize) / 2f; + if (participant == call.videoNotAvailableParticipant) { + y -= iconSize / 2.5f; + } + AndroidUtilities.rectTmp.set((int) x, (int) y, (int) (x + iconSize), (int) (y + iconSize)); + if (a != 1) { + canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) (255 * a), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); + } + pausedVideoDrawable.setBounds((int) AndroidUtilities.rectTmp.left, (int) AndroidUtilities.rectTmp.top, (int) AndroidUtilities.rectTmp.right, (int) AndroidUtilities.rectTmp.bottom); + pausedVideoDrawable.draw(canvas); + canvas.restore(); + + a *= parentContainer.progressToFullscreenMode; + if (a > 0 && participant != call.videoNotAvailableParticipant) { + textPaint.setAlpha((int) (255 * a)); + canvas.drawText(videoOnPauseString, x - textW / 2f + iconSize / 2f, y + iconSize + AndroidUtilities.dp(16), textPaint); + } + } + } + } + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (inPinchToZoom && child == textureView.renderer) { + canvas.save(); + canvas.scale(pinchScale, pinchScale, pinchCenterX, pinchCenterY); + canvas.translate(pinchTranslationX, pinchTranslationY); + boolean b = super.drawChild(canvas, child, drawingTime); + canvas.restore(); + return b; + } + return super.drawChild(canvas, child, drawingTime); + } + + @Override + public void invalidate() { + super.invalidate(); + invalidateFromChild = true; + GroupCallMiniTextureView.this.invalidate(); + invalidateFromChild = false; + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + if (attached && checkScale && renderer.rotatedFrameHeight != 0 && renderer.rotatedFrameWidth != 0) { + if (showingAsScrimView) { + textureView.scaleType = 
SCALE_TYPE_FIT; + } else if (showingInFullscreen) { + textureView.scaleType = SCALE_TYPE_FIT; + } else if (parentContainer.inFullscreenMode && !showingInFullscreen) { + textureView.scaleType = SCALE_TYPE_FILL; + } else if (!parentContainer.inFullscreenMode) { + textureView.scaleType = participant.presentation ? SCALE_TYPE_FIT : SCALE_TYPE_ADAPTIVE; + } else { + textureView.scaleType = SCALE_TYPE_FIT; + } + checkScale = false; + } + super.onLayout(changed, left, top, right, bottom); + + if (renderer.rotatedFrameHeight != 0 && renderer.rotatedFrameWidth != 0 && participant != null) { + participant.setAspectRatio(renderer.rotatedFrameWidth / (float) renderer.rotatedFrameHeight, call); + } + } + + @Override + public void requestLayout() { + GroupCallMiniTextureView.this.requestLayout(); + super.requestLayout(); + } + + @Override + protected void onFirstFrameRendered() { + invalidate(); + if (!videoIsPaused) { + if (renderer.getAlpha() != 1f) { + renderer.animate().setDuration(300).alpha(1f); + } + } + + if (blurRenderer != null && blurRenderer.getAlpha() != 1f) { + blurRenderer.animate().setDuration(300).alpha(1f); + } + + if (blurredFlippingStub != null && blurredFlippingStub.getParent() != null) { + if (blurredFlippingStub.getAlpha() == 1f) { + blurredFlippingStub.animate().alpha(0f).setDuration(300).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (blurredFlippingStub.getParent() != null) { + textureView.removeView(blurredFlippingStub); + } + } + }).start(); + } else { + if (blurredFlippingStub.getParent() != null) { + textureView.removeView(blurredFlippingStub); + } + } + + } + } + }; + textureView.renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT); + this.parentContainer = parentContainer; + this.attachedRenderers = attachedRenderers; + this.activity = activity; + + textureView.renderer.init(VideoCapturerDevice.getEglBase().getEglBaseContext(), new RendererCommon.RendererEvents() { + 
@Override + public void onFirstFrameRendered() { + for (int i = 0; i < onFirstFrameRunnables.size(); i++) { + AndroidUtilities.cancelRunOnUIThread(onFirstFrameRunnables.get(i)); + onFirstFrameRunnables.get(i).run(); + } + onFirstFrameRunnables.clear(); + } + + @Override + public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) { + + } + }); + + textureView.attachBackgroundRenderer(); + + setClipChildren(false); + textureView.renderer.setAlpha(0f); + addView(textureView); + + noVideoStubLayout = new NoVideoStubLayout(getContext()); + addView(noVideoStubLayout); + + nameView = new SimpleTextView(parentContainer.getContext()); + nameView.setTextSize(13); + nameView.setTextColor(ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.9f))); + nameView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + nameView.setFullTextMaxLines(1); + nameView.setBuildFullLayout(true); + infoContainer = new FrameLayout(parentContainer.getContext()); + infoContainer.addView(nameView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL | Gravity.LEFT, 32, 0, 8, 0)); + addView(infoContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 32)); + speakingPaint.setStyle(Paint.Style.STROKE); + speakingPaint.setStrokeWidth(AndroidUtilities.dp(2)); + speakingPaint.setColor(Theme.getColor(Theme.key_voipgroup_speakingText)); + infoContainer.setClipChildren(false); + + micIconView = new RLottieImageView(parentContainer.getContext()); + addView(micIconView, LayoutHelper.createFrame(24, 24, 0, 4, 6, 4, 0)); + + screencastIcon = new ImageView(parentContainer.getContext()); + addView(screencastIcon, LayoutHelper.createFrame(24, 24, 0, 4, 6, 4, 0)); + screencastIcon.setPadding(AndroidUtilities.dp(4), AndroidUtilities.dp(4), AndroidUtilities.dp(4), AndroidUtilities.dp(4)); + screencastIcon.setImageDrawable(ContextCompat.getDrawable(parentContainer.getContext(), R.drawable.voicechat_screencast)); + 
screencastIcon.setColorFilter(new PorterDuffColorFilter(Color.WHITE, PorterDuff.Mode.MULTIPLY)); + + final Drawable rippleDrawable = Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(19), Color.TRANSPARENT, ColorUtils.setAlphaComponent(Color.WHITE, 100)); + stopSharingTextView = new TextView(parentContainer.getContext()) { + @Override + public boolean onTouchEvent(MotionEvent event) { + if (Math.abs(stopSharingTextView.getAlpha() - 1.0f) > 0.001f) { + return false; + } + return super.onTouchEvent(event); + } + }; + stopSharingTextView.setText(LocaleController.getString("VoipVideoScreenStopSharing", R.string.VoipVideoScreenStopSharing)); + stopSharingTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + stopSharingTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + stopSharingTextView.setPadding(AndroidUtilities.dp(21), 0, AndroidUtilities.dp(21), 0); + stopSharingTextView.setTextColor(0xffffffff); + stopSharingTextView.setBackground(rippleDrawable); + stopSharingTextView.setGravity(Gravity.CENTER); + stopSharingTextView.setOnClickListener(v -> { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().stopScreenCapture(); + } + stopSharingTextView.animate().alpha(0.0f).scaleX(0.0f).scaleY(0.0f).setDuration(180).start(); + }); + addView(stopSharingTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 38, Gravity.LEFT | Gravity.TOP)); + } + + private Rect rect = new Rect(); + + public boolean isInsideStopScreenButton(float x, float y) { + stopSharingTextView.getHitRect(rect); + return rect.contains((int) x, (int) y); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (attached) { + float y = textureView.getY() + textureView.getMeasuredHeight() - textureView.currentClipVertical - infoContainer.getMeasuredHeight(); + y += swipeToBackDy; + if (showingAsScrimView || animateToScrimView) { + infoContainer.setAlpha(1f - parentContainer.progressToScrimView); + micIconView.setAlpha(1f 
- parentContainer.progressToScrimView); + } else if (showingInFullscreen || animateToFullscreen) { + if (!GroupCallActivity.isLandscapeMode && !GroupCallActivity.isTabletMode) { + y -= AndroidUtilities.dp(90) * parentContainer.progressToFullscreenMode * (1f - parentContainer.progressToHideUi); + } + infoContainer.setAlpha(1f); + micIconView.setAlpha(1f); + } else if (secondaryView != null) { + infoContainer.setAlpha(1f - parentContainer.progressToFullscreenMode); + micIconView.setAlpha(1f - parentContainer.progressToFullscreenMode); + } else { + infoContainer.setAlpha(1f); + micIconView.setAlpha(1f); + } + + if (showingInFullscreen || animateToFullscreen) { + nameView.setFullAlpha(parentContainer.progressToFullscreenMode); + } else { + nameView.setFullAlpha(0f); + } + micIconView.setTranslationX(infoContainer.getX()); + micIconView.setTranslationY(y - AndroidUtilities.dp(2)); + + if (screencastIcon.getVisibility() == View.VISIBLE) { + screencastIcon.setTranslationX(textureView.getMeasuredWidth() - 2 * textureView.currentClipHorizontal - AndroidUtilities.dp(32)); + screencastIcon.setTranslationY(y - AndroidUtilities.dp(2)); + screencastIcon.setAlpha(Math.min(1f - parentContainer.progressToFullscreenMode, 1f - parentContainer.progressToScrimView)); + } + infoContainer.setTranslationY(y); + infoContainer.setTranslationX(drawFirst ? 
0 : AndroidUtilities.dp(6) * parentContainer.progressToFullscreenMode); + } + super.dispatchDraw(canvas); + + if (attached) { + if (statusIcon != null) { + if (statusIcon.isSpeaking && progressToSpeaking != 1f) { + progressToSpeaking += 16f / 300f; + if (progressToSpeaking > 1f) { + progressToSpeaking = 1f; + } else { + invalidate(); + } + } else if (!statusIcon.isSpeaking && progressToSpeaking != 0) { + progressToSpeaking -= 16f / 300f; + if (progressToSpeaking < 0) { + progressToSpeaking = 0; + } else { + invalidate(); + } + } + } + + float selectionProgress = progressToSpeaking * (1f - parentContainer.progressToFullscreenMode) * (1f - parentContainer.progressToScrimView); + if (progressToSpeaking > 0) { + speakingPaint.setAlpha((int) (255 * selectionProgress)); + + float scale = 0.9f + 0.1f * Math.max(0, 1f - Math.abs(swipeToBackDy) / AndroidUtilities.dp(300)); + canvas.save(); + AndroidUtilities.rectTmp.set(textureView.getX() + textureView.currentClipHorizontal, textureView.getY() + textureView.currentClipVertical, textureView.getX() + textureView.getMeasuredWidth() - textureView.currentClipHorizontal, textureView.getY() + textureView.getMeasuredHeight() - textureView.currentClipVertical); + canvas.scale(scale, scale, AndroidUtilities.rectTmp.centerX(), AndroidUtilities.rectTmp.centerY()); + canvas.translate(0, swipeToBackDy); + canvas.drawRoundRect(AndroidUtilities.rectTmp, textureView.roundRadius, textureView.roundRadius, speakingPaint); + canvas.restore(); + } + } + } + + public void getRenderBufferBitmap(GlGenericDrawer.TextureCallback callback) { + textureView.renderer.getRenderBufferBitmap(callback); + } + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (swipeToBack && (child == textureView || child == noVideoStubLayout)) { + float scale = 0.9f + 0.1f * Math.max(0, 1f - Math.abs(swipeToBackDy) / AndroidUtilities.dp(300)); + canvas.save(); + canvas.scale(scale, scale, child.getX() + child.getMeasuredWidth() / 
2f, child.getY() + child.getMeasuredHeight() / 2f); + canvas.translate(0, swipeToBackDy); + boolean b = super.drawChild(canvas, child, drawingTime); + canvas.restore(); + return b; + } + return super.drawChild(canvas, child, drawingTime); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + FrameLayout.LayoutParams layoutParams = (LayoutParams) infoContainer.getLayoutParams(); + int lastLeft = layoutParams.leftMargin; + float nameScale = 1f; + + if (lastLandscapeMode != GroupCallActivity.isLandscapeMode) { + checkScale = true; + lastLandscapeMode = GroupCallActivity.isLandscapeMode; + } + layoutParams.leftMargin = layoutParams.rightMargin = AndroidUtilities.dp(2); + + if (updateNextLayoutAnimated) { + nameView.animate().scaleX(nameScale).scaleY(nameScale).start(); + micIconView.animate().scaleX(nameScale).scaleY(nameScale).start(); + + } else { + nameView.animate().cancel(); + nameView.setScaleX(nameScale); + nameView.setScaleY(nameScale); + + micIconView.animate().cancel(); + micIconView.setScaleX(nameScale); + micIconView.setScaleY(nameScale); + infoContainer.animate().cancel(); + } + + updateNextLayoutAnimated = false; + + if (showingInFullscreen) { + updateSize(0); + overlayIconAlpha = 1f; + if (GroupCallActivity.isTabletMode) { + int w = MeasureSpec.getSize(widthMeasureSpec); + w -= AndroidUtilities.dp(GroupCallActivity.TABLET_LIST_SIZE + 8); + int h = MeasureSpec.getSize(heightMeasureSpec); + h -= AndroidUtilities.dp(4); + super.onMeasure(MeasureSpec.makeMeasureSpec(w, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(h, MeasureSpec.EXACTLY)); + } else if (!GroupCallActivity.isLandscapeMode) { + int h = MeasureSpec.getSize(heightMeasureSpec); + h -= AndroidUtilities.dp(92); + super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(h, MeasureSpec.EXACTLY)); + } else { + int w = MeasureSpec.getSize(widthMeasureSpec); + w -= 
AndroidUtilities.dp(92); + super.onMeasure(MeasureSpec.makeMeasureSpec(w, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(heightMeasureSpec), MeasureSpec.EXACTLY)); + } + } else if (showingAsScrimView) { + overlayIconAlpha = 1f; + int size = Math.min(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec)) - AndroidUtilities.dp(14) * 2; + super.onMeasure(MeasureSpec.makeMeasureSpec(size, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(size + getPaddingBottom(), MeasureSpec.EXACTLY)); + } else if (useSpanSize) { + overlayIconAlpha = 1f; + int spanCountTotal; + if (GroupCallActivity.isTabletMode && tabletGridView != null) { + spanCountTotal = 6; + } else { + spanCountTotal = GroupCallActivity.isLandscapeMode ? 6 : 2; + } + float listSize; + if (tabletGridView != null) { + listSize = MeasureSpec.getSize(widthMeasureSpec) - AndroidUtilities.dp(GroupCallActivity.TABLET_LIST_SIZE + 16 + 8); + } else if (GroupCallActivity.isTabletMode) { + listSize = AndroidUtilities.dp(GroupCallActivity.TABLET_LIST_SIZE); + } else { + listSize = MeasureSpec.getSize(widthMeasureSpec) - AndroidUtilities.dp(14) * 2 + (GroupCallActivity.isLandscapeMode ? -AndroidUtilities.dp(90) : 0); + } + float w = listSize * (spanCount / (float) spanCountTotal); + float h; + if (tabletGridView != null) { + h = tabletGridView.getItemHeight() - AndroidUtilities.dp(4); + w -= AndroidUtilities.dp(4); + } else { + if (GroupCallActivity.isTabletMode) { + h = listSize / 2f; + } else { + h = listSize / (float) (GroupCallActivity.isLandscapeMode ? 
3 : 2); + } + w -= AndroidUtilities.dp(2); + } + float layoutContainerW = w; + layoutParams = (LayoutParams) infoContainer.getLayoutParams(); + if (screencastIcon.getVisibility() == View.VISIBLE) { + layoutContainerW -= AndroidUtilities.dp(28); + } + updateSize((int) layoutContainerW); + layoutParams.width = (int) (layoutContainerW - layoutParams.leftMargin * 2); + + super.onMeasure(MeasureSpec.makeMeasureSpec((int) w, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec((int) h, MeasureSpec.EXACTLY)); + } else { + overlayIconAlpha = 0f; + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + int size = MeasureSpec.getSize(heightMeasureSpec) + (MeasureSpec.getSize(widthMeasureSpec) << 16); + if (lastSize != size) { + lastSize = size; + gradientShader = new LinearGradient(0, 0, 0, AndroidUtilities.dp(120), Color.TRANSPARENT, ColorUtils.setAlphaComponent(Color.BLACK, 120), Shader.TileMode.CLAMP); + gradientPaint.setShader(gradientShader); + } + + nameView.setPivotX(0); + nameView.setPivotY(nameView.getMeasuredHeight() / 2f); + } + + public static GroupCallMiniTextureView getOrCreate(ArrayList attachedRenderers, GroupCallRenderersContainer renderersContainer, GroupCallGridCell primaryView, GroupCallFullscreenAdapter.GroupCallUserCell secondaryView, GroupCallGridCell tabletGridView, ChatObject.VideoParticipant participant, ChatObject.Call call, GroupCallActivity activity) { + GroupCallMiniTextureView renderer = null; + for (int i = 0; i < attachedRenderers.size(); i++) { + if (participant.equals(attachedRenderers.get(i).participant)) { + renderer = attachedRenderers.get(i); + break; + } + } + if (renderer == null) { + renderer = new GroupCallMiniTextureView(renderersContainer, attachedRenderers, call, activity); + } + if (primaryView != null) { + renderer.setPrimaryView(primaryView); + } + if (secondaryView != null) { + renderer.setSecondaryView(secondaryView); + } + if (tabletGridView != null) { + renderer.setTabletGridView(tabletGridView); + } + return renderer; + 
} + + public void setTabletGridView(GroupCallGridCell tabletGridView) { + if (this.tabletGridView != tabletGridView) { + this.tabletGridView = tabletGridView; + updateAttachState(true); + } + } + + public void setPrimaryView(GroupCallGridCell primaryView) { + if (this.primaryView != primaryView) { + this.primaryView = primaryView; + checkScale = true; + updateAttachState(true); + } + } + + public void setSecondaryView(GroupCallFullscreenAdapter.GroupCallUserCell secondaryView) { + if (this.secondaryView != secondaryView) { + this.secondaryView = secondaryView; + checkScale = true; + updateAttachState(true); + } + } + + public void setShowingAsScrimView(boolean showing, boolean animated) { + this.showingAsScrimView = showing; + updateAttachState(animated); + } + + public void setShowingInFullscreen(boolean showing, boolean animated) { + if (this.showingInFullscreen != showing) { + this.showingInFullscreen = showing; + checkScale = true; + updateAttachState(animated); + } + } + + public void setFullscreenMode(boolean fullscreenMode, boolean animated) { + if (isFullscreenMode != fullscreenMode) { + isFullscreenMode = fullscreenMode; + updateAttachState((primaryView != null || tabletGridView != null) && animated); + } + } + + public void updateAttachState(boolean animated) { + if (forceDetached) { + return; + } + if (participant == null && (primaryView != null || secondaryView != null || tabletGridView != null)) { + if (primaryView != null) { + participant = primaryView.getParticipant(); + } else if (tabletGridView != null) { + participant = tabletGridView.getParticipant(); + } else { + participant = secondaryView.getVideoParticipant(); + } + } + boolean forceRequestLayout = false; + if (attached && !showingInFullscreen) { + boolean needDetach = VoIPService.getSharedInstance() == null; + if (GroupCallActivity.paused || participant == null || (secondaryView == null && (!ChatObject.Call.videoIsActive(participant.participant, participant.presentation, call) || 
!call.canStreamVideo && participant != call.videoNotAvailableParticipant))) { + needDetach = true; + } + if (needDetach || (primaryView == null && secondaryView == null && tabletGridView == null) && !showingAsScrimView && !animateToScrimView) { + attached = false; + + saveThumb(); + + if (textureView.currentAnimation == null && needDetach) { + GroupCallMiniTextureView viewToRemove = this; + parentContainer.detach(viewToRemove); + animate().scaleX(0.5f).scaleY(0.5f).alpha(0).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + viewToRemove.setScaleX(1f); + viewToRemove.setScaleY(1f); + viewToRemove.setAlpha(1f); + parentContainer.removeView(viewToRemove); + release(); + } + }).setDuration(150).start(); + } else { + if (parentContainer.inLayout) { + View viewToRemove = this; + AndroidUtilities.runOnUIThread(() -> parentContainer.removeView(viewToRemove)); + } else { + parentContainer.removeView(this); + } + parentContainer.detach(this); + release(); + } + + if (participant.participant.self) { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setLocalSink(null, participant.presentation); + } + } else { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().removeRemoteSink(participant.participant, participant.presentation); + } + } + + invalidate(); + + if (noVideoStubAnimator != null) { + noVideoStubAnimator.removeAllListeners(); + noVideoStubAnimator.cancel(); + } + } + } else if (!attached) { + if (VoIPService.getSharedInstance() == null) { + return; + } + if (primaryView != null || secondaryView != null || tabletGridView != null || showingInFullscreen) { + if (primaryView != null) { + participant = primaryView.getParticipant(); + } else if (secondaryView != null) { + participant = secondaryView.getVideoParticipant(); + } else if (tabletGridView != null) { + participant = tabletGridView.getParticipant(); + } + + boolean videoActive; + if 
(participant.participant.self) { + videoActive = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().getVideoState(participant.presentation) == Instance.VIDEO_STATE_ACTIVE; + } else { + videoActive = (call.canStreamVideo || participant == call.videoNotAvailableParticipant) && ChatObject.Call.videoIsActive(participant.participant, participant.presentation, call); + } + if (showingInFullscreen || (!VoIPService.getSharedInstance().isFullscreen(participant.participant, participant.presentation) && !VoIPService.getSharedInstance().isFullscreen(participant.participant, participant.presentation) && videoActive)) { + if (BuildVars.DEBUG_PRIVATE_VERSION) { + for (int i = 0; i < attachedRenderers.size(); i++) { + if (attachedRenderers.get(i).participant.equals(participant)) { + throw new RuntimeException("try add two same renderers"); + } + } + } + forceRequestLayout = true; + attached = true; + + if (activity.statusIconPool.size() > 0) { + statusIcon = activity.statusIconPool.remove(activity.statusIconPool.size() - 1); + } else { + statusIcon = new GroupCallStatusIcon(); + } + statusIcon.setCallback(this); + statusIcon.setImageView(micIconView); + updateIconColor(false); + + if (getParent() == null) { + parentContainer.addView(this, LayoutHelper.createFrame(46, 46, Gravity.LEFT | Gravity.TOP)); + parentContainer.attach(this); + } + + checkScale = true; + animateEnter = false; + animate().setListener(null).cancel(); + if (textureView.currentAnimation == null && ((secondaryView != null && primaryView == null)) && !hasImage()) { + setScaleX(0.5f); + setScaleY(0.5f); + setAlpha(0); + animateEnter = true; + invalidate(); + animate().scaleX(1f).scaleY(1f).alpha(1f).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + animateEnter = false; + invalidate(); + } + }).setDuration(100).start(); + invalidate(); + } else { + setScaleY(1f); + setScaleX(1f); + setAlpha(1f); + } + animated = false; + + 
loadThumb(); + screencastIcon.setVisibility(participant.presentation ? VISIBLE : GONE); + } + } + } + if (participant == call.videoNotAvailableParticipant) { + if (nameView.getVisibility() != INVISIBLE) { + nameView.setVisibility(INVISIBLE); + micIconView.setVisibility(INVISIBLE); + } + } else { + if (nameView.getVisibility() != VISIBLE) { + nameView.setVisibility(VISIBLE); + micIconView.setVisibility(VISIBLE); + } + } + + if (attached) { + int size; + float spanCount = 1f; + boolean useSpanSize = false; + int gridItemsCount = 0; + + boolean useTablet = GroupCallActivity.isTabletMode && (!parentContainer.inFullscreenMode || (secondaryView == null && primaryView == null)); + if (showingInFullscreen) { + size = LayoutHelper.MATCH_PARENT; + } else if (secondaryView != null && primaryView == null && !parentContainer.inFullscreenMode) { + size = 0; + } else if (showingAsScrimView) { + size = LayoutHelper.MATCH_PARENT; + } else if (secondaryView != null && primaryView == null) { + size = AndroidUtilities.dp(80); + } else if (tabletGridView != null && useTablet) { + if (tabletGridView != null) { + useSpanSize = true; + size = LayoutHelper.MATCH_PARENT; + spanCount = tabletGridView.spanCount; + gridItemsCount = tabletGridView.gridAdapter.getItemCount(); + } else { + size = AndroidUtilities.dp(46); + } + } else if (primaryView != null && secondaryView == null || !isFullscreenMode) { + if (primaryView != null) { + useSpanSize = true; + size = LayoutHelper.MATCH_PARENT; + spanCount = primaryView.spanCount; + } else { + size = AndroidUtilities.dp(46); + } + } else if (primaryView != null) { + size = AndroidUtilities.dp(80); + } else { + size = 0; + } + MarginLayoutParams layoutParams = (MarginLayoutParams) getLayoutParams(); + if (size != 0 && (layoutParams.height != size || forceRequestLayout || this.useSpanSize != useSpanSize || (useSpanSize && this.spanCount != spanCount || this.gridItemsCount != gridItemsCount))) { + layoutParams.height = size; + layoutParams.width = 
useSpanSize ? LayoutHelper.MATCH_PARENT : size; + this.useSpanSize = useSpanSize; + this.spanCount = spanCount; + checkScale = true; + if (animated) { + textureView.animateToLayout(); + updateNextLayoutAnimated = true; + } else { + textureView.requestLayout(); + } + AndroidUtilities.runOnUIThread(this::requestLayout); + parentContainer.requestLayout(); + invalidate(); + } + + if (participant.participant.self && !participant.presentation && VoIPService.getSharedInstance() != null) { + textureView.renderer.setMirror(VoIPService.getSharedInstance().isFrontFaceCamera()); + textureView.renderer.setRotateTextureWitchScreen(true); + textureView.renderer.setUseCameraRotation(true); + } else { + textureView.renderer.setMirror(false); + textureView.renderer.setRotateTextureWitchScreen(true); + textureView.renderer.setUseCameraRotation(false); + } + textureView.updateRotation(); + + if (participant.participant.self) { + textureView.renderer.setMaxTextureSize(720); + } else { + textureView.renderer.setMaxTextureSize(0); + } + + boolean hasVideoLocal = true; + + if (!ChatObject.Call.videoIsActive(participant.participant, participant.presentation, call) || !call.canStreamVideo && participant != call.videoNotAvailableParticipant) { + noVideoStubLayout.avatarImageReceiver.setCurrentAccount(currentAccount); + int peerId = MessageObject.getPeerId(participant.participant.peer); + ImageLocation imageLocation; + ImageLocation thumbLocation; + Object parentObject; + if (peerId > 0) { + TLRPC.User currentUser = AccountInstance.getInstance(currentAccount).getMessagesController().getUser(peerId); + noVideoStubLayout.avatarDrawable.setInfo(currentUser); + imageLocation = ImageLocation.getForUser(currentUser, ImageLocation.TYPE_BIG); + thumbLocation = ImageLocation.getForUser(currentUser, ImageLocation.TYPE_SMALL); + parentObject = currentUser; + } else { + TLRPC.Chat currentChat = AccountInstance.getInstance(UserConfig.selectedAccount).getMessagesController().getChat(-peerId); + 
noVideoStubLayout.avatarDrawable.setInfo(currentChat); + imageLocation = ImageLocation.getForChat(currentChat, ImageLocation.TYPE_BIG); + thumbLocation = ImageLocation.getForChat(currentChat, ImageLocation.TYPE_SMALL); + parentObject = currentChat; + } + + Drawable thumb = noVideoStubLayout.avatarDrawable; + if (thumbLocation != null) { + BitmapDrawable drawable = ImageLoader.getInstance().getImageFromMemory(thumbLocation.location, null, "50_50"); + if (drawable != null) { + thumb = drawable; + } + } + noVideoStubLayout.avatarImageReceiver.setImage(imageLocation, null, thumb, null, parentObject, 0); + noVideoStubLayout.backgroundImageReceiver.setImage(imageLocation, "50_50_b", new ColorDrawable(Theme.getColor(Theme.key_voipgroup_listViewBackground)), null, parentObject, 0); + hasVideoLocal = false; + } + + boolean skipNoStubTransition = animated && secondaryView != null && !showingInFullscreen && !hasVideoLocal; + + if (hasVideoLocal != hasVideo && !skipNoStubTransition) { + hasVideo = hasVideoLocal; + + if (noVideoStubAnimator != null) { + noVideoStubAnimator.removeAllListeners(); + noVideoStubAnimator.cancel(); + } + if (animated) { + if (!hasVideo && noVideoStubLayout.getVisibility() != View.VISIBLE) { + noVideoStubLayout.setVisibility(View.VISIBLE); + noVideoStubLayout.setAlpha(0); + } + noVideoStubAnimator = ValueAnimator.ofFloat(progressToNoVideoStub, hasVideo ? 0 : 1f); + noVideoStubAnimator.addUpdateListener(valueAnimator1 -> { + progressToNoVideoStub = (float) valueAnimator1.getAnimatedValue(); + noVideoStubLayout.setAlpha(progressToNoVideoStub); + textureView.invalidate(); + }); + noVideoStubAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + progressToNoVideoStub = hasVideo ? 0f : 1f; + noVideoStubLayout.setAlpha(progressToNoVideoStub); + noVideoStubLayout.setVisibility(hasVideo ? 
View.GONE : View.VISIBLE); + textureView.invalidate(); + } + }); + noVideoStubAnimator.start(); + } else { + progressToNoVideoStub = hasVideo ? 0f : 1f; + noVideoStubLayout.setVisibility(hasVideo ? View.GONE : View.VISIBLE); + noVideoStubLayout.setAlpha(progressToNoVideoStub); + textureView.invalidate(); + } + + if (hasVideo) { + noVideoStubLayout.updateMuteButtonState(false); + } + } + + if (participant.participant.self && VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setLocalSink(textureView.renderer, participant.presentation); + } + + statusIcon.setParticipant(participant.participant, animated); + if (noVideoStubLayout.getVisibility() == View.VISIBLE) { + noVideoStubLayout.updateMuteButtonState(true); + } + + boolean pausedInternal = false; + if (participant.participant.video != null && participant.participant.video.paused) { + pausedInternal = true; + } + if (videoIsPaused != pausedInternal) { + videoIsPaused = pausedInternal; + textureView.renderer.animate().alpha(videoIsPaused ? 
0 : 1f).setDuration(250).start(); + textureView.invalidate(); + } + + if (GroupCallActivity.paused || !hasVideo) { + if (participant.participant.self) { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setLocalSink(null, participant.presentation); + } + } else if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().removeRemoteSink(participant.participant, participant.presentation); + VoIPService.getSharedInstance().removeRemoteSink(participant.participant, participant.presentation); + } + if (GroupCallActivity.paused && textureView.renderer.isFirstFrameRendered()) { + saveThumb(); + textureView.renderer.clearFirstFrame(); + textureView.renderer.setAlpha(0f); + textureView.blurRenderer.setAlpha(0f); + } + } else { + if (!textureView.renderer.isFirstFrameRendered()) { + loadThumb(); + } + if (participant.participant.self) { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setLocalSink(textureView.renderer, participant.presentation); + } + } else if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().addRemoteSink(participant.participant, participant.presentation, textureView.renderer, null); + VoIPService.getSharedInstance().addRemoteSink(participant.participant, participant.presentation, textureView.renderer, null); + } + } + } + + updateInfo(); + } + + private void loadThumb() { + if (thumb != null) { + return; + } + thumb = call.thumbs.get(participant.presentation ? 
participant.participant.presentationEndpoint : participant.participant.videoEndpoint); + textureView.setThumb(thumb); + + if (thumb == null) { + int peerId = MessageObject.getPeerId(participant.participant.peer); + + if (participant.participant.self && participant.presentation) { + imageReceiver.setImageBitmap(new MotionBackgroundDrawable(0xff212E3A, 0xff2B5B4D, 0xff245863, 0xff274558, true)); + } else { + if (peerId > 0) { + TLRPC.User user = MessagesController.getInstance(currentAccount).getUser(peerId); + ImageLocation imageLocation = ImageLocation.getForUser(user, ImageLocation.TYPE_SMALL); + int color = user != null ? AvatarDrawable.getColorForId(user.id) : ColorUtils.blendARGB(Color.BLACK, Color.WHITE, 0.2f); + GradientDrawable gradientDrawable = new GradientDrawable(GradientDrawable.Orientation.BOTTOM_TOP, new int[]{ColorUtils.blendARGB(color, Color.BLACK, 0.2f), ColorUtils.blendARGB(color, Color.BLACK, 0.4f)}); + imageReceiver.setImage(imageLocation, "50_50_b", gradientDrawable, null, user, 0); + } else { + TLRPC.Chat chat = MessagesController.getInstance(currentAccount).getChat(-peerId); + ImageLocation imageLocation = ImageLocation.getForChat(chat, ImageLocation.TYPE_SMALL); + int color = chat != null ? 
AvatarDrawable.getColorForId(chat.id) : ColorUtils.blendARGB(Color.BLACK, Color.WHITE, 0.2f); + GradientDrawable gradientDrawable = new GradientDrawable(GradientDrawable.Orientation.BOTTOM_TOP, new int[]{ColorUtils.blendARGB(color, Color.BLACK, 0.2f), ColorUtils.blendARGB(color, Color.BLACK, 0.4f)}); + imageReceiver.setImage(imageLocation, "50_50_b", gradientDrawable, null, chat, 0); + } + } + } + } + + + public void updateInfo() { + if (!attached) { + return; + } + + String name = null; + + int peerId = MessageObject.getPeerId(participant.participant.peer); + if (peerId > 0) { + TLRPC.User currentUser = AccountInstance.getInstance(currentAccount).getMessagesController().getUser(peerId); + name = UserObject.getUserName(currentUser); + } else { + TLRPC.Chat currentChat = AccountInstance.getInstance(currentAccount).getMessagesController().getChat(-peerId); + if (currentChat != null) { + name = currentChat.title; + } + } + + nameView.setText(name); + } + + public boolean hasImage() { + return textureView.stubVisibleProgress == 1f; + } + + public void updatePosition(ViewGroup listView, ViewGroup tabletGridListView, RecyclerListView fullscreenListView, GroupCallRenderersContainer renderersContainer) { + if (showingAsScrimView || animateToScrimView || forceDetached) { + return; + } + drawFirst = false; + float progressToFullscreen = renderersContainer.progressToFullscreenMode; + if (animateToFullscreen || showingInFullscreen) { + if (primaryView != null || tabletGridView != null) { + GroupCallGridCell callUserCell = tabletGridView != null ? tabletGridView : primaryView; + ViewGroup fromListView = tabletGridView != null ? 
tabletGridListView : listView; + float fromX = callUserCell.getX() + fromListView.getX() - getLeft() - renderersContainer.getLeft(); + float fromY = callUserCell.getY() + AndroidUtilities.dp(2) + fromListView.getY() - getTop() - renderersContainer.getTop(); + + float toX = 0; + float toY = 0; + + setTranslationX(fromX * (1f - progressToFullscreen) + toX * progressToFullscreen); + setTranslationY(fromY * (1f - progressToFullscreen) + toY * progressToFullscreen); + } else { + setTranslationX(0); + setTranslationY(0); + } + + textureView.setRoundCorners(AndroidUtilities.dp(8)); + + if (secondaryView != null) { + secondaryView.setAlpha(progressToFullscreen); + } + if (!showingInFullscreen && primaryView == null && tabletGridView == null) { + setAlpha(progressToFullscreen); + } else if (!animateEnter) { + setAlpha(1f); + } + } else if (secondaryView != null) { + if (secondaryView.isRemoving(fullscreenListView)) { + setAlpha(secondaryView.getAlpha()); + } else if (primaryView == null) { + if (attached && !animateEnter) { + setAlpha(progressToFullscreen); + } + secondaryView.setAlpha(progressToFullscreen); + progressToFullscreen = 1f; + } else { + secondaryView.setAlpha(1f); + if (attached && !animateEnter) { + setAlpha(1f); + } + } + + setTranslationX(secondaryView.getX() + fullscreenListView.getX() - getLeft()); + setTranslationY(AndroidUtilities.dp(2) * (1f - progressToFullscreen) + secondaryView.getY() + fullscreenListView.getY() - getTop()); + textureView.setRoundCorners(AndroidUtilities.dp(13) * progressToFullscreen + AndroidUtilities.dp(8) * (1f - progressToFullscreen)); + } else if (primaryView != null || tabletGridView != null) { + GroupCallGridCell callUserCell; + ViewGroup fromListView; + if (tabletGridView != null && primaryView != null) { + boolean useTablet = GroupCallActivity.isTabletMode && !parentContainer.inFullscreenMode; + callUserCell = useTablet ? tabletGridView : primaryView; + fromListView = useTablet ? 
tabletGridListView : listView; + } else { + callUserCell = tabletGridView != null ? tabletGridView : primaryView; + fromListView = tabletGridView != null ? tabletGridListView : listView; + } + setTranslationX(callUserCell.getX() + fromListView.getX() - getLeft() - renderersContainer.getLeft()); + setTranslationY(callUserCell.getY() + AndroidUtilities.dp(2) + fromListView.getY() - getTop() - renderersContainer.getTop()); + textureView.setRoundCorners(AndroidUtilities.dp(8)); + + if (attached && !animateEnter) { + if (!GroupCallActivity.isTabletMode) { + drawFirst = true; + setAlpha((1f - progressToFullscreen) * callUserCell.getAlpha()); + } else if (primaryView != null && tabletGridView == null) { + setAlpha(progressToFullscreen * callUserCell.getAlpha()); + } + } + } + } + + public boolean isAttached() { + return attached; + } + + public void release() { + textureView.renderer.release(); + if (statusIcon != null) { + activity.statusIconPool.add(statusIcon); + statusIcon.setCallback(null); + statusIcon.setImageView(null); + } + statusIcon = null; + } + + public boolean isFullyVisible() { + if (showingInFullscreen || animateToFullscreen) { + return false; + } + return attached && textureView.renderer.isFirstFrameRendered() && getAlpha() == 1; + } + + public boolean isVisible() { + if (showingInFullscreen || animateToFullscreen) { + return false; + } + return attached && textureView.renderer.isFirstFrameRendered(); + } + + @Override + public void invalidate() { + super.invalidate(); + if (!invalidateFromChild) { + textureView.invalidate(); + } + if (primaryView != null) { + primaryView.invalidate(); + if (activity.getScrimView() == primaryView) { + activity.getContainerView().invalidate(); + } + } + if (secondaryView != null) { + secondaryView.invalidate(); + if (secondaryView.getParent() != null) { + ((View) secondaryView.getParent()).invalidate(); + } + } + if (getParent() != null) { + ((View) getParent()).invalidate(); + } + } + + + public void forceDetach(boolean 
removeSink) { + GroupCallMiniTextureView viewToRemove = this; + forceDetached = true; + attached = false; + parentContainer.detach(viewToRemove); + + if (removeSink) { + if (participant.participant.self) { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setLocalSink(null, participant.presentation); + } + } else { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().removeRemoteSink(participant.participant, participant.presentation); + } + } + } + + saveThumb(); + + if (noVideoStubAnimator != null) { + noVideoStubAnimator.removeAllListeners(); + noVideoStubAnimator.cancel(); + } + + textureView.renderer.release(); + } + + public void saveThumb() { + if (participant != null && textureView.renderer.getMeasuredHeight() != 0 && textureView.renderer.getMeasuredWidth() != 0) { + getRenderBufferBitmap((bitmap, rotation1) -> { + if (bitmap != null && bitmap.getPixel(0, 0) != Color.TRANSPARENT) { + Utilities.stackBlurBitmap(bitmap, Math.max(7, Math.max(bitmap.getWidth(), bitmap.getHeight()) / 180)); + AndroidUtilities.runOnUIThread(() -> call.thumbs.put(participant.presentation ? 
participant.participant.presentationEndpoint : participant.participant.videoEndpoint, bitmap)); + } + }); + } + } + + public void setViews(GroupCallGridCell primaryView, GroupCallFullscreenAdapter.GroupCallUserCell secondaryView, GroupCallGridCell tabletGrid) { + this.primaryView = primaryView; + this.secondaryView = secondaryView; + this.tabletGridView = tabletGrid; + } + + public void setAmplitude(double value) { + statusIcon.setAmplitude(value); + noVideoStubLayout.setAmplitude(value); + } + + public void setZoom(boolean inPinchToZoom, float pinchScale, float pinchCenterX, float pinchCenterY, float pinchTranslationX, float pinchTranslationY) { + if (this.pinchScale != pinchScale || this.pinchCenterX != pinchCenterX || this.pinchCenterY != pinchCenterY || this.pinchTranslationX != pinchTranslationX || this.pinchTranslationY != pinchTranslationY) { + this.inPinchToZoom = inPinchToZoom; + this.pinchScale = pinchScale; + this.pinchCenterX = pinchCenterX; + this.pinchCenterY = pinchCenterY; + this.pinchTranslationX = pinchTranslationX; + this.pinchTranslationY = pinchTranslationY; + textureView.invalidate(); + } + } + + public void setSwipeToBack(boolean swipeToBack, float swipeToBackDy) { + if (this.swipeToBack != swipeToBack || this.swipeToBackDy != swipeToBackDy) { + this.swipeToBack = swipeToBack; + this.swipeToBackDy = swipeToBackDy; + textureView.invalidate(); + invalidate(); + } + } + + public void runOnFrameRendered(Runnable runnable) { + if (textureView.renderer.isFirstFrameRendered()) { + runnable.run(); + } else { + AndroidUtilities.runOnUIThread(runnable, 250); + onFirstFrameRunnables.add(runnable); + } + } + + int lastIconColor; + int animateToColor; + int lastSpeakingFrameColor; + ValueAnimator colorAnimator; + + @Override + public void onStatusChanged() { + invalidate(); + updateIconColor(true); + if (noVideoStubLayout.getVisibility() == View.VISIBLE) { + noVideoStubLayout.updateMuteButtonState(true); + } + } + + private void updateIconColor(boolean 
animated) { + if (statusIcon == null) { + return; + } + int newColor; + int newSpeakingFrameColor; + if (statusIcon.isMutedByMe()) { + newSpeakingFrameColor = newColor = Theme.getColor(Theme.key_voipgroup_mutedByAdminIcon); + } else if (statusIcon.isSpeaking()) { + newSpeakingFrameColor = newColor = Theme.getColor(Theme.key_voipgroup_speakingText); + } else { + newSpeakingFrameColor = Theme.getColor(Theme.key_voipgroup_speakingText); + newColor = Color.WHITE; + } + + if (animateToColor == newColor) { + return; + } + if (colorAnimator != null) { + colorAnimator.removeAllListeners(); + colorAnimator.cancel(); + } + + if (!animated) { + // micIconView.setColorFilter(new PorterDuffColorFilter(animateToColor = lastIconColor = newColor, PorterDuff.Mode.MULTIPLY)); + speakingPaint.setColor(lastSpeakingFrameColor = newSpeakingFrameColor); + } else { + int colorFrom = lastIconColor; + int colorFromSpeaking = lastSpeakingFrameColor; + animateToColor = newColor; + colorAnimator = ValueAnimator.ofFloat(0, 1f); + colorAnimator.addUpdateListener(valueAnimator -> { + float v = (float) valueAnimator.getAnimatedValue(); + lastIconColor = ColorUtils.blendARGB(colorFrom, newColor, v); + lastSpeakingFrameColor = ColorUtils.blendARGB(colorFromSpeaking, newSpeakingFrameColor, v); + // micIconView.setColorFilter(new PorterDuffColorFilter(lastIconColor, PorterDuff.Mode.MULTIPLY)); + speakingPaint.setColor(lastSpeakingFrameColor); + }); + colorAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + animateToColor = lastIconColor = newColor; + lastSpeakingFrameColor = newSpeakingFrameColor; + // micIconView.setColorFilter(new PorterDuffColorFilter(lastIconColor, PorterDuff.Mode.MULTIPLY)); + speakingPaint.setColor(lastSpeakingFrameColor); + } + }); + colorAnimator.start(); + } + } + + public void runDelayedAnimations() { + for (int i = 0; i < onFirstFrameRunnables.size(); i++) { + onFirstFrameRunnables.get(i).run(); + } + 
onFirstFrameRunnables.clear(); + } + + int collapseSize; + int fullSize; + + public void updateSize(int collapseSize) { + int fullSize = parentContainer.getMeasuredWidth() - AndroidUtilities.dp(6); + if ((this.collapseSize != collapseSize && collapseSize > 0) || (this.fullSize != fullSize && fullSize > 0)) { + if (collapseSize != 0) { + this.collapseSize = collapseSize; + } + if (fullSize != 0) { + this.fullSize = fullSize; + } + nameView.setFullLayoutAdditionalWidth(fullSize - collapseSize, 0); + } + } + + private class NoVideoStubLayout extends View { + + public ImageReceiver avatarImageReceiver = new ImageReceiver(); + public ImageReceiver backgroundImageReceiver = new ImageReceiver(); + AvatarDrawable avatarDrawable = new AvatarDrawable(); + + BlobDrawable tinyWaveDrawable; + BlobDrawable bigWaveDrawable; + + Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); + Paint backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + float amplitude; + float animateToAmplitude; + float animateAmplitudeDiff; + float wavesEnter = 0f; + float cx, cy; + float speakingProgress; + + public NoVideoStubLayout(@NonNull Context context) { + super(context); + + tinyWaveDrawable = new BlobDrawable(9); + bigWaveDrawable = new BlobDrawable(12); + + tinyWaveDrawable.minRadius = AndroidUtilities.dp(76); + tinyWaveDrawable.maxRadius = AndroidUtilities.dp(92); + tinyWaveDrawable.generateBlob(); + + bigWaveDrawable.minRadius = AndroidUtilities.dp(80); + bigWaveDrawable.maxRadius = AndroidUtilities.dp(95); + bigWaveDrawable.generateBlob(); + + paint.setColor(ColorUtils.blendARGB(Theme.getColor(Theme.key_voipgroup_listeningText), Theme.getColor(Theme.key_voipgroup_speakingText), speakingProgress)); + paint.setAlpha((int) (255 * 0.4f)); + + backgroundPaint.setColor(ColorUtils.setAlphaComponent(Color.BLACK, (int) (255 * 0.5f))); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + float size = 
AndroidUtilities.dp(157); + cx = getMeasuredWidth() >> 1; + cy = (getMeasuredHeight() >> 1) + (GroupCallActivity.isLandscapeMode ? 0 : -getMeasuredHeight() * 0.12f); + avatarImageReceiver.setRoundRadius((int) (size / 2f)); + avatarImageReceiver.setImageCoords(cx - size / 2, cy - size / 2, size, size); + } + + @Override + protected void onDraw(Canvas canvas) { + super.onDraw(canvas); + + AndroidUtilities.rectTmp.set(textureView.getX() + textureView.currentClipHorizontal, textureView.getY() + textureView.currentClipVertical, textureView.getX() + textureView.getMeasuredWidth() - textureView.currentClipHorizontal, textureView.getY() + textureView.getMeasuredHeight() + textureView.currentClipVertical); + backgroundImageReceiver.setImageCoords(AndroidUtilities.rectTmp.left, AndroidUtilities.rectTmp.top, AndroidUtilities.rectTmp.width(), AndroidUtilities.rectTmp.height()); + backgroundImageReceiver.setRoundRadius((int) textureView.roundRadius); + backgroundImageReceiver.draw(canvas); + + canvas.drawRoundRect(AndroidUtilities.rectTmp, textureView.roundRadius, textureView.roundRadius, backgroundPaint); + + if (animateToAmplitude != amplitude) { + amplitude += animateAmplitudeDiff * 16; + if (animateAmplitudeDiff > 0) { + if (amplitude > animateToAmplitude) { + amplitude = animateToAmplitude; + } + } else { + if (amplitude < animateToAmplitude) { + amplitude = animateToAmplitude; + } + } + } + + if (switchProgress != 1f) { + if (prevState != null) { + switchProgress += 16 / 220f; + } + if (switchProgress >= 1.0f) { + switchProgress = 1f; + prevState = null; + } + } + + float scale = 1f + 0.8f * amplitude; + canvas.save(); + canvas.scale(scale, scale, cx, cy); + + if (currentState != null) { + currentState.update((int) (cy - AndroidUtilities.dp(100)), (int) (cx - AndroidUtilities.dp(100)), AndroidUtilities.dp(200), 16, amplitude); + } + bigWaveDrawable.update(amplitude, 1f); + tinyWaveDrawable.update(amplitude, 1f); + + for (int i = 0; i < 2; i++) { + float alpha; + if (i == 
0 && prevState != null) { + paint.setShader(prevState.shader); + alpha = 1f - switchProgress; + } else if (i == 1 && currentState != null) { + paint.setShader(currentState.shader); + alpha = switchProgress; + } else { + continue; + } + + paint.setAlpha((int) (76 * alpha)); + bigWaveDrawable.draw(cx, cy, canvas, paint); + tinyWaveDrawable.draw(cx, cy, canvas, paint); + } + canvas.restore(); + + scale = 1f + 0.2f * amplitude; + canvas.save(); + canvas.scale(scale, scale, cx, cy); + avatarImageReceiver.draw(canvas); + canvas.restore(); + + invalidate(); + } + + private GroupCallActivity.WeavingState[] states = new GroupCallActivity.WeavingState[3]; + private GroupCallActivity.WeavingState currentState; + private GroupCallActivity.WeavingState prevState; + + int muteButtonState = -1; + private final static int MUTE_BUTTON_STATE_MUTE = 1; + private final static int MUTE_BUTTON_STATE_UNMUTE = 0; + private final static int MUTED_BY_ADMIN = 2; + float switchProgress = 1f; + + private void updateMuteButtonState(boolean animated) { + int newButtonState; + if (statusIcon.isMutedByMe() || statusIcon.isMutedByAdmin()) { + newButtonState = MUTED_BY_ADMIN; + } else if (statusIcon.isSpeaking()) { + newButtonState = MUTE_BUTTON_STATE_MUTE; + } else { + newButtonState = MUTE_BUTTON_STATE_UNMUTE; + } + if (newButtonState == muteButtonState) { + return; + } + muteButtonState = newButtonState; + + if (states[muteButtonState] == null) { + states[muteButtonState] = new GroupCallActivity.WeavingState(muteButtonState); + if (muteButtonState == MUTED_BY_ADMIN) { + states[muteButtonState].shader = new LinearGradient(0, 400, 400, 0, new int[]{Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient), Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient3), Theme.getColor(Theme.key_voipgroup_mutedByAdminGradient2)}, null, Shader.TileMode.CLAMP); + } else if (muteButtonState == MUTE_BUTTON_STATE_MUTE) { + states[muteButtonState].shader = new RadialGradient(200, 200, 200, new 
int[]{Theme.getColor(Theme.key_voipgroup_muteButton), Theme.getColor(Theme.key_voipgroup_muteButton3)}, null, Shader.TileMode.CLAMP); + } else { + states[muteButtonState].shader = new RadialGradient(200, 200, 200, new int[]{Theme.getColor(Theme.key_voipgroup_unmuteButton2), Theme.getColor(Theme.key_voipgroup_unmuteButton)}, null, Shader.TileMode.CLAMP); + } + } + if (states[muteButtonState] != currentState) { + prevState = currentState; + currentState = states[muteButtonState]; + if (prevState == null || !animated) { + switchProgress = 1; + prevState = null; + } else { + switchProgress = 0; + } + } + invalidate(); + } + + public void setAmplitude(double value) { + float amplitude = (float) value / 80f; + if (amplitude > 1f) { + amplitude = 1f; + } else if (amplitude < 0) { + amplitude = 0; + } + animateToAmplitude = amplitude; + animateAmplitudeDiff = (animateToAmplitude - this.amplitude) / 200; + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + avatarImageReceiver.onAttachedToWindow(); + backgroundImageReceiver.onAttachedToWindow(); + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + avatarImageReceiver.onDetachedFromWindow(); + backgroundImageReceiver.onDetachedFromWindow(); + } + } + + public String getName() { + int peerId = MessageObject.getPeerId(participant.participant.peer); + if (peerId > 0) { + TLRPC.User currentUser = AccountInstance.getInstance(UserConfig.selectedAccount).getMessagesController().getUser(peerId); + return UserObject.getUserName(currentUser); + } else { + TLRPC.Chat currentChat = AccountInstance.getInstance(UserConfig.selectedAccount).getMessagesController().getChat(-peerId); + return currentChat.title; + } + } + + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + imageReceiver.onDetachedFromWindow(); + } + + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + 
imageReceiver.onAttachedToWindow(); + } + + ValueAnimator flipAnimator; + boolean flipHalfReached; + + public void startFlipAnimation() { + if (flipAnimator != null) { + return; + } + flipHalfReached = false; + + if (blurredFlippingStub == null) { + blurredFlippingStub = new ImageView(getContext()); + } else { + blurredFlippingStub.animate().cancel(); + } + if (textureView.renderer.isFirstFrameRendered()) { + Bitmap bitmap = textureView.blurRenderer.getBitmap(100, 100); + if (bitmap != null) { + Utilities.blurBitmap(bitmap, 3, 1, bitmap.getWidth(), bitmap.getHeight(), bitmap.getRowBytes()); + Drawable drawable = new BitmapDrawable(bitmap); + blurredFlippingStub.setBackground(drawable); + } + blurredFlippingStub.setAlpha(0f); + } else { + blurredFlippingStub.setAlpha(1f); + } + + if (blurredFlippingStub.getParent() == null) { + textureView.addView(blurredFlippingStub); + } + ((LayoutParams) blurredFlippingStub.getLayoutParams()).gravity = Gravity.CENTER; + + flipAnimator = ValueAnimator.ofFloat(0, 1f); + flipAnimator.addUpdateListener(valueAnimator -> { + float v = (float) valueAnimator.getAnimatedValue(); + float rotation; + boolean halfReached = false; + if (v < 0.5f) { + rotation = v; + } else { + halfReached = true; + rotation = v - 1f; + } + + if (halfReached && !flipHalfReached) { + blurredFlippingStub.setAlpha(1f); + flipHalfReached = true; + textureView.renderer.clearImage(); + } + + rotation *= 180; + blurredFlippingStub.setRotationY(rotation); + textureView.renderer.setRotationY(rotation); + }); + + flipAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + flipAnimator = null; + textureView.setRotationY(0); + + if (!flipHalfReached) { + textureView.renderer.clearImage(); + // + } + } + }); + flipAnimator.setDuration(400); + flipAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + flipAnimator.start(); + } +} diff --git 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallRenderersContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallRenderersContainer.java new file mode 100644 index 000000000..7801bc1da --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallRenderersContainer.java @@ -0,0 +1,1414 @@ +package org.telegram.ui.Components.voip; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ObjectAnimator; +import android.animation.ValueAnimator; +import android.annotation.SuppressLint; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.drawable.Drawable; +import android.graphics.drawable.GradientDrawable; +import android.os.Build; +import android.os.SystemClock; +import android.text.SpannableStringBuilder; +import android.text.TextUtils; +import android.util.SparseIntArray; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewConfiguration; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.core.content.ContextCompat; +import androidx.core.graphics.ColorUtils; +import androidx.recyclerview.widget.RecyclerView; + +import com.google.android.exoplayer2.util.Log; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.MessageObject; +import org.telegram.messenger.MessagesController; +import org.telegram.messenger.NotificationCenter; +import org.telegram.messenger.R; +import org.telegram.messenger.UserObject; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.BackDrawable; +import 
org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.AvatarsImageView; +import org.telegram.ui.Components.CrossOutDrawable; +import org.telegram.ui.Components.CubicBezierInterpolator; +import org.telegram.ui.Components.GroupCallFullscreenAdapter; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.TypefaceSpan; +import org.telegram.ui.Components.UndoView; +import org.telegram.ui.GroupCallActivity; + +import java.util.ArrayList; + +import static org.telegram.ui.GroupCallActivity.TRANSITION_DURATION; +import static org.telegram.ui.GroupCallActivity.isLandscapeMode; + +@SuppressLint("ViewConstructor") +public class GroupCallRenderersContainer extends FrameLayout { + + private final int touchSlop; + public boolean inFullscreenMode; + public float progressToFullscreenMode; + public int fullscreenPeerId; + public ChatObject.VideoParticipant fullscreenParticipant; + public boolean hasPinnedVideo; + public long lastUpdateTime; + public float progressToScrimView; + public int listWidth; + ValueAnimator fullscreenAnimator; + public boolean inLayout; + + private SparseIntArray attachedPeerIds = new SparseIntArray(); + + int animationIndex; + + public GroupCallMiniTextureView fullscreenTextureView; + private GroupCallMiniTextureView outFullscreenTextureView; + private final RecyclerView listView; + private final RecyclerView fullscreenListView; + private final ArrayList attachedRenderers; + + private final FrameLayout speakingMembersToast; + private final AvatarsImageView speakingMembersAvatars; + private final TextView speakingMembersText; + private boolean showSpeakingMembersToast; + private int speakingToastPeerId; + private float showSpeakingMembersToastProgress; + + private float speakingMembersToastChangeProgress = 1f; + private float speakingMembersToastFromLeft; + private float speakingMembersToastFromTextLeft; + private float speakingMembersToastFromRight; + private boolean animateSpeakingOnNextDraw = true; + + private 
boolean drawRenderesOnly; + private boolean drawFirst; + private boolean notDrawRenderes; + + + boolean uiVisible = true; + + float progressToHideUi; + + Drawable topShadowDrawable; + CrossOutDrawable pinDrawable; + TextView pinTextView; + TextView unpinTextView; + View pinContainer; + + boolean hideUiRunnableIsScheduled; + Runnable hideUiRunnable = new Runnable() { + @Override + public void run() { + if (!canHideUI()) { + AndroidUtilities.runOnUIThread(hideUiRunnable, 3000); + return; + } + hideUiRunnableIsScheduled = false; + setUiVisible(false); + } + }; + + ChatObject.Call call; + GroupCallActivity groupCallActivity; + + private final ImageView backButton; + private final ImageView pinButton; + private final View topShadowView; + + private float pinchStartCenterX; + private float pinchStartCenterY; + private float pinchStartDistance; + private float pinchTranslationX; + private float pinchTranslationY; + private boolean isInPinchToZoomTouchMode; + + private float pinchCenterX; + private float pinchCenterY; + + private int pointerId1, pointerId2; + + float pinchScale = 1f; + private boolean zoomStarted; + private boolean canZoomGesture; + ValueAnimator zoomBackAnimator; + + long tapTime; + boolean tapGesture; + float tapX, tapY; + boolean swipeToBackGesture; + boolean maybeSwipeToBackGesture; + float swipeToBackDy; + ValueAnimator swipeToBackAnimator; + + public UndoView[] undoView = new UndoView[2]; + + public boolean swipedBack; + private boolean isTablet; + + public GroupCallRenderersContainer(@NonNull Context context, RecyclerView listView, RecyclerView fullscreenListView, ArrayList attachedRenderers, ChatObject.Call call, GroupCallActivity groupCallActivity) { + super(context); + this.listView = listView; + this.fullscreenListView = fullscreenListView; + this.attachedRenderers = attachedRenderers; + this.call = call; + this.groupCallActivity = groupCallActivity; + + backButton = new ImageView(context) { + @Override + protected void onMeasure(int 
widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(ActionBar.getCurrentActionBarHeight(), MeasureSpec.EXACTLY)); + } + }; + BackDrawable backDrawable = new BackDrawable(false); + backDrawable.setColor(Color.WHITE); + backButton.setImageDrawable(backDrawable); + backButton.setScaleType(ImageView.ScaleType.FIT_CENTER); + backButton.setPadding(AndroidUtilities.dp(16), 0, AndroidUtilities.dp(16), 0); + backButton.setBackground(Theme.createSelectorDrawable(ColorUtils.setAlphaComponent(Color.WHITE, 55))); + topShadowView = new View(context); + topShadowDrawable = new GradientDrawable(GradientDrawable.Orientation.BOTTOM_TOP, new int[]{Color.TRANSPARENT, ColorUtils.setAlphaComponent(Color.BLACK, (int) (255 * 0.45f))}); + topShadowView.setBackgroundDrawable(topShadowDrawable); + addView(topShadowView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 120)); + + addView(backButton, LayoutHelper.createFrame(56, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP)); + backButton.setOnClickListener(view -> onBackPressed()); + + pinButton = new ImageView(context) { + @Override + public void invalidate() { + super.invalidate(); + pinContainer.invalidate(); + GroupCallRenderersContainer.this.invalidate(); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(ActionBar.getCurrentActionBarHeight(), MeasureSpec.EXACTLY)); + } + }; + + final Drawable pinRippleDrawable = Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(20), Color.TRANSPARENT, ColorUtils.setAlphaComponent(Color.WHITE, 100)); + pinContainer = new View(context) { + + @Override + protected void drawableStateChanged() { + super.drawableStateChanged(); + pinRippleDrawable.setState(getDrawableState()); + } + + @Override + public boolean verifyDrawable(Drawable drawable) { + return pinRippleDrawable == drawable || super.verifyDrawable(drawable); + } + + 
@Override + public void jumpDrawablesToCurrentState() { + super.jumpDrawablesToCurrentState(); + pinRippleDrawable.jumpToCurrentState(); + } + + + @Override + protected void dispatchDraw(Canvas canvas) { + float w = pinTextView.getMeasuredWidth() * (1f - pinDrawable.getProgress()) + unpinTextView.getMeasuredWidth() * pinDrawable.getProgress(); + canvas.save(); + pinRippleDrawable.setBounds(0, 0, AndroidUtilities.dp(50) + (int) w, getMeasuredHeight()); + pinRippleDrawable.draw(canvas); + super.dispatchDraw(canvas); + } + }; + pinContainer.setOnClickListener(view -> { + if (inFullscreenMode) { + hasPinnedVideo = !hasPinnedVideo; + pinDrawable.setCrossOut(hasPinnedVideo, true); + requestLayout(); + } + }); + pinRippleDrawable.setCallback(pinContainer); + + addView(pinContainer); + + pinDrawable = new CrossOutDrawable(context, R.drawable.msg_pin_filled, null); + pinDrawable.setOffsets(-AndroidUtilities.dp(1), AndroidUtilities.dp(2), AndroidUtilities.dp(1)); + pinButton.setImageDrawable(pinDrawable); + pinButton.setPadding(AndroidUtilities.dp(16), 0, AndroidUtilities.dp(16), 0); + addView(pinButton, LayoutHelper.createFrame(56, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP)); + + + pinTextView = new TextView(context); + pinTextView.setTextColor(Color.WHITE); + pinTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + pinTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + pinTextView.setText(LocaleController.getString("CallVideoPin", R.string.CallVideoPin)); + + unpinTextView = new TextView(context); + unpinTextView.setTextColor(Color.WHITE); + unpinTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + unpinTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + unpinTextView.setText(LocaleController.getString("CallVideoUnpin", R.string.CallVideoUnpin)); + + + addView(pinTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP)); + addView(unpinTextView, 
LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP)); + + Drawable toastBackgroundDrawable = Theme.createRoundRectDrawable(AndroidUtilities.dp(18), ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_listViewBackground), (int) (255 * 0.8f))); + speakingMembersToast = new FrameLayout(context) { + @Override + protected void dispatchDraw(Canvas canvas) { + if (speakingMembersToastChangeProgress == 1f) { + toastBackgroundDrawable.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight()); + speakingMembersAvatars.setTranslationX(0); + speakingMembersText.setTranslationX(0); + } else { + float progress = CubicBezierInterpolator.DEFAULT.getInterpolation(speakingMembersToastChangeProgress); + float offset = (speakingMembersToastFromLeft - getLeft()) * (1f - progress); + float offsetText = (speakingMembersToastFromTextLeft - speakingMembersText.getLeft()) * (1f - progress); + toastBackgroundDrawable.setBounds((int) offset, 0, getMeasuredWidth() + (int) ((speakingMembersToastFromRight - getRight()) * (1f - progress)), getMeasuredHeight()); + speakingMembersAvatars.setTranslationX(offset); + speakingMembersText.setTranslationX(-offsetText); + } + toastBackgroundDrawable.draw(canvas); + super.dispatchDraw(canvas); + } + }; + + speakingMembersAvatars = new AvatarsImageView(context); + speakingMembersAvatars.setStyle(AvatarsImageView.STYLE_GROUP_CALL_TOOLTIP); + + speakingMembersToast.setClipChildren(false); + speakingMembersToast.setClipToPadding(false); + speakingMembersToast.addView(speakingMembersAvatars, LayoutHelper.createFrame(100, 32, Gravity.CENTER_VERTICAL, 0, 0, 0, 0)); + + + speakingMembersText = new TextView(context); + speakingMembersText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + speakingMembersText.setTextColor(Color.WHITE); + speakingMembersText.setLines(1); + speakingMembersText.setEllipsize(TextUtils.TruncateAt.END); + speakingMembersToast.addView(speakingMembersText, 
LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL)); + + addView(speakingMembersToast, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 36, Gravity.CENTER_HORIZONTAL, 0, 0, 0, 0)); + + ViewConfiguration configuration = ViewConfiguration.get(getContext()); + + touchSlop = configuration.getScaledTouchSlop(); + + for (int a = 0; a < 2; a++) { + undoView[a] = new UndoView(context) { + @Override + public void invalidate() { + super.invalidate(); + GroupCallRenderersContainer.this.invalidate(); + } + }; + undoView[a].setHideAnimationType(2); + undoView[a].setAdditionalTranslationY(AndroidUtilities.dp(10)); + addView(undoView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 16, 0, 0, 8)); + } + + pinContainer.setVisibility(View.GONE); + setIsTablet(GroupCallActivity.isTabletMode); + } + + protected void onBackPressed() { + + } + + public void setIsTablet(boolean tablet) { + if (isTablet != tablet) { + isTablet = tablet; + FrameLayout.LayoutParams lp = (LayoutParams) backButton.getLayoutParams(); + lp.gravity = tablet ? (Gravity.RIGHT | Gravity.BOTTOM) : (Gravity.LEFT | Gravity.TOP); + lp.rightMargin = tablet ? AndroidUtilities.dp(GroupCallActivity.TABLET_LIST_SIZE + 8) : 0; + lp.bottomMargin = tablet ? 
-AndroidUtilities.dp(8) : 0; + if (isTablet) { + backButton.setImageDrawable(ContextCompat.getDrawable(getContext(), R.drawable.msg_calls_minimize)); + } else { + BackDrawable backDrawable = new BackDrawable(false); + backDrawable.setColor(Color.WHITE); + backButton.setImageDrawable(backDrawable); + } + } + } + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (drawFirst) { + if (child instanceof GroupCallMiniTextureView) { + if (((GroupCallMiniTextureView) child).drawFirst) { + float listTop = listView.getY() - getTop(); + float listBottom = listTop + listView.getMeasuredHeight() - listView.getTranslationY(); + canvas.save(); + canvas.clipRect(0, listTop, getMeasuredWidth(), listBottom); + boolean r = super.drawChild(canvas, child, drawingTime); + canvas.restore(); + return r; + } + } + return true; + } + if (child == undoView[0] || child == undoView[1]) { + return true; + } + if (child instanceof GroupCallMiniTextureView) { + GroupCallMiniTextureView textureView = (GroupCallMiniTextureView) child; + + if (textureView == fullscreenTextureView || textureView == outFullscreenTextureView || notDrawRenderes || textureView.drawFirst) { + return true; + } + if (textureView.primaryView != null) { + float listTop = listView.getY() - getTop(); + float listBottom = listTop + listView.getMeasuredHeight() - listView.getTranslationY(); + float progress = progressToFullscreenMode; + if (textureView.secondaryView == null) { + progress = 0f; + } + canvas.save(); + canvas.clipRect(0, listTop * (1f - progress), getMeasuredWidth(), listBottom * (1f - progress) + getMeasuredHeight() * progress); + boolean r = super.drawChild(canvas, child, drawingTime); + canvas.restore(); + return r; + } else if (GroupCallActivity.isTabletMode) { + canvas.save(); + canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight()); + boolean r = super.drawChild(canvas, child, drawingTime); + canvas.restore(); + return r; + } else { + return 
super.drawChild(canvas, child, drawingTime); + } + } + if (drawRenderesOnly) { + return true; + } + return super.drawChild(canvas, child, drawingTime); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (GroupCallActivity.isTabletMode) { + drawRenderesOnly = true; + super.dispatchDraw(canvas); + drawRenderesOnly = false; + } + + drawFirst = true; + super.dispatchDraw(canvas); + drawFirst = false; + + if (outFullscreenTextureView != null || fullscreenTextureView != null) { + float listTop = listView.getY() - getTop(); + float listBottom = listTop + listView.getMeasuredHeight() - listView.getTranslationY(); + float progress = progressToFullscreenMode; + canvas.save(); + if (!GroupCallActivity.isTabletMode && fullscreenTextureView != null && !fullscreenTextureView.forceDetached && fullscreenTextureView.primaryView != null) { + canvas.clipRect(0, listTop * (1f - progress), getMeasuredWidth(), listBottom * (1f - progress) + getMeasuredHeight() * progress); + } else if (GroupCallActivity.isTabletMode) { + canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight()); + } + if (outFullscreenTextureView != null && outFullscreenTextureView.getParent() != null) { + canvas.save(); + canvas.translate(outFullscreenTextureView.getX(), outFullscreenTextureView.getY()); + outFullscreenTextureView.draw(canvas); + canvas.restore(); + } + if (fullscreenTextureView != null && fullscreenTextureView.getParent() != null) { + if (fullscreenTextureView.getAlpha() != 1f) { + AndroidUtilities.rectTmp.set(fullscreenTextureView.getX(), fullscreenTextureView.getY(), fullscreenTextureView.getX() + fullscreenTextureView.getMeasuredWidth(), fullscreenTextureView.getY() + fullscreenTextureView.getMeasuredHeight()); + canvas.saveLayerAlpha(AndroidUtilities.rectTmp, (int) (255 * fullscreenTextureView.getAlpha()), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); + } + boolean swipeToBack = swipeToBackGesture || swipeToBackAnimator != null; + if (swipeToBack) { + 
canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight() - ((isLandscapeMode || GroupCallActivity.isTabletMode) ? 0 : AndroidUtilities.dp(90))); + } + canvas.translate(fullscreenTextureView.getX(), fullscreenTextureView.getY()); + fullscreenTextureView.setSwipeToBack(swipeToBack, swipeToBackDy); + fullscreenTextureView.setZoom(zoomStarted || zoomBackAnimator != null, pinchScale, pinchCenterX, pinchCenterY, pinchTranslationX, pinchTranslationY); + fullscreenTextureView.draw(canvas); + canvas.restore(); + } + canvas.restore(); + } + for (int i = 0; i < 2; i++) { + if (undoView[i].getVisibility() == View.VISIBLE) { + canvas.save(); + float offset = isLandscapeMode ? 0 : -AndroidUtilities.dp(90) * (1f - progressToHideUi); + canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight() - (isLandscapeMode ? 0 : AndroidUtilities.dp(90)) + offset - AndroidUtilities.dp(18)); + if (isTablet) { + canvas.translate(undoView[i].getX() - AndroidUtilities.dp(8), undoView[i].getY() - AndroidUtilities.dp(8)); + } else { + canvas.translate(undoView[i].getX() - AndroidUtilities.dp(8), undoView[i].getY() - (isLandscapeMode ? 
0 : AndroidUtilities.dp(90)) + offset - AndroidUtilities.dp(26)); + } + if (undoView[i].getAlpha() != 1f) { + canvas.saveLayerAlpha(0, 0, undoView[i].getMeasuredWidth(), undoView[i].getMeasuredHeight(), (int) (255 * undoView[i].getAlpha()), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); + } + canvas.scale(undoView[i].getScaleX(), undoView[i].getScaleY(), undoView[i].getMeasuredWidth() / 2f, undoView[i].getMeasuredHeight() / 2f); + undoView[i].draw(canvas); + canvas.restore(); + canvas.restore(); + } + } + float a = progressToFullscreenMode * (1f - progressToHideUi); + if (replaceFullscreenViewAnimator != null && outFullscreenTextureView != null && fullscreenTextureView != null) { + float shadowAlpha = a; + if (outFullscreenTextureView.hasVideo != fullscreenTextureView.hasVideo) { + if (!fullscreenTextureView.hasVideo) { + shadowAlpha *= (1f - fullscreenTextureView.getAlpha()); + } else { + shadowAlpha *= fullscreenTextureView.getAlpha(); + } + } else if (!fullscreenTextureView.hasVideo) { + shadowAlpha = 0; + } + topShadowDrawable.setAlpha((int) (255 * shadowAlpha)); + } else if (fullscreenTextureView != null) { + topShadowDrawable.setAlpha((int) (255 * a * (1f - fullscreenTextureView.progressToNoVideoStub))); + } else { + topShadowDrawable.setAlpha((int) (255 * a)); + } + + backButton.setAlpha(a); + pinButton.setAlpha(a); + + float x1 = getMeasuredWidth() - pinTextView.getMeasuredWidth(); + float x2 = getMeasuredWidth() - unpinTextView.getMeasuredWidth(); + float pinY = (ActionBar.getCurrentActionBarHeight() - pinTextView.getMeasuredHeight()) / 2f - AndroidUtilities.dp(1); + float pinX = x2 * pinDrawable.getProgress() + x1 * (1f - pinDrawable.getProgress()) - AndroidUtilities.dp(21); + if (GroupCallActivity.isTabletMode) { + pinX -= AndroidUtilities.dp(GroupCallActivity.TABLET_LIST_SIZE + 8); + } else { + pinX -= (GroupCallActivity.isLandscapeMode ? 
AndroidUtilities.dp(180) : 0); + } + pinTextView.setTranslationX(pinX); + unpinTextView.setTranslationX(pinX); + pinTextView.setTranslationY(pinY); + unpinTextView.setTranslationY(pinY); + + pinContainer.setTranslationX(pinX - AndroidUtilities.dp(36f)); + pinContainer.setTranslationY((ActionBar.getCurrentActionBarHeight() - pinContainer.getMeasuredHeight()) / 2f); + + pinButton.setTranslationX(pinX - AndroidUtilities.dp(44f)); + + pinTextView.setAlpha(a * (1f - pinDrawable.getProgress())); + unpinTextView.setAlpha(a * pinDrawable.getProgress()); + pinContainer.setAlpha(a); + + if (speakingMembersToastChangeProgress != 1) { + speakingMembersToastChangeProgress += 16 / 220f; + if (speakingMembersToastChangeProgress > 1f) { + speakingMembersToastChangeProgress = 1f; + } else { + invalidate(); + } + speakingMembersToast.invalidate(); + } + + + if (showSpeakingMembersToast && showSpeakingMembersToastProgress != 1f) { + showSpeakingMembersToastProgress += 16 / 150f; + if (showSpeakingMembersToastProgress > 1f) { + showSpeakingMembersToastProgress = 1f; + } else { + invalidate(); + } + } else if (!showSpeakingMembersToast && showSpeakingMembersToastProgress != 0) { + showSpeakingMembersToastProgress -= 16 / 150f; + if (showSpeakingMembersToastProgress < 0) { + showSpeakingMembersToastProgress = 0; + } else { + invalidate(); + } + } + + + if (isLandscapeMode) { + speakingMembersToast.setTranslationY(AndroidUtilities.dp(16)); + } else { + speakingMembersToast.setTranslationY(ActionBar.getCurrentActionBarHeight() * (1f - progressToHideUi) + AndroidUtilities.dp(8) + AndroidUtilities.dp(8) * progressToHideUi); + } + speakingMembersToast.setAlpha(showSpeakingMembersToastProgress * progressToFullscreenMode); + speakingMembersToast.setScaleX(0.5f + 0.5f * showSpeakingMembersToastProgress); + speakingMembersToast.setScaleY(0.5f + 0.5f * showSpeakingMembersToastProgress); + + final boolean isTablet = GroupCallActivity.isTabletMode; + + if (GroupCallActivity.isTabletMode) { + 
notDrawRenderes = true; + super.dispatchDraw(canvas); + notDrawRenderes = false; + } else { + super.dispatchDraw(canvas); + } + + if (fullscreenListView.getVisibility() == View.VISIBLE) { + for (int i = 0; i < fullscreenListView.getChildCount(); i++) { + GroupCallFullscreenAdapter.GroupCallUserCell child = (GroupCallFullscreenAdapter.GroupCallUserCell) fullscreenListView.getChildAt(i); + if (child.getVisibility() == View.VISIBLE && child.getAlpha() != 0) { + canvas.save(); + canvas.translate(child.getX() + fullscreenListView.getX(), child.getY() + fullscreenListView.getY()); + canvas.scale(child.getScaleX(), child.getScaleY(), child.getMeasuredWidth() / 2f, child.getMeasuredHeight() / 2f); + child.drawOverlays(canvas); + canvas.restore(); + } + } + } + } + + ValueAnimator replaceFullscreenViewAnimator; + + public void requestFullscreen(ChatObject.VideoParticipant videoParticipant) { + if ((videoParticipant == null && fullscreenParticipant == null) || (videoParticipant != null && videoParticipant.equals(fullscreenParticipant))) { + return; + } + int peerId = videoParticipant == null ? 
0 : MessageObject.getPeerId(videoParticipant.participant.peer); + if (fullscreenTextureView != null) { + fullscreenTextureView.runDelayedAnimations(); + } + + if (replaceFullscreenViewAnimator != null) { + replaceFullscreenViewAnimator.cancel(); + } + fullscreenParticipant = videoParticipant; + fullscreenPeerId = peerId; + + boolean oldInFullscreen = inFullscreenMode; + lastUpdateTime = System.currentTimeMillis(); + + if (videoParticipant == null) { + if (inFullscreenMode) { + if (fullscreenAnimator != null) { + fullscreenAnimator.cancel(); + } + inFullscreenMode = false; + + if ((fullscreenTextureView.primaryView == null && fullscreenTextureView.secondaryView == null && fullscreenTextureView.tabletGridView == null) || !ChatObject.Call.videoIsActive(fullscreenTextureView.participant.participant, fullscreenTextureView.participant.presentation, call)) { + fullscreenTextureView.forceDetach(true); + if (fullscreenTextureView.primaryView != null) { + fullscreenTextureView.primaryView.setRenderer(null); + } + if (fullscreenTextureView.secondaryView != null) { + fullscreenTextureView.secondaryView.setRenderer(null); + } + if (fullscreenTextureView.tabletGridView != null) { + fullscreenTextureView.tabletGridView.setRenderer(null); + } + final GroupCallMiniTextureView removingMiniView = fullscreenTextureView; + removingMiniView.animate().alpha(0).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (removingMiniView.getParent() != null) { + removeView(removingMiniView); + removingMiniView.release(); + } + } + }).setDuration(GroupCallActivity.TRANSITION_DURATION).start(); + } else { + fullscreenTextureView.setShowingInFullscreen(false, true); + } + } + backButton.setEnabled(false); + hasPinnedVideo = false; + } else { + GroupCallMiniTextureView textureView = null; + for (int i = 0; i < attachedRenderers.size(); i++) { + if (attachedRenderers.get(i).participant.equals(videoParticipant)) { + textureView = 
attachedRenderers.get(i); + break; + } + } + + if (textureView != null) { + if (fullscreenAnimator != null) { + fullscreenAnimator.cancel(); + } + if (!inFullscreenMode) { + inFullscreenMode = true; + fullscreenTextureView = textureView; + fullscreenTextureView.setShowingInFullscreen(true, true); + invalidate(); + pinDrawable.setCrossOut(hasPinnedVideo, false); + } else { + hasPinnedVideo = false; + pinDrawable.setCrossOut(hasPinnedVideo, false); + fullscreenTextureView.forceDetach(false); + textureView.forceDetach(false); + final GroupCallMiniTextureView removingMiniView = textureView; + + GroupCallMiniTextureView newSmallTextureView = null; + if (!isTablet && (fullscreenTextureView.primaryView != null || fullscreenTextureView.secondaryView != null || fullscreenTextureView.tabletGridView != null)) { + newSmallTextureView = new GroupCallMiniTextureView(this, attachedRenderers, call, groupCallActivity); + newSmallTextureView.setViews(fullscreenTextureView.primaryView, fullscreenTextureView.secondaryView, fullscreenTextureView.tabletGridView); + newSmallTextureView.setFullscreenMode(inFullscreenMode, false); + newSmallTextureView.updateAttachState(false); + if (fullscreenTextureView.primaryView != null) { + fullscreenTextureView.primaryView.setRenderer(newSmallTextureView); + } + if (fullscreenTextureView.secondaryView != null) { + fullscreenTextureView.secondaryView.setRenderer(newSmallTextureView); + } + if (fullscreenTextureView.tabletGridView != null) { + fullscreenTextureView.tabletGridView.setRenderer(newSmallTextureView); + } + } + + GroupCallMiniTextureView newFullscreenTextureView = new GroupCallMiniTextureView(this, attachedRenderers, call, groupCallActivity); + newFullscreenTextureView.participant = textureView.participant; + newFullscreenTextureView.setViews(textureView.primaryView, textureView.secondaryView, textureView.tabletGridView); + newFullscreenTextureView.setFullscreenMode(inFullscreenMode, false); + 
newFullscreenTextureView.updateAttachState(false); + newFullscreenTextureView.textureView.renderer.setAlpha(1f); + newFullscreenTextureView.textureView.blurRenderer.setAlpha(1f); + + if (textureView.primaryView != null) { + textureView.primaryView.setRenderer(newFullscreenTextureView); + } + if (textureView.secondaryView != null) { + textureView.secondaryView.setRenderer(newFullscreenTextureView); + } + if (textureView.tabletGridView != null) { + textureView.tabletGridView.setRenderer(newFullscreenTextureView); + } + + newFullscreenTextureView.animateEnter = true; + newFullscreenTextureView.setAlpha(0); + outFullscreenTextureView = fullscreenTextureView; + replaceFullscreenViewAnimator = ObjectAnimator.ofFloat(newFullscreenTextureView, View.ALPHA, 0f, 1f); + replaceFullscreenViewAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + replaceFullscreenViewAnimator = null; + newFullscreenTextureView.animateEnter = false; + if (outFullscreenTextureView != null) { + if (outFullscreenTextureView.getParent() != null) { + removeView(outFullscreenTextureView); + removingMiniView.release(); + } + outFullscreenTextureView = null; + } + } + }); + if (newSmallTextureView != null) { + newSmallTextureView.setAlpha(0); + newSmallTextureView.setScaleX(0.5f); + newSmallTextureView.setScaleY(0.5f); + newSmallTextureView.animateEnter = true; + } + + GroupCallMiniTextureView finalNewSmallTextureView = newSmallTextureView; + newFullscreenTextureView.runOnFrameRendered(() -> { + if (replaceFullscreenViewAnimator != null) { + replaceFullscreenViewAnimator.start(); + } + + removingMiniView.animate().scaleX(0.5f).scaleY(0.5f).alpha(0).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (removingMiniView.getParent() != null) { + removeView(removingMiniView); + removingMiniView.release(); + } + } + }).setDuration(100).start(); + + if (finalNewSmallTextureView != null) { + 
finalNewSmallTextureView.animate().alpha(1f).scaleX(1f).scaleY(1f).setDuration(100).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + finalNewSmallTextureView.animateEnter = false; + } + }).start(); + } + }); + + fullscreenTextureView = newFullscreenTextureView; + fullscreenTextureView.setShowingInFullscreen(true, false); + update(); + } + } else { + if (inFullscreenMode) { + if (fullscreenTextureView.primaryView != null || fullscreenTextureView.secondaryView != null | fullscreenTextureView.tabletGridView != null) { + fullscreenTextureView.forceDetach(false); + GroupCallMiniTextureView newSmallTextureView = new GroupCallMiniTextureView(this, attachedRenderers, call, groupCallActivity); + newSmallTextureView.setViews(fullscreenTextureView.primaryView, fullscreenTextureView.secondaryView, fullscreenTextureView.tabletGridView); + newSmallTextureView.setFullscreenMode(inFullscreenMode, false); + newSmallTextureView.updateAttachState(false); + if (fullscreenTextureView.primaryView != null) { + fullscreenTextureView.primaryView.setRenderer(newSmallTextureView); + } + if (fullscreenTextureView.secondaryView != null) { + fullscreenTextureView.secondaryView.setRenderer(newSmallTextureView); + } + if (fullscreenTextureView.tabletGridView != null) { + fullscreenTextureView.tabletGridView.setRenderer(newSmallTextureView); + } + + newSmallTextureView.setAlpha(0); + newSmallTextureView.setScaleX(0.5f); + newSmallTextureView.setScaleY(0.5f); + newSmallTextureView.animateEnter = true; + newSmallTextureView.runOnFrameRendered(() -> newSmallTextureView.animate().alpha(1f).scaleY(1f).scaleX(1f).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + newSmallTextureView.animateEnter = false; + } + }).setDuration(150).start()); + } else { + fullscreenTextureView.forceDetach(true); + } + + + GroupCallMiniTextureView newFullscreenTextureView = new GroupCallMiniTextureView(this, 
attachedRenderers, call, groupCallActivity); + newFullscreenTextureView.participant = videoParticipant; + newFullscreenTextureView.setFullscreenMode(inFullscreenMode, false); + newFullscreenTextureView.setShowingInFullscreen(true, false); + + newFullscreenTextureView.animateEnter = true; + newFullscreenTextureView.setAlpha(0); + outFullscreenTextureView = fullscreenTextureView; + replaceFullscreenViewAnimator = ValueAnimator.ofFloat(0f, 1f); + replaceFullscreenViewAnimator.addUpdateListener(valueAnimator -> { + newFullscreenTextureView.setAlpha((Float) valueAnimator.getAnimatedValue()); + invalidate(); + }); + replaceFullscreenViewAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + replaceFullscreenViewAnimator = null; + newFullscreenTextureView.animateEnter = false; + if (outFullscreenTextureView != null) { + if (outFullscreenTextureView.getParent() != null) { + removeView(outFullscreenTextureView); + outFullscreenTextureView.release(); + } + outFullscreenTextureView = null; + } + } + }); + replaceFullscreenViewAnimator.start(); + + fullscreenTextureView = newFullscreenTextureView; + fullscreenTextureView.setShowingInFullscreen(true, false); + fullscreenTextureView.updateAttachState(false); + update(); + } else { + inFullscreenMode = true; + fullscreenTextureView = new GroupCallMiniTextureView(this, attachedRenderers, call, groupCallActivity); + fullscreenTextureView.participant = videoParticipant; + fullscreenTextureView.setFullscreenMode(inFullscreenMode, false); + fullscreenTextureView.setShowingInFullscreen(true, false); + // fullscreenTextureView.textureView.renderer.setAlpha(1f); + fullscreenTextureView.setShowingInFullscreen(true, false); + + replaceFullscreenViewAnimator = ObjectAnimator.ofFloat(fullscreenTextureView, View.ALPHA, 0f, 1f); + replaceFullscreenViewAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + 
replaceFullscreenViewAnimator = null; + fullscreenTextureView.animateEnter = false; + if (outFullscreenTextureView != null) { + if (outFullscreenTextureView.getParent() != null) { + removeView(outFullscreenTextureView); + outFullscreenTextureView.release(); + } + outFullscreenTextureView = null; + } + } + }); + replaceFullscreenViewAnimator.start(); + invalidate(); + pinDrawable.setCrossOut(hasPinnedVideo, false); + } + } + backButton.setEnabled(true); + } + + + if (oldInFullscreen != inFullscreenMode) { + if (!inFullscreenMode) { + setUiVisible(true); + if (hideUiRunnableIsScheduled) { + hideUiRunnableIsScheduled = false; + AndroidUtilities.cancelRunOnUIThread(hideUiRunnable); + } + } else { + backButton.setVisibility(View.VISIBLE); + pinButton.setVisibility(View.VISIBLE); + unpinTextView.setVisibility(View.VISIBLE); + pinContainer.setVisibility(View.VISIBLE); + } + onFullScreenModeChanged(true); + fullscreenAnimator = ValueAnimator.ofFloat(progressToFullscreenMode, inFullscreenMode ? 1f : 0); + fullscreenAnimator.addUpdateListener(valueAnimator -> { + progressToFullscreenMode = (float) valueAnimator.getAnimatedValue(); + update(); + }); + GroupCallMiniTextureView textureViewFinal = fullscreenTextureView; + textureViewFinal.animateToFullscreen = true; + int currentAccount = groupCallActivity.getCurrentAccount(); + swipedBack = swipeToBackGesture; + animationIndex = NotificationCenter.getInstance(currentAccount).setAnimationInProgress(animationIndex, null); + fullscreenAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + NotificationCenter.getInstance(currentAccount).onAnimationFinish(animationIndex); + fullscreenAnimator = null; + textureViewFinal.animateToFullscreen = false; + if (!inFullscreenMode) { + fullscreenTextureView = null; + fullscreenPeerId = 0; + } + progressToFullscreenMode = inFullscreenMode ? 
1f : 0; + update(); + onFullScreenModeChanged(false); + if (!inFullscreenMode) { + backButton.setVisibility(View.GONE); + pinButton.setVisibility(View.GONE); + unpinTextView.setVisibility(View.GONE); + pinContainer.setVisibility(View.GONE); + } + } + }); + + fullscreenAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + fullscreenAnimator.setDuration(TRANSITION_DURATION); + fullscreenTextureView.textureView.synchOrRunAnimation(fullscreenAnimator); + } + + animateSwipeToBack(fullscreenParticipant == null); + } + + protected void update() { + invalidate(); + } + + protected void onFullScreenModeChanged(boolean startAnimaion) { + + } + + private void setUiVisible(boolean uiVisible) { + if (this.uiVisible != uiVisible) { + this.uiVisible = uiVisible; + onUiVisibilityChanged(); + + if (uiVisible && inFullscreenMode) { + if (!hideUiRunnableIsScheduled) { + hideUiRunnableIsScheduled = true; + AndroidUtilities.runOnUIThread(hideUiRunnable, 3000); + } + } else { + hideUiRunnableIsScheduled = false; + AndroidUtilities.cancelRunOnUIThread(hideUiRunnable); + } + if (fullscreenTextureView != null) { + fullscreenTextureView.requestLayout(); + } + } + } + + protected void onUiVisibilityChanged() { + + } + + protected boolean canHideUI() { + return inFullscreenMode; + } + + @Override + public boolean onInterceptTouchEvent(MotionEvent ev) { + return onTouchEvent(ev); + } + + @Override + public boolean onTouchEvent(MotionEvent ev) { + if (!inFullscreenMode || (!maybeSwipeToBackGesture && !swipeToBackGesture && !tapGesture && !canZoomGesture && !isInPinchToZoomTouchMode && !zoomStarted && ev.getActionMasked() != MotionEvent.ACTION_DOWN) || fullscreenTextureView == null) { + finishZoom(); + return false; + } + if (ev.getActionMasked() == MotionEvent.ACTION_DOWN) { + maybeSwipeToBackGesture = false; + swipeToBackGesture = false; + canZoomGesture = false; + isInPinchToZoomTouchMode = false; + zoomStarted = false; + } + + if (ev.getActionMasked() == MotionEvent.ACTION_DOWN && 
swipeToBackAnimator != null) { + maybeSwipeToBackGesture = false; + swipeToBackGesture = true; + tapY = ev.getY() - swipeToBackDy; + swipeToBackAnimator.removeAllListeners(); + swipeToBackAnimator.cancel(); + swipeToBackAnimator = null; + } else if (swipeToBackAnimator != null) { + finishZoom(); + return false; + } + if (fullscreenTextureView.isInsideStopScreenButton(ev.getX(), ev.getY())) { + return false; + } + + if (ev.getActionMasked() == MotionEvent.ACTION_DOWN && !swipeToBackGesture) { + AndroidUtilities.rectTmp.set(0, ActionBar.getCurrentActionBarHeight(), fullscreenTextureView.getMeasuredWidth() + (isLandscapeMode && uiVisible ? -AndroidUtilities.dp(90) : 0), fullscreenTextureView.getMeasuredHeight() + (!isLandscapeMode && uiVisible ? -AndroidUtilities.dp(90) : 0)); + if (AndroidUtilities.rectTmp.contains(ev.getX(), ev.getY())) { + tapTime = System.currentTimeMillis(); + tapGesture = true; + maybeSwipeToBackGesture = true; + tapX = ev.getX(); + tapY = ev.getY(); + } + } else if ((maybeSwipeToBackGesture || swipeToBackGesture || tapGesture) && ev.getActionMasked() == MotionEvent.ACTION_MOVE) { + if (Math.abs(tapX - ev.getX()) > touchSlop || Math.abs(tapY - ev.getY()) > touchSlop) { + tapGesture = false; + } + if (maybeSwipeToBackGesture && !zoomStarted && Math.abs(tapY - ev.getY()) > touchSlop * 2) { + tapY = ev.getY(); + maybeSwipeToBackGesture = false; + swipeToBackGesture = true; + } else if (swipeToBackGesture) { + swipeToBackDy = ev.getY() - tapY; + invalidate(); + } + + if (maybeSwipeToBackGesture && Math.abs(tapX - ev.getX()) > touchSlop * 4) { + maybeSwipeToBackGesture = false; + } + } + if (tapGesture && ev.getActionMasked() == MotionEvent.ACTION_UP && System.currentTimeMillis() - tapTime < 200) { + boolean confirmAction = false; + tapGesture = false; + if (showSpeakingMembersToast) { + AndroidUtilities.rectTmp.set(speakingMembersToast.getX(), speakingMembersToast.getY(), speakingMembersToast.getX() + speakingMembersToast.getWidth(), 
speakingMembersToast.getY() + speakingMembersToast.getHeight()); + if (call != null && AndroidUtilities.rectTmp.contains(ev.getX(), ev.getY())) { + boolean found = false; + for (int i = 0; i < call.visibleVideoParticipants.size(); i++) { + if (speakingToastPeerId == MessageObject.getPeerId(call.visibleVideoParticipants.get(i).participant.peer)) { + found = true; + confirmAction = true; + groupCallActivity.fullscreenFor(call.visibleVideoParticipants.get(i)); + } + } + if (!found) { + TLRPC.TL_groupCallParticipant participant = call.participants.get(speakingToastPeerId); + groupCallActivity.fullscreenFor(new ChatObject.VideoParticipant(participant, false, false)); + confirmAction = true; + } + } + } + + if (!confirmAction) { + setUiVisible(!uiVisible); + } + swipeToBackDy = 0; + invalidate(); + } + + if ((maybeSwipeToBackGesture || swipeToBackGesture) && ev.getActionMasked() == MotionEvent.ACTION_UP || ev.getActionMasked() == MotionEvent.ACTION_CANCEL) { + maybeSwipeToBackGesture = false; + if (swipeToBackGesture) { + if (ev.getActionMasked() == MotionEvent.ACTION_UP && Math.abs(swipeToBackDy) > AndroidUtilities.dp(120)) { + groupCallActivity.fullscreenFor(null); + } else { + animateSwipeToBack(false); + } + } + invalidate(); + } + + if (!fullscreenTextureView.hasVideo || swipeToBackGesture) { + finishZoom(); + return tapGesture || swipeToBackGesture || maybeSwipeToBackGesture; + } + + if (ev.getActionMasked() == MotionEvent.ACTION_DOWN || ev.getActionMasked() == MotionEvent.ACTION_POINTER_DOWN) { + if (ev.getActionMasked() == MotionEvent.ACTION_DOWN) { + View renderer = fullscreenTextureView.textureView.renderer; + AndroidUtilities.rectTmp.set(renderer.getX(), renderer.getY(), renderer.getX() + renderer.getMeasuredWidth(), renderer.getY() + renderer.getMeasuredHeight()); + AndroidUtilities.rectTmp.inset((renderer.getMeasuredHeight() * fullscreenTextureView.textureView.scaleTextureToFill - renderer.getMeasuredHeight()) / 2, (renderer.getMeasuredWidth() * 
fullscreenTextureView.textureView.scaleTextureToFill - renderer.getMeasuredWidth()) / 2); + if (!GroupCallActivity.isLandscapeMode) { + AndroidUtilities.rectTmp.top = Math.max(AndroidUtilities.rectTmp.top, ActionBar.getCurrentActionBarHeight()); + AndroidUtilities.rectTmp.bottom = Math.min(AndroidUtilities.rectTmp.bottom, fullscreenTextureView.getMeasuredHeight() - AndroidUtilities.dp(90)); + } else { + AndroidUtilities.rectTmp.top = Math.max(AndroidUtilities.rectTmp.top, ActionBar.getCurrentActionBarHeight()); + AndroidUtilities.rectTmp.right = Math.min(AndroidUtilities.rectTmp.right, fullscreenTextureView.getMeasuredWidth() - AndroidUtilities.dp(90)); + } + canZoomGesture = AndroidUtilities.rectTmp.contains(ev.getX(), ev.getY()); + if (!canZoomGesture) { + finishZoom(); + return maybeSwipeToBackGesture; + } + } + if (!isInPinchToZoomTouchMode && ev.getPointerCount() == 2) { + pinchStartDistance = (float) Math.hypot(ev.getX(1) - ev.getX(0), ev.getY(1) - ev.getY(0)); + pinchStartCenterX = pinchCenterX = (ev.getX(0) + ev.getX(1)) / 2.0f; + pinchStartCenterY = pinchCenterY = (ev.getY(0) + ev.getY(1)) / 2.0f; + pinchScale = 1f; + + pointerId1 = ev.getPointerId(0); + pointerId2 = ev.getPointerId(1); + isInPinchToZoomTouchMode = true; + } + } else if (ev.getActionMasked() == MotionEvent.ACTION_MOVE && isInPinchToZoomTouchMode) { + int index1 = -1; + int index2 = -1; + for (int i = 0; i < ev.getPointerCount(); i++) { + if (pointerId1 == ev.getPointerId(i)) { + index1 = i; + } + if (pointerId2 == ev.getPointerId(i)) { + index2 = i; + } + } + if (index1 == -1 || index2 == -1) { + getParent().requestDisallowInterceptTouchEvent(false); + finishZoom(); + return maybeSwipeToBackGesture; + } + pinchScale = (float) Math.hypot(ev.getX(index2) - ev.getX(index1), ev.getY(index2) - ev.getY(index1)) / pinchStartDistance; + if (pinchScale > 1.005f && !zoomStarted) { + pinchStartDistance = (float) Math.hypot(ev.getX(index2) - ev.getX(index1), ev.getY(index2) - ev.getY(index1)); + 
pinchStartCenterX = pinchCenterX = (ev.getX(index1) + ev.getX(index2)) / 2.0f; + pinchStartCenterY = pinchCenterY = (ev.getY(index1) + ev.getY(index2)) / 2.0f; + pinchScale = 1f; + pinchTranslationX = 0f; + pinchTranslationY = 0f; + getParent().requestDisallowInterceptTouchEvent(true); + zoomStarted = true;// + isInPinchToZoomTouchMode = true; + } + + float newPinchCenterX = (ev.getX(index1) + ev.getX(index2)) / 2.0f; + float newPinchCenterY = (ev.getY(index1) + ev.getY(index2)) / 2.0f; + + float moveDx = pinchStartCenterX - newPinchCenterX; + float moveDy = pinchStartCenterY - newPinchCenterY; + pinchTranslationX = -moveDx / pinchScale; + pinchTranslationY = -moveDy / pinchScale; + invalidate(); + } else if ((ev.getActionMasked() == MotionEvent.ACTION_UP || (ev.getActionMasked() == MotionEvent.ACTION_POINTER_UP && checkPointerIds(ev)) || ev.getActionMasked() == MotionEvent.ACTION_CANCEL)) { + getParent().requestDisallowInterceptTouchEvent(false); + finishZoom(); + } + return canZoomGesture || tapGesture || maybeSwipeToBackGesture; + } + + private void animateSwipeToBack(boolean aplay) { + if (swipeToBackGesture) { + swipeToBackGesture = false; + swipeToBackAnimator = aplay ? ValueAnimator.ofFloat(swipeToBackDy, 0) : ValueAnimator.ofFloat(swipeToBackDy, 0); + swipeToBackAnimator.addUpdateListener(valueAnimator -> { + swipeToBackDy = (float) valueAnimator.getAnimatedValue(); + invalidate(); + }); + swipeToBackAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + swipeToBackAnimator = null; + swipeToBackDy = 0; + } + }); + swipeToBackAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + + swipeToBackAnimator.setDuration(aplay ? 
TRANSITION_DURATION : 200); + swipeToBackAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + if (fullscreenTextureView != null) { + fullscreenTextureView.textureView.synchOrRunAnimation(swipeToBackAnimator); + } else { + swipeToBackAnimator.start(); + } + lastUpdateTime = System.currentTimeMillis(); + } + maybeSwipeToBackGesture = false; + } + + private void finishZoom() { + if (zoomStarted) { + zoomStarted = false; + zoomBackAnimator = ValueAnimator.ofFloat(1f, 0); + + float fromScale = pinchScale; + float fromTranslateX = pinchTranslationX; + float fromTranslateY = pinchTranslationY; + zoomBackAnimator.addUpdateListener(valueAnimator -> { + float v = (float) valueAnimator.getAnimatedValue(); + pinchScale = fromScale * v + 1f * (1f - v); + pinchTranslationX = fromTranslateX * v; + pinchTranslationY = fromTranslateY * v; + invalidate(); + }); + + zoomBackAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + zoomBackAnimator = null; + pinchScale = 1f; + pinchTranslationX = 0; + pinchTranslationY = 0; + invalidate(); + } + }); + zoomBackAnimator.setDuration(TRANSITION_DURATION); + zoomBackAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + zoomBackAnimator.start(); + lastUpdateTime = System.currentTimeMillis(); + } + canZoomGesture = false; + isInPinchToZoomTouchMode = false; + + } + + private boolean checkPointerIds(MotionEvent ev) { + if (ev.getPointerCount() < 2) { + return false; + } + if (pointerId1 == ev.getPointerId(0) && pointerId2 == ev.getPointerId(1)) { + return true; + } + if (pointerId1 == ev.getPointerId(1) && pointerId2 == ev.getPointerId(0)) { + return true; + } + return false; + } + + public void delayHideUi() { + if (hideUiRunnableIsScheduled) { + AndroidUtilities.cancelRunOnUIThread(hideUiRunnable); + AndroidUtilities.runOnUIThread(hideUiRunnable, 3000); + } + } + + public boolean isUiVisible() { + return uiVisible; + } + + public void setProgressToHideUi(float 
progressToHideUi) { + if (this.progressToHideUi != progressToHideUi) { + this.progressToHideUi = progressToHideUi; + invalidate(); + if (fullscreenTextureView != null) { + fullscreenTextureView.invalidate(); + } + } + } + + public void setAmplitude(TLRPC.TL_groupCallParticipant participant, float v) { + for (int i = 0; i < attachedRenderers.size(); i++) { + if (MessageObject.getPeerId(attachedRenderers.get(i).participant.participant.peer) == MessageObject.getPeerId(participant.peer)) { + attachedRenderers.get(i).setAmplitude(v); + } + } + } + + public boolean isAnimating() { + return fullscreenAnimator != null; + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (GroupCallActivity.isTabletMode) { + ((MarginLayoutParams) topShadowView.getLayoutParams()).rightMargin = AndroidUtilities.dp(GroupCallActivity.TABLET_LIST_SIZE + 8); + } else if (GroupCallActivity.isLandscapeMode) { + ((MarginLayoutParams) topShadowView.getLayoutParams()).rightMargin = AndroidUtilities.dp(90); + } else { + ((MarginLayoutParams) topShadowView.getLayoutParams()).rightMargin = 0; + } + + pinContainer.getLayoutParams().height = AndroidUtilities.dp(40); + pinTextView.measure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.UNSPECIFIED), heightMeasureSpec); + unpinTextView.measure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.UNSPECIFIED), heightMeasureSpec); + pinContainer.getLayoutParams().width = AndroidUtilities.dp(46) + (!hasPinnedVideo ? pinTextView.getMeasuredWidth() : unpinTextView.getMeasuredWidth()); + + ((MarginLayoutParams) speakingMembersToast.getLayoutParams()).rightMargin = GroupCallActivity.isLandscapeMode ? 
AndroidUtilities.dp(45) : 0; + + for (int a = 0; a < 2; a++) { + MarginLayoutParams lp = (MarginLayoutParams) undoView[a].getLayoutParams(); + if (isTablet) { + lp.rightMargin = AndroidUtilities.dp(8 + 16 + GroupCallActivity.TABLET_LIST_SIZE); + } else { + lp.rightMargin = isLandscapeMode ? AndroidUtilities.dp(180) : 0; + } + } + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + public boolean autoPinEnabled() { + return !hasPinnedVideo && (System.currentTimeMillis() - lastUpdateTime) > 2000 && !swipeToBackGesture && !isInPinchToZoomTouchMode; + } + + long lastUpdateTooltipTime; + Runnable updateTooltipRunnbale; + + public void setVisibleParticipant(boolean animated) { + if (!inFullscreenMode || isTablet || fullscreenParticipant == null || fullscreenAnimator != null || call == null) { + if (showSpeakingMembersToast) { + showSpeakingMembersToast = false; + showSpeakingMembersToastProgress = 0f; + } + return; + } + int speakingIndex = 0; + int currenAccount = groupCallActivity.getCurrentAccount(); + if (System.currentTimeMillis() - lastUpdateTooltipTime < 500) { + if (updateTooltipRunnbale == null) { + AndroidUtilities.runOnUIThread(updateTooltipRunnbale = () -> { + updateTooltipRunnbale = null; + setVisibleParticipant(true); + }, System.currentTimeMillis() - lastUpdateTooltipTime + 50); + } + return; + } + lastUpdateTooltipTime = System.currentTimeMillis(); + SpannableStringBuilder spannableStringBuilder = null; + for (int i = 0; i < call.currentSpeakingPeers.size(); i++) { + int key = call.currentSpeakingPeers.keyAt(i); + TLRPC.TL_groupCallParticipant participant = call.currentSpeakingPeers.get(key); + if (participant.self || participant.muted_by_you || MessageObject.getPeerId(fullscreenParticipant.participant.peer) == MessageObject.getPeerId(participant.peer)) { + continue; + } + int peerId = MessageObject.getPeerId(participant.peer); + long diff = SystemClock.uptimeMillis() - participant.lastSpeakTime; + boolean newSpeaking = diff < 500; + if 
(newSpeaking) { + if (spannableStringBuilder == null) { + spannableStringBuilder = new SpannableStringBuilder(); + } + if (speakingIndex == 0) { + speakingToastPeerId = MessageObject.getPeerId(participant.peer); + } + if (speakingIndex < 3) { + TLRPC.User user = peerId > 0 ? MessagesController.getInstance(currenAccount).getUser(peerId) : null; + TLRPC.Chat chat = peerId <= 0 ? MessagesController.getInstance(currenAccount).getChat(peerId) : null; + if (user == null && chat == null) { + continue; + } + speakingMembersAvatars.setObject(speakingIndex, currenAccount, participant); + if (speakingIndex != 0) { + spannableStringBuilder.append(", "); + } + if (user != null) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + spannableStringBuilder.append(UserObject.getFirstName(user), new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0); + } else { + spannableStringBuilder.append(UserObject.getFirstName(user)); + } + } else { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + spannableStringBuilder.append(chat.title, new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0); + } else { + spannableStringBuilder.append(chat.title); + } + } + } + speakingIndex++; + if (speakingIndex == 3) { + break; + } + } + } + boolean show; + if (speakingIndex == 0) { + show = false; + } else { + show = true; + } + + if (!showSpeakingMembersToast && show) { + animated = false; + } else if (!show && showSpeakingMembersToast) { + showSpeakingMembersToast = show; + invalidate(); + return; + } else if (showSpeakingMembersToast && show) { + speakingMembersToastFromLeft = speakingMembersToast.getLeft(); + speakingMembersToastFromRight = speakingMembersToast.getRight(); + speakingMembersToastFromTextLeft = speakingMembersText.getLeft(); + speakingMembersToastChangeProgress = 0; + } + + if (!show) { + showSpeakingMembersToast = show; + invalidate(); + return; + } + String s = LocaleController.getPluralString("MembersAreSpeakingToast", 
speakingIndex); + int replaceIndex = s.indexOf("un1"); + SpannableStringBuilder spannableStringBuilder1 = new SpannableStringBuilder(s); + spannableStringBuilder1.replace(replaceIndex, replaceIndex + 3, spannableStringBuilder); + speakingMembersText.setText(spannableStringBuilder1); + + int leftMargin; + if (speakingIndex == 0) { + leftMargin = 0; + } else if (speakingIndex == 1) { + leftMargin = AndroidUtilities.dp(32 + 8); + } else if (speakingIndex == 2) { + leftMargin = AndroidUtilities.dp(32 + 24 + 8); + } else { + leftMargin = AndroidUtilities.dp(32 + 24 + 24 + 8); + } + ((LayoutParams) speakingMembersText.getLayoutParams()).leftMargin = leftMargin; + ((LayoutParams) speakingMembersText.getLayoutParams()).rightMargin = AndroidUtilities.dp(16); + + showSpeakingMembersToast = show; + invalidate(); + + while (speakingIndex < 3) { + speakingMembersAvatars.setObject(speakingIndex, currenAccount, null); + speakingIndex++; + } + + speakingMembersAvatars.commitTransition(animated); + } + + public UndoView getUndoView() { + if (undoView[0].getVisibility() == View.VISIBLE) { + UndoView old = undoView[0]; + undoView[0] = undoView[1]; + undoView[1] = old; + old.hide(true, 2); + removeView(undoView[0]); + addView(undoView[0]); + } + return undoView[0]; + } + + public boolean isVisible(TLRPC.TL_groupCallParticipant participant) { + int peerId = MessageObject.getPeerId(participant.peer); + return attachedPeerIds.get(peerId) > 0; + } + + public void attach(GroupCallMiniTextureView view) { + attachedRenderers.add(view); + int peerId = MessageObject.getPeerId(view.participant.participant.peer); + attachedPeerIds.put(peerId, attachedPeerIds.get(peerId, 0) + 1); + } + + public void detach(GroupCallMiniTextureView view) { + attachedRenderers.remove(view); + int peerId = MessageObject.getPeerId(view.participant.participant.peer); + attachedPeerIds.put(peerId, attachedPeerIds.get(peerId, 0) - 1); + } + + public void setGroupCall(ChatObject.Call call) { + this.call = call; + } +} 
diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallStatusIcon.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallStatusIcon.java new file mode 100644 index 000000000..a56265f9d --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallStatusIcon.java @@ -0,0 +1,220 @@ +package org.telegram.ui.Components.voip; + +import android.graphics.Color; +import android.graphics.PorterDuff; +import android.graphics.PorterDuffColorFilter; +import android.os.SystemClock; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.R; +import org.telegram.messenger.Utilities; +import org.telegram.messenger.voip.VoIPService; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.Components.RLottieDrawable; +import org.telegram.ui.Components.RLottieImageView; + +public class GroupCallStatusIcon { + + RLottieDrawable micDrawable; + RLottieDrawable shakeHandDrawable; + RLottieImageView iconView; + + boolean updateRunnableScheduled; + boolean isSpeaking; + + boolean lastMuted; + boolean lastRaisedHand; + Callback callback; + TLRPC.TL_groupCallParticipant participant; + + private Runnable shakeHandCallback = () -> { + shakeHandDrawable.setOnFinishCallback(null, 0); + micDrawable.setOnFinishCallback(null, 0); + if (iconView != null) { + iconView.setAnimation(micDrawable); + } + }; + + private Runnable raiseHandCallback = () -> { + int num = Utilities.random.nextInt(100); + int endFrame; + int startFrame; + if (num < 32) { + startFrame = 0; + endFrame = 120; + } else if (num < 64) { + startFrame = 120; + endFrame = 240; + } else if (num < 97) { + startFrame = 240; + endFrame = 420; + } else if (num == 98) { + startFrame = 420; + endFrame = 540; + } else { + startFrame = 540; + endFrame = 720; + } + shakeHandDrawable.setCustomEndFrame(endFrame); + shakeHandDrawable.setOnFinishCallback(shakeHandCallback, endFrame - 1); + shakeHandDrawable.setCurrentFrame(startFrame); + + if 
(iconView != null) { + iconView.setAnimation(shakeHandDrawable); + iconView.playAnimation(); + } + }; + + private boolean mutedByMe; + + public GroupCallStatusIcon() { + micDrawable = new RLottieDrawable(R.raw.voice_mini, "" + R.raw.voice_mini, AndroidUtilities.dp(24), AndroidUtilities.dp(24), true, null); + shakeHandDrawable = new RLottieDrawable(R.raw.hand_2, "" + R.raw.hand_2, AndroidUtilities.dp(15), AndroidUtilities.dp(15), true, null); + } + + private Runnable updateRunnable = () -> { + isSpeaking = false; + if (callback != null) { + callback.onStatusChanged(); + } + updateRunnableScheduled = false; + }; + + public void setAmplitude(double value) { + if (value > 1.5f) { + if (updateRunnableScheduled) { + AndroidUtilities.cancelRunOnUIThread(updateRunnable); + } + if (!isSpeaking) { + isSpeaking = true; + if (callback != null) { + callback.onStatusChanged(); + } + } + + AndroidUtilities.runOnUIThread(updateRunnable, 500); + updateRunnableScheduled = true; + } + } + + private Runnable checkRaiseRunnable = () -> { + updateIcon(true); + }; + + public void setImageView(RLottieImageView iconView) { + this.iconView = iconView; + updateIcon(false); + } + + public void setParticipant(TLRPC.TL_groupCallParticipant participant, boolean animated) { + this.participant = participant; + updateIcon(animated); + } + + public void updateIcon(boolean animated) { + if (iconView == null || participant == null || micDrawable == null) { + return; + } + boolean changed; + boolean newMutedByMe = participant.muted_by_you && !participant.self; + boolean newMuted; + boolean hasVoice; + if (SystemClock.elapsedRealtime() - participant.lastVoiceUpdateTime < 500) { + hasVoice = participant.hasVoiceDelayed; + } else { + hasVoice = participant.hasVoice; + } + if (participant.self) { + newMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute() && (!isSpeaking || !hasVoice); + } else { + newMuted = participant.muted && (!isSpeaking || !hasVoice) || 
newMutedByMe; + } + boolean newRaisedHand = (participant.muted && !isSpeaking || newMutedByMe) && (!participant.can_self_unmute || newMutedByMe) && (!participant.can_self_unmute && participant.raise_hand_rating != 0); + int newStatus = 0; + if (newRaisedHand) { + long time = SystemClock.elapsedRealtime() - participant.lastRaiseHandDate; + if (participant.lastRaiseHandDate == 0 || time > 5000) { + newStatus = newMutedByMe ? 2 : 0; + } else { + newStatus = 3; + AndroidUtilities.runOnUIThread(checkRaiseRunnable, 5000 - time); + } + + changed = micDrawable.setCustomEndFrame(136); +// if (animated) { +// micDrawable.setOnFinishCallback(raiseHandCallback, 135); +// } else { +// micDrawable.setOnFinishCallback(null, 0); +// } + } else { + iconView.setAnimation(micDrawable); + micDrawable.setOnFinishCallback(null, 0); + if (newMuted && lastRaisedHand) { + changed = micDrawable.setCustomEndFrame(36); + } else { + changed = micDrawable.setCustomEndFrame(newMuted ? 99 : 69); + } + } + + if (animated) { + if (changed) { + if (newRaisedHand) { + micDrawable.setCurrentFrame(99); + micDrawable.setCustomEndFrame(136); + } else if (newMuted && lastRaisedHand && !newRaisedHand) { + micDrawable.setCurrentFrame(0); + micDrawable.setCustomEndFrame(36); + } else if (newMuted) { + micDrawable.setCurrentFrame(69); + micDrawable.setCustomEndFrame(99); + } else { + micDrawable.setCurrentFrame(36); + micDrawable.setCustomEndFrame(69); + } + iconView.playAnimation(); + iconView.invalidate(); + } + } else { + micDrawable.setCurrentFrame(micDrawable.getCustomEndFrame() - 1, false, true); + iconView.invalidate(); + } + + iconView.setAnimation(micDrawable); + lastMuted = newMuted; + lastRaisedHand = newRaisedHand; + + if (mutedByMe != newMutedByMe) { + mutedByMe = newMutedByMe; + if (callback != null) { + callback.onStatusChanged(); + } + } + } + + public boolean isSpeaking() { + return isSpeaking; + } + + public boolean isMutedByMe() { + return mutedByMe; + } + + public boolean isMutedByAdmin() 
{ + return participant != null && participant.muted && !participant.can_self_unmute; + } + + public void setCallback(Callback callback) { + this.callback = callback; + if (callback == null) { + isSpeaking = false; + AndroidUtilities.cancelRunOnUIThread(updateRunnable); + AndroidUtilities.cancelRunOnUIThread(raiseHandCallback); + AndroidUtilities.cancelRunOnUIThread(checkRaiseRunnable); + micDrawable.setColorFilter(new PorterDuffColorFilter(Color.WHITE, PorterDuff.Mode.MULTIPLY)); + } + } + + public interface Callback { + void onStatusChanged(); + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VideoPreviewDialog.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VideoPreviewDialog.java new file mode 100644 index 000000000..d0dc12088 --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VideoPreviewDialog.java @@ -0,0 +1,394 @@ +package org.telegram.ui.Components.voip; + +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.app.Dialog; +import android.content.Context; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Paint; +import android.graphics.Shader; +import android.os.Build; +import android.util.TypedValue; +import android.view.Gravity; +import android.view.MotionEvent; +import android.view.View; +import android.view.ViewGroup; +import android.widget.FrameLayout; +import android.widget.ImageView; +import android.widget.LinearLayout; +import android.widget.TextView; + +import androidx.annotation.NonNull; +import androidx.core.graphics.ColorUtils; + +import com.google.android.exoplayer2.C; + +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; +import org.telegram.messenger.voip.VideoCapturerDevice; +import org.telegram.messenger.voip.VoIPService; +import 
org.telegram.ui.ActionBar.ActionBar; +import org.telegram.ui.ActionBar.BackDrawable; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.RLottieDrawable; +import org.telegram.ui.Components.RLottieImageView; +import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.GroupCallActivity; +import org.webrtc.RendererCommon; + +public abstract class VideoPreviewDialog extends FrameLayout { + + VoIPTextureView textureView; + Paint backgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + boolean isDismissed; + float outProgress; + FrameLayout container; + + View negativeButton; + View positiveButton; + + private final LinearLayout buttonsLayout; + private final ActionBar actionBar; + private final RLottieImageView flipIconView; + private final RLottieImageView micIconView; + + int flipIconEndFrame; + private final TextView subtitle; + + public boolean micEnabled; + + CellFlickerDrawable drawable = new CellFlickerDrawable(); + + public VideoPreviewDialog(@NonNull Context context, RecyclerListView listView, RecyclerListView fullscreenListView) { + super(context); + backgroundPaint.setColor(Theme.getColor(Theme.key_voipgroup_dialogBackground)); + + actionBar = new ActionBar(context); + + actionBar.setBackButtonDrawable(new BackDrawable(false)); + actionBar.setBackgroundColor(Color.TRANSPARENT); + actionBar.setItemsColor(Theme.getColor(Theme.key_voipgroup_actionBarItems), false); + actionBar.setTitle(LocaleController.getString("CallVideoPreviewTitle", R.string.CallVideoPreviewTitle)); + actionBar.setOccupyStatusBar(false); + actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { + @Override + public void onItemClick(int id) { + if (id == -1) { + dismiss(false); + } + super.onItemClick(id); + } + }); + + container = new FrameLayout(context); + container.setClipChildren(false); + + addView(container, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 
LayoutHelper.MATCH_PARENT)); + + container.addView(actionBar); + + textureView = new VoIPTextureView(context, false, false); + textureView.renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT); + textureView.setRoundCorners(AndroidUtilities.dp(8)); + if (VoIPService.getSharedInstance() != null) { + textureView.renderer.setMirror(VoIPService.getSharedInstance().isFrontFaceCamera()); + } + textureView.scaleType = VoIPTextureView.SCALE_TYPE_FIT; + textureView.clipToTexture = true; + textureView.renderer.setAlpha(0); + textureView.renderer.setRotateTextureWitchScreen(true); + textureView.renderer.setUseCameraRotation(true); + + subtitle = new TextView(context); + subtitle.setTextColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_nameText), (int) (255 * 0.4f))); + subtitle.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + subtitle.setText(LocaleController.getString("VideoPreviewDesrciption", R.string.VideoPreviewDesrciption)); + subtitle.setGravity(Gravity.CENTER_HORIZONTAL); + container.addView(subtitle, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM, 24, 0, 24, 108)); + + buttonsLayout = new LinearLayout(context); + buttonsLayout.setOrientation(LinearLayout.HORIZONTAL); + + TextView negative = new TextView(getContext()) { + @Override + public void setEnabled(boolean enabled) { + super.setEnabled(enabled); + setAlpha(enabled ? 
1.0f : 0.5f); + } + + @Override + public void setTextColor(int color) { + super.setTextColor(color); + setBackgroundDrawable(Theme.getRoundRectSelectorDrawable(color)); + } + }; + negative.setMinWidth(AndroidUtilities.dp(64)); + negative.setTag(Dialog.BUTTON_POSITIVE); + negative.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + negative.setTextColor(Theme.getColor(Theme.key_voipgroup_nameText)); + negative.setGravity(Gravity.CENTER); + negative.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + negative.setText(LocaleController.getString("Cancel", R.string.Cancel)); + negative.setBackgroundDrawable(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(6), Theme.getColor(Theme.key_voipgroup_listViewBackground), ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_nameText), (int) (255 * 0.3f)))); + negative.setPadding(0, AndroidUtilities.dp(12), 0, AndroidUtilities.dp(12)); + + negativeButton = negative; + + TextView positive = new TextView(getContext()) { + + Paint gradientPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + @Override + protected void onSizeChanged(int w, int h, int oldw, int oldh) { + super.onSizeChanged(w, h, oldw, oldh); + Shader gradient = new LinearGradient(0, 0, getMeasuredWidth(), 0, new int[]{Theme.getColor(Theme.key_voipgroup_unmuteButton), Theme.getColor(Theme.key_voipgroup_unmuteButton2)}, null, Shader.TileMode.CLAMP); + gradientPaint.setShader(gradient); + } + + + @Override + protected void onDraw(Canvas canvas) { + AndroidUtilities.rectTmp.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + canvas.drawRoundRect(AndroidUtilities.rectTmp, AndroidUtilities.dp(6), AndroidUtilities.dp(6), gradientPaint); + super.onDraw(canvas); + } + }; + positive.setMinWidth(AndroidUtilities.dp(64)); + positive.setTag(Dialog.BUTTON_POSITIVE); + positive.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14); + positive.setTextColor(Theme.getColor(Theme.key_voipgroup_nameText)); + positive.setGravity(Gravity.CENTER); + 
positive.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + positive.setText(LocaleController.getString("ShareVideo", R.string.ShareVideo)); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + positive.setForeground(Theme.createSimpleSelectorRoundRectDrawable(AndroidUtilities.dp(6), Color.TRANSPARENT, ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_nameText), (int) (255 * 0.3f)))); + } + positive.setPadding(0, AndroidUtilities.dp(12), 0, AndroidUtilities.dp(12)); + positiveButton = positive; + + buttonsLayout.addView(negative, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48, 1f, 0, 4, 0, 4, 0)); + buttonsLayout.addView(positive, LayoutHelper.createLinear(LayoutHelper.MATCH_PARENT, 48, 1f, 0, 4, 0, 4, 0)); + + addView(textureView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + container.addView(buttonsLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM)); + if (VoIPService.getSharedInstance() != null) { + textureView.renderer.init(VideoCapturerDevice.getEglBase().getEglBaseContext(), new RendererCommon.RendererEvents() { + @Override + public void onFirstFrameRendered() { + textureView.animate().alpha(1f).setDuration(250); + } + + @Override + public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) { + + } + }); + VoIPService.getSharedInstance().setLocalSink(textureView.renderer, false); + } + + negative.setOnClickListener(view -> { + dismiss(false); + }); + positive.setOnClickListener(view -> { + if (isDismissed) { + return; + } + dismiss(true); + }); + + setAlpha(0); + setTranslationX(AndroidUtilities.dp(32)); + animate().alpha(1f).translationX(0).setDuration(150).start(); + + flipIconView = new RLottieImageView(context); + flipIconView.setPadding(AndroidUtilities.dp(10), AndroidUtilities.dp(10), AndroidUtilities.dp(10), AndroidUtilities.dp(10)); + 
flipIconView.setBackground(Theme.createCircleDrawable(AndroidUtilities.dp(48), ColorUtils.setAlphaComponent(Color.BLACK, (int) (255 * 0.3f)))); + RLottieDrawable flipIcon = new RLottieDrawable(R.raw.camera_flip, "" + R.raw.camera_flip, AndroidUtilities.dp(24), AndroidUtilities.dp(24), true, null); + flipIconView.setAnimation(flipIcon); + flipIconView.setScaleType(ImageView.ScaleType.FIT_CENTER); + flipIconView.setOnClickListener(v -> { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().switchCamera(); + if (flipIconEndFrame == 18) { + flipIcon.setCustomEndFrame(flipIconEndFrame = 39); + flipIcon.start(); + } else { + flipIcon.setCurrentFrame(0, false); + flipIcon.setCustomEndFrame(flipIconEndFrame = 18); + flipIcon.start(); + } + } + }); + + addView(flipIconView, LayoutHelper.createFrame(48, 48)); + + + micIconView = new RLottieImageView(context); + micIconView.setPadding(AndroidUtilities.dp(9), AndroidUtilities.dp(9), AndroidUtilities.dp(9), AndroidUtilities.dp(9)); + micIconView.setBackground(Theme.createCircleDrawable(AndroidUtilities.dp(48), ColorUtils.setAlphaComponent(Color.BLACK, (int) (255 * 0.3f)))); + RLottieDrawable micIcon = new RLottieDrawable(R.raw.voice_mini, "" + R.raw.voice_mini, AndroidUtilities.dp(24), AndroidUtilities.dp(24), true, null); + micIconView.setAnimation(micIcon); + micIconView.setScaleType(ImageView.ScaleType.FIT_CENTER); + micEnabled = true; + micIcon.setCurrentFrame(micEnabled ? 
69 : 36); + micIconView.setOnClickListener(v -> { + micEnabled = !micEnabled; + if (micEnabled) { + micIcon.setCurrentFrame(36); + micIcon.setCustomEndFrame(69); + } else { + micIcon.setCurrentFrame(69); + micIcon.setCustomEndFrame(99); + } + micIcon.start(); + }); + addView(micIconView, LayoutHelper.createFrame(48, 48)); + + setWillNotDraw(false); + } + + public void dismiss(boolean apply) { + if (isDismissed) { + return; + } + isDismissed = true; + animate().alpha(0f).translationX(AndroidUtilities.dp(32)).setDuration(150).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + if (getParent() != null) { + ((ViewGroup) getParent()).removeView(VideoPreviewDialog.this); + } + onDismiss(apply); + } + }); + invalidate(); + } + + @Override + protected void onDraw(Canvas canvas) { + float x = textureView.getRight() - AndroidUtilities.dp(48 + 12) - textureView.currentClipHorizontal; + float y = textureView.getBottom() - AndroidUtilities.dp(48 + 12) - textureView.currentClipVertical; + flipIconView.setTranslationX(x); + flipIconView.setTranslationY(y); + flipIconView.setScaleX(textureView.getScaleX()); + flipIconView.setScaleY(textureView.getScaleY()); + flipIconView.setPivotX(getMeasuredWidth() / 2f - x); + flipIconView.setPivotY(getMeasuredHeight() / 2f - y); + flipIconView.setAlpha(textureView.renderer.getAlpha() * (1f - outProgress)); + + + x = textureView.getLeft() + AndroidUtilities.dp(12) + textureView.currentClipHorizontal; + micIconView.setTranslationX(x); + micIconView.setTranslationY(y); + micIconView.setScaleX(textureView.getScaleX()); + micIconView.setScaleY(textureView.getScaleY()); + micIconView.setPivotX(getMeasuredWidth() / 2f - x); + micIconView.setPivotY(getMeasuredHeight() / 2f - y); + micIconView.setAlpha(textureView.renderer.getAlpha() * (1f - outProgress)); + + canvas.drawColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_actionBar), (int) (255 
* (1f - outProgress)))); + if (isDismissed || textureView.renderer.getAlpha() != 1f) { + invalidate(); + } + + if (!textureView.renderer.isFirstFrameRendered() && textureView.renderer.getAlpha() != 1f) { + MarginLayoutParams layoutParams = (MarginLayoutParams) textureView.getLayoutParams(); + AndroidUtilities.rectTmp.set(layoutParams.leftMargin, layoutParams.topMargin, getMeasuredWidth() - layoutParams.rightMargin, getMeasuredHeight() - layoutParams.bottomMargin); + float k = !GroupCallActivity.isLandscapeMode ? 9f / 16f : 16f / 9f; + + if (AndroidUtilities.rectTmp.width() / AndroidUtilities.rectTmp.height() > k) { + float padding = (AndroidUtilities.rectTmp.width() - AndroidUtilities.rectTmp.height() * k) / 2f; + AndroidUtilities.rectTmp.left += padding; + AndroidUtilities.rectTmp.right -= padding; + } else { + float padding = (AndroidUtilities.rectTmp.height() - AndroidUtilities.rectTmp.width() * k) / 2f; + AndroidUtilities.rectTmp.top += padding; + AndroidUtilities.rectTmp.bottom -= padding; + } + + drawable.setParentWidth(getMeasuredWidth()); + drawable.draw(canvas, AndroidUtilities.rectTmp, AndroidUtilities.dp(8)); + invalidate(); + } + + super.onDraw(canvas); + } + + @Override + public boolean onTouchEvent(MotionEvent event) { + return true; + } + + protected void onDismiss(boolean apply) { + + } + + boolean ignoreLayout = false; + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + boolean isLandscape = MeasureSpec.getSize(widthMeasureSpec) > MeasureSpec.getSize(heightMeasureSpec); + ignoreLayout = true; + + if (isLandscape) { + actionBar.setTitle(null); + MarginLayoutParams marginLayoutParams = (MarginLayoutParams) textureView.getLayoutParams(); + marginLayoutParams.topMargin = AndroidUtilities.dp(8); + marginLayoutParams.bottomMargin = AndroidUtilities.dp(76); + marginLayoutParams.rightMargin = marginLayoutParams.leftMargin = AndroidUtilities.dp(48); + negativeButton.setVisibility(View.VISIBLE); + 
subtitle.setVisibility(View.GONE); + + marginLayoutParams = (MarginLayoutParams) buttonsLayout.getLayoutParams(); + marginLayoutParams.rightMargin = marginLayoutParams.leftMargin = AndroidUtilities.dp(80); + marginLayoutParams.bottomMargin = AndroidUtilities.dp(16); + + } else { + MarginLayoutParams marginLayoutParams = (MarginLayoutParams) textureView.getLayoutParams(); + actionBar.setTitle(LocaleController.getString("CallVideoPreviewTitle", R.string.CallVideoPreviewTitle)); + marginLayoutParams.topMargin = ActionBar.getCurrentActionBarHeight() + AndroidUtilities.dp(8); + marginLayoutParams.bottomMargin = AndroidUtilities.dp(168); + marginLayoutParams.rightMargin = marginLayoutParams.leftMargin = AndroidUtilities.dp(12); + negativeButton.setVisibility(View.GONE); + subtitle.setVisibility(View.VISIBLE); + + marginLayoutParams = (MarginLayoutParams) buttonsLayout.getLayoutParams(); + marginLayoutParams.rightMargin = marginLayoutParams.leftMargin = marginLayoutParams.bottomMargin = AndroidUtilities.dp(16); + } + ignoreLayout = false; + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + @Override + public void requestLayout() { + if (ignoreLayout) { + return; + } + super.requestLayout(); + } + + public int getBackgroundColor() { + int color = Theme.getColor(Theme.key_voipgroup_actionBar); + color = ColorUtils.setAlphaComponent(color, (int) (255 * (getAlpha() * (1f - outProgress)))); + return color; + } + + @Override + public void invalidate() { + super.invalidate(); + if (getParent() != null) { + ((View) getParent()).invalidate(); + } + } + + public void update() { + if (VoIPService.getSharedInstance() != null) { + textureView.renderer.setMirror(VoIPService.getSharedInstance().isFrontFaceCamera()); + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java index bdab37c21..33234ad51 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java @@ -723,35 +723,6 @@ public class VoIPHelper { if (fragment == null || fragment.getParentActivity() == null) { return; } - JoinCallAlert.checkFewUsers(fragment.getParentActivity(), -currentChat.id, accountInstance, param -> { - /*if (param) { - if (fragment.getParentActivity() == null) { - return; - } - AlertDialog.Builder builder = new AlertDialog.Builder(fragment.getParentActivity()); - - builder.setTitle(LocaleController.getString("StartVoipChatTitle", R.string.StartVoipChatTitle)); - if (recreate) { - builder.setMessage(LocaleController.getString("VoipGroupEndedStartNew", R.string.VoipGroupEndedStartNew)); - } else { - if (ChatObject.isChannel(currentChat) && !currentChat.megagroup) { - builder.setMessage(LocaleController.getString("StartVoipChannelAlertText", R.string.StartVoipChannelAlertText)); - } else { - builder.setMessage(LocaleController.getString("StartVoipChatAlertText", R.string.StartVoipChatAlertText)); - } - } - - builder.setPositiveButton(LocaleController.getString("Start", R.string.Start), (dialogInterface, i) -> { - if (fragment.getParentActivity() == null) { - return; - } - startCall(currentChat, peer, null, true, fragment.getParentActivity(), fragment, accountInstance); - }); - builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); - fragment.showDialog(builder.create()); - } else {*/ - startCall(currentChat, peer, null, true, fragment.getParentActivity(), fragment, accountInstance); - //} - }); + JoinCallAlert.checkFewUsers(fragment.getParentActivity(), -currentChat.id, accountInstance, param -> startCall(currentChat, peer, null, true, fragment.getParentActivity(), fragment, accountInstance)); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java index 2670f00fe..5074495b1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java @@ -37,15 +37,14 @@ import org.telegram.messenger.LocaleController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.voip.Instance; -import org.telegram.messenger.voip.VideoCameraCapturer; -import org.telegram.messenger.voip.VoIPBaseService; +import org.telegram.messenger.voip.VideoCapturerDevice; import org.telegram.messenger.voip.VoIPService; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.LaunchActivity; import org.telegram.ui.VoIPFragment; -public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationCenter.NotificationCenterDelegate { +public class VoIPPiPView implements VoIPService.StateListener, NotificationCenter.NotificationCenterDelegate { public final static int ANIMATION_ENTER_TYPE_SCALE = 0; public final static int ANIMATION_ENTER_TYPE_TRANSITION = 1; @@ -136,7 +135,7 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC }; public static void show(Activity activity, int account, int parentWidth, int parentHeight, int animationType) { - if (instance != null || VideoCameraCapturer.eglBase == null) { + if (instance != null || VideoCapturerDevice.eglBase == null) { return; } WindowManager.LayoutParams windowLayoutParams = createWindowLayoutParams(activity, parentWidth, parentHeight, SCALE_NORMAL); @@ -159,8 +158,8 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC NotificationCenter.getGlobalInstance().addObserver(instance, NotificationCenter.didEndCall); wm.addView(instance.windowView, windowLayoutParams); - 
instance.currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null); - instance.callingUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null); + instance.currentUserTextureView.renderer.init(VideoCapturerDevice.eglBase.getEglBaseContext(), null); + instance.callingUserTextureView.renderer.init(VideoCapturerDevice.eglBase.getEglBaseContext(), null); if (animationType == ANIMATION_ENTER_TYPE_SCALE) { instance.windowView.setScaleX(0.5f); @@ -301,8 +300,9 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC floatingView = new FloatingView(context); - callingUserTextureView = new VoIPTextureView(context, false); - currentUserTextureView = new VoIPTextureView(context, false); + callingUserTextureView = new VoIPTextureView(context, false, true); + callingUserTextureView.scaleType = VoIPTextureView.SCALE_TYPE_NONE; + currentUserTextureView = new VoIPTextureView(context, false, true); currentUserTextureView.renderer.setMirror(true); floatingView.addView(callingUserTextureView); @@ -386,7 +386,7 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC @Override public void onStateChanged(int state) { - if (state == VoIPBaseService.STATE_ENDED || state == VoIPService.STATE_BUSY || state == VoIPService.STATE_FAILED || state == VoIPService.STATE_HANGING_UP) { + if (state == VoIPService.STATE_ENDED || state == VoIPService.STATE_BUSY || state == VoIPService.STATE_FAILED || state == VoIPService.STATE_HANGING_UP) { AndroidUtilities.runOnUIThread(VoIPPiPView::finish, 200); } VoIPService service = VoIPService.getSharedInstance(); @@ -433,9 +433,9 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC return; } if (!screenOn && currentUserIsVideo) { - service.setVideoState(Instance.VIDEO_STATE_PAUSED); - } else if (screenOn && service.getVideoState() == Instance.VIDEO_STATE_PAUSED) { - service.setVideoState(Instance.VIDEO_STATE_ACTIVE); 
+ service.setVideoState(false, Instance.VIDEO_STATE_PAUSED); + } else if (screenOn && service.getVideoState(false) == Instance.VIDEO_STATE_PAUSED) { + service.setVideoState(false, Instance.VIDEO_STATE_ACTIVE); } } @@ -445,8 +445,8 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC VoIPService service = VoIPService.getSharedInstance(); if (service != null) { - callingUserIsVideo = service.getCurrentVideoState() == Instance.VIDEO_STATE_ACTIVE; - currentUserIsVideo = service.getVideoState() == Instance.VIDEO_STATE_ACTIVE || service.getVideoState() == Instance.VIDEO_STATE_PAUSED; + callingUserIsVideo = service.getRemoteVideoState() == Instance.VIDEO_STATE_ACTIVE; + currentUserIsVideo = service.getVideoState(false) == Instance.VIDEO_STATE_ACTIVE || service.getVideoState(false) == Instance.VIDEO_STATE_PAUSED; currentUserTextureView.renderer.setMirror(service.isFrontFaceCamera()); } @@ -476,7 +476,7 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC if (windowLayoutParams.type == WindowManager.LayoutParams.LAST_APPLICATION_WINDOW) { VoIPService service = VoIPService.getSharedInstance(); if (currentUserIsVideo) { - service.setVideoState(Instance.VIDEO_STATE_PAUSED); + service.setVideoState(false, Instance.VIDEO_STATE_PAUSED); } } } @@ -484,8 +484,8 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC public void onResume() { VoIPService service = VoIPService.getSharedInstance(); - if (service != null && service.getVideoState() == Instance.VIDEO_STATE_PAUSED) { - service.setVideoState(Instance.VIDEO_STATE_ACTIVE); + if (service != null && service.getVideoState(false) == Instance.VIDEO_STATE_PAUSED) { + service.setVideoState(false, Instance.VIDEO_STATE_ACTIVE); } } @@ -820,11 +820,11 @@ public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationC to.callingUserTextureView.setStub(from.callingUserTextureView); from.currentUserTextureView.renderer.release(); 
from.callingUserTextureView.renderer.release(); - if (VideoCameraCapturer.eglBase == null) { + if (VideoCapturerDevice.eglBase == null) { return; } - to.currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null); - to.callingUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null); + to.currentUserTextureView.renderer.init(VideoCapturerDevice.eglBase.getEglBaseContext(), null); + to.callingUserTextureView.renderer.init(VideoCapturerDevice.eglBase.getEglBaseContext(), null); if (VoIPService.getSharedInstance() != null) { VoIPService.getSharedInstance().setSinks(to.currentUserTextureView.renderer, to.callingUserTextureView.renderer); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPTextureView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPTextureView.java index 2062842ff..cc7fc8fe9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPTextureView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPTextureView.java @@ -1,20 +1,21 @@ package org.telegram.ui.Components.voip; +import android.animation.Animator; +import android.animation.AnimatorListenerAdapter; +import android.animation.ValueAnimator; import android.annotation.TargetApi; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.graphics.Outline; -import android.graphics.Paint; -import android.graphics.Path; -import android.graphics.PorterDuff; -import android.graphics.PorterDuffXfermode; -import android.graphics.RectF; import android.os.Build; +import android.view.Display; import android.view.Gravity; +import android.view.TextureView; import android.view.View; import android.view.ViewOutlineProvider; +import android.view.WindowManager; import android.widget.FrameLayout; import android.widget.ImageView; @@ -22,75 +23,137 @@ import 
androidx.annotation.NonNull; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.Utilities; +import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.GroupCallActivity; import org.webrtc.RendererCommon; import org.webrtc.TextureViewRenderer; import java.io.File; import java.io.FileOutputStream; +import java.util.ArrayList; public class VoIPTextureView extends FrameLayout { - - final Path path = new Path(); - final RectF rectF = new RectF(); - final Paint xRefPaint = new Paint(Paint.ANTI_ALIAS_FLAG); - final boolean isCamera; + final boolean applyRotation; float roundRadius; public final TextureViewRenderer renderer; + public TextureView blurRenderer; public final ImageView imageView; public View backgroundView; + private Bitmap thumb; public Bitmap cameraLastBitmap; public float stubVisibleProgress = 1f; - public VoIPTextureView(@NonNull Context context, boolean isCamera) { + boolean animateOnNextLayout; + long animateNextDuration; + ArrayList animateOnNextLayoutAnimations = new ArrayList<>(); + int animateFromHeight; + int animateFromWidth; + + float animateFromY; + float animateFromX; + + float clipVertical; + float clipHorizontal; + float currentClipVertical; + float currentClipHorizontal; + + float aninateFromScale = 1f; + float aninateFromScaleBlur = 1f; + float animateFromThumbScale = 1f; + float animateFromRendererW; + float animateFromRendererH; + + public float scaleTextureToFill; + private float scaleTextureToFillBlur; + private float scaleThumb; + float currentThumbScale; + + public static int SCALE_TYPE_NONE = 3; + public static int SCALE_TYPE_FILL = 0; + public static int SCALE_TYPE_FIT = 1; + public static int SCALE_TYPE_ADAPTIVE = 2; + + public int scaleType; + + ValueAnimator currentAnimation; + + boolean applyRoundRadius; + boolean clipToTexture; + public float animationProgress; + + public VoIPTextureView(@NonNull Context context, boolean isCamera, 
boolean applyRotation) { + this(context, isCamera, applyRotation, true, false); + } + + public VoIPTextureView(@NonNull Context context, boolean isCamera, boolean applyRotation, boolean applyRoundRadius, boolean blurBackground) { super(context); this.isCamera = isCamera; + this.applyRotation = applyRotation; imageView = new ImageView(context); + renderer = new TextureViewRenderer(context) { @Override public void onFirstFrameRendered() { super.onFirstFrameRendered(); - VoIPTextureView.this.invalidate(); - } - - @Override - protected void onMeasure(int widthSpec, int heightSpec) { - super.onMeasure(widthSpec, heightSpec); + VoIPTextureView.this.onFirstFrameRendered(); } }; + renderer.setFpsReduction(30); + renderer.setOpaque(false); renderer.setEnableHardwareScaler(true); - renderer.setIsCamera(isCamera); - if (!isCamera) { + renderer.setIsCamera(!applyRotation); + if (!isCamera && applyRotation) { backgroundView = new View(context); backgroundView.setBackgroundColor(0xff1b1f23); addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + + if (blurBackground) { + blurRenderer = new TextureView(context); + addView(blurRenderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + } + renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT); addView(renderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + } else if (!isCamera) { + if (blurBackground) { + blurRenderer = new TextureView(context); + addView(blurRenderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + } + addView(renderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); } else { + if (blurBackground) { + blurRenderer = new TextureView(context); + addView(blurRenderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 
Gravity.CENTER)); + } addView(renderer); } + addView(imageView); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - setOutlineProvider(new ViewOutlineProvider() { - @TargetApi(Build.VERSION_CODES.LOLLIPOP) - @Override - public void getOutline(View view, Outline outline) { - if (roundRadius < 1) { - outline.setRect(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight()); - } else { - outline.setRoundRect(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight(), roundRadius); + if (blurRenderer != null) { + blurRenderer.setOpaque(false); + } + + if (applyRoundRadius) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + setOutlineProvider(new ViewOutlineProvider() { + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + @Override + public void getOutline(View view, Outline outline) { + if (roundRadius < 1) { + outline.setRect((int) currentClipHorizontal, (int) currentClipVertical, (int) (view.getMeasuredWidth() - currentClipHorizontal), (int) (view.getMeasuredHeight() - currentClipVertical)); + } else { + outline.setRoundRect((int) currentClipHorizontal, (int) currentClipVertical, (int) (view.getMeasuredWidth() - currentClipHorizontal), (int) (view.getMeasuredHeight() - currentClipVertical), roundRadius); + } } - } - }); - setClipToOutline(true); - } else { - xRefPaint.setColor(0xff000000); - xRefPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); + }); + setClipToOutline(true); + } } if (isCamera) { @@ -109,20 +172,27 @@ public class VoIPTextureView extends FrameLayout { } } } + + if (!applyRotation) { + Display display = ((WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); + renderer.setScreenRotation(display.getRotation()); + } + } + + protected void onFirstFrameRendered() { + VoIPTextureView.this.invalidate(); + if (renderer.getAlpha() != 1f) { + renderer.animate().setDuration(300).alpha(1f); + } + + if (blurRenderer != null && blurRenderer.getAlpha() != 1f) { + 
blurRenderer.animate().setDuration(300).alpha(1f); + } } @Override protected void dispatchDraw(Canvas canvas) { - if (roundRadius > 0 && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - try { - super.dispatchDraw(canvas); - canvas.drawPath(path, xRefPaint); - } catch (Exception ignore) { - - } - } else { - super.dispatchDraw(canvas); - } + super.dispatchDraw(canvas); if (imageView.getVisibility() == View.VISIBLE && renderer.isFirstFrameRendered()) { stubVisibleProgress -= 16f / 150f; @@ -137,11 +207,13 @@ public class VoIPTextureView extends FrameLayout { } public void setRoundCorners(float radius) { - roundRadius = radius; - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - invalidateOutline(); - } else { - invalidate(); + if (roundRadius != radius) { + roundRadius = radius; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + invalidateOutline(); + } else { + invalidate(); + } } } @@ -162,14 +234,304 @@ public class VoIPTextureView extends FrameLayout { public void setStub(VoIPTextureView from) { Bitmap bitmap = from.renderer.getBitmap(); - if (bitmap == null || bitmap.getPixel(0,0) == 0) { + if (bitmap == null || bitmap.getPixel(0, 0) == 0) { imageView.setImageDrawable(from.imageView.getDrawable()); } else { imageView.setImageBitmap(bitmap); + imageView.setScaleType(ImageView.ScaleType.CENTER_CROP); } stubVisibleProgress = 1f; imageView.setVisibility(View.VISIBLE); imageView.setAlpha(1f); } + public void animateToLayout() { + if (animateOnNextLayout || getMeasuredHeight() == 0 || getMeasuredWidth() == 0) { + return; + } + animateFromHeight = getMeasuredHeight(); + animateFromWidth = getMeasuredWidth(); + + if (animateWithParent && getParent() != null) { + View parent = (View) getParent(); + animateFromY = parent.getY(); + animateFromX = parent.getX(); + } else { + animateFromY = getY(); + animateFromX = getX(); + } + aninateFromScale = scaleTextureToFill; + aninateFromScaleBlur = scaleTextureToFillBlur; + animateFromThumbScale 
= scaleThumb; + animateFromRendererW = renderer.getMeasuredWidth(); + animateFromRendererH = renderer.getMeasuredHeight(); + + animateOnNextLayout = true; + requestLayout(); + } + + boolean ignoreLayout; + + @Override + public void requestLayout() { + if (ignoreLayout) { + return; + } + super.requestLayout(); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (!applyRotation) { + ignoreLayout = true; + Display display = ((WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); + renderer.setScreenRotation(display.getRotation()); + ignoreLayout = false; + } + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + updateRendererSize(); + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + + protected void updateRendererSize() { + if (blurRenderer != null) { + blurRenderer.getLayoutParams().width = renderer.getMeasuredWidth(); + blurRenderer.getLayoutParams().height = renderer.getMeasuredHeight(); + } + } + + @Override + protected void onLayout(boolean changed, int left, int top, int right, int bottom) { + super.onLayout(changed, left, top, right, bottom); + + if (blurRenderer != null) { + scaleTextureToFillBlur = Math.max(getMeasuredHeight() / (float) blurRenderer.getMeasuredHeight(), getMeasuredWidth() / (float) blurRenderer.getMeasuredWidth()); + } + + if (scaleType == SCALE_TYPE_NONE) { + if (blurRenderer != null) { + blurRenderer.setScaleX(scaleTextureToFillBlur); + blurRenderer.setScaleY(scaleTextureToFillBlur); + } + return; + } + + if (renderer.getMeasuredHeight() == 0 || renderer.getMeasuredWidth() == 0 || getMeasuredHeight() == 0 || getMeasuredWidth() == 0) { + scaleTextureToFill = 1f; + if (currentAnimation == null && !animateOnNextLayout) { + currentClipHorizontal = 0; + currentClipVertical = 0; + } + } else if (scaleType == SCALE_TYPE_FILL) { + scaleTextureToFill = Math.max(getMeasuredHeight() / (float) renderer.getMeasuredHeight(), getMeasuredWidth() / (float) 
renderer.getMeasuredWidth()); + } else if (scaleType == SCALE_TYPE_ADAPTIVE) { + //sqaud view + if (Math.abs(getMeasuredHeight() / (float) getMeasuredWidth() - 1f) < 0.02f) { + scaleTextureToFill = Math.max(getMeasuredHeight() / (float) renderer.getMeasuredHeight(), getMeasuredWidth() / (float) renderer.getMeasuredWidth()); + } else { + if (getMeasuredWidth() > getMeasuredHeight() && renderer.getMeasuredHeight() > renderer.getMeasuredWidth()) { + scaleTextureToFill = Math.max(getMeasuredHeight() / (float) renderer.getMeasuredHeight(), (getMeasuredWidth() / 2f ) / (float) renderer.getMeasuredWidth()); + } else { + scaleTextureToFill = Math.min(getMeasuredHeight() / (float) renderer.getMeasuredHeight(), getMeasuredWidth() / (float) renderer.getMeasuredWidth()); + } + } + } else if (scaleType == SCALE_TYPE_FIT) { + scaleTextureToFill = Math.min(getMeasuredHeight() / (float) renderer.getMeasuredHeight(), getMeasuredWidth() / (float) renderer.getMeasuredWidth()); + if (clipToTexture && !animateWithParent && currentAnimation == null && !animateOnNextLayout) { + currentClipHorizontal = (getMeasuredWidth() - renderer.getMeasuredWidth()) / 2f; + currentClipVertical = (getMeasuredHeight() - renderer.getMeasuredHeight()) / 2f; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + invalidateOutline(); + } + } + } + + if (thumb != null) { + scaleThumb = Math.max((getMeasuredWidth()) / (float) thumb.getWidth(), (getMeasuredHeight()) / (float) thumb.getHeight()); + } + + if (animateOnNextLayout) { + aninateFromScale /= renderer.getMeasuredWidth() / animateFromRendererW; + aninateFromScaleBlur /= renderer.getMeasuredWidth() / animateFromRendererW; + animateOnNextLayout = false; + float translationY, translationX; + if (animateWithParent && getParent() != null) { + View parent = (View) getParent(); + translationY = animateFromY - parent.getTop(); + translationX = animateFromX - parent.getLeft(); + } else { + translationY = animateFromY - getTop(); + translationX = 
animateFromX - getLeft(); + } + clipVertical = 0; + clipHorizontal = 0; + if (animateFromHeight != getMeasuredHeight()) { + clipVertical = (getMeasuredHeight() - animateFromHeight) / 2f; + translationY -= clipVertical; + } + if (animateFromWidth != getMeasuredWidth()) { + clipHorizontal = (getMeasuredWidth() - animateFromWidth) / 2f; + translationX -= clipHorizontal; + } + setTranslationY(translationY); + setTranslationX(translationX); + + if (currentAnimation != null) { + currentAnimation.removeAllListeners(); + currentAnimation.cancel(); + } + renderer.setScaleX(aninateFromScale); + renderer.setScaleY(aninateFromScale); + + if (blurRenderer != null) { + blurRenderer.setScaleX(aninateFromScaleBlur); + blurRenderer.setScaleY(aninateFromScaleBlur); + } + + currentClipVertical = clipVertical; + currentClipHorizontal = clipHorizontal; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + invalidateOutline(); + } + invalidate(); + float fromScaleFinal = aninateFromScale; + float fromScaleBlurFinal = aninateFromScaleBlur; + float fromThumbScale = animateFromThumbScale; + + currentAnimation = ValueAnimator.ofFloat(1f, 0); + float finalTranslationX = translationX; + float finalTranslationY = translationY; + currentAnimation.addUpdateListener(animator -> { + float v = (float) animator.getAnimatedValue(); + animationProgress = (1f - v); + currentClipVertical = v * clipVertical; + currentClipHorizontal = v * clipHorizontal; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + invalidateOutline(); + } + invalidate(); + + float s = fromScaleFinal * v + scaleTextureToFill * (1f - v); + renderer.setScaleX(s); + renderer.setScaleY(s); + + s = fromScaleBlurFinal * v + scaleTextureToFillBlur * (1f - v); + if (blurRenderer != null) { + blurRenderer.setScaleX(s); + blurRenderer.setScaleY(s); + } + + setTranslationX(finalTranslationX * v); + setTranslationY(finalTranslationY * v); + currentThumbScale = fromThumbScale * v + scaleThumb * (1f - v); + }); + if 
(animateNextDuration != 0) { + currentAnimation.setDuration(animateNextDuration); + } else { + currentAnimation.setDuration(GroupCallActivity.TRANSITION_DURATION); + } + currentAnimation.setInterpolator(CubicBezierInterpolator.DEFAULT); + currentAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + currentClipVertical = 0; + currentClipHorizontal = 0; + + renderer.setScaleX(scaleTextureToFill); + renderer.setScaleY(scaleTextureToFill); + + if (blurRenderer != null) { + blurRenderer.setScaleX(scaleTextureToFillBlur); + blurRenderer.setScaleY(scaleTextureToFillBlur); + } + + setTranslationY(0); + setTranslationX(0); + + currentThumbScale = scaleThumb; + currentAnimation = null; + } + }); + currentAnimation.start(); + if (!animateOnNextLayoutAnimations.isEmpty()) { + for (int i = 0; i < animateOnNextLayoutAnimations.size(); i++) { + animateOnNextLayoutAnimations.get(i).start(); + } + } + animateOnNextLayoutAnimations.clear(); + animateNextDuration = 0; + } else { + if (currentAnimation == null) { + renderer.setScaleX(scaleTextureToFill); + renderer.setScaleY(scaleTextureToFill); + + if (blurRenderer != null) { + blurRenderer.setScaleX(scaleTextureToFillBlur); + blurRenderer.setScaleY(scaleTextureToFillBlur); + } + + currentThumbScale = scaleThumb; + } + } + } + + public void setCliping(float horizontalClip, float verticalClip) { + if (currentAnimation != null || animateOnNextLayout) { + return; + } + currentClipHorizontal = horizontalClip; + currentClipVertical = verticalClip; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + invalidateOutline(); + } + invalidate(); + + } + + boolean animateWithParent; + + public void setAnimateWithParent(boolean b) { + animateWithParent = b; + } + + public void synchOrRunAnimation(Animator animator) { + if (animateOnNextLayout) { + animateOnNextLayoutAnimations.add(animator); + } else { + animator.start(); + } + } + + public void cancelAnimation() { + 
animateOnNextLayout = false; + animateNextDuration = 0; + } + + public void setAnimateNextDuration(long animateNextDuration) { + this.animateNextDuration = animateNextDuration; + } + + public void setThumb(Bitmap thumb) { + this.thumb = thumb; + } + + public void attachBackgroundRenderer() { + if (blurRenderer != null) { + renderer.setBackgroundRenderer(blurRenderer); + if (!renderer.isFirstFrameRendered()) { + blurRenderer.setAlpha(0f); + } + } + } + + public boolean isInAnimation() { + return currentAnimation != null; + } + + public void updateRotation() { + if (!applyRotation) { + Display display = ((WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); + renderer.setScreenRotation(display.getRotation()); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java index 6d3a9b6dd..d87163a98 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java @@ -26,10 +26,10 @@ import androidx.core.content.ContextCompat; import androidx.core.graphics.ColorUtils; import org.telegram.messenger.AndroidUtilities; -import org.telegram.messenger.FileLog; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.RLottieImageView; public class VoIPToggleButton extends FrameLayout { @@ -37,6 +37,8 @@ public class VoIPToggleButton extends FrameLayout { private boolean drawBackground = true; private boolean animateBackground; Drawable[] icon = new Drawable[2]; + + FrameLayout textLayoutContainer; TextView[] textView = new TextView[2]; int backgroundColor; @@ -75,6 +77,8 @@ public class VoIPToggleButton extends FrameLayout { private float radius; private ValueAnimator 
checkAnimator; + private RLottieImageView lottieImageView; + public VoIPToggleButton(@NonNull Context context) { this(context, 52f); } @@ -83,17 +87,21 @@ public class VoIPToggleButton extends FrameLayout { this.radius = radius; setWillNotDraw(false); + textLayoutContainer = new FrameLayout(context); + addView(textLayoutContainer); + for (int i = 0; i < 2; i++) { TextView textView = new TextView(context); textView.setGravity(Gravity.CENTER_HORIZONTAL); textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 11); textView.setTextColor(Color.WHITE); textView.setImportantForAccessibility(IMPORTANT_FOR_ACCESSIBILITY_NO); - addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, radius + 4, 0, 0)); + textLayoutContainer.addView(textView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, radius + 4, 0, 0)); this.textView[i] = textView; } textView[1].setVisibility(View.GONE); + xRefPaint.setColor(0xff000000); xRefPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR)); xRefPaint.setStrokeWidth(AndroidUtilities.dp(3)); @@ -136,86 +144,88 @@ public class VoIPToggleButton extends FrameLayout { rippleDrawable.setBounds((int) (cx - radius), (int) (cy - radius), (int) (cx + radius), (int) (cy + radius)); rippleDrawable.draw(canvas); - if (drawCross || crossProgress != 0) { - if (iconChangeColor) { - int color = ColorUtils.blendARGB(replaceColorFrom, currentIconColor, replaceProgress); - icon[0].setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); - crossPaint.setColor(color); - } - icon[0].setAlpha(255); - - if (replaceProgress != 0 && iconChangeColor) { - int color = ColorUtils.blendARGB(replaceColorFrom, currentIconColor, replaceProgress); - icon[0].setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); - crossPaint.setColor(color); - } - icon[0].setAlpha(255); - - if (drawCross && crossProgress < 1f) { - crossProgress += 0.08f; - if (crossProgress 
> 1f) { - crossProgress = 1f; - } else { - invalidate(); + if (currentIconRes != 0) { + if (drawCross || crossProgress != 0) { + if (iconChangeColor) { + int color = ColorUtils.blendARGB(replaceColorFrom, currentIconColor, replaceProgress); + icon[0].setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + crossPaint.setColor(color); } - } else if (!drawCross) { - crossProgress -= 0.08f; - if (crossProgress < 0) { - crossProgress = 0; - } else { - invalidate(); + icon[0].setAlpha(255); + + if (replaceProgress != 0 && iconChangeColor) { + int color = ColorUtils.blendARGB(replaceColorFrom, currentIconColor, replaceProgress); + icon[0].setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + crossPaint.setColor(color); } - } - if (crossProgress > 0) { - int left = (int) (cx - icon[0].getIntrinsicWidth() / 2f); - int top = (int) (cy - icon[0].getIntrinsicHeight() / 2); + icon[0].setAlpha(255); - float startX = left + AndroidUtilities.dpf2(8) + crossOffset; - float startY = top + AndroidUtilities.dpf2(8); - - float endX = startX - AndroidUtilities.dp(1) + AndroidUtilities.dp(17) * CubicBezierInterpolator.DEFAULT.getInterpolation(crossProgress); - float endY = startY + AndroidUtilities.dp(17) * CubicBezierInterpolator.DEFAULT.getInterpolation(crossProgress); - - canvas.saveLayerAlpha(0, 0, getMeasuredWidth(), getMeasuredHeight(), 255, Canvas.ALL_SAVE_FLAG); - icon[0].setBounds( - (int) (cx - icon[0].getIntrinsicWidth() / 2f), (int) (cy - icon[0].getIntrinsicHeight() / 2), - (int) (cx + icon[0].getIntrinsicWidth() / 2), (int) (cy + icon[0].getIntrinsicHeight() / 2) - ); - icon[0].draw(canvas); - - canvas.drawLine(startX, startY - AndroidUtilities.dp(2f), endX, endY - AndroidUtilities.dp(2f), xRefPaint); - canvas.drawLine(startX, startY, endX, endY, crossPaint); - canvas.restore(); - } else { - icon[0].setBounds( - (int) (cx - icon[0].getIntrinsicWidth() / 2f), (int) (cy - icon[0].getIntrinsicHeight() / 2), - (int) (cx + 
icon[0].getIntrinsicWidth() / 2), (int) (cy + icon[0].getIntrinsicHeight() / 2) - ); - icon[0].draw(canvas); - } - } else { - for (int i = 0; i < ((replaceProgress == 0 || iconChangeColor) ? 1 : 2); i++) { - if (icon[i] != null) { - canvas.save(); - if (replaceProgress != 0 && !iconChangeColor && icon[0] != null && icon[1] != null) { - float p = i == 0 ? 1f - replaceProgress : replaceProgress; - canvas.scale(p, p, cx, cy); - icon[i].setAlpha((int) (255 * p)); + if (drawCross && crossProgress < 1f) { + crossProgress += 0.08f; + if (crossProgress > 1f) { + crossProgress = 1f; } else { - if (iconChangeColor) { - int color = ColorUtils.blendARGB(replaceColorFrom, currentIconColor, replaceProgress); - icon[i].setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); - crossPaint.setColor(color); - } - icon[i].setAlpha(255); + invalidate(); } - icon[i].setBounds( - (int) (cx - icon[i].getIntrinsicWidth() / 2f), (int) (cy - icon[i].getIntrinsicHeight() / 2), - (int) (cx + icon[i].getIntrinsicWidth() / 2), (int) (cy + icon[i].getIntrinsicHeight() / 2) - ); - icon[i].draw(canvas); + } else if (!drawCross) { + crossProgress -= 0.08f; + if (crossProgress < 0) { + crossProgress = 0; + } else { + invalidate(); + } + } + if (crossProgress > 0) { + int left = (int) (cx - icon[0].getIntrinsicWidth() / 2f); + int top = (int) (cy - icon[0].getIntrinsicHeight() / 2); + float startX = left + AndroidUtilities.dpf2(8) + crossOffset; + float startY = top + AndroidUtilities.dpf2(8); + + float endX = startX - AndroidUtilities.dp(1) + AndroidUtilities.dp(17) * CubicBezierInterpolator.DEFAULT.getInterpolation(crossProgress); + float endY = startY + AndroidUtilities.dp(17) * CubicBezierInterpolator.DEFAULT.getInterpolation(crossProgress); + + canvas.saveLayerAlpha(0, 0, getMeasuredWidth(), getMeasuredHeight(), 255, Canvas.ALL_SAVE_FLAG); + icon[0].setBounds( + (int) (cx - icon[0].getIntrinsicWidth() / 2f), (int) (cy - icon[0].getIntrinsicHeight() / 2), + (int) (cx + 
icon[0].getIntrinsicWidth() / 2), (int) (cy + icon[0].getIntrinsicHeight() / 2) + ); + icon[0].draw(canvas); + + canvas.drawLine(startX, startY - AndroidUtilities.dp(2f), endX, endY - AndroidUtilities.dp(2f), xRefPaint); + canvas.drawLine(startX, startY, endX, endY, crossPaint); canvas.restore(); + } else { + icon[0].setBounds( + (int) (cx - icon[0].getIntrinsicWidth() / 2f), (int) (cy - icon[0].getIntrinsicHeight() / 2), + (int) (cx + icon[0].getIntrinsicWidth() / 2), (int) (cy + icon[0].getIntrinsicHeight() / 2) + ); + icon[0].draw(canvas); + } + } else { + for (int i = 0; i < ((replaceProgress == 0 || iconChangeColor) ? 1 : 2); i++) { + if (icon[i] != null) { + canvas.save(); + if (replaceProgress != 0 && !iconChangeColor && icon[0] != null && icon[1] != null) { + float p = i == 0 ? 1f - replaceProgress : replaceProgress; + canvas.scale(p, p, cx, cy); + icon[i].setAlpha((int) (255 * p)); + } else { + if (iconChangeColor) { + int color = ColorUtils.blendARGB(replaceColorFrom, currentIconColor, replaceProgress); + icon[i].setColorFilter(new PorterDuffColorFilter(color, PorterDuff.Mode.MULTIPLY)); + crossPaint.setColor(color); + } + icon[i].setAlpha(255); + } + icon[i].setBounds( + (int) (cx - icon[i].getIntrinsicWidth() / 2f), (int) (cy - icon[i].getIntrinsicHeight() / 2), + (int) (cx + icon[i].getIntrinsicWidth() / 2), (int) (cy + icon[i].getIntrinsicHeight() / 2) + ); + icon[i].draw(canvas); + + canvas.restore(); + } } } } @@ -248,7 +258,7 @@ public class VoIPToggleButton extends FrameLayout { setVisibility(View.VISIBLE); } - if (currentIconRes == iconRes && currentIconColor == iconColor && (checkable || currentBackgroundColor == backgroundColor) && (currentText != null && currentText.equals(text))) { + if (currentIconRes == iconRes && currentIconColor == iconColor && (checkable || currentBackgroundColor == backgroundColor) && (currentText != null && currentText.equals(text)) && cross == this.drawCross) { return; } @@ -278,8 +288,10 @@ public class 
VoIPToggleButton extends FrameLayout { drawCross = cross; if (!animated) { - icon[0] = ContextCompat.getDrawable(getContext(), iconRes).mutate(); - icon[0].setColorFilter(new PorterDuffColorFilter(iconColor, PorterDuff.Mode.MULTIPLY)); + if (iconRes != 0) { + icon[0] = ContextCompat.getDrawable(getContext(), iconRes).mutate(); + icon[0].setColorFilter(new PorterDuffColorFilter(iconColor, PorterDuff.Mode.MULTIPLY)); + } crossPaint.setColor(iconColor); if (!checkable) { this.backgroundColor = backgroundColor; @@ -290,7 +302,7 @@ public class VoIPToggleButton extends FrameLayout { replaceProgress = 0f; invalidate(); } else { - if (!iconChangeColor) { + if (!iconChangeColor && iconRes != 0) { icon[1] = ContextCompat.getDrawable(getContext(), iconRes).mutate(); icon[1].setColorFilter(new PorterDuffColorFilter(iconColor, PorterDuff.Mode.MULTIPLY)); } @@ -340,7 +352,9 @@ public class VoIPToggleButton extends FrameLayout { icon[1] = null; } iconChangeColor = false; - VoIPToggleButton.this.backgroundColor = animateToBackgroundColor; + if (!checkable) { + VoIPToggleButton.this.backgroundColor = animateToBackgroundColor; + } replaceProgress = 0f; invalidate(); } @@ -437,4 +451,16 @@ public class VoIPToggleButton extends FrameLayout { AndroidUtilities.shakeView(textView[0], 2, 0); AndroidUtilities.shakeView(textView[1], 2, 0); } + + public void showText(boolean show, boolean animated) { + if (animated) { + float a = show ? 1f : 0; + if (textLayoutContainer.getAlpha() != a) { + textLayoutContainer.animate().alpha(a).start(); + } + } else { + textLayoutContainer.animate().cancel(); + textLayoutContainer.setAlpha(show ? 
1f : 0); + } + } } \ No newline at end of file diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java b/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java index 8dd6ac9ee..fefccfa09 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ContentPreviewViewer.java @@ -18,6 +18,8 @@ import android.graphics.PixelFormat; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.os.Build; +import android.os.VibrationEffect; +import android.os.Vibrator; import android.text.Layout; import android.text.StaticLayout; import android.text.TextPaint; @@ -43,6 +45,7 @@ import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.SendMessagesHelper; import org.telegram.messenger.UserConfig; import org.telegram.messenger.WebFile; import org.telegram.tgnet.TLRPC; @@ -79,6 +82,14 @@ public class ContentPreviewViewer { boolean isInScheduleMode(); long getDialogId(); + default boolean needRemove() { + return false; + } + + default void remove(SendMessagesHelper.ImportingSticker sticker) { + + } + default String getQuery(boolean isGif) { return null; } @@ -168,6 +179,11 @@ public class ContentPreviewViewer { icons.add(R.drawable.outline_pack); actions.add(1); } + if (delegate.needRemove()) { + items.add(LocaleController.getString("ImportStickersRemoveMenu", R.string.ImportStickersRemoveMenu)); + icons.add(R.drawable.msg_delete); + actions.add(5); + } } if (!MessageObject.isMaskDocument(currentDocument) && (inFavs || MediaDataController.getInstance(currentAccount).canAddStickerToFavorites() && MessageObject.isStickerHasSet(currentDocument))) { items.add(inFavs ? 
LocaleController.getString("DeleteFromFavorites", R.string.DeleteFromFavorites) : LocaleController.getString("AddToFavorites", R.string.AddToFavorites)); @@ -208,6 +224,8 @@ public class ContentPreviewViewer { AlertsCreator.createScheduleDatePickerDialog(parentActivity, stickerPreviewViewerDelegate.getDialogId(), (notify, scheduleDate) -> stickerPreviewViewerDelegate.sendSticker(sticker, query, parent, notify, scheduleDate)); } else if (actions.get(which) == 4) { MediaDataController.getInstance(currentAccount).addRecentSticker(MediaDataController.TYPE_IMAGE, parentObject, currentDocument, (int) (System.currentTimeMillis() / 1000), true); + } else if (actions.get(which) == 5) { + delegate.remove(importingSticker); } }); builder.setDimBehind(false); @@ -218,6 +236,11 @@ public class ContentPreviewViewer { }); visibleDialog.show(); containerView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); + if (delegate.needRemove()) { + BottomSheet.BottomSheetCell cell = visibleDialog.getItemViews().get(0); + cell.setTextColor(Theme.getColor(Theme.key_dialogTextRed)); + cell.setIconColor(Theme.getColor(Theme.key_dialogRedIcon)); + } } else if (delegate != null) { animateY = true; visibleDialog = new BottomSheet(parentActivity, false) { @@ -309,6 +332,7 @@ public class ContentPreviewViewer { private int currentContentType; private TLRPC.Document currentDocument; + private SendMessagesHelper.ImportingSticker importingSticker; private String currentQuery; private TLRPC.BotInlineResult inlineResult; private TLRPC.InputStickerSet currentStickerSet; @@ -454,20 +478,22 @@ public class ContentPreviewViewer { clearsInputField = false; if (currentPreviewCell instanceof StickerEmojiCell) { StickerEmojiCell stickerEmojiCell = (StickerEmojiCell) currentPreviewCell; - open(stickerEmojiCell.getSticker(), delegate != null ? 
delegate.getQuery(false) : null, null, contentType, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject()); + open(stickerEmojiCell.getSticker(), stickerEmojiCell.getStickerPath(), stickerEmojiCell.getEmoji(), delegate != null ? delegate.getQuery(false) : null, null, contentType, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject()); stickerEmojiCell.setScaled(true); } else if (currentPreviewCell instanceof StickerCell) { StickerCell stickerCell = (StickerCell) currentPreviewCell; - open(stickerCell.getSticker(), delegate != null ? delegate.getQuery(false) : null, null, contentType, false, stickerCell.getParentObject()); + open(stickerCell.getSticker(), null, null, delegate != null ? delegate.getQuery(false) : null, null, contentType, false, stickerCell.getParentObject()); stickerCell.setScaled(true); clearsInputField = stickerCell.isClearsInputField(); } else if (currentPreviewCell instanceof ContextLinkCell) { ContextLinkCell contextLinkCell = (ContextLinkCell) currentPreviewCell; - open(contextLinkCell.getDocument(), delegate != null ? delegate.getQuery(true) : null, contextLinkCell.getBotInlineResult(), contentType, false, contextLinkCell.getBotInlineResult() != null ? contextLinkCell.getInlineBot() : contextLinkCell.getParentObject()); + open(contextLinkCell.getDocument(), null, null, delegate != null ? delegate.getQuery(true) : null, contextLinkCell.getBotInlineResult(), contentType, false, contextLinkCell.getBotInlineResult() != null ? 
contextLinkCell.getInlineBot() : contextLinkCell.getParentObject()); if (contentType != CONTENT_TYPE_GIF) { contextLinkCell.setScaled(true); } } + runSmoothHaptic(); + return true; } } @@ -488,6 +514,20 @@ public class ContentPreviewViewer { return false; } + VibrationEffect vibrationEffect; + + protected void runSmoothHaptic() { + if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) { + final Vibrator vibrator = (Vibrator) containerView.getContext().getSystemService(Context.VIBRATOR_SERVICE); + if (vibrationEffect == null) { + long[] vibrationWaveFormDurationPattern = {0, 2}; + vibrationEffect = VibrationEffect.createWaveform(vibrationWaveFormDurationPattern, -1); + } + vibrator.cancel(); + vibrator.vibrate(vibrationEffect); + } + } + public boolean onInterceptTouchEvent(MotionEvent event, final RecyclerListView listView, final int height, ContentPreviewViewerDelegate contentPreviewViewerDelegate) { delegate = contentPreviewViewerDelegate; if (event.getAction() == MotionEvent.ACTION_DOWN) { @@ -551,20 +591,21 @@ public class ContentPreviewViewer { clearsInputField = false; if (currentPreviewCell instanceof StickerEmojiCell) { StickerEmojiCell stickerEmojiCell = (StickerEmojiCell) currentPreviewCell; - open(stickerEmojiCell.getSticker(), delegate != null ? delegate.getQuery(false) : null, null, contentTypeFinal, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject()); + open(stickerEmojiCell.getSticker(), stickerEmojiCell.getStickerPath(), stickerEmojiCell.getEmoji(), delegate != null ? delegate.getQuery(false) : null, null, contentTypeFinal, stickerEmojiCell.isRecent(), stickerEmojiCell.getParentObject()); stickerEmojiCell.setScaled(true); } else if (currentPreviewCell instanceof StickerCell) { StickerCell stickerCell = (StickerCell) currentPreviewCell; - open(stickerCell.getSticker(), delegate != null ? 
delegate.getQuery(false) : null, null, contentTypeFinal, false, stickerCell.getParentObject()); + open(stickerCell.getSticker(), null, null, delegate != null ? delegate.getQuery(false) : null, null, contentTypeFinal, false, stickerCell.getParentObject()); stickerCell.setScaled(true); clearsInputField = stickerCell.isClearsInputField(); } else if (currentPreviewCell instanceof ContextLinkCell) { ContextLinkCell contextLinkCell = (ContextLinkCell) currentPreviewCell; - open(contextLinkCell.getDocument(), delegate != null ? delegate.getQuery(true) : null, contextLinkCell.getBotInlineResult(), contentTypeFinal, false, contextLinkCell.getBotInlineResult() != null ? contextLinkCell.getInlineBot() : contextLinkCell.getParentObject()); + open(contextLinkCell.getDocument(), null, null, delegate != null ? delegate.getQuery(true) : null, contextLinkCell.getBotInlineResult(), contentTypeFinal, false, contextLinkCell.getBotInlineResult() != null ? contextLinkCell.getInlineBot() : contextLinkCell.getParentObject()); if (contentTypeFinal != CONTENT_TYPE_GIF) { contextLinkCell.setScaled(true); } } + currentPreviewCell.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); }; AndroidUtilities.runOnUIThread(openPreviewRunnable, 200); return true; @@ -629,14 +670,14 @@ public class ContentPreviewViewer { keyboardHeight = height; } - public void open(TLRPC.Document document, String query, TLRPC.BotInlineResult botInlineResult, int contentType, boolean isRecent, Object parent) { + public void open(TLRPC.Document document, SendMessagesHelper.ImportingSticker sticker, String emojiPath, String query, TLRPC.BotInlineResult botInlineResult, int contentType, boolean isRecent, Object parent) { if (parentActivity == null || windowView == null) { return; } isRecentSticker = isRecent; stickerEmojiLayout = null; if (contentType == CONTENT_TYPE_STICKER) { - if (document == null) { + if (document == null && sticker == null) { return; } if 
(textPaint == null) { @@ -644,39 +685,60 @@ public class ContentPreviewViewer { textPaint.setTextSize(AndroidUtilities.dp(24)); } - TLRPC.InputStickerSet newSet = null; - for (int a = 0; a < document.attributes.size(); a++) { - TLRPC.DocumentAttribute attribute = document.attributes.get(a); - if (attribute instanceof TLRPC.TL_documentAttributeSticker && attribute.stickerset != null) { - newSet = attribute.stickerset; - break; - } - } - if (newSet != null && (delegate == null || delegate.needMenu())) { - try { - if (visibleDialog != null) { - visibleDialog.setOnDismissListener(null); - visibleDialog.dismiss(); - visibleDialog = null; - } - } catch (Exception e) { - FileLog.e(e); - } - AndroidUtilities.cancelRunOnUIThread(showSheetRunnable); - AndroidUtilities.runOnUIThread(showSheetRunnable, 1300); - } - currentStickerSet = newSet; - TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 90); - centerImage.setImage(ImageLocation.getForDocument(document), null, ImageLocation.getForDocument(thumb, document), null, "webp", currentStickerSet, 1); - for (int a = 0; a < document.attributes.size(); a++) { - TLRPC.DocumentAttribute attribute = document.attributes.get(a); - if (attribute instanceof TLRPC.TL_documentAttributeSticker) { - if (!TextUtils.isEmpty(attribute.alt)) { - CharSequence emoji = Emoji.replaceEmoji(attribute.alt, textPaint.getFontMetricsInt(), AndroidUtilities.dp(24), false); - stickerEmojiLayout = new StaticLayout(emoji, textPaint, AndroidUtilities.dp(100), Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + if (document != null) { + TLRPC.InputStickerSet newSet = null; + for (int a = 0; a < document.attributes.size(); a++) { + TLRPC.DocumentAttribute attribute = document.attributes.get(a); + if (attribute instanceof TLRPC.TL_documentAttributeSticker && attribute.stickerset != null) { + newSet = attribute.stickerset; break; } } + if (newSet != null && (delegate == null || delegate.needMenu())) { + try { + if (visibleDialog != 
null) { + visibleDialog.setOnDismissListener(null); + visibleDialog.dismiss(); + visibleDialog = null; + } + } catch (Exception e) { + FileLog.e(e); + } + AndroidUtilities.cancelRunOnUIThread(showSheetRunnable); + AndroidUtilities.runOnUIThread(showSheetRunnable, 1300); + } + currentStickerSet = newSet; + TLRPC.PhotoSize thumb = FileLoader.getClosestPhotoSizeWithSize(document.thumbs, 90); + centerImage.setImage(ImageLocation.getForDocument(document), null, ImageLocation.getForDocument(thumb, document), null, "webp", currentStickerSet, 1); + for (int a = 0; a < document.attributes.size(); a++) { + TLRPC.DocumentAttribute attribute = document.attributes.get(a); + if (attribute instanceof TLRPC.TL_documentAttributeSticker) { + if (!TextUtils.isEmpty(attribute.alt)) { + CharSequence emoji = Emoji.replaceEmoji(attribute.alt, textPaint.getFontMetricsInt(), AndroidUtilities.dp(24), false); + stickerEmojiLayout = new StaticLayout(emoji, textPaint, AndroidUtilities.dp(100), Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + break; + } + } + } + } else if (sticker != null) { + centerImage.setImage(sticker.path, null, null, sticker.animated ? 
"tgs" : null, 0); + if (emojiPath != null) { + CharSequence emoji = Emoji.replaceEmoji(emojiPath, textPaint.getFontMetricsInt(), AndroidUtilities.dp(24), false); + stickerEmojiLayout = new StaticLayout(emoji, textPaint, AndroidUtilities.dp(100), Layout.Alignment.ALIGN_CENTER, 1.0f, 0.0f, false); + } + if (delegate.needMenu()) { + try { + if (visibleDialog != null) { + visibleDialog.setOnDismissListener(null); + visibleDialog.dismiss(); + visibleDialog = null; + } + } catch (Exception e) { + FileLog.e(e); + } + AndroidUtilities.cancelRunOnUIThread(showSheetRunnable); + AndroidUtilities.runOnUIThread(showSheetRunnable, 1300); + } } } else { if (document != null) { @@ -707,6 +769,7 @@ public class ContentPreviewViewer { currentContentType = contentType; currentDocument = document; + importingSticker = sticker; currentQuery = query; inlineResult = botInlineResult; parentObject = parent; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/CountrySelectActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/CountrySelectActivity.java index 626956efb..33408725a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/CountrySelectActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/CountrySelectActivity.java @@ -253,7 +253,7 @@ public class CountrySelectActivity extends BaseFragment { } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { ArrayList arr = countries.get(sortedCountries.get(section)); return row < arr.size(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java index e36760bfa..2e495a7dd 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java @@ -28,12 +28,15 @@ import android.content.res.Configuration; import android.graphics.Bitmap; import android.graphics.Canvas; 
import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Matrix; import android.graphics.Outline; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.Rect; import android.graphics.RectF; +import android.graphics.Shader; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.net.Uri; @@ -43,6 +46,7 @@ import android.os.Vibrator; import android.text.TextUtils; import android.util.Property; import android.util.StateSet; +import android.util.TypedValue; import android.view.Gravity; import android.view.HapticFeedbackConstants; import android.view.KeyEvent; @@ -73,8 +77,6 @@ import androidx.recyclerview.widget.LinearSmoothScrollerCustom; import androidx.recyclerview.widget.RecyclerView; import androidx.viewpager.widget.ViewPager; -import com.google.android.exoplayer2.util.Log; - import org.telegram.messenger.AccountInstance; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; @@ -82,6 +84,7 @@ import org.telegram.messenger.BuildVars; import org.telegram.messenger.ChatObject; import org.telegram.messenger.ContactsController; import org.telegram.messenger.DialogObject; +import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; import org.telegram.messenger.ImageLocation; @@ -134,7 +137,6 @@ import org.telegram.ui.Cells.TextCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Cells.UserCell; import org.telegram.ui.Components.AlertsCreator; -import org.telegram.ui.Components.AnimatedArrowDrawable; import org.telegram.ui.Components.AnimationProperties; import org.telegram.ui.Components.AvatarDrawable; import org.telegram.ui.Components.BackupImageView; @@ -152,12 +154,14 @@ import org.telegram.ui.Components.FlickerLoadingView; import 
org.telegram.ui.Components.FragmentContextView; import org.telegram.ui.Components.JoinGroupAlert; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MediaActionDrawable; import org.telegram.ui.Components.NumberTextView; import org.telegram.ui.Components.PacmanAnimation; import org.telegram.ui.Components.ProxyDrawable; import org.telegram.ui.Components.PullForegroundDrawable; import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.RLottieImageView; +import org.telegram.ui.Components.RadialProgress2; import org.telegram.ui.Components.RecyclerAnimationScrollHelper; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.SearchViewPager; @@ -167,6 +171,7 @@ import org.telegram.ui.Components.UndoView; import org.telegram.ui.Components.ViewPagerFixed; import org.telegram.ui.Components.RecyclerItemsEnterAnimator; +import java.io.File; import java.util.ArrayList; public class DialogsActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { @@ -175,19 +180,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private boolean filterTabsViewIsVisible; private int initialSearchType = -1; - public void setShowSearch(String query, int i) { - if (!searching) { - initialSearchType = i; - actionBar.openSearchField(query, false); - } else { - if (!searchItem.getSearchField().getText().toString().equals(query)) { - searchItem.getSearchField().setText(query); - } - if (searchViewPager.getTabsView().getCurrentTabId() != i) { - searchViewPager.getTabsView().scrollToTab(i, i); - } - } - } + private final String ACTION_MODE_SEARCH_DIALOGS_TAG = "search_dialogs_action_mode"; private class ViewPage extends FrameLayout { private DialogsRecyclerView listView; @@ -284,6 +277,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
private ActionBarMenuSubItem blockItem; private float additionalFloatingTranslation; + private float additionalFloatingTranslation2; private float floatingButtonTranslation; private float floatingButtonHideProgress; @@ -292,7 +286,6 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private float searchAnimationProgress; private boolean searchAnimationTabsDelayedCrossfade; - private AnimatedArrowDrawable arrowDrawable; private RecyclerView sideMenu; private ChatActivityEnterView commentView; private ActionBarMenuItem switchItem; @@ -343,6 +336,11 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private boolean checkCanWrite; private boolean afterSignup; + private FrameLayout updateLayout; + private AnimatorSet updateLayoutAnimator; + private RadialProgress2 updateLayoutIcon; + private TextView updateTextView; + private DialogsActivityDelegate delegate; private ArrayList selectedDialogs = new ArrayList<>(); @@ -1504,7 +1502,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (!onlySelect && parentPage.isDefaultDialogType() && slidingView == null && viewHolder.itemView instanceof DialogCell) { DialogCell dialogCell = (DialogCell) viewHolder.itemView; long dialogId = dialogCell.getDialogId(); - if (actionBar.isActionModeShowed()) { + if (actionBar.isActionModeShowed(null)) { TLRPC.Dialog dialog = getMessagesController().dialogs_dict.get(dialogId); if (!allowMoving || dialog == null || !isDialogPinned(dialog) || DialogObject.isFolderDialogId(dialogId)) { return 0; @@ -1788,7 +1786,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
currentConnectionState = getConnectionsManager().getConnectionState(); getNotificationCenter().addObserver(this, NotificationCenter.dialogsNeedReload); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); if (!onlySelect) { NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.closeSearchByActiveAction); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.proxySettingsChanged); @@ -1812,8 +1810,12 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. getNotificationCenter().addObserver(this, NotificationCenter.needDeleteDialog); getNotificationCenter().addObserver(this, NotificationCenter.folderBecomeEmpty); getNotificationCenter().addObserver(this, NotificationCenter.newSuggestionsAvailable); + getNotificationCenter().addObserver(this, NotificationCenter.fileLoaded); + getNotificationCenter().addObserver(this, NotificationCenter.fileLoadFailed); + getNotificationCenter().addObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didSetPasscode); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.appUpdateAvailable); } getNotificationCenter().addObserver(this, NotificationCenter.messagesDeleted); @@ -1849,7 +1851,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
super.onFragmentDestroy(); if (searchString == null) { getNotificationCenter().removeObserver(this, NotificationCenter.dialogsNeedReload); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); if (!onlySelect) { NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.closeSearchByActiveAction); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.proxySettingsChanged); @@ -1874,8 +1876,12 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. getNotificationCenter().removeObserver(this, NotificationCenter.folderBecomeEmpty); getNotificationCenter().removeObserver(this, NotificationCenter.newSuggestionsAvailable); getNotificationCenter().removeObserver(this, NotificationCenter.messagesDeleted); + getNotificationCenter().removeObserver(this, NotificationCenter.fileLoaded); + getNotificationCenter().removeObserver(this, NotificationCenter.fileLoadFailed); + getNotificationCenter().removeObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetPasscode); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.appUpdateAvailable); } getNotificationCenter().removeObserver(this, NotificationCenter.didClearDatabase); @@ -2042,7 +2048,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. searchViewPager.removeSearchFilter(filterData); searchViewPager.onTextChanged(searchItem.getSearchField().getText().toString()); - updateFiltersView(true, null, null,true); + updateFiltersView(true, null, null,false, true); } @Override @@ -2451,7 +2457,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
TLRPC.User user = getUserConfig().getCurrentUser(); avatarDrawable.setInfo(user); imageView.getImageReceiver().setCurrentAccount(currentAccount); - imageView.setForUserOrChat(user, avatarDrawable); + imageView.setImage(ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_SMALL), "50_50", ImageLocation.getForUserOrChat(user, ImageLocation.TYPE_STRIPPED), "50_50", avatarDrawable, user); for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { TLRPC.User u = AccountInstance.getInstance(a).getUserConfig().getCurrentUser(); @@ -2470,7 +2476,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. sideMenu.getAdapter().notifyDataSetChanged(); } - createActionMode(); + createActionMode(null); ContentView contentView = new ContentView(context); fragmentView = contentView; @@ -2785,7 +2791,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } }); viewPage.swipeController = new SwipeController(viewPage); - viewPage.recyclerItemsEnterAnimator = new RecyclerItemsEnterAnimator(viewPage.listView); + viewPage.recyclerItemsEnterAnimator = new RecyclerItemsEnterAnimator(viewPage.listView, false); viewPage.itemTouchhelper = new ItemTouchHelper(viewPage.swipeController); viewPage.itemTouchhelper.attachToRecyclerView(viewPage.listView); @@ -3066,6 +3072,11 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. searchViewPager.runResultsEnterAnimation(); } } + + @Override + public boolean isSelected(long dialogId) { + return selectedDialogs.contains(dialogId); + } }); searchViewPager.searchListView.setOnItemClickListener((view, position) -> { @@ -3092,7 +3103,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
} }); - searchViewPager.setFilteredSearchViewDelegate((showMediaFilters, users, dates) -> DialogsActivity.this.updateFiltersView(showMediaFilters, users, dates, true)); + searchViewPager.setFilteredSearchViewDelegate((showMediaFilters, users, dates, archive) -> DialogsActivity.this.updateFiltersView(showMediaFilters, users, dates, archive,true)); searchViewPager.setVisibility(View.GONE); filtersView = new FiltersView(getParentActivity()); @@ -3310,6 +3321,80 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. contentView.addView(actionBar, layoutParams); } + if (searchString == null && initialDialogsType == 0) { + updateLayout = new FrameLayout(context) { + + private Paint paint = new Paint(); + private Matrix matrix = new Matrix(); + private LinearGradient updateGradient; + private int lastGradientWidth; + + @Override + protected void onDraw(Canvas canvas) { + if (updateGradient == null) { + return; + } + paint.setColor(0xffffffff); + paint.setShader(updateGradient); + updateGradient.setLocalMatrix(matrix); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), paint); + updateLayoutIcon.setBackgroundGradientDrawable(updateGradient); + updateLayoutIcon.draw(canvas); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + int width = MeasureSpec.getSize(widthMeasureSpec); + if (lastGradientWidth != width) { + updateGradient = new LinearGradient(0, 0, width, 0, new int[]{0xff69BF72, 0xff53B3AD}, new float[]{0.0f, 1.0f}, Shader.TileMode.CLAMP); + lastGradientWidth = width; + } + int x = (getMeasuredWidth() - updateTextView.getMeasuredWidth()) / 2; + updateLayoutIcon.setProgressRect(x, AndroidUtilities.dp(13), x + AndroidUtilities.dp(22), AndroidUtilities.dp(13 + 22)); + } + + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + additionalFloatingTranslation2 = AndroidUtilities.dp(48) 
- translationY; + if (additionalFloatingTranslation2 < 0) { + additionalFloatingTranslation2 = 0; + } + if (!floatingHidden) { + updateFloatingButtonOffset(); + } + } + }; + updateLayout.setWillNotDraw(false); + updateLayout.setVisibility(View.INVISIBLE); + updateLayout.setTranslationY(AndroidUtilities.dp(48)); + if (Build.VERSION.SDK_INT >= 21) { + updateLayout.setBackground(Theme.getSelectorDrawable(Theme.getColor(Theme.key_listSelector), null)); + } + contentView.addView(updateLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | Gravity.BOTTOM)); + updateLayout.setOnClickListener(v -> { + if (!SharedConfig.isAppUpdateAvailable()) { + return; + } + AndroidUtilities.openForView(SharedConfig.pendingAppUpdate.document, true, getParentActivity()); + }); + + updateLayoutIcon = new RadialProgress2(updateLayout); + updateLayoutIcon.setColors(0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff); + updateLayoutIcon.setCircleRadius(AndroidUtilities.dp(11)); + updateLayoutIcon.setAsMini(); + updateLayoutIcon.setIcon(MediaActionDrawable.ICON_UPDATE, true, false); + + updateTextView = new TextView(context); + updateTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + updateTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + updateTextView.setText(LocaleController.getString("AppUpdateNow", R.string.AppUpdateNow).toUpperCase()); + updateTextView.setTextColor(0xffffffff); + updateTextView.setPadding(AndroidUtilities.dp(30), 0, 0, 0); + updateLayout.addView(updateTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER, 0, 0, 0, 0)); + } + for (int a = 0; a < 2; a++) { undoView[a] = new UndoView(context) { @Override @@ -3432,16 +3517,73 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
showSearch(false, false); } - if (folderId != 0) { - FiltersView.MediaFilterData filterData = new FiltersView.MediaFilterData(R.drawable.chats_archive, R.drawable.chats_archive, LocaleController.getString("ArchiveSearchFilter", R.string.ArchiveSearchFilter), null, FiltersView.FILTER_TYPE_ARCHIVE); - filterData.removable = false; - actionBar.setSearchFilter(filterData); - searchItem.collapseSearchFilters(); - } - + updateMenuButton(false); return fragmentView; } + private void updateAppUpdateViews(boolean animated) { + if (updateLayout == null) { + return; + } + boolean show; + if (SharedConfig.isAppUpdateAvailable()) { + String fileName = FileLoader.getAttachFileName(SharedConfig.pendingAppUpdate.document); + File path = FileLoader.getPathToAttach(SharedConfig.pendingAppUpdate.document, true); + show = path.exists(); + } else { + show = false; + } + if (show) { + if (updateLayout.getTag() != null) { + return; + } + if (updateLayoutAnimator != null) { + updateLayoutAnimator.cancel(); + } + updateLayout.setVisibility(View.VISIBLE); + updateLayout.setTag(1); + if (animated) { + updateLayoutAnimator = new AnimatorSet(); + updateLayoutAnimator.setDuration(180); + updateLayoutAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT); + updateLayoutAnimator.playTogether(ObjectAnimator.ofFloat(updateLayout, View.TRANSLATION_Y, 0)); + updateLayoutAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + updateLayoutAnimator = null; + } + }); + updateLayoutAnimator.start(); + } else { + updateLayout.setTranslationY(0); + } + } else { + if (updateLayout.getTag() == null) { + return; + } + updateLayout.setTag(null); + if (animated) { + updateLayoutAnimator = new AnimatorSet(); + updateLayoutAnimator.setDuration(180); + updateLayoutAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT); + updateLayoutAnimator.playTogether(ObjectAnimator.ofFloat(updateLayout, View.TRANSLATION_Y, AndroidUtilities.dp(48))); + 
updateLayoutAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (updateLayout.getTag() == null) { + updateLayout.setVisibility(View.INVISIBLE); + } + updateLayoutAnimator = null; + } + }); + updateLayoutAnimator.start(); + } else { + updateLayout.setTranslationY(AndroidUtilities.dp(48)); + updateLayout.setVisibility(View.INVISIBLE); + } + } + } + private void updateContextViewPosition() { float filtersTabsHeight = 0; if (filterTabsView != null && filterTabsView.getVisibility() != View.GONE) { @@ -3467,13 +3609,14 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } } - private void updateFiltersView(boolean showMediaFilters, ArrayList users, ArrayList dates, boolean animated) { + private void updateFiltersView(boolean showMediaFilters, ArrayList users, ArrayList dates, boolean archive, boolean animated) { if (!searchIsShowed || onlySelect) { return; } boolean hasMediaFilter = false; boolean hasUserFilter = false; - boolean hasDataFilter = false; + boolean hasDateFilter = false; + boolean hasArchiveFilter = false; ArrayList currentSearchFilters = searchViewPager.getCurrentSearchFilters(); for (int i = 0; i < currentSearchFilters.size(); i++) { @@ -3482,24 +3625,31 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
} else if (currentSearchFilters.get(i).filterType == FiltersView.FILTER_TYPE_CHAT) { hasUserFilter = true; } else if (currentSearchFilters.get(i).filterType == FiltersView.FILTER_TYPE_DATE) { - hasDataFilter = true; + hasDateFilter = true; + } else if (currentSearchFilters.get(i).filterType == FiltersView.FILTER_TYPE_ARCHIVE) { + hasArchiveFilter = true; } } + if (hasArchiveFilter) { + archive = false; + } + boolean visible = false; - boolean hasUsersOrDates = (users != null && !users.isEmpty()) || (dates != null && !dates.isEmpty()); + boolean hasUsersOrDates = (users != null && !users.isEmpty()) || (dates != null && !dates.isEmpty() || archive); if (!hasMediaFilter && !hasUsersOrDates && showMediaFilters) { + } else if (hasUsersOrDates) { ArrayList finalUsers = (users != null && !users.isEmpty() && !hasUserFilter) ? users : null; - ArrayList finalDates = (dates != null && !dates.isEmpty() && !hasDataFilter) ? dates : null; - if (finalUsers != null || finalDates != null) { + ArrayList finalDates = (dates != null && !dates.isEmpty() && !hasDateFilter) ? dates : null; + if (finalUsers != null || finalDates != null || archive) { visible = true; - filtersView.setUsersAndDates(finalUsers, finalDates); + filtersView.setUsersAndDates(finalUsers, finalDates, archive); } } if (!visible) { - filtersView.setUsersAndDates(null, null); + filtersView.setUsersAndDates(null, null, false); } if (!animated) { filtersView.getAdapter().notifyDataSetChanged(); @@ -3526,14 +3676,14 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
currentSearchFilters.add(filter); actionBar.setSearchFilter(filter); actionBar.setSearchFieldText(""); - updateFiltersView(true, null, null, true); + updateFiltersView(true, null, null, false, true); } - private void createActionMode() { - if (actionBar.actionModeIsExist(null)) { + private void createActionMode(String tag) { + if (actionBar.actionModeIsExist(tag)) { return; } - final ActionBarMenu actionMode = actionBar.createActionMode(); + final ActionBarMenu actionMode = actionBar.createActionMode(false, tag); actionMode.setBackground(null); selectedDialogsCountTextView = new NumberTextView(actionMode.getContext()); @@ -3562,116 +3712,118 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. actionModeViews.add(deleteItem); actionModeViews.add(otherItem); - actionBar.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { - @Override - public void onItemClick(int id) { - if (id == SearchViewPager.forwardItemId || id == SearchViewPager.gotoItemId && searchViewPager != null) { - searchViewPager.onActionBarItemClick(id); - return; - } - if (id == -1) { - if (filterTabsView != null && filterTabsView.isEditing()) { - filterTabsView.setIsEditing(false); - showDoneItem(false); - } else if (actionBar.isActionModeShowed()) { - if (searchViewPager != null && searchViewPager.getVisibility() == View.VISIBLE) { - searchViewPager.hideActionMode(); - } else { - hideActionMode(true); - } - } else if (onlySelect || folderId != 0) { - finishFragment(); - } else if (parentLayout != null) { - parentLayout.getDrawerLayoutContainer().openDrawer(false); - } - } else if (id == 1) { - SharedConfig.appLocked = !SharedConfig.appLocked; - SharedConfig.saveConfig(); - updatePasscodeButton(true); - } else if (id == 2) { - presentFragment(new ProxyListActivity()); - } else if (id >= 10 && id < 10 + UserConfig.MAX_ACCOUNT_COUNT) { - if (getParentActivity() == null) { + if (tag == null) { + actionBar.setActionBarMenuOnItemClick(new 
ActionBar.ActionBarMenuOnItemClick() { + @Override + public void onItemClick(int id) { + if (id == SearchViewPager.forwardItemId || id == SearchViewPager.gotoItemId && searchViewPager != null) { + searchViewPager.onActionBarItemClick(id); return; } - DialogsActivityDelegate oldDelegate = delegate; - LaunchActivity launchActivity = (LaunchActivity) getParentActivity(); - launchActivity.switchToAccount(id - 10, true); + if (id == -1) { + if (filterTabsView != null && filterTabsView.isEditing()) { + filterTabsView.setIsEditing(false); + showDoneItem(false); + } else if (actionBar.isActionModeShowed()) { + if (searchViewPager != null && searchViewPager.getVisibility() == View.VISIBLE && searchViewPager.actionModeShowing()) { + searchViewPager.hideActionMode(); + } else { + hideActionMode(true); + } + } else if (onlySelect || folderId != 0) { + finishFragment(); + } else if (parentLayout != null) { + parentLayout.getDrawerLayoutContainer().openDrawer(false); + } + } else if (id == 1) { + SharedConfig.appLocked = !SharedConfig.appLocked; + SharedConfig.saveConfig(); + updatePasscodeButton(true); + } else if (id == 2) { + presentFragment(new ProxyListActivity()); + } else if (id >= 10 && id < 10 + UserConfig.MAX_ACCOUNT_COUNT) { + if (getParentActivity() == null) { + return; + } + DialogsActivityDelegate oldDelegate = delegate; + LaunchActivity launchActivity = (LaunchActivity) getParentActivity(); + launchActivity.switchToAccount(id - 10, true); + + DialogsActivity dialogsActivity = new DialogsActivity(arguments); + dialogsActivity.setDelegate(oldDelegate); + launchActivity.presentFragment(dialogsActivity, false, true); + } else if (id == add_to_folder) { + FiltersListBottomSheet sheet = new FiltersListBottomSheet(DialogsActivity.this, selectedDialogs); + sheet.setDelegate(filter -> { + ArrayList alwaysShow = FiltersListBottomSheet.getDialogsCount(DialogsActivity.this, filter, selectedDialogs, true, false); + int currentCount; + if (filter != null) { + currentCount = 
filter.alwaysShow.size(); + } else { + currentCount = 0; + } + if (currentCount + alwaysShow.size() > 100) { + showDialog(AlertsCreator.createSimpleAlert(getParentActivity(), LocaleController.getString("FilterAddToAlertFullTitle", R.string.FilterAddToAlertFullTitle), LocaleController.getString("FilterRemoveFromAlertFullText", R.string.FilterRemoveFromAlertFullText)).create()); + return; + } + if (filter != null) { + if (!alwaysShow.isEmpty()) { + for (int a = 0; a < alwaysShow.size(); a++) { + filter.neverShow.remove(alwaysShow.get(a)); + } + filter.alwaysShow.addAll(alwaysShow); + FilterCreateActivity.saveFilterToServer(filter, filter.flags, filter.name, filter.alwaysShow, filter.neverShow, filter.pinnedDialogs, false, false, true, true, false, DialogsActivity.this, null); + } + long did; + if (alwaysShow.size() == 1) { + did = alwaysShow.get(0); + } else { + did = 0; + } + getUndoView().showWithAction(did, UndoView.ACTION_ADDED_TO_FOLDER, alwaysShow.size(), filter, null, null); + } else { + presentFragment(new FilterCreateActivity(null, alwaysShow)); + } + hideActionMode(true); + }); + showDialog(sheet); + } else if (id == remove_from_folder) { + MessagesController.DialogFilter filter = getMessagesController().dialogFilters.get(viewPages[0].selectedType); + ArrayList neverShow = FiltersListBottomSheet.getDialogsCount(DialogsActivity.this, filter, selectedDialogs, false, false); - DialogsActivity dialogsActivity = new DialogsActivity(arguments); - dialogsActivity.setDelegate(oldDelegate); - launchActivity.presentFragment(dialogsActivity, false, true); - } else if (id == add_to_folder) { - FiltersListBottomSheet sheet = new FiltersListBottomSheet(DialogsActivity.this, selectedDialogs); - sheet.setDelegate(filter -> { - ArrayList alwaysShow = FiltersListBottomSheet.getDialogsCount(DialogsActivity.this, filter, selectedDialogs, true, false); int currentCount; if (filter != null) { - currentCount = filter.alwaysShow.size(); + currentCount = filter.neverShow.size(); } 
else { currentCount = 0; } - if (currentCount + alwaysShow.size() > 100) { - showDialog(AlertsCreator.createSimpleAlert(getParentActivity(), LocaleController.getString("FilterAddToAlertFullTitle", R.string.FilterAddToAlertFullTitle), LocaleController.getString("FilterRemoveFromAlertFullText", R.string.FilterRemoveFromAlertFullText)).create()); + if (currentCount + neverShow.size() > 100) { + showDialog(AlertsCreator.createSimpleAlert(getParentActivity(), LocaleController.getString("FilterAddToAlertFullTitle", R.string.FilterAddToAlertFullTitle), LocaleController.getString("FilterAddToAlertFullText", R.string.FilterAddToAlertFullText)).create()); return; } - if (filter != null) { - if (!alwaysShow.isEmpty()) { - for (int a = 0; a < alwaysShow.size(); a++) { - filter.neverShow.remove(alwaysShow.get(a)); - } - filter.alwaysShow.addAll(alwaysShow); - FilterCreateActivity.saveFilterToServer(filter, filter.flags, filter.name, filter.alwaysShow, filter.neverShow, filter.pinnedDialogs, false, false, true, true, false, DialogsActivity.this, null); + if (!neverShow.isEmpty()) { + filter.neverShow.addAll(neverShow); + for (int a = 0; a < neverShow.size(); a++) { + Integer did = neverShow.get(a); + filter.alwaysShow.remove(did); + filter.pinnedDialogs.remove((long) did); } - long did; - if (alwaysShow.size() == 1) { - did = alwaysShow.get(0); - } else { - did = 0; - } - getUndoView().showWithAction(did, UndoView.ACTION_ADDED_TO_FOLDER, alwaysShow.size(), filter, null, null); + FilterCreateActivity.saveFilterToServer(filter, filter.flags, filter.name, filter.alwaysShow, filter.neverShow, filter.pinnedDialogs, false, false, true, false, false, DialogsActivity.this, null); + } + long did; + if (neverShow.size() == 1) { + did = neverShow.get(0); } else { - presentFragment(new FilterCreateActivity(null, alwaysShow)); + did = 0; } - hideActionMode(true); - }); - showDialog(sheet); - } else if (id == remove_from_folder) { - MessagesController.DialogFilter filter = 
getMessagesController().dialogFilters.get(viewPages[0].selectedType); - ArrayList neverShow = FiltersListBottomSheet.getDialogsCount(DialogsActivity.this, filter, selectedDialogs, false, false); - - int currentCount; - if (filter != null) { - currentCount = filter.neverShow.size(); - } else { - currentCount = 0; + getUndoView().showWithAction(did, UndoView.ACTION_REMOVED_FROM_FOLDER, neverShow.size(), filter, null, null); + hideActionMode(false); + } else if (id == pin || id == read || id == delete || id == clear || id == mute || id == archive || id == block || id == archive2 || id == pin2) { + perfromSelectedDialogsAction(selectedDialogs, id, true); } - if (currentCount + neverShow.size() > 100) { - showDialog(AlertsCreator.createSimpleAlert(getParentActivity(), LocaleController.getString("FilterAddToAlertFullTitle", R.string.FilterAddToAlertFullTitle), LocaleController.getString("FilterAddToAlertFullText", R.string.FilterAddToAlertFullText)).create()); - return; - } - if (!neverShow.isEmpty()) { - filter.neverShow.addAll(neverShow); - for (int a = 0; a < neverShow.size(); a++) { - Integer did = neverShow.get(a); - filter.alwaysShow.remove(did); - filter.pinnedDialogs.remove((long) did); - } - FilterCreateActivity.saveFilterToServer(filter, filter.flags, filter.name, filter.alwaysShow, filter.neverShow, filter.pinnedDialogs, false, false, true, false, false, DialogsActivity.this, null); - } - long did; - if (neverShow.size() == 1) { - did = neverShow.get(0); - } else { - did = 0; - } - getUndoView().showWithAction(did, UndoView.ACTION_REMOVED_FROM_FOLDER, neverShow.size(), filter, null, null); - hideActionMode(false); - } else if (id == pin || id == read || id == delete || id == clear || id == mute || id == archive || id == block || id == archive2 || id == pin2) { - perfromSelectedDialogsAction(selectedDialogs, id, true); } - } - }); + }); + } } private void switchToCurrentSelectedMode(boolean animated) { @@ -3890,7 +4042,7 @@ public class DialogsActivity extends 
BaseFragment implements NotificationCenter. if (activity != null) { checkPermission = false; boolean hasNotContactsPermission = activity.checkSelfPermission(Manifest.permission.READ_CONTACTS) != PackageManager.PERMISSION_GRANTED; - boolean hasNotStoragePermission = activity.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED; + boolean hasNotStoragePermission = (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && activity.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED; if (hasNotContactsPermission || hasNotStoragePermission) { askingForPermissions = true; if (hasNotContactsPermission && askAboutContacts && getUserConfig().syncContacts && activity.shouldShowRequestPermissionRationale(Manifest.permission.READ_CONTACTS)) { @@ -4027,6 +4179,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } else if (actionBar != null && actionBar.isActionModeShowed()) { if (searchViewPager.getVisibility() == View.VISIBLE) { searchViewPager.hideActionMode(); + hideActionMode(true); } else { hideActionMode(true); } @@ -4094,12 +4247,16 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. selectedDialogs.remove(did); if (cell instanceof DialogCell) { ((DialogCell) cell).setChecked(false, true); + } else if (cell instanceof ProfileSearchCell) { + ((ProfileSearchCell) cell).setChecked(false, true); } return false; } else { selectedDialogs.add(did); if (cell instanceof DialogCell) { ((DialogCell) cell).setChecked(true, true); + } else if (cell instanceof ProfileSearchCell) { + ((ProfileSearchCell) cell).setChecked(true, true); } return true; } @@ -4172,8 +4329,14 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
} searchViewPager.setKeyboardHeight(((ContentView)fragmentView).getKeyboardHeight()); parentLayout.getDrawerLayoutContainer().setAllowOpenDrawerBySwipe(true); + + searchViewPager.clear(); + if (folderId != 0) { + FiltersView.MediaFilterData filterData = new FiltersView.MediaFilterData(R.drawable.chats_archive, R.drawable.chats_archive, LocaleController.getString("ArchiveSearchFilter", R.string.ArchiveSearchFilter), null, FiltersView.FILTER_TYPE_ARCHIVE); + addSearchFilter(filterData); + } } else { - if (filterTabsView != null) { + if (filterTabsView != null && parentLayout != null) { parentLayout.getDrawerLayoutContainer().setAllowOpenDrawerBySwipe(viewPages[0].selectedType == filterTabsView.getFirstTabId() || SharedConfig.getChatSwipeAction(currentAccount) != SwipeGestureSettingsView.SWIPE_GESTURE_FOLDERS); } } @@ -4190,7 +4353,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (show) { searchViewPager.setVisibility(View.VISIBLE); searchViewPager.reset(); - updateFiltersView(true, null, null,false); + updateFiltersView(true, null, null, false, false); if (searchTabsView != null) { searchTabsView.hide(false, false); searchTabsView.setVisibility(View.VISIBLE); @@ -4532,7 +4695,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } else if (object instanceof TLRPC.Dialog) { TLRPC.Dialog dialog = (TLRPC.Dialog) object; if (dialog instanceof TLRPC.TL_dialogFolder) { - if (actionBar.isActionModeShowed()) { + if (actionBar.isActionModeShowed(null)) { return; } TLRPC.TL_dialogFolder dialogFolder = (TLRPC.TL_dialogFolder) dialog; @@ -4542,8 +4705,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
return; } dialogId = dialog.id; - if (actionBar.isActionModeShowed()) { - showOrUpdateActionMode(dialog, view); + if (actionBar.isActionModeShowed(null)) { + showOrUpdateActionMode(dialogId, view); return; } } else if (object instanceof TLRPC.TL_recentMeUrlChat) { @@ -4616,6 +4779,13 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. presentFragment(activity); } } + + if (dialogId != 0 && actionBar.isActionModeShowed()) { + if (actionBar.isActionModeShowed(ACTION_MODE_SEARCH_DIALOGS_TAG) && message_id == 0 && !isGlobalSearch) { + showOrUpdateActionMode(dialogId, view); + } + return; + } } if (dialogId == 0) { @@ -4746,18 +4916,32 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } return true; } + } + TLRPC.Dialog dialog; + if (adapter == searchViewPager.dialogsSearchAdapter) { + long dialogId = 0; + if (view instanceof ProfileSearchCell && !searchViewPager.dialogsSearchAdapter.isGlobalSearch(position)) { + dialogId = ((ProfileSearchCell) view).getDialogId(); + } + if (dialogId != 0) { + showOrUpdateActionMode(dialogId, view); + return true; + } + return false; + } else { + DialogsAdapter dialogsAdapter = (DialogsAdapter) adapter; + ArrayList dialogs = getDialogsArray(currentAccount, dialogsType, folderId, dialogsListFrozen); + position = dialogsAdapter.fixPosition(position); + if (position < 0 || position >= dialogs.size()) { + return false; + } + dialog = dialogs.get(position); + } + + if (dialog == null) { return false; } - if (actionBar.isSearchFieldVisible()) { - return false; - } - DialogsAdapter dialogsAdapter = (DialogsAdapter) adapter; - ArrayList dialogs = getDialogsArray(currentAccount, dialogsType, folderId, dialogsListFrozen); - position = dialogsAdapter.fixPosition(position); - if (position < 0 || position >= dialogs.size()) { - return false; - } - final TLRPC.Dialog dialog = dialogs.get(position); + if (onlySelect) { if (initialDialogsType != 3 && initialDialogsType != 10) { return 
false; @@ -4804,7 +4988,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (actionBar.isActionModeShowed() && isDialogPinned(dialog)) { return false; } - showOrUpdateActionMode(dialog, view); + showOrUpdateActionMode(dialog.id, view); } return true; } @@ -4851,7 +5035,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } private void updateFloatingButtonOffset() { - floatingButtonContainer.setTranslationY(floatingButtonTranslation - additionalFloatingTranslation * (1f - floatingButtonHideProgress)); + floatingButtonContainer.setTranslationY(floatingButtonTranslation - Math.max(additionalFloatingTranslation, additionalFloatingTranslation2) * (1f - floatingButtonHideProgress)); } private boolean hasHiddenArchive() { @@ -4998,7 +5182,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private int getPinnedCount() { int pinnedCount = 0; ArrayList dialogs; - if (viewPages[0].dialogsType == 7 || viewPages[0].dialogsType == 8) { + boolean containsFilter = (viewPages[0].dialogsType == 7 || viewPages[0].dialogsType == 8) && (!actionBar.isActionModeShowed() || actionBar.isActionModeShowed(null)); + if (containsFilter) { dialogs = getDialogsArray(currentAccount, viewPages[0].dialogsType, folderId, dialogsListFrozen); } else { dialogs = getMessagesController().getDialogs(folderId); @@ -5020,7 +5205,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. private boolean isDialogPinned(TLRPC.Dialog dialog) { MessagesController.DialogFilter filter; - if (viewPages[0].dialogsType == 7 || viewPages[0].dialogsType == 8) { + boolean containsFilter = (viewPages[0].dialogsType == 7 || viewPages[0].dialogsType == 8) && (!actionBar.isActionModeShowed() || actionBar.isActionModeShowed(null)); + if (containsFilter) { filter = getMessagesController().selectedDialogFilter[viewPages[0].dialogsType == 8 ? 
1 : 0]; } else { filter = null; @@ -5036,12 +5222,14 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. return; } MessagesController.DialogFilter filter; - if (viewPages[0].dialogsType == 7 || viewPages[0].dialogsType == 8) { + boolean containsFilter = (viewPages[0].dialogsType == 7 || viewPages[0].dialogsType == 8) && (!actionBar.isActionModeShowed() || actionBar.isActionModeShowed(null)); + if (containsFilter) { filter = getMessagesController().selectedDialogFilter[viewPages[0].dialogsType == 8 ? 1 : 0]; } else { filter = null; } int count = selectedDialogs.size(); + int pinedActionCount = 0; if (action == archive || action == archive2) { ArrayList copy = new ArrayList<>(selectedDialogs); getMessagesController().addDialogToFolder(copy, canUnarchiveCount == 0 ? 1 : 0, -1, null, 0); @@ -5108,7 +5296,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } } int maxPinnedCount; - if (viewPages[0].dialogsType == 7 || viewPages[0].dialogsType == 8) { + if (containsFilter) { maxPinnedCount = 100 - filter.alwaysShow.size(); } else if (folderId != 0 || filter != null) { maxPinnedCount = getMessagesController().maxFolderPinnedDialogsCount; @@ -5237,6 +5425,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (isDialogPinned(dialog)) { continue; } + pinedActionCount++; pinDialog(selectedDialog, true, filter, minPinnedNum,count == 1); if (filter != null) { minPinnedNum++; @@ -5254,6 +5443,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (!isDialogPinned(dialog)) { continue; } + pinedActionCount++; pinDialog(selectedDialog, false, filter, minPinnedNum,count == 1); } @@ -5428,6 +5618,9 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } else { getMessagesController().reorderPinnedDialogs(folderId, null, 0); } + if (searchIsShowed) { + getUndoView().showWithAction(0, canPinCount != 0 ? 
UndoView.ACTION_PIN_DIALOGS : UndoView.ACTION_UNPIN_DIALOGS, pinedActionCount); + } } if (scrollToTop) { if (initialDialogsType != 10) { @@ -5657,6 +5850,38 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. canDeleteCount++; } } +// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { +// TransitionSet transition = new TransitionSet(); +// transition.addTransition(new Visibility() { +// @Override +// public Animator onAppear(ViewGroup sceneRoot, View view, TransitionValues startValues, TransitionValues endValues) { +// AnimatorSet set = new AnimatorSet(); +// set.playTogether( +// ObjectAnimator.ofFloat(view, View.ALPHA, 0, 1f), +// ObjectAnimator.ofFloat(view, View.SCALE_X, 0.5f, 1f), +// ObjectAnimator.ofFloat(view, View.SCALE_Y, 0.5f, 1f) +// ); +// set.setInterpolator(CubicBezierInterpolator.DEFAULT); +// return set; +// } +// +// @Override +// public Animator onDisappear(ViewGroup sceneRoot, View view, TransitionValues startValues, TransitionValues endValues) { +// AnimatorSet set = new AnimatorSet(); +// set.playTogether( +// ObjectAnimator.ofFloat(view, View.ALPHA, view.getAlpha(), 0f), +// ObjectAnimator.ofFloat(view, View.SCALE_X, view.getScaleX(), 0.5f), +// ObjectAnimator.ofFloat(view, View.SCALE_Y, view.getScaleX(), 0.5f) +// ); +// set.setInterpolator(CubicBezierInterpolator.DEFAULT); +// return set; +// } +// }).addTransition(new ChangeBounds()); +// transition.setOrdering(TransitionSet.ORDERING_TOGETHER); +// transition.setInterpolator(CubicBezierInterpolator.EASE_OUT); +// transition.setDuration(150); +// TransitionManager.beginDelayedTransition(actionBar.getActionMode(), transition); +// } if (canDeleteCount != count) { deleteItem.setVisibility(View.GONE); } else { @@ -5766,8 +5991,8 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
return true; } - private void showOrUpdateActionMode(TLRPC.Dialog dialog, View cell) { - addOrRemoveSelectedDialog(dialog.id, cell); + private void showOrUpdateActionMode(long dialogId, View cell) { + addOrRemoveSelectedDialog(dialogId, cell); boolean updateAnimated = false; if (actionBar.isActionModeShowed()) { if (selectedDialogs.isEmpty()) { @@ -5776,7 +6001,15 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } updateAnimated = true; } else { - createActionMode(); + if (searchIsShowed) { + createActionMode(ACTION_MODE_SEARCH_DIALOGS_TAG); + if (actionBar.getBackButton().getDrawable() instanceof MenuDrawable) { + actionBar.setBackButtonDrawable(new BackDrawable(false)); + } + } else { + createActionMode(null); + } + AndroidUtilities.hideKeyboard(fragmentView.findFocus()); actionBar.setActionModeOverrideColor(Theme.getColor(Theme.key_windowBackgroundWhite)); actionBar.showActionMode(); resetScroll(); @@ -5792,17 +6025,19 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
updateVisibleRows(MessagesController.UPDATE_MASK_REORDER); } - AnimatorSet animatorSet = new AnimatorSet(); - ArrayList animators = new ArrayList<>(); - for (int a = 0; a < actionModeViews.size(); a++) { - View view = actionModeViews.get(a); - view.setPivotY(ActionBar.getCurrentActionBarHeight() / 2); - AndroidUtilities.clearDrawableAnimation(view); - animators.add(ObjectAnimator.ofFloat(view, View.SCALE_Y, 0.1f, 1.0f)); + if (!searchIsShowed) { + AnimatorSet animatorSet = new AnimatorSet(); + ArrayList animators = new ArrayList<>(); + for (int a = 0; a < actionModeViews.size(); a++) { + View view = actionModeViews.get(a); + view.setPivotY(ActionBar.getCurrentActionBarHeight() / 2); + AndroidUtilities.clearDrawableAnimation(view); + animators.add(ObjectAnimator.ofFloat(view, View.SCALE_Y, 0.1f, 1.0f)); + } + animatorSet.playTogether(animators); + animatorSet.setDuration(200); + animatorSet.start(); } - animatorSet.playTogether(animators); - animatorSet.setDuration(200); - animatorSet.start(); if (actionBarColorAnimator != null) { actionBarColorAnimator.cancel(); @@ -6026,7 +6261,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. permissons.add(Manifest.permission.WRITE_CONTACTS); permissons.add(Manifest.permission.GET_ACCOUNTS); } - if (activity.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if ((Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && activity.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { permissons.add(Manifest.permission.READ_EXTERNAL_STORAGE); permissons.add(Manifest.permission.WRITE_EXTERNAL_STORAGE); } @@ -6151,7 +6386,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
if (filterTabsView != null && filterTabsView.getVisibility() == View.VISIBLE) { filterTabsView.notifyTabCounterChanged(Integer.MAX_VALUE); } - } else if (id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.emojiLoaded) { updateVisibleRows(0); if (filterTabsView != null) { filterTabsView.getTabsContainer().invalidateViews(); @@ -6286,9 +6521,44 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. viewPages[a].dialogsAdapter.didDatabaseCleared(); } } + } else if (id == NotificationCenter.appUpdateAvailable) { + updateMenuButton(true); + } else if (id == NotificationCenter.fileLoaded || id == NotificationCenter.fileLoadFailed || id == NotificationCenter.fileLoadProgressChanged) { + String name = (String) args[0]; + if (SharedConfig.isAppUpdateAvailable()) { + String fileName = FileLoader.getAttachFileName(SharedConfig.pendingAppUpdate.document); + if (fileName.equals(name)) { + updateMenuButton(true); + } + } } } + private void updateMenuButton(boolean animated) { + if (menuDrawable == null || updateLayout == null) { + return; + } + int type; + float downloadProgress; + if (SharedConfig.isAppUpdateAvailable()) { + String fileName = FileLoader.getAttachFileName(SharedConfig.pendingAppUpdate.document); + if (getFileLoader().isLoadingFile(fileName)) { + type = MenuDrawable.TYPE_UDPATE_DOWNLOADING; + Float p = ImageLoader.getInstance().getFileProgress(fileName); + downloadProgress = p != null ? 
p : 0.0f; + } else { + type = MenuDrawable.TYPE_UDPATE_AVAILABLE; + downloadProgress = 0.0f; + } + } else { + type = MenuDrawable.TYPE_DEFAULT; + downloadProgress = 0.0f; + } + updateAppUpdateViews(animated); + menuDrawable.setType(type, animated); + menuDrawable.setUpdateDownloadProgress(downloadProgress, animated); + } + private String showingSuggestion; private void showNextSupportedSuggestion() { if (showingSuggestion != null) { @@ -6397,6 +6667,13 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. sideMenu.setGlowColor(Theme.getColor(Theme.key_chats_menuBackground)); } + Runnable hapticLockRunnable = new Runnable() { + @Override + public void run() { + passcodeItem.getIconView().performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + }; + private void updatePasscodeButton(boolean animated) { if (passcodeItem == null) { return; @@ -6404,6 +6681,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (isPaused) { animated = false; } + AndroidUtilities.cancelRunOnUIThread(hapticLockRunnable); if (SharedConfig.passcodeHash.length() != 0 && !searching) { if (doneItem == null || doneItem.getVisibility() != View.VISIBLE) { passcodeItem.setVisibility(View.VISIBLE); @@ -6415,6 +6693,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. if (animated) { passcodeDrawable2.setCurrentFrame(0, false); passcodeItem.getIconView().playAnimation(); + AndroidUtilities.runOnUIThread(hapticLockRunnable, 350); } else { passcodeDrawable2.setCurrentFrame(38, false); } @@ -6533,15 +6812,21 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
} } } - if (dialogsListFrozen) { - continue; - } + if (child instanceof UserCell) { ((UserCell) child).update(mask); } else if (child instanceof ProfileSearchCell) { - ((ProfileSearchCell) child).update(mask); - } else if (child instanceof RecyclerListView) { + ProfileSearchCell cell = (ProfileSearchCell) child; + cell.update(mask); + if (selectedDialogs != null) { + cell.setChecked(selectedDialogs.contains(cell.getDialogId()), false); + } + } + if (dialogsListFrozen) { + continue; + } + if (child instanceof RecyclerListView) { RecyclerListView innerListView = (RecyclerListView) child; int count2 = innerListView.getChildCount(); for (int b = 0; b < count2; b++) { @@ -7370,5 +7655,19 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } setSlideTransitionProgress(1f - progress); } + + public void setShowSearch(String query, int i) { + if (!searching) { + initialSearchType = i; + actionBar.openSearchField(query, false); + } else { + if (!searchItem.getSearchField().getText().toString().equals(query)) { + searchItem.getSearchField().setText(query); + } + if (searchViewPager.getTabsView().getCurrentTabId() != i) { + searchViewPager.getTabsView().scrollToTab(i, i); + } + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ExternalActionActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ExternalActionActivity.java index 4402c3401..c2691bbe5 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ExternalActionActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ExternalActionActivity.java @@ -11,8 +11,6 @@ package org.telegram.ui; import android.app.Activity; import android.content.Intent; import android.content.res.Configuration; -import android.graphics.Shader; -import android.graphics.drawable.BitmapDrawable; import android.os.Build; import android.os.Bundle; import android.os.SystemClock; @@ -46,6 +44,7 @@ import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.AlertsCreator; import 
org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.PasscodeView; +import org.telegram.ui.Components.SizeNotifierFrameLayout; import java.util.ArrayList; @@ -58,7 +57,7 @@ public class ExternalActionActivity extends Activity implements ActionBarLayout. private PasscodeView passcodeView; protected ActionBarLayout actionBarLayout; protected ActionBarLayout layersActionBarLayout; - protected View backgroundTablet; + protected SizeNotifierFrameLayout backgroundTablet; protected DrawerLayoutContainer drawerLayoutContainer; private Intent passcodeSaveIntent; @@ -110,10 +109,14 @@ public class ExternalActionActivity extends Activity implements ActionBarLayout. layoutParams1.height = LayoutHelper.MATCH_PARENT; launchLayout.setLayoutParams(layoutParams1); - backgroundTablet = new View(this); - BitmapDrawable drawable = (BitmapDrawable) getResources().getDrawable(R.drawable.catstile); - drawable.setTileModeXY(Shader.TileMode.REPEAT, Shader.TileMode.REPEAT); - backgroundTablet.setBackgroundDrawable(drawable); + backgroundTablet = new SizeNotifierFrameLayout(this) { + @Override + protected boolean isActionBarVisible() { + return false; + } + }; + backgroundTablet.setOccupyStatusBar(false); + backgroundTablet.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); launchLayout.addView(backgroundTablet, LayoutHelper.createRelative(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); launchLayout.addView(actionBarLayout, LayoutHelper.createRelative(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); @@ -163,10 +166,14 @@ public class ExternalActionActivity extends Activity implements ActionBarLayout. 
RelativeLayout launchLayout = new RelativeLayout(this); drawerLayoutContainer.addView(launchLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - backgroundTablet = new View(this); - BitmapDrawable drawable = (BitmapDrawable) getResources().getDrawable(R.drawable.catstile); - drawable.setTileModeXY(Shader.TileMode.REPEAT, Shader.TileMode.REPEAT); - backgroundTablet.setBackgroundDrawable(drawable); + backgroundTablet = new SizeNotifierFrameLayout(this) { + @Override + protected boolean isActionBarVisible() { + return false; + } + }; + backgroundTablet.setOccupyStatusBar(false); + backgroundTablet.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); launchLayout.addView(backgroundTablet, LayoutHelper.createRelative(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); launchLayout.addView(actionBarLayout, LayoutHelper.createRelative(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java b/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java index 9dd811e2b..8327ae432 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/FilteredSearchView.java @@ -99,13 +99,13 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente long currentSearchMaxDate; long currentSearchMinDate; String currentSearchString; + boolean currentIncludeFolder; Activity parentActivity; BaseFragment parentFragment; private boolean isLoading; private boolean endReached; private int totalCount; - private boolean wasIsEmpty; private int requestIndex; private String currentDataQuery; @@ -126,6 +126,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente ArrayList localTipChats = new ArrayList<>(); ArrayList localTipDates = new ArrayList<>(); + boolean localTipArchive; Runnable clearCurrentResultsRunnable = new 
Runnable() { @Override @@ -151,7 +152,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente @Override public boolean loadMore() { if (!endReached) { - search(currentSearchDialogId, currentSearchMinDate, currentSearchMaxDate, currentSearchFilter, lastMessagesSearchString, false); + search(currentSearchDialogId, currentSearchMinDate, currentSearchMaxDate, currentSearchFilter, currentIncludeFolder, lastMessagesSearchString, false); } return true; } @@ -190,7 +191,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente } else if (view instanceof ContextLinkCell) { ContextLinkCell cell = (ContextLinkCell) view; MessageObject message = (MessageObject) cell.getParentObject(); - if (message != null && messageObject != null && message.getId() == messageObject.getId()) { + if (message != null && message.getId() == messageObject.getId()) { imageReceiver = cell.getPhotoImage(); cell.getLocationInWindow(coords); } @@ -372,7 +373,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente int visibleItemCount = Math.abs(lastVisibleItem - firstVisibleItem) + 1; int totalItemCount = recyclerView.getAdapter().getItemCount(); if (!isLoading && visibleItemCount > 0 && lastVisibleItem >= totalItemCount - 10 && !endReached) { - search(currentSearchDialogId, currentSearchMinDate, currentSearchMaxDate, currentSearchFilter, lastMessagesSearchString, false); + search(currentSearchDialogId, currentSearchMinDate, currentSearchMaxDate, currentSearchFilter, currentIncludeFolder, lastMessagesSearchString, false); } if (adapter == sharedPhotoVideoAdapter) { @@ -400,7 +401,6 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente floatingDateView.setTranslationY(-AndroidUtilities.dp(48)); addView(floatingDateView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 0, 4, 0, 0)); - dialogsAdapter = new 
OnlyUserFiltersAdapter(); sharedPhotoVideoAdapter = new SharedPhotoVideoAdapter(getContext()); sharedDocumentsAdapter = new SharedDocumentsAdapter(getContext(), 1); @@ -444,16 +444,16 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente return fromName == null ? "" : fromName; } - public void search(int dialogId, long minDate, long maxDate, FiltersView.MediaFilterData currentSearchFilter, String query, boolean clearOldResults) { - String currentSearchFilterQueryString = String.format(Locale.ENGLISH, "%d%d%d%d%s", dialogId, minDate, maxDate, currentSearchFilter == null ? -1 : currentSearchFilter.filterType, query); + public void search(int dialogId, long minDate, long maxDate, FiltersView.MediaFilterData currentSearchFilter, boolean includeFolder, String query, boolean clearOldResults) { + String currentSearchFilterQueryString = String.format(Locale.ENGLISH, "%d%d%d%d%s%s", dialogId, minDate, maxDate, currentSearchFilter == null ? -1 : currentSearchFilter.filterType, query, includeFolder); boolean filterAndQueryIsSame = lastSearchFilterQueryString != null && lastSearchFilterQueryString.equals(currentSearchFilterQueryString); boolean forceClear = !filterAndQueryIsSame && clearOldResults; - boolean filterIsSame = dialogId == currentSearchDialogId && currentSearchMinDate == minDate && currentSearchMaxDate == maxDate; this.currentSearchFilter = currentSearchFilter; this.currentSearchDialogId = dialogId; this.currentSearchMinDate = minDate; this.currentSearchMaxDate = maxDate; this.currentSearchString = query; + this.currentIncludeFolder = includeFolder; if (searchRunnable != null) { AndroidUtilities.cancelRunOnUIThread(searchRunnable); } @@ -497,14 +497,11 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente localTipDates.clear(); localTipChats.clear(); if (delegate != null) { - delegate.updateFiltersView(false, null, null); + delegate.updateFiltersView(false, null, null, false); } } requestIndex++; final int 
requestId = requestIndex; - - final int folderId = uiCallback.getFolderId(); - int currentAccount = UserConfig.selectedAccount; AndroidUtilities.runOnUIThread(searchRunnable = () -> { @@ -535,7 +532,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente resultArray = new ArrayList<>(); ArrayList resultArrayNames = new ArrayList<>(); ArrayList encUsers = new ArrayList<>(); - MessagesStorage.getInstance(currentAccount).localSearch(0, query, resultArray, resultArrayNames, encUsers, folderId); + MessagesStorage.getInstance(currentAccount).localSearch(0, query, resultArray, resultArrayNames, encUsers, includeFolder ? 1 : 0); } final TLRPC.TL_messages_searchGlobal req = new TLRPC.TL_messages_searchGlobal(); @@ -567,7 +564,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente req.offset_peer = new TLRPC.TL_inputPeerEmpty(); } req.flags |= 1; - req.folder_id = uiCallback.getFolderId(); + req.folder_id = includeFolder ? 1 : 0; request = req; } @@ -637,31 +634,26 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente if (messages.size() > totalCount) { totalCount = messages.size(); } - wasIsEmpty = messages.size() == 0; endReached = messages.size() >= totalCount; if (messages.isEmpty()) { if (currentSearchFilter != null) { if (TextUtils.isEmpty(currentDataQuery) && dialogId == 0 && minDate == 0) { emptyView.title.setText(LocaleController.getString("SearchEmptyViewTitle", R.string.SearchEmptyViewTitle)); - if (dialogId == 0 && minDate == 0) { - String str; - if (currentSearchFilter.filterType == FiltersView.FILTER_TYPE_FILES) { - str = LocaleController.getString("SearchEmptyViewFilteredSubtitleFiles", R.string.SearchEmptyViewFilteredSubtitleFiles); - } else if (currentSearchFilter.filterType == FiltersView.FILTER_TYPE_MEDIA) { - str = LocaleController.getString("SearchEmptyViewFilteredSubtitleMedia", R.string.SearchEmptyViewFilteredSubtitleMedia); - } else if 
(currentSearchFilter.filterType == FiltersView.FILTER_TYPE_LINKS) { - str = LocaleController.getString("SearchEmptyViewFilteredSubtitleLinks", R.string.SearchEmptyViewFilteredSubtitleLinks); - } else if (currentSearchFilter.filterType == FiltersView.FILTER_TYPE_MUSIC) { - str = LocaleController.getString("SearchEmptyViewFilteredSubtitleMusic", R.string.SearchEmptyViewFilteredSubtitleMusic); - } else { - str = LocaleController.getString("SearchEmptyViewFilteredSubtitleVoice", R.string.SearchEmptyViewFilteredSubtitleVoice); - } - emptyView.subtitle.setVisibility(View.VISIBLE); - emptyView.subtitle.setText(str); + String str; + if (currentSearchFilter.filterType == FiltersView.FILTER_TYPE_FILES) { + str = LocaleController.getString("SearchEmptyViewFilteredSubtitleFiles", R.string.SearchEmptyViewFilteredSubtitleFiles); + } else if (currentSearchFilter.filterType == FiltersView.FILTER_TYPE_MEDIA) { + str = LocaleController.getString("SearchEmptyViewFilteredSubtitleMedia", R.string.SearchEmptyViewFilteredSubtitleMedia); + } else if (currentSearchFilter.filterType == FiltersView.FILTER_TYPE_LINKS) { + str = LocaleController.getString("SearchEmptyViewFilteredSubtitleLinks", R.string.SearchEmptyViewFilteredSubtitleLinks); + } else if (currentSearchFilter.filterType == FiltersView.FILTER_TYPE_MUSIC) { + str = LocaleController.getString("SearchEmptyViewFilteredSubtitleMusic", R.string.SearchEmptyViewFilteredSubtitleMusic); } else { - emptyView.subtitle.setVisibility(View.GONE); + str = LocaleController.getString("SearchEmptyViewFilteredSubtitleVoice", R.string.SearchEmptyViewFilteredSubtitleVoice); } + emptyView.subtitle.setVisibility(View.VISIBLE); + emptyView.subtitle.setText(str); } else { emptyView.title.setText(LocaleController.getString("SearchEmptyViewTitle2", R.string.SearchEmptyViewTitle2)); emptyView.subtitle.setVisibility(View.VISIBLE); @@ -723,8 +715,13 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente } localTipDates.clear(); 
localTipDates.addAll(dateData); + localTipArchive = false; + if (query.length() >= 3 && (LocaleController.getString("ArchiveSearchFilter", R.string.ArchiveSearchFilter).toLowerCase().startsWith(query) || + "archive".startsWith(query))) { + localTipArchive = true; + } if (delegate != null) { - delegate.updateFiltersView(TextUtils.isEmpty(currentDataQuery), localTipChats, localTipDates); + delegate.updateFiltersView(TextUtils.isEmpty(currentDataQuery), localTipChats, localTipDates, localTipArchive); } } firstLoading = false; @@ -960,7 +957,6 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente FlickerLoadingView flickerLoadingView = (FlickerLoadingView) holder.itemView; int count = (int) Math.ceil(messages.size() / (float) columnsCount); flickerLoadingView.skipDrawItemsCount(columnsCount - (columnsCount * count - messages.size())); - } } @@ -1110,7 +1106,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { return true; } @@ -1248,7 +1244,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { return section == 0 || row != 0; } @@ -1329,7 +1325,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente params.endReached = endReached; params.nextSearchRate = nextSearchRate; params.totalCount = totalCount; - params.folderId = uiCallback.getFolderId(); + params.folderId = currentIncludeFolder ? 
1 : 0; return MediaController.getInstance().setPlaylist(messages, messageObject, 0, params); } return false; @@ -1447,18 +1443,18 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); - NotificationCenter.getInstance(lastAccount = UserConfig.selectedAccount).addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getInstance(lastAccount = UserConfig.selectedAccount).addObserver(this, NotificationCenter.emojiLoaded); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); - NotificationCenter.getInstance(lastAccount).removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getInstance(lastAccount).removeObserver(this, NotificationCenter.emojiLoaded); } @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { int n = recyclerListView.getChildCount(); for (int i = 0; i < n; i++) { if (recyclerListView.getChildAt(i) instanceof DialogCell) { @@ -1473,7 +1469,9 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente if (!uiCallback.actionModeShowing()) { uiCallback.showActionMode(); } - uiCallback.toggleItemSelection(item, view, a); + if (uiCallback.actionModeShowing()) { + uiCallback.toggleItemSelection(item, view, a); + } return true; } @@ -1613,7 +1611,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente this.delegate = delegate; if (update && delegate != null) { if (!localTipChats.isEmpty()) { - delegate.updateFiltersView(false, localTipChats, localTipDates); + delegate.updateFiltersView(false, localTipChats, localTipDates, localTipArchive); } } } @@ -1623,7 +1621,7 @@ public class FilteredSearchView extends FrameLayout implements NotificationCente } public interface Delegate { - void updateFiltersView(boolean 
showMediaFilters, ArrayList users, ArrayList dates); + void updateFiltersView(boolean showMediaFilters, ArrayList users, ArrayList dates, boolean archive); } public interface UiCallback { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java index 4f3a97469..ad96dfb59 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java @@ -1,5 +1,6 @@ package org.telegram.ui; +import android.Manifest; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; @@ -10,6 +11,7 @@ import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; +import android.content.pm.PackageManager; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; @@ -27,12 +29,16 @@ import android.graphics.RectF; import android.graphics.Shader; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; +import android.graphics.drawable.GradientDrawable; +import android.media.AudioManager; +import android.media.projection.MediaProjectionManager; import android.os.Build; import android.os.Bundle; import android.os.SystemClock; import android.provider.Settings; import android.text.Editable; import android.text.InputType; +import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.text.TextWatcher; import android.util.LongSparseArray; @@ -57,10 +63,12 @@ import android.widget.LinearLayout; import android.widget.ScrollView; import android.widget.TextView; +import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.core.graphics.ColorUtils; import androidx.recyclerview.widget.DefaultItemAnimator; import androidx.recyclerview.widget.DiffUtil; +import 
androidx.recyclerview.widget.GridLayoutManager; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.ListUpdateCallback; import androidx.recyclerview.widget.RecyclerView; @@ -78,9 +86,10 @@ import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; +import org.telegram.messenger.SharedConfig; import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; -import org.telegram.messenger.voip.VoIPBaseService; +import org.telegram.messenger.voip.Instance; import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLObject; @@ -109,7 +118,8 @@ import org.telegram.ui.Components.BlobDrawable; import org.telegram.ui.Components.CheckBoxSquare; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.EditTextBoldCursor; -import org.telegram.ui.Components.FillLastLinearLayoutManager; +import org.telegram.ui.Components.FillLastGridLayoutManager; +import org.telegram.ui.Components.GroupCallFullscreenAdapter; import org.telegram.ui.Components.GroupCallPip; import org.telegram.ui.Components.GroupVoipInviteAlert; import org.telegram.ui.Components.HintView; @@ -121,18 +131,33 @@ import org.telegram.ui.Components.ProfileGalleryView; import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.RLottieImageView; import org.telegram.ui.Components.RadialProgressView; +import org.telegram.ui.Components.RecordStatusDrawable; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.ShareAlert; +import org.telegram.ui.Components.TypefaceSpan; import org.telegram.ui.Components.UndoView; import org.telegram.ui.Components.WaveDrawable; +import org.telegram.ui.Components.voip.CellFlickerDrawable; +import org.telegram.ui.Components.voip.GroupCallGridCell; +import 
org.telegram.ui.Components.voip.GroupCallMiniTextureView; +import org.telegram.ui.Components.voip.GroupCallRenderersContainer; +import org.telegram.ui.Components.voip.GroupCallStatusIcon; +import org.telegram.ui.Components.voip.VideoPreviewDialog; import org.telegram.ui.Components.voip.VoIPToggleButton; import java.io.File; import java.util.ArrayList; import java.util.Calendar; +import java.util.HashMap; +import java.util.HashSet; import java.util.Locale; -public class GroupCallActivity extends BottomSheet implements NotificationCenter.NotificationCenterDelegate, VoIPBaseService.StateListener { +import static android.content.Context.AUDIO_SERVICE; + +public class GroupCallActivity extends BottomSheet implements NotificationCenter.NotificationCenterDelegate, VoIPService.StateListener { + + public final static int TABLET_LIST_SIZE = 320; + public static final long TRANSITION_DURATION = 350; private static final int eveyone_can_speak_item = 1; private static final int admin_can_speak_item = 2; @@ -142,6 +167,9 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private static final int edit_item = 6; private static final int permission_item = 7; private static final int user_item = 8; + private static final int screen_capture_item = 9; + private static final int sound_item = 10; + private static final int noise_item = 11; private static final int user_item_gap = 0; private static final int MUTE_BUTTON_STATE_UNMUTE = 0; @@ -153,9 +181,12 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private static final int MUTE_BUTTON_STATE_SET_REMINDER = 6; private static final int MUTE_BUTTON_STATE_CANCEL_REMINDER = 7; + public static int currentScreenOrientation; + public static GroupCallActivity groupCallInstance; public static boolean groupCallUiVisible; private final ProfileGalleryView avatarsViewPager; + private final GridLayoutManager.SpanSizeLookup spanSizeLookup; private AccountInstance accountInstance; @@ 
-169,13 +200,16 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private SimpleTextView scheduleStartInTextView; private SimpleTextView scheduleTimeTextView; private SimpleTextView scheduleStartAtTextView; - private DefaultItemAnimator itemAnimator; - private FillLastLinearLayoutManager layoutManager; + private GroupCallItemAnimator itemAnimator; + private FillLastGridLayoutManager layoutManager; + private VoIPToggleButton flipButton; + private VoIPToggleButton cameraButton; private VoIPToggleButton soundButton; + private float soundButtonScale; + private float cameraButtonScale; private VoIPToggleButton leaveButton; private RLottieImageView muteButton; private TextView[] muteLabel = new TextView[2]; - private TextView[] muteSubLabel = new TextView[2]; private FrameLayout buttonsContainer; private RadialProgressView radialProgressView; private Drawable shadowDrawable; @@ -183,13 +217,18 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private AnimatorSet actionBarAnimation; private LaunchActivity parentActivity; private UndoView[] undoView = new UndoView[2]; - private BackupImageView accountSwitchImageView; - private AvatarDrawable accountSwitchAvatarDrawable; private AccountSelectCell accountSelectCell; private View accountGap; private boolean changingPermissions; private HintView recordHintView; private HintView reminderHintView; + private int buttonsVisibility; + private TextView speakingMembersSubtitle; + + + public final ArrayList visibleVideoParticipants = new ArrayList<>(); + + float progressToHideUi; private ShareAlert shareAlert; @@ -205,7 +244,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private float scrollOffsetY; - private boolean scrolling; + VideoPreviewDialog previewDialog; private TLRPC.Peer selfPeer; private TLObject userSwitchObject; @@ -213,6 +252,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter 
private Paint listViewBackgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG); private ArrayList oldParticipants = new ArrayList<>(); + private ArrayList oldVideoParticipants = new ArrayList<>(); private ArrayList oldInvited = new ArrayList<>(); private int oldCount; @@ -248,14 +288,19 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private AudioPlayerAlert.ClippingTextViewSwitcher titleTextView; private ActionBarMenuItem otherItem; private ActionBarMenuItem pipItem; + private ActionBarMenuItem screenShareItem; private ActionBarMenuSubItem inviteItem; private ActionBarMenuSubItem editTitleItem; + private ActionBarMenuSubItem soundItem; + private ActionBarMenuSubItem noiseItem; private ActionBarMenuSubItem permissionItem; private ActionBarMenuSubItem recordItem; + private ActionBarMenuSubItem screenItem; private ActionBarMenuSubItem everyoneItem; private ActionBarMenuSubItem adminItem; private ActionBarMenuSubItem leaveItem; private final LinearLayout menuItemsContainer; + private View soundItemDivider; private Runnable updateCallRecordRunnable; @@ -287,19 +332,43 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private boolean invalidateColors = true; private final int[] colorsTmp = new int[3]; - View blurredView; + // private boolean isVideoCall = true; + private final ArrayList attachedRenderers = new ArrayList<>(); + private final ArrayList attachedRenderersTmp = new ArrayList<>(); + private GroupCallRenderersContainer renderersContainer; + + private View blurredView; PinchToZoomHelper pinchToZoomHelper; private float progressToAvatarPreview; private View scrimPopupLayout; private boolean avatarsPreviewShowed; + private boolean useBlur; ImageUpdater currentAvatarUpdater; AvatarUpdaterDelegate avatarUpdaterDelegate; private int scheduleStartAt; private boolean contentFullyOverlayed; + // private float progressToFullscreenMode; + ValueAnimator fullscreenModeAnimator; + + RecyclerListView 
fullscreenUsersListView; + RecyclerListView tabletVideoGridView; + GroupCallTabletGridAdapter tabletGridAdapter; + GroupCallFullscreenAdapter fullscreenAdapter; + ViewTreeObserver.OnPreDrawListener requestFullscreenListener; + public CellFlickerDrawable cellFlickerDrawable = new CellFlickerDrawable(); + + public static boolean isLandscapeMode; + public static boolean isTabletMode; + public final ArrayList statusIconPool = new ArrayList<>(); + private boolean drawSpeakingSubtitle; + + private HashMap buttonsAnimationParamsX = new HashMap<>(); + private HashMap buttonsAnimationParamsY = new HashMap<>(); + boolean animateButtonsOnNextLayout; private Runnable updateSchedeulRunnable = new Runnable() { @Override @@ -363,6 +432,19 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private final FrameLayout avatarPreviewContainer; private final AvatarPreviewPagerIndicator avatarPagerIndicator; private ViewGroup currentOptionsLayout; + public boolean drawingForBlur; + private final RLottieDrawable flipIcon; + private int flipIconCurrentEndFrame; + private boolean hasVideo; + private final View buttonsBackgroundGradientView; + private final View buttonsBackgroundGradientView2; + private GradientDrawable buttonsBackgroundGradient; + private int[] gradientColors = new int[2]; + private final DefaultItemAnimator fullscreenListItemAnimator; + private boolean previewTextureTransitionEnabled; + private boolean listViewVideoVisibility = true; + + public static boolean paused; private static class SmallRecordCallDrawable extends Drawable { @@ -720,7 +802,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter dismissAvatarPreview(true); processSelectedOption(currentParticipant, id, ChatObject.canManageCalls(currentChat) ? 
0 : 5); } else { - VoIPService.getSharedInstance().editCallMember(object, false, currentParticipant.volume, null); + VoIPService.getSharedInstance().editCallMember(object, null, null, currentParticipant.volume, null, null); } } Integer newTag = currentProgress == 0 ? 1 : null; @@ -819,22 +901,22 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } } - private class WeavingState { + public static class WeavingState { private float targetX = -1f; private float targetY = -1f; private float startX; private float startY; private float duration; private float time; - private Shader shader; + public Shader shader; private Matrix matrix = new Matrix(); - private int currentState; + public int currentState; public WeavingState(int state) { currentState = state; } - public void update(int top, int left, int size, long dt) { + public void update(int top, int left, int size, long dt, float amplitude) { if (shader == null) { return; } @@ -884,7 +966,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } } - private static boolean isGradientState(int state) { + public static boolean isGradientState(int state) { return state == MUTE_BUTTON_STATE_MUTED_BY_ADMIN || state == MUTE_BUTTON_STATE_RAISED_HAND || state == MUTE_BUTTON_STATE_START_NOW || state == MUTE_BUTTON_STATE_SET_REMINDER || state == MUTE_BUTTON_STATE_CANCEL_REMINDER; } @@ -941,7 +1023,9 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter canvas.restore(); canvas.save(); canvas.translate(containerView.getX(), -AndroidUtilities.statusBarHeight); + drawingForBlur = true; containerView.draw(canvas); + drawingForBlur = false; Utilities.stackBlurBitmap(bitmap, Math.max(7, Math.max(w, h) / 180)); blurredView.setBackground(new BitmapDrawable(bitmap)); @@ -978,6 +1062,8 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter accountInstance.getNotificationCenter().removeObserver(this, 
NotificationCenter.userInfoDidLoad); accountInstance.getNotificationCenter().removeObserver(this, NotificationCenter.mainUserInfoChanged); accountInstance.getNotificationCenter().removeObserver(this, NotificationCenter.updateInterfaces); + accountInstance.getNotificationCenter().removeObserver(this, NotificationCenter.groupCallScreencastStateChanged); + accountInstance.getNotificationCenter().removeObserver(this, NotificationCenter.groupCallSpeakingUsersUpdated); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.webRtcMicAmplitudeEvent); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didEndCall); super.dismiss(); @@ -1040,10 +1126,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter applyCallParticipantUpdates(); } - if (actionBar != null) { - int count = call.call.participants_count + (listAdapter.addSelfToCounter() ? 1 : 0); - actionBar.setSubtitle(LocaleController.formatPluralString("Participants", count)); - } + updateSubtitle(); boolean selfUpdate = (Boolean) args[2]; boolean raisedHand = muteButtonState == MUTE_BUTTON_STATE_RAISED_HAND; updateState(true, selfUpdate); @@ -1054,30 +1137,51 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter VoIPService.getSharedInstance().playAllowTalkSound(); } } - } - } - } else if (id == NotificationCenter.webRtcMicAmplitudeEvent) { - float amplitude = (float) args[0]; - setAmplitude(amplitude * 4000.0f); - if (call != null && listView != null) { - TLRPC.TL_groupCallParticipant participant = call.participants.get(MessageObject.getPeerId(selfPeer)); - if (participant != null) { - ArrayList array = delayedGroupCallUpdated ? 
oldParticipants : call.sortedParticipants; - int idx = array.indexOf(participant); - if (idx >= 0) { - RecyclerView.ViewHolder holder = listView.findViewHolderForAdapterPosition(idx + listAdapter.usersStartRow); - if (holder != null && holder.itemView instanceof GroupCallUserCell) { - GroupCallUserCell cell = (GroupCallUserCell) holder.itemView; - cell.setAmplitude(amplitude * 15.0f); - if (holder.itemView == scrimView) { - if (!contentFullyOverlayed) { - containerView.invalidate(); + + if (args.length >= 4) { + long justJoinedId = (Long) args[3]; + if (justJoinedId != 0) { + TLObject object; + if (justJoinedId > 0) { + TLRPC.User user = accountInstance.getMessagesController().getUser((int) justJoinedId); + if (call.call.participants_count < 250 || UserObject.isContact(user)) { + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_USER_JOINED, user); + } + } else { + TLRPC.Chat chat = accountInstance.getMessagesController().getChat((int) -justJoinedId); + if (call.call.participants_count < 250 || !ChatObject.isNotInChat(chat)) { + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_USER_JOINED, chat); } } } } } } + } else if (id == NotificationCenter.groupCallSpeakingUsersUpdated) { + if (renderersContainer.inFullscreenMode && call != null) { + boolean autoPinEnabled = renderersContainer.autoPinEnabled(); + if (call != null && renderersContainer.inFullscreenMode && renderersContainer.fullscreenParticipant != null && call.participants.get(MessageObject.getPeerId(renderersContainer.fullscreenParticipant.participant.peer)) == null) { + autoPinEnabled = true; + } + if (autoPinEnabled) { + ChatObject.VideoParticipant currentSpeaker = null; + for (int i = 0; i < visibleVideoParticipants.size(); i++) { + ChatObject.VideoParticipant participant = visibleVideoParticipants.get(i); + boolean newSpeaking = call.currentSpeakingPeers.get(MessageObject.getPeerId(participant.participant.peer), null) != null; + if (newSpeaking && !participant.participant.muted_by_you && 
renderersContainer.fullscreenPeerId != MessageObject.getPeerId(participant.participant.peer)) { + currentSpeaker = participant; + } + } + if (currentSpeaker != null) { + fullscreenFor(currentSpeaker); + } + } + } + renderersContainer.setVisibleParticipant(true); + updateSubtitle(); + } else if (id == NotificationCenter.webRtcMicAmplitudeEvent) { + float amplitude = (float) args[0]; + setMicAmplitude(amplitude); } else if (id == NotificationCenter.needShowAlert) { int num = (Integer) args[0]; if (num == 6) { @@ -1152,19 +1256,21 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter int selfId = MessageObject.getPeerId(selfPeer); if (call != null && selfId == uid) { TLRPC.TL_groupCallParticipant participant = call.participants.get(selfId); - TLRPC.UserFull userInfo = (TLRPC.UserFull) args[1]; - participant.about = userInfo.about; - applyCallParticipantUpdates(); - AndroidUtilities.updateVisibleRows(listView); + if (participant != null) { + TLRPC.UserFull userInfo = (TLRPC.UserFull) args[1]; + participant.about = userInfo.about; + applyCallParticipantUpdates(); + AndroidUtilities.updateVisibleRows(listView); - if (currentOptionsLayout != null) { - for (int i = 0; i < currentOptionsLayout.getChildCount(); i++) { - View child = currentOptionsLayout.getChildAt(i); - if (child instanceof ActionBarMenuSubItem && child.getTag() != null && (Integer) child.getTag() == 10) { - ((ActionBarMenuSubItem) child).setTextAndIcon( - TextUtils.isEmpty(participant.about) ? LocaleController.getString("VoipAddBio", R.string.VoipAddBio) : LocaleController.getString("VoipEditBio", R.string.VoipEditBio), - TextUtils.isEmpty(participant.about) ? 
R.drawable.msg_addbio : R.drawable.msg_bio - ); + if (currentOptionsLayout != null) { + for (int i = 0; i < currentOptionsLayout.getChildCount(); i++) { + View child = currentOptionsLayout.getChildAt(i); + if (child instanceof ActionBarMenuSubItem && child.getTag() != null && (Integer) child.getTag() == 10) { + ((ActionBarMenuSubItem) child).setTextAndIcon( + TextUtils.isEmpty(participant.about) ? LocaleController.getString("VoipAddBio", R.string.VoipAddBio) : LocaleController.getString("VoipEditBio", R.string.VoipEditBio), + TextUtils.isEmpty(participant.about) ? R.drawable.msg_addbio : R.drawable.msg_bio + ); + } } } } @@ -1178,10 +1284,52 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter applyCallParticipantUpdates(); AndroidUtilities.updateVisibleRows(listView); } + } else if (id == NotificationCenter.groupCallScreencastStateChanged) { + updateItems(); + } + } + + private void setMicAmplitude(float amplitude) { + if (VoIPService.getSharedInstance() == null || VoIPService.getSharedInstance().isMicMute()) { + amplitude = 0f; + } + setAmplitude(amplitude * 4000.0f); + if (call != null && listView != null) { + TLRPC.TL_groupCallParticipant participant = call.participants.get(MessageObject.getPeerId(selfPeer)); + if (participant != null) { + if (!renderersContainer.inFullscreenMode) { + ArrayList array = delayedGroupCallUpdated ? 
oldParticipants : call.visibleParticipants; + int idx = array.indexOf(participant); + if (idx >= 0) { + + RecyclerView.ViewHolder holder = listView.findViewHolderForAdapterPosition(idx + listAdapter.usersStartRow); + if (holder != null && holder.itemView instanceof GroupCallUserCell) { + GroupCallUserCell cell = (GroupCallUserCell) holder.itemView; + cell.setAmplitude(amplitude * 15.0f); + if (holder.itemView == scrimView) { + if (!contentFullyOverlayed) { + containerView.invalidate(); + } + } + } + } + } else { + for (int i = 0; i < fullscreenUsersListView.getChildCount(); i++) { + GroupCallFullscreenAdapter.GroupCallUserCell cell = (GroupCallFullscreenAdapter.GroupCallUserCell) fullscreenUsersListView.getChildAt(i); + if (MessageObject.getPeerId(cell.getParticipant().peer) == MessageObject.getPeerId(participant.peer)) { + cell.setAmplitude(amplitude * 15.0f); + } + } + } + renderersContainer.setAmplitude(participant, amplitude * 15.0f); + } } } private void applyCallParticipantUpdates() { + if (renderersContainer.inFullscreenMode) { + renderersContainer.setVisibleParticipant(true); + } if (call == null || delayedGroupCallUpdated) { return; } @@ -1193,19 +1341,23 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter int count = listView.getChildCount(); View minChild = null; int minPosition = 0; + int minTop = Integer.MAX_VALUE; for (int a = 0; a < count; a++) { View child = listView.getChildAt(a); RecyclerView.ViewHolder holder = listView.findContainingViewHolder(child); - if (holder != null && holder.getAdapterPosition() != RecyclerView.NO_POSITION) { - if (minChild == null || minPosition > holder.getAdapterPosition()) { + if (holder != null && holder.getAdapterPosition() != RecyclerView.NO_POSITION && holder.getLayoutPosition() != RecyclerView.NO_POSITION) { + if (minChild == null || child.getTop() < minTop) { minChild = child; - minPosition = holder.getAdapterPosition(); + minPosition = holder.getLayoutPosition(); + minTop = 
child.getTop(); } } } + updateVideoParticipantList(); + try { UpdateCallback updateCallback = new UpdateCallback(listAdapter); - setOldRows(listAdapter.addMemberRow, listAdapter.usersStartRow, listAdapter.usersEndRow, listAdapter.invitedStartRow, listAdapter.invitedEndRow); + setOldRows(listAdapter.addMemberRow, listAdapter.usersStartRow, listAdapter.usersEndRow, listAdapter.invitedStartRow, listAdapter.invitedEndRow, listAdapter.usersVideoGridStartRow, listAdapter.usersVideoGridEndRow, listAdapter.videoGridDividerRow); listAdapter.updateRows(); DiffUtil.calculateDiff(diffUtilsCallback).dispatchUpdatesTo(updateCallback); } catch (Exception e) { @@ -1217,7 +1369,10 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter layoutManager.scrollToPositionWithOffset(minPosition, minChild.getTop() - listView.getPaddingTop()); } oldParticipants.clear(); - oldParticipants.addAll(call.sortedParticipants); + oldParticipants.addAll(call.visibleParticipants); + + oldVideoParticipants.clear(); + oldVideoParticipants.addAll(visibleVideoParticipants); oldInvited.clear(); oldInvited.addAll(call.invitedUsers); oldCount = listAdapter.getItemCount(); @@ -1234,6 +1389,60 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } } } + fullscreenAdapter.update(true, fullscreenUsersListView); + if (fullscreenUsersListView.getVisibility() == View.VISIBLE) { + AndroidUtilities.updateVisibleRows(fullscreenUsersListView); + } + if (isTabletMode) { + tabletGridAdapter.update(true, tabletVideoGridView); + } + if (listView.getVisibility() == View.VISIBLE) { + AndroidUtilities.updateVisibleRows(listView); + } + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + attachedRenderersTmp.get(i).updateAttachState(true); + } + + boolean autoPinEnabled = renderersContainer.autoPinEnabled(); + if (call != null && renderersContainer.inFullscreenMode && 
renderersContainer.fullscreenParticipant != null && call.participants.get(MessageObject.getPeerId(renderersContainer.fullscreenParticipant.participant.peer)) == null) { + autoPinEnabled = true; + } + if (renderersContainer.inFullscreenMode && renderersContainer.fullscreenParticipant != null && !ChatObject.Call.videoIsActive(renderersContainer.fullscreenParticipant.participant, renderersContainer.fullscreenParticipant.presentation, call)) { + boolean foundAnotherVideoParticipant = false; + if (!visibleVideoParticipants.isEmpty()) { + foundAnotherVideoParticipant = true; + if (autoPinEnabled) { + fullscreenFor(visibleVideoParticipants.get(0)); + } + } + if (!foundAnotherVideoParticipant) { + fullscreenFor(null); + } + } + + boolean hasVideoLocal = !call.visibleVideoParticipants.isEmpty(); + if (hasVideoLocal != hasVideo) { + hasVideo = hasVideoLocal; + if (isTabletMode) { + containerView.requestLayout(); + } + } + } + + private void updateVideoParticipantList() { + visibleVideoParticipants.clear(); + if (isTabletMode) { + if (renderersContainer.inFullscreenMode) { + visibleVideoParticipants.addAll(call.visibleVideoParticipants); + if (renderersContainer.fullscreenParticipant != null) { + visibleVideoParticipants.remove(renderersContainer.fullscreenParticipant); + } + } + } else { + visibleVideoParticipants.addAll(call.visibleVideoParticipants); + } } private void updateRecordCallText() { @@ -1251,9 +1460,9 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private void updateItems() { if (call == null || call.isScheduled()) { pipItem.setVisibility(View.INVISIBLE); + screenShareItem.setVisibility(View.GONE); if (call == null) { otherItem.setVisibility(View.GONE); - accountSwitchImageView.setVisibility(View.GONE); return; } } @@ -1264,22 +1473,31 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (newChat != null) { currentChat = newChat; } - boolean anyVisible = false; if 
(ChatObject.canUserDoAdminAction(currentChat, ChatObject.ACTION_INVITE)) { inviteItem.setVisibility(View.VISIBLE); - anyVisible = true; } else { inviteItem.setVisibility(View.GONE); } + + noiseItem.setVisibility(View.VISIBLE); + noiseItem.setIcon(SharedConfig.noiseSupression ? R.drawable.msg_noise_on : R.drawable.msg_noise_off); + noiseItem.setSubtext(SharedConfig.noiseSupression ? LocaleController.getString("VoipNoiseCancellationEnabled", R.string.VoipNoiseCancellationEnabled) : LocaleController.getString("VoipNoiseCancellationDisabled", R.string.VoipNoiseCancellationDisabled)); + if (ChatObject.canManageCalls(currentChat)) { leaveItem.setVisibility(View.VISIBLE); editTitleItem.setVisibility(View.VISIBLE); if (call.isScheduled()) { recordItem.setVisibility(View.GONE); + screenItem.setVisibility(View.GONE); } else { recordItem.setVisibility(View.VISIBLE); } - anyVisible = true; + if (!call.canStreamVideo || call.isScheduled() || Build.VERSION.SDK_INT < 21) { + screenItem.setVisibility(View.GONE); + } else { + screenItem.setVisibility(View.VISIBLE); + } + screenShareItem.setVisibility(View.GONE); recordCallDrawable.setRecording(call.recording); if (call.recording) { if (updateCallRecordRunnable == null) { @@ -1296,59 +1514,66 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } recordItem.setText(LocaleController.getString("VoipGroupRecordCall", R.string.VoipGroupRecordCall)); } + if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().getVideoState(true) == Instance.VIDEO_STATE_ACTIVE) { + screenItem.setTextAndIcon(LocaleController.getString("VoipChatStopScreenCapture", R.string.VoipChatStopScreenCapture), R.drawable.msg_screencast_off); + } else { + screenItem.setTextAndIcon(LocaleController.getString("VoipChatStartScreenCapture", R.string.VoipChatStartScreenCapture), R.drawable.msg_screencast); + } updateRecordCallText(); } else { + TLRPC.TL_groupCallParticipant participant = 
call.participants.get(MessageObject.getPeerId(selfPeer)); + boolean mutedByAdmin = participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(currentChat); + if (Build.VERSION.SDK_INT >= 21 && !mutedByAdmin && call.canStreamVideo) { + if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().getVideoState(true) == Instance.VIDEO_STATE_ACTIVE) { + screenShareItem.setVisibility(View.GONE); + screenItem.setVisibility(View.VISIBLE); + screenItem.setTextAndIcon(LocaleController.getString("VoipChatStopScreenCapture", R.string.VoipChatStopScreenCapture), R.drawable.msg_screencast_off); + screenItem.setContentDescription(LocaleController.getString("VoipChatStopScreenCapture", R.string.VoipChatStopScreenCapture)); + } else { + screenItem.setTextAndIcon(LocaleController.getString("VoipChatStartScreenCapture", R.string.VoipChatStartScreenCapture), R.drawable.msg_screencast); + screenItem.setContentDescription(LocaleController.getString("VoipChatStartScreenCapture", R.string.VoipChatStartScreenCapture)); + screenShareItem.setVisibility(View.GONE); + screenItem.setVisibility(View.VISIBLE); + } + } else { + screenShareItem.setVisibility(View.GONE); + screenItem.setVisibility(View.GONE); + } leaveItem.setVisibility(View.GONE); editTitleItem.setVisibility(View.GONE); recordItem.setVisibility(View.GONE); } if (ChatObject.canManageCalls(currentChat) && call.call.can_change_join_muted) { permissionItem.setVisibility(View.VISIBLE); - anyVisible = true; } else { permissionItem.setVisibility(View.GONE); } - otherItem.setVisibility(anyVisible ? 
View.VISIBLE : View.GONE); + if (soundButton.getVisibility() != View.VISIBLE) { + soundItem.setVisibility(View.VISIBLE); + soundItemDivider.setVisibility(View.VISIBLE); + } else { + soundItem.setVisibility(View.GONE); + soundItemDivider.setVisibility(View.GONE); + } + otherItem.setVisibility(View.VISIBLE); int margin = 48; if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().hasFewPeers || scheduleHasFewPeers) { - if (!anyVisible) { - anyVisible = true; - accountSwitchImageView.getImageReceiver().setCurrentAccount(currentAccount); - int peerId = MessageObject.getPeerId(selfPeer); - if (peerId > 0) { - TLRPC.User user = accountInstance.getMessagesController().getUser(peerId); - accountSwitchAvatarDrawable.setInfo(user); - accountSwitchImageView.setForUserOrChat(user, accountSwitchAvatarDrawable); - } else { - TLRPC.Chat chat = accountInstance.getMessagesController().getChat(-peerId); - accountSwitchAvatarDrawable.setInfo(chat); - accountSwitchImageView.setForUserOrChat(chat, accountSwitchAvatarDrawable); - } - accountSelectCell.setVisibility(View.GONE); - accountGap.setVisibility(View.GONE); - accountSwitchImageView.setVisibility(View.VISIBLE); + accountSelectCell.setVisibility(View.VISIBLE); + accountGap.setVisibility(View.VISIBLE); + int peerId = MessageObject.getPeerId(selfPeer); + TLObject object; + if (peerId > 0) { + object = accountInstance.getMessagesController().getUser(peerId); } else { - accountSwitchImageView.setVisibility(View.GONE); - accountSelectCell.setVisibility(View.VISIBLE); - accountGap.setVisibility(View.VISIBLE); - int peerId = MessageObject.getPeerId(selfPeer); - TLObject object; - if (peerId > 0) { - object = accountInstance.getMessagesController().getUser(peerId); - } else { - object = accountInstance.getMessagesController().getChat(-peerId); - } - accountSelectCell.setObject(object); + object = accountInstance.getMessagesController().getChat(-peerId); } + accountSelectCell.setObject(object); margin += 48; } else { 
- if (anyVisible) { - margin += 48; - } + margin += 48; accountSelectCell.setVisibility(View.GONE); accountGap.setVisibility(View.GONE); - accountSwitchImageView.setVisibility(View.GONE); } @@ -1358,8 +1583,8 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter titleTextView.requestLayout(); } - ((FrameLayout.LayoutParams) menuItemsContainer.getLayoutParams()).rightMargin = anyVisible ? 0 : AndroidUtilities.dp(6); - actionBar.setTitleRightMargin(AndroidUtilities.dp(48) * (anyVisible ? 2 : 1)); + ((FrameLayout.LayoutParams) menuItemsContainer.getLayoutParams()).rightMargin = 0; + actionBar.setTitleRightMargin(AndroidUtilities.dp(48) * 2); } protected void makeFocusable(BottomSheet bottomSheet, AlertDialog alertDialog, EditTextBoldCursor editText, boolean showKeyboard) { @@ -1434,6 +1659,10 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter this.scheduledHash = scheduledHash; this.currentAccount = account.getCurrentAccount(); this.scheduleHasFewPeers = scheduleHasFewPeers; + fullWidth = true; + isTabletMode = false; + isLandscapeMode = false; + paused = false; setDelegate(new BottomSheetDelegateInterface() { @Override public void onOpenAnimationStart() { @@ -1481,13 +1710,39 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter listAdapter = new ListAdapter(context); + RecordStatusDrawable recordStatusDrawable = new RecordStatusDrawable(true); + recordStatusDrawable.setColor(Theme.getColor(Theme.key_voipgroup_speakingText)); + recordStatusDrawable.start(); actionBar = new ActionBar(context) { + @Override public void setAlpha(float alpha) { - super.setAlpha(alpha); - containerView.invalidate(); + if (getAlpha() != alpha) { + super.setAlpha(alpha); + containerView.invalidate(); + } + } + + @Override + protected void dispatchDraw(Canvas canvas) { + super.dispatchDraw(canvas); + if (getAdditionalSubtitleTextView().getVisibility() == View.VISIBLE) { + canvas.save(); + 
canvas.translate(getSubtitleTextView().getLeft(), getSubtitleTextView().getY() - AndroidUtilities.dp(1)); + recordStatusDrawable.setAlpha((int) (255 * getAdditionalSubtitleTextView().getAlpha())); + recordStatusDrawable.draw(canvas); + canvas.restore(); + invalidate(); + } } }; + actionBar.setSubtitle(""); + actionBar.getSubtitleTextView().setVisibility(View.VISIBLE); + actionBar.createAdditionalSubtitleTextView(); + actionBar.getAdditionalSubtitleTextView().setPadding(AndroidUtilities.dp(24), 0, 0, 0); + AndroidUtilities.updateViewVisibilityAnimated(actionBar.getAdditionalSubtitleTextView(), drawSpeakingSubtitle, 1f, false); + actionBar.getAdditionalSubtitleTextView().setTextColor(Theme.getColor(Theme.key_voipgroup_speakingText)); + actionBar.setSubtitleColor(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled)); actionBar.setBackButtonImage(R.drawable.ic_ab_back); actionBar.setOccupyStatusBar(false); actionBar.setAllowOverlayTitle(false); @@ -1499,7 +1754,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter @Override public void onItemClick(int id) { if (id == -1) { - dismiss(); + onBackPressed(); } else if (id == eveyone_can_speak_item) { call.call.join_muted = false; toggleAdminSpeak(); @@ -1519,7 +1774,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (call.isScheduled()) { TLRPC.ChatFull chatFull = accountInstance.getMessagesController().getChatFull(currentChat.id); if (chatFull != null) { - chatFull.flags &=~ 2097152; + chatFull.flags &= ~2097152; chatFull.call = null; accountInstance.getNotificationCenter().postNotificationName(NotificationCenter.groupCallUpdated, currentChat.id, call.call.id, false); } @@ -1547,7 +1802,9 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter button.setTextColor(Theme.getColor(Theme.key_voipgroup_leaveCallMenu)); } dialog.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); - } else if (id == 
start_record_item) { + } else if (id == screen_capture_item) { + screenShareItem.callOnClick(); + } else if (id == start_record_item) { AlertDialog.Builder builder = new AlertDialog.Builder(getContext()); builder.setDialogButtonColorKey(Theme.key_voipgroup_listeningText); EditTextBoldCursor editText; @@ -1627,7 +1884,10 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter permissionItem.setVisibility(View.GONE); editTitleItem.setVisibility(View.GONE); recordItem.setVisibility(View.GONE); + screenItem.setVisibility(View.GONE); accountSelectCell.setVisibility(View.GONE); + soundItem.setVisibility(View.GONE); + noiseItem.setVisibility(View.GONE); otherItem.forceUpdatePopupPosition(); } else if (id == edit_item) { enterEventSent = false; @@ -1710,7 +1970,129 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter alertDialog.setTextColor(Theme.getColor(Theme.key_voipgroup_nameText)); editText.requestFocus(); } else if (id == user_item) { - accountSwitchImageView.callOnClick(); + JoinCallAlert.open(getContext(), -currentChat.id, accountInstance, null, JoinCallAlert.TYPE_DISPLAY, selfPeer, (peer1, hasFewPeers, schedule) -> { + if (call == null) { + return; + } + TLObject object; + if (peer1 instanceof TLRPC.TL_inputPeerUser) { + object = accountInstance.getMessagesController().getUser(peer1.user_id); + } else if (peer1 instanceof TLRPC.TL_inputPeerChat) { + object = accountInstance.getMessagesController().getChat(peer1.chat_id); + } else { + object = accountInstance.getMessagesController().getChat(peer1.channel_id); + } + if (call.isScheduled()) { + getUndoView().showWithAction(0, UndoView.ACTION_VOIP_USER_CHANGED, object); + if (peer1 instanceof TLRPC.TL_inputPeerChannel) { + selfPeer = new TLRPC.TL_peerChannel(); + selfPeer.channel_id = peer1.channel_id; + } else if (peer1 instanceof TLRPC.TL_inputPeerUser) { + selfPeer = new TLRPC.TL_peerUser(); + selfPeer.user_id = peer1.user_id; + } else if (peer1 instanceof 
TLRPC.TL_inputPeerChat) { + selfPeer = new TLRPC.TL_peerChat(); + selfPeer.chat_id = peer1.chat_id; + } + GroupCallActivity.this.schedulePeer = peer1; + TLRPC.ChatFull chatFull = accountInstance.getMessagesController().getChatFull(currentChat.id); + if (chatFull != null) { + chatFull.groupcall_default_join_as = selfPeer; + if (chatFull instanceof TLRPC.TL_chatFull) { + chatFull.flags |= 32768; + } else { + chatFull.flags |= 67108864; + } + } + TLRPC.TL_phone_saveDefaultGroupCallJoinAs req = new TLRPC.TL_phone_saveDefaultGroupCallJoinAs(); + req.peer = MessagesController.getInputPeer(currentChat); + req.join_as = peer1; + accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> { + + }); + updateItems(); + } else { + if (VoIPService.getSharedInstance() == null || !hasFewPeers) { + return; + } + TLRPC.TL_groupCallParticipant participant = call.participants.get(MessageObject.getPeerId(selfPeer)); + VoIPService.getSharedInstance().setGroupCallPeer(peer1); + userSwitchObject = object; + } + }); + } else if (id == noise_item) { + SharedConfig.toggleNoiseSupression(); + VoIPService service = VoIPService.getSharedInstance(); + if (service == null) { + return; + } + service.setNoiseSupressionEnabled(SharedConfig.noiseSupression); + } else if (id == sound_item) { + VoIPService service = VoIPService.getSharedInstance(); + if (service == null) { + return; + } + ArrayList names = new ArrayList<>(); + ArrayList icons = new ArrayList<>(); + ArrayList options = new ArrayList<>(); + + names.add(LocaleController.getString("VoipAudioRoutingSpeaker", R.string.VoipAudioRoutingSpeaker)); + icons.add(R.drawable.msg_voice_speaker); + options.add(0); + + if (service.hasEarpiece()) { + names.add(service.isHeadsetPlugged() ? LocaleController.getString("VoipAudioRoutingHeadset", R.string.VoipAudioRoutingHeadset) : LocaleController.getString("VoipAudioRoutingPhone", R.string.VoipAudioRoutingPhone)); + icons.add(service.isHeadsetPlugged() ? 
R.drawable.msg_voice_headphones : R.drawable.msg_voice_phone); + options.add(1); + } + + if (service.isBluetoothHeadsetConnected()) { + names.add(service.currentBluetoothDeviceName != null ? service.currentBluetoothDeviceName : LocaleController.getString("VoipAudioRoutingBluetooth", R.string.VoipAudioRoutingBluetooth)); + icons.add(R.drawable.msg_voice_bluetooth); + options.add(2); + } + + int n = names.size(); + CharSequence[] itemsArray = new CharSequence[n]; + int[] iconsArray = new int[n]; + for (int i = 0; i < n; i++) { + itemsArray[i] = names.get(i); + iconsArray[i] = icons.get(i); + } + + BottomSheet.Builder builder = new BottomSheet.Builder(context) + .setTitle(LocaleController.getString("VoipSelectAudioOutput", R.string.VoipSelectAudioOutput), true) + .setItems(itemsArray, iconsArray, (dialog, which) -> { + if (VoIPService.getSharedInstance() == null) { + return; + } + service.setAudioOutput(options.get(which)); + }); + BottomSheet bottomSheet = builder.create(); + + bottomSheet.setBackgroundColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled)); + int selectedPosition; + if (service.getCurrentAudioRoute() == VoIPService.AUDIO_ROUTE_SPEAKER) { + selectedPosition = 0; + } else if (service.getCurrentAudioRoute() == VoIPService.AUDIO_ROUTE_EARPIECE) { + selectedPosition = 1; + } else { + selectedPosition = 2; + } + builder.show(); + bottomSheet.setTitleColor(Theme.getColor(Theme.key_voipgroup_nameText)); + for (int i = 0; i < bottomSheet.getItemViews().size(); i++) { + BottomSheetCell cell = bottomSheet.getItemViews().get(i); + int color; + if (i == selectedPosition) { + color = Theme.getColor(Theme.key_voipgroup_listeningText); + } else { + color = Theme.getColor(Theme.key_voipgroup_nameText); + } + cell.setTextColor(color); + cell.setIconColor(color); + cell.setBackground(Theme.createSelectorDrawable(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_actionBarItems), (int) (255 * 0.05f)), 2)); + } } } }); @@ -1739,18 +2121,30 
@@ public class GroupCallActivity extends BottomSheet implements NotificationCenter for (int a = 0; a < uids.length; a++) { TLRPC.TL_groupCallParticipant participant = call.participantsBySources.get(uids[a]); if (participant != null) { - ArrayList array = delayedGroupCallUpdated ? oldParticipants : call.sortedParticipants; - int idx = array.indexOf(participant); - if (idx >= 0) { - RecyclerView.ViewHolder holder = listView.findViewHolderForAdapterPosition(idx + listAdapter.usersStartRow); - if (holder != null && holder.itemView instanceof GroupCallUserCell) { - GroupCallUserCell cell = (GroupCallUserCell) holder.itemView; - cell.setAmplitude(levels[a] * 15.0f); - if (holder.itemView == scrimView) { - containerView.invalidate(); + if (!renderersContainer.inFullscreenMode) { + ArrayList array = delayedGroupCallUpdated ? oldParticipants : call.visibleParticipants; + int idx = array.indexOf(participant); + if (idx >= 0) { + RecyclerView.ViewHolder holder = listView.findViewHolderForAdapterPosition(idx + listAdapter.usersStartRow); + if (holder != null && holder.itemView instanceof GroupCallUserCell) { + GroupCallUserCell cell = (GroupCallUserCell) holder.itemView; + cell.setAmplitude(levels[a] * 15.0f); + if (holder.itemView == scrimView) { + if (!contentFullyOverlayed) { + containerView.invalidate(); + } + } + } + } + } else { + for (int i = 0; i < fullscreenUsersListView.getChildCount(); i++) { + GroupCallFullscreenAdapter.GroupCallUserCell cell = (GroupCallFullscreenAdapter.GroupCallUserCell) fullscreenUsersListView.getChildAt(i); + if (MessageObject.getPeerId(cell.getParticipant().peer) == MessageObject.getPeerId(participant.peer)) { + cell.setAmplitude(levels[a] * 15.0f); } } } + renderersContainer.setAmplitude(participant, levels[a] * 15.0f); } } }; @@ -1763,6 +2157,8 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.userInfoDidLoad); 
accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.mainUserInfoChanged); accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.updateInterfaces); + accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.groupCallScreencastStateChanged); + accountInstance.getNotificationCenter().addObserver(this, NotificationCenter.groupCallSpeakingUsersUpdated); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.webRtcMicAmplitudeEvent); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didEndCall); @@ -1776,28 +2172,220 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private boolean ignoreLayout = false; private RectF rect = new RectF(); private int lastSize; + private boolean updateRenderers; + + boolean localHasVideo; + boolean wasLayout; @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int totalHeight = MeasureSpec.getSize(heightMeasureSpec); + ignoreLayout = true; + boolean landscape = MeasureSpec.getSize(widthMeasureSpec) > totalHeight && !AndroidUtilities.isTablet(); + renderersContainer.listWidth = MeasureSpec.getSize(widthMeasureSpec); + boolean tablet = AndroidUtilities.isTablet() && MeasureSpec.getSize(widthMeasureSpec) > totalHeight; + if (isLandscapeMode != landscape) { + isLandscapeMode = landscape; + layoutManager.setSpanCount(isLandscapeMode ? 6 : 2); + listView.invalidateItemDecorations(); + fullscreenUsersListView.invalidateItemDecorations(); + updateRenderers = true; + if (scheduleInfoTextView != null) { + scheduleInfoTextView.setVisibility(!isLandscapeMode ? View.VISIBLE : View.GONE); + } + } + if (isTabletMode != tablet) { + isTabletMode = tablet; + tabletVideoGridView.setVisibility(tablet ? 
View.VISIBLE : View.GONE); + listView.invalidateItemDecorations(); + fullscreenUsersListView.invalidateItemDecorations(); + updateRenderers = true; + } + if (updateRenderers) { + applyCallParticipantUpdates(); + listAdapter.notifyDataSetChanged(); + fullscreenAdapter.update(false, tabletVideoGridView); + if (isTabletMode) { + tabletGridAdapter.update(false, tabletVideoGridView); + } + tabletVideoGridView.setVisibility(isTabletMode ? View.VISIBLE : View.GONE); + tabletGridAdapter.setVisibility(tabletVideoGridView, isTabletMode && !renderersContainer.inFullscreenMode, true); + + listViewVideoVisibility = (isTabletMode && renderersContainer.inFullscreenMode) || !isTabletMode; + + boolean fullscreenListVisibility = !isTabletMode && renderersContainer.inFullscreenMode; + fullscreenAdapter.setVisibility(fullscreenUsersListView, fullscreenListVisibility); + fullscreenUsersListView.setVisibility(fullscreenListVisibility ? View.VISIBLE : View.GONE); + listView.setVisibility((isTabletMode || !renderersContainer.inFullscreenMode) ? View.VISIBLE : View.GONE); + layoutManager.setSpanCount(isLandscapeMode ? 
6 : 2); + updateState(false, false); + listView.invalidateItemDecorations(); + fullscreenUsersListView.invalidateItemDecorations(); + + AndroidUtilities.updateVisibleRows(listView); + updateRenderers = false; + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + renderersContainer.setIsTablet(isTabletMode); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + attachedRenderersTmp.get(i).updateAttachState(true); + } + } if (Build.VERSION.SDK_INT >= 21) { - ignoreLayout = true; setPadding(backgroundPaddingLeft, statusBarHeight, backgroundPaddingLeft, 0); - ignoreLayout = false; } int availableHeight = totalHeight - getPaddingTop() - AndroidUtilities.dp(14 + 231); - LayoutParams layoutParams = (LayoutParams) listView.getLayoutParams(); - layoutParams.topMargin = ActionBar.getCurrentActionBarHeight() + AndroidUtilities.dp(14); + int listViewPaddingBottom; + LayoutParams layoutParams = (LayoutParams) renderersContainer.getLayoutParams(); + if (isTabletMode) { + layoutParams.topMargin = ActionBar.getCurrentActionBarHeight(); + } else { + layoutParams.topMargin = 0; + } + + for (int a = 0; a < 2; a++) { + layoutParams = (LayoutParams) undoView[a].getLayoutParams(); + if (isTabletMode) { + layoutParams.rightMargin = AndroidUtilities.dp(TABLET_LIST_SIZE + 8); + } else { + layoutParams.rightMargin = AndroidUtilities.dp(8); + } + } + + if (tabletVideoGridView != null) { + layoutParams = (LayoutParams) tabletVideoGridView.getLayoutParams(); + layoutParams.topMargin = ActionBar.getCurrentActionBarHeight(); + } + + int buttonsGradientSize = AndroidUtilities.dp(150); + //listView layoutParams + layoutParams = (LayoutParams) listView.getLayoutParams(); + if (isTabletMode) { + layoutParams.gravity = hasVideo ? 
Gravity.RIGHT : Gravity.CENTER_HORIZONTAL; + layoutParams.width = AndroidUtilities.dp(TABLET_LIST_SIZE); + layoutParams.rightMargin = layoutParams.leftMargin = AndroidUtilities.dp(4); + layoutParams.bottomMargin = buttonsGradientSize; + layoutParams.topMargin = ActionBar.getCurrentActionBarHeight(); + listViewPaddingBottom = AndroidUtilities.dp(60); + } else if (isLandscapeMode) { + layoutParams.gravity = Gravity.TOP | Gravity.LEFT; + layoutParams.width = LayoutHelper.MATCH_PARENT; + layoutParams.topMargin = ActionBar.getCurrentActionBarHeight(); + layoutParams.bottomMargin = AndroidUtilities.dp(14); + layoutParams.rightMargin = AndroidUtilities.dp(90); + layoutParams.leftMargin = AndroidUtilities.dp(14); + listViewPaddingBottom = 0; + } else { + layoutParams.gravity = Gravity.TOP | Gravity.LEFT; + layoutParams.width = LayoutHelper.MATCH_PARENT; + listViewPaddingBottom = AndroidUtilities.dp(60); + layoutParams.bottomMargin = buttonsGradientSize; + layoutParams.topMargin = ActionBar.getCurrentActionBarHeight() + AndroidUtilities.dp(14); + layoutParams.rightMargin = layoutParams.leftMargin = AndroidUtilities.dp(14); + } + // + if (isLandscapeMode && !isTabletMode) { + buttonsBackgroundGradientView.setVisibility(View.GONE); + buttonsBackgroundGradientView2.setVisibility(View.GONE); + } else { + buttonsBackgroundGradientView.setVisibility(View.VISIBLE); + layoutParams = (LayoutParams) buttonsBackgroundGradientView.getLayoutParams(); + layoutParams.bottomMargin = buttonsGradientSize; + + if (isTabletMode) { + layoutParams.gravity = hasVideo ? 
Gravity.RIGHT | Gravity.BOTTOM : Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM; + layoutParams.width = AndroidUtilities.dp(TABLET_LIST_SIZE + 8); + } else { + layoutParams.width = LayoutHelper.MATCH_PARENT; + } + + buttonsBackgroundGradientView2.setVisibility(View.VISIBLE); + layoutParams = (LayoutParams) buttonsBackgroundGradientView2.getLayoutParams(); + layoutParams.height = buttonsGradientSize; + + if (isTabletMode) { + layoutParams.gravity = hasVideo ? Gravity.RIGHT | Gravity.BOTTOM : Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM; + layoutParams.width = AndroidUtilities.dp(TABLET_LIST_SIZE + 8); + } else { + layoutParams.width = LayoutHelper.MATCH_PARENT; + } + } + + if (isLandscapeMode) { + fullscreenUsersListView.setPadding(0, AndroidUtilities.dp(9), 0, AndroidUtilities.dp(9)); + } else { + fullscreenUsersListView.setPadding(AndroidUtilities.dp(9), 0, AndroidUtilities.dp(9), 0); + } + + // buttonsContainer + layoutParams = (LayoutParams) buttonsContainer.getLayoutParams(); + if (isTabletMode) { + layoutParams.width = AndroidUtilities.dp(TABLET_LIST_SIZE); + layoutParams.height = AndroidUtilities.dp(200); + layoutParams.gravity = hasVideo ? 
Gravity.RIGHT | Gravity.BOTTOM : Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM; + layoutParams.rightMargin = 0; + } else if (isLandscapeMode) { + layoutParams.width = AndroidUtilities.dp(90); + layoutParams.height = LayoutHelper.MATCH_PARENT; + layoutParams.gravity = Gravity.RIGHT | Gravity.TOP; + } else { + layoutParams.width = LayoutHelper.MATCH_PARENT; + layoutParams.height = AndroidUtilities.dp(200); + layoutParams.gravity = Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM; + layoutParams.rightMargin = 0; + } + // + + // actionBar + if (isLandscapeMode && !isTabletMode) { + layoutParams = (LayoutParams) actionBar.getLayoutParams(); + layoutParams.rightMargin = AndroidUtilities.dp(90); + layoutParams = (LayoutParams) menuItemsContainer.getLayoutParams(); + layoutParams.rightMargin = AndroidUtilities.dp(90); + layoutParams = (LayoutParams) actionBarBackground.getLayoutParams(); + layoutParams.rightMargin = AndroidUtilities.dp(90); + layoutParams = (LayoutParams) actionBarShadow.getLayoutParams(); + layoutParams.rightMargin = AndroidUtilities.dp(90); + } else { + layoutParams = (LayoutParams) actionBar.getLayoutParams(); + layoutParams.rightMargin = 0; + layoutParams = (LayoutParams) menuItemsContainer.getLayoutParams(); + layoutParams.rightMargin = 0; + layoutParams = (LayoutParams) actionBarBackground.getLayoutParams(); + layoutParams.rightMargin = 0; + layoutParams = (LayoutParams) actionBarShadow.getLayoutParams(); + layoutParams.rightMargin = 0; + } + // + layoutParams = (LayoutParams) fullscreenUsersListView.getLayoutParams(); + if (isLandscapeMode) { + if (((LinearLayoutManager) fullscreenUsersListView.getLayoutManager()).getOrientation() != RecyclerView.VERTICAL) { + ((LinearLayoutManager) fullscreenUsersListView.getLayoutManager()).setOrientation(RecyclerView.VERTICAL); + } + layoutParams.height = LayoutHelper.MATCH_PARENT; + layoutParams.width = AndroidUtilities.dp(80); + layoutParams.gravity = Gravity.TOP | Gravity.RIGHT; + layoutParams.rightMargin = 
AndroidUtilities.dp(100); + layoutParams.bottomMargin = 0; + } else { + if (((LinearLayoutManager) fullscreenUsersListView.getLayoutManager()).getOrientation() != RecyclerView.HORIZONTAL) { + ((LinearLayoutManager) fullscreenUsersListView.getLayoutManager()).setOrientation(RecyclerView.HORIZONTAL); + } + layoutParams.height = AndroidUtilities.dp(80); + layoutParams.width = LayoutHelper.MATCH_PARENT; + layoutParams.gravity = Gravity.BOTTOM; + layoutParams.rightMargin = 0; + layoutParams.bottomMargin = AndroidUtilities.dp(100); + } layoutParams = (LayoutParams) actionBarShadow.getLayoutParams(); layoutParams.topMargin = ActionBar.getCurrentActionBarHeight(); int contentSize = Math.max(AndroidUtilities.dp(64 + 50 + 58 * 2.5f), availableHeight / 5 * 3); - int padding = Math.max(0, availableHeight - contentSize + AndroidUtilities.dp(8)); - ignoreLayout = true; - if (listView.getPaddingTop() != padding) { - listView.setPadding(0, padding, 0, 0); + int padding = isTabletMode ? 0 : Math.max(0, availableHeight - contentSize + AndroidUtilities.dp(8)); + if (listView.getPaddingTop() != padding || listView.getPaddingBottom() != listViewPaddingBottom) { + listView.setPadding(0, padding, 0, listViewPaddingBottom); } if (scheduleStartAtTextView != null) { int y = padding + (availableHeight - padding + AndroidUtilities.dp(60)) / 2; @@ -1819,19 +2407,48 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter layoutParams3.topMargin = y; } } + + for (int i = 0; i < attachedRenderers.size(); i++) { + attachedRenderers.get(i).setFullscreenMode(renderersContainer.inFullscreenMode, true); + } + ignoreLayout = false; super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(totalHeight, MeasureSpec.EXACTLY)); int currentSize = getMeasuredHeight() + (getMeasuredWidth() << 16); - if (currentSize != lastSize) { + if (currentSize != lastSize) { lastSize = currentSize; dismissAvatarPreview(false); } + cellFlickerDrawable.setParentWidth(getMeasuredWidth()); } 
@Override protected void onLayout(boolean changed, int l, int t, int r, int b) { + boolean needAnimate = false; + float fromX = 0; + if (isTabletMode && localHasVideo != hasVideo && wasLayout) { + needAnimate = true; + fromX = listView.getX(); + } + localHasVideo = hasVideo; + renderersContainer.inLayout = true; super.onLayout(changed, l, t, r, b); + renderersContainer.inLayout = false; updateLayout(false); + wasLayout = true; + + if (needAnimate && listView.getLeft() != fromX) { + float dx = fromX - listView.getLeft(); + listView.setTranslationX(dx); + buttonsContainer.setTranslationX(dx); + buttonsBackgroundGradientView.setTranslationX(dx); + buttonsBackgroundGradientView2.setTranslationX(dx); + + listView.animate().translationX(0).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start();listView.animate().translationX(0).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + buttonsBackgroundGradientView.animate().translationX(0).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + buttonsBackgroundGradientView2.animate().translationX(0).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + buttonsContainer.animate().translationX(0).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } } @Override @@ -1853,7 +2470,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter return true; } } - if (ev.getAction() == MotionEvent.ACTION_DOWN && scrollOffsetY != 0 && ev.getY() < scrollOffsetY - AndroidUtilities.dp(37) && actionBar.getAlpha() == 0.0f && !avatarsPreviewShowed) { + if (ev.getAction() == MotionEvent.ACTION_DOWN && scrollOffsetY != 0 && ev.getY() < scrollOffsetY - AndroidUtilities.dp(37) && actionBar.getAlpha() == 0.0f && !avatarsPreviewShowed && previewDialog == null && !renderersContainer.inFullscreenMode) { dismiss(); return true; } @@ 
-1893,25 +2510,168 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter top += getPaddingTop(); - shadowDrawable.setBounds(0, (int) top, getMeasuredWidth(), height); - shadowDrawable.draw(canvas); - if (rad != 1.0f) { - Theme.dialogs_onlineCirclePaint.setColor(backgroundColor); - rect.set(backgroundPaddingLeft, backgroundPaddingTop + top, getMeasuredWidth() - backgroundPaddingLeft, backgroundPaddingTop + top + AndroidUtilities.dp(24)); - canvas.drawRoundRect(rect, AndroidUtilities.dp(12) * rad, AndroidUtilities.dp(12) * rad, Theme.dialogs_onlineCirclePaint); + if (renderersContainer.progressToFullscreenMode != 1f) { + shadowDrawable.setBounds(0, (int) top, getMeasuredWidth(), height); + shadowDrawable.draw(canvas); + + + if (rad != 1.0f) { + Theme.dialogs_onlineCirclePaint.setColor(backgroundColor); + rect.set(backgroundPaddingLeft, backgroundPaddingTop + top, getMeasuredWidth() - backgroundPaddingLeft, backgroundPaddingTop + top + AndroidUtilities.dp(24)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(12) * rad, AndroidUtilities.dp(12) * rad, Theme.dialogs_onlineCirclePaint); + } + + int finalColor = Color.argb((int) (255 * actionBar.getAlpha()), (int) (Color.red(backgroundColor) * 0.8f), (int) (Color.green(backgroundColor) * 0.8f), (int) (Color.blue(backgroundColor) * 0.8f)); + Theme.dialogs_onlineCirclePaint.setColor(finalColor); + float bottom = statusBarHeight; + canvas.drawRect(backgroundPaddingLeft, 0, getMeasuredWidth() - backgroundPaddingLeft, bottom, Theme.dialogs_onlineCirclePaint); + + if (previewDialog != null) { + Theme.dialogs_onlineCirclePaint.setColor(previewDialog.getBackgroundColor()); + canvas.drawRect(backgroundPaddingLeft, 0, getMeasuredWidth() - backgroundPaddingLeft, statusBarHeight, Theme.dialogs_onlineCirclePaint); + } } - int finalColor = Color.argb((int) (255 * actionBar.getAlpha()), (int) (Color.red(backgroundColor) * 0.8f), (int) (Color.green(backgroundColor) * 0.8f), (int) (Color.blue(backgroundColor) 
* 0.8f)); - Theme.dialogs_onlineCirclePaint.setColor(finalColor); - canvas.drawRect(backgroundPaddingLeft, 0, getMeasuredWidth() - backgroundPaddingLeft, statusBarHeight, Theme.dialogs_onlineCirclePaint); + if (renderersContainer.progressToFullscreenMode != 0) { + Theme.dialogs_onlineCirclePaint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_actionBar), (int) (255 * renderersContainer.progressToFullscreenMode))); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), Theme.dialogs_onlineCirclePaint); + } } + HashMap listCells = new HashMap<>(); + @Override protected void dispatchDraw(Canvas canvas) { + if (isTabletMode) { + buttonsContainer.setTranslationY(0); + fullscreenUsersListView.setTranslationY(0); + buttonsContainer.setTranslationX(0); + fullscreenUsersListView.setTranslationY(0); + } else if (isLandscapeMode) { + buttonsContainer.setTranslationY(0); + fullscreenUsersListView.setTranslationY(0); + buttonsContainer.setTranslationX(progressToHideUi * AndroidUtilities.dp(94)); + fullscreenUsersListView.setTranslationX(progressToHideUi * AndroidUtilities.dp(94)); + } else { + buttonsContainer.setTranslationX(0); + fullscreenUsersListView.setTranslationX(0); + buttonsContainer.setTranslationY(progressToHideUi * AndroidUtilities.dp(94)); + fullscreenUsersListView.setTranslationY(progressToHideUi * AndroidUtilities.dp(94)); + } + + for (int i = 0; i < listView.getChildCount(); i++) { + View view = listView.getChildAt(i); + if (view instanceof GroupCallUserCell) { + GroupCallUserCell cell = (GroupCallUserCell) view; + cell.setDrawAvatar(true); + } + if (!(view instanceof GroupCallGridCell)) { + if (view.getMeasuredWidth() != listView.getMeasuredWidth()) { + view.setTranslationX((listView.getMeasuredWidth() - view.getMeasuredWidth()) >> 1); + } else { + view.setTranslationX(0); + } + } + } + + if (renderersContainer.isAnimating()) { + if (fullscreenUsersListView.getVisibility() == View.VISIBLE) { + listCells.clear(); + for (int i 
= 0; i < listView.getChildCount(); i++) { + View view = listView.getChildAt(i); + if (view instanceof GroupCallGridCell && listView.getChildAdapterPosition(view) >= 0) { + GroupCallGridCell cell = (GroupCallGridCell) view; + if (cell.getRenderer() != renderersContainer.fullscreenTextureView) { + listCells.put(cell.getParticipant(), view); + } + } else if (view instanceof GroupCallUserCell && listView.getChildAdapterPosition(view) >= 0) { + GroupCallUserCell cell = (GroupCallUserCell) view; + listCells.put(cell.getParticipant(), cell); + } + } + for (int i = 0; i < fullscreenUsersListView.getChildCount(); i++) { + GroupCallFullscreenAdapter.GroupCallUserCell cellTo = (GroupCallFullscreenAdapter.GroupCallUserCell) fullscreenUsersListView.getChildAt(i); + + View cellFrom = listCells.get(cellTo.getVideoParticipant()); + if (cellFrom == null) { + cellFrom = listCells.get(cellTo.getParticipant()); + } + float progressToFullscreenMode = renderersContainer.progressToFullscreenMode; + + if (!fullscreenListItemAnimator.isRunning()) { + cellTo.setAlpha(1f); + } + if (cellFrom != null) { + float toX, toY, fromX, fromY; + if (cellFrom instanceof GroupCallGridCell) { + GroupCallGridCell gridCell = (GroupCallGridCell) cellFrom; + fromX = gridCell.getLeft() + listView.getX() - renderersContainer.getLeft(); + fromY = gridCell.getTop() + listView.getY() - renderersContainer.getTop(); + + toX = cellTo.getLeft() + fullscreenUsersListView.getX(); + toY = cellTo.getTop() + fullscreenUsersListView.getY(); + } else { + GroupCallUserCell userCell = (GroupCallUserCell) cellFrom; + fromX = userCell.getLeft() + listView.getX() - renderersContainer.getLeft() + userCell.getAvatarImageView().getLeft() + (userCell.getAvatarImageView().getMeasuredWidth() >> 1); + fromY = userCell.getTop() + listView.getY() - renderersContainer.getTop() + userCell.getAvatarImageView().getTop() + (userCell.getAvatarImageView().getMeasuredHeight() >> 1); + + toX = cellTo.getLeft() + fullscreenUsersListView.getX() + 
(cellTo.getMeasuredWidth() >> 1); + toY = cellTo.getTop() + fullscreenUsersListView.getY() + (cellTo.getMeasuredHeight() >> 1); + + userCell.setDrawAvatar(false); + } + + cellTo.setTranslationX((fromX - toX) * (1f - progressToFullscreenMode)); + cellTo.setTranslationY((fromY - toY) * (1f - progressToFullscreenMode)); + cellTo.setScaleX(1f); + cellTo.setScaleY(1f); + cellTo.setProgressToFullscreen(progressToFullscreenMode); + } else { + cellTo.setScaleX(1f); + cellTo.setScaleY(1f); + cellTo.setTranslationX(0); + cellTo.setTranslationY(0); + cellTo.setProgressToFullscreen(1f); + if (cellTo.getRenderer() == null) { + cellTo.setAlpha(progressToFullscreenMode); + } + } + } + } + } else { + for (int i = 0; i < fullscreenUsersListView.getChildCount(); i++) { + GroupCallFullscreenAdapter.GroupCallUserCell cellTo = (GroupCallFullscreenAdapter.GroupCallUserCell) fullscreenUsersListView.getChildAt(i); + cellTo.setProgressToFullscreen(1f); + } + } + for (int i = 0; i < attachedRenderers.size(); i++) { + GroupCallMiniTextureView child = attachedRenderers.get(i); + child.updatePosition(listView, tabletVideoGridView, fullscreenUsersListView, renderersContainer); + } + + if (!isTabletMode) { + buttonsBackgroundGradientView.setAlpha(1f - renderersContainer.progressToFullscreenMode); + buttonsBackgroundGradientView2.setAlpha(1f - renderersContainer.progressToFullscreenMode); + } else { + buttonsBackgroundGradientView.setAlpha(1f); + buttonsBackgroundGradientView2.setAlpha(1f); + } + + if (renderersContainer.swipedBack) { + listView.setAlpha(1f - renderersContainer.progressToFullscreenMode); + } else { + listView.setAlpha(1f); + } super.dispatchDraw(canvas); + if (drawingForBlur) { + return; + } if (avatarsPreviewShowed) { if (scrimView != null) { + if (!useBlur) { + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); + } float listTop = listView.getY(); float[] radii = new float[8]; @@ -1920,66 +2680,117 @@ public class GroupCallActivity extends BottomSheet 
implements NotificationCenter int count = listView.getChildCount(); float viewClipBottom = listView.getY() + listView.getMeasuredHeight(); - for (int num = 0; num < count; num++) { - View child = listView.getChildAt(num); - if (child != scrimView) { - continue; + GroupCallUserCell scrimViewLocal = null; + if (hasScrimAnchorView) { + for (int num = 0; num < count; num++) { + View child = listView.getChildAt(num); + if (child == scrimView) { + scrimViewLocal = scrimView; + break; + } + } + } else { + scrimViewLocal = scrimView; + } + + if (scrimViewLocal != null && listTop < viewClipBottom) { + canvas.save(); + if (scrimFullscreenView == null) { + canvas.clipRect(0, listTop * (1f - progressToAvatarPreview), getMeasuredWidth(), viewClipBottom * (1f - progressToAvatarPreview) + getMeasuredHeight() * progressToAvatarPreview); + } + float childY, childX; + if (!hasScrimAnchorView) { + childY = avatarPreviewContainer.getTop() + avatarPreviewContainer.getMeasuredWidth(); + childX = avatarPreviewContainer.getLeft(); + } else { + childY = (listView.getY() + scrimViewLocal.getY()) * (1f - progressToAvatarPreview) + (avatarPreviewContainer.getTop() + avatarPreviewContainer.getMeasuredWidth()) * progressToAvatarPreview; + childX = (listView.getLeft() + scrimViewLocal.getX()) * (1f - progressToAvatarPreview) + avatarPreviewContainer.getLeft() * progressToAvatarPreview; + } + canvas.translate(childX, childY); + if (!hasScrimAnchorView) { + canvas.saveLayerAlpha(0 ,0, scrimViewLocal.getMeasuredWidth(), scrimViewLocal.getClipHeight(), (int) (255 * progressToAvatarPreview), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); + } + float progress = progressToAvatarPreview; + float pr = 1.0f - CubicBezierInterpolator.EASE_OUT.getInterpolation(1.0f - progress); + int h = (int) (scrimViewLocal.getMeasuredHeight() + (scrimViewLocal.getClipHeight() - scrimViewLocal.getMeasuredHeight()) * pr); + rect.set(0, 0, scrimViewLocal.getMeasuredWidth(), h); + 
scrimViewLocal.setProgressToAvatarPreview(hasScrimAnchorView ? progressToAvatarPreview : 1f); + for (int i = 0; i < 4; i++) { + radii[i] = AndroidUtilities.dp(13) * (1f - progressToAvatarPreview); + radii[4 + i] = AndroidUtilities.dp(13); } + roundPath.reset(); + roundPath.addRoundRect(rect, radii, Path.Direction.CW); + roundPath.close(); + canvas.drawPath(roundPath, listViewBackgroundPaint); + scrimViewLocal.draw(canvas); + canvas.restore(); + canvas.restore(); - if (listTop < viewClipBottom) { - canvas.save(); - canvas.clipRect(0, listTop * (1f - progressToAvatarPreview), getMeasuredWidth(), viewClipBottom * (1f - progressToAvatarPreview) + getMeasuredHeight() * progressToAvatarPreview); - - float childY = (listView.getY() + child.getY()) * (1f - progressToAvatarPreview) + (avatarPreviewContainer.getTop() + avatarPreviewContainer.getMeasuredWidth()) * progressToAvatarPreview; - float childX = (listView.getLeft() + child.getX()) * (1f - progressToAvatarPreview) + avatarPreviewContainer.getLeft() * progressToAvatarPreview; - canvas.translate(childX, childY); - float progress = progressToAvatarPreview; - float pr = 1.0f - CubicBezierInterpolator.EASE_OUT.getInterpolation(1.0f - progress); - int h = (int) (scrimView.getMeasuredHeight() + (scrimView.getClipHeight() - scrimView.getMeasuredHeight()) * pr); - rect.set(0, 0, child.getMeasuredWidth(), h); - scrimView.setProgressToAvatarPreview(progressToAvatarPreview); - for (int i = 0; i < 4; i++) { - radii[i] = AndroidUtilities.dp(13) * (1f - progressToAvatarPreview); - radii[4 + i] = AndroidUtilities.dp(13); + if (scrimPopupLayout != null) { + float y = childY + h; + float x = getMeasuredWidth() - scrimPopupLayout.getMeasuredWidth() - AndroidUtilities.dp(14); + if (progressToAvatarPreview != 1f) { + canvas.saveLayerAlpha(x, y, x + scrimPopupLayout.getMeasuredWidth(), y + scrimPopupLayout.getMeasuredHeight(), (int) (255 * progressToAvatarPreview), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); } + 
scrimPopupLayout.setTranslationX(x - scrimPopupLayout.getLeft()); + scrimPopupLayout.setTranslationY(y - scrimPopupLayout.getTop()); + float scale = 0.8f + 0.2f * progressToAvatarPreview; + canvas.scale(scale, scale, x + scrimPopupLayout.getMeasuredWidth() / 2f, y); - roundPath.reset(); - roundPath.addRoundRect(rect, radii, Path.Direction.CW); - roundPath.close(); - canvas.drawPath(roundPath, listViewBackgroundPaint); - child.draw(canvas); + canvas.translate(x, y); + scrimPopupLayout.draw(canvas); canvas.restore(); - - if (scrimPopupLayout != null) { - float y = childY + h; - float x = getMeasuredWidth() - scrimPopupLayout.getMeasuredWidth() - AndroidUtilities.dp(14); - if (progressToAvatarPreview != 1f) { - canvas.saveLayerAlpha(x, y, x + scrimPopupLayout.getMeasuredWidth(), y + scrimPopupLayout.getMeasuredHeight(), (int) (255 * progressToAvatarPreview), Canvas.ALL_SAVE_FLAG); - } else { - canvas.save(); - } - scrimPopupLayout.setTranslationX(x - scrimPopupLayout.getLeft()); - scrimPopupLayout.setTranslationY(y - scrimPopupLayout.getTop()); - float scale = 0.8f + 0.2f * progressToAvatarPreview; - canvas.scale(scale, scale, x + scrimPopupLayout.getMeasuredWidth() / 2f, y); - - canvas.translate(x, y); - scrimPopupLayout.draw(canvas); - canvas.restore(); - } } } if (!pinchToZoomHelper.isInOverlayMode()) { canvas.save(); - canvas.clipRect(0, listTop * (1f - progressToAvatarPreview), getMeasuredWidth(), viewClipBottom * (1f - progressToAvatarPreview) + getMeasuredHeight() * progressToAvatarPreview); + if (hasScrimAnchorView && scrimFullscreenView == null) { + canvas.clipRect(0, listTop * (1f - progressToAvatarPreview), getMeasuredWidth(), viewClipBottom * (1f - progressToAvatarPreview) + getMeasuredHeight() * progressToAvatarPreview); + } canvas.scale(avatarPreviewContainer.getScaleX(), avatarPreviewContainer.getScaleY(), avatarPreviewContainer.getX(), avatarPreviewContainer.getY()); canvas.translate(avatarPreviewContainer.getX(), avatarPreviewContainer.getY()); 
avatarPreviewContainer.draw(canvas); canvas.restore(); } } + + + if (progressToAvatarPreview != 1f && scrimFullscreenView == null) { + canvas.saveLayerAlpha((int) buttonsBackgroundGradientView2.getX(), (int) buttonsBackgroundGradientView.getY(), (int) (buttonsBackgroundGradientView2.getX() + buttonsBackgroundGradientView2.getMeasuredWidth()), getMeasuredHeight(), (int) (255 * (1f - progressToAvatarPreview)), Canvas.ALL_SAVE_FLAG); + + canvas.save(); + canvas.translate(buttonsBackgroundGradientView2.getX(), buttonsBackgroundGradientView2.getY()); + buttonsBackgroundGradientView2.draw(canvas); + canvas.restore(); + + canvas.save(); + canvas.translate(buttonsBackgroundGradientView.getX(), buttonsBackgroundGradientView.getY()); + buttonsBackgroundGradientView.draw(canvas); + canvas.restore(); + + canvas.save(); + canvas.translate(buttonsContainer.getX(), buttonsContainer.getY()); + buttonsContainer.draw(canvas); + canvas.restore(); + + for (int i = 0; i < 2; i++) { + if (undoView[i].getVisibility() == View.VISIBLE) { + canvas.save(); + canvas.translate(undoView[1].getX(), undoView[1].getY()); + undoView[1].draw(canvas); + canvas.restore(); + } + } + + canvas.restore(); + } } else { if (scrimView != null) { canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), scrimPaint); @@ -1987,36 +2798,57 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter float listTop = listView.getY(); float listBottom = listView.getY() + listView.getMeasuredHeight(); - int count = listView.getChildCount(); - for (int num = 0; num < count; num++) { - View child = listView.getChildAt(num); - if (child != scrimView) { - continue; - } - - float viewClipLeft = Math.max(listView.getLeft(), listView.getLeft() + child.getX()); - float viewClipTop = Math.max(listTop, listView.getTop() + child.getY()); - float viewClipRight = Math.min(listView.getRight(), listView.getLeft() + child.getX() + child.getMeasuredWidth()); - float viewClipBottom = 
Math.min(listView.getY() + listView.getMeasuredHeight(), listView.getY() + child.getY() + scrimView.getClipHeight()); - - if (viewClipTop < viewClipBottom) { - if (child.getAlpha() != 1f) { - canvas.saveLayerAlpha(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom, (int) (255 * child.getAlpha()), Canvas.ALL_SAVE_FLAG); - } else { - canvas.save(); + if (hasScrimAnchorView) { + int count = listView.getChildCount(); + for (int num = 0; num < count; num++) { + View child = listView.getChildAt(num); + if (child != scrimView) { + continue; } - canvas.clipRect(viewClipLeft, viewClipTop, viewClipRight, getMeasuredHeight()); - canvas.translate(listView.getLeft() + child.getX(), listView.getY() + child.getY()); - float progress = scrimPaint.getAlpha() / 100.0f; - float pr = 1.0f - CubicBezierInterpolator.EASE_OUT.getInterpolation(1.0f - progress); - int h = (int) (scrimView.getMeasuredHeight() + (scrimView.getClipHeight() - scrimView.getMeasuredHeight()) * pr); - rect.set(0, 0, child.getMeasuredWidth(), h); - scrimView.setAboutVisibleProgress(listViewBackgroundPaint.getColor(), progress); - canvas.drawRoundRect(rect, AndroidUtilities.dp(13), AndroidUtilities.dp(13), listViewBackgroundPaint); - child.draw(canvas); - canvas.restore(); + float viewClipLeft = Math.max(listView.getLeft(), listView.getLeft() + child.getX()); + float viewClipTop = Math.max(listTop, listView.getY() + child.getY()); + float viewClipRight = Math.min(listView.getRight(), listView.getLeft() + child.getX() + child.getMeasuredWidth()); + float viewClipBottom = Math.min(listView.getY() + listView.getMeasuredHeight(), listView.getY() + child.getY() + scrimView.getClipHeight()); + + if (viewClipTop < viewClipBottom) { + if (child.getAlpha() != 1f) { + canvas.saveLayerAlpha(viewClipLeft, viewClipTop, viewClipRight, viewClipBottom, (int) (255 * child.getAlpha()), Canvas.ALL_SAVE_FLAG); + } else { + canvas.save(); + } + + canvas.clipRect(viewClipLeft, viewClipTop, viewClipRight, getMeasuredHeight()); + 
canvas.translate(listView.getLeft() + child.getX(), listView.getY() + child.getY()); + float progress = scrimPaint.getAlpha() / 100.0f; + float pr = 1.0f - CubicBezierInterpolator.EASE_OUT.getInterpolation(1.0f - progress); + int h = (int) (scrimView.getMeasuredHeight() + (scrimView.getClipHeight() - scrimView.getMeasuredHeight()) * pr); + rect.set(0, 0, child.getMeasuredWidth(), h); + scrimView.setAboutVisibleProgress(listViewBackgroundPaint.getColor(), progress); + canvas.drawRoundRect(rect, AndroidUtilities.dp(13), AndroidUtilities.dp(13), listViewBackgroundPaint); + child.draw(canvas); + canvas.restore(); + } } + } else if (scrimFullscreenView != null) { + canvas.save(); + float x = scrimFullscreenView.getX() + fullscreenUsersListView.getX() + renderersContainer.getX(); + float y = scrimFullscreenView.getY() + fullscreenUsersListView.getY() + renderersContainer.getY(); + canvas.translate(x, y); + if (scrimFullscreenView.getRenderer() != null && scrimFullscreenView.getRenderer().isAttached() && !scrimFullscreenView.getRenderer().showingInFullscreen) { + scrimFullscreenView.getRenderer().draw(canvas); + } else { + scrimFullscreenView.draw(canvas); + } + scrimFullscreenView.drawOverlays(canvas); + canvas.restore(); + } else if (scrimRenderer != null && scrimRenderer.isAttached()) { + canvas.save(); + float x = scrimRenderer.getX() + renderersContainer.getX(); + float y = scrimRenderer.getY() + renderersContainer.getY(); + canvas.translate(x, y); + scrimRenderer.draw(canvas); + canvas.restore(); } } } @@ -2024,14 +2856,27 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter @Override protected boolean drawChild(Canvas canvas, View child, long drawingTime) { - if (child == avatarPreviewContainer || child == scrimPopupLayout) { + if (!isTabletMode && renderersContainer.progressToFullscreenMode == 1f && (child == actionBar || child == actionBarShadow || child == actionBarBackground || child == titleTextView || child == 
menuItemsContainer)) { return true; } - if (contentFullyOverlayed) { + if (drawingForBlur && child == renderersContainer) { + canvas.save(); + canvas.translate(renderersContainer.getX() + fullscreenUsersListView.getX(), renderersContainer.getY() + fullscreenUsersListView.getY()); + fullscreenUsersListView.draw(canvas); + canvas.restore(); + return true; + } + if (child == avatarPreviewContainer || child == scrimPopupLayout || child == scrimView) { + return true; + } + if (contentFullyOverlayed && useBlur) { if (child == listView || child == buttonsContainer) { return true; } } + if (scrimFullscreenView == null && !drawingForBlur && avatarsPreviewShowed && (child == buttonsBackgroundGradientView2 || child == buttonsBackgroundGradientView || child == buttonsContainer || child == undoView[0] || child == undoView[1])) { + return true; + } return super.drawChild(canvas, child, drawingTime); } @@ -2051,6 +2896,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter containerView.setKeepScreenOn(true); containerView.setClipChildren(false); + if (schedulePeer != null) { scheduleStartInTextView = new SimpleTextView(context); scheduleStartInTextView.setGravity(Gravity.CENTER); @@ -2159,40 +3005,76 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter @Override protected void dispatchDraw(Canvas canvas) { - float maxBottom = 0; - float minTop = 0; + float inMaxBottom = 0; + float inMinTop = Float.MAX_VALUE; + + boolean animateBackground = itemAnimator.outMinTop != Float.MAX_VALUE; for (int a = 0, N = getChildCount(); a < N; a++) { View child = getChildAt(a); ViewHolder holder = findContainingViewHolder(child); - if (holder == null || holder.getItemViewType() == 3) { + if (holder == null || holder.getItemViewType() == 3 || holder.getItemViewType() == 4 || holder.getItemViewType() == 5) { continue; } - maxBottom = Math.max(maxBottom, child.getY() + child.getMeasuredHeight()); - if (a == 0) { - minTop = Math.max(0, 
child.getY()); + + if (animateBackground) { + if (!itemAnimator.removingHolders.contains(holder)) { + inMinTop = Math.min(inMinTop, Math.max(0, child.getTop())); + inMaxBottom = Math.max(inMaxBottom, child.getBottom()); + } } else { - minTop = Math.min(minTop, Math.max(0, child.getY())); + inMaxBottom = Math.max(inMaxBottom, child.getY() + child.getMeasuredHeight()); + inMinTop = Math.min(inMinTop, Math.max(0, child.getY())); } } - rect.set(0, minTop, getMeasuredWidth(), Math.min(getMeasuredHeight(), maxBottom)); - canvas.drawRoundRect(rect, AndroidUtilities.dp(13), AndroidUtilities.dp(13), listViewBackgroundPaint); + float minTop, maxBottom; + if (animateBackground) { + minTop = itemAnimator.outMinTop * (1f - itemAnimator.animationProgress) + inMinTop * (itemAnimator.animationProgress); + maxBottom = itemAnimator.outMaxBottom * (1f - itemAnimator.animationProgress) + inMaxBottom * (itemAnimator.animationProgress); + } else { + minTop = inMinTop; + maxBottom = inMaxBottom; + } + + if (inMinTop != Float.MAX_VALUE) { + int itemsWidth = AndroidUtilities.isTablet() ? 
Math.min(AndroidUtilities.dp(420), getMeasuredWidth()) : getMeasuredWidth(); + int padding = (getMeasuredWidth() - itemsWidth) >> 1; + rect.set(padding, minTop, getMeasuredWidth() - padding, Math.min(getMeasuredHeight() - getTranslationY(), maxBottom)); + canvas.drawRoundRect(rect, AndroidUtilities.dp(13), AndroidUtilities.dp(13), listViewBackgroundPaint); + } canvas.save(); canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight()); super.dispatchDraw(canvas); canvas.restore(); } + + @Override + public void setVisibility(int visibility) { + if (getVisibility() != visibility) { + for (int i = 0; i < getChildCount(); i++) { + View child = getChildAt(i); + if (child instanceof GroupCallGridCell) { + attachRenderer((GroupCallGridCell) child, visibility == View.VISIBLE); + } + } + } + super.setVisibility(visibility); + } + + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + super.onLayout(changed, l, t, r, b); + itemAnimator.updateBackgroundBeforeAnimation(); + } }; listView.setClipToPadding(false); listView.setClipChildren(false); - itemAnimator = new DefaultItemAnimator() { - @Override - protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { - listView.invalidate(); - updateLayout(true); - } - }; + itemAnimator = new GroupCallItemAnimator(); + itemAnimator.setTranslationInterpolator(CubicBezierInterpolator.DEFAULT); + itemAnimator.setRemoveDuration(TRANSITION_DURATION); + itemAnimator.setAddDuration(TRANSITION_DURATION); + itemAnimator.setMoveDuration(TRANSITION_DURATION); itemAnimator.setDelayAnimations(false); listView.setItemAnimator(itemAnimator); listView.setOnScrollListener(new RecyclerView.OnScrollListener() { @@ -2205,6 +3087,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter call.loadMembers(false); } updateLayout(true); + containerView.invalidate(); } @Override @@ -2227,19 +3110,73 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter 
reminderHintView.hide(); } } - scrolling = newState == RecyclerView.SCROLL_STATE_DRAGGING; } }); listView.setVerticalScrollBarEnabled(false); - listView.setLayoutManager(layoutManager = new FillLastLinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, false, 0, listView)); + listView.setLayoutManager(layoutManager = new FillLastGridLayoutManager(getContext(), isLandscapeMode ? 6 : 2, LinearLayoutManager.VERTICAL, false, 0, listView)); + layoutManager.setSpanSizeLookup(spanSizeLookup = new GridLayoutManager.SpanSizeLookup() { + @Override + public int getSpanSize(int position) { + int spanSize = isLandscapeMode ? 6 : 2; + if (!isTabletMode && position >= listAdapter.usersVideoGridStartRow && position < listAdapter.usersVideoGridEndRow) { + int spanCount = 1; + int size = listAdapter.usersVideoGridEndRow - listAdapter.usersVideoGridStartRow; + if (position == listAdapter.usersVideoGridEndRow - 1) { + if (isLandscapeMode) { + spanCount = 2; + } else if (size % 2 == 0) { + spanCount = 1; + } else { + spanCount = 2; + } + } + if (isLandscapeMode) { + if (size == 1) { + return 6; + } else if (size == 2) { + return 3; + } else { + return 2; + } + } else { + return spanCount; + } + } + return spanSize; + } + }); + listView.addItemDecoration(new RecyclerView.ItemDecoration() { + @Override + public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull RecyclerView parent, @NonNull RecyclerView.State state) { + int position = parent.getChildAdapterPosition(view); + if (position >= 0) { + outRect.setEmpty(); + if (position >= listAdapter.usersVideoGridStartRow && position < listAdapter.usersVideoGridEndRow) { + int userPosition = position - listAdapter.usersVideoGridStartRow; + int cellCount = isLandscapeMode ? 
6 : 2; + int index = userPosition % cellCount; + if (index == 0) { + outRect.right = AndroidUtilities.dp(2); + } else if (index == cellCount - 1) { + outRect.left = AndroidUtilities.dp(2); + } else { + // outRect.right = AndroidUtilities.dp(0.5f); + outRect.left = AndroidUtilities.dp(1f); + } + } + } + } + }); layoutManager.setBind(false); containerView.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT, 14, 14, 14, 231)); listView.setAdapter(listAdapter); listView.setTopBottomSelectorRadius(13); listView.setSelectorDrawableColor(Theme.getColor(Theme.key_voipgroup_listSelector)); listView.setOnItemClickListener((view, position, x, y) -> { - if (view instanceof GroupCallUserCell) { + if (view instanceof GroupCallGridCell) { + fullscreenFor(((GroupCallGridCell) view).getParticipant()); + } else if (view instanceof GroupCallUserCell) { GroupCallUserCell cell = (GroupCallUserCell) view; showMenuForCell(cell); } else if (view instanceof GroupCallInvitedCell) { @@ -2293,6 +3230,9 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } }); listView.setOnItemLongClickListener((view, position) -> { + if (view instanceof GroupCallGridCell) { + return showMenuForCell(view); + } if (view instanceof GroupCallUserCell) { updateItems(); GroupCallUserCell cell = (GroupCallUserCell) view; @@ -2301,33 +3241,275 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter return false; }); + tabletVideoGridView = new RecyclerListView(context); + containerView.addView(tabletVideoGridView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.TOP | Gravity.LEFT, 14, 14, TABLET_LIST_SIZE + 4, 14)); + tabletVideoGridView.setAdapter(tabletGridAdapter = new GroupCallTabletGridAdapter(groupCall, currentAccount, this)); + GridLayoutManager gridLayoutManager = new GridLayoutManager(context, 6, LinearLayoutManager.VERTICAL, false); + 
tabletVideoGridView.setLayoutManager(gridLayoutManager); + gridLayoutManager.setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup() { + @Override + public int getSpanSize(int position) { + return tabletGridAdapter.getSpanCount(position); + } + }); + + tabletVideoGridView.setOnItemClickListener((view, position) -> { + GroupCallGridCell cell = (GroupCallGridCell) view; + if (cell.getParticipant() != null) { + fullscreenFor(cell.getParticipant()); + } + }); + DefaultItemAnimator tabletGridItemAnimator = new DefaultItemAnimator(); + tabletGridItemAnimator.setDelayAnimations(false); + tabletGridItemAnimator.setTranslationInterpolator(CubicBezierInterpolator.DEFAULT); + tabletGridItemAnimator.setRemoveDuration(TRANSITION_DURATION); + tabletGridItemAnimator.setAddDuration(TRANSITION_DURATION); + tabletGridItemAnimator.setMoveDuration(TRANSITION_DURATION); + tabletGridItemAnimator = new DefaultItemAnimator() { + + @Override + protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { + listView.invalidate(); + renderersContainer.invalidate(); + containerView.invalidate(); + updateLayout(true); + } + }; + tabletVideoGridView.setItemAnimator(tabletGridItemAnimator); + tabletVideoGridView.setOnScrollListener(new RecyclerView.OnScrollListener() { + @Override + public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) { + super.onScrolled(recyclerView, dx, dy); + containerView.invalidate(); + } + }); + tabletGridAdapter.setVisibility(tabletVideoGridView, false, false); + tabletVideoGridView.setVisibility(View.GONE); + buttonsContainer = new FrameLayout(context) { + + AnimatorSet currentButtonsAnimation; + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (!isLandscapeMode) { + widthMeasureSpec = MeasureSpec.makeMeasureSpec(Math.min(AndroidUtilities.dp(460), MeasureSpec.getSize(widthMeasureSpec)), MeasureSpec.EXACTLY); + } + for (int i = 0; i < 2; i++) { + if (isLandscapeMode && !isTabletMode) { + 
muteLabel[i].getLayoutParams().width = (int) (MeasureSpec.getSize(widthMeasureSpec) / 0.68f); + } else { + muteLabel[i].getLayoutParams().width = LayoutHelper.WRAP_CONTENT; + } + } + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { int cw = AndroidUtilities.dp(122); int w = (getMeasuredWidth() - cw) / 2; int h = getMeasuredHeight(); - int x = (w - soundButton.getMeasuredWidth()) / 2; - int y = (h - leaveButton.getMeasuredHeight()) / 2 - AndroidUtilities.dp(9); - soundButton.layout(x, y, x + soundButton.getMeasuredWidth(), y + soundButton.getMeasuredHeight()); - - x = getMeasuredWidth() - w + (w - leaveButton.getMeasuredWidth()) / 2; - leaveButton.layout(x, y, x + leaveButton.getMeasuredWidth(), y + leaveButton.getMeasuredHeight()); - - x = (getMeasuredWidth() - muteButton.getMeasuredWidth()) / 2; - y = (h - muteButton.getMeasuredHeight()) / 2 - AndroidUtilities.dp(18); - muteButton.layout(x, y, x + muteButton.getMeasuredWidth(), y + muteButton.getMeasuredHeight()); - - for (int a = 0; a < 2; a++) { - x = (getMeasuredWidth() - muteLabel[a].getMeasuredWidth()) / 2; - y = h - AndroidUtilities.dp(35) - muteLabel[a].getMeasuredHeight(); - muteLabel[a].layout(x, y, x + muteLabel[a].getMeasuredWidth(), y + muteLabel[a].getMeasuredHeight()); - - x = (getMeasuredWidth() - muteSubLabel[a].getMeasuredWidth()) / 2; - y = h - AndroidUtilities.dp(17) - muteSubLabel[a].getMeasuredHeight(); - muteSubLabel[a].layout(x, y, x + muteSubLabel[a].getMeasuredWidth(), y + muteSubLabel[a].getMeasuredHeight()); + int buttonsCount = 5; + if (cameraButton.getVisibility() != View.VISIBLE) { + buttonsCount--; } + if (soundButton.getVisibility() != View.VISIBLE) { + buttonsCount--; + } + if (flipButton.getVisibility() != View.VISIBLE) { + buttonsCount--; + } + + if (isLandscapeMode && !isTabletMode) { + int part = getMeasuredHeight() / buttonsCount; + + int y, x; + + if 
(soundButton.getVisibility() == View.VISIBLE) { + y = part / 2 - cameraButton.getMeasuredHeight() / 2; + x = (getMeasuredWidth() - cameraButton.getMeasuredWidth()) >> 1; + cameraButton.layout(x, y, x + cameraButton.getMeasuredWidth(), y + cameraButton.getMeasuredHeight()); + + int partOffset = buttonsCount == 4 ? part : 0; + y = part / 2 + partOffset - soundButton.getMeasuredHeight() / 2; + x = (getMeasuredWidth() - soundButton.getMeasuredWidth()) >> 1; + soundButton.layout(x, y, x + soundButton.getMeasuredWidth(), y + soundButton.getMeasuredHeight()); + } else { + y = part / 2 - flipButton.getMeasuredHeight() / 2; + x = (getMeasuredWidth() - flipButton.getMeasuredWidth()) >> 1; + flipButton.layout(x, y, x + flipButton.getMeasuredWidth(), y + flipButton.getMeasuredHeight()); + + int partOffset = buttonsCount == 4 ? part : 0; + y = part / 2 + partOffset - cameraButton.getMeasuredHeight() / 2; + x = (getMeasuredWidth() - cameraButton.getMeasuredWidth()) >> 1; + cameraButton.layout(x, y, x + cameraButton.getMeasuredWidth(), y + cameraButton.getMeasuredHeight()); + } + + int partOffset = buttonsCount == 4 ? part * 3 : part * 2; + y = part / 2 + partOffset - leaveButton.getMeasuredHeight() / 2; + x = (getMeasuredWidth() - leaveButton.getMeasuredWidth()) >> 1; + leaveButton.layout(x, y, x + leaveButton.getMeasuredWidth(), y + leaveButton.getMeasuredHeight()); + + partOffset = buttonsCount == 4 ? 
part * 2 : part; + y = part / 2 + partOffset - muteButton.getMeasuredWidth() / 2 - AndroidUtilities.dp(4); + x = (getMeasuredWidth() - muteButton.getMeasuredWidth()) >> 1; + if (buttonsCount == 3) { + y -= AndroidUtilities.dp(6); + } + muteButton.layout(x, y, x + muteButton.getMeasuredWidth(), y + muteButton.getMeasuredHeight()); + + float muteButtonScale = AndroidUtilities.dp(52) / (float) (muteButton.getMeasuredWidth() - AndroidUtilities.dp(8)); + muteButton.animate().cancel(); + muteButton.setScaleX(muteButtonScale); + muteButton.setScaleY(muteButtonScale); + + for (int a = 0; a < 2; a++) { + x = (getMeasuredWidth() - muteLabel[a].getMeasuredWidth()) >> 1; + partOffset = buttonsCount == 4 ? part * 2 : part; + + y = part / 2 + partOffset - muteButton.getMeasuredWidth() / 2 - AndroidUtilities.dp(4); + if (buttonsCount == 3) { + y -= AndroidUtilities.dp(6); + } + y += muteButton.getMeasuredWidth() * 0.687f + AndroidUtilities.dp(4); + if (y + muteLabel[a].getMeasuredHeight() > partOffset + part) { + y -= AndroidUtilities.dp(4); + } + + muteLabel[a].layout(x, y, x + muteLabel[a].getMeasuredWidth(), y + muteLabel[a].getMeasuredHeight()); + muteLabel[a].setScaleX(0.687f); + muteLabel[a].setScaleY(0.687f); + } + } else if (renderersContainer.inFullscreenMode && !isTabletMode) { + + int part = getMeasuredWidth() / buttonsCount; + + int x, y; + if (soundButton.getVisibility() == View.VISIBLE) { + x = part / 2 - cameraButton.getMeasuredWidth() / 2; + y = getMeasuredHeight() - cameraButton.getMeasuredHeight(); + cameraButton.layout(x, y, x + cameraButton.getMeasuredWidth(), y + cameraButton.getMeasuredHeight()); + + int partOffset = buttonsCount == 4 ? part : 0; + x = part / 2 + partOffset - leaveButton.getMeasuredWidth() / 2; + y = getMeasuredHeight() - soundButton.getMeasuredHeight(); + soundButton.layout(x, y, x + soundButton.getMeasuredWidth(), y + soundButton.getMeasuredHeight()); + } else { + int partOffset = buttonsCount == 4 ? 
part : 0; + x = part / 2 + partOffset - cameraButton.getMeasuredWidth() / 2; + y = getMeasuredHeight() - cameraButton.getMeasuredHeight(); + cameraButton.layout(x, y, x + cameraButton.getMeasuredWidth(), y + cameraButton.getMeasuredHeight()); + x = part / 2 - flipButton.getMeasuredWidth() / 2; + y = getMeasuredHeight() - flipButton.getMeasuredHeight(); + flipButton.layout(x, y, x + flipButton.getMeasuredWidth(), y + flipButton.getMeasuredHeight()); + } + + int partOffset = buttonsCount == 4 ? part * 3 : part * 2; + x = part / 2 + partOffset - leaveButton.getMeasuredWidth() / 2; + y = getMeasuredHeight() - leaveButton.getMeasuredHeight(); + leaveButton.layout(x, y, x + leaveButton.getMeasuredWidth(), y + leaveButton.getMeasuredHeight()); + + + partOffset = buttonsCount == 4 ? part * 2 : part; + x = part / 2 + partOffset - muteButton.getMeasuredWidth() / 2; + y = getMeasuredHeight() - leaveButton.getMeasuredHeight() - (muteButton.getMeasuredWidth() - AndroidUtilities.dp(52)) / 2; + muteButton.layout(x, y, x + muteButton.getMeasuredWidth(), y + muteButton.getMeasuredHeight()); + + float muteButtonScale = AndroidUtilities.dp(52) / (float) (muteButton.getMeasuredWidth() - AndroidUtilities.dp(8)); + muteButton.animate().scaleX(muteButtonScale).scaleY(muteButtonScale).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + + for (int a = 0; a < 2; a++) { + partOffset = buttonsCount == 4 ? 
part * 2 : part; + x = partOffset + (part - muteLabel[a].getMeasuredWidth()) / 2; + y = h - AndroidUtilities.dp(27); + muteLabel[a].layout(x, y, x + muteLabel[a].getMeasuredWidth(), y + muteLabel[a].getMeasuredHeight()); + muteLabel[a].animate().scaleX(0.687f).scaleY(0.687f).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } + } else { + int x, y; + int buttonsYOffset = AndroidUtilities.dp(0); + if (soundButton.getVisibility() == View.VISIBLE) { + if (cameraButton.getVisibility() == View.VISIBLE) { + x = (w - cameraButton.getMeasuredWidth()) / 2; + y = (h - cameraButton.getMeasuredHeight()) / 2;// - AndroidUtilities.dp(32); + cameraButton.layout(x, y, x + cameraButton.getMeasuredWidth(), y + cameraButton.getMeasuredHeight()); + + x = (w - soundButton.getMeasuredWidth()) / 2; + y = (h - leaveButton.getMeasuredHeight()) / 2;// + AndroidUtilities.dp(32); + soundButton.layout(x, y, x + soundButton.getMeasuredWidth(), y + soundButton.getMeasuredHeight()); + } else { + x = (w - soundButton.getMeasuredWidth()) / 2; + y = (h - soundButton.getMeasuredHeight()) / 2; + soundButton.layout(x, y, x + soundButton.getMeasuredWidth(), y + soundButton.getMeasuredHeight()); + } + } else { + int offset = flipButton.getVisibility() == View.VISIBLE ? 
AndroidUtilities.dp(28) : 0; + x = (w - flipButton.getMeasuredWidth()) / 2; + y = (h - flipButton.getMeasuredHeight()) / 2 + buttonsYOffset - offset; + flipButton.layout(x, y, x + flipButton.getMeasuredWidth(), y + flipButton.getMeasuredHeight()); + + x = (w - cameraButton.getMeasuredWidth()) / 2; + y = (h - cameraButton.getMeasuredHeight()) / 2 + buttonsYOffset + offset; + cameraButton.layout(x, y, x + cameraButton.getMeasuredWidth(), y + cameraButton.getMeasuredHeight()); + } + + y = (h - leaveButton.getMeasuredHeight()) / 2 + buttonsYOffset; + x = getMeasuredWidth() - w + (w - leaveButton.getMeasuredWidth()) / 2; + leaveButton.layout(x, y, x + leaveButton.getMeasuredWidth(), y + leaveButton.getMeasuredHeight()); + + x = (getMeasuredWidth() - muteButton.getMeasuredWidth()) / 2; + y = (h - muteButton.getMeasuredHeight()) / 2 - AndroidUtilities.dp(9); + muteButton.layout(x, y, x + muteButton.getMeasuredWidth(), y + muteButton.getMeasuredHeight()); + + muteButton.animate().setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).scaleX(1f).scaleY(1f).start(); + + for (int a = 0; a < 2; a++) { + x = (getMeasuredWidth() - muteLabel[a].getMeasuredWidth()) / 2; + y = h - AndroidUtilities.dp(12) - muteLabel[a].getMeasuredHeight(); + muteLabel[a].layout(x, y, x + muteLabel[a].getMeasuredWidth(), y + muteLabel[a].getMeasuredHeight()); + muteLabel[a].animate().scaleX(1f).scaleY(1f).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } + } + + if (animateButtonsOnNextLayout) { + AnimatorSet animatorSet = new AnimatorSet(); + boolean hasAnimation = false; + for (int i = 0; i < getChildCount(); i++) { + View child = getChildAt(i); + Float fromX = buttonsAnimationParamsX.get(child); + Float fromY = buttonsAnimationParamsY.get(child); + if (fromX != null && fromY != null) { + hasAnimation = true; + animatorSet.playTogether(ObjectAnimator.ofFloat(child, TRANSLATION_X, fromX - child.getLeft(), 0)); + 
animatorSet.playTogether(ObjectAnimator.ofFloat(child, TRANSLATION_Y, fromY - child.getTop(), 0)); + } + } + if (hasAnimation) { + if (currentButtonsAnimation != null) { + currentButtonsAnimation.removeAllListeners(); + currentButtonsAnimation.cancel(); + } + currentButtonsAnimation = animatorSet; + animatorSet.setDuration(TRANSITION_DURATION); + animatorSet.setInterpolator(CubicBezierInterpolator.DEFAULT); + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + currentButtonsAnimation = null; + for (int i = 0; i < getChildCount(); i++) { + View child = getChildAt(i); + child.setTranslationX(0); + child.setTranslationY(0); + } + } + }); + animatorSet.start(); + } + buttonsAnimationParamsX.clear(); + buttonsAnimationParamsY.clear(); + } + animateButtonsOnNextLayout = false; } final OvershootInterpolator overshootInterpolator = new OvershootInterpolator(1.5f); @@ -2336,7 +3518,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter @SuppressLint("DrawAllocation") @Override protected void dispatchDraw(Canvas canvas) { - if (contentFullyOverlayed) { + if (contentFullyOverlayed && useBlur) { return; } int offset = (getMeasuredWidth() - getMeasuredHeight()) / 2; @@ -2349,7 +3531,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } if (currentState != null) { - currentState.update(0, offset, getMeasuredHeight(), dt); + currentState.update(0, offset, getMeasuredHeight(), dt, amplitude); } tinyWaveDrawable.minRadius = AndroidUtilities.dp(62); @@ -2369,7 +3551,6 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter amplitude = animateToAmplitude; } } - invalidate(); } boolean canSwitchProgress = true; @@ -2397,7 +3578,6 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } } invalidateColors = true; - invalidate(); } if (invalidateColors && currentState != null) { @@ -2427,6 +3607,8 
@@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } soundButton.setBackgroundColor(soundButtonColor, soundButtonColorChecked); + cameraButton.setBackgroundColor(soundButtonColor, soundButtonColorChecked); + flipButton.setBackgroundColor(soundButtonColor, soundButtonColorChecked); } boolean showWaves = false; @@ -2441,20 +3623,17 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (showWavesProgress < 0f) { showWavesProgress = 0f; } - invalidate(); } else { if (showWaves && showWavesProgress != 1f) { showWavesProgress += dt / 350f; if (showWavesProgress > 1f) { showWavesProgress = 1f; } - invalidate(); } else if (!showWaves && showWavesProgress != 0) { showWavesProgress -= dt / 350f; if (showWavesProgress < 0f) { showWavesProgress = 0f; } - invalidate(); } } @@ -2463,13 +3642,11 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (showLightingProgress > 1f) { showLightingProgress = 1f; } - invalidate(); } else if (!showLighting && showLightingProgress != 0) { showLightingProgress -= dt / 350f; if (showLightingProgress < 0f) { showLightingProgress = 0f; } - invalidate(); } } @@ -2492,8 +3669,8 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter paintTmp.setColor(AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled), Theme.getColor(Theme.key_voipgroup_disabledButton), colorProgress, 1.0f)); - int cx = muteButton.getLeft() + muteButton.getMeasuredWidth() / 2; - int cy = muteButton.getTop() + muteButton.getMeasuredHeight() / 2; + int cx = (int) (muteButton.getX() + muteButton.getMeasuredWidth() / 2); + int cy = (int) (muteButton.getY() + muteButton.getMeasuredHeight() / 2); radialMatrix.setTranslate(cx, cy); radialGradient.setLocalMatrix(radialMatrix); @@ -2505,7 +3682,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } canvas.save(); - 
canvas.scale(BlobDrawable.GLOBAL_SCALE, BlobDrawable.GLOBAL_SCALE, cx, cy); + canvas.scale(BlobDrawable.GLOBAL_SCALE * muteButton.getScaleX(), BlobDrawable.GLOBAL_SCALE * muteButton.getScaleY(), cx, cy); canvas.save(); float scale = BlobDrawable.SCALE_BIG_MIN + BlobDrawable.SCALE_BIG * amplitude * 0.5f; @@ -2548,7 +3725,6 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter radialProgressView.draw(canvas, cx, cy); } canvas.restore(); - invalidate(); } else { for (int i = 0; i < 2; i++) { float alpha; @@ -2559,7 +3735,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (prevState.currentState == MUTE_BUTTON_STATE_CONNECTING) { buttonRadius -= alpha * AndroidUtilities.dp(2); } - } else if (i == 1) { + } else if (i == 1 && currentState != null) { paint.setShader(currentState.shader); alpha = switchProgress; if (currentState.currentState == MUTE_BUTTON_STATE_CONNECTING) { @@ -2572,8 +3748,8 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter paint.setColor(AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled), Theme.getColor(Theme.key_voipgroup_disabledButton), colorProgress, 1.0f)); } - int cx = muteButton.getLeft() + muteButton.getMeasuredWidth() / 2; - int cy = muteButton.getTop() + muteButton.getMeasuredHeight() / 2; + int cx = (int) (muteButton.getX() + muteButton.getMeasuredWidth() / 2); + int cy = (int) (muteButton.getY() + muteButton.getMeasuredHeight() / 2); radialMatrix.setTranslate(cx, cy); radialGradient.setLocalMatrix(radialMatrix); @@ -2589,11 +3765,17 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } canvas.save(); - canvas.scale(BlobDrawable.GLOBAL_SCALE, BlobDrawable.GLOBAL_SCALE, cx, cy); + canvas.scale(BlobDrawable.GLOBAL_SCALE * muteButton.getScaleX(), BlobDrawable.GLOBAL_SCALE * muteButton.getScaleX(), cx, cy); canvas.save(); - float translation = AndroidUtilities.dp(89) * 
(1.0f - switchToButtonInt2); - cy += translation; + + float translation; + if (isLandscapeMode) { + translation = 0; + } else { + translation = AndroidUtilities.dp(65) * (1.0f - switchToButtonInt2); + } + // cy += translation; float scale = BlobDrawable.SCALE_BIG_MIN + BlobDrawable.SCALE_BIG * amplitude * 0.5f; canvas.scale(scale * showLightingProgress, scale * showLightingProgress, cx, cy); if (i == 1) { @@ -2601,7 +3783,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter canvas.save(); canvas.scale(scaleLight, scaleLight, cx, cy); int a = radialPaint.getAlpha(); - radialPaint.setAlpha((int) (a * switchToButtonProgress)); + radialPaint.setAlpha((int) (a * switchToButtonProgress * (1f - progressToHideUi))); canvas.drawCircle(cx, cy, AndroidUtilities.dp(160), radialPaint); radialPaint.setAlpha(a); canvas.restore(); @@ -2610,7 +3792,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (switchToButtonProgress > 0) { canvas.save(); scale = BlobDrawable.SCALE_BIG_MIN + BlobDrawable.SCALE_BIG * amplitude * showWavesProgressInterpolated * scheduleButtonsScale; - canvas.scale(scale, scale, cx, cy); + canvas.scale(scale, scale, cx, cy); bigWaveDrawable.draw(cx, cy, canvas, paint); canvas.restore(); @@ -2620,35 +3802,905 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter tinyWaveDrawable.draw(cx, cy, canvas, paint); canvas.restore(); } - if (i == 0) { - paint.setAlpha(255); + + if (isLandscapeMode) { + if (i == 0) { + paint.setAlpha((int) (255 * switchToButtonInt2)); + } else { + paint.setAlpha((int) (255 * alpha * switchToButtonInt2)); + } } else { - paint.setAlpha((int) (255 * alpha)); + if (i == 0) { + paint.setAlpha(255); + } else { + paint.setAlpha((int) (255 * alpha)); + } } - muteButton.setTranslationY(translation); + if (currentButtonsAnimation == null) { + muteButton.setTranslationY(translation); + } + float switchButtonProgrss = isLandscapeMode ? 
1f : switchToButtonInt2; float startX = getMeasuredWidth() / 2 - AndroidUtilities.dp(21); float startY = AndroidUtilities.dp(24); - float w = (startX + (buttonRadius - startX) * switchToButtonInt2) * scheduleButtonsScale; - float h = (startY + (buttonRadius - startY) * switchToButtonInt2) * scheduleButtonsScale; - rect.set(cx - w, cy - h, cx + w, cy + h); - float rad = AndroidUtilities.dp(4) + (buttonRadius - AndroidUtilities.dp(4)) * switchToButtonInt2; - canvas.drawRoundRect(rect, rad, rad, paint); + float w = (startX + (buttonRadius - startX) * switchButtonProgrss) * scheduleButtonsScale; + float h = (startY + (buttonRadius - startY) * switchButtonProgrss) * scheduleButtonsScale; - canvas.restore(); + rect.set(cx - w, cy - h, cx + w, cy + h); + float rad = AndroidUtilities.dp(4) + (buttonRadius - AndroidUtilities.dp(4)) * switchButtonProgrss; + canvas.drawRoundRect(rect, rad, rad, paint); if (i == 1 && currentState.currentState == MUTE_BUTTON_STATE_CONNECTING) { radialProgressView.draw(canvas, cx, cy); } + + canvas.restore(); + + if (isLandscapeMode && switchToButtonInt2 == 0) { + paint.setAlpha((int) (255)); + float x = scheduleButtonTextView.getX() - getX(); + float y = scheduleButtonTextView.getY() - getY(); + rect.set(x, y, x + scheduleButtonTextView.getMeasuredWidth(), y + scheduleButtonTextView.getMeasuredHeight()); + canvas.drawRoundRect(rect, AndroidUtilities.dp(4), AndroidUtilities.dp(4), paint); + } + } - invalidate(); } super.dispatchDraw(canvas); + if (!renderersContainer.isAnimating()) { + invalidate(); + } } + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == muteButton && child.getScaleX() != 1f) { + canvas.save(); + float s = 1f / muteButton.getScaleX(); + s = 1f + (s - 1f) * 0.2f; + canvas.scale(s, s, child.getX() + child.getMeasuredWidth() / 2f, child.getY() + child.getMeasuredHeight() / 2f); + boolean b = super.drawChild(canvas, child, drawingTime); + canvas.restore(); + return b; + } else { 
+ return super.drawChild(canvas, child, drawingTime); + } + } }; + + int color = Theme.getColor(Theme.key_voipgroup_unmuteButton2); + int r = Color.red(color); + int g = Color.green(color); + int b = Color.blue(color); + radialMatrix = new Matrix(); + radialGradient = new RadialGradient(0, 0, AndroidUtilities.dp(160), new int[]{Color.argb(50, r, g, b), Color.argb(0, r, g, b)}, null, Shader.TileMode.CLAMP); + radialPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + radialPaint.setShader(radialGradient); + + tinyWaveDrawable = new BlobDrawable(9); + bigWaveDrawable = new BlobDrawable(12); + + tinyWaveDrawable.minRadius = AndroidUtilities.dp(62); + tinyWaveDrawable.maxRadius = AndroidUtilities.dp(72); + tinyWaveDrawable.generateBlob(); + + bigWaveDrawable.minRadius = AndroidUtilities.dp(65); + bigWaveDrawable.maxRadius = AndroidUtilities.dp(75); + bigWaveDrawable.generateBlob(); + + tinyWaveDrawable.paint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_unmuteButton), (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); + bigWaveDrawable.paint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_unmuteButton), (int) (255 * WaveDrawable.CIRCLE_ALPHA_1))); + + soundButton = new VoIPToggleButton(context); + soundButton.setCheckable(true); + soundButton.setTextSize(12); + buttonsContainer.addView(soundButton, LayoutHelper.createFrame(68, 80)); + soundButton.setOnClickListener(v -> { + if (call == null || call.isScheduled()) { + getLink(false); + return; + } + if (VoIPService.getSharedInstance() == null) { + return; + } + VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(getContext(), false); + }); + + cameraButton = new VoIPToggleButton(context); + cameraButton.setCheckable(true); + cameraButton.setTextSize(12); + cameraButton.showText(false, false); + cameraButton.setCrossOffset(-AndroidUtilities.dpf2(3.5f)); + cameraButton.setVisibility(View.GONE); + buttonsContainer.addView(cameraButton, LayoutHelper.createFrame(68, 80)); + 
+ flipButton = new VoIPToggleButton(context); + flipButton.setCheckable(true); + flipButton.setTextSize(12); + flipButton.showText(false, false); + RLottieImageView flipIconView = new RLottieImageView(context); + flipButton.addView(flipIconView, LayoutHelper.createFrame(32, 32, 0, 18, 10, 18, 0)); + flipIcon = new RLottieDrawable(R.raw.camera_flip, "" + R.raw.camera_flip, AndroidUtilities.dp(24), AndroidUtilities.dp(24), true, null); + flipIconView.setAnimation(flipIcon); + flipButton.setOnClickListener(view -> { + renderersContainer.delayHideUi(); + VoIPService service = VoIPService.getSharedInstance(); + if (service != null && service.getVideoState(false) == Instance.VIDEO_STATE_ACTIVE) { + service.switchCamera(); + if (flipIconCurrentEndFrame == 18) { + flipIcon.setCustomEndFrame(flipIconCurrentEndFrame = 39); + flipIcon.start(); + } else { + flipIcon.setCurrentFrame(0, false); + flipIcon.setCustomEndFrame(flipIconCurrentEndFrame = 18); + flipIcon.start(); + } + + for (int i = 0; i < attachedRenderers.size(); i++) { + GroupCallMiniTextureView renderer = attachedRenderers.get(i); + if (renderer.participant.participant.self && !renderer.participant.presentation) { + renderer.startFlipAnimation(); + } + } + } + }); + flipButton.setVisibility(View.GONE); + buttonsContainer.addView(flipButton, LayoutHelper.createFrame(68, 80)); + + leaveButton = new VoIPToggleButton(context); + leaveButton.setDrawBackground(false); + leaveButton.setTextSize(12); + leaveButton.setData(R.drawable.calls_decline, 0xffffffff, Theme.getColor(Theme.key_voipgroup_leaveButton), 0.3f, false, LocaleController.getString("VoipGroupLeave", R.string.VoipGroupLeave), false, false); + buttonsContainer.addView(leaveButton, LayoutHelper.createFrame(68, 80)); + leaveButton.setOnClickListener(v -> { + renderersContainer.delayHideUi(); + if (call == null || call.isScheduled()) { + dismiss(); + return; + } + updateItems(); + onLeaveClick(context, this::dismiss, false); + }); + + muteButton = new 
RLottieImageView(context) { + + @Override + public boolean onTouchEvent(MotionEvent event) { + //pinnedVideoView.delayHideUi(); + if (event.getAction() == MotionEvent.ACTION_DOWN && muteButtonState == MUTE_BUTTON_STATE_UNMUTE && call != null) { + AndroidUtilities.runOnUIThread(pressRunnable, 300); + scheduled = true; + } else if (event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) { + if (scheduled) { + AndroidUtilities.cancelRunOnUIThread(pressRunnable); + scheduled = false; + } else if (pressed) { + AndroidUtilities.cancelRunOnUIThread(unmuteRunnable); + updateMuteButton(MUTE_BUTTON_STATE_UNMUTE, true); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setMicMute(true, true, false); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + attachedRenderersTmp.get(i).updateAttachState(true); + } + pressed = false; + MotionEvent cancel = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); + super.onTouchEvent(cancel); + cancel.recycle(); + return true; + } + } + return super.onTouchEvent(event); + } + + @Override + public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { + super.onInitializeAccessibilityNodeInfo(info); + + info.setClassName(Button.class.getName()); + info.setEnabled(muteButtonState == MUTE_BUTTON_STATE_UNMUTE || muteButtonState == MUTE_BUTTON_STATE_MUTE); + + if (muteButtonState == MUTE_BUTTON_STATE_MUTE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + info.addAction(new AccessibilityNodeInfo.AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, LocaleController.getString("VoipMute", R.string.VoipMute))); + } + } + }; + muteButton.setAnimation(bigMicDrawable); + muteButton.setScaleType(ImageView.ScaleType.CENTER); + 
buttonsContainer.addView(muteButton, LayoutHelper.createFrame(122, 122, Gravity.CENTER_HORIZONTAL | Gravity.TOP)); + muteButton.setOnClickListener(new View.OnClickListener() { + + Runnable finishRunnable = new Runnable() { + @Override + public void run() { + muteButton.setAnimation(bigMicDrawable); + playingHandAnimation = false; + } + }; + + @Override + public void onClick(View v) { + if (call == null || muteButtonState == MUTE_BUTTON_STATE_CONNECTING) { + return; + } + if (muteButtonState == MUTE_BUTTON_STATE_START_NOW) { + if (startingGroupCall) { + return; + } + v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + startingGroupCall = true; + TLRPC.TL_phone_startScheduledGroupCall req = new TLRPC.TL_phone_startScheduledGroupCall(); + req.call = call.getInputGroupCall(); + accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> { + if (response != null) { + accountInstance.getMessagesController().processUpdates((TLRPC.Updates) response, false); + } + }); + } else if (muteButtonState == MUTE_BUTTON_STATE_CANCEL_REMINDER || muteButtonState == MUTE_BUTTON_STATE_SET_REMINDER) { + if (muteButtonState == MUTE_BUTTON_STATE_SET_REMINDER) { + if (reminderHintView != null) { + reminderHintView.hide(); + } + } + TLRPC.TL_phone_toggleGroupCallStartSubscription req = new TLRPC.TL_phone_toggleGroupCallStartSubscription(); + req.call = call.getInputGroupCall(); + call.call.schedule_start_subscribed = !call.call.schedule_start_subscribed; + req.subscribed = call.call.schedule_start_subscribed; + accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> { + if (response != null) { + accountInstance.getMessagesController().processUpdates((TLRPC.Updates) response, false); + } + }); + updateMuteButton(call.call.schedule_start_subscribed ? 
MUTE_BUTTON_STATE_CANCEL_REMINDER : MUTE_BUTTON_STATE_SET_REMINDER, true); + } else { + if (VoIPService.getSharedInstance() == null || isStillConnecting()) { + return; + } + if (muteButtonState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN || muteButtonState == MUTE_BUTTON_STATE_RAISED_HAND) { + if (playingHandAnimation) { + return; + } + playingHandAnimation = true; + AndroidUtilities.shakeView(muteLabel[0], 2, 0); + v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + int num = Utilities.random.nextInt(100); + int endFrame; + int startFrame; + if (num < 32) { + startFrame = 0; + endFrame = 120; + } else if (num < 64) { + startFrame = 120; + endFrame = 240; + } else if (num < 97) { + startFrame = 240; + endFrame = 420; + } else if (num == 98) { + startFrame = 420; + endFrame = 540; + } else { + startFrame = 540; + endFrame = 720; + } + handDrawables.setCustomEndFrame(endFrame); + handDrawables.setOnFinishCallback(finishRunnable, endFrame - 1); + muteButton.setAnimation(handDrawables); + handDrawables.setCurrentFrame(startFrame); + muteButton.playAnimation(); + if (muteButtonState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { + TLRPC.TL_groupCallParticipant participant = call.participants.get(MessageObject.getPeerId(selfPeer)); + TLObject object; + int peerId = MessageObject.getPeerId(participant.peer); + if (peerId > 0) { + object = accountInstance.getMessagesController().getUser(peerId); + } else { + object = accountInstance.getMessagesController().getChat(-peerId); + } + VoIPService.getSharedInstance().editCallMember(object, null, null, null, true, null); + updateMuteButton(MUTE_BUTTON_STATE_RAISED_HAND, true); + } + } else if (muteButtonState == MUTE_BUTTON_STATE_UNMUTE) { + updateMuteButton(MUTE_BUTTON_STATE_MUTE, true); + VoIPService.getSharedInstance().setMicMute(false, false, true); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } 
else { + updateMuteButton(MUTE_BUTTON_STATE_UNMUTE, true); + VoIPService.getSharedInstance().setMicMute(true, false, true); + muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); + } + } + } + }); + + radialProgressView = new RadialProgressView(context); + radialProgressView.setSize(AndroidUtilities.dp(110)); + radialProgressView.setStrokeWidth(4); + radialProgressView.setProgressColor(Theme.getColor(Theme.key_voipgroup_connectingProgress)); + + for (int a = 0; a < 2; a++) { + muteLabel[a] = new TextView(context); + muteLabel[a].setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + muteLabel[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); + muteLabel[a].setGravity(Gravity.CENTER_HORIZONTAL); + buttonsContainer.addView(muteLabel[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM, 0, 0, 0, 26)); + } + + // buttonsContainer.addView(muteLabelContainer, LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT); + + actionBar.setAlpha(0.0f); + actionBar.getBackButton().setScaleX(0.9f); + actionBar.getBackButton().setScaleY(0.9f); + actionBar.getBackButton().setTranslationX(-AndroidUtilities.dp(14)); + + actionBar.getTitleTextView().setTranslationY(AndroidUtilities.dp(23)); + actionBar.getSubtitleTextView().setTranslationY(AndroidUtilities.dp(20)); + actionBar.getAdditionalSubtitleTextView().setTranslationY(AndroidUtilities.dp(20)); + + otherItem = new ActionBarMenuItem(context, null, 0, Theme.getColor(Theme.key_voipgroup_actionBarItems)); + otherItem.setLongClickEnabled(false); + otherItem.setIcon(R.drawable.ic_ab_other); + otherItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); + otherItem.setSubMenuOpenSide(2); + otherItem.setDelegate(id -> actionBar.getActionBarMenuOnItemClick().onItemClick(id)); + 
otherItem.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_voipgroup_actionBarItemsSelector), 6)); + otherItem.setOnClickListener(v -> { + if (call == null || renderersContainer.inFullscreenMode) { + return; + } + if (call.call.join_muted) { + everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + everyoneItem.setChecked(false); + adminItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); + adminItem.setChecked(true); + } else { + everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); + everyoneItem.setChecked(true); + adminItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + adminItem.setChecked(false); + } + changingPermissions = false; + otherItem.hideSubItem(eveyone_can_speak_item); + otherItem.hideSubItem(admin_can_speak_item); + + if (VoIPService.getSharedInstance() != null && soundButton.getVisibility() != View.VISIBLE && (VoIPService.getSharedInstance().hasEarpiece() || VoIPService.getSharedInstance().isBluetoothHeadsetConnected())) { + soundItem.setVisibility(View.VISIBLE); + int rout = VoIPService.getSharedInstance().getCurrentAudioRoute(); + if (rout == VoIPService.AUDIO_ROUTE_BLUETOOTH) { + soundItem.setIcon(R.drawable.msg_voice_bluetooth); + soundItem.setSubtext(VoIPService.getSharedInstance().currentBluetoothDeviceName != null ? VoIPService.getSharedInstance().currentBluetoothDeviceName : LocaleController.getString("VoipAudioRoutingBluetooth", R.string.VoipAudioRoutingBluetooth)); + } else if (rout == VoIPService.AUDIO_ROUTE_EARPIECE) { + soundItem.setIcon(VoIPService.getSharedInstance().isHeadsetPlugged() ? R.drawable.msg_voice_headphones : R.drawable.msg_voice_phone); + soundItem.setSubtext(VoIPService.getSharedInstance().isHeadsetPlugged() ? 
LocaleController.getString("VoipAudioRoutingHeadset", R.string.VoipAudioRoutingHeadset) : LocaleController.getString("VoipAudioRoutingPhone", R.string.VoipAudioRoutingPhone)); + } else if (rout == VoIPService.AUDIO_ROUTE_SPEAKER) { + AudioManager am = (AudioManager) context.getSystemService(AUDIO_SERVICE); + if (am.isSpeakerphoneOn()) { + soundItem.setIcon(R.drawable.msg_voice_speaker); + soundItem.setSubtext(LocaleController.getString("VoipAudioRoutingSpeaker", R.string.VoipAudioRoutingSpeaker)); + } else { + soundItem.setIcon(R.drawable.msg_voice_phone); + soundItem.setSubtext(LocaleController.getString("VoipAudioRoutingPhone", R.string.VoipAudioRoutingPhone)); + } + } + + } else { + soundItem.setVisibility(View.GONE); + } + updateItems(); + otherItem.toggleSubMenu(); + }); + otherItem.setPopupItemsColor(Theme.getColor(Theme.key_voipgroup_actionBarItems), false); + otherItem.setPopupItemsColor(Theme.getColor(Theme.key_voipgroup_actionBarItems), true); + + pipItem = new ActionBarMenuItem(context, null, 0, Theme.getColor(Theme.key_voipgroup_actionBarItems)); + pipItem.setLongClickEnabled(false); + pipItem.setIcon(R.drawable.msg_voice_pip); + pipItem.setContentDescription(LocaleController.getString("AccDescrPipMode", R.string.AccDescrPipMode)); + pipItem.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_voipgroup_actionBarItemsSelector), 6)); + pipItem.setOnClickListener(v -> { + if (Build.VERSION.SDK_INT < 23 || Settings.canDrawOverlays(parentActivity)) { + GroupCallPip.clearForce(); + dismiss(); + } else { + AlertsCreator.createDrawOverlayGroupCallPermissionDialog(getContext()).show(); + } + }); + + screenShareItem = new ActionBarMenuItem(context, null, 0, Theme.getColor(Theme.key_voipgroup_actionBarItems)); + screenShareItem.setLongClickEnabled(false); + screenShareItem.setIcon(R.drawable.msg_screencast); + screenShareItem.setContentDescription(LocaleController.getString("AccDescrPipMode", R.string.AccDescrPipMode)); + 
screenShareItem.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_voipgroup_actionBarItemsSelector), 6)); + screenShareItem.setOnClickListener(v -> { + VoIPService voIPService = VoIPService.getSharedInstance(); + if (voIPService == null) { + return; + } + if (voIPService.getVideoState(true) == Instance.VIDEO_STATE_ACTIVE) { + voIPService.stopScreenCapture(); + } else { + startScreenCapture(); + } + }); + + titleTextView = new AudioPlayerAlert.ClippingTextViewSwitcher(context) { + @Override + protected TextView createTextView() { + TextView textView = new TextView(context); + textView.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); + textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + textView.setGravity(Gravity.LEFT | Gravity.TOP); + textView.setSingleLine(true); + textView.setEllipsize(TextUtils.TruncateAt.END); + textView.setOnClickListener(v -> { + if (call != null && call.recording) { + showRecordHint(textView); + } + }); + return textView; + } + }; + + actionBarBackground = new View(context) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec), ActionBar.getCurrentActionBarHeight()); + } + }; + actionBarBackground.setAlpha(0.0f); + + containerView.addView(actionBarBackground, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 0)); + containerView.addView(titleTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 23, 0, 48, 0)); + containerView.addView(actionBar, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 0)); + + menuItemsContainer = new LinearLayout(context); + menuItemsContainer.setOrientation(LinearLayout.HORIZONTAL); + 
menuItemsContainer.addView(screenShareItem, LayoutHelper.createLinear(48, 48)); + menuItemsContainer.addView(pipItem, LayoutHelper.createLinear(48, 48)); + menuItemsContainer.addView(otherItem, LayoutHelper.createLinear(48, 48)); + containerView.addView(menuItemsContainer, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 48, Gravity.TOP | Gravity.RIGHT)); + + actionBarShadow = new View(context); + actionBarShadow.setAlpha(0.0f); + actionBarShadow.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); + containerView.addView(actionBarShadow, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1)); + + for (int a = 0; a < 2; a++) { + undoView[a] = new UndoView(context) { + @Override + public void showWithAction(long did, int action, Object infoObject, Object infoObject2, Runnable actionRunnable, Runnable cancelRunnable) { + if (previewDialog != null) { + return; + } + super.showWithAction(did, action, infoObject, infoObject2, actionRunnable, cancelRunnable); + } + }; + undoView[a].setAdditionalTranslationY(AndroidUtilities.dp(10)); + if (Build.VERSION.SDK_INT >= 21) { + undoView[a].setTranslationZ(AndroidUtilities.dp(5)); + } + containerView.addView(undoView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.LEFT, 8, 0, 8, 8)); + } + + accountSelectCell = new AccountSelectCell(context, true); + accountSelectCell.setTag(R.id.width_tag, 240); + otherItem.addSubItem(user_item, accountSelectCell, LayoutHelper.WRAP_CONTENT, AndroidUtilities.dp(48)); + otherItem.setShowSubmenuByMove(false); + accountSelectCell.setBackground(Theme.createRadSelectorDrawable(Theme.getColor(Theme.key_voipgroup_listSelector), 6, 6)); + accountGap = otherItem.addGap(user_item_gap); + everyoneItem = otherItem.addSubItem(eveyone_can_speak_item, 0, LocaleController.getString("VoipGroupAllCanSpeak", R.string.VoipGroupAllCanSpeak), true); + everyoneItem.updateSelectorBackground(true, false); + adminItem = 
otherItem.addSubItem(admin_can_speak_item, 0, LocaleController.getString("VoipGroupOnlyAdminsCanSpeak", R.string.VoipGroupOnlyAdminsCanSpeak), true); + adminItem.updateSelectorBackground(false, true); + + everyoneItem.setCheckColor(Theme.getColor(Theme.key_voipgroup_checkMenu)); + everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); + adminItem.setCheckColor(Theme.getColor(Theme.key_voipgroup_checkMenu)); + adminItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); + + Paint soundDrawablePaint = new Paint(Paint.ANTI_ALIAS_FLAG); + soundDrawablePaint.setColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); + soundDrawablePaint.setStyle(Paint.Style.STROKE); + soundDrawablePaint.setStrokeWidth(AndroidUtilities.dp(1.5f)); + soundDrawablePaint.setStrokeCap(Paint.Cap.ROUND); + soundItem = otherItem.addSubItem(sound_item, R.drawable.msg_voice_speaker, null, LocaleController.getString("VoipGroupAudio", R.string.VoipGroupAudio), true, false); + soundItem.setItemHeight(56); + + noiseItem = otherItem.addSubItem(noise_item, R.drawable.msg_noise_on, null, LocaleController.getString("VoipNoiseCancellation", R.string.VoipNoiseCancellation), true, false); + noiseItem.setItemHeight(56); + + soundItemDivider = otherItem.addDivider(ColorUtils.blendARGB(Theme.getColor(Theme.key_voipgroup_actionBar), Color.BLACK, 0.3f)); + ((ViewGroup.MarginLayoutParams)soundItemDivider.getLayoutParams()).topMargin = 0; + ((ViewGroup.MarginLayoutParams)soundItemDivider.getLayoutParams()).bottomMargin = 0; + editTitleItem = otherItem.addSubItem(edit_item, R.drawable.msg_edit, recordCallDrawable, LocaleController.getString("VoipGroupEditTitle", R.string.VoipGroupEditTitle), true, false); + permissionItem = otherItem.addSubItem(permission_item, R.drawable.msg_permissions, recordCallDrawable, LocaleController.getString("VoipGroupEditPermissions", R.string.VoipGroupEditPermissions), 
false, false); + inviteItem = otherItem.addSubItem(share_invite_link_item, R.drawable.msg_link, LocaleController.getString("VoipGroupShareInviteLink", R.string.VoipGroupShareInviteLink)); + recordCallDrawable = new RecordCallDrawable(); + screenItem = otherItem.addSubItem(screen_capture_item, R.drawable.msg_screencast, LocaleController.getString("VoipChatStartScreenCapture", R.string.VoipChatStartScreenCapture)); + recordItem = otherItem.addSubItem(start_record_item, 0, recordCallDrawable, LocaleController.getString("VoipGroupRecordCall", R.string.VoipGroupRecordCall), true, false); + recordCallDrawable.setParentView(recordItem.getImageView()); + leaveItem = otherItem.addSubItem(leave_item, R.drawable.msg_endcall, LocaleController.getString("VoipGroupEndChat", R.string.VoipGroupEndChat)); + otherItem.setPopupItemsSelectorColor(Theme.getColor(Theme.key_voipgroup_listSelector)); + otherItem.getPopupLayout().setFitItems(true); + + soundItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + noiseItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + leaveItem.setColors(Theme.getColor(Theme.key_voipgroup_leaveCallMenu), Theme.getColor(Theme.key_voipgroup_leaveCallMenu)); + inviteItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + editTitleItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + permissionItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + recordItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + screenItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); + + if (call != null) { + 
initCreatedGroupCall(); + } + + + + leaveBackgroundPaint.setColor(Theme.getColor(Theme.key_voipgroup_leaveButton)); + + updateTitle(false); + actionBar.getTitleTextView().setOnClickListener(v -> { + if (call != null && call.recording) { + showRecordHint(actionBar.getTitleTextView()); + } + }); + + fullscreenUsersListView = new RecyclerListView(context) { + @Override + public boolean drawChild(Canvas canvas, View child, long drawingTime) { + GroupCallFullscreenAdapter.GroupCallUserCell cell = (GroupCallFullscreenAdapter.GroupCallUserCell) child; + if (!renderersContainer.isAnimating()) { + cell.setAlpha(1f); + cell.setTranslationX(0); + cell.setTranslationY(0); + } + if (cell.isRemoving(fullscreenUsersListView) && cell.getRenderer() != null) { + return true; + } else if (cell.getTranslationY() != 0 && cell.getRenderer() != null && cell.getRenderer().primaryView != null) { + float listTop = listView.getTop() - getTop(); + float listBottom = listTop + listView.getMeasuredHeight(); + float progress = renderersContainer.progressToFullscreenMode; + canvas.save(); + canvas.clipRect(0, listTop * (1f - progress), getMeasuredWidth(), listBottom * (1f - progress) + getMeasuredHeight() * progress); + boolean r = super.drawChild(canvas, child, drawingTime); + canvas.restore(); + return r; + } else { + return super.drawChild(canvas, child, drawingTime); + } + } + }; + fullscreenListItemAnimator = new DefaultItemAnimator() { + + @Override + protected void onMoveAnimationUpdate(RecyclerView.ViewHolder holder) { + listView.invalidate(); + renderersContainer.invalidate(); + containerView.invalidate(); + updateLayout(true); + } + }; + fullscreenUsersListView.setClipToPadding(false); + fullscreenListItemAnimator.setDelayAnimations(false); + fullscreenListItemAnimator.setTranslationInterpolator(CubicBezierInterpolator.DEFAULT); + fullscreenListItemAnimator.setRemoveDuration(TRANSITION_DURATION); + fullscreenListItemAnimator.setAddDuration(TRANSITION_DURATION); + 
fullscreenListItemAnimator.setMoveDuration(TRANSITION_DURATION); + + fullscreenUsersListView.setItemAnimator(fullscreenListItemAnimator); + fullscreenUsersListView.setOnScrollListener(new RecyclerView.OnScrollListener() { + @Override + public void onScrolled(@NonNull RecyclerView recyclerView, int dx, int dy) { + super.onScrolled(recyclerView, dx, dy); + containerView.invalidate(); + renderersContainer.invalidate(); + } + }); + fullscreenUsersListView.setClipChildren(false); + LinearLayoutManager layoutManager = new LinearLayoutManager(context); + layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL); + fullscreenUsersListView.setLayoutManager(layoutManager); + fullscreenUsersListView.setAdapter(fullscreenAdapter = new GroupCallFullscreenAdapter(groupCall, currentAccount, this)); + fullscreenAdapter.setVisibility(fullscreenUsersListView, false); + fullscreenUsersListView.setOnItemClickListener((view, position) -> { + // pinnedVideoView.delayHideUi(); + GroupCallFullscreenAdapter.GroupCallUserCell userCell = (GroupCallFullscreenAdapter.GroupCallUserCell) view; + if (userCell.getVideoParticipant() == null) { + fullscreenFor(new ChatObject.VideoParticipant(userCell.getParticipant(), false, false)); + } else { + fullscreenFor(userCell.getVideoParticipant()); + } + }); + fullscreenUsersListView.setOnItemLongClickListener((view, position) -> { + if (showMenuForCell(view)) { + listView.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS); + } + return false; + }); + fullscreenUsersListView.setVisibility(View.GONE); + fullscreenUsersListView.addItemDecoration(new RecyclerView.ItemDecoration() { + @Override + public void getItemOffsets(@NonNull Rect outRect, @NonNull View view, @NonNull RecyclerView parent, @NonNull RecyclerView.State state) { + int p = parent.getChildAdapterPosition(view); + if (!isLandscapeMode) { + outRect.set(AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4), 0); + } else { + outRect.set(0, AndroidUtilities.dp(4), 0, AndroidUtilities.dp(4)); 
+ } + } + }); + + renderersContainer = new GroupCallRenderersContainer(context, listView, fullscreenUsersListView, attachedRenderers, call, this) { + @Override + protected void update() { + super.update(); + + float finalColorProgress2 = Math.max(colorProgress, renderersContainer == null ? 0 : renderersContainer.progressToFullscreenMode); + navBarColor = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_actionBarUnscrolled), Theme.getColor(Theme.key_voipgroup_actionBar), finalColorProgress2, 1.0f); + containerView.invalidate(); + setColorProgress(colorProgress); + } + + @Override + protected boolean drawChild(Canvas canvas, View child, long drawingTime) { + if (child == scrimRenderer) { + return true; + } + return super.drawChild(canvas, child, drawingTime); + } + + @Override + protected void onFullScreenModeChanged(boolean startAnimation) { + delayedGroupCallUpdated = startAnimation; + if (isTabletMode) { + if (!startAnimation && renderersContainer.inFullscreenMode) { + tabletGridAdapter.setVisibility(tabletVideoGridView, false, true); + } + } else { + if (startAnimation) { + GroupCallActivity.this.undoView[0].hide(false, 1); + renderersContainer.undoView[0].hide(false, 2); + if (!renderersContainer.inFullscreenMode) { + listView.setVisibility(View.VISIBLE); + actionBar.setVisibility(View.VISIBLE); + } + updateState(true, false); + buttonsContainer.requestLayout(); + if (fullscreenUsersListView.getVisibility() != View.VISIBLE) { + fullscreenUsersListView.setVisibility(View.VISIBLE); + fullscreenAdapter.setVisibility(fullscreenUsersListView, true); + fullscreenAdapter.update(false, fullscreenUsersListView); + } else { + fullscreenAdapter.setVisibility(fullscreenUsersListView, true); + applyCallParticipantUpdates(); + } + } else { + if (!renderersContainer.inFullscreenMode) { + fullscreenUsersListView.setVisibility(View.GONE); + fullscreenAdapter.setVisibility(fullscreenUsersListView, false); + } else { + actionBar.setVisibility(View.GONE); + 
listView.setVisibility(View.GONE); + } + + if (fullscreenUsersListView.getVisibility() == View.VISIBLE) { + for (int i = 0; i < fullscreenUsersListView.getChildCount(); i++) { + View child = fullscreenUsersListView.getChildAt(i); + child.setAlpha(1f); + child.setScaleX(1f); + child.setScaleY(1f); + child.setTranslationX(0); + child.setTranslationY(0); + ((GroupCallFullscreenAdapter.GroupCallUserCell) child).setProgressToFullscreen(renderersContainer.progressToFullscreenMode); + } + } + } + buttonsBackgroundGradientView2.setVisibility(startAnimation ? View.VISIBLE : View.GONE); + if (!delayedGroupCallUpdated) { + applyCallParticipantUpdates(); + } + } + } + + ValueAnimator uiVisibilityAnimator; + + public void onUiVisibilityChanged() { + if (renderersContainer == null) { + return; + } + boolean uiVisible = renderersContainer.isUiVisible(); + if (uiVisibilityAnimator != null) { + uiVisibilityAnimator.removeAllListeners(); + uiVisibilityAnimator.cancel(); + } + uiVisibilityAnimator = ValueAnimator.ofFloat(progressToHideUi, uiVisible ? 0 : 1f); + uiVisibilityAnimator.addUpdateListener(valueAnimator -> { + progressToHideUi = (float) valueAnimator.getAnimatedValue(); + renderersContainer.setProgressToHideUi(progressToHideUi); + fullscreenUsersListView.invalidate(); + containerView.invalidate(); + buttonsContainer.invalidate(); + }); + uiVisibilityAnimator.setDuration(TRANSITION_DURATION); + uiVisibilityAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + uiVisibilityAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + uiVisibilityAnimator = null; + progressToHideUi = uiVisible ? 
0 : 1f; + renderersContainer.setProgressToHideUi(progressToHideUi); + fullscreenUsersListView.invalidate(); + containerView.invalidate(); + buttonsContainer.invalidate(); + } + }); + uiVisibilityAnimator.start(); + + } + + @Override + protected boolean canHideUI() { + return super.canHideUI() && previewDialog == null; + } + + @Override + protected void onBackPressed() { + GroupCallActivity.this.onBackPressed(); + } + }; + renderersContainer.setClipChildren(false); + fullscreenAdapter.setRenderersPool(attachedRenderers, renderersContainer); + if (tabletVideoGridView != null) { + tabletGridAdapter.setRenderersPool(attachedRenderers, renderersContainer); + } + avatarPagerIndicator = new AvatarPreviewPagerIndicator(context); + avatarsViewPager = new ProfileGalleryView(context, actionBar, listView, avatarPagerIndicator) { + @Override + public void invalidate() { + super.invalidate(); + containerView.invalidate(); + } + }; + avatarsViewPager.setImagesLayerNum(8192); + avatarsViewPager.setInvalidateWithParent(true); + avatarPagerIndicator.setProfileGalleryView(avatarsViewPager); + avatarPreviewContainer = new FrameLayout(context) { + + Rect rect = new Rect(); + RectF rectF = new RectF(); + Path path = new Path(); + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + int size = Math.min(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec)); + super.onMeasure(MeasureSpec.makeMeasureSpec(size, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(size + getPaddingBottom(), MeasureSpec.EXACTLY)); + } + + @Override + protected void dispatchDraw(Canvas canvas) { + if (progressToAvatarPreview != 1) { + if (scrimView != null && hasScrimAnchorView) { + canvas.save(); + float s = getMeasuredHeight() / (float) scrimView.getAvatarImageView().getMeasuredHeight(); + + float fromRadius = scrimView.getAvatarImageView().getMeasuredHeight() / 2f * s; + int topRad = (int) ((fromRadius * (1f - progressToAvatarPreview) + 
(AndroidUtilities.dp(13) * progressToAvatarPreview))); + int bottomRad = (int) (fromRadius * (1f - progressToAvatarPreview)); + scrimView.getAvatarWavesDrawable().draw(canvas, scrimView.getAvatarImageView().getMeasuredHeight() / 2, scrimView.getAvatarImageView().getMeasuredHeight() / 2, this); + scrimView.getAvatarImageView().getImageReceiver().setImageCoords(0, 0, getMeasuredWidth(), getMeasuredHeight()); + scrimView.getAvatarImageView().setRoundRadius(topRad, topRad, bottomRad, bottomRad); + scrimView.getAvatarImageView().getImageReceiver().draw(canvas); + scrimView.getAvatarImageView().setRoundRadius(scrimView.getAvatarImageView().getMeasuredHeight() / 2); + canvas.restore(); + } else if (scrimFullscreenView != null && scrimRenderer == null && previewTextureTransitionEnabled) { + canvas.save(); + float s = getMeasuredHeight() / (float) scrimFullscreenView.getAvatarImageView().getMeasuredHeight(); + + float fromRadius = scrimFullscreenView.getAvatarImageView().getMeasuredHeight() / 2f * s; + int topRad = (int) ((fromRadius * (1f - progressToAvatarPreview) + (AndroidUtilities.dp(13) * progressToAvatarPreview))); + int bottomRad = (int) (fromRadius * (1f - progressToAvatarPreview)); + // scrimFullscreenView.getAvatarWavesDrawable().draw(canvas, scrimFullscreenView.getAvatarImageView().getMeasuredHeight() / 2, scrimFullscreenView.getAvatarImageView().getMeasuredHeight() / 2, this); + scrimFullscreenView.getAvatarImageView().getImageReceiver().setImageCoords(0, 0, getMeasuredWidth(), getMeasuredHeight()); + scrimFullscreenView.getAvatarImageView().setRoundRadius(topRad, topRad, bottomRad, bottomRad); + scrimFullscreenView.getAvatarImageView().getImageReceiver().draw(canvas); + scrimFullscreenView.getAvatarImageView().setRoundRadius(scrimFullscreenView.getAvatarImageView().getMeasuredHeight() / 2); + canvas.restore(); + } + } + avatarsViewPager.setAlpha(progressToAvatarPreview); + + path.reset(); + rectF.set(0, 0, getMeasuredHeight(), getMeasuredWidth()); + 
path.addRoundRect(rectF, new float[]{AndroidUtilities.dp(13), AndroidUtilities.dp(13), AndroidUtilities.dp(13), AndroidUtilities.dp(13), 0, 0, 0, 0}, Path.Direction.CCW); + canvas.save(); + canvas.clipPath(path); + View textureView = avatarsViewPager.findVideoActiveView(); + if (textureView != null && scrimRenderer != null && scrimRenderer.isAttached() && !drawingForBlur) { + canvas.save(); + rect.setEmpty(); + avatarsViewPager.getChildVisibleRect(textureView, rect, null); + int left = rect.left; + if (left < -avatarsViewPager.getMeasuredWidth()) { + left += avatarsViewPager.getMeasuredWidth() * 2; + } else if (left > avatarsViewPager.getMeasuredWidth()) { + left -= avatarsViewPager.getMeasuredWidth() * 2; + } + canvas.translate(left, 0); + scrimRenderer.draw(canvas); + canvas.restore(); + } + super.dispatchDraw(canvas); + canvas.restore(); + } + + @Override + public void invalidate() { + super.invalidate(); + containerView.invalidate(); + } + }; + avatarPreviewContainer.setVisibility(View.GONE); + + avatarsViewPager.setVisibility(View.VISIBLE); + avatarsViewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() { + + @Override + public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { + + } + + @Override + public void onPageSelected(int position) { + int realPosition = avatarsViewPager.getRealPosition(position); + avatarPagerIndicator.saveCurrentPageProgress(); + avatarPagerIndicator.invalidate(); + } + + @Override + public void onPageScrollStateChanged(int state) { + + } + }); + blurredView = new View(context) { + @Override + public void setAlpha(float alpha) { + if (getAlpha() != alpha) { + super.setAlpha(alpha); + checkContentOverlayed(); + } + } + }; + containerView.addView(renderersContainer); + renderersContainer.addView(fullscreenUsersListView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 80, Gravity.BOTTOM, 0, 0, 0, 100)); + buttonsContainer.setWillNotDraw(false); - 
containerView.addView(buttonsContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 231, Gravity.LEFT | Gravity.BOTTOM)); + buttonsBackgroundGradientView = new View(context); + gradientColors[0] = backgroundColor; + gradientColors[1] = Color.TRANSPARENT; + buttonsBackgroundGradientView.setBackground(buttonsBackgroundGradient = new GradientDrawable(GradientDrawable.Orientation.BOTTOM_TOP, gradientColors)); + containerView.addView(buttonsBackgroundGradientView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 60, Gravity.LEFT | Gravity.BOTTOM)); + buttonsBackgroundGradientView2 = new View(context); + buttonsBackgroundGradientView2.setBackgroundColor(gradientColors[0]); + containerView.addView(buttonsBackgroundGradientView2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 0, Gravity.LEFT | Gravity.BOTTOM)); + containerView.addView(buttonsContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 200, Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM)); + containerView.addView(blurredView); + + avatarPreviewContainer.addView(avatarsViewPager, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + avatarPreviewContainer.addView(avatarPagerIndicator, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 0, 0, 0, 0)); + containerView.addView(avatarPreviewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 14, 14, 14, 14)); + + applyCallParticipantUpdates(); + listAdapter.notifyDataSetChanged(); + + if (isTabletMode) { + tabletGridAdapter.update(false, tabletVideoGridView); + } + oldCount = listAdapter.getItemCount(); if (schedulePeer != null) { scheduleInfoTextView = new TextView(context); @@ -2742,6 +4794,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter call.call = new TLRPC.TL_groupCall(); call.call.participants_count = 0; call.call.version = 1; + call.call.can_start_video = true; call.call.can_change_join_muted = true; 
call.chatId = chat.id; call.call.schedule_date = scheduleStartAt; @@ -2750,6 +4803,10 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter call.setSelfPeer(peer); call.call.access_hash = updateGroupCall.call.access_hash; call.call.id = updateGroupCall.call.id; + call.createNoVideoParticipant(); + fullscreenAdapter.setGroupCall(call); + renderersContainer.setGroupCall(call); + tabletGridAdapter.setGroupCall(call); accountInstance.getMessagesController().putGroupCall(call.chatId, call); }); break; @@ -2856,538 +4913,6 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter AlertsCreator.checkScheduleDate(scheduleButtonTextView, scheduleInfoTextView, 7 * 24 * 60 * 60, 2, dayPicker, hourPicker, minutePicker); } - int color = Theme.getColor(Theme.key_voipgroup_unmuteButton2); - int r = Color.red(color); - int g = Color.green(color); - int b = Color.blue(color); - radialMatrix = new Matrix(); - radialGradient = new RadialGradient(0, 0, AndroidUtilities.dp(160), new int[]{Color.argb(50, r, g, b), Color.argb(0, r, g, b)}, null, Shader.TileMode.CLAMP); - radialPaint = new Paint(Paint.ANTI_ALIAS_FLAG); - radialPaint.setShader(radialGradient); - - tinyWaveDrawable = new BlobDrawable(9); - bigWaveDrawable = new BlobDrawable(12); - - tinyWaveDrawable.minRadius = AndroidUtilities.dp(62); - tinyWaveDrawable.maxRadius = AndroidUtilities.dp(72); - tinyWaveDrawable.generateBlob(); - - bigWaveDrawable.minRadius = AndroidUtilities.dp(65); - bigWaveDrawable.maxRadius = AndroidUtilities.dp(75); - bigWaveDrawable.generateBlob(); - - tinyWaveDrawable.paint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_unmuteButton), (int) (255 * WaveDrawable.CIRCLE_ALPHA_2))); - bigWaveDrawable.paint.setColor(ColorUtils.setAlphaComponent(Theme.getColor(Theme.key_voipgroup_unmuteButton), (int) (255 * WaveDrawable.CIRCLE_ALPHA_1))); - - soundButton = new VoIPToggleButton(context); - soundButton.setCheckable(true); - 
soundButton.setTextSize(12); - buttonsContainer.addView(soundButton, LayoutHelper.createFrame(68, 90)); - soundButton.setOnClickListener(v -> { - if (call == null || call.isScheduled()) { - getLink(false); - return; - } - if (VoIPService.getSharedInstance() == null) { - return; - } - VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(getContext(), false); - }); - - leaveButton = new VoIPToggleButton(context); - leaveButton.setDrawBackground(false); - leaveButton.setTextSize(12); - leaveButton.setData(R.drawable.calls_decline, 0xffffffff, Theme.getColor(Theme.key_voipgroup_leaveButton), 0.3f, false, LocaleController.getString("VoipGroupLeave", R.string.VoipGroupLeave), false, false); - buttonsContainer.addView(leaveButton, LayoutHelper.createFrame(68, 80)); - leaveButton.setOnClickListener(v -> { - if (call == null || call.isScheduled()) { - dismiss(); - return; - } - updateItems(); - onLeaveClick(context, this::dismiss, false); - }); - - muteButton = new RLottieImageView(context) { - - @Override - public boolean onTouchEvent(MotionEvent event) { - if (event.getAction() == MotionEvent.ACTION_DOWN && muteButtonState == MUTE_BUTTON_STATE_UNMUTE && call != null) { - AndroidUtilities.runOnUIThread(pressRunnable, 300); - scheduled = true; - } else if (event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) { - if (scheduled) { - AndroidUtilities.cancelRunOnUIThread(pressRunnable); - scheduled = false; - } else if (pressed) { - AndroidUtilities.cancelRunOnUIThread(unmuteRunnable); - updateMuteButton(MUTE_BUTTON_STATE_UNMUTE, true); - if (VoIPService.getSharedInstance() != null) { - VoIPService.getSharedInstance().setMicMute(true, true, false); - muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } - pressed = false; - MotionEvent cancel = MotionEvent.obtain(0, 0, MotionEvent.ACTION_CANCEL, 0, 0, 0); - super.onTouchEvent(cancel); - 
cancel.recycle(); - return true; - } - } - return super.onTouchEvent(event); - } - - @Override - public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { - super.onInitializeAccessibilityNodeInfo(info); - - info.setClassName(Button.class.getName()); - info.setEnabled(muteButtonState == MUTE_BUTTON_STATE_UNMUTE || muteButtonState == MUTE_BUTTON_STATE_MUTE); - - if (muteButtonState == MUTE_BUTTON_STATE_MUTE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - info.addAction(new AccessibilityNodeInfo.AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, LocaleController.getString("VoipMute", R.string.VoipMute))); - } - } - }; - muteButton.setAnimation(bigMicDrawable); - muteButton.setScaleType(ImageView.ScaleType.CENTER); - buttonsContainer.addView(muteButton, LayoutHelper.createFrame(122, 122, Gravity.CENTER_HORIZONTAL | Gravity.TOP)); - muteButton.setOnClickListener(new View.OnClickListener() { - - Runnable finishRunnable = new Runnable() { - @Override - public void run() { - muteButton.setAnimation(bigMicDrawable); - playingHandAnimation = false; - } - }; - - @Override - public void onClick(View v) { - if (call == null || muteButtonState == MUTE_BUTTON_STATE_CONNECTING) { - return; - } - if (muteButtonState == MUTE_BUTTON_STATE_START_NOW) { - if (startingGroupCall) { - return; - } - v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - startingGroupCall = true; - TLRPC.TL_phone_startScheduledGroupCall req = new TLRPC.TL_phone_startScheduledGroupCall(); - req.call = call.getInputGroupCall(); - accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> { - if (response != null) { - accountInstance.getMessagesController().processUpdates((TLRPC.Updates) response, false); - } - }); - } else if (muteButtonState == MUTE_BUTTON_STATE_CANCEL_REMINDER || muteButtonState == MUTE_BUTTON_STATE_SET_REMINDER) { - if (muteButtonState == MUTE_BUTTON_STATE_SET_REMINDER) { 
- if (reminderHintView != null) { - reminderHintView.hide(); - } - } - TLRPC.TL_phone_toggleGroupCallStartSubscription req = new TLRPC.TL_phone_toggleGroupCallStartSubscription(); - req.call = call.getInputGroupCall(); - call.call.schedule_start_subscribed = !call.call.schedule_start_subscribed; - req.subscribed = call.call.schedule_start_subscribed; - accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> { - if (response != null) { - accountInstance.getMessagesController().processUpdates((TLRPC.Updates) response, false); - } - }); - updateMuteButton(call.call.schedule_start_subscribed ? MUTE_BUTTON_STATE_CANCEL_REMINDER : MUTE_BUTTON_STATE_SET_REMINDER, true); - } else { - if (VoIPService.getSharedInstance() == null || isStillConnecting()) { - return; - } - if (muteButtonState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN || muteButtonState == MUTE_BUTTON_STATE_RAISED_HAND) { - if (playingHandAnimation) { - return; - } - playingHandAnimation = true; - AndroidUtilities.shakeView(muteLabel[0], 2, 0); - AndroidUtilities.shakeView(muteSubLabel[0], 2, 0); - v.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - int num = Utilities.random.nextInt(100); - int endFrame; - int startFrame; - if (num < 32) { - startFrame = 0; - endFrame = 120; - } else if (num < 64) { - startFrame = 120; - endFrame = 240; - } else if (num < 97) { - startFrame = 240; - endFrame = 420; - } else if (num == 98) { - startFrame = 420; - endFrame = 540; - } else { - startFrame = 540; - endFrame = 720; - } - handDrawables.setCustomEndFrame(endFrame); - handDrawables.setOnFinishCallback(finishRunnable, endFrame - 1); - muteButton.setAnimation(handDrawables); - handDrawables.setCurrentFrame(startFrame); - muteButton.playAnimation(); - if (muteButtonState == MUTE_BUTTON_STATE_MUTED_BY_ADMIN) { - TLRPC.TL_groupCallParticipant participant = call.participants.get(MessageObject.getPeerId(selfPeer)); - TLObject object; - int peerId 
= MessageObject.getPeerId(participant.peer); - if (peerId > 0) { - object = accountInstance.getMessagesController().getUser(peerId); - } else { - object = accountInstance.getMessagesController().getChat(-peerId); - } - VoIPService.getSharedInstance().editCallMember(object, true, -1, true); - updateMuteButton(MUTE_BUTTON_STATE_RAISED_HAND, true); - } - } else if (muteButtonState == MUTE_BUTTON_STATE_UNMUTE) { - updateMuteButton(MUTE_BUTTON_STATE_MUTE, true); - VoIPService.getSharedInstance().setMicMute(false, false, true); - muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } else { - updateMuteButton(MUTE_BUTTON_STATE_UNMUTE, true); - VoIPService.getSharedInstance().setMicMute(true, false, true); - muteButton.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP, HapticFeedbackConstants.FLAG_IGNORE_GLOBAL_SETTING); - } - } - } - }); - - radialProgressView = new RadialProgressView(context); - radialProgressView.setSize(AndroidUtilities.dp(110)); - radialProgressView.setStrokeWidth(4); - radialProgressView.setProgressColor(Theme.getColor(Theme.key_voipgroup_connectingProgress)); - //buttonsContainer.addView(radialProgressView, LayoutHelper.createFrame(126, 126, Gravity.CENTER_HORIZONTAL | Gravity.TOP)); - - for (int a = 0; a < 2; a++) { - muteLabel[a] = new TextView(context); - muteLabel[a].setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); - muteLabel[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18); - muteLabel[a].setGravity(Gravity.CENTER_HORIZONTAL); - buttonsContainer.addView(muteLabel[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM, 0, 0, 0, 26)); - - muteSubLabel[a] = new TextView(context); - muteSubLabel[a].setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); - muteSubLabel[a].setTextSize(TypedValue.COMPLEX_UNIT_DIP, 12); - 
muteSubLabel[a].setGravity(Gravity.CENTER_HORIZONTAL); - buttonsContainer.addView(muteSubLabel[a], LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM, 0, 0, 0, 10)); - if (a == 1) { - muteLabel[a].setVisibility(View.INVISIBLE); - muteSubLabel[a].setVisibility(View.INVISIBLE); - } - } - - actionBar.setAlpha(0.0f); - actionBar.getBackButton().setScaleX(0.9f); - actionBar.getBackButton().setScaleY(0.9f); - actionBar.getBackButton().setTranslationX(-AndroidUtilities.dp(14)); - - actionBar.getTitleTextView().setTranslationY(AndroidUtilities.dp(23)); - actionBar.getSubtitleTextView().setTranslationY(AndroidUtilities.dp(20)); - - accountSwitchAvatarDrawable = new AvatarDrawable(); - accountSwitchAvatarDrawable.setTextSize(AndroidUtilities.dp(12)); - accountSwitchImageView = new BackupImageView(context); - accountSwitchImageView.setRoundRadius(AndroidUtilities.dp(16)); - accountSwitchImageView.setOnClickListener(v -> JoinCallAlert.open(getContext(), -currentChat.id, accountInstance, null, JoinCallAlert.TYPE_DISPLAY, selfPeer, (peer1, hasFewPeers, schedule) -> { - if (call == null) { - return; - } - TLObject object; - if (peer1 instanceof TLRPC.TL_inputPeerUser) { - object = accountInstance.getMessagesController().getUser(peer1.user_id); - } else if (peer1 instanceof TLRPC.TL_inputPeerChat) { - object = accountInstance.getMessagesController().getChat(peer1.chat_id); - } else { - object = accountInstance.getMessagesController().getChat(peer1.channel_id); - } - if (call.isScheduled()) { - getUndoView().showWithAction(0, UndoView.ACTION_VOIP_USER_CHANGED, object); - if (peer1 instanceof TLRPC.TL_inputPeerChannel) { - selfPeer = new TLRPC.TL_peerChannel(); - selfPeer.channel_id = peer1.channel_id; - } else if (peer1 instanceof TLRPC.TL_inputPeerUser) { - selfPeer = new TLRPC.TL_peerUser(); - selfPeer.user_id = peer1.user_id; - } else if (peer1 instanceof TLRPC.TL_inputPeerChat) { - selfPeer = new 
TLRPC.TL_peerChat(); - selfPeer.chat_id = peer1.chat_id; - } - this.schedulePeer = peer1; - TLRPC.ChatFull chatFull = accountInstance.getMessagesController().getChatFull(currentChat.id); - if (chatFull != null) { - chatFull.groupcall_default_join_as = selfPeer; - if (chatFull instanceof TLRPC.TL_chatFull) { - chatFull.flags |= 32768; - } else { - chatFull.flags |= 67108864; - } - } - TLRPC.TL_phone_saveDefaultGroupCallJoinAs req = new TLRPC.TL_phone_saveDefaultGroupCallJoinAs(); - req.peer = MessagesController.getInputPeer(currentChat); - req.join_as = peer1; - accountInstance.getConnectionsManager().sendRequest(req, (response, error) -> { - - }); - updateItems(); - } else { - if (VoIPService.getSharedInstance() == null || !hasFewPeers) { - return; - } - TLRPC.TL_groupCallParticipant participant = call.participants.get(MessageObject.getPeerId(selfPeer)); - VoIPService.getSharedInstance().setGroupCallPeer(peer1); - userSwitchObject = object; - } - })); - - otherItem = new ActionBarMenuItem(context, null, 0, Theme.getColor(Theme.key_voipgroup_actionBarItems)); - otherItem.setLongClickEnabled(false); - otherItem.setIcon(R.drawable.ic_ab_other); - otherItem.setContentDescription(LocaleController.getString("AccDescrMoreOptions", R.string.AccDescrMoreOptions)); - otherItem.setSubMenuOpenSide(2); - otherItem.setDelegate(id -> actionBar.getActionBarMenuOnItemClick().onItemClick(id)); - otherItem.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_voipgroup_actionBarItemsSelector), 6)); - otherItem.setOnClickListener(v -> { - if (call == null) { - return; - } - if (call.call.join_muted) { - everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); - everyoneItem.setChecked(false); - adminItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); - adminItem.setChecked(true); - } else { - 
everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); - everyoneItem.setChecked(true); - adminItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); - adminItem.setChecked(false); - } - changingPermissions = false; - otherItem.hideSubItem(eveyone_can_speak_item); - otherItem.hideSubItem(admin_can_speak_item); - updateItems(); - otherItem.toggleSubMenu(); - }); - otherItem.setPopupItemsColor(Theme.getColor(Theme.key_voipgroup_actionBarItems), false); - otherItem.setPopupItemsColor(Theme.getColor(Theme.key_voipgroup_actionBarItems), true); - - pipItem = new ActionBarMenuItem(context, null, 0, Theme.getColor(Theme.key_voipgroup_actionBarItems)); - pipItem.setLongClickEnabled(false); - pipItem.setIcon(R.drawable.msg_voice_pip); - pipItem.setContentDescription(LocaleController.getString("AccDescrPipMode", R.string.AccDescrPipMode)); - pipItem.setBackgroundDrawable(Theme.createSelectorDrawable(Theme.getColor(Theme.key_voipgroup_actionBarItemsSelector), 6)); - pipItem.setOnClickListener(v -> { - if (Build.VERSION.SDK_INT < 23 || Settings.canDrawOverlays(parentActivity)) { - GroupCallPip.clearForce(); - dismiss(); - } else { - AlertsCreator.createDrawOverlayGroupCallPermissionDialog(getContext()).show(); - } - }); - - titleTextView = new AudioPlayerAlert.ClippingTextViewSwitcher(context) { - @Override - protected TextView createTextView() { - TextView textView = new TextView(context); - textView.setTextColor(Theme.getColor(Theme.key_voipgroup_actionBarItems)); - textView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 20); - textView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); - textView.setGravity(Gravity.LEFT | Gravity.TOP); - textView.setSingleLine(true); - textView.setEllipsize(TextUtils.TruncateAt.END); - textView.setOnClickListener(v -> { - if (call != null && call.recording) { - showRecordHint(textView); - } - }); - return 
textView; - } - }; - - actionBarBackground = new View(context) { - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec), ActionBar.getCurrentActionBarHeight()); - } - }; - actionBarBackground.setAlpha(0.0f); - - containerView.addView(actionBarBackground, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 0)); - containerView.addView(titleTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 23, 0, 48, 0)); - containerView.addView(actionBar, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 0)); - - menuItemsContainer = new LinearLayout(context); - menuItemsContainer.setOrientation(LinearLayout.HORIZONTAL); - menuItemsContainer.addView(pipItem, LayoutHelper.createLinear(48, 48)); - menuItemsContainer.addView(otherItem, LayoutHelper.createLinear(48, 48)); - menuItemsContainer.addView(accountSwitchImageView, LayoutHelper.createLinear(32, 32, Gravity.CENTER_VERTICAL, 2, 0, 12, 0)); - containerView.addView(menuItemsContainer, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 48, Gravity.TOP | Gravity.RIGHT)); - - actionBarShadow = new View(context); - actionBarShadow.setAlpha(0.0f); - actionBarShadow.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); - containerView.addView(actionBarShadow, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 1)); - - for (int a = 0; a < 2; a++) { - undoView[a] = new UndoView(context); - undoView[a].setAdditionalTranslationY(AndroidUtilities.dp(10)); - if (Build.VERSION.SDK_INT >= 21) { - undoView[a].setTranslationZ(AndroidUtilities.dp(5)); - } - containerView.addView(undoView[a], LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.LEFT, 8, 0, 8, 8)); - } - - accountSelectCell = new 
AccountSelectCell(context, true); - accountSelectCell.setTag(R.id.width_tag, 240); - otherItem.addSubItem(user_item, accountSelectCell, LayoutHelper.WRAP_CONTENT, AndroidUtilities.dp(48)); - accountSelectCell.setBackground(Theme.createRadSelectorDrawable(Theme.getColor(Theme.key_voipgroup_listSelector), 6, 6)); - accountGap = otherItem.addGap(user_item_gap); - everyoneItem = otherItem.addSubItem(eveyone_can_speak_item, 0, LocaleController.getString("VoipGroupAllCanSpeak", R.string.VoipGroupAllCanSpeak), true); - everyoneItem.updateSelectorBackground(true, false); - adminItem = otherItem.addSubItem(admin_can_speak_item, 0, LocaleController.getString("VoipGroupOnlyAdminsCanSpeak", R.string.VoipGroupOnlyAdminsCanSpeak), true); - adminItem.updateSelectorBackground(false, true); - - everyoneItem.setCheckColor(Theme.getColor(Theme.key_voipgroup_checkMenu)); - everyoneItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); - adminItem.setCheckColor(Theme.getColor(Theme.key_voipgroup_checkMenu)); - adminItem.setColors(Theme.getColor(Theme.key_voipgroup_checkMenu), Theme.getColor(Theme.key_voipgroup_checkMenu)); - - editTitleItem = otherItem.addSubItem(edit_item, R.drawable.msg_edit, recordCallDrawable, LocaleController.getString("VoipGroupEditTitle", R.string.VoipGroupEditTitle), true, false); - permissionItem = otherItem.addSubItem(permission_item, R.drawable.msg_permissions, recordCallDrawable, LocaleController.getString("VoipGroupEditPermissions", R.string.VoipGroupEditPermissions), false, false); - inviteItem = otherItem.addSubItem(share_invite_link_item, R.drawable.msg_link, LocaleController.getString("VoipGroupShareInviteLink", R.string.VoipGroupShareInviteLink)); - recordCallDrawable = new RecordCallDrawable(); - recordItem = otherItem.addSubItem(start_record_item, 0, recordCallDrawable, LocaleController.getString("VoipGroupRecordCall", R.string.VoipGroupRecordCall), true, false); - 
recordCallDrawable.setParentView(recordItem.getImageView()); - leaveItem = otherItem.addSubItem(leave_item, R.drawable.msg_endcall, LocaleController.getString("VoipGroupEndChat", R.string.VoipGroupEndChat)); - otherItem.setPopupItemsSelectorColor(Theme.getColor(Theme.key_voipgroup_listSelector)); - otherItem.getPopupLayout().setFitItems(true); - - leaveItem.setColors(Theme.getColor(Theme.key_voipgroup_leaveCallMenu), Theme.getColor(Theme.key_voipgroup_leaveCallMenu)); - inviteItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); - editTitleItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); - permissionItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); - recordItem.setColors(Theme.getColor(Theme.key_voipgroup_actionBarItems), Theme.getColor(Theme.key_voipgroup_actionBarItems)); - - if (call != null) { - initCreatedGroupCall(); - } - - listAdapter.notifyDataSetChanged(); - oldCount = listAdapter.getItemCount(); - - updateItems(); - updateSpeakerPhoneIcon(false); - updateState(false, false); - updateScheduleUI(false); - setColorProgress(0.0f); - - leaveBackgroundPaint.setColor(Theme.getColor(Theme.key_voipgroup_leaveButton)); - - updateTitle(false); - actionBar.getTitleTextView().setOnClickListener(v -> { - if (call != null && call.recording) { - showRecordHint(actionBar.getTitleTextView()); - } - }); - - - avatarPagerIndicator = new AvatarPreviewPagerIndicator(context); - avatarsViewPager = new ProfileGalleryView(context, actionBar, listView, avatarPagerIndicator) { - @Override - public void invalidate() { - super.invalidate(); - containerView.invalidate(); - } - }; - avatarsViewPager.setInvalidateWithParent(true); - avatarPagerIndicator.setProfileGalleryView(avatarsViewPager); - avatarPreviewContainer = new FrameLayout(context) { - - RectF rectF = new RectF(); - 
Path path = new Path(); - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int size = Math.min(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec)); - super.onMeasure( MeasureSpec.makeMeasureSpec(size, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(size + getPaddingBottom(), MeasureSpec.EXACTLY)); - } - - @Override - protected void dispatchDraw(Canvas canvas) { - if (progressToAvatarPreview != 1) { - if (scrimView != null) { - canvas.save(); - float s = getMeasuredHeight() / (float) scrimView.getAvatarImageView().getMeasuredHeight(); - - - float fromRadius = scrimView.getAvatarImageView().getMeasuredHeight() / 2f * s; - int topRad = (int) ((fromRadius * (1f - progressToAvatarPreview) + (AndroidUtilities.dp(13) * progressToAvatarPreview))); - int bottomRad = (int) (fromRadius * (1f - progressToAvatarPreview)); - scrimView.getAvatarWavesDrawable().draw(canvas, scrimView.getAvatarImageView().getMeasuredHeight() / 2, scrimView.getAvatarImageView().getMeasuredHeight() / 2, this); - scrimView.getAvatarImageView().getImageReceiver().setImageCoords(0, 0, getMeasuredWidth(), getMeasuredHeight()); - scrimView.getAvatarImageView().setRoundRadius(topRad, topRad, bottomRad, bottomRad); - scrimView.getAvatarImageView().getImageReceiver().draw(canvas); - scrimView.getAvatarImageView().setRoundRadius(scrimView.getAvatarImageView().getMeasuredHeight() / 2); - canvas.restore(); - } - } - avatarsViewPager.setAlpha(progressToAvatarPreview); - - path.reset(); - rectF.set(0, 0, getMeasuredHeight(), getMeasuredWidth()); - path.addRoundRect(rectF, new float[]{AndroidUtilities.dp(13), AndroidUtilities.dp(13), AndroidUtilities.dp(13), AndroidUtilities.dp(13), 0, 0, 0, 0}, Path.Direction.CCW); - canvas.save(); - canvas.clipPath(path); - super.dispatchDraw(canvas); - canvas.restore(); - } - - @Override - public void invalidate() { - super.invalidate(); - containerView.invalidate(); - } - }; - 
avatarPreviewContainer.setVisibility(View.GONE); - - avatarsViewPager.setVisibility(View.VISIBLE); - avatarsViewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() { - - @Override - public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { - - } - - @Override - public void onPageSelected(int position) { - int realPosition = avatarsViewPager.getRealPosition(position); - avatarPagerIndicator.saveCurrentPageProgress(); - avatarPagerIndicator.invalidate(); - } - - @Override - public void onPageScrollStateChanged(int state) { - - } - }); - blurredView = new View(context) { - @Override - public void setAlpha(float alpha) { - if (getAlpha() != alpha) { - super.setAlpha(alpha); - checkContentOverlayed(); - } - } - }; - containerView.addView(blurredView); - - avatarPreviewContainer.addView(avatarsViewPager, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - avatarPreviewContainer.addView(avatarPagerIndicator, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 0, 0, 0, 0)); - containerView.addView(avatarPreviewContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, 0, 14, 14, 14, 14)); - ViewGroup decorView; if (Build.VERSION.SDK_INT >= 21) { decorView = (ViewGroup) getWindow().getDecorView(); @@ -3433,17 +4958,242 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } }); avatarsViewPager.setPinchToZoomHelper(pinchToZoomHelper); + + cameraButton.setOnClickListener((View) -> { + if (Build.VERSION.SDK_INT >= 23 && parentActivity != null && parentActivity.checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) { + parentActivity.requestPermissions(new String[]{Manifest.permission.CAMERA}, 104); + return; + } + if (VoIPService.getSharedInstance() == null) { + return; + } + if (VoIPService.getSharedInstance().getVideoState(false) != Instance.VIDEO_STATE_ACTIVE) { + 
undoView[0].hide(false, 1); + if (previewDialog == null) { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().createCaptureDevice(false); + } + previewDialog = new VideoPreviewDialog(context, listView, fullscreenUsersListView) { + @Override + public void onDismiss(boolean apply) { + boolean showMicIcon = previewDialog.micEnabled; + previewDialog = null; + VoIPService service = VoIPService.getSharedInstance(); + if (apply) { + if (service != null) { + service.setupCaptureDevice(false, showMicIcon); + } + updateState(true, false); + call.sortParticipants(); + applyCallParticipantUpdates(); + buttonsContainer.requestLayout(); + } else { + if (service != null) { + VoIPService.getSharedInstance().setVideoState(false, Instance.VIDEO_STATE_INACTIVE); + } + } + } + }; + containerView.addView(previewDialog); + if (!VoIPService.getSharedInstance().isFrontFaceCamera()) { + VoIPService.getSharedInstance().switchCamera(); + } + } + } else { + VoIPService.getSharedInstance().setVideoState(false, Instance.VIDEO_STATE_INACTIVE); + updateState(true, false); + updateSpeakerPhoneIcon(false); + call.sortParticipants(); + applyCallParticipantUpdates(); + buttonsContainer.requestLayout(); + } + }); + updateScheduleUI(false); + updateItems(); + updateSpeakerPhoneIcon(false); + updateState(false, false); + setColorProgress(0.0f); + updateSubtitle(); + } + + public void fullscreenFor(ChatObject.VideoParticipant videoParticipant) { + VoIPService voIPService = VoIPService.getSharedInstance(); + if (voIPService == null || renderersContainer.isAnimating()) { + return; + } + if (isTabletMode) { + if (requestFullscreenListener != null) { + listView.getViewTreeObserver().removeOnPreDrawListener(requestFullscreenListener); + requestFullscreenListener = null; + } + ArrayList activeSinks = new ArrayList<>(); + if (videoParticipant == null) { + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < 
attachedRenderersTmp.size(); i++) { + final GroupCallMiniTextureView miniTextureView = attachedRenderersTmp.get(i); + if (miniTextureView.primaryView != null) { + miniTextureView.primaryView.setRenderer(null); + if (miniTextureView.secondaryView != null) { + miniTextureView.secondaryView.setRenderer(null); + } + if (miniTextureView.tabletGridView != null) { + miniTextureView.tabletGridView.setRenderer(null); + } + activeSinks.add(miniTextureView.participant); + miniTextureView.forceDetach(false); + miniTextureView.animate().alpha(0f).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (miniTextureView.getParent() != null) { + containerView.removeView(miniTextureView); + } + } + }); + } + } + listViewVideoVisibility = false; + tabletGridAdapter.setVisibility(tabletVideoGridView, true, true); + } else { + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + final GroupCallMiniTextureView miniTextureView = attachedRenderersTmp.get(i); + if (miniTextureView.tabletGridView != null && (miniTextureView.participant == null || !miniTextureView.participant.equals(videoParticipant))) { + activeSinks.add(miniTextureView.participant); + miniTextureView.forceDetach(false); + if (miniTextureView.secondaryView != null) { + miniTextureView.secondaryView.setRenderer(null); + } + if (miniTextureView.primaryView != null) { + miniTextureView.primaryView.setRenderer(null); + } + miniTextureView.animate().alpha(0f).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (miniTextureView.getParent() != null) { + containerView.removeView(miniTextureView); + } + } + }); + } + } + listViewVideoVisibility = true; + tabletGridAdapter.setVisibility(tabletVideoGridView, false, false); + + if (!activeSinks.isEmpty()) { + AndroidUtilities.runOnUIThread(() -> { + for (int i = 0; i < 
attachedRenderers.size(); i++) { + if (attachedRenderers.get(i).participant != null) { + activeSinks.remove(attachedRenderers.get(i).participant); + } + } + for (int i = 0; i < activeSinks.size(); i++) { + ChatObject.VideoParticipant participant = activeSinks.get(i); + if (participant.participant.self) { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().setLocalSink(null, participant.presentation); + } + } else { + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().removeRemoteSink(participant.participant, participant.presentation); + } + } + } + }); + } + } + boolean updateScroll = !renderersContainer.inFullscreenMode; + listView.getViewTreeObserver().addOnPreDrawListener(requestFullscreenListener = new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + requestFullscreenListener = null; + renderersContainer.requestFullscreen(videoParticipant); + if (delayedGroupCallUpdated) { + delayedGroupCallUpdated = false; + applyCallParticipantUpdates(); + if (updateScroll && videoParticipant != null) { + listView.scrollToPosition(0); + } + delayedGroupCallUpdated = true; + } else { + applyCallParticipantUpdates(); + } + return false; + } + }); + } else { + if (requestFullscreenListener != null) { + listView.getViewTreeObserver().removeOnPreDrawListener(requestFullscreenListener); + requestFullscreenListener = null; + } + + if (videoParticipant != null) { + if (fullscreenUsersListView.getVisibility() != View.VISIBLE) { + fullscreenUsersListView.setVisibility(View.VISIBLE); + fullscreenAdapter.update(false, fullscreenUsersListView); + delayedGroupCallUpdated = true; + if (!renderersContainer.inFullscreenMode) { + fullscreenAdapter.scrollTo(videoParticipant, fullscreenUsersListView); + } + listView.getViewTreeObserver().addOnPreDrawListener(requestFullscreenListener = new ViewTreeObserver.OnPreDrawListener() { + 
@Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + requestFullscreenListener = null; + renderersContainer.requestFullscreen(videoParticipant); + AndroidUtilities.updateVisibleRows(fullscreenUsersListView); + return false; + } + }); + } else { + renderersContainer.requestFullscreen(videoParticipant); + AndroidUtilities.updateVisibleRows(fullscreenUsersListView); + } + } else { + if (listView.getVisibility() != View.VISIBLE) { + listView.setVisibility(View.VISIBLE); + applyCallParticipantUpdates(); + delayedGroupCallUpdated = true; + listView.getViewTreeObserver().addOnPreDrawListener(requestFullscreenListener = new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + renderersContainer.requestFullscreen(null); + AndroidUtilities.updateVisibleRows(fullscreenUsersListView); + return false; + } + }); + } else { + listView.getViewTreeObserver().addOnPreDrawListener(requestFullscreenListener = new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + listView.getViewTreeObserver().removeOnPreDrawListener(this); + renderersContainer.requestFullscreen(null); + AndroidUtilities.updateVisibleRows(fullscreenUsersListView); + return false; + } + }); + } + } + } + } + + public void enableCamera() { + cameraButton.callOnClick(); } private void checkContentOverlayed() { - boolean overlayed = !avatarPriviewTransitionInProgress && blurredView.getVisibility() == View.VISIBLE && blurredView.getAlpha() == 1f;; + boolean overlayed = !avatarPriviewTransitionInProgress && blurredView.getVisibility() == View.VISIBLE && blurredView.getAlpha() == 1f; + if (contentFullyOverlayed != overlayed) { contentFullyOverlayed = overlayed; buttonsContainer.invalidate(); containerView.invalidate(); listView.invalidate(); } - }; + } private void updateScheduleUI(boolean animation) { if ((scheduleTimerContainer == null || call 
!= null) && scheduleAnimator == null) { @@ -3477,6 +5227,8 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter alpha = switchToButtonProgress / 0.6f; } + float muteButtonScale = isLandscapeMode ? scheduleButtonsScale2 * AndroidUtilities.dp(52) / (float) (muteButton.getMeasuredWidth() - AndroidUtilities.dp(8)) : scheduleButtonsScale2; + float reversedAlpha = 1.0f - alpha; leaveButton.setAlpha(alpha); soundButton.setAlpha(alpha * (soundButton.isEnabled() ? 1.0f : 0.5f)); @@ -3486,19 +5238,24 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter scheduleStartAtTextView.setAlpha(alpha); scheduleTimeTextView.setAlpha(alpha); muteLabel[0].setAlpha(alpha); - muteSubLabel[0].setAlpha(alpha); scheduleTimeTextView.setScaleX(scheduleButtonsScale2); scheduleTimeTextView.setScaleY(scheduleButtonsScale2); leaveButton.setScaleX(scheduleButtonsScale2); leaveButton.setScaleY(scheduleButtonsScale2); soundButton.setScaleX(scheduleButtonsScale2); soundButton.setScaleY(scheduleButtonsScale2); - muteButton.setScaleX(scheduleButtonsScale2); - muteButton.setScaleY(scheduleButtonsScale2); + muteButton.setScaleX(muteButtonScale); + muteButton.setScaleY(muteButtonScale); scheduleButtonTextView.setScaleX(reversedAlpha); scheduleButtonTextView.setScaleY(reversedAlpha); scheduleButtonTextView.setAlpha(reversedAlpha); scheduleInfoTextView.setAlpha(reversedAlpha); + cameraButton.setAlpha(alpha); + cameraButton.setScaleY(scheduleButtonsScale2); + cameraButton.setScaleX(scheduleButtonsScale2); + flipButton.setAlpha(alpha); + flipButton.setScaleY(scheduleButtonsScale2); + flipButton.setScaleX(scheduleButtonsScale2); otherItem.setAlpha(alpha); int newVisibility = reversedAlpha == 0.0f ? 
View.INVISIBLE : View.VISIBLE; if (newVisibility != scheduleTimerContainer.getVisibility()) { @@ -3516,13 +5273,16 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter return; } callInitied = true; - oldParticipants.addAll(call.sortedParticipants); + oldParticipants.addAll(call.visibleParticipants); + oldVideoParticipants.addAll(visibleVideoParticipants); oldInvited.addAll(call.invitedUsers); currentCallState = service.getCallState(); if (call == null) { call = service.groupCall; + fullscreenAdapter.setGroupCall(call); + renderersContainer.setGroupCall(call); + tabletGridAdapter.setGroupCall(call); } - actionBar.setSubtitle(LocaleController.formatPluralString("Participants", call.call.participants_count + (listAdapter.addSelfToCounter() ? 1 : 0))); actionBar.setTitleRightMargin(AndroidUtilities.dp(48) * 2); call.saveActiveDates(); VoIPService.getSharedInstance().registerStateListener(this); @@ -3562,19 +5322,123 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } } + private void updateSubtitle() { + if (actionBar == null || call == null) { + return; + } + SpannableStringBuilder spannableStringBuilder = null; + int speakingIndex = 0; + for (int i = 0; i < call.currentSpeakingPeers.size(); i++) { + int key = call.currentSpeakingPeers.keyAt(i); + TLRPC.TL_groupCallParticipant participant = call.currentSpeakingPeers.get(key); + if (participant.self || renderersContainer.isVisible(participant)) { + continue; + } + int peerId = MessageObject.getPeerId(participant.peer); + long diff = SystemClock.uptimeMillis() - participant.lastSpeakTime; + boolean newSpeaking = diff < 500; + if (newSpeaking) { + if (spannableStringBuilder == null) { + spannableStringBuilder = new SpannableStringBuilder(); + } + if (speakingIndex < 2) { + TLRPC.User user = peerId > 0 ? MessagesController.getInstance(currentAccount).getUser(peerId) : null; + TLRPC.Chat chat = peerId <= 0 ? 
MessagesController.getInstance(currentAccount).getChat(peerId) : null; + if (user == null && chat == null) { + continue; + } + if (speakingIndex != 0) { + spannableStringBuilder.append(", "); + } + if (user != null) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + spannableStringBuilder.append(UserObject.getFirstName(user), new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0); + } else { + spannableStringBuilder.append(UserObject.getFirstName(user)); + } + } else { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + spannableStringBuilder.append(chat.title, new TypefaceSpan(AndroidUtilities.getTypeface("fonts/rmedium.ttf")), 0); + } else { + spannableStringBuilder.append(chat.title); + } + } + } + speakingIndex++; + if (speakingIndex == 2) { + break; + } + } + } + boolean drawStatus; + if (speakingIndex > 0) { + String s = LocaleController.getPluralString("MembersAreSpeakingToast", speakingIndex); + int replaceIndex = s.indexOf("un1"); + SpannableStringBuilder spannableStringBuilder1 = new SpannableStringBuilder(s); + spannableStringBuilder1.replace(replaceIndex, replaceIndex + 3, spannableStringBuilder); + actionBar.getAdditionalSubtitleTextView().setText(spannableStringBuilder1); + drawStatus = true; + } else { + drawStatus = false; + } + actionBar.getSubtitleTextView().setText(LocaleController.formatPluralString("Participants", call.call.participants_count + (listAdapter.addSelfToCounter() ? 1 : 0))); + + if (drawStatus != drawSpeakingSubtitle) { + drawSpeakingSubtitle = drawStatus; + actionBar.invalidate(); + actionBar.getSubtitleTextView().setPivotX(0); + actionBar.getSubtitleTextView().setPivotY(actionBar.getMeasuredHeight() >> 1); + actionBar.getSubtitleTextView().animate().scaleX(drawSpeakingSubtitle ? 0.98f : 1f).scaleY(drawSpeakingSubtitle ? 0.9f : 1f).alpha(drawSpeakingSubtitle ? 
0f : 1f).setDuration(150); + AndroidUtilities.updateViewVisibilityAnimated(actionBar.getAdditionalSubtitleTextView(), drawSpeakingSubtitle); + } + } + + @Override + public void show() { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 2048); + super.show(); + } + @Override public void dismissInternal() { + if (renderersContainer != null) { + if (requestFullscreenListener != null) { + listView.getViewTreeObserver().removeOnPreDrawListener(requestFullscreenListener); + requestFullscreenListener = null; + } + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + attachedRenderersTmp.get(i).saveThumb(); + renderersContainer.removeView(attachedRenderersTmp.get(i)); + attachedRenderersTmp.get(i).release(); + attachedRenderersTmp.get(i).forceDetach(true); + } + attachedRenderers.clear(); + + if (renderersContainer.getParent() != null) { + attachedRenderers.clear(); + containerView.removeView(renderersContainer); + } + } + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.startAllHeavyOperations, 2048); super.dismissInternal(); if (VoIPService.getSharedInstance() != null) { VoIPService.getSharedInstance().unregisterStateListener(this); + VoIPService.getSharedInstance().setSinks(null, null); } if (groupCallInstance == this) { groupCallInstance = null; } + groupCallUiVisible = false; VoIPService.audioLevelsCallback = null; GroupCallPip.updateVisibility(getContext()); + call.clearVideFramesInfo(); + if (VoIPService.getSharedInstance() != null) { + VoIPService.getSharedInstance().clearRemoteSinks(); + } } public final static float MAX_AMPLITUDE = 8_500f; @@ -3590,7 +5454,10 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter updateState(isShowing(), false); } - private UndoView getUndoView() { + public UndoView getUndoView() { + if (!isTabletMode && 
renderersContainer.inFullscreenMode) { + return renderersContainer.getUndoView(); + } if (undoView[0].getVisibility() == View.VISIBLE) { UndoView old = undoView[0]; undoView[0] = undoView[1]; @@ -3660,13 +5527,16 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private void setColorProgress(float progress) { colorProgress = progress; - backgroundColor = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_actionBarUnscrolled), Theme.getColor(Theme.key_voipgroup_actionBar), progress, 1.0f); + float finalColorProgress = colorProgress; + float finalColorProgress2 = Math.max(colorProgress, renderersContainer == null ? 0 : renderersContainer.progressToFullscreenMode); + backgroundColor = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_actionBarUnscrolled), Theme.getColor(Theme.key_voipgroup_actionBar), finalColorProgress, 1.0f); actionBarBackground.setBackgroundColor(backgroundColor); otherItem.redrawPopup(0xff232A33); shadowDrawable.setColorFilter(new PorterDuffColorFilter(backgroundColor, PorterDuff.Mode.MULTIPLY)); - navBarColor = backgroundColor; - int color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled), Theme.getColor(Theme.key_voipgroup_listViewBackground), progress, 1.0f); + navBarColor = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_actionBarUnscrolled), Theme.getColor(Theme.key_voipgroup_actionBar), finalColorProgress2, 1.0f); + + int color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_listViewBackgroundUnscrolled), Theme.getColor(Theme.key_voipgroup_listViewBackground), finalColorProgress, 1.0f); listViewBackgroundPaint.setColor(color); listView.setGlowColor(color); @@ -3674,11 +5544,22 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter muteButton.invalidate(); } - color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_leaveButton), 
Theme.getColor(Theme.key_voipgroup_leaveButtonScrolled), progress, 1.0f); + if (buttonsBackgroundGradientView != null) { + gradientColors[0] = backgroundColor; + gradientColors[1] = Color.TRANSPARENT; + if (Build.VERSION.SDK_INT > Build.VERSION_CODES.Q) { + buttonsBackgroundGradient.setColors(gradientColors); + } else { + buttonsBackgroundGradientView.setBackground(buttonsBackgroundGradient = new GradientDrawable(GradientDrawable.Orientation.BOTTOM_TOP, gradientColors)); + } + buttonsBackgroundGradientView2.setBackgroundColor(gradientColors[0]); + } + + color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_leaveButton), Theme.getColor(Theme.key_voipgroup_leaveButtonScrolled), finalColorProgress, 1.0f); leaveButton.setBackgroundColor(color, color); - color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled), Theme.getColor(Theme.key_voipgroup_lastSeenText), progress, 1.0f); - int color2 = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_mutedIconUnscrolled), Theme.getColor(Theme.key_voipgroup_mutedIcon), progress, 1.0f); + color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled), Theme.getColor(Theme.key_voipgroup_lastSeenText), finalColorProgress, 1.0f); + int color2 = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_mutedIconUnscrolled), Theme.getColor(Theme.key_voipgroup_mutedIcon), finalColorProgress, 1.0f); for (int a = 0, N = listView.getChildCount(); a < N; a++) { View child = listView.getChildAt(a); if (child instanceof GroupCallTextCell) { @@ -3879,13 +5760,15 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter int N = listView.getChildCount(); for (int a = 0; a < N; a++) { View child = listView.getChildAt(a); - minY = Math.min(minY, itemAnimator.getTargetY(child)); + if (listView.getChildAdapterPosition(child) >= 0) { + minY = Math.min(minY, child.getTop()); + } } if (minY < 0 || minY == 
Integer.MAX_VALUE) { minY = N != 0 ? 0 : listView.getPaddingTop(); } boolean show = minY <= ActionBar.getCurrentActionBarHeight() - AndroidUtilities.dp(14); - + minY += ActionBar.getCurrentActionBarHeight() + AndroidUtilities.dp(14); if (show && actionBar.getTag() == null || !show && actionBar.getTag() != null) { actionBar.setTag(show ? 1 : null); if (actionBarAnimation != null) { @@ -3914,6 +5797,12 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter .setInterpolator(CubicBezierInterpolator.DEFAULT) .start(); + actionBar.getAdditionalSubtitleTextView().animate() + .translationY(show ? 0.0f : AndroidUtilities.dp(20)) + .setDuration(300) + .setInterpolator(CubicBezierInterpolator.DEFAULT) + .start(); + actionBarAnimation = new AnimatorSet(); actionBarAnimation.setDuration(140); actionBarAnimation.playTogether( @@ -3928,45 +5817,48 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter }); actionBarAnimation.start(); } - - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) listView.getLayoutParams(); - minY += layoutParams.topMargin; if (scrollOffsetY != minY) { - listView.setTopGlowOffset((int) ((scrollOffsetY = minY) - layoutParams.topMargin)); - - int offset = AndroidUtilities.dp(74); - float t = scrollOffsetY - offset; - int diff; - if (t + backgroundPaddingTop < ActionBar.getCurrentActionBarHeight() * 2) { - int willMoveUpTo = offset - backgroundPaddingTop - AndroidUtilities.dp(14) + ActionBar.getCurrentActionBarHeight(); - float moveProgress = Math.min(1.0f, (ActionBar.getCurrentActionBarHeight() * 2 - t - backgroundPaddingTop) / willMoveUpTo); - diff = (int) (AndroidUtilities.dp(AndroidUtilities.isTablet() ? 
17 : 13) * moveProgress); - float newProgress = Math.min(1.0f, moveProgress); - if (Math.abs(newProgress - colorProgress) > 0.0001f) { - setColorProgress(Math.min(1.0f, moveProgress)); - } - titleTextView.setScaleX(Math.max(0.9f, 1.0f - 0.1f * moveProgress * 1.2f)); - titleTextView.setScaleY(Math.max(0.9f, 1.0f - 0.1f * moveProgress * 1.2f)); - titleTextView.setAlpha(Math.max(0.0f, 1.0f - moveProgress * 1.2f)); - } else { - diff = 0; - titleTextView.setScaleX(1.0f); - titleTextView.setScaleY(1.0f); - titleTextView.setAlpha(1.0f); - if (colorProgress > 0.0001f) { - setColorProgress(0.0f); - } - } - - menuItemsContainer.setTranslationY(Math.max(AndroidUtilities.dp(4), scrollOffsetY - AndroidUtilities.dp(53) - diff)); - titleTextView.setTranslationY(Math.max(AndroidUtilities.dp(4), scrollOffsetY - AndroidUtilities.dp(44) - diff)); - if (scheduleTimerContainer != null) { - scheduleTimerContainer.setTranslationY(Math.max(AndroidUtilities.dp(4), scrollOffsetY - AndroidUtilities.dp(44) - diff)); - } - containerView.invalidate(); + setScrollOffsetY(minY); } } + private void setScrollOffsetY(float scrollOffsetY) { + this.scrollOffsetY = scrollOffsetY; + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) listView.getLayoutParams(); + listView.setTopGlowOffset((int) ((scrollOffsetY) - layoutParams.topMargin)); + + int offset = AndroidUtilities.dp(74); + float t = scrollOffsetY - offset; + int diff; + if (t + backgroundPaddingTop < ActionBar.getCurrentActionBarHeight() * 2) { + int willMoveUpTo = offset - backgroundPaddingTop - AndroidUtilities.dp(14) + ActionBar.getCurrentActionBarHeight(); + float moveProgress = Math.min(1.0f, (ActionBar.getCurrentActionBarHeight() * 2 - t - backgroundPaddingTop) / willMoveUpTo); + diff = (int) (AndroidUtilities.dp(AndroidUtilities.isTablet() ? 
17 : 13) * moveProgress); + float newProgress = Math.min(1.0f, moveProgress); + if (Math.abs(newProgress - colorProgress) > 0.0001f) { + setColorProgress(Math.min(1.0f, moveProgress)); + } + titleTextView.setScaleX(Math.max(0.9f, 1.0f - 0.1f * moveProgress * 1.2f)); + titleTextView.setScaleY(Math.max(0.9f, 1.0f - 0.1f * moveProgress * 1.2f)); + titleTextView.setAlpha(Math.max(0.0f, 1.0f - moveProgress * 1.2f)); + } else { + diff = 0; + titleTextView.setScaleX(1.0f); + titleTextView.setScaleY(1.0f); + titleTextView.setAlpha(1.0f); + if (colorProgress > 0.0001f) { + setColorProgress(0.0f); + } + } + + menuItemsContainer.setTranslationY(Math.max(AndroidUtilities.dp(4), scrollOffsetY - AndroidUtilities.dp(53) - diff)); + titleTextView.setTranslationY(Math.max(AndroidUtilities.dp(4), scrollOffsetY - AndroidUtilities.dp(44) - diff)); + if (scheduleTimerContainer != null) { + scheduleTimerContainer.setTranslationY(Math.max(AndroidUtilities.dp(4), scrollOffsetY - AndroidUtilities.dp(44) - diff)); + } + containerView.invalidate(); + } + private void cancelMutePress() { if (scheduled) { scheduled = false; @@ -4006,7 +5898,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter userSwitchObject = null; } TLRPC.TL_groupCallParticipant participant = call.participants.get(MessageObject.getPeerId(selfPeer)); - if (participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(currentChat)) { + if (!voIPService.micSwitching && participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(currentChat)) { cancelMutePress(); if (participant.raise_hand_rating != 0) { updateMuteButton(MUTE_BUTTON_STATE_RAISED_HAND, animated); @@ -4016,7 +5908,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter voIPService.setMicMute(true, false, false); } else { boolean micMuted = voIPService.isMicMute(); - if (selfUpdated && participant != null && 
participant.muted && !micMuted) { + if (!voIPService.micSwitching && selfUpdated && participant != null && participant.muted && !micMuted) { cancelMutePress(); voIPService.setMicMute(true, false, false); micMuted = true; @@ -4028,21 +5920,156 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } } } + + boolean outgoingVideoIsActive = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().getVideoState(false) == Instance.VIDEO_STATE_ACTIVE; + + + TLRPC.TL_groupCallParticipant participant = call.participants.get(MessageObject.getPeerId(selfPeer)); + boolean mutedByAdmin = participant != null && !participant.can_self_unmute && participant.muted && !ChatObject.canManageCalls(currentChat); + boolean cameraButtonVisible; + boolean flipButtonVisible; + boolean soundButtonVisible; + if ((!mutedByAdmin && call.canStreamVideo) || outgoingVideoIsActive) { + cameraButtonVisible = true; + soundButtonVisible = false; + } else { + cameraButtonVisible = false; + soundButtonVisible = true; + } + + if (outgoingVideoIsActive) { + if (animated && flipButton.getVisibility() != View.VISIBLE) { + flipButton.setScaleX(0.3f); + flipButton.setScaleY(0.3f); + } + flipButtonVisible = true; + } else { + flipButtonVisible = false; + } + + int newButtonsVisibility = (flipButtonVisible ? 1 : 0) + (soundButtonVisible ? 2 : 0) + (cameraButtonVisible ? 4 : 0) + (renderersContainer != null && renderersContainer.inFullscreenMode ? 
8 : 0); + + if (buttonsVisibility != 0 && buttonsVisibility != newButtonsVisibility && animated) { + for (int i = 0; i < buttonsContainer.getChildCount(); i++) { + View child = buttonsContainer.getChildAt(i); + if (child.getVisibility() == View.VISIBLE) { + buttonsAnimationParamsX.put(child, child.getX()); + buttonsAnimationParamsY.put(child, child.getY()); + } + } + animateButtonsOnNextLayout = true; + } + + boolean soundButtonChanged = (buttonsVisibility | 2) != (newButtonsVisibility | 2); + buttonsVisibility = newButtonsVisibility; + + if (cameraButtonVisible) { + cameraButton.setData(R.drawable.calls_video, Color.WHITE, 0, 1f, true, LocaleController.getString("VoipCamera", R.string.VoipCamera), !outgoingVideoIsActive, animated); + cameraButton.setChecked(true, false); + } else { + cameraButton.setVisibility(View.GONE); + } + + if (flipButtonVisible) { + flipButton.setData(0, Color.WHITE, 0, 1f, true, LocaleController.getString("VoipFlip", R.string.VoipFlip), false, false); + flipButton.setChecked(true, false); + } else { + flipButton.setVisibility(View.GONE); + } + + soundButton.setVisibility(soundButtonVisible ? View.VISIBLE : View.GONE); + if (soundButtonChanged && soundButtonVisible) { + updateSpeakerPhoneIcon(false); + } + + if (soundButtonChanged) { + float s = soundButtonVisible ? 
1f : 0.3f; + if (!animated) { + soundButton.animate().cancel(); + soundButton.setScaleX(s); + soundButton.setScaleY(s); + } else { + if (soundButtonVisible) { + soundButton.setScaleX(0.3f); + soundButton.setScaleY(0.3f); + } + soundButton.animate().scaleX(s).scaleY(s).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } + } + + float cameraScale; + if (cameraButton.getVisibility() == View.VISIBLE) { + cameraScale = 1f; + cameraButton.showText(cameraScale == 1f, animated); + } else { + cameraScale = 0.3f; + } + + if (this.cameraButtonScale != cameraScale) { + cameraButtonScale = cameraScale; + if (!animated) { + cameraButton.animate().cancel(); + cameraButton.setScaleX(cameraScale); + cameraButton.setScaleY(cameraScale); + } else { + cameraButton.animate().scaleX(cameraScale).scaleY(cameraScale).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } + } + + float flipButtonScale; + if (isTabletMode) { + flipButtonScale = 0.8f; + } else { + flipButtonScale = isLandscapeMode || (renderersContainer != null && renderersContainer.inFullscreenMode) ? 1f : 0.8f; + } + + if (!outgoingVideoIsActive) { + flipButtonScale = 0.3f; + } + if (!animated) { + flipButton.animate().cancel(); + flipButton.setScaleX(flipButtonScale); + flipButton.setScaleY(flipButtonScale); + } else { + flipButton.animate().scaleX(flipButtonScale).scaleY(flipButtonScale).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } + flipButton.showText(flipButtonScale == 1f, animated); + + + float soundButtonScale = outgoingVideoIsActive ? 
0.3f : 1f; + if (this.soundButtonScale != soundButtonScale) { + this.soundButtonScale = soundButtonScale; + if (!animated) { + soundButton.animate().cancel(); + soundButton.setScaleX(soundButtonScale); + soundButton.setScaleY(soundButtonScale); + } else { + soundButton.animate().scaleX(soundButtonScale).scaleY(soundButtonScale).setDuration(TRANSITION_DURATION).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + } + } } @Override public void onAudioSettingsChanged() { updateSpeakerPhoneIcon(true); - for (int a = 0, N = listView.getChildCount(); a < N; a++) { - View child = listView.getChildAt(a); - if (child instanceof GroupCallUserCell) { - ((GroupCallUserCell) child).applyParticipantChanges(true); - } + if (VoIPService.getSharedInstance() == null || VoIPService.getSharedInstance().isMicMute()) { + setMicAmplitude(0f); + } + if (listView.getVisibility() == View.VISIBLE) { + AndroidUtilities.updateVisibleRows(listView); + } + if (fullscreenUsersListView.getVisibility() == View.VISIBLE) { + AndroidUtilities.updateVisibleRows(fullscreenUsersListView); + } + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + attachedRenderersTmp.get(i).updateAttachState(true); } } private void updateSpeakerPhoneIcon(boolean animated) { - if (soundButton == null) { + if (soundButton == null || soundButton.getVisibility() != View.VISIBLE) { return; } VoIPService service = VoIPService.getSharedInstance(); @@ -4224,38 +6251,27 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter muteLabel[1].setAlpha(0.0f); muteLabel[1].setTranslationY(-AndroidUtilities.dp(5)); muteLabel[1].setText(newText); - muteSubLabel[1].setVisibility(View.VISIBLE); - muteSubLabel[1].setAlpha(0.0f); - muteSubLabel[1].setTranslationY(-AndroidUtilities.dp(5)); - muteSubLabel[1].setText(newSubtext); muteButtonAnimator = ValueAnimator.ofFloat(0.0f, 1.0f); 
muteButtonAnimator.addUpdateListener(animation -> { float v = (float) animation.getAnimatedValue(); muteLabel[0].setAlpha(1.0f - v); muteLabel[0].setTranslationY(AndroidUtilities.dp(5) * v); - muteSubLabel[0].setAlpha(1.0f - v); - muteSubLabel[0].setTranslationY(AndroidUtilities.dp(5) * v); muteLabel[1].setAlpha(v); muteLabel[1].setTranslationY(AndroidUtilities.dp(-5 + 5 * v)); - muteSubLabel[1].setAlpha(v); - muteSubLabel[1].setTranslationY(AndroidUtilities.dp(-5 + 5 * v)); }); muteButtonAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - muteButtonAnimator = null; - TextView temp = muteLabel[0]; - muteLabel[0] = muteLabel[1]; - muteLabel[1] = temp; - temp.setVisibility(View.INVISIBLE); - temp = muteSubLabel[0]; - muteSubLabel[0] = muteSubLabel[1]; - muteSubLabel[1] = temp; - temp.setVisibility(View.INVISIBLE); - for (int a = 0; a < 2; a++) { - muteLabel[a].setTranslationY(0); - muteSubLabel[a].setTranslationY(0); + if (muteButtonAnimator != null) { + muteButtonAnimator = null; + TextView temp = muteLabel[0]; + muteLabel[0] = muteLabel[1]; + muteLabel[1] = temp; + temp.setVisibility(View.INVISIBLE); + for (int a = 0; a < 2; a++) { + muteLabel[a].setTranslationY(0); + } } } }); @@ -4266,7 +6282,6 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter muteButtonState = state; bigMicDrawable.setCurrentFrame(bigMicDrawable.getCustomEndFrame() - 1, false, true); muteLabel[0].setText(newText); - muteSubLabel[0].setText(newSubtext); } updateMuteButtonState(animated); } @@ -4375,8 +6390,21 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (participant != null) { call.participants.delete(selfId); call.sortedParticipants.remove(participant); + call.visibleParticipants.remove(participant); + for (int i = 0; i < call.visibleVideoParticipants.size(); i++) { + ChatObject.VideoParticipant videoParticipant = call.visibleVideoParticipants.get(i); + if 
(MessageObject.getPeerId(videoParticipant.participant.peer) == MessageObject.getPeerId(participant.peer)) { + call.visibleVideoParticipants.remove(i); + i--; + } + } call.call.participants_count--; } + + for (int i = 0; i < call.sortedParticipants.size(); i++) { + TLRPC.TL_groupCallParticipant participant1 = call.sortedParticipants.get(i); + participant1.lastActiveDate = participant1.lastSpeakTime; + } } if (onLeave != null) { onLeave.run(); @@ -4460,7 +6488,12 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private Paint scrimPaint; private GroupCallUserCell scrimView; + private GroupCallGridCell scrimGridView; + private GroupCallMiniTextureView scrimRenderer; + private GroupCallFullscreenAdapter.GroupCallUserCell scrimFullscreenView; + private boolean hasScrimAnchorView; private boolean avatarPriviewTransitionInProgress; + private boolean scrimViewAttached; private int popupAnimationIndex = -1; private AnimatorSet scrimAnimatorSet; private ActionBarPopupWindow scrimPopupWindow; @@ -4481,7 +6514,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (VoIPService.getSharedInstance() == null) { return; } - VoIPService.getSharedInstance().editCallMember(object, true, -1, null); + VoIPService.getSharedInstance().editCallMember(object, true, null, null, null, null); getUndoView().showWithAction(0, UndoView.ACTION_VOIP_MUTED, object, null, null, null); return; } @@ -4590,7 +6623,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter parentActivity.presentFragment(new ChatActivity(args)); dismiss(); } else if (option == 7) { - voIPService.editCallMember(object, true, -1, false); + voIPService.editCallMember(object, true, null, null, false, null); updateMuteButton(MUTE_BUTTON_STATE_MUTED_BY_ADMIN, true); } else if (option == 9) { if (currentAvatarUpdater != null && currentAvatarUpdater.isUploadingImage()) { @@ -4599,7 +6632,8 @@ public class GroupCallActivity extends 
BottomSheet implements NotificationCenter currentAvatarUpdater = new ImageUpdater(true); currentAvatarUpdater.setOpenWithFrontfaceCamera(true); currentAvatarUpdater.setForceDarkTheme(true); - currentAvatarUpdater.setSearchAvailable(false, true); + currentAvatarUpdater.setSearchAvailable(true, true); + currentAvatarUpdater.setShowingFromDialog(true); currentAvatarUpdater.parentFragment = parentActivity.getActionBarLayout().getLastFragment(); currentAvatarUpdater.setDelegate(avatarUpdaterDelegate = new AvatarUpdaterDelegate(peerId)); @@ -4613,16 +6647,16 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter AlertsCreator.createChangeNameAlert(peerId, getContext(), currentAccount); } else { if (option == 5) { - voIPService.editCallMember(object, true, -1, null); + voIPService.editCallMember(object, true, null, null, null, null); getUndoView().showWithAction(0, UndoView.ACTION_VOIP_MUTED_FOR_YOU, object); voIPService.setParticipantVolume(participant.source, 0); } else { if ((participant.flags & 128) != 0 && participant.volume == 0) { participant.volume = 10000; participant.volume_by_admin = false; - voIPService.editCallMember(object, false, participant.volume, null); + voIPService.editCallMember(object, false, null, participant.volume, null, null); } else { - voIPService.editCallMember(object, false, -1, null); + voIPService.editCallMember(object, false, null, null, null, null); } voIPService.setParticipantVolume(participant.source, ChatObject.getParticipantVolume(participant)); getUndoView().showWithAction(0, option == 1 ? 
UndoView.ACTION_VOIP_UNMUTED : UndoView.ACTION_VOIP_UNMUTED_FOR_YOU, object, null, null, null); @@ -4630,7 +6664,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } } - private boolean showMenuForCell(GroupCallUserCell view) { + private boolean showMenuForCell(View rendererCell) { if (itemAnimator.isRunning()) { return false; } @@ -4644,7 +6678,57 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter return false; } - boolean showWithAvatarPreview = containerView.getMeasuredHeight() > containerView.getMeasuredWidth() && !AndroidUtilities.isTablet() && !AndroidUtilities.isInMultiwindow; + clearScrimView(); + + GroupCallUserCell view; + if (rendererCell instanceof GroupCallGridCell) { + GroupCallGridCell groupCallGridCell = ((GroupCallGridCell) rendererCell); + if (groupCallGridCell.getParticipant() == call.videoNotAvailableParticipant) { + return false; + } + view = new GroupCallUserCell(groupCallGridCell.getContext()); + int selfPeerId = MessageObject.getPeerId(selfPeer); + view.setData(accountInstance, groupCallGridCell.getParticipant().participant, call, selfPeerId, null, false); + hasScrimAnchorView = false; + scrimGridView = groupCallGridCell; + scrimRenderer = groupCallGridCell.getRenderer(); + if (!isTabletMode && !isLandscapeMode) { + scrimViewAttached = true; + containerView.addView(view, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 14, 0, 14, 0)); + } else { + scrimViewAttached = false; + } + } else if (rendererCell instanceof GroupCallFullscreenAdapter.GroupCallUserCell) { + GroupCallFullscreenAdapter.GroupCallUserCell groupCallFullscreenCell = ((GroupCallFullscreenAdapter.GroupCallUserCell) rendererCell); + if (groupCallFullscreenCell.getParticipant() == call.videoNotAvailableParticipant.participant) { + return false; + } + view = new GroupCallUserCell(groupCallFullscreenCell.getContext()); + int selfPeerId = MessageObject.getPeerId(selfPeer); + 
view.setData(accountInstance, groupCallFullscreenCell.getParticipant(), call, selfPeerId, null, false); + hasScrimAnchorView = false; + scrimFullscreenView = groupCallFullscreenCell; + scrimRenderer = groupCallFullscreenCell.getRenderer(); + if (scrimRenderer != null && scrimRenderer.showingInFullscreen) { + scrimRenderer = null; + } + containerView.addView(view, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 14, 0, 14, 0)); + scrimViewAttached = true; + } else { + view = (GroupCallUserCell) rendererCell; + hasScrimAnchorView = true; + scrimViewAttached = true; + } + if (view == null) { + return false; + } + + +// if (isLandscapeMode) { +// scrimRenderer = null; +// } + + boolean showWithAvatarPreview = !isLandscapeMode && !isTabletMode && !AndroidUtilities.isInMultiwindow; TLRPC.TL_groupCallParticipant participant = view.getParticipant(); Rect rect = new Rect(); @@ -4821,6 +6905,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter icons.add(R.drawable.msg_voice_muted); options.add(5); } + if (participant.peer.channel_id != 0 && !ChatObject.isMegagroup(currentAccount, participant.peer.channel_id)) { items.add(LocaleController.getString("VoipGroupOpenChannel", R.string.VoipGroupOpenChannel)); icons.add(R.drawable.msg_msgbubble3); @@ -4856,7 +6941,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (scrimPopupWindow != null) { scrimPopupWindow.dismiss(); } else { - if (options.get(i) != 9 && options.get(i) != 10 && options.get(i) != 11) { + if (options.get(i) != 9 && options.get(i) != 10 && options.get(i) != 11) { dismissAvatarPreview(true); } } @@ -4888,13 +6973,18 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter thumbLocation = ImageLocation.getForUserOrChat(currentChat, ImageLocation.TYPE_SMALL); } - if (imageLocation == null) { + boolean hasAttachedRenderer = scrimRenderer != null && scrimRenderer.isAttached(); + if 
(imageLocation == null && !hasAttachedRenderer) { showWithAvatarPreview = false; - } else { + } else if (showWithAvatarPreview) { avatarsViewPager.setParentAvatarImage(scrimView.getAvatarImageView()); - avatarsViewPager.setData(peerId); + avatarsViewPager.setHasActiveVideo(hasAttachedRenderer); + avatarsViewPager.setData(peerId, true); avatarsViewPager.setCreateThumbFromParent(true); avatarsViewPager.initIfEmpty(imageLocation, thumbLocation); + if (scrimRenderer != null) { + scrimRenderer.setShowingAsScrimView(true, true); + } if (MessageObject.getPeerId(selfPeer) == peerId && currentAvatarUpdater != null && avatarUpdaterDelegate != null && avatarUpdaterDelegate.avatar != null) { avatarsViewPager.addUploadingImage(avatarUpdaterDelegate.uploadingImageLocation, ImageLocation.getForLocal(avatarUpdaterDelegate.avatar)); } @@ -4904,23 +6994,20 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter avatarsPreviewShowed = true; popupLayout.measure(View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST), View.MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(1000), View.MeasureSpec.AT_MOST)); containerView.addView(scrimPopupLayout, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT)); - - prepareBlurBitmap(); + useBlur = true;//scrimFullscreenView == null; + if (useBlur) { + prepareBlurBitmap(); + } avatarPriviewTransitionInProgress = true; avatarPreviewContainer.setVisibility(View.VISIBLE); - VolumeSlider finalVolumeSlider = volumeSlider; + if (volumeSlider != null) { + volumeSlider.invalidate(); + } + runAvatarPreviewTransition(true, view); - avatarPreviewContainer.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { - @Override - public boolean onPreDraw() { - if (finalVolumeSlider != null) { - finalVolumeSlider.invalidate(); - } - avatarPreviewContainer.getViewTreeObserver().removeOnPreDrawListener(this); - runAvatarPreviewTransition(true, view); - 
return false; - } - }); + if (scrimFullscreenView != null) { + scrimFullscreenView.getAvatarImageView().setAlpha(0f); + } } else { avatarsPreviewShowed = false; scrimPopupWindow = new ActionBarPopupWindow(popupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT) { @@ -4944,10 +7031,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter scrimAnimatorSet.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { - if (scrimView != null) { - scrimView.setAboutVisible(false); - scrimView = null; - } + clearScrimView(); containerView.invalidate(); listView.invalidate(); if (delayedGroupCallUpdated) { @@ -4970,8 +7054,26 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter scrimPopupWindow.setInputMethodMode(ActionBarPopupWindow.INPUT_METHOD_NOT_NEEDED); scrimPopupWindow.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED); scrimPopupWindow.getContentView().setFocusableInTouchMode(true); - int popupX = AndroidUtilities.dp(14) + listView.getMeasuredWidth() + AndroidUtilities.dp(8) - popupLayout.getMeasuredWidth(); - int popupY = (int) (listView.getY() + view.getY() + view.getClipHeight()); + + int popupX, popupY; + if (scrimFullscreenView != null) { + if (isLandscapeMode) { + popupX = (int) (scrimFullscreenView.getX() + fullscreenUsersListView.getX() + renderersContainer.getX()) - popupLayout.getMeasuredWidth() + AndroidUtilities.dp(32); + popupY = (int) (scrimFullscreenView.getY() + fullscreenUsersListView.getY() + renderersContainer.getY()) - AndroidUtilities.dp(6); + } else { + popupX = (int) (scrimFullscreenView.getX() + fullscreenUsersListView.getX() + renderersContainer.getX()) - AndroidUtilities.dp(14); + popupY = (int) (scrimFullscreenView.getY() + fullscreenUsersListView.getY() + renderersContainer.getY() - popupLayout.getMeasuredHeight()); + } + } else { + popupX = (int) (listView.getX() + listView.getMeasuredWidth() + 
AndroidUtilities.dp(8) - popupLayout.getMeasuredWidth()); + if (hasScrimAnchorView) { + popupY = (int) (listView.getY() + view.getY() + view.getClipHeight()); + } else if (scrimGridView != null) { + popupY = (int) (listView.getY() + scrimGridView.getY() + scrimGridView.getMeasuredHeight()); + } else { + popupY = (int) listView.getY(); + } + } scrimPopupWindow.showAtLocation(listView, Gravity.LEFT | Gravity.TOP, popupX, popupY); scrimAnimatorSet = new AnimatorSet(); @@ -4985,11 +7087,95 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter return true; } - private void runAvatarPreviewTransition(boolean enter, GroupCallUserCell view) { - float fromX = view.getAvatarImageView().getX() + view.getX() + listView.getX() - avatarPreviewContainer.getLeft(); - float fromY = view.getAvatarImageView().getY() + view.getY() + listView.getY() - avatarPreviewContainer.getTop(); + private void clearScrimView() { + if (scrimRenderer != null) { + scrimRenderer.textureView.setRoundCorners(AndroidUtilities.dp(8)); + scrimRenderer.setShowingAsScrimView(false, false); + scrimRenderer.invalidate(); + renderersContainer.invalidate(); + } + if (scrimView != null && !hasScrimAnchorView) { + if (scrimView.getParent() != null) { + containerView.removeView(scrimView); + } + } + if (scrimView != null) { + scrimView.setProgressToAvatarPreview(0f); + scrimView.setAboutVisible(false); + scrimView.getAvatarImageView().setAlpha(1f); + } + if (scrimFullscreenView != null) { + scrimFullscreenView.getAvatarImageView().setAlpha(1f); + } + scrimView = null; + scrimGridView = null; + scrimFullscreenView = null; + scrimRenderer = null; - float fromScale = view.getAvatarImageView().getMeasuredHeight() / (float) avatarPreviewContainer.getMeasuredWidth(); + } + + private void startScreenCapture() { + if (parentActivity == null || Build.VERSION.SDK_INT < 21) { + return; + } + MediaProjectionManager mediaProjectionManager = (MediaProjectionManager) 
parentActivity.getSystemService(Context.MEDIA_PROJECTION_SERVICE); + parentActivity.startActivityForResult(mediaProjectionManager.createScreenCaptureIntent(), LaunchActivity.SCREEN_CAPTURE_REQUEST_CODE); + } + + private void runAvatarPreviewTransition(boolean enter, GroupCallUserCell view) { + float fromX, fromY, fromScale; + int fromRadius; + + float left = AndroidUtilities.dp(14) + containerView.getPaddingLeft(); + float top = AndroidUtilities.dp(14) + containerView.getPaddingTop(); + if (hasScrimAnchorView) { + fromX = view.getAvatarImageView().getX() + view.getX() + listView.getX() - left; + fromY = view.getAvatarImageView().getY() + view.getY() + listView.getY() - top; + fromScale = view.getAvatarImageView().getMeasuredHeight() / (float) listView.getMeasuredWidth(); + fromRadius = (int) ((view.getAvatarImageView().getMeasuredHeight() >> 1) / fromScale); + } else { + if (scrimRenderer == null) { + previewTextureTransitionEnabled = true; + } else { + previewTextureTransitionEnabled = enter || avatarsViewPager.getRealPosition(avatarsViewPager.getCurrentItem()) == 0; + } + if (scrimGridView != null && previewTextureTransitionEnabled) { + fromX = scrimGridView.getX() + listView.getX() - left; + fromY = scrimGridView.getY() + listView.getY() + AndroidUtilities.dp(2) - top; + fromScale = 1f; + fromRadius = 0; + } else if (scrimFullscreenView != null) { + if (scrimRenderer == null) { + fromX = scrimFullscreenView.getAvatarImageView().getX() + scrimFullscreenView.getX() + fullscreenUsersListView.getX() + renderersContainer.getX() - left; + fromY = scrimFullscreenView.getAvatarImageView().getY() + scrimFullscreenView.getY() + fullscreenUsersListView.getY() + renderersContainer.getY() - top; + fromScale = scrimFullscreenView.getAvatarImageView().getMeasuredHeight() / (float) listView.getMeasuredWidth(); + fromRadius = (int) ((scrimFullscreenView.getAvatarImageView().getMeasuredHeight() >> 1) / fromScale); + } else if (previewTextureTransitionEnabled) { + fromX = 
scrimFullscreenView.getX() + fullscreenUsersListView.getX() + renderersContainer.getX() - left; + fromY = scrimFullscreenView.getY() + fullscreenUsersListView.getY() + renderersContainer.getY() - top; + fromScale = 1f; + fromRadius = 0; + } else { + fromX = 0; + fromY = 0; + fromScale = 0.96f; + fromRadius = 0; + } + } else { + fromX = 0; + fromY = 0; + fromScale = 0.96f; + fromRadius = 0; + } + + if (!previewTextureTransitionEnabled && scrimRenderer != null) { + scrimRenderer.invalidate(); + renderersContainer.invalidate(); + scrimRenderer.setShowingAsScrimView(false, false); + scrimRenderer = null; + } + + } if (enter) { avatarPreviewContainer.setScaleX(fromScale); @@ -4999,41 +7185,66 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter avatarPagerIndicator.setAlpha(0); } - int fromRadius = (int) ((view.getAvatarImageView().getMeasuredHeight() >> 1) / fromScale); - avatarsViewPager.setRoundRadius(fromRadius, fromRadius); - if (enter) { - blurredView.setAlpha(0f); + if (useBlur) { + if (enter) { + blurredView.setAlpha(0f); + } + blurredView.animate().alpha(enter ? 1f : 0).setDuration(220).start(); } - blurredView.animate().alpha(enter ? 1f : 0).setDuration(220).start(); - avatarPreviewContainer.animate() - .scaleX(enter ? 1f : fromScale) - .scaleY(enter ? 1f : fromScale) - .translationY(enter ? 0 : fromY) - .translationX(enter ? 0 : fromX).setInterpolator(CubicBezierInterpolator.DEFAULT).setDuration(220).start(); - avatarPagerIndicator.animate().alpha(enter ? 1f : 0).setDuration(220).start(); + + if (!enter && scrimRenderer != null) { + scrimRenderer.setShowingAsScrimView(false, true); + if (avatarsViewPager.getRealPosition(avatarsViewPager.getCurrentItem()) != 0) { + scrimRenderer.textureView.cancelAnimation(); + scrimGridView = null; + } + } + ValueAnimator valueAnimator = ValueAnimator.ofFloat(enter ? 0 : 1f, enter ? 
1f : 0); + valueAnimator.addUpdateListener((valueAnimator1) -> { progressToAvatarPreview = (float) valueAnimator1.getAnimatedValue(); + renderersContainer.progressToScrimView = progressToAvatarPreview; + float s = fromScale * (1f - progressToAvatarPreview) + 1f * progressToAvatarPreview; + avatarPreviewContainer.setScaleX(s); + avatarPreviewContainer.setScaleY(s); + avatarPreviewContainer.setTranslationX(fromX * (1f - progressToAvatarPreview)); + avatarPreviewContainer.setTranslationY(fromY * (1f - progressToAvatarPreview)); + + if (!useBlur) { + scrimPaint.setAlpha((int) (100 * progressToAvatarPreview)); + } + + if (scrimRenderer != null) { + scrimRenderer.textureView.setRoundCorners(AndroidUtilities.dp(8) * (1f - progressToAvatarPreview)); + } + avatarPreviewContainer.invalidate(); containerView.invalidate(); avatarsViewPager.setRoundRadius((int) (fromRadius * (1f - progressToAvatarPreview)), (int) (fromRadius * (1f - progressToAvatarPreview))); }); - popupAnimationIndex = accountInstance.getNotificationCenter().setAnimationInProgress(popupAnimationIndex, new int[]{NotificationCenter.dialogPhotosLoaded, NotificationCenter.fileDidLoad, NotificationCenter.messagesDidLoad}); + popupAnimationIndex = accountInstance.getNotificationCenter().setAnimationInProgress(popupAnimationIndex, new int[]{NotificationCenter.dialogPhotosLoaded, NotificationCenter.fileLoaded, NotificationCenter.messagesDidLoad}); + + GroupCallMiniTextureView videoRenderer = scrimGridView == null ? null : scrimRenderer; + if (videoRenderer != null) { + videoRenderer.animateToScrimView = true; + } valueAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { + if (videoRenderer != null) { + videoRenderer.animateToScrimView = false; + } accountInstance.getNotificationCenter().onAnimationFinish(popupAnimationIndex); avatarPriviewTransitionInProgress = false; progressToAvatarPreview = enter ? 
1f : 0f; + renderersContainer.progressToScrimView = progressToAvatarPreview; if (!enter) { - if (scrimView != null) { - scrimView.setProgressToAvatarPreview(0f); - scrimView.setAboutVisible(false); - scrimView = null; - } + scrimPaint.setAlpha(0); + clearScrimView(); if (scrimPopupLayout.getParent() != null) { containerView.removeView(scrimPopupLayout); } @@ -5048,13 +7259,16 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter delayedGroupCallUpdated = false; applyCallParticipantUpdates(); } + + if (scrimRenderer != null) { + scrimRenderer.textureView.setRoundCorners(0); + } } else { - avatarPreviewContainer.animate().cancel(); avatarPreviewContainer.setAlpha(1f); avatarPreviewContainer.setScaleX(1f); avatarPreviewContainer.setScaleY(1f); - avatarPreviewContainer.setTranslationX(1f); - avatarPreviewContainer.setTranslationY(1f); + avatarPreviewContainer.setTranslationX(0); + avatarPreviewContainer.setTranslationY(0); } checkContentOverlayed(); containerView.invalidate(); @@ -5062,9 +7276,16 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter listView.invalidate(); } }); - valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); - valueAnimator.setDuration(220); - valueAnimator.start(); + if (!hasScrimAnchorView && scrimRenderer != null) { + valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + valueAnimator.setDuration(220); + scrimRenderer.textureView.setAnimateNextDuration(220); + scrimRenderer.textureView.synchOrRunAnimation(valueAnimator); + } else { + valueAnimator.setInterpolator(CubicBezierInterpolator.DEFAULT); + valueAnimator.setDuration(220); + valueAnimator.start(); + } checkContentOverlayed(); } @@ -5077,11 +7298,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter avatarPriviewTransitionInProgress = true; runAvatarPreviewTransition(false, scrimView); } else { - if (scrimView != null) { - scrimView.setProgressToAvatarPreview(0f); - 
scrimView.setAboutVisible(false); - scrimView = null; - } + clearScrimView(); containerView.removeView(scrimPopupLayout); scrimPopupLayout = null; avatarPreviewContainer.setVisibility(View.GONE); @@ -5109,6 +7326,10 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter private int addMemberRow; private int lastRow; private int rowsCount; + private int usersVideoGridStartRow; + private int usersVideoGridEndRow; + private int videoGridDividerRow; + private int videoCount; private boolean hasSelfUser; @@ -5133,15 +7354,23 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter return; } rowsCount = 0; - if ((!ChatObject.isChannel(currentChat) || currentChat.megagroup) && ChatObject.canWriteToChat(currentChat) || ChatObject.isChannel(currentChat) && !currentChat.megagroup && !TextUtils.isEmpty(currentChat.username)) { - addMemberRow = rowsCount++; - } else { - addMemberRow = -1; - } hasSelfUser = call.participants.indexOfKey(MessageObject.getPeerId(selfPeer)) >= 0; + + usersVideoGridStartRow = rowsCount; + rowsCount += visibleVideoParticipants.size(); + usersVideoGridEndRow = rowsCount; + + videoCount = visibleVideoParticipants.size(); + + if (videoCount > 0) { + videoGridDividerRow = rowsCount++; + } else { + videoGridDividerRow = -1; + } usersStartRow = rowsCount; - rowsCount += call.sortedParticipants.size(); + rowsCount += call.visibleParticipants.size(); usersEndRow = rowsCount; + if (call.invitedUsers.isEmpty()) { invitedStartRow = -1; invitedEndRow = -1; @@ -5150,6 +7379,13 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter rowsCount += call.invitedUsers.size(); invitedEndRow = rowsCount; } + + if ((!ChatObject.isChannel(currentChat) || currentChat.megagroup) && ChatObject.canWriteToChat(currentChat) || ChatObject.isChannel(currentChat) && !currentChat.megagroup && !TextUtils.isEmpty(currentChat.username)) { + addMemberRow = rowsCount++; + } else { + addMemberRow = -1; + } 
+ lastRow = rowsCount++; } @@ -5237,9 +7473,9 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter int color = AndroidUtilities.getOffsetColor(Theme.getColor(Theme.key_voipgroup_lastSeenTextUnscrolled), Theme.getColor(Theme.key_voipgroup_lastSeenText), actionBar.getTag() != null ? 1.0f : 0.0f, 1.0f); textCell.setColors(color, color); if (ChatObject.isChannel(currentChat) && !currentChat.megagroup && !TextUtils.isEmpty(currentChat.username)) { - textCell.setTextAndIcon(LocaleController.getString("VoipGroupShareLink", R.string.VoipGroupShareLink), R.drawable.msg_link, true); + textCell.setTextAndIcon(LocaleController.getString("VoipGroupShareLink", R.string.VoipGroupShareLink), R.drawable.msg_link, false); } else { - textCell.setTextAndIcon(LocaleController.getString("VoipGroupInviteMember", R.string.VoipGroupInviteMember), R.drawable.actions_addmember2, true); + textCell.setTextAndIcon(LocaleController.getString("VoipGroupInviteMember", R.string.VoipGroupInviteMember), R.drawable.actions_addmember2, false); } break; case 1: { @@ -5253,8 +7489,8 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter participant = null; } } else { - if (row >= 0 && row < call.sortedParticipants.size()) { - participant = call.sortedParticipants.get(row); + if (row >= 0 && row < call.visibleParticipants.size()) { + participant = call.visibleParticipants.get(row); } else { participant = null; } @@ -5265,7 +7501,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter TLRPC.FileLocation uploadingAvatar = (peerId == selfPeerId && avatarUpdaterDelegate != null) ? avatarUpdaterDelegate.avatar : null; float uploadingProgress = (uploadingAvatar != null) ? 
avatarUpdaterDelegate.uploadingProgress : 1f; boolean animated = userCell.getParticipant() != null && MessageObject.getPeerId(userCell.getParticipant().peer) == peerId; - userCell.setData(accountInstance, participant, call, selfPeerId, uploadingAvatar); + userCell.setData(accountInstance, participant, call, selfPeerId, uploadingAvatar, animated); userCell.setUploadProgress(uploadingProgress, animated); } break; @@ -5292,6 +7528,41 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } break; } + case 4: { + GroupCallGridCell userCell = (GroupCallGridCell) holder.itemView; + ChatObject.VideoParticipant oldParticipant = userCell.getParticipant(); + int row = position - usersVideoGridStartRow; + userCell.spanCount = spanSizeLookup.getSpanSize(position); + + ChatObject.VideoParticipant participant; + + if (delayedGroupCallUpdated) { + if (row >= 0 && row < oldVideoParticipants.size()) { + participant = oldVideoParticipants.get(row); + } else { + participant = null; + } + } else { + if (row >= 0 && row < visibleVideoParticipants.size()) { + participant = visibleVideoParticipants.get(row); + } else { + participant = null; + } + } + if (participant != null) { + int peerId = MessageObject.getPeerId(participant.participant.peer); + int selfPeerId = MessageObject.getPeerId(selfPeer); + TLRPC.FileLocation uploadingAvatar = (peerId == selfPeerId && avatarUpdaterDelegate != null) ? avatarUpdaterDelegate.avatar : null; + float uploadingProgress = (uploadingAvatar != null) ? 
avatarUpdaterDelegate.uploadingProgress : 1f; + boolean animated = userCell.getParticipant() != null && userCell.getParticipant().equals(participant); + userCell.setData(accountInstance, participant, call, selfPeerId); + } + if (oldParticipant != null && !oldParticipant.equals(participant) && userCell.attached && userCell.getRenderer() != null) { + attachRenderer(userCell, false); + attachRenderer(userCell, true); + } + break; + } } } @@ -5300,7 +7571,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter int type = holder.getItemViewType(); if (type == 1) { return true; - } else if (type == 3) { + } else if (type == 3 || type == 4 || type == 5) { return false; } return true; @@ -5311,7 +7582,17 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter View view; switch (viewType) { case 0: - view = new GroupCallTextCell(mContext); + view = new GroupCallTextCell(mContext) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (AndroidUtilities.isTablet()) { + int w = Math.min(AndroidUtilities.dp(420), MeasureSpec.getSize(widthMeasureSpec)); + super.onMeasure(MeasureSpec.makeMeasureSpec(w, MeasureSpec.EXACTLY), heightMeasureSpec); + } else { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + } + }; break; case 1: view = new GroupCallUserCell(mContext) { @@ -5319,17 +7600,63 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter protected void onMuteClick(GroupCallUserCell cell) { showMenuForCell(cell); } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (AndroidUtilities.isTablet()) { + int w = Math.min(AndroidUtilities.dp(420), MeasureSpec.getSize(widthMeasureSpec)); + super.onMeasure(MeasureSpec.makeMeasureSpec(w, MeasureSpec.EXACTLY), heightMeasureSpec); + } else { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + } }; break; case 2: - view = new 
GroupCallInvitedCell(mContext); + view = new GroupCallInvitedCell(mContext) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + if (AndroidUtilities.isTablet()) { + int w = Math.min(AndroidUtilities.dp(420), MeasureSpec.getSize(widthMeasureSpec)); + super.onMeasure(MeasureSpec.makeMeasureSpec(w, MeasureSpec.EXACTLY), heightMeasureSpec); + } else { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + } + } + }; + break; + case 4: + view = new GroupCallGridCell(mContext, false) { + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + if (listView.getVisibility() == View.VISIBLE && listViewVideoVisibility) { + attachRenderer(this, true); + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + attachRenderer(this, false); + } + }; + break; + case 5: + view = new View(mContext) { + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(isLandscapeMode ? 
0 : 8), MeasureSpec.EXACTLY)); + } + }; break; case 3: default: view = new View(mContext); break; } - view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT)); + RecyclerView.LayoutParams params = new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT); + view.setLayoutParams(params); return new RecyclerListView.Holder(view); } @@ -5341,25 +7668,51 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (position == addMemberRow) { return 0; } + if (position == videoGridDividerRow) { + return 5; + } if (position >= usersStartRow && position < usersEndRow) { return 1; } + if (position >= usersVideoGridStartRow && position < usersVideoGridEndRow) { + return 4; + } return 2; } } + private void attachRenderer(GroupCallGridCell cell, boolean attach) { + if (isDismissed()) { + return; + } + if (attach && cell.getRenderer() == null) { + cell.setRenderer(GroupCallMiniTextureView.getOrCreate(attachedRenderers, renderersContainer, cell, null, null, cell.getParticipant(), call, this)); + } else if (!attach) { + if (cell.getRenderer() != null) { + cell.getRenderer().setPrimaryView(null); + cell.setRenderer(null); + } + } + } + private int oldAddMemberRow; private int oldUsersStartRow; private int oldUsersEndRow; private int oldInvitedStartRow; private int oldInvitedEndRow; + private int oldUsersVideoStartRow; + private int oldUsersVideoEndRow; + private int oldVideoDividerRow; - public void setOldRows(int addMemberRow, int usersStartRow, int usersEndRow, int invitedStartRow, int invitedEndRow) { + public void setOldRows(int addMemberRow, int usersStartRow, int usersEndRow, int invitedStartRow, int invitedEndRow, int usersVideoStartRow, int usersVideoEndRow, int videoDividerRow) { oldAddMemberRow = addMemberRow; oldUsersStartRow = usersStartRow; oldUsersEndRow = usersEndRow; oldInvitedStartRow = invitedStartRow; 
oldInvitedEndRow = invitedEndRow; + oldUsersVideoStartRow = usersVideoStartRow; + oldUsersVideoEndRow = usersVideoEndRow; + oldVideoDividerRow = videoDividerRow; } private DiffUtil.Callback diffUtilsCallback = new DiffUtil.Callback() { @@ -5384,15 +7737,23 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter return false; } } + if (listAdapter.videoGridDividerRow >= 0 && listAdapter.videoGridDividerRow == newItemPosition && oldItemPosition == oldVideoDividerRow) { + return true; + } if (oldItemPosition == oldCount - 1 && newItemPosition == listAdapter.rowsCount - 1) { return true; } else if (oldItemPosition == oldCount - 1 || newItemPosition == listAdapter.rowsCount - 1) { return false; } - if ((newItemPosition >= listAdapter.usersStartRow && newItemPosition < listAdapter.usersEndRow) && + if ((newItemPosition >= listAdapter.usersVideoGridStartRow && newItemPosition < listAdapter.usersVideoGridEndRow) && + (oldItemPosition >= oldUsersVideoStartRow && oldItemPosition < oldUsersVideoEndRow)) { + ChatObject.VideoParticipant oldItem = oldVideoParticipants.get(oldItemPosition - oldUsersVideoStartRow); + ChatObject.VideoParticipant newItem = visibleVideoParticipants.get(newItemPosition - listAdapter.usersVideoGridStartRow); + return oldItem.equals(newItem); + } else if ((newItemPosition >= listAdapter.usersStartRow && newItemPosition < listAdapter.usersEndRow) && (oldItemPosition >= oldUsersStartRow && oldItemPosition < oldUsersEndRow)) { TLRPC.TL_groupCallParticipant oldItem = oldParticipants.get(oldItemPosition - oldUsersStartRow); - TLRPC.TL_groupCallParticipant newItem = call.sortedParticipants.get(newItemPosition - listAdapter.usersStartRow); + TLRPC.TL_groupCallParticipant newItem = call.visibleParticipants.get(newItemPosition - listAdapter.usersStartRow); return MessageObject.getPeerId(oldItem.peer) == MessageObject.getPeerId(newItem.peer) && (oldItemPosition == newItemPosition || oldItem.lastActiveDate == oldItem.active_date); } 
else if (newItemPosition >= listAdapter.invitedStartRow && newItemPosition < listAdapter.invitedEndRow && oldItemPosition >= oldInvitedStartRow && oldItemPosition < oldInvitedEndRow) { @@ -5461,6 +7822,14 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter dismissAvatarPreview(true); return; } + if (renderersContainer.inFullscreenMode) { + fullscreenFor(null); + return; + } + if (previewDialog != null) { + previewDialog.dismiss(false); + return; + } super.onBackPressed(); } @@ -5635,5 +8004,144 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } } + public View getScrimView() { + return scrimView; + } + @Override + public void onCameraSwitch(boolean isFrontFace) { + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + attachedRenderersTmp.get(i).updateAttachState(true); + } + if (previewDialog != null) { + previewDialog.update(); + } + } + + private class GroupCallItemAnimator extends DefaultItemAnimator { + + public float animationProgress; + public ValueAnimator animator; + + HashSet addingHolders = new HashSet<>(); + HashSet removingHolders = new HashSet<>(); + + float outMaxBottom; + float outMinTop; + + @Override + public void endAnimations() { + super.endAnimations(); + removingHolders.clear(); + addingHolders.clear(); + outMinTop = Float.MAX_VALUE; + listView.invalidate(); + } + + public void updateBackgroundBeforeAnimation() { + if (animator != null) { + return; + } + addingHolders.clear(); + addingHolders.addAll(mPendingAdditions); + + removingHolders.clear(); + removingHolders.addAll(mPendingRemovals); + + outMaxBottom = 0; + outMinTop = Float.MAX_VALUE; + + if (!addingHolders.isEmpty() || !removingHolders.isEmpty()) { + for (int a = 0, N = listView.getChildCount(); a < N; a++) { + View child = listView.getChildAt(a); + RecyclerView.ViewHolder holder = listView.findContainingViewHolder(child); + if 
(holder == null || holder.getItemViewType() == 3 || holder.getItemViewType() == 4 || holder.getItemViewType() == 5) { + continue; + } + if (!addingHolders.contains(holder)) { + outMaxBottom = Math.max(outMaxBottom, child.getY() + child.getMeasuredHeight()); + outMinTop = Math.min(outMinTop, Math.max(0, child.getY())); + } + } + animationProgress = 0f; + listView.invalidate(); + } + } + + + @Override + public void runPendingAnimations() { + boolean removalsPending = !mPendingRemovals.isEmpty(); + boolean movesPending = !mPendingMoves.isEmpty(); + boolean additionsPending = !mPendingAdditions.isEmpty(); + if (animator != null) { + animator.cancel(); + animator = null; + } + if (removalsPending || movesPending || additionsPending) { + animationProgress = 0f; + animator = ValueAnimator.ofFloat(0, 1f); + animator.addUpdateListener(valueAnimator -> { + animationProgress = (float) valueAnimator.getAnimatedValue(); + listView.invalidate(); + renderersContainer.invalidate(); + containerView.invalidate(); + updateLayout(true); + }); + animator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + animator = null; + listView.invalidate(); + renderersContainer.invalidate(); + containerView.invalidate(); + updateLayout(true); + + addingHolders.clear(); + removingHolders.clear(); + } + }); + animator.setDuration(TRANSITION_DURATION); + animator.setInterpolator(CubicBezierInterpolator.DEFAULT); + animator.start(); + + listView.invalidate(); + renderersContainer.invalidate(); + } + super.runPendingAnimations(); + } + } + + @Override + protected boolean canDismissWithTouchOutside() { + return !renderersContainer.inFullscreenMode; + } + + public void onResume() { + paused = false; + listAdapter.notifyDataSetChanged(); + if (fullscreenUsersListView.getVisibility() == View.VISIBLE) { + fullscreenAdapter.update(false, fullscreenUsersListView); + } + if (isTabletMode) { + 
tabletGridAdapter.update(false, tabletVideoGridView); + } + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + attachedRenderersTmp.get(i).updateAttachState(true); + } + } + + public void onPause() { + paused = true; + attachedRenderersTmp.clear(); + attachedRenderersTmp.addAll(attachedRenderers); + for (int i = 0; i < attachedRenderersTmp.size(); i++) { + attachedRenderersTmp.get(i).updateAttachState(false); + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupCallTabletGridAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallTabletGridAdapter.java new file mode 100644 index 000000000..1e21d5f5f --- /dev/null +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallTabletGridAdapter.java @@ -0,0 +1,212 @@ +package org.telegram.ui; + +import android.graphics.Color; +import android.view.View; +import android.view.ViewGroup; + +import androidx.annotation.NonNull; +import androidx.recyclerview.widget.DiffUtil; +import androidx.recyclerview.widget.RecyclerView; + +import org.telegram.messenger.AccountInstance; +import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ChatObject; +import org.telegram.messenger.MessageObject; +import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.Theme; +import org.telegram.ui.Components.RecyclerListView; +import org.telegram.ui.Components.voip.GroupCallGridCell; +import org.telegram.ui.Components.voip.GroupCallMiniTextureView; +import org.telegram.ui.Components.voip.GroupCallRenderersContainer; + +import java.util.ArrayList; + +public class GroupCallTabletGridAdapter extends RecyclerListView.SelectionAdapter { + private ChatObject.Call groupCall; + private final int currentAccount; + + private final ArrayList videoParticipants = new ArrayList<>(); + + private ArrayList attachedRenderers; + private GroupCallRenderersContainer renderersContainer; + private final GroupCallActivity 
activity; + private boolean visible = false; + + public GroupCallTabletGridAdapter(ChatObject.Call groupCall, int currentAccount, GroupCallActivity activity) { + this.groupCall = groupCall; + this.currentAccount = currentAccount; + this.activity = activity; + } + + public void setRenderersPool(ArrayList attachedRenderers, GroupCallRenderersContainer renderersContainer) { + this.attachedRenderers = attachedRenderers; + this.renderersContainer = renderersContainer; + } + + public void setGroupCall(ChatObject.Call groupCall) { + this.groupCall = groupCall; + } + + @Override + public boolean isEnabled(RecyclerView.ViewHolder holder) { + return false; + } + + @NonNull + @Override + public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { + return new RecyclerListView.Holder(new GroupCallGridCell(parent.getContext(), true) { + @Override + protected void onAttachedToWindow() { + super.onAttachedToWindow(); + if (visible && getParticipant() != null) { + attachRenderer(this, true); + } + } + + @Override + protected void onDetachedFromWindow() { + super.onDetachedFromWindow(); + attachRenderer(this, false); + } + }); + } + + private void attachRenderer(GroupCallGridCell cell, boolean attach) { + if (attach && cell.getRenderer() == null) { + cell.setRenderer(GroupCallMiniTextureView.getOrCreate(attachedRenderers, renderersContainer, null, null, cell, cell.getParticipant(), groupCall, activity)); + } else if (!attach) { + if (cell.getRenderer() != null) { + cell.getRenderer().setTabletGridView(null); + cell.setRenderer(null); + } + } + } + + @Override + public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) { + GroupCallGridCell cell = (GroupCallGridCell) holder.itemView; + + ChatObject.VideoParticipant oldVideoParticipant = cell.getParticipant(); + ChatObject.VideoParticipant videoParticipant; + TLRPC.TL_groupCallParticipant participant; + videoParticipant = videoParticipants.get(position); + participant = 
videoParticipants.get(position).participant; + cell.spanCount = getSpanCount(position); + cell.position = position; + cell.gridAdapter = this; + + if (cell.getMeasuredHeight() != getItemHeight(position)) { + cell.requestLayout(); + } + + cell.setData(AccountInstance.getInstance(currentAccount), videoParticipant, groupCall, MessageObject.getPeerId(groupCall.selfPeer)); + + if (oldVideoParticipant != null && !oldVideoParticipant.equals(videoParticipant) && cell.attached && cell.getRenderer() != null) { + attachRenderer(cell, false); + attachRenderer(cell, true); + } else if (cell.getRenderer() != null) { + cell.getRenderer().updateAttachState(true); + } + } + + @Override + public int getItemCount() { + return videoParticipants.size(); + } + + public void setVisibility(RecyclerListView listView, boolean visibility, boolean updateAttach) { + visible = visibility; + if (updateAttach) { + for (int i = 0; i < listView.getChildCount(); i++) { + View view = listView.getChildAt(i); + if (view instanceof GroupCallGridCell) { + GroupCallGridCell cell = (GroupCallGridCell) view; + if (cell.getParticipant() != null) { + attachRenderer(cell, visibility); + } + } + } + } + } + + public void scrollToPeerId(int peerId, RecyclerListView fullscreenUsersListView) { +// for (int i = 0; i < participants.size(); i++) { +// if (peerId == MessageObject.getPeerId(participants.get(i).peer)) { +// ((LinearLayoutManager) fullscreenUsersListView.getLayoutManager()).scrollToPositionWithOffset(i, AndroidUtilities.dp(13)); +// break; +// } +// } + } + + public void update(boolean animated, RecyclerListView listView) { + if (groupCall == null) { + return; + } + if (animated) { + ArrayList oldVideoParticipants = new ArrayList<>(); + + oldVideoParticipants.addAll(videoParticipants); + videoParticipants.clear(); + videoParticipants.addAll(groupCall.visibleVideoParticipants); + + DiffUtil.calculateDiff(new DiffUtil.Callback() { + @Override + public int getOldListSize() { + return 
oldVideoParticipants.size(); + } + + @Override + public int getNewListSize() { + return videoParticipants.size(); + } + + @Override + public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) { + if (oldItemPosition < oldVideoParticipants.size() && newItemPosition < videoParticipants.size()) { + return oldVideoParticipants.get(oldItemPosition).equals(videoParticipants.get(newItemPosition)); + } + return false; + } + + @Override + public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { + return true; + } + }).dispatchUpdatesTo(this); + AndroidUtilities.updateVisibleRows(listView); + } else { + videoParticipants.clear(); + videoParticipants.addAll(groupCall.visibleVideoParticipants); + notifyDataSetChanged(); + } + } + + public int getSpanCount(int position) { + int itemsCount = getItemCount(); + if (itemsCount <= 1) { + return 6; + } else if (itemsCount == 2) { + return 6; + } else if (itemsCount == 3) { + if (position == 0 || position == 1) { + return 3; + } + return 6; + } + + return 3; + } + + public int getItemHeight(int position) { + View parentView = activity.tabletVideoGridView; + int itemsCount = getItemCount(); + if (itemsCount <= 1) { + return parentView.getMeasuredHeight(); + } else if (itemsCount <= 4) { + return parentView.getMeasuredHeight() / 2; + } else { + return (int) (parentView.getMeasuredHeight() / 2.5f); + } + } +} diff --git a/TMessagesProj/src/main/java/org/telegram/ui/IdenticonActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/IdenticonActivity.java index 10c5bdc91..bf20f0448 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/IdenticonActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/IdenticonActivity.java @@ -90,14 +90,14 @@ public class IdenticonActivity extends BaseFragment implements NotificationCente @Override public boolean onFragmentCreate() { chat_id = getArguments().getInt("chat_id"); - NotificationCenter.getGlobalInstance().addObserver(this, 
NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); return super.onFragmentCreate(); } @Override public void onFragmentDestroy() { super.onFragmentDestroy(); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); } @Override @@ -244,7 +244,7 @@ public class IdenticonActivity extends BaseFragment implements NotificationCente @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { if (emojiTextView != null) { emojiTextView.invalidate(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/IntroActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/IntroActivity.java index 55de1873c..8ecffea20 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/IntroActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/IntroActivity.java @@ -51,6 +51,7 @@ import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Components.BottomPagesView; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.SizeNotifierFrameLayout; import javax.microedition.khronos.egl.EGL10; import javax.microedition.khronos.egl.EGLConfig; @@ -269,11 +270,15 @@ public class IntroActivity extends Activity implements NotificationCenter.Notifi FrameLayout frameLayout3 = new FrameLayout(this); setContentView(frameLayout3); - View imageView = new ImageView(this); - BitmapDrawable drawable = (BitmapDrawable) getResources().getDrawable(R.drawable.catstile); - drawable.setTileModeXY(Shader.TileMode.REPEAT, Shader.TileMode.REPEAT); - imageView.setBackgroundDrawable(drawable); - frameLayout3.addView(imageView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + 
SizeNotifierFrameLayout backgroundTablet = new SizeNotifierFrameLayout(this) { + @Override + protected boolean isActionBarVisible() { + return false; + } + }; + backgroundTablet.setOccupyStatusBar(false); + backgroundTablet.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); + frameLayout3.addView(backgroundTablet, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); FrameLayout frameLayout4 = new FrameLayout(this); frameLayout4.setBackgroundResource(R.drawable.btnshadow); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java index 711a60a8d..113820610 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java @@ -17,16 +17,17 @@ import android.app.ActivityManager; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; -import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Matrix; +import android.graphics.Paint; import android.graphics.Point; import android.graphics.Shader; -import android.graphics.drawable.BitmapDrawable; import android.location.LocationManager; import android.media.AudioManager; import android.net.Uri; @@ -39,7 +40,9 @@ import android.provider.ContactsContract; import android.provider.Settings; import android.text.TextUtils; import android.util.Base64; +import android.util.TypedValue; import android.view.ActionMode; +import android.view.Gravity; import android.view.KeyEvent; import android.view.Menu; import android.view.MotionEvent; @@ -53,6 +56,7 @@ import android.widget.FrameLayout; import android.widget.ImageView; 
import android.widget.LinearLayout; import android.widget.RelativeLayout; +import android.widget.TextView; import android.widget.Toast; import androidx.annotation.NonNull; @@ -95,6 +99,7 @@ import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; import org.telegram.messenger.browser.Browser; import org.telegram.messenger.camera.CameraController; +import org.telegram.messenger.voip.VideoCapturerDevice; import org.telegram.messenger.voip.VoIPPendingCall; import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.ConnectionsManager; @@ -104,6 +109,7 @@ import org.telegram.ui.ActionBar.ActionBarLayout; import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.DrawerLayoutContainer; +import org.telegram.ui.ActionBar.SimpleTextView; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Adapters.DrawerLayoutAdapter; import org.telegram.ui.Cells.DrawerAddCell; @@ -115,19 +121,23 @@ import org.telegram.ui.Components.AudioPlayerAlert; import org.telegram.ui.Components.BlockingUpdateView; import org.telegram.ui.Components.Bulletin; import org.telegram.ui.Components.BulletinFactory; +import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.Easings; import org.telegram.ui.Components.EmbedBottomSheet; import org.telegram.ui.Components.GroupCallPip; import org.telegram.ui.Components.JoinGroupAlert; import org.telegram.ui.Components.LayoutHelper; +import org.telegram.ui.Components.MediaActionDrawable; import org.telegram.ui.Components.PasscodeView; import org.telegram.ui.Components.PhonebookShareAlert; import org.telegram.ui.Components.PipRoundVideoView; import org.telegram.ui.Components.RLottieDrawable; import org.telegram.ui.Components.RLottieImageView; +import org.telegram.ui.Components.RadialProgress2; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.SharingLocationsAlert; import 
org.telegram.ui.Components.SideMenultItemAnimator; +import org.telegram.ui.Components.SizeNotifierFrameLayout; import org.telegram.ui.Components.StickerSetBulletinLayout; import org.telegram.ui.Components.StickersAlert; import org.telegram.ui.Components.TermsOfServiceView; @@ -168,6 +178,9 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa private static ArrayList layerFragmentsStack = new ArrayList<>(); private static ArrayList rightFragmentsStack = new ArrayList<>(); private ViewTreeObserver.OnGlobalLayoutListener onGlobalLayoutListener; + private ArrayList importingStickers; + private ArrayList importingStickersEmoji; + private String importingStickersSoftware; private ActionMode visibleActionMode; @@ -181,7 +194,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa private ActionBarLayout rightActionBarLayout; private FrameLayout shadowTablet; private FrameLayout shadowTabletSide; - private View backgroundTablet; + private SizeNotifierFrameLayout backgroundTablet; private FrameLayout frameLayout; public DrawerLayoutContainer drawerLayoutContainer; private DrawerLayoutAdapter drawerLayoutAdapter; @@ -192,6 +205,10 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa private AlertDialog proxyErrorDialog; private RecyclerListView sideMenu; private SideMenultItemAnimator itemAnimator; + private FrameLayout updateLayout; + private RadialProgress2 updateLayoutIcon; + private SimpleTextView updateTextView; + private TextView updateSizeTextView; private AlertDialog localeDialog; private boolean loadingLocaleDialog; @@ -217,6 +234,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa private Runnable lockRunnable; private static final int PLAY_SERVICES_REQUEST_CHECK_SETTINGS = 140; + public static final int SCREEN_CAPTURE_REQUEST_CODE = 520; @Override protected void onCreate(Bundle savedInstanceState) { @@ -312,7 +330,13 @@ public class 
LaunchActivity extends Activity implements ActionBarLayout.ActionBa themeSwitchImageView.setVisibility(View.GONE); } - drawerLayoutContainer = new DrawerLayoutContainer(this); + drawerLayoutContainer = new DrawerLayoutContainer(this) { + @Override + protected void onLayout(boolean changed, int l, int t, int r, int b) { + super.onLayout(changed, l, t, r, b); + setDrawerPosition(getDrawerPosition()); + } + }; drawerLayoutContainer.setBehindKeyboardColor(Theme.getColor(Theme.key_windowBackgroundWhite)); frameLayout.addView(drawerLayoutContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); @@ -396,10 +420,14 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa }; drawerLayoutContainer.addView(launchLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); - backgroundTablet = new View(this); - BitmapDrawable drawable = (BitmapDrawable) getResources().getDrawable(R.drawable.catstile); - drawable.setTileModeXY(Shader.TileMode.REPEAT, Shader.TileMode.REPEAT); - backgroundTablet.setBackgroundDrawable(drawable); + backgroundTablet = new SizeNotifierFrameLayout(this) { + @Override + protected boolean isActionBarVisible() { + return false; + } + }; + backgroundTablet.setOccupyStatusBar(false); + backgroundTablet.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); launchLayout.addView(backgroundTablet, LayoutHelper.createRelative(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); launchLayout.addView(actionBarLayout); @@ -460,6 +488,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa drawerLayoutContainer.addView(actionBarLayout, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); } //FileLog.d("UI create7 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); + FrameLayout sideMenuContainer = new FrameLayout(this); sideMenu = new 
RecyclerListView(this) { @Override public boolean drawChild(Canvas canvas, View child, long drawingTime) { @@ -484,12 +513,13 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa sideMenu.setLayoutManager(new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false)); sideMenu.setAllowItemsInteractionDuringAnimation(false); sideMenu.setAdapter(drawerLayoutAdapter = new DrawerLayoutAdapter(this, itemAnimator)); - drawerLayoutContainer.setDrawerLayout(sideMenu); - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) sideMenu.getLayoutParams(); + sideMenuContainer.addView(sideMenu, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + drawerLayoutContainer.setDrawerLayout(sideMenuContainer); + FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) sideMenuContainer.getLayoutParams(); Point screenSize = AndroidUtilities.getRealScreenSize(); layoutParams.width = AndroidUtilities.isTablet() ? AndroidUtilities.dp(320) : Math.min(AndroidUtilities.dp(320), Math.min(screenSize.x, screenSize.y) - AndroidUtilities.dp(56)); layoutParams.height = LayoutHelper.MATCH_PARENT; - sideMenu.setLayoutParams(layoutParams); + sideMenuContainer.setLayoutParams(layoutParams); sideMenu.setOnItemClickListener((view, position, x, y) -> { if (position == 0) { DrawerProfileCell profileCell = (DrawerProfileCell) view; @@ -709,6 +739,79 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa Theme.loadWallpaper(); //FileLog.d("UI create8 time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); + updateLayout = new FrameLayout(this) { + + private Paint paint = new Paint(); + private Matrix matrix = new Matrix(); + private LinearGradient updateGradient; + private int lastGradientWidth; + + @Override + protected void onDraw(Canvas canvas) { + if (updateGradient == null) { + return; + } + paint.setColor(0xffffffff); + paint.setShader(updateGradient); + 
updateGradient.setLocalMatrix(matrix); + canvas.drawRect(0, 0, getMeasuredWidth(), getMeasuredHeight(), paint); + updateLayoutIcon.setBackgroundGradientDrawable(updateGradient); + updateLayoutIcon.draw(canvas); + } + + @Override + protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { + super.onMeasure(widthMeasureSpec, heightMeasureSpec); + int width = MeasureSpec.getSize(widthMeasureSpec); + if (lastGradientWidth != width) { + updateGradient = new LinearGradient(0, 0, width, 0, new int[]{0xff69BF72, 0xff53B3AD}, new float[]{0.0f, 1.0f}, Shader.TileMode.CLAMP); + lastGradientWidth = width; + } + } + }; + updateLayout.setWillNotDraw(false); + updateLayout.setVisibility(View.INVISIBLE); + updateLayout.setTranslationY(AndroidUtilities.dp(44)); + if (Build.VERSION.SDK_INT >= 21) { + updateLayout.setBackground(Theme.getSelectorDrawable(Theme.getColor(Theme.key_listSelector), null)); + } + sideMenuContainer.addView(updateLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 44, Gravity.LEFT | Gravity.BOTTOM)); + updateLayout.setOnClickListener(v -> { + if (!SharedConfig.isAppUpdateAvailable()) { + return; + } + if (updateLayoutIcon.getIcon() == MediaActionDrawable.ICON_DOWNLOAD) { + FileLoader.getInstance(currentAccount).loadFile(SharedConfig.pendingAppUpdate.document, "update", 1, 1); + updateAppUpdateViews(true); + } else if (updateLayoutIcon.getIcon() == MediaActionDrawable.ICON_CANCEL) { + FileLoader.getInstance(currentAccount).cancelLoadFile(SharedConfig.pendingAppUpdate.document); + updateAppUpdateViews(true); + } else { + AndroidUtilities.openForView(SharedConfig.pendingAppUpdate.document, true, this); + } + }); + + updateLayoutIcon = new RadialProgress2(updateLayout); + updateLayoutIcon.setColors(0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff); + updateLayoutIcon.setProgressRect(AndroidUtilities.dp(22), AndroidUtilities.dp(11), AndroidUtilities.dp(22 + 22), AndroidUtilities.dp(11 + 22)); + 
updateLayoutIcon.setCircleRadius(AndroidUtilities.dp(11)); + updateLayoutIcon.setAsMini(); + + updateTextView = new SimpleTextView(this); + updateTextView.setTextSize(15); + updateTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + updateTextView.setText(LocaleController.getString("AppUpdate", R.string.AppUpdate)); + updateTextView.setTextColor(0xffffffff); + updateTextView.setGravity(Gravity.LEFT); + updateLayout.addView(updateTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL, 74, 0, 0, 0)); + + updateSizeTextView = new TextView(this); + updateSizeTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 15); + updateSizeTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); + updateSizeTextView.setGravity(Gravity.RIGHT); + updateSizeTextView.setTextColor(0xffffffff); + updateLayout.addView(updateSizeTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER_VERTICAL | Gravity.RIGHT, 0, 0, 17, 0)); + passcodeView = new PasscodeView(this); drawerLayoutContainer.addView(passcodeView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); @@ -731,6 +834,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.notificationsCountUpdated); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.screenStateChanged); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.showBulletin); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.appUpdateAvailable); if (actionBarLayout.fragmentsStack.isEmpty()) { if (!UserConfig.getInstance(currentAccount).isClientActivated()) { @@ -861,6 +965,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } MediaController.getInstance().setBaseActivity(this, true); 
AndroidUtilities.startAppCenter(this); + updateAppUpdateViews(false); //FileLog.d("UI create time = " + (SystemClock.elapsedRealtime() - ApplicationLoader.startTime)); } @@ -995,10 +1100,12 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.openArticle); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.hasNewContactsToImport); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.needShowPlayServicesAlert); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadFailed); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.historyImportProgressChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.groupCallUpdated); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.stickersImportComplete); } currentAccount = UserConfig.selectedAccount; NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.appDidLogout); @@ -1009,10 +1116,12 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.openArticle); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.hasNewContactsToImport); NotificationCenter.getInstance(currentAccount).addObserver(this, 
NotificationCenter.needShowPlayServicesAlert); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadFailed); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.historyImportProgressChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.groupCallUpdated); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.stickersImportComplete); } private void checkLayout() { @@ -1132,6 +1241,10 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } else if (ArticleViewer.hasInstance() && ArticleViewer.getInstance().isVisible()) { ArticleViewer.getInstance().close(false, true); } + MessageObject messageObject = MediaController.getInstance().getPlayingMessageObject(); + if (messageObject != null && messageObject.isRoundVideo()) { + MediaController.getInstance().cleanupPlayer(true, true); + } passcodeView.onShow(); SharedConfig.isWaitingForPasscodeEnter = true; drawerLayoutContainer.setAllowOpenDrawer(false, false); @@ -1229,6 +1342,9 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa exportingChatUri = null; contactsToSend = null; contactsToSendUri = null; + importingStickers = null; + importingStickersEmoji = null; + importingStickersSoftware = null; if ((flags & Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) == 0) { if (intent != null && intent.getAction() != null && !restore) { @@ -1381,6 +1497,17 @@ public class LaunchActivity extends Activity implements 
ActionBarLayout.ActionBa if (error) { Toast.makeText(this, "Unsupported content", Toast.LENGTH_SHORT).show(); } + } else if ("org.telegram.messenger.CREATE_STICKER_PACK".equals(intent.getAction())) { + try { + importingStickers = intent.getParcelableArrayListExtra(Intent.EXTRA_STREAM); + importingStickersEmoji = intent.getStringArrayListExtra("STICKER_EMOJIS"); + importingStickersSoftware = intent.getStringExtra("IMPORTER"); + } catch (Throwable e) { + FileLog.e(e); + importingStickers = null; + importingStickersEmoji = null; + importingStickersSoftware = null; + } } else if (Intent.ACTION_SEND_MULTIPLE.equals(intent.getAction())) { boolean error = false; try { @@ -1533,11 +1660,16 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } wallPaper.slug = null; - } else if (wallPaper.slug != null && wallPaper.slug.length() == 13 && wallPaper.slug.charAt(6) == '-') { + } else if (wallPaper.slug != null && wallPaper.slug.length() >= 13 && AndroidUtilities.isValidWallChar(wallPaper.slug.charAt(6))) { try { wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug.substring(0, 6), 16) | 0xff000000; - wallPaper.settings.second_background_color = Integer.parseInt(wallPaper.slug.substring(7), 16) | 0xff000000; - wallPaper.settings.rotation = 45; + wallPaper.settings.second_background_color = Integer.parseInt(wallPaper.slug.substring(7, 13), 16) | 0xff000000; + if (wallPaper.slug.length() >= 20 && AndroidUtilities.isValidWallChar(wallPaper.slug.charAt(13))) { + wallPaper.settings.third_background_color = Integer.parseInt(wallPaper.slug.substring(14, 20), 16) | 0xff000000; + } + if (wallPaper.slug.length() == 27 && AndroidUtilities.isValidWallChar(wallPaper.slug.charAt(20))) { + wallPaper.settings.fourth_background_color = Integer.parseInt(wallPaper.slug.substring(21), 16) | 0xff000000; + } } catch (Exception ignore) { } @@ -1575,9 +1707,14 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa String 
bgColor = data.getQueryParameter("bg_color"); if (!TextUtils.isEmpty(bgColor)) { wallPaper.settings.background_color = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; - if (bgColor.length() > 6) { - wallPaper.settings.second_background_color = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; - wallPaper.settings.rotation = 45; + if (bgColor.length() >= 13) { + wallPaper.settings.second_background_color = Integer.parseInt(bgColor.substring(7, 13), 16) | 0xff000000; + if (bgColor.length() >= 20 && AndroidUtilities.isValidWallChar(bgColor.charAt(13))) { + wallPaper.settings.third_background_color = Integer.parseInt(bgColor.substring(14, 20), 16) | 0xff000000; + } + if (bgColor.length() == 27 && AndroidUtilities.isValidWallChar(bgColor.charAt(20))) { + wallPaper.settings.fourth_background_color = Integer.parseInt(bgColor.substring(21), 16) | 0xff000000; + } } } else { wallPaper.settings.background_color = 0xffffffff; @@ -1745,11 +1882,16 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } wallPaper.slug = null; - } else if (wallPaper.slug != null && wallPaper.slug.length() == 13 && wallPaper.slug.charAt(6) == '-') { + } else if (wallPaper.slug != null && wallPaper.slug.length() >= 13 && AndroidUtilities.isValidWallChar(wallPaper.slug.charAt(6))) { try { wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug.substring(0, 6), 16) | 0xff000000; - wallPaper.settings.second_background_color = Integer.parseInt(wallPaper.slug.substring(7), 16) | 0xff000000; - wallPaper.settings.rotation = 45; + wallPaper.settings.second_background_color = Integer.parseInt(wallPaper.slug.substring(7, 13), 16) | 0xff000000; + if (wallPaper.slug.length() >= 20 && AndroidUtilities.isValidWallChar(wallPaper.slug.charAt(13))) { + wallPaper.settings.third_background_color = Integer.parseInt(wallPaper.slug.substring(14, 20), 16) | 0xff000000; + } + if (wallPaper.slug.length() == 27 && 
AndroidUtilities.isValidWallChar(wallPaper.slug.charAt(20))) { + wallPaper.settings.fourth_background_color = Integer.parseInt(wallPaper.slug.substring(21), 16) | 0xff000000; + } } catch (Exception ignore) { } @@ -1782,9 +1924,14 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa String bgColor = data.getQueryParameter("bg_color"); if (!TextUtils.isEmpty(bgColor)) { wallPaper.settings.background_color = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; - if (bgColor.length() > 6) { - wallPaper.settings.second_background_color = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; - wallPaper.settings.rotation = 45; + if (bgColor.length() >= 13) { + wallPaper.settings.second_background_color = Integer.parseInt(bgColor.substring(8, 13), 16) | 0xff000000; + if (bgColor.length() >= 20 && AndroidUtilities.isValidWallChar(bgColor.charAt(13))) { + wallPaper.settings.third_background_color = Integer.parseInt(bgColor.substring(14, 20), 16) | 0xff000000; + } + if (bgColor.length() == 27 && AndroidUtilities.isValidWallChar(bgColor.charAt(20))) { + wallPaper.settings.fourth_background_color = Integer.parseInt(bgColor.substring(21), 16) | 0xff000000; + } } } } catch (Exception ignore) { @@ -1972,7 +2119,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa .setActionToken(intent.getStringExtra(EXTRA_ACTION_TOKEN)) .setActionStatus(success ? 
Action.Builder.STATUS_TYPE_COMPLETED : Action.Builder.STATUS_TYPE_FAILED) .build(); - FirebaseUserActions.getInstance().end(assistAction); + FirebaseUserActions.getInstance(this).end(assistAction); intent.removeExtra(EXTRA_ACTION_TOKEN); } if (code != null || UserConfig.getInstance(currentAccount).isClientActivated()) { @@ -2154,6 +2301,14 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa pushOpened = false; } else if (exportingChatUri != null) { runImportRequest(exportingChatUri, documentsUrisArray); + } else if (importingStickers != null) { + AndroidUtilities.runOnUIThread(() -> { + if (!actionBarLayout.fragmentsStack.isEmpty()) { + BaseFragment fragment = actionBarLayout.fragmentsStack.get(0); + fragment.showDialog(new StickersAlert(this, importingStickersSoftware, importingStickers, importingStickersEmoji)); + } + }); + pushOpened = false; } else if (videoPath != null || photoPathsArray != null || sendingText != null || documentsPathsArray != null || contactsToSend != null || documentsUrisArray != null) { if (!AndroidUtilities.isTablet()) { NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); @@ -2977,7 +3132,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa StickersAlert alert; if (fragment instanceof ChatActivity) { ChatActivity chatActivity = (ChatActivity) fragment; - alert = new StickersAlert(LaunchActivity.this, fragment, stickerset, null, chatActivity.getChatActivityEnterView()); + alert = new StickersAlert(LaunchActivity.this, fragment, stickerset, null, chatActivity.getChatActivityEnterViewForStickers()); alert.setCalcMandatoryInsets(chatActivity.isKeyboardVisible()); } else { alert = new StickersAlert(LaunchActivity.this, fragment, stickerset, null, null); @@ -3096,8 +3251,13 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa boolean ok = false; if (TextUtils.isEmpty(wallPaper.slug)) { try { - 
WallpapersListActivity.ColorWallpaper colorWallpaper = new WallpapersListActivity.ColorWallpaper(Theme.COLOR_BACKGROUND_SLUG, wallPaper.settings.background_color, wallPaper.settings.second_background_color, AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false)); - ThemePreviewActivity wallpaperActivity = new ThemePreviewActivity(colorWallpaper, null); + WallpapersListActivity.ColorWallpaper colorWallpaper; + if (wallPaper.settings.third_background_color != 0) { + colorWallpaper = new WallpapersListActivity.ColorWallpaper(Theme.COLOR_BACKGROUND_SLUG, wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.third_background_color, wallPaper.settings.fourth_background_color); + } else { + colorWallpaper = new WallpapersListActivity.ColorWallpaper(Theme.COLOR_BACKGROUND_SLUG, wallPaper.settings.background_color, wallPaper.settings.second_background_color, AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false)); + } + ThemePreviewActivity wallpaperActivity = new ThemePreviewActivity(colorWallpaper, null, true, false); AndroidUtilities.runOnUIThread(() -> presentFragment(wallpaperActivity)); ok = true; } catch (Exception e) { @@ -3119,13 +3279,13 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa TLRPC.TL_wallPaper res = (TLRPC.TL_wallPaper) response; Object object; if (res.pattern) { - WallpapersListActivity.ColorWallpaper colorWallpaper = new WallpapersListActivity.ColorWallpaper(res.slug, wallPaper.settings.background_color, wallPaper.settings.second_background_color, AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false), wallPaper.settings.intensity / 100.0f, wallPaper.settings.motion, null); + WallpapersListActivity.ColorWallpaper colorWallpaper = new WallpapersListActivity.ColorWallpaper(res.slug, wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.third_background_color, 
wallPaper.settings.fourth_background_color, AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false), wallPaper.settings.intensity / 100.0f, wallPaper.settings.motion, null); colorWallpaper.pattern = res; object = colorWallpaper; } else { object = res; } - ThemePreviewActivity wallpaperActivity = new ThemePreviewActivity(object, null); + ThemePreviewActivity wallpaperActivity = new ThemePreviewActivity(object, null, true, false); wallpaperActivity.setInitialModes(wallPaper.settings.blur, wallPaper.settings.motion); presentFragment(wallpaperActivity); } else { @@ -3376,11 +3536,92 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa return foundContacts; } + private void updateAppUpdateViews(boolean animated) { + if (updateLayout == null) { + return; + } + if (SharedConfig.isAppUpdateAvailable()) { + updateSizeTextView.setText(AndroidUtilities.formatFileSize(SharedConfig.pendingAppUpdate.document.size)); + String fileName = FileLoader.getAttachFileName(SharedConfig.pendingAppUpdate.document); + File path = FileLoader.getPathToAttach(SharedConfig.pendingAppUpdate.document, true); + boolean showSize; + if (path.exists()) { + updateLayoutIcon.setIcon(MediaActionDrawable.ICON_UPDATE, true, animated); + updateTextView.setText(LocaleController.getString("AppUpdateNow", R.string.AppUpdateNow)); + showSize = false; + } else { + if (FileLoader.getInstance(currentAccount).isLoadingFile(fileName)) { + updateLayoutIcon.setIcon(MediaActionDrawable.ICON_CANCEL, true, animated); + Float p = ImageLoader.getInstance().getFileProgress(fileName); + updateTextView.setText(LocaleController.formatString("AppUpdateDownloading", R.string.AppUpdateDownloading, (int) ((p != null ? 
p : 0.0f) * 100))); + showSize = false; + } else { + updateLayoutIcon.setIcon(MediaActionDrawable.ICON_DOWNLOAD, true, animated); + updateTextView.setText(LocaleController.getString("AppUpdate", R.string.AppUpdate)); + showSize = true; + } + } + if (showSize) { + if (updateSizeTextView.getTag() != null) { + if (animated) { + updateSizeTextView.setTag(null); + updateSizeTextView.animate().alpha(1.0f).scaleX(1.0f).scaleY(1.0f).setDuration(180).start(); + } else { + updateSizeTextView.setAlpha(1.0f); + updateSizeTextView.setScaleX(1.0f); + updateSizeTextView.setScaleY(1.0f); + } + } + } else { + if (updateSizeTextView.getTag() == null) { + if (animated) { + updateSizeTextView.setTag(1); + updateSizeTextView.animate().alpha(0.0f).scaleX(0.0f).scaleY(0.0f).setDuration(180).start(); + } else { + updateSizeTextView.setAlpha(0.0f); + updateSizeTextView.setScaleX(0.0f); + updateSizeTextView.setScaleY(0.0f); + } + } + } + if (updateLayout.getTag() != null) { + return; + } + updateLayout.setVisibility(View.VISIBLE); + updateLayout.setTag(1); + if (animated) { + updateLayout.animate().translationY(0).setInterpolator(CubicBezierInterpolator.EASE_OUT).setListener(null).setDuration(180).start(); + } else { + updateLayout.setTranslationY(0); + } + sideMenu.setPadding(0, 0, 0, AndroidUtilities.dp(44)); + } else { + if (updateLayout.getTag() == null) { + return; + } + updateLayout.setTag(null); + if (animated) { + updateLayout.animate().translationY(AndroidUtilities.dp(44)).setInterpolator(CubicBezierInterpolator.EASE_OUT).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (updateLayout.getTag() == null) { + updateLayout.setVisibility(View.INVISIBLE); + } + } + }).setDuration(180).start(); + } else { + updateLayout.setTranslationY(AndroidUtilities.dp(44)); + updateLayout.setVisibility(View.INVISIBLE); + } + sideMenu.setPadding(0, 0, 0, 0); + } + } + public void checkAppUpdate(boolean force) { if (!force && 
BuildVars.DEBUG_VERSION || !force && !BuildVars.CHECK_UPDATES) { return; } - if (!force && Math.abs(System.currentTimeMillis() - UserConfig.getInstance(0).lastUpdateCheckTime) < MessagesController.getInstance(0).updateCheckDelay * 1000) { + if (!force && Math.abs(System.currentTimeMillis() - SharedConfig.lastUpdateCheckTime) < MessagesController.getInstance(0).updateCheckDelay * 1000) { return; } TLRPC.TL_help_getAppUpdate req = new TLRPC.TL_help_getAppUpdate(); @@ -3394,26 +3635,23 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } final int accountNum = currentAccount; ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { - UserConfig.getInstance(0).lastUpdateCheckTime = System.currentTimeMillis(); - UserConfig.getInstance(0).saveConfig(false); + SharedConfig.lastUpdateCheckTime = System.currentTimeMillis(); + SharedConfig.saveConfig(); if (response instanceof TLRPC.TL_help_appUpdate) { final TLRPC.TL_help_appUpdate res = (TLRPC.TL_help_appUpdate) response; AndroidUtilities.runOnUIThread(() -> { + SharedConfig.setNewAppVersionAvailable(res); if (res.can_not_skip) { - UserConfig.getInstance(0).pendingAppUpdate = res; - UserConfig.getInstance(0).pendingAppUpdateBuildVersion = BuildVars.BUILD_VERSION; - try { - PackageInfo packageInfo = ApplicationLoader.applicationContext.getPackageManager().getPackageInfo(ApplicationLoader.applicationContext.getPackageName(), 0); - UserConfig.getInstance(0).pendingAppUpdateInstallTime = Math.max(packageInfo.lastUpdateTime, packageInfo.firstInstallTime); - } catch (Exception e) { - FileLog.e(e); - UserConfig.getInstance(0).pendingAppUpdateInstallTime = 0; - } - UserConfig.getInstance(0).saveConfig(false); showUpdateActivity(accountNum, res, false); } else { - (new UpdateAppAlertDialog(LaunchActivity.this, res, accountNum)).show(); + drawerLayoutAdapter.notifyDataSetChanged(); + try { + (new UpdateAppAlertDialog(LaunchActivity.this, res, accountNum)).show(); + } 
catch (Exception e) { + FileLog.e(e); + } } + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.appUpdateAvailable); }); } }); @@ -3676,10 +3914,12 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.openArticle); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.hasNewContactsToImport); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.needShowPlayServicesAlert); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidFailToLoad); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadFailed); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.historyImportProgressChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.groupCallUpdated); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.stickersImportComplete); } NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.needShowAlert); @@ -3694,6 +3934,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.notificationsCountUpdated); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.screenStateChanged); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.showBulletin); + 
NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.appUpdateAvailable); } public void presentFragment(BaseFragment fragment) { @@ -3740,7 +3981,15 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa return; } super.onActivityResult(requestCode, resultCode, data); - if (requestCode == PLAY_SERVICES_REQUEST_CHECK_SETTINGS) { + if (requestCode == SCREEN_CAPTURE_REQUEST_CODE) { + if (resultCode == Activity.RESULT_OK) { + VoIPService service = VoIPService.getSharedInstance(); + if (service != null && service.groupCall != null) { + VideoCapturerDevice.mediaProjectionPermissionResultData = data; + service.createCaptureDevice(true); + } + } + } else if (requestCode == PLAY_SERVICES_REQUEST_CHECK_SETTINGS) { LocationController.getInstance(currentAccount).startFusedLocationRequest(resultCode == Activity.RESULT_OK); } else { ThemeEditorView editorView = ThemeEditorView.getInstance(); @@ -3776,7 +4025,15 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa boolean granted = grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED; - if (requestCode == 4) { + if (requestCode == 104) { + if (granted) { + if (GroupCallActivity.groupCallInstance != null) { + GroupCallActivity.groupCallInstance.enableCamera(); + } + } else { + showPermissionErrorAlert(LocaleController.getString("VoipNeedCameraPermission", R.string.VoipNeedCameraPermission)); + } + } else if (requestCode == 4) { if (!granted) { showPermissionErrorAlert(LocaleController.getString("PermissionStorage", R.string.PermissionStorage)); } else { @@ -3891,6 +4148,9 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa Browser.bindCustomTabsService(this); ApplicationLoader.mainInterfaceStopped = false; GroupCallPip.updateVisibility(this); + if (GroupCallActivity.groupCallInstance != null) { + GroupCallActivity.groupCallInstance.onResume(); + } } @Override @@ -3899,6 +4159,9 @@ public 
class LaunchActivity extends Activity implements ActionBarLayout.ActionBa Browser.unbindCustomTabsService(this); ApplicationLoader.mainInterfaceStopped = true; GroupCallPip.updateVisibility(this); + if (GroupCallActivity.groupCallInstance != null) { + GroupCallActivity.groupCallInstance.onPause(); + } } @Override @@ -4003,8 +4266,8 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } if (UserConfig.getInstance(UserConfig.selectedAccount).unacceptedTermsOfService != null) { showTosActivity(UserConfig.selectedAccount, UserConfig.getInstance(UserConfig.selectedAccount).unacceptedTermsOfService); - } else if (UserConfig.getInstance(0).pendingAppUpdate != null) { - showUpdateActivity(UserConfig.selectedAccount, UserConfig.getInstance(0).pendingAppUpdate, true); + } else if (SharedConfig.pendingAppUpdate != null && SharedConfig.pendingAppUpdate.can_not_skip) { + showUpdateActivity(UserConfig.selectedAccount, SharedConfig.pendingAppUpdate, true); } checkAppUpdate(false); @@ -4147,6 +4410,9 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa child.invalidate(); } } + if (backgroundTablet != null) { + backgroundTablet.setBackgroundImage(Theme.getCachedWallpaper(), Theme.isWallpaperMotion()); + } } else if (id == NotificationCenter.didSetPasscode) { if (SharedConfig.passcodeHash.length() > 0 && !SharedConfig.allowScreenCapture) { try { @@ -4309,9 +4575,15 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } catch (Throwable ignore) { } - } else if (id == NotificationCenter.fileDidLoad) { + } else if (id == NotificationCenter.fileLoaded) { + String path = (String) args[0]; + if (SharedConfig.isAppUpdateAvailable()) { + String name = FileLoader.getAttachFileName(SharedConfig.pendingAppUpdate.document); + if (name.equals(path)) { + updateAppUpdateViews(true); + } + } if (loadingThemeFileName != null) { - String path = (String) args[0]; if (loadingThemeFileName.equals(path)) { 
loadingThemeFileName = null; File locFile = new File(ApplicationLoader.getFilesDirFixed(), "remote" + loadingTheme.id + ".attheme"); @@ -4346,7 +4618,6 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa onThemeLoadFinish(); } } else if (loadingThemeWallpaperName != null) { - String path = (String) args[0]; if (loadingThemeWallpaperName.equals(path)) { loadingThemeWallpaperName = null; File file = (File) args[1]; @@ -4372,11 +4643,17 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } } } - } else if (id == NotificationCenter.fileDidFailToLoad) { + } else if (id == NotificationCenter.fileLoadFailed) { String path = (String) args[0]; if (path.equals(loadingThemeFileName) || path.equals(loadingThemeWallpaperName)) { onThemeLoadFinish(); } + if (SharedConfig.isAppUpdateAvailable()) { + String name = FileLoader.getAttachFileName(SharedConfig.pendingAppUpdate.document); + if (name.equals(path)) { + updateAppUpdateViews(true); + } + } } else if (id == NotificationCenter.screenStateChanged) { if (ApplicationLoader.mainInterfacePaused) { return; @@ -4392,6 +4669,8 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa if (args.length > 1 && !mainFragmentsStack.isEmpty()) { AlertsCreator.processError(currentAccount, (TLRPC.TL_error) args[2], mainFragmentsStack.get(mainFragmentsStack.size() - 1), (TLObject) args[1]); } + } else if (id == NotificationCenter.stickersImportComplete) { + MediaDataController.getInstance(account).toggleStickerSet(this, (TLObject) args[0], 2, !mainFragmentsStack.isEmpty() ? 
mainFragmentsStack.get(mainFragmentsStack.size() - 1) : null, false, true); } else if (id == NotificationCenter.showBulletin) { if (!mainFragmentsStack.isEmpty()) { int type = (int) args[0]; @@ -4432,6 +4711,20 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa } } else if (id == NotificationCenter.groupCallUpdated) { checkWasMutedByAdmin(false); + } else if (id == NotificationCenter.fileLoadProgressChanged) { + if (updateTextView != null && SharedConfig.isAppUpdateAvailable()) { + String location = (String) args[0]; + String fileName = FileLoader.getAttachFileName(SharedConfig.pendingAppUpdate.document); + if (fileName != null && fileName.equals(location)) { + Long loadedSize = (Long) args[1]; + Long totalSize = (Long) args[2]; + float loadProgress = loadedSize / (float) totalSize; + updateLayoutIcon.setProgress(loadProgress, true); + updateTextView.setText(LocaleController.formatString("AppUpdateDownloading", R.string.AppUpdateDownloading, (int) (loadProgress * 100))); + } + } + } else if (id == NotificationCenter.appUpdateAvailable) { + updateAppUpdateViews(mainFragmentsStack.size() == 1); } } @@ -4539,6 +4832,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa private void checkFreeDiscSpace() { SharedConfig.checkKeepMedia(); + SharedConfig.checkLogsToDelete(); if (Build.VERSION.SDK_INT >= 26) { return; } @@ -5004,7 +5298,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa int keyCode = event.getKeyCode(); if (event.getAction() == KeyEvent.ACTION_DOWN && (event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_UP || event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_DOWN)) { if (VoIPService.getSharedInstance() != null) { - if (Build.VERSION.SDK_INT >= 31 && !SharedConfig.useMediaStream) { + if (Build.VERSION.SDK_INT >= 32) { boolean oldValue = WebRtcAudioTrack.isSpeakerMuted(); AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE); int minVolume = 
am.getStreamMinVolume(AudioManager.STREAM_VOICE_CALL); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java index acab2160a..e67afc219 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java @@ -1698,7 +1698,11 @@ public class LoginActivity extends BaseFragment { needShowProgress(reqId); } + private boolean numberFilled; public void fillNumber() { + if (numberFilled) { + return; + } try { TelephonyManager tm = (TelephonyManager) ApplicationLoader.applicationContext.getSystemService(Context.TELEPHONY_SERVICE); if (tm.getSimState() != TelephonyManager.SIM_STATE_ABSENT && tm.getPhoneType() != TelephonyManager.PHONE_TYPE_NONE) { @@ -1727,6 +1731,7 @@ public class LoginActivity extends BaseFragment { return; } } + numberFilled = true; if (!newAccount && allowCall) { String number = PhoneFormat.stripExceptNumbers(tm.getLine1Number()); String textToSet = null; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ManageLinksActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ManageLinksActivity.java index ab465a6de..74a6e2456 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ManageLinksActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ManageLinksActivity.java @@ -581,7 +581,7 @@ public class ManageLinksActivity extends BaseFragment { } } }); - recyclerItemsEnterAnimator = new RecyclerItemsEnterAnimator(listView); + recyclerItemsEnterAnimator = new RecyclerItemsEnterAnimator(listView, false); DefaultItemAnimator defaultItemAnimator = new DefaultItemAnimator(); defaultItemAnimator.setDelayAnimations(false); defaultItemAnimator.setSupportsChangeAnimations(false); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/MediaActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/MediaActivity.java index 4db563b10..9c0624a3d 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/MediaActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/MediaActivity.java @@ -195,6 +195,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No final RecyclerListView listView = mediaPages[0].listView; for (int a = 0, count = listView.getChildCount(); a < count; a++) { View view = listView.getChildAt(a); + if (view.getTop() >= mediaPages[0].listView.getMeasuredHeight()) { + continue; + } BackupImageView imageView = null; if (view instanceof SharedPhotoVideoCell) { SharedPhotoVideoCell cell = (SharedPhotoVideoCell) view; @@ -255,9 +258,19 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No } } } - return object; } + if (mediaPages[0].selectedType == 0) { + int position = photoVideoAdapter.getPositionForIndex(index); + int firstVisiblePosition = mediaPages[0].layoutManager.findFirstVisibleItemPosition(); + int lastVisiblePosition = mediaPages[0].layoutManager.findLastVisibleItemPosition(); + + if (position <= firstVisiblePosition) { + mediaPages[0].layoutManager.scrollToPositionWithOffset(position, 0); + } else if (position >= lastVisiblePosition && lastVisiblePosition >= 0) { + mediaPages[0].layoutManager.scrollToPositionWithOffset(position, 0, true); + } + } } return null; } @@ -445,7 +458,7 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No for (int a = 0; a < dids.size(); a++) { long did = dids.get(a); if (message != null) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0, null); } SendMessagesHelper.getInstance(currentAccount).sendMessage(fmessages, did, true, 0); } @@ -1074,6 +1087,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No protected void 
onLayout(boolean changed, int l, int t, int r, int b) { super.onLayout(changed, l, t, r, b); updateSections(this, true); + if (mediaPage.selectedType == 0) { + PhotoViewer.getInstance().checkCurrentImageVisibility(); + } } }; mediaPages[a].listView.setScrollingTouchSlop(RecyclerView.TOUCH_SLOP_PAGING); @@ -1644,11 +1660,8 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No changed = true; } } else { - TLRPC.EncryptedChat currentEncryptedChat = MessagesController.getInstance(currentAccount).getEncryptedChat((int) (dialog_id >> 32)); - if (currentEncryptedChat != null && AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46) { - if (hasMedia[4] != 0 && !scrollSlidingTextTabStrip.hasTab(4)) { - changed = true; - } + if (hasMedia[4] != 0 && !scrollSlidingTextTabStrip.hasTab(4)) { + changed = true; } } if (hasMedia[2] != 0 && !scrollSlidingTextTabStrip.hasTab(2)) { @@ -1678,12 +1691,9 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No } } } else { - TLRPC.EncryptedChat currentEncryptedChat = MessagesController.getInstance(currentAccount).getEncryptedChat((int) (dialog_id >> 32)); - if (currentEncryptedChat != null && AndroidUtilities.getPeerLayerVersion(currentEncryptedChat.layer) >= 46) { - if (hasMedia[4] != 0) { - if (!scrollSlidingTextTabStrip.hasTab(4)) { - scrollSlidingTextTabStrip.addTextTab(4, LocaleController.getString("SharedMusicTab2", R.string.SharedMusicTab2)); - } + if (hasMedia[4] != 0) { + if (!scrollSlidingTextTabStrip.hasTab(4)) { + scrollSlidingTextTabStrip.addTextTab(4, LocaleController.getString("SharedMusicTab2", R.string.SharedMusicTab2)); } } } @@ -2101,7 +2111,7 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { return row != 0; } @@ -2228,7 +2238,7 @@ public class MediaActivity extends 
BaseFragment implements NotificationCenter.No } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { return row != 0; } @@ -2404,7 +2414,7 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No } @Override - public boolean isEnabled(int section, int row) { + public boolean isEnabled(RecyclerView.ViewHolder holder, int section, int row) { return false; } @@ -2553,6 +2563,10 @@ public class MediaActivity extends BaseFragment implements NotificationCenter.No public int getPositionForScrollProgress(float progress) { return 0; } + + public int getPositionForIndex(int i) { + return i / columnsCount; + } } public class MediaSearchAdapter extends RecyclerListView.SelectionAdapter { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PassportActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PassportActivity.java index d3433ba97..696f58e85 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PassportActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PassportActivity.java @@ -74,7 +74,6 @@ import org.telegram.messenger.DownloadController; import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.ImageLoader; -import org.telegram.messenger.ImageLocation; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MessageObject; @@ -928,8 +927,8 @@ public class PassportActivity extends BaseFragment implements NotificationCenter @Override public boolean onFragmentCreate() { - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.FileDidUpload); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.FileDidFailUpload); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileUploaded); + 
NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileUploadFailed); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.twoStepPasswordChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.didRemoveTwoStepPassword); return super.onFragmentCreate(); @@ -938,8 +937,8 @@ public class PassportActivity extends BaseFragment implements NotificationCenter @Override public void onFragmentDestroy() { super.onFragmentDestroy(); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.FileDidUpload); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.FileDidFailUpload); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileUploaded); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileUploadFailed); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.twoStepPasswordChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.didRemoveTwoStepPassword); callCallback(false); @@ -4934,7 +4933,7 @@ public class PassportActivity extends BaseFragment implements NotificationCenter doneItem.setEnabled(true); doneItem.setAlpha(1.0f); } - FileLoader.getInstance(currentAccount).cancelUploadFile(document.path, false); + FileLoader.getInstance(currentAccount).cancelFileUpload(document.path, false); } }); showDialog(builder.create()); @@ -6490,7 +6489,7 @@ public class PassportActivity extends BaseFragment implements NotificationCenter @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.FileDidUpload) { + if (id == NotificationCenter.fileUploaded) { final String location = (String) args[0]; SecureDocument document = uploadingDocuments.get(location); if (document != null) { @@ -6524,7 +6523,7 @@ public class PassportActivity extends BaseFragment implements NotificationCenter errorsValues.remove("translation_all"); } } - } else if (id == NotificationCenter.FileDidFailUpload) { + } else if (id == NotificationCenter.fileUploadFailed) { } else if (id == NotificationCenter.twoStepPasswordChanged) { if (args != null && args.length > 0) { @@ -6826,11 +6825,11 @@ public class PassportActivity extends BaseFragment implements NotificationCenter return; } if (chatAttachAlert == null) { - chatAttachAlert = new ChatAttachAlert(getParentActivity(), this, false); + chatAttachAlert = new ChatAttachAlert(getParentActivity(), this, false, false); chatAttachAlert.setDelegate(new ChatAttachAlert.ChatAttachViewDelegate() { @Override - public void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate) { + public void didPressedButton(int button, boolean arg, boolean notify, int scheduleDate, boolean forceDocument) { if (getParentActivity() == null || chatAttachAlert == null) { return; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PaymentFormActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PaymentFormActivity.java index 904f2032d..6e98327a7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PaymentFormActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PaymentFormActivity.java @@ -45,7 +45,6 @@ import android.view.View; import android.view.ViewGroup; import android.view.ViewParent; import android.view.WindowManager; -import android.view.animation.DecelerateInterpolator; import android.view.inputmethod.EditorInfo; import android.webkit.CookieManager; import android.webkit.JavascriptInterface; @@ -2744,11 +2743,6 @@ public class PaymentFormActivity extends BaseFragment 
implements NotificationCen if (task1.isSuccessful()) { if (googlePayContainer != null) { googlePayContainer.setVisibility(View.VISIBLE); - AnimatorSet animatorSet = new AnimatorSet(); - animatorSet.playTogether(ObjectAnimator.ofFloat(googlePayContainer, View.ALPHA, 0.0f, 1.0f)); - animatorSet.setInterpolator(new DecelerateInterpolator()); - animatorSet.setDuration(180); - animatorSet.start(); } } else { FileLog.e("isReadyToPay failed", task1.getException()); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PeopleNearbyActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PeopleNearbyActivity.java index 205238cb3..2b6354524 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PeopleNearbyActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PeopleNearbyActivity.java @@ -19,6 +19,7 @@ import android.location.Location; import android.os.Build; import android.os.Bundle; import android.os.SystemClock; +import android.util.SparseIntArray; import android.util.TypedValue; import android.view.Gravity; import android.view.View; @@ -33,6 +34,7 @@ import org.telegram.messenger.ChatObject; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.LocationController; +import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; @@ -57,6 +59,7 @@ import org.telegram.ui.Components.UndoView; import java.util.ArrayList; import androidx.recyclerview.widget.DefaultItemAnimator; +import androidx.recyclerview.widget.DiffUtil; import androidx.recyclerview.widget.LinearLayoutManager; import androidx.recyclerview.widget.RecyclerView; @@ -133,10 +136,10 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe users = new ArrayList<>(getLocationController().getCachedNearbyUsers()); chats = new ArrayList<>(getLocationController().getCachedNearbyChats()); 
checkForExpiredLocations(false); - updateRows(true); + updateRows(null); } - private void updateRows(boolean notifyDataSetChanged) { + private void updateRows(DiffCallback diffCallback) { rowCount = 0; usersStartRow = -1; usersEndRow = -1; @@ -175,9 +178,92 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe } chatsSectionRow = rowCount++; - if (notifyDataSetChanged && listViewAdapter != null) { - listView.setItemAnimator(null); - listViewAdapter.notifyDataSetChanged(); + if (listViewAdapter != null) { + if (diffCallback == null) { + listView.setItemAnimator(null); + listViewAdapter.notifyDataSetChanged(); + } else { + listView.setItemAnimator(itemAnimator); + diffCallback.fillPositions(diffCallback.newPositionToItem); + DiffUtil.calculateDiff(diffCallback).dispatchUpdatesTo(listViewAdapter); + } + } + } + + private class DiffCallback extends DiffUtil.Callback { + + int oldRowCount; + + SparseIntArray oldPositionToItem = new SparseIntArray(); + SparseIntArray newPositionToItem = new SparseIntArray(); + + int oldUsersStartRow; + int oldUsersEndRow; + + int oldChatsStartRow; + int oldChatsEndRow; + + private final ArrayList oldUsers = new ArrayList<>(); + private final ArrayList oldChats = new ArrayList<>(); + + @Override + public int getOldListSize() { + return oldRowCount; + } + + @Override + public int getNewListSize() { + return rowCount; + } + + @Override + public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) { + if (newItemPosition >= usersStartRow && newItemPosition < usersEndRow && oldItemPosition >= oldUsersStartRow && oldItemPosition < oldUsersEndRow) { + return MessageObject.getPeerId(oldUsers.get(oldItemPosition - oldUsersStartRow).peer) == MessageObject.getPeerId(users.get(newItemPosition - usersStartRow).peer); + } + if (newItemPosition >= chatsStartRow && newItemPosition < chatsEndRow && oldItemPosition >= oldChatsStartRow && oldItemPosition < oldChatsEndRow) { + return 
MessageObject.getPeerId(oldChats.get(oldItemPosition - oldChatsStartRow).peer) == MessageObject.getPeerId(chats.get(newItemPosition - chatsStartRow).peer); + } + int oldIndex = oldPositionToItem.get(oldItemPosition, -1); + int newIndex = newPositionToItem.get(newItemPosition, -1); + return oldIndex == newIndex && oldIndex >= 0; + } + + @Override + public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) { + return areItemsTheSame(oldItemPosition, newItemPosition); + } + + public void fillPositions(SparseIntArray sparseIntArray) { + sparseIntArray.clear(); + int pointer = 0; + + put(++pointer, helpRow, sparseIntArray); + put(++pointer, helpSectionRow, sparseIntArray); + put(++pointer, usersHeaderRow, sparseIntArray); + put(++pointer, showMoreRow, sparseIntArray); + put(++pointer, usersSectionRow, sparseIntArray); + put(++pointer, chatsHeaderRow, sparseIntArray); + put(++pointer, chatsCreateRow, sparseIntArray); + put(++pointer, chatsSectionRow, sparseIntArray); + put(++pointer, showMeRow, sparseIntArray); + } + + public void saveCurrentState() { + this.oldRowCount = rowCount; + this.oldUsersStartRow = usersStartRow; + this.oldUsersEndRow = usersEndRow; + this.oldChatsStartRow = chatsStartRow; + this.oldChatsEndRow = chatsEndRow; + oldUsers.addAll(users); + oldChats.addAll(chats); + fillPositions(oldPositionToItem); + } + + private void put(int id, int position, SparseIntArray sparseIntArray) { + if (position >= 0) { + sparseIntArray.put(position, id); + } } } @@ -315,7 +401,7 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe userConfig.sharingMyLocationUntil = 0; userConfig.saveConfig(false); sendRequest(false, 2); - updateRows(true); + updateRows(null); } else { AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); builder.setTitle(LocaleController.getString("MakeMyselfVisibleTitle", R.string.MakeMyselfVisibleTitle)); @@ -324,19 +410,17 @@ public class PeopleNearbyActivity extends 
BaseFragment implements NotificationCe userConfig.sharingMyLocationUntil = 0x7fffffff; userConfig.saveConfig(false); sendRequest(false, 1); - updateRows(true); + updateRows(null); }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); showDialog(builder.create()); } userConfig.saveConfig(false); } else if (position == showMoreRow) { - int newCount = users.size() - Math.min(5, users.size()); expanded = true; - updateRows(false); - listView.setItemAnimator(itemAnimator); - listViewAdapter.notifyItemRemoved(position); - listViewAdapter.notifyItemRangeInserted(position, newCount); + DiffCallback diffCallback = new DiffCallback(); + diffCallback.saveCurrentState(); + updateRows(diffCallback); } }); listView.setOnScrollListener(new RecyclerView.OnScrollListener() { @@ -365,7 +449,7 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe undoView = new UndoView(context); frameLayout.addView(undoView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.LEFT, 8, 0, 8, 8)); - updateRows(true); + updateRows(null); return fragmentView; } @@ -540,12 +624,16 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe if (share == 1 && error != null) { userConfig.sharingMyLocationUntil = 0; saveConfig = true; - updateRows(true); + updateRows(null); } if (response != null && share != 2) { TLRPC.Updates updates = (TLRPC.TL_updates) response; getMessagesController().putUsers(updates.users, false); getMessagesController().putChats(updates.chats, false); + + DiffCallback diffCallback = new DiffCallback(); + diffCallback.saveCurrentState(); + users.clear(); chats.clear(); if (userConfig.sharingMyLocationUntil != 0) { @@ -583,7 +671,7 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe } checkForExpiredLocations(true); - updateRows(true); + updateRows(diffCallback); } if (saveConfig) { userConfig.saveConfig(false); 
@@ -653,6 +741,8 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe sendRequest(false, 0); } else if (id == NotificationCenter.newPeopleNearbyAvailable) { TLRPC.TL_updatePeerLocated update = (TLRPC.TL_updatePeerLocated) args[0]; + DiffCallback diffCallback = new DiffCallback(); + diffCallback.saveCurrentState(); for (int b = 0, N2 = update.peers.size(); b < N2; b++) { TLRPC.PeerLocated object = update.peers.get(b); if (object instanceof TLRPC.TL_peerLocated) { @@ -677,7 +767,7 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe } } checkForExpiredLocations(true); - updateRows(true); + updateRows(diffCallback); } else if (id == NotificationCenter.needDeleteDialog) { if (fragmentView == null || isPaused) { return; @@ -713,11 +803,16 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe int currentTime = getConnectionsManager().getCurrentTime(); int minExpired = Integer.MAX_VALUE; boolean changed = false; + DiffCallback callback = null; for (int a = 0; a < 2; a++) { ArrayList arrayList = a == 0 ? 
users : chats; for (int b = 0, N = arrayList.size(); b < N; b++) { TLRPC.TL_peerLocated peer = arrayList.get(b); if (peer.expires <= currentTime) { + if (callback == null) { + callback = new DiffCallback(); + callback.saveCurrentState(); + } arrayList.remove(b); b--; N--; @@ -728,7 +823,7 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe } } if (changed && listViewAdapter != null) { - updateRows(true); + updateRows(callback); } if (changed || cache) { getLocationController().setCachedNearbyUsersAndChats(users, chats); @@ -782,7 +877,7 @@ public class PeopleNearbyActivity extends BaseFragment implements NotificationCe titleTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 24); titleTextView.setGravity(Gravity.CENTER); titleTextView.setText(AndroidUtilities.replaceTags(LocaleController.formatString("PeopleNearby", R.string.PeopleNearby))); - addView(titleTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 52, top + 120, 52, 27)); + addView(titleTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.LEFT, 17, top + 120, 17, 27)); messageTextView = new TextView(context); messageTextView.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteGrayText)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java index 14bdef41d..62f711966 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoAlbumPickerActivity.java @@ -775,7 +775,7 @@ public class PhotoAlbumPickerActivity extends BaseFragment implements Notificati private void openPhotoPicker(MediaController.AlbumEntry albumEntry, int type) { if (albumEntry != null) { - PhotoPickerActivity fragment = new PhotoPickerActivity(type, albumEntry, selectedPhotos, 
selectedPhotosOrder, selectPhotoType, allowCaption, chatActivity); + PhotoPickerActivity fragment = new PhotoPickerActivity(type, albumEntry, selectedPhotos, selectedPhotosOrder, selectPhotoType, allowCaption, chatActivity, false); fragment.setCaption(caption = commentTextView.getText()); fragment.setDelegate(new PhotoPickerActivity.PhotoPickerActivityDelegate() { @Override @@ -826,7 +826,7 @@ public class PhotoAlbumPickerActivity extends BaseFragment implements Notificati fragment.setMaxSelectedPhotos(maxSelectedPhotos, allowOrder); presentFragment(fragment); } else { - PhotoPickerActivity fragment = new PhotoPickerActivity(0, albumEntry, photos, order, selectPhotoType, allowCaption, chatActivity); + PhotoPickerActivity fragment = new PhotoPickerActivity(0, albumEntry, photos, order, selectPhotoType, allowCaption, chatActivity, false); fragment.setCaption(caption = commentTextView.getText()); fragment.setDelegate(new PhotoPickerActivity.PhotoPickerActivityDelegate() { @Override diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java index bc1b5e3ec..d4e296be4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerActivity.java @@ -73,6 +73,7 @@ import org.telegram.ui.ActionBar.ActionBarPopupWindow; import org.telegram.ui.ActionBar.ActionBar; import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BaseFragment; +import org.telegram.ui.ActionBar.SimpleTextView; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; import org.telegram.ui.Cells.DividerCell; @@ -179,6 +180,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen private String initialSearchString; private boolean needsBottomLayout = true; + private final boolean forceDarckTheme; private final static int change_sort = 1; private final 
static int open_in = 2; @@ -186,6 +188,10 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen private PhotoPickerActivityDelegate delegate; private PhotoPickerActivitySearchDelegate searchDelegate; + private final String dialogBackgroundKey; + private final String textKey; + private final String selectorKey; + private PhotoViewer.PhotoViewerProvider provider = new PhotoViewer.EmptyPhotoViewerProvider() { @Override public boolean scaleToFill() { @@ -380,7 +386,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen } @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { if (selectedPhotos.isEmpty()) { if (selectedAlbum != null) { if (index < 0 || index >= selectedAlbum.photos.size()) { @@ -412,7 +418,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen } }; - public PhotoPickerActivity(int type, MediaController.AlbumEntry selectedAlbum, HashMap selectedPhotos, ArrayList selectedPhotosOrder, int selectPhotoType, boolean allowCaption, ChatActivity chatActivity) { + public PhotoPickerActivity(int type, MediaController.AlbumEntry selectedAlbum, HashMap selectedPhotos, ArrayList selectedPhotosOrder, int selectPhotoType, boolean allowCaption, ChatActivity chatActivity, boolean forceDarkTheme) { super(); this.selectedAlbum = selectedAlbum; this.selectedPhotos = selectedPhotos; @@ -421,10 +427,21 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen this.selectPhotoType = selectPhotoType; this.chatActivity = chatActivity; this.allowCaption = allowCaption; + this.forceDarckTheme = forceDarkTheme; if (selectedAlbum == null) { loadRecentSearch(); } + + if (forceDarkTheme) { + dialogBackgroundKey = Theme.key_voipgroup_dialogBackground; + textKey = 
Theme.key_voipgroup_actionBarItems; + selectorKey = Theme.key_voipgroup_actionBarItemsSelector; + } else { + dialogBackgroundKey = Theme.key_dialogBackground; + textKey = Theme.key_dialogTextBlack; + selectorKey = Theme.key_dialogButtonSelector; + } } public void setDocumentsPicker(boolean value) { @@ -455,10 +472,10 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen public View createView(Context context) { listSort = false; - actionBar.setBackgroundColor(Theme.getColor(Theme.key_dialogBackground)); - actionBar.setTitleColor(Theme.getColor(Theme.key_dialogTextBlack)); - actionBar.setItemsColor(Theme.getColor(Theme.key_dialogTextBlack), false); - actionBar.setItemsBackgroundColor(Theme.getColor(Theme.key_dialogButtonSelector), false); + actionBar.setBackgroundColor(Theme.getColor(dialogBackgroundKey)); + actionBar.setTitleColor(Theme.getColor(textKey)); + actionBar.setItemsColor(Theme.getColor(textKey), false); + actionBar.setItemsBackgroundColor(Theme.getColor(selectorKey), false); actionBar.setBackButtonImage(R.drawable.ic_ab_back); if (selectedAlbum != null) { actionBar.setTitle(selectedAlbum.bucketName); @@ -546,8 +563,8 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen } }); EditTextBoldCursor editText = searchItem.getSearchField(); - editText.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); - editText.setCursorColor(Theme.getColor(Theme.key_dialogTextBlack)); + editText.setTextColor(Theme.getColor(textKey)); + editText.setCursorColor(Theme.getColor(textKey)); editText.setHintTextColor(Theme.getColor(Theme.key_chat_messagePanelHint)); } @@ -728,7 +745,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen super.requestLayout(); } }; - sizeNotifierFrameLayout.setBackgroundColor(Theme.getColor(Theme.key_dialogBackground)); + sizeNotifierFrameLayout.setBackgroundColor(Theme.getColor(dialogBackgroundKey)); fragmentView = sizeNotifierFrameLayout; listView = new 
RecyclerListView(context); @@ -756,7 +773,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen }); sizeNotifierFrameLayout.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP)); listView.setAdapter(listAdapter = new ListAdapter(context)); - listView.setGlowColor(Theme.getColor(Theme.key_dialogBackground)); + listView.setGlowColor(Theme.getColor(dialogBackgroundKey)); listView.setOnItemClickListener((view, position) -> { if (selectedAlbum == null && searchResult.isEmpty()) { if (position < recentSearches.size()) { @@ -931,7 +948,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen sizeNotifierFrameLayout.addView(shadow, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 3, Gravity.BOTTOM | Gravity.LEFT, 0, 0, 0, 48)); frameLayout2 = new FrameLayout(context); - frameLayout2.setBackgroundColor(Theme.getColor(Theme.key_dialogBackground)); + frameLayout2.setBackgroundColor(Theme.getColor(dialogBackgroundKey)); frameLayout2.setVisibility(View.INVISIBLE); frameLayout2.setTranslationY(AndroidUtilities.dp(48)); sizeNotifierFrameLayout.addView(frameLayout2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 48, Gravity.LEFT | Gravity.BOTTOM)); @@ -1085,7 +1102,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen } }); } - sendPopupLayout.setupRadialSelectors(Theme.getColor(Theme.key_dialogButtonSelector)); + sendPopupLayout.setupRadialSelectors(Theme.getColor(selectorKey)); sendPopupWindow = new ActionBarPopupWindow(sendPopupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT); sendPopupWindow.setAnimationEnabled(false); @@ -1121,7 +1138,7 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen int cy = getMeasuredHeight() / 2; textPaint.setColor(Theme.getColor(Theme.key_dialogRoundCheckBoxCheck)); - paint.setColor(Theme.getColor(Theme.key_dialogBackground)); + 
paint.setColor(Theme.getColor(dialogBackgroundKey)); rect.set(cx - size / 2, 0, cx + size / 2, getMeasuredHeight()); canvas.drawRoundRect(rect, AndroidUtilities.dp(12), AndroidUtilities.dp(12), paint); @@ -1824,12 +1841,19 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen break; case 3: { view = new TextCell(mContext, 23, true); + view.setLayoutParams(new RecyclerView.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); + if (forceDarckTheme) { + TextCell textCell = (TextCell) view; + textCell.textView.setTextColor(Theme.getColor(textKey)); + textCell.imageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_voipgroup_mutedIcon), PorterDuff.Mode.MULTIPLY)); + } break; } case 4: default: { view = new DividerCell(mContext); + ((DividerCell) view).setForceDarkTheme(forceDarckTheme); break; } } @@ -1913,17 +1937,17 @@ public class PhotoPickerActivity extends BaseFragment implements NotificationCen public ArrayList getThemeDescriptions() { ArrayList themeDescriptions = new ArrayList<>(); - themeDescriptions.add(new ThemeDescription(sizeNotifierFrameLayout, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_dialogBackground)); + themeDescriptions.add(new ThemeDescription(sizeNotifierFrameLayout, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, dialogBackgroundKey)); - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, Theme.key_dialogBackground)); - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_ITEMSCOLOR, null, null, null, null, Theme.key_dialogTextBlack)); - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_TITLECOLOR, null, null, null, null, Theme.key_dialogTextBlack)); - themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SELECTORCOLOR, null, null, null, null, Theme.key_dialogButtonSelector)); - 
themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SEARCH, null, null, null, null, Theme.key_dialogTextBlack)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_BACKGROUND, null, null, null, null, dialogBackgroundKey)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_ITEMSCOLOR, null, null, null, null, textKey)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_TITLECOLOR, null, null, null, null, textKey)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SELECTORCOLOR, null, null, null, null, selectorKey)); + themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SEARCH, null, null, null, null, textKey)); themeDescriptions.add(new ThemeDescription(actionBar, ThemeDescription.FLAG_AB_SEARCHPLACEHOLDER, null, null, null, null, Theme.key_chat_messagePanelHint)); - themeDescriptions.add(new ThemeDescription(searchItem != null ? searchItem.getSearchField() : null, ThemeDescription.FLAG_CURSORCOLOR, null, null, null, null, Theme.key_dialogTextBlack)); + themeDescriptions.add(new ThemeDescription(searchItem != null ? 
searchItem.getSearchField() : null, ThemeDescription.FLAG_CURSORCOLOR, null, null, null, null, textKey)); - themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_LISTGLOWCOLOR, null, null, null, null, Theme.key_dialogBackground)); + themeDescriptions.add(new ThemeDescription(listView, ThemeDescription.FLAG_LISTGLOWCOLOR, null, null, null, null, dialogBackgroundKey)); themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{View.class}, null, new Drawable[]{Theme.chat_attachEmptyDrawable}, null, Theme.key_chat_attachEmptyImage)); themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{View.class}, null, null, null, Theme.key_chat_attachPhotoBackground)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerSearchActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerSearchActivity.java index 9c27b10ba..c5e3fa681 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerSearchActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoPickerSearchActivity.java @@ -86,8 +86,8 @@ public class PhotoPickerSearchActivity extends BaseFragment { public PhotoPickerSearchActivity(HashMap selectedPhotos, ArrayList selectedPhotosOrder, int selectPhotoType, boolean allowCaption, ChatActivity chatActivity) { super(); - imagesSearch = new PhotoPickerActivity(0, null, selectedPhotos, selectedPhotosOrder, selectPhotoType, allowCaption, chatActivity); - gifsSearch = new PhotoPickerActivity(1, null, selectedPhotos, selectedPhotosOrder, selectPhotoType, allowCaption, chatActivity); + imagesSearch = new PhotoPickerActivity(0, null, selectedPhotos, selectedPhotosOrder, selectPhotoType, allowCaption, chatActivity, false); + gifsSearch = new PhotoPickerActivity(1, null, selectedPhotos, selectedPhotosOrder, selectPhotoType, allowCaption, chatActivity, false); } @Override diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java 
b/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java index 08ca127fd..63848489f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PhotoViewer.java @@ -1679,7 +1679,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { } @@ -1809,7 +1809,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat int setPhotoUnchecked(Object photoEntry); boolean cancelButtonPressed(); void needAddMorePhotos(); - void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate); + void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument); void replaceButtonPressed(int index, VideoEditedInfo videoEditedInfo); boolean canReplace(int index); int getSelectedCount(); @@ -2884,7 +2884,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat @SuppressWarnings("unchecked") @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.fileDidFailToLoad) { + if (id == NotificationCenter.fileLoadFailed) { String location = (String) args[0]; for (int a = 0; a < 3; a++) { if (currentFileNames[a] != null && currentFileNames[a].equals(location)) { @@ -2894,7 +2894,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat break; } } - } else if (id == NotificationCenter.fileDidLoad) { + } else if (id == NotificationCenter.fileLoaded) { String location = (String) args[0]; for (int a = 0; a < 3; a++) { if (currentFileNames[a] != null && currentFileNames[a].equals(location)) { @@ -2910,7 +2910,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat break; } } - } else if (id == NotificationCenter.FileLoadProgressChanged) { + } else if (id == NotificationCenter.fileLoadProgressChanged) { String location = (String) args[0]; for (int a = 0; a < 3; a++) { if (currentFileNames[a] != null && currentFileNames[a].equals(location)) { @@ -3033,6 +3033,13 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat location = ImageLocation.getForUserOrChat(chat, ImageLocation.TYPE_BIG); } if (location != null) { + if (!imagesArrLocations.isEmpty() && imagesArrLocations.get(0).photoId == location.photoId) { + imagesArrLocations.remove(0); + avatarsArr.remove(0); + imagesArrLocationsSizes.remove(0); + imagesArrLocationsVideo.remove(0); + imagesArrMessages.remove(0); + } imagesArrLocations.add(0, location); avatarsArr.add(0, new TLRPC.TL_photoEmpty()); imagesArrLocationsSizes.add(0, currentFileLocationVideo.currentSize); @@ -3179,7 +3186,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } } } - } else if (id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.emojiLoaded) { if (captionTextViewSwitcher != null) { captionTextViewSwitcher.invalidateViews(); } @@ -3667,7 +3674,7 @@ public class PhotoViewer implements 
NotificationCenter.NotificationCenterDelegat } closePhoto(true, false); } else if (id == gallery_menu_save) { - if (Build.VERSION.SDK_INT >= 23 && parentActivity.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && parentActivity.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { parentActivity.requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); return; } @@ -3762,7 +3769,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat for (int a = 0; a < dids.size(); a++) { long did = dids.get(a); if (message != null) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0, null); } SendMessagesHelper.getInstance(currentAccount).sendMessage(fmessages, did, true, 0); } @@ -4823,8 +4830,8 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat sendPopupLayout.setBackgroundColor(0xf9222222); final boolean canReplace = placeProvider != null && placeProvider.canReplace(currentIndex); - final int[] order = {3, 2, 0, 1}; - for (int i = 0; i < 4; i++) { + final int[] order = {4, 3, 2, 0, 1}; + for (int i = 0; i < 5; i++) { final int a = order[i]; if (a != 2 && a != 3 && canReplace) { continue; @@ -4858,6 +4865,8 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat continue; } else if ((a == 2 || a == 3) && !canReplace) { continue; + } else if (a == 4 && (isCurrentVideo || timeItem.getColorFilter() != null)) { + continue; } ActionBarMenuSubItem cell = new ActionBarMenuSubItem(parentActivity, a == 0, a == 3); if (a == 0) { @@ -4872,6 +4881,8 @@ 
public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat cell.setTextAndIcon(LocaleController.getString("ReplacePhoto", R.string.ReplacePhoto), R.drawable.msg_replace); } else if (a == 3) { cell.setTextAndIcon(LocaleController.getString("SendAsNewPhoto", R.string.SendAsNewPhoto), R.drawable.msg_sendphoto); + } else if (a == 4) { + cell.setTextAndIcon(LocaleController.getString("SendWithoutCompression", R.string.SendWithoutCompression), R.drawable.msg_sendfile); } cell.setMinimumWidth(AndroidUtilities.dp(196)); cell.setColors(0xffffffff, 0xffffffff); @@ -4888,9 +4899,14 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat replacePressed(); } else if (a == 3) { sendPressed(true, 0); + } else if (a == 4) { + sendPressed(true, 0, false, true); } }); } + if (sendPopupLayout.getChildCount() == 0) { + return false; + } sendPopupLayout.setupRadialSelectors(0x24ffffff); sendPopupWindow = new ActionBarPopupWindow(sendPopupLayout, LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT); @@ -5730,14 +5746,14 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } private void sendPressed(boolean notify, int scheduleDate) { - sendPressed(notify, scheduleDate, false); + sendPressed(notify, scheduleDate, false, false); } private void replacePressed() { - sendPressed(false, 0, true); + sendPressed(false, 0, true, false); } - private void sendPressed(boolean notify, int scheduleDate, boolean replace) { + private void sendPressed(boolean notify, int scheduleDate, boolean replace, boolean forceDocument) { if (captionEditText.getTag() != null) { return; } @@ -5761,7 +5777,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } doneButtonPressed = true; if (!replace) { - placeProvider.sendButtonPressed(currentIndex, videoEditedInfo, notify, scheduleDate); + placeProvider.sendButtonPressed(currentIndex, videoEditedInfo, notify, scheduleDate, forceDocument); } else { 
placeProvider.replaceButtonPressed(currentIndex, videoEditedInfo); } @@ -9602,7 +9618,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat mirrorItem.setVisibility(View.GONE); allowCaption = cropItem.getVisibility() == View.VISIBLE; } - if (parentChatActivity != null && (parentChatActivity.currentEncryptedChat == null || AndroidUtilities.getPeerLayerVersion(parentChatActivity.currentEncryptedChat.layer) >= 46)) { + if (parentChatActivity != null) { mentionsAdapter.setChatInfo(parentChatActivity.chatInfo); mentionsAdapter.setNeedUsernames(parentChatActivity.currentChat != null); mentionsAdapter.setNeedBotContext(false); @@ -9673,7 +9689,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } private boolean canSendMediaToParentChatActivity() { - return parentChatActivity != null && (parentChatActivity.currentUser != null || parentChatActivity.currentChat != null && ChatObject.canSendMedia(parentChatActivity.currentChat)); + return parentChatActivity != null && (parentChatActivity.currentUser != null || parentChatActivity.currentChat != null && !ChatObject.isNotInChat(parentChatActivity.currentChat) && ChatObject.canSendMedia(parentChatActivity.currentChat)); } private void setDoubleTapEnabled(boolean value) { @@ -9715,6 +9731,8 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat allowShare = false; bottomLayout.setTranslationY(AndroidUtilities.dp(48)); captionTextViewSwitcher.setTranslationY(AndroidUtilities.dp(48)); + nameTextView.setText(""); + dateTextView.setText(""); } else { if (newMessageObject.isNewGif()) { menuItem.showSubItem(gallery_menu_savegif); @@ -11258,7 +11276,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat if (currentPlaceObject != null) { currentPlaceObject.imageReceiver.setVisible(true, true); } - currentPlaceObject = placeProvider.getPlaceForPhoto(currentMessageObject, getFileLocation(currentFileLocation), 
currentIndex, false); + currentPlaceObject = placeProvider == null ? null : placeProvider.getPlaceForPhoto(currentMessageObject, getFileLocation(currentFileLocation), currentIndex, false); if (currentPlaceObject != null) { currentPlaceObject.imageReceiver.setVisible(false, true); } @@ -11425,14 +11443,14 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } @Override - public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate) { - sendMedia(videoEditedInfo, notify, scheduleDate, false); + public void sendButtonPressed(int index, VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean forceDocument) { + sendMedia(videoEditedInfo, notify, scheduleDate, false, forceDocument); } @Override public void replaceButtonPressed(int index, VideoEditedInfo videoEditedInfo) { if (photoEntry.isCropped || photoEntry.isPainted || photoEntry.isFiltered || videoEditedInfo != null || !TextUtils.isEmpty(photoEntry.caption)) { - sendMedia(videoEditedInfo, false, 0, true); + sendMedia(videoEditedInfo, false, 0, true, false); } } @@ -11451,7 +11469,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat return false; } - private void sendMedia(VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean replace) { + private void sendMedia(VideoEditedInfo videoEditedInfo, boolean notify, int scheduleDate, boolean replace, boolean forceDocument) { if (parentChatActivity != null) { final MessageObject editingMessageObject = replace ? 
finalMessageObject : null; if (editingMessageObject != null && !TextUtils.isEmpty(photoEntry.caption)) { @@ -11460,15 +11478,15 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } if (photoEntry.isVideo) { if (videoEditedInfo != null) { - SendMessagesHelper.prepareSendingVideo(parentChatActivity.getAccountInstance(), photoEntry.path, videoEditedInfo, parentChatActivity.getDialogId(), parentChatActivity.getReplyMessage(), parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate); + SendMessagesHelper.prepareSendingVideo(parentChatActivity.getAccountInstance(), photoEntry.path, videoEditedInfo, parentChatActivity.getDialogId(), parentChatActivity.getReplyMessage(), parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate, forceDocument); } else { - SendMessagesHelper.prepareSendingVideo(parentChatActivity.getAccountInstance(), photoEntry.path, null, parentChatActivity.getDialogId(), parentChatActivity.getReplyMessage(), parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate); + SendMessagesHelper.prepareSendingVideo(parentChatActivity.getAccountInstance(), photoEntry.path, null, parentChatActivity.getDialogId(), parentChatActivity.getReplyMessage(), parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.ttl, editingMessageObject, notify, scheduleDate, forceDocument); } } else { if (photoEntry.imagePath != null) { - SendMessagesHelper.prepareSendingPhoto(parentChatActivity.getAccountInstance(), photoEntry.imagePath, photoEntry.thumbPath, null, parentChatActivity.getDialogId(), parentChatActivity.getReplyMessage(), parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, 
videoEditedInfo, notify, scheduleDate); + SendMessagesHelper.prepareSendingPhoto(parentChatActivity.getAccountInstance(), photoEntry.imagePath, photoEntry.thumbPath, null, parentChatActivity.getDialogId(), parentChatActivity.getReplyMessage(), parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate, forceDocument); } else if (photoEntry.path != null) { - SendMessagesHelper.prepareSendingPhoto(parentChatActivity.getAccountInstance(), photoEntry.path, photoEntry.thumbPath, null, parentChatActivity.getDialogId(), parentChatActivity.getReplyMessage(), parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate); + SendMessagesHelper.prepareSendingPhoto(parentChatActivity.getAccountInstance(), photoEntry.path, photoEntry.thumbPath, null, parentChatActivity.getDialogId(), parentChatActivity.getReplyMessage(), parentChatActivity.getThreadMessage(), photoEntry.caption, photoEntry.entities, photoEntry.stickers, null, photoEntry.ttl, editingMessageObject, videoEditedInfo, notify, scheduleDate, forceDocument); } } } @@ -11604,14 +11622,14 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat actionBar.setTitle(LocaleController.formatString("Of", R.string.Of, 1, 1)); actionBar.setTitleScrollNonFitText(false); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidFailToLoad); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadFailed); + NotificationCenter.getInstance(currentAccount).addObserver(this, 
NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.mediaCountDidLoad); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.mediaDidLoad); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.dialogPhotosLoaded); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.messagesDeleted); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.filePreparingFailed); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.fileNewChunkAvailable); @@ -11926,6 +11944,61 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat parentChatActivity.getUndoView().hide(false, 1); parentChatActivity.getFragmentView().invalidate(); } + windowView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { + @Override + public boolean onPreDraw() { + windowView.getViewTreeObserver().removeOnPreDrawListener(this); + actionBar.setTranslationY(-AndroidUtilities.dp(32)); + actionBar.animate().alpha(1).translationY(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + + checkImageView.setTranslationY(-AndroidUtilities.dp(32)); + checkImageView.animate().alpha(1).translationY(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + + photosCounterView.setTranslationY(-AndroidUtilities.dp(32)); + photosCounterView.animate().alpha(1).translationY(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + + + pickerView.setTranslationY(AndroidUtilities.dp(32)); + 
pickerView.animate().alpha(1).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + pickerViewSendButton.setTranslationY(AndroidUtilities.dp(32)); + pickerViewSendButton.setAlpha(0f); + pickerViewSendButton.animate().alpha(1).translationY(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + + cameraItem.setTranslationY(AndroidUtilities.dp(32)); + cameraItem.animate().alpha(1).translationY(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + + videoPreviewFrame.setTranslationY(AndroidUtilities.dp(32)); + videoPreviewFrame.animate().alpha(1).translationY(0).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); + + containerView.setAlpha(0); + backgroundDrawable.setAlpha(0); + + animationInProgress = 4; + containerView.invalidate(); + AnimatorSet animatorSet = new AnimatorSet(); + ObjectAnimator alphaAnimator = ObjectAnimator.ofFloat(containerView, View.ALPHA, 0f, 1f).setDuration(220); + ObjectAnimator a2 = ObjectAnimator.ofFloat(pickerView, View.TRANSLATION_Y, pickerView.getTranslationY(), 0f).setDuration(220); + a2.setInterpolator(CubicBezierInterpolator.DEFAULT); + animatorSet.playTogether( + alphaAnimator, + a2 + ); + animatorSet.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + super.onAnimationEnd(animation); + animationInProgress = 0; + backgroundDrawable.setAlpha(255); + containerView.invalidate(); + pickerView.setTranslationY(0f); + + } + }); + animatorSet.start(); + return true; + } + }); + } AccessibilityManager am = (AccessibilityManager) parentActivity.getSystemService(Context.ACCESSIBILITY_SERVICE); @@ -12330,14 +12403,14 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } private void removeObservers() { - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidFailToLoad); - 
NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileDidLoad); - NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.FileLoadProgressChanged); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadFailed); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoaded); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileLoadProgressChanged); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.mediaCountDidLoad); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.mediaDidLoad); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.dialogPhotosLoaded); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.messagesDeleted); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.filePreparingFailed); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.fileNewChunkAvailable); ConnectionsManager.getInstance(currentAccount).cancelRequestsForGuid(classGuid); @@ -12589,12 +12662,7 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat } float longPressX; - Runnable longPressRunnable = new Runnable() { - @Override - public void run() { - onLongPress(); - } - }; + Runnable longPressRunnable = this::onLongPress; private boolean onTouchEvent(MotionEvent ev) { if (currentEditMode == 3 && animationStartTime != 0 && (ev.getActionMasked() == MotionEvent.ACTION_DOWN || ev.getActionMasked() == MotionEvent.ACTION_POINTER_DOWN)) { @@ -13176,13 +13244,15 @@ public class 
PhotoViewer implements NotificationCenter.NotificationCenterDelegat int containerWidth = getContainerViewWidth(); int containerHeight = getContainerViewHeight(); - if (animationInProgress != 2 && !pipAnimationInProgress && !isInline) { + if (animationInProgress != 2 && animationInProgress != 4 && !pipAnimationInProgress && !isInline) { if (currentEditMode == 0 && sendPhotoType != SELECT_TYPE_AVATAR && scale == 1 && aty != -1 && !zoomAnimation) { float maxValue = containerWidth / 4.0f; backgroundDrawable.setAlpha((int) Math.max(127, 255 * (1.0f - (Math.min(Math.abs(aty), maxValue) / maxValue)))); } else { backgroundDrawable.setAlpha(255); } + } else if (animationInProgress == 4) { + canvas.drawColor(0xff000000); } sideImage = null; @@ -13816,6 +13886,9 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat private final Runnable backSeek = new Runnable() { @Override public void run() { + if (videoPlayer == null) { + return; + } long duration = videoPlayer.getDuration(); if (duration == 0 || duration == C.TIME_UNSET) { rewindLastTime = System.currentTimeMillis(); @@ -14086,6 +14159,8 @@ public class PhotoViewer implements NotificationCenter.NotificationCenterDelegat return current != C.TIME_UNSET && total > 15 * 1000 && (!forward || total - current > 10000); } + long totalRewinding; + @Override public boolean onDoubleTap(MotionEvent e) { if (videoPlayer != null && videoPlayerControlVisible) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PinchToZoomHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/PinchToZoomHelper.java index 71cfe9f19..3ff224df9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PinchToZoomHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PinchToZoomHelper.java @@ -688,11 +688,9 @@ public class PinchToZoomHelper { if (pointerId1 == ev.getPointerId(0) && pointerId2 == ev.getPointerId(1)) { return true; } - if (pointerId1 == ev.getPointerId(1) && pointerId2 == ev.getPointerId(0)) { 
return true; } - return false; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PopupNotificationActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PopupNotificationActivity.java index a50b75257..1213a1ad3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PopupNotificationActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PopupNotificationActivity.java @@ -170,7 +170,7 @@ public class PopupNotificationActivity extends Activity implements NotificationC NotificationCenter.getInstance(a).addObserver(this, NotificationCenter.contactsDidLoad); } NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.pushMessagesUpdated); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); classGuid = ConnectionsManager.generateClassGuid(); statusDrawables[0] = new TypingDotsDrawable(false); @@ -1534,7 +1534,7 @@ public class PopupNotificationActivity extends Activity implements NotificationC } } } - } else if (id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.emojiLoaded) { if (messageContainer != null) { int count = messageContainer.getChildCount(); for (int a = 0; a < count; a++) { @@ -1583,7 +1583,7 @@ public class PopupNotificationActivity extends Activity implements NotificationC NotificationCenter.getInstance(a).removeObserver(this, NotificationCenter.contactsDidLoad); } NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.pushMessagesUpdated); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); if (chatActivityEnterView != null) { chatActivityEnterView.onDestroy(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java 
b/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java index 9f9664ed4..b98baaa65 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java @@ -277,7 +277,7 @@ public class PrivacyControlActivity extends BaseFragment implements Notification updateRows(false); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.privacyRulesUpdated); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didSetNewWallpapper); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); return true; } @@ -286,7 +286,7 @@ public class PrivacyControlActivity extends BaseFragment implements Notification super.onFragmentDestroy(); NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.privacyRulesUpdated); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetNewWallpapper); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); } @Override @@ -439,7 +439,7 @@ public class PrivacyControlActivity extends BaseFragment implements Notification public void didReceivedNotification(int id, int account, Object... 
args) { if (id == NotificationCenter.privacyRulesUpdated) { checkPrivacy(); - } else if (id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.emojiLoaded) { listView.invalidateViews(); } else if (id == NotificationCenter.didSetNewWallpapper) { if (messageCell != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PrivacySettingsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PrivacySettingsActivity.java index 21b434618..df9e5fce4 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PrivacySettingsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PrivacySettingsActivity.java @@ -754,6 +754,11 @@ public class PrivacySettingsActivity extends BaseFragment implements Notificatio public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) { switch (holder.getItemViewType()) { case 0: + boolean showLoading = false; + String value = null; + int loadingLen = 16; + boolean animated = holder.itemView.getTag() != null && ((Integer) holder.itemView.getTag()) == position; + holder.itemView.setTag(position); TextSettingsCell textCell = (TextSettingsCell) holder.itemView; if (position == blockedRow) { int totalCount = getMessagesController().totalBlockedCount; @@ -762,6 +767,7 @@ public class PrivacySettingsActivity extends BaseFragment implements Notificatio } else if (totalCount > 0) { textCell.setTextAndValue(LocaleController.getString("BlockedUsers", R.string.BlockedUsers), String.format("%d", totalCount), true); } else { + showLoading = true; textCell.setText(LocaleController.getString("BlockedUsers", R.string.BlockedUsers), true); } } else if (position == sessionsRow) { @@ -769,9 +775,8 @@ public class PrivacySettingsActivity extends BaseFragment implements Notificatio } else if (position == webSessionsRow) { textCell.setText(LocaleController.getString("WebSessionsTitle", R.string.WebSessionsTitle), false); } else if (position == passwordRow) { - String value; if (currentPassword == null) { - 
value = LocaleController.getString("Loading", R.string.Loading); + showLoading = true; } else if (currentPassword.has_password) { value = LocaleController.getString("PasswordOn", R.string.PasswordOn); } else { @@ -781,49 +786,49 @@ public class PrivacySettingsActivity extends BaseFragment implements Notificatio } else if (position == passcodeRow) { textCell.setText(LocaleController.getString("Passcode", R.string.Passcode), true); } else if (position == phoneNumberRow) { - String value; if (getContactsController().getLoadingPrivicyInfo(ContactsController.PRIVACY_RULES_TYPE_PHONE)) { - value = LocaleController.getString("Loading", R.string.Loading); + showLoading = true; + loadingLen = 30; } else { value = formatRulesString(getAccountInstance(), ContactsController.PRIVACY_RULES_TYPE_PHONE); } textCell.setTextAndValue(LocaleController.getString("PrivacyPhone", R.string.PrivacyPhone), value, true); } else if (position == lastSeenRow) { - String value; if (getContactsController().getLoadingPrivicyInfo(ContactsController.PRIVACY_RULES_TYPE_LASTSEEN)) { - value = LocaleController.getString("Loading", R.string.Loading); + showLoading = true; + loadingLen = 30; } else { value = formatRulesString(getAccountInstance(), ContactsController.PRIVACY_RULES_TYPE_LASTSEEN); } textCell.setTextAndValue(LocaleController.getString("PrivacyLastSeen", R.string.PrivacyLastSeen), value, true); } else if (position == groupsRow) { - String value; if (getContactsController().getLoadingPrivicyInfo(ContactsController.PRIVACY_RULES_TYPE_INVITE)) { - value = LocaleController.getString("Loading", R.string.Loading); + showLoading = true; + loadingLen = 30; } else { value = formatRulesString(getAccountInstance(), ContactsController.PRIVACY_RULES_TYPE_INVITE); } textCell.setTextAndValue(LocaleController.getString("GroupsAndChannels", R.string.GroupsAndChannels), value, false); } else if (position == callsRow) { - String value; if 
(getContactsController().getLoadingPrivicyInfo(ContactsController.PRIVACY_RULES_TYPE_CALLS)) { - value = LocaleController.getString("Loading", R.string.Loading); + showLoading = true; + loadingLen = 30; } else { value = formatRulesString(getAccountInstance(), ContactsController.PRIVACY_RULES_TYPE_CALLS); } textCell.setTextAndValue(LocaleController.getString("Calls", R.string.Calls), value, true); } else if (position == profilePhotoRow) { - String value; if (getContactsController().getLoadingPrivicyInfo(ContactsController.PRIVACY_RULES_TYPE_PHOTO)) { - value = LocaleController.getString("Loading", R.string.Loading); + showLoading = true; + loadingLen = 30; } else { value = formatRulesString(getAccountInstance(), ContactsController.PRIVACY_RULES_TYPE_PHOTO); } textCell.setTextAndValue(LocaleController.getString("PrivacyProfilePhoto", R.string.PrivacyProfilePhoto), value, true); } else if (position == forwardsRow) { - String value; if (getContactsController().getLoadingPrivicyInfo(ContactsController.PRIVACY_RULES_TYPE_FORWARDS)) { - value = LocaleController.getString("Loading", R.string.Loading); + showLoading = true; + loadingLen = 30; } else { value = formatRulesString(getAccountInstance(), ContactsController.PRIVACY_RULES_TYPE_FORWARDS); } @@ -831,9 +836,8 @@ public class PrivacySettingsActivity extends BaseFragment implements Notificatio } else if (position == passportRow) { textCell.setText(LocaleController.getString("TelegramPassport", R.string.TelegramPassport), true); } else if (position == deleteAccountRow) { - String value; if (getContactsController().getLoadingDeleteInfo()) { - value = LocaleController.getString("Loading", R.string.Loading); + showLoading = true; } else { int ttl = getContactsController().getDeleteAccountTTL(); if (ttl <= 182) { @@ -848,7 +852,6 @@ public class PrivacySettingsActivity extends BaseFragment implements Notificatio } else if (position == paymentsClearRow) { textCell.setText(LocaleController.getString("PrivacyPaymentsClear", 
R.string.PrivacyPaymentsClear), true); } else if (position == secretMapRow) { - String value; switch (SharedConfig.mapPreviewType) { case 0: value = LocaleController.getString("MapPreviewProviderTelegram", R.string.MapPreviewProviderTelegram); @@ -868,6 +871,7 @@ public class PrivacySettingsActivity extends BaseFragment implements Notificatio } else if (position == contactsDeleteRow) { textCell.setText(LocaleController.getString("SyncContactsDelete", R.string.SyncContactsDelete), true); } + textCell.setDrawLoading(showLoading, loadingLen, animated); break; case 1: TextInfoPrivacyCell privacyCell = (TextInfoPrivacyCell) holder.itemView; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java index ca3cb5dc6..9c8c935b3 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java @@ -215,6 +215,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. private int overlayCountVisible; + private ImageLocation prevLoadedImageLocation; + private int lastMeasuredContentWidth; private int lastMeasuredContentHeight; private int listContentHeight; @@ -259,6 +261,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. private boolean transitionAnimationInProress; private boolean recreateMenuAfterAnimation; private int playProfileAnimation; + private boolean needTimerImage; private boolean allowProfileAnimation = true; private float extraHeight; private float initialAnimationExtraHeight; @@ -363,10 +366,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
private int helpSectionCell; private int debugHeaderRow; private int sendLogsRow; + private int sendLastLogsRow; private int clearLogsRow; private int switchBackendRow; private int versionRow; - private int emptyRow; private int bottomPaddingRow; private int infoHeaderRow; @@ -1317,7 +1320,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. userBlocked = getMessagesController().blockePeers.indexOfKey(user_id) >= 0; if (user.bot) { isBot = true; - getMediaDataController().loadBotInfo(user.id, true, classGuid); + getMediaDataController().loadBotInfo(user.id, user.id, true, classGuid); } userInfo = getMessagesController().getUserFull(user_id); getMessagesController().loadFullUser(getMessagesController().getUser(user_id), classGuid, true); @@ -1382,7 +1385,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. getNotificationCenter().addObserver(this, NotificationCenter.updateInterfaces); getNotificationCenter().addObserver(this, NotificationCenter.didReceiveNewMessages); getNotificationCenter().addObserver(this, NotificationCenter.closeChats); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); updateRowsIds(); if (listAdapter != null) { listAdapter.notifyDataSetChanged(); @@ -1411,7 +1414,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
getNotificationCenter().removeObserver(this, NotificationCenter.updateInterfaces); getNotificationCenter().removeObserver(this, NotificationCenter.closeChats); getNotificationCenter().removeObserver(this, NotificationCenter.didReceiveNewMessages); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); if (avatarsViewPager != null) { avatarsViewPager.onDestroy(); } @@ -1528,7 +1531,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. getMessagesController().blockPeer(user_id); } else { getMessagesController().unblockPeer(user_id); - getSendMessagesHelper().sendMessage("/start", user_id, null, null, null, false, null, null, null, true, 0); + getSendMessagesHelper().sendMessage("/start", user_id, null, null, null, false, null, null, null, true, 0, null); finishFragment(); } } @@ -1702,7 +1705,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (getParentActivity() == null) { return; } - if (Build.VERSION.SDK_INT >= 23 && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { + if (Build.VERSION.SDK_INT >= 23 && (Build.VERSION.SDK_INT <= 28 || BuildVars.NO_SCOPED_STORAGE) && getParentActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { getParentActivity().requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 4); return; } @@ -2603,7 +2606,9 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
} else if (position == policyRow) { Browser.openUrl(getParentActivity(), LocaleController.getString("PrivacyPolicyUrl", R.string.PrivacyPolicyUrl)); } else if (position == sendLogsRow) { - sendLogs(); + sendLogs(false); + } else if (position == sendLastLogsRow) { + sendLogs(true); } else if (position == clearLogsRow) { FileLog.cleanupLogs(); } else if (position == switchBackendRow) { @@ -2655,7 +2660,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. LocaleController.getString("DebugMenuReloadContacts", R.string.DebugMenuReloadContacts), LocaleController.getString("DebugMenuResetContacts", R.string.DebugMenuResetContacts), LocaleController.getString("DebugMenuResetDialogs", R.string.DebugMenuResetDialogs), - BuildVars.LOGS_ENABLED ? LocaleController.getString("DebugMenuDisableLogs", R.string.DebugMenuDisableLogs) : LocaleController.getString("DebugMenuEnableLogs", R.string.DebugMenuEnableLogs), + BuildVars.DEBUG_VERSION ? null : (BuildVars.LOGS_ENABLED ? LocaleController.getString("DebugMenuDisableLogs", R.string.DebugMenuDisableLogs) : LocaleController.getString("DebugMenuEnableLogs", R.string.DebugMenuEnableLogs)), SharedConfig.inappCamera ? LocaleController.getString("DebugMenuDisableCamera", R.string.DebugMenuDisableCamera) : LocaleController.getString("DebugMenuEnableCamera", R.string.DebugMenuEnableCamera), LocaleController.getString("DebugMenuClearMediaCache", R.string.DebugMenuClearMediaCache), LocaleController.getString("DebugMenuCallSettings", R.string.DebugMenuCallSettings), @@ -2666,7 +2671,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. BuildVars.DEBUG_VERSION && !AndroidUtilities.isTablet() && Build.VERSION.SDK_INT >= 23 ? (SharedConfig.smoothKeyboard ? 
LocaleController.getString("DebugMenuDisableSmoothKeyboard", R.string.DebugMenuDisableSmoothKeyboard) : LocaleController.getString("DebugMenuEnableSmoothKeyboard", R.string.DebugMenuEnableSmoothKeyboard)) : null, BuildVars.DEBUG_PRIVATE_VERSION ? (SharedConfig.disableVoiceAudioEffects ? "Enable voip audio effects" : "Disable voip audio effects") : null, Build.VERSION.SDK_INT >= 21 ? (SharedConfig.noStatusBar ? "Show status bar background" : "Hide status bar background") : null, - SharedConfig.useMediaStream ? "Use call stream in voice chats" : "Use media stream in voice chats" + BuildVars.DEBUG_PRIVATE_VERSION ? "Clean app update" : null, }; builder.setItems(items, (dialog, which) -> { if (which == 0) { @@ -2722,7 +2727,9 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } } } else if (which == 15) { - SharedConfig.toggleUseMediaStream(); + SharedConfig.pendingAppUpdate = null; + SharedConfig.saveConfig(); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.appUpdateAvailable); } }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); @@ -4663,7 +4670,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } } } - } else if (id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.emojiLoaded) { if (listView != null) { listView.invalidateViews(); } @@ -4690,7 +4697,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. timeItem.setVisibility(View.VISIBLE); } else if (userInfo != null) { timerDrawable.setTime(userInfo.ttl_period); - if (userInfo.ttl_period != 0) { + if (needTimerImage && userInfo.ttl_period != 0) { timeItem.setTag(1); timeItem.setVisibility(View.VISIBLE); } else { @@ -4699,7 +4706,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
} } else if (chatInfo != null) { timerDrawable.setTime(chatInfo.ttl_period); - if (chatInfo.ttl_period != 0) { + if (needTimerImage && chatInfo.ttl_period != 0) { timeItem.setTag(1); timeItem.setVisibility(View.VISIBLE); } else { @@ -4803,6 +4810,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. public void setPlayProfileAnimation(int type) { SharedPreferences preferences = MessagesController.getGlobalMainSettings(); if (!AndroidUtilities.isTablet()) { + needTimerImage = type != 0; if (preferences.getBoolean("view_animations", true)) { playProfileAnimation = type; } else if (type == 2) { @@ -5269,6 +5277,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. helpSectionCell = -1; debugHeaderRow = -1; sendLogsRow = -1; + sendLastLogsRow = -1; clearLogsRow = -1; switchBackendRow = -1; versionRow = -1; @@ -5354,6 +5363,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } if (BuildVars.LOGS_ENABLED) { sendLogsRow = rowCount++; + sendLastLogsRow = rowCount++; clearLogsRow = rowCount++; } if (BuildVars.DEBUG_PRIVATE_VERSION) { @@ -5593,7 +5603,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. updateListAnimated(false); needLayout(true); } - getFileLoader().loadFile(imageLocation, user, null, 0, 1); + if (imageLocation != null && (prevLoadedImageLocation == null || imageLocation.photoId != prevLoadedImageLocation.photoId)) { + prevLoadedImageLocation = imageLocation; + getFileLoader().loadFile(imageLocation, user, null, 0, 1); + } String newString = UserObject.getUserName(user); String newString2; @@ -5802,7 +5815,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
if (avatarBig == null) { avatarImage.setImage(videoLocation, filter, thumbLocation, "50_50", avatarDrawable, chat); } - getFileLoader().loadFile(imageLocation, chat, null, 0, 1); + if (imageLocation != null && (prevLoadedImageLocation == null || imageLocation.photoId != prevLoadedImageLocation.photoId)) { + prevLoadedImageLocation = imageLocation; + getFileLoader().loadFile(imageLocation, chat, null, 0, 1); + } avatarImage.getImageReceiver().setVisible(!PhotoViewer.isShowingImage(photoBig), false); } } @@ -6077,7 +6093,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } } if (grantResults.length > 0 && allGranted) { - VoIPHelper.startCall(currentChat, null, null, true, getParentActivity(), ProfileActivity.this, getAccountInstance()); + ChatObject.Call call = getMessagesController().getGroupCall(chat_id, false); + VoIPHelper.startCall(currentChat, null, null, call == null, getParentActivity(), ProfileActivity.this, getAccountInstance()); } else { VoIPHelper.permissionDenied(getParentActivity(), null, requestCode); } @@ -6428,7 +6445,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } } - private void sendLogs() { + private void sendLogs(boolean last) { if (getParentActivity() == null) { return; } @@ -6448,6 +6465,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. File[] files = dir.listFiles(); boolean[] finished = new boolean[1]; + long currentDate = System.currentTimeMillis(); BufferedInputStream origin = null; ZipOutputStream out = null; @@ -6457,6 +6475,9 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
byte[] data = new byte[1024 * 64]; for (int i = 0; i < files.length; i++) { + if (last && (currentDate - files[i].lastModified()) > 24 * 60 * 60 * 1000) { + continue; + } FileInputStream fi = new FileInputStream(files[i]); origin = new BufferedInputStream(fi, data.length); @@ -6888,6 +6909,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. textCell.setTextAndIcon(LocaleController.getString("PrivacyPolicy", R.string.PrivacyPolicy), R.drawable.menu_policy, false); } else if (position == sendLogsRow) { textCell.setText(LocaleController.getString("DebugSendLogs", R.string.DebugSendLogs), true); + } else if (position == sendLastLogsRow) { + textCell.setText(LocaleController.getString("DebugSendLastLogs", R.string.DebugSendLastLogs), true); } else if (position == clearLogsRow) { textCell.setText(LocaleController.getString("DebugClearLogs", R.string.DebugClearLogs), switchBackendRow != -1); } else if (position == switchBackendRow) { @@ -7023,7 +7046,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. position == languageRow || position == setUsernameRow || position == bioRow || position == versionRow || position == dataRow || position == chatRow || position == questionRow || position == devicesRow || position == filtersRow || - position == faqRow || position == policyRow || position == sendLogsRow || + position == faqRow || position == policyRow || position == sendLogsRow || position == sendLastLogsRow || position == clearLogsRow || position == switchBackendRow || position == setAvatarRow; } if (holder.itemView instanceof UserCell) { @@ -7061,7 +7084,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
position == sendMessageRow || position == notificationRow || position == privacyRow || position == languageRow || position == dataRow || position == chatRow || position == questionRow || position == devicesRow || position == filtersRow || - position == faqRow || position == policyRow || position == sendLogsRow || + position == faqRow || position == policyRow || position == sendLogsRow || position == sendLastLogsRow || position == clearLogsRow || position == switchBackendRow || position == setAvatarRow) { return 4; } else if (position == notificationsDividerRow) { @@ -7688,9 +7711,13 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. if (scamDrawable != null) { scamDrawable.setColor(Theme.getColor(Theme.key_avatar_subtitleInProfileBlue)); } - nameTextView[1].setTextColor(Theme.getColor(Theme.key_profile_title)); - actionBar.setItemsColor(Theme.getColor(Theme.key_actionBarDefaultIcon), false); - actionBar.setItemsBackgroundColor(Theme.getColor(Theme.key_avatar_actionBarSelectorBlue), false); + if (nameTextView[1] != null) { + nameTextView[1].setTextColor(Theme.getColor(Theme.key_profile_title)); + } + if (actionBar != null) { + actionBar.setItemsColor(Theme.getColor(Theme.key_actionBarDefaultIcon), false); + actionBar.setItemsBackgroundColor(Theme.getColor(Theme.key_avatar_actionBarSelectorBlue), false); + } } }; ArrayList arrayList = new ArrayList<>(); @@ -7940,6 +7967,7 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
put(++pointer, helpSectionCell, sparseIntArray); put(++pointer, debugHeaderRow, sparseIntArray); put(++pointer, sendLogsRow, sparseIntArray); + put(++pointer, sendLastLogsRow, sparseIntArray); put(++pointer, clearLogsRow, sparseIntArray); put(++pointer, switchBackendRow, sparseIntArray); put(++pointer, versionRow, sparseIntArray); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ThemeActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ThemeActivity.java index cbc497919..9421fb475 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ThemeActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ThemeActivity.java @@ -669,7 +669,7 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didSetNewWallpapper); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.themeListUpdated); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.themeAccentListUpdated); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.needShareTheme); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.needSetDayNightTheme); getNotificationCenter().addObserver(this, NotificationCenter.themeUploadedToServer); @@ -689,7 +689,7 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetNewWallpapper); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.themeListUpdated); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.themeAccentListUpdated); - NotificationCenter.getGlobalInstance().removeObserver(this, 
NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.needShareTheme); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.needSetDayNightTheme); getNotificationCenter().removeObserver(this, NotificationCenter.themeUploadedToServer); @@ -701,10 +701,11 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No public void didReceivedNotification(int id, int account, Object... args) { if (id == NotificationCenter.locationPermissionGranted) { updateSunTime(null, true); - } else if (id == NotificationCenter.didSetNewWallpapper || id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.didSetNewWallpapper || id == NotificationCenter.emojiLoaded) { if (listView != null) { listView.invalidateViews(); } + updateMenuItem(); } else if (id == NotificationCenter.themeAccentListUpdated) { if (listAdapter != null && themeAccentListRow != -1) { listAdapter.notifyItemChanged(themeAccentListRow, new Object()); @@ -816,6 +817,15 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No if (themesHorizontalListCell != null) { Theme.ThemeInfo themeInfo = Theme.getTheme("Blue"); Theme.ThemeInfo currentTheme = Theme.getCurrentTheme(); + Theme.ThemeAccent accent = themeInfo.themeAccentsMap.get(Theme.DEFALT_THEME_ACCENT_ID); + if (accent != null) { + Theme.OverrideWallpaperInfo info = new Theme.OverrideWallpaperInfo(); + info.slug = Theme.DEFAULT_BACKGROUND_SLUG; + info.fileName = "Blue_99_wp.jpg"; + info.originalFileName = "Blue_99_wp.jpg"; + accent.overrideWallpaper = info; + themeInfo.setOverrideWallpaper(info); + } if (themeInfo != currentTheme) { themeInfo.setCurrentAccentId(Theme.DEFALT_THEME_ACCENT_ID); Theme.saveThemeAccents(themeInfo, true, false, true, false); @@ -824,6 +834,8 @@ public class ThemeActivity extends BaseFragment 
implements NotificationCenter.No } else if (themeInfo.currentAccentId != Theme.DEFALT_THEME_ACCENT_ID) { NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.needSetDayNightTheme, currentTheme, currentType == THEME_TYPE_NIGHT, null, Theme.DEFALT_THEME_ACCENT_ID); listAdapter.notifyItemChanged(themeAccentListRow); + } else { + Theme.reloadWallpaper(); } } }); @@ -1073,7 +1085,7 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No } int fontSize = AndroidUtilities.isTablet() ? 18 : 16; Theme.ThemeInfo currentTheme = Theme.getCurrentTheme(); - if (SharedConfig.fontSize != fontSize || SharedConfig.bubbleRadius != 10 || !currentTheme.firstAccentIsDefault || currentTheme.currentAccentId != Theme.DEFALT_THEME_ACCENT_ID) { + if (SharedConfig.fontSize != fontSize || SharedConfig.bubbleRadius != 10 || !currentTheme.firstAccentIsDefault || currentTheme.currentAccentId != Theme.DEFALT_THEME_ACCENT_ID || accent != null && accent.overrideWallpaper != null && !Theme.DEFAULT_BACKGROUND_SLUG.equals(accent.overrideWallpaper.slug)) { menuItem.showSubItem(reset_settings); } else { menuItem.hideSubItem(reset_settings); @@ -2149,7 +2161,7 @@ public class ThemeActivity extends BaseFragment implements NotificationCenter.No themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{BubbleRadiusCell.class}, new String[]{"sizeBar"}, null, null, null, Theme.key_player_progressBackground)); themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{ChatListCell.class}, null, null, null, Theme.key_radioBackground)); - themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{ChatListCell.class}, null, null, null, Theme.key_radioBackgroundChecked));; + themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{ChatListCell.class}, null, null, null, Theme.key_radioBackgroundChecked)); themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{NotificationsCheckCell.class}, new 
String[]{"textView"}, null, null, null, Theme.key_windowBackgroundWhiteBlackText)); themeDescriptions.add(new ThemeDescription(listView, 0, new Class[]{NotificationsCheckCell.class}, new String[]{"valueTextView"}, null, null, null, Theme.key_windowBackgroundWhiteGrayText2)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ThemePreviewActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ThemePreviewActivity.java index 622824b79..ef3628d01 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ThemePreviewActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ThemePreviewActivity.java @@ -21,12 +21,14 @@ import android.content.SharedPreferences; import android.content.res.Configuration; import android.database.DataSetObserver; import android.graphics.Bitmap; +import android.graphics.BlendMode; import android.graphics.Canvas; import android.graphics.Outline; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.PorterDuffColorFilter; import android.graphics.Rect; +import android.graphics.RectF; import android.graphics.Shader; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.ColorDrawable; @@ -43,6 +45,7 @@ import androidx.viewpager.widget.ViewPager; import android.os.SystemClock; import android.text.TextPaint; import android.text.TextUtils; +import android.util.LongSparseArray; import android.util.TypedValue; import android.view.Gravity; import android.view.MotionEvent; @@ -58,6 +61,7 @@ import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; +import org.telegram.messenger.BuildVars; import org.telegram.messenger.DownloadController; import org.telegram.messenger.FileLoader; import org.telegram.messenger.FileLog; @@ -72,6 +76,7 @@ import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.SharedConfig; 
+import org.telegram.messenger.SvgHelper; import org.telegram.messenger.UserConfig; import org.telegram.messenger.Utilities; import org.telegram.tgnet.ConnectionsManager; @@ -99,16 +104,19 @@ import org.telegram.ui.Components.CombinedDrawable; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.Components.MediaActionDrawable; +import org.telegram.ui.Components.MotionBackgroundDrawable; import org.telegram.ui.Components.RadialProgress2; import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.SeekBarView; import org.telegram.ui.Components.ShareAlert; +import org.telegram.ui.Components.UndoView; import org.telegram.ui.Components.WallpaperCheckBoxView; import org.telegram.ui.Components.WallpaperParallaxEffect; import java.io.File; import java.io.FileOutputStream; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; public class ThemePreviewActivity extends BaseFragment implements DownloadController.FileDownloadProgressListener, NotificationCenter.NotificationCenterDelegate { @@ -133,7 +141,11 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro private int backupMyMessagesAccentColor; private int backupMyMessagesGradientAccentColor; private long backupBackgroundOverrideColor; - private long backupBackgroundGradientOverrideColor; + private long backupBackgroundGradientOverrideColor1; + private long backupBackgroundGradientOverrideColor2; + private long backupBackgroundGradientOverrideColor3; + private float backupIntensity; + private String backupSlug; private int backupBackgroundRotation; private long watchForKeyboardEndTime; @@ -141,10 +153,11 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro private ColorPicker colorPicker; private int lastPickedColor; - private int lastPickedColorNum; + private int lastPickedColorNum = -1; private Runnable applyColorAction = () -> { 
applyColorScheduled = false; applyColor(lastPickedColor, lastPickedColorNum); + lastPickedColorNum = -1; }; private boolean applyColorScheduled; @@ -163,11 +176,15 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro private FrameLayout frameLayout; + private UndoView undoView; + private FrameLayout page1; private RecyclerListView listView; private DialogsAdapter dialogsAdapter; private ImageView floatingButton; + private boolean wasScroll; + private ActionBar actionBar2; private FrameLayout page2; private RecyclerListView listView2; @@ -177,6 +194,9 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro private AnimatorSet motionAnimation; private RadialProgress2 radialProgress; private FrameLayout bottomOverlayChat; + private FrameLayout playAnimationView; + private ImageView playAnimationImageView; + private AnimatorSet playViewAnimator; private WallpaperCheckBoxView[] checkBoxView; private FrameLayout[] patternLayout = new FrameLayout[2]; private TextView[] patternsCancelButton = new TextView[2]; @@ -188,16 +208,22 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro private HeaderCell intensityCell; private SeekBarView intensitySeekBar; private ArrayList patterns; + private HashMap patternsDict = new HashMap<>(); private TLRPC.TL_wallPaper selectedPattern; private TLRPC.TL_wallPaper previousSelectedPattern; private TLRPC.TL_wallPaper lastSelectedPattern; private int backgroundColor; private int previousBackgroundColor; - private int backgroundGradientColor; - private int previousBackgroundGradientColor; + private int backgroundGradientColor1; + private int backgroundGradientColor2; + private int backgroundGradientColor3; + private int previousBackgroundGradientColor1; + private int previousBackgroundGradientColor2; + private int previousBackgroundGradientColor3; private int backgroundRotation; private int previousBackgroundRotation; private int patternColor; + private 
int checkColor; private float currentIntensity = 0.5f; private float previousIntensity; @@ -221,10 +247,13 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro private Object currentWallpaper; private Bitmap currentWallpaperBitmap; + private boolean rotatePreview; private boolean isMotion; private boolean isBlurred; + private boolean showColor; + private boolean progressVisible; private String imageFilter = "640_360"; @@ -237,16 +266,25 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } public ThemePreviewActivity(Object wallPaper, Bitmap bitmap) { + this(wallPaper, bitmap, false, false); + } + + public ThemePreviewActivity(Object wallPaper, Bitmap bitmap, boolean rotate, boolean openColor) { super(); screenType = SCREEN_TYPE_CHANGE_BACKGROUND; + showColor = openColor; currentWallpaper = wallPaper; currentWallpaperBitmap = bitmap; + rotatePreview = rotate; if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { WallpapersListActivity.ColorWallpaper object = (WallpapersListActivity.ColorWallpaper) currentWallpaper; isMotion = object.motion; selectedPattern = object.pattern; if (selectedPattern != null) { currentIntensity = object.intensity; + if (currentIntensity < 0 && !Theme.getActiveTheme().isDark()) { + currentIntensity *= -1; + } } } } @@ -270,7 +308,11 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro backupMyMessagesAccentColor = accent.myMessagesAccentColor; backupMyMessagesGradientAccentColor = accent.myMessagesGradientAccentColor; backupBackgroundOverrideColor = accent.backgroundOverrideColor; - backupBackgroundGradientOverrideColor = accent.backgroundGradientOverrideColor; + backupBackgroundGradientOverrideColor1 = accent.backgroundGradientOverrideColor1; + backupBackgroundGradientOverrideColor2 = accent.backgroundGradientOverrideColor2; + backupBackgroundGradientOverrideColor3 = accent.backgroundGradientOverrideColor3; + backupIntensity = 
accent.patternIntensity; + backupSlug = accent.patternSlug; backupBackgroundRotation = accent.backgroundRotation; } else { accent = applyingTheme.getAccent(false); @@ -293,10 +335,13 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro isMotion = motion; } + @SuppressLint("Recycle") @Override public View createView(Context context) { hasOwnBackground = true; - + if (AndroidUtilities.isTablet()) { + actionBar.setOccupyStatusBar(false); + } page1 = new FrameLayout(context); ActionBarMenu menu = actionBar.createMenu(); final ActionBarMenuItem item = menu.addItem(0, R.drawable.ic_ab_search).setIsSearchField(true).setActionBarMenuItemSearchListener(new ActionBarMenuItem.ActionBarMenuItemSearchListener() { @@ -365,6 +410,9 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro listView.setLayoutManager(new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false)); listView.setVerticalScrollbarPosition(LocaleController.isRTL ? RecyclerListView.SCROLLBAR_POSITION_LEFT : RecyclerListView.SCROLLBAR_POSITION_RIGHT); listView.setPadding(0, 0, 0, AndroidUtilities.dp(screenType != SCREEN_TYPE_PREVIEW ? 
12 : 0)); + listView.setOnItemClickListener((view, position) -> { + + }); page1.addView(listView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP)); floatingButton = new ImageView(context); @@ -383,8 +431,8 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro floatingButton.setImageResource(R.drawable.floating_pencil); if (Build.VERSION.SDK_INT >= 21) { StateListAnimator animator = new StateListAnimator(); - animator.addState(new int[]{android.R.attr.state_pressed}, ObjectAnimator.ofFloat(floatingButton, "translationZ", AndroidUtilities.dp(2), AndroidUtilities.dp(4)).setDuration(200)); - animator.addState(new int[]{}, ObjectAnimator.ofFloat(floatingButton, "translationZ", AndroidUtilities.dp(4), AndroidUtilities.dp(2)).setDuration(200)); + animator.addState(new int[]{android.R.attr.state_pressed}, ObjectAnimator.ofFloat(floatingButton, View.TRANSLATION_Z, AndroidUtilities.dp(2), AndroidUtilities.dp(4)).setDuration(200)); + animator.addState(new int[]{}, ObjectAnimator.ofFloat(floatingButton, View.TRANSLATION_Z, AndroidUtilities.dp(4), AndroidUtilities.dp(2)).setDuration(200)); floatingButton.setStateListAnimator(animator); floatingButton.setOutlineProvider(new ViewOutlineProvider() { @SuppressLint("NewApi") @@ -469,6 +517,9 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro messagesAdapter = new MessagesAdapter(context); actionBar2 = createActionBar(context); + if (AndroidUtilities.isTablet()) { + actionBar2.setOccupyStatusBar(false); + } actionBar2.setBackButtonDrawable(new BackDrawable(false)); actionBar2.setActionBarMenuOnItemClick(new ActionBar.ActionBarMenuOnItemClick() { @Override @@ -518,13 +569,41 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro link += "?mode=" + modes.toString(); } } else if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { - WallpapersListActivity.ColorWallpaper 
colorWallpaper = new WallpapersListActivity.ColorWallpaper(selectedPattern != null ? selectedPattern.slug : Theme.COLOR_BACKGROUND_SLUG, backgroundColor, backgroundGradientColor, backgroundRotation, currentIntensity, isMotion, null); + WallpapersListActivity.ColorWallpaper colorWallpaper = new WallpapersListActivity.ColorWallpaper(selectedPattern != null ? selectedPattern.slug : Theme.COLOR_BACKGROUND_SLUG, backgroundColor, backgroundGradientColor1, backgroundGradientColor2, backgroundGradientColor3, backgroundRotation, currentIntensity, isMotion, null); colorWallpaper.pattern = selectedPattern; link = colorWallpaper.getUrl(); } else { - return; + if (BuildVars.DEBUG_PRIVATE_VERSION) { + Theme.ThemeAccent accent = Theme.getActiveTheme().getAccent(false); + if (accent != null) { + WallpapersListActivity.ColorWallpaper colorWallpaper = new WallpapersListActivity.ColorWallpaper(accent.patternSlug, (int) accent.backgroundOverrideColor, (int) accent.backgroundGradientOverrideColor1, (int) accent.backgroundGradientOverrideColor2, (int) accent.backgroundGradientOverrideColor3, accent.backgroundRotation, accent.patternIntensity, accent.patternMotion, null); + for (int a = 0, N = patterns.size(); a < N; a++) { + TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) patterns.get(a); + if (wallPaper.pattern) { + if (accent.patternSlug.equals(wallPaper.slug)) { + colorWallpaper.pattern = wallPaper; + break; + } + } + } + link = colorWallpaper.getUrl(); + } else { + return; + } + } else { + return; + } } - showDialog(new ShareAlert(getParentActivity(), null, link, false, link, false)); + showDialog(new ShareAlert(getParentActivity(), null, link, false, link, false) { + @Override + protected void onSend(LongSparseArray dids, int count) { + if (dids.size() == 1) { + undoView.showWithAction(dids.valueAt(0).id, UndoView.ACTION_SHARE_BACKGROUND, count); + } else { + undoView.showWithAction(0, UndoView.ACTION_SHARE_BACKGROUND, count, dids.size(), null, null); + } + } + }); } } }); @@ 
-553,7 +632,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @Override protected void onDraw(Canvas canvas) { - if (background instanceof ColorDrawable || background instanceof GradientDrawable) { + if (background instanceof ColorDrawable || background instanceof GradientDrawable || background instanceof MotionBackgroundDrawable) { background.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight()); background.draw(canvas); } else if (background instanceof BitmapDrawable) { @@ -601,12 +680,20 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } } }; - int textsCount = currentWallpaper instanceof WallpapersListActivity.ColorWallpaper ? 3 : 2; - if (currentWallpaper instanceof WallpapersListActivity.FileWallpaper) { - WallpapersListActivity.FileWallpaper fileWallpaper = (WallpapersListActivity.FileWallpaper) currentWallpaper; - if (Theme.THEME_BACKGROUND_SLUG.equals(fileWallpaper.slug)) { + int textsCount; + if (screenType == SCREEN_TYPE_ACCENT_COLOR || currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { + textsCount = 3; + if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper && Theme.DEFAULT_BACKGROUND_SLUG.equals(((WallpapersListActivity.ColorWallpaper) currentWallpaper).slug)) { textsCount = 0; } + } else { + textsCount = 2; + if (currentWallpaper instanceof WallpapersListActivity.FileWallpaper) { + WallpapersListActivity.FileWallpaper fileWallpaper = (WallpapersListActivity.FileWallpaper) currentWallpaper; + if (Theme.THEME_BACKGROUND_SLUG.equals(fileWallpaper.slug)) { + textsCount = 0; + } + } } page2.addView(backgroundImage, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 48)); @@ -615,8 +702,8 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro if (!(currentWallpaper instanceof WallpapersListActivity.ColorWallpaper)) { Drawable dr = 
imageReceiver.getDrawable(); if (set && dr != null) { - if (!Theme.hasThemeKey(Theme.key_chat_serviceBackground)) { - Theme.applyChatServiceMessageColor(AndroidUtilities.calcDrawableColor(dr)); + if (!Theme.hasThemeKey(Theme.key_chat_serviceBackground) || backgroundImage.getBackground() instanceof MotionBackgroundDrawable) { + Theme.applyChatServiceMessageColor(AndroidUtilities.calcDrawableColor(dr), dr); } listView2.invalidateViews(); if (buttonsContainer != null) { @@ -643,9 +730,9 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } else { if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { actionBar2.setTitle(LocaleController.getString("BackgroundPreview", R.string.BackgroundPreview)); - if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper || currentWallpaper instanceof TLRPC.TL_wallPaper) { + if (BuildVars.DEBUG_PRIVATE_VERSION && Theme.getActiveTheme().getAccent(false) != null || currentWallpaper instanceof WallpapersListActivity.ColorWallpaper && !Theme.DEFAULT_BACKGROUND_SLUG.equals(((WallpapersListActivity.ColorWallpaper) currentWallpaper).slug) || currentWallpaper instanceof TLRPC.TL_wallPaper) { ActionBarMenu menu2 = actionBar2.createMenu(); - menu2.addItem(5, R.drawable.ic_share_video); + menu2.addItem(5, R.drawable.msg_share_filled); } } else if (screenType == SCREEN_TYPE_ACCENT_COLOR) { ActionBarMenu menu2 = actionBar2.createMenu(); @@ -659,8 +746,8 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } }; dropDownContainer.setSubMenuOpenSide(1); - dropDownContainer.addSubItem(1, LocaleController.getString("ColorPickerMainColor", R.string.ColorPickerMainColor)); dropDownContainer.addSubItem(2, LocaleController.getString("ColorPickerBackground", R.string.ColorPickerBackground)); + dropDownContainer.addSubItem(1, LocaleController.getString("ColorPickerMainColor", R.string.ColorPickerMainColor)); dropDownContainer.addSubItem(3, 
LocaleController.getString("ColorPickerMyMessages", R.string.ColorPickerMyMessages)); dropDownContainer.setAllowCloseAnimation(false); dropDownContainer.setForceSmoothKeyboard(true); @@ -776,6 +863,19 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro return result; } + @Override + public void setTranslationY(float translationY) { + super.setTranslationY(translationY); + if (checkBoxView != null) { + for (int a = 0; a < checkBoxView.length; a++) { + checkBoxView[a].invalidate(); + } + } + if (playAnimationView != null) { + playAnimationView.invalidate(); + } + } + @Override protected void onChildPressed(View child, float x, float y, boolean pressed) { if (pressed && child instanceof ChatMessageCell) { @@ -795,6 +895,21 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } return super.allowSelectChildAtPosition(child); } + + @Override + public boolean onTouchEvent(MotionEvent e) { + checkMotionEvent(e); + return super.onTouchEvent(e); + } + + private void checkMotionEvent(MotionEvent e) { + if (e.getAction() == MotionEvent.ACTION_UP) { + if (!wasScroll && currentWallpaper instanceof WallpapersListActivity.ColorWallpaper && patternLayout[0].getVisibility() == View.VISIBLE) { + showPatternsView(0, false, true); + } + wasScroll = false; + } + } }; ((DefaultItemAnimator) listView2.getItemAnimator()).setDelayAnimations(false); listView2.setVerticalScrollBarEnabled(true); @@ -810,7 +925,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro listView2.setLayoutManager(new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, true)); listView2.setVerticalScrollbarPosition(LocaleController.isRTL ? 
RecyclerListView.SCROLLBAR_POSITION_LEFT : RecyclerListView.SCROLLBAR_POSITION_RIGHT); if (screenType == SCREEN_TYPE_ACCENT_COLOR) { - page2.addView(listView2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 294)); + page2.addView(listView2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, 273)); listView2.setOnItemClickListener((view, position, x, y) -> { if (view instanceof ChatMessageCell) { ChatMessageCell cell = (ChatMessageCell) view; @@ -832,15 +947,24 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @Override public void onScrolled(RecyclerView recyclerView, int dx, int dy) { listView2.invalidateViews(); + wasScroll = true; + } + + @Override + public void onScrollStateChanged(RecyclerView recyclerView, int newState) { + if (newState == RecyclerView.SCROLL_STATE_IDLE) { + wasScroll = false; + } } }); page2.addView(actionBar2, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); parallaxEffect = new WallpaperParallaxEffect(context); - parallaxEffect.setCallback((offsetX, offsetY) -> { + parallaxEffect.setCallback((offsetX, offsetY, angle) -> { if (!isMotion) { return; } + Drawable background = backgroundImage.getBackground(); float progress; if (motionAnimation != null) { progress = (backgroundImage.getScaleX() - 1.0f) / (parallaxScale - 1.0f); @@ -922,8 +1046,10 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @SuppressLint("DrawAllocation") Bitmap dst = Bitmap.createBitmap(bitmap.getWidth(), bitmap.getHeight(), Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(dst); - if (backgroundGradientColor != 0) { - GradientDrawable gradientDrawable = new GradientDrawable(BackgroundGradientDrawable.getGradientOrientation(backgroundRotation), new int[]{backgroundColor, backgroundGradientColor}); + if (backgroundGradientColor2 != 0) { + + 
} else if (backgroundGradientColor1 != 0) { + GradientDrawable gradientDrawable = new GradientDrawable(BackgroundGradientDrawable.getGradientOrientation(backgroundRotation), new int[]{backgroundColor, backgroundGradientColor1}); gradientDrawable.setBounds(0, 0, dst.getWidth(), dst.getHeight()); gradientDrawable.draw(canvas); } else { @@ -931,11 +1057,15 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG); paint.setColorFilter(new PorterDuffColorFilter(patternColor, blendMode)); - paint.setAlpha((int) (255 * currentIntensity)); + paint.setAlpha((int) (255 * Math.abs(currentIntensity))); canvas.drawBitmap(bitmap, 0, 0, paint); FileOutputStream stream = new FileOutputStream(toFile); - dst.compress(Bitmap.CompressFormat.JPEG, 87, stream); + if (backgroundGradientColor2 != 0) { + dst.compress(Bitmap.CompressFormat.PNG, 100, stream); + } else { + dst.compress(Bitmap.CompressFormat.JPEG, 87, stream); + } stream.close(); done = true; } catch (Throwable e) { @@ -995,7 +1125,9 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro String slug; int rotation = 45; int color = 0; - int gradientColor = 0; + int gradientColor1 = 0; + int gradientColor2 = 0; + int gradientColor3 = 0; File path = null; if (currentWallpaper instanceof TLRPC.TL_wallPaper) { @@ -1003,14 +1135,21 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro slug = wallPaper.slug; } else if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { WallpapersListActivity.ColorWallpaper wallPaper = (WallpapersListActivity.ColorWallpaper) currentWallpaper; - if (selectedPattern != null) { - slug = selectedPattern.slug; + if (Theme.DEFAULT_BACKGROUND_SLUG.equals(wallPaper.slug)) { + slug = Theme.DEFAULT_BACKGROUND_SLUG; + color = 0; } else { - slug = Theme.COLOR_BACKGROUND_SLUG; + if (selectedPattern != null) { + slug = selectedPattern.slug; + } else { + slug = 
Theme.COLOR_BACKGROUND_SLUG; + } + color = backgroundColor; + gradientColor1 = backgroundGradientColor1; + gradientColor2 = backgroundGradientColor2; + gradientColor3 = backgroundGradientColor3; + rotation = backgroundRotation; } - color = backgroundColor; - gradientColor = backgroundGradientColor; - rotation = backgroundRotation; } else if (currentWallpaper instanceof WallpapersListActivity.FileWallpaper) { WallpapersListActivity.FileWallpaper wallPaper = (WallpapersListActivity.FileWallpaper) currentWallpaper; slug = wallPaper.slug; @@ -1036,9 +1175,30 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro wallpaperInfo.isBlurred = isBlurred; wallpaperInfo.isMotion = isMotion; wallpaperInfo.color = color; - wallpaperInfo.gradientColor = gradientColor; + wallpaperInfo.gradientColor1 = gradientColor1; + wallpaperInfo.gradientColor2 = gradientColor2; + wallpaperInfo.gradientColor3 = gradientColor3; wallpaperInfo.rotation = rotation; wallpaperInfo.intensity = currentIntensity; + if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { + WallpapersListActivity.ColorWallpaper colorWallpaper = (WallpapersListActivity.ColorWallpaper) currentWallpaper; + String slugStr; + if (!Theme.COLOR_BACKGROUND_SLUG.equals(slug) && !Theme.THEME_BACKGROUND_SLUG.equals(slug) && !Theme.DEFAULT_BACKGROUND_SLUG.equals(slug)) { + slugStr = slug; + } else { + slugStr = null; + } + float intensity = colorWallpaper.intensity; + if (intensity < 0 && !Theme.getActiveTheme().isDark()) { + intensity *= -1; + } + if (colorWallpaper.parentWallpaper != null && colorWallpaper.color == color && + colorWallpaper.gradientColor1 == gradientColor1 && colorWallpaper.gradientColor2 == gradientColor2 && colorWallpaper.gradientColor3 == gradientColor3 && TextUtils.equals(colorWallpaper.slug, slugStr) && + colorWallpaper.gradientRotation == rotation && (selectedPattern == null || Math.abs(intensity - currentIntensity) < 0.001f)) { + wallpaperInfo.wallpaperId = 
colorWallpaper.parentWallpaper.id; + wallpaperInfo.accessHash = colorWallpaper.parentWallpaper.access_hash; + } + } MessagesController.getInstance(currentAccount).saveWallpaperToServer(path, wallpaperInfo, slug != null, 0); if (done) { @@ -1077,11 +1237,8 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro int maxTextSize = 0; if (textsCount != 0) { buttonsContainer = new FrameLayout(context); - if (screenType == SCREEN_TYPE_ACCENT_COLOR) { - texts[0] = LocaleController.getString("BackgroundMotion", R.string.BackgroundMotion); - texts[1] = LocaleController.getString("BackgroundPattern", R.string.BackgroundPattern); - } else if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { - texts[0] = LocaleController.getString("BackgroundColor", R.string.BackgroundColor); + if (screenType == SCREEN_TYPE_ACCENT_COLOR || currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { + texts[0] = LocaleController.getString("BackgroundColors", R.string.BackgroundColors); texts[1] = LocaleController.getString("BackgroundPattern", R.string.BackgroundPattern); texts[2] = LocaleController.getString("BackgroundMotion", R.string.BackgroundMotion); } else { @@ -1095,36 +1252,91 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro textSizes[a] = (int) Math.ceil(textPaint.measureText(texts[a])); maxTextSize = Math.max(maxTextSize, textSizes[a]); } + + playAnimationView = new FrameLayout(context) { + + private RectF rect = new RectF(); + + @Override + protected void onDraw(Canvas canvas) { + rect.set(0, 0, getMeasuredWidth(), getMeasuredHeight()); + Theme.applyServiceShaderMatrixForView(playAnimationView, backgroundImage); + canvas.drawRoundRect(rect, getMeasuredHeight() / 2, getMeasuredHeight() / 2, Theme.chat_actionBackgroundPaint); + if (Theme.hasGradientService()) { + canvas.drawRoundRect(rect, getMeasuredHeight() / 2, getMeasuredHeight() / 2, Theme.chat_actionBackgroundGradientDarkenPaint); + } + } 
+ }; + playAnimationView.setWillNotDraw(false); + playAnimationView.setVisibility(backgroundGradientColor1 != 0 ? View.VISIBLE : View.INVISIBLE); + playAnimationView.setScaleX(backgroundGradientColor1 != 0 ? 1.0f : 0.1f); + playAnimationView.setScaleY(backgroundGradientColor1 != 0 ? 1.0f : 0.1f); + playAnimationView.setAlpha(backgroundGradientColor1 != 0 ? 1.0f : 0.0f); + playAnimationView.setTag(backgroundGradientColor1 != 0 ? 1 : null); + buttonsContainer.addView(playAnimationView, LayoutHelper.createFrame(48, 48, Gravity.CENTER)); + playAnimationView.setOnClickListener(new View.OnClickListener() { + + int rotation = 0; + + @Override + public void onClick(View v) { + Drawable background = backgroundImage.getBackground(); + playAnimationImageView.setRotation(rotation); + rotation -= 45; + playAnimationImageView.animate().rotationBy(-45).setDuration(300).setInterpolator(CubicBezierInterpolator.EASE_OUT).start(); + if (background instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) background; + motionBackgroundDrawable.switchToNextPosition(); + } else { + onColorsRotate(); + } + } + }); + + playAnimationImageView = new ImageView(context); + playAnimationImageView.setScaleType(ImageView.ScaleType.CENTER); + playAnimationImageView.setImageResource(R.drawable.bg_rotate_large); + playAnimationView.addView(playAnimationImageView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); } for (int a = 0; a < textsCount; a++) { final int num = a; - checkBoxView[a] = new WallpaperCheckBoxView(context, screenType == SCREEN_TYPE_ACCENT_COLOR || !(currentWallpaper instanceof WallpapersListActivity.ColorWallpaper && a == 0)); + checkBoxView[a] = new WallpaperCheckBoxView(context, screenType != SCREEN_TYPE_ACCENT_COLOR && !(currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) || a != 0, backgroundImage); checkBoxView[a].setBackgroundColor(backgroundColor); 
checkBoxView[a].setText(texts[a], textSizes[a], maxTextSize); - if (screenType != SCREEN_TYPE_ACCENT_COLOR) { - if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { - if (a == 1) { - checkBoxView[a].setChecked(selectedPattern != null, false); - } else if (a == 2) { - checkBoxView[a].setChecked(isMotion, false); - } - } else { - checkBoxView[a].setChecked(a == 0 ? isBlurred : isMotion, false); + if (screenType == SCREEN_TYPE_ACCENT_COLOR || currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { + if (a == 1) { + checkBoxView[a].setChecked(selectedPattern != null || accent != null && !TextUtils.isEmpty(accent.patternSlug), false); + } else if (a == 2) { + checkBoxView[a].setChecked(isMotion, false); } + } else { + checkBoxView[a].setChecked(a == 0 ? isBlurred : isMotion, false); } int width = maxTextSize + AndroidUtilities.dp(14 * 2 + 28); FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(width, ViewGroup.LayoutParams.WRAP_CONTENT); - layoutParams.gravity = Gravity.LEFT | Gravity.CENTER_VERTICAL; - layoutParams.leftMargin = a == 1 ? 
width + AndroidUtilities.dp(9) : 0; + layoutParams.gravity = Gravity.CENTER; + if (textsCount == 3) { + if (a == 0 || a == 2) { + layoutParams.leftMargin = width / 2 + AndroidUtilities.dp(10); + } else { + layoutParams.rightMargin = width / 2 + AndroidUtilities.dp(10); + } + } else { + if (a == 1) { + layoutParams.leftMargin = width / 2 + AndroidUtilities.dp(10); + } else { + layoutParams.rightMargin = width / 2 + AndroidUtilities.dp(10); + } + } buttonsContainer.addView(checkBoxView[a], layoutParams); WallpaperCheckBoxView view = checkBoxView[a]; checkBoxView[a].setOnClickListener(v -> { if (buttonsContainer.getAlpha() != 1.0f || patternViewAnimation != null) { return; } - if (screenType == SCREEN_TYPE_ACCENT_COLOR && num == 0 || currentWallpaper instanceof WallpapersListActivity.ColorWallpaper && num == 2) { + if ((screenType == SCREEN_TYPE_ACCENT_COLOR || currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) && num == 2) { view.setChecked(!view.isChecked(), true); isMotion = view.isChecked(); parallaxEffect.setEnabled(isMotion); @@ -1139,17 +1351,17 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro animateMotionChange(); if (patternLayout[1].getVisibility() == View.VISIBLE) { if (screenType == SCREEN_TYPE_ACCENT_COLOR) { - showPatternsView(0, true); + showPatternsView(0, true, true); } else { - showPatternsView(num, patternLayout[num].getVisibility() != View.VISIBLE); + showPatternsView(num, patternLayout[num].getVisibility() != View.VISIBLE, true); } } } else { selectPattern(lastSelectedPattern != null ? 
-1 : 0); if (screenType == SCREEN_TYPE_ACCENT_COLOR) { - showPatternsView(1, true); + showPatternsView(1, true, true); } else { - showPatternsView(num, patternLayout[num].getVisibility() != View.VISIBLE); + showPatternsView(num, patternLayout[num].getVisibility() != View.VISIBLE, true); } } checkBoxView[1].setChecked(selectedPattern != null, true); @@ -1157,11 +1369,14 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro patternsListView.invalidateViews(); updateMotionButton(); } else if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { - showPatternsView(num, patternLayout[num].getVisibility() != View.VISIBLE); - } else { + showPatternsView(num, patternLayout[num].getVisibility() != View.VISIBLE, true); + } else if (screenType != SCREEN_TYPE_ACCENT_COLOR) { view.setChecked(!view.isChecked(), true); if (num == 0) { isBlurred = view.isChecked(); + if (isBlurred) { + backgroundImage.getImageReceiver().setForceCrossfade(true); + } updateBlurred(); } else { isMotion = view.isChecked(); @@ -1175,9 +1390,6 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro checkBoxView[a].setVisibility(View.INVISIBLE); } } - if (screenType == SCREEN_TYPE_ACCENT_COLOR) { - updateCheckboxes(); - } if (screenType == SCREEN_TYPE_ACCENT_COLOR || currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { isBlurred = false; @@ -1202,9 +1414,9 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro patternLayout[a].setWillNotDraw(false); FrameLayout.LayoutParams layoutParams; if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - layoutParams = LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, a == 0 ? 342 : 316, Gravity.LEFT | Gravity.BOTTOM); + layoutParams = LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, a == 0 ? 321 : 316, Gravity.LEFT | Gravity.BOTTOM); } else { - layoutParams = LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, a == 0 ? 
294 : 316, Gravity.LEFT | Gravity.BOTTOM); + layoutParams = LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, a == 0 ? 273 : 316, Gravity.LEFT | Gravity.BOTTOM); } if (a == 0) { layoutParams.height += AndroidUtilities.dp(12) + paddings.top; @@ -1242,8 +1454,10 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } if (num == 0) { backgroundRotation = previousBackgroundRotation; - setBackgroundColor(previousBackgroundGradientColor, 1, true); - setBackgroundColor(previousBackgroundColor, 0, true); + setBackgroundColor(previousBackgroundGradientColor3, 3, true, true); + setBackgroundColor(previousBackgroundGradientColor2, 2, true, true); + setBackgroundColor(previousBackgroundGradientColor1, 1, true, true); + setBackgroundColor(previousBackgroundColor, 0, true, true); } else { selectedPattern = previousSelectedPattern; if (selectedPattern == null) { @@ -1260,7 +1474,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro updateSelectedPattern(true); } if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - showPatternsView(num, false); + showPatternsView(num, false, true); } else { if (selectedPattern == null) { if (isMotion) { @@ -1270,7 +1484,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } updateMotionButton(); } - showPatternsView(0, true); + showPatternsView(0, true, true); } }); @@ -1288,9 +1502,9 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro return; } if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - showPatternsView(num, false); + showPatternsView(num, false, true); } else { - showPatternsView(0, true); + showPatternsView(0, true, true); } }); } @@ -1373,9 +1587,23 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @Override public void onSeekBarDrag(boolean stop, float progress) { currentIntensity = progress; - backgroundImage.getImageReceiver().setAlpha(currentIntensity); + 
backgroundImage.getImageReceiver().setAlpha(Math.abs(currentIntensity)); backgroundImage.invalidate(); patternsListView.invalidateViews(); + if (currentIntensity >= 0) { + if (Build.VERSION.SDK_INT >= 29 && backgroundImage.getBackground() instanceof MotionBackgroundDrawable) { + backgroundImage.getImageReceiver().setBlendMode(BlendMode.SOFT_LIGHT); + } + backgroundImage.getImageReceiver().setGradientBitmap(null); + } else { + if (Build.VERSION.SDK_INT >= 29) { + backgroundImage.getImageReceiver().setBlendMode(null); + } + if (backgroundImage.getBackground() instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable motionBackgroundDrawable = (MotionBackgroundDrawable) backgroundImage.getBackground(); + backgroundImage.getImageReceiver().setGradientBitmap(motionBackgroundDrawable.getBitmap()); + } + } } @Override @@ -1389,7 +1617,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @Override public void setColor(int color, int num, boolean applyNow) { if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - setBackgroundColor(color, num, applyNow); + setBackgroundColor(color, num, applyNow, true); } else { scheduleApplyColor(color, num, applyNow); } @@ -1437,19 +1665,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @Override public void rotateColors() { - if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - backgroundRotation += 45; - while (backgroundRotation >= 360) { - backgroundRotation -= 360; - } - setBackgroundColor(backgroundColor, 0, true); - } else { - accent.backgroundRotation += 45; - while (accent.backgroundRotation >= 360) { - accent.backgroundRotation -= 360; - } - Theme.refreshThemeColors(); - } + onColorsRotate(); } @Override @@ -1475,7 +1691,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro colorPicker.setMaxBrightness(0.8f); } - colorPicker.setType(1, hasChanges(1), false, false, false, 0, false); + colorPicker.setType(1, hasChanges(1), 
false, 1, false, 0, false); colorPicker.setColor(accent.accentColor, 0); } else { patternLayout[a].addView(colorPicker, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.CENTER_HORIZONTAL, 0, 0, 0, 48)); @@ -1491,7 +1707,6 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro if (screenType != SCREEN_TYPE_ACCENT_COLOR && !(currentWallpaper instanceof WallpapersListActivity.ColorWallpaper)) { backgroundImage.getImageReceiver().setCrossfadeWithOldImage(true); - backgroundImage.getImageReceiver().setForceCrossfade(true); } } @@ -1513,7 +1728,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro protected void onDraw(Canvas canvas) { if (!AndroidUtilities.usingHardwareInput) { getLocationInWindow(loc); - if (Build.VERSION.SDK_INT < 21) { + if (Build.VERSION.SDK_INT < 21 && !AndroidUtilities.isTablet()) { loc[1] -= AndroidUtilities.statusBarHeight; } if (actionBar2.getTranslationY() != loc[1]) { @@ -1589,6 +1804,10 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro AndroidUtilities.setViewPagerEdgeEffectColor(viewPager, Theme.getColor(Theme.key_actionBarDefault)); frameLayout.addView(viewPager, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT, Gravity.LEFT | Gravity.TOP, 0, 0, 0, screenType == SCREEN_TYPE_PREVIEW ? 
48 : 0)); + undoView = new UndoView(context, this); + undoView.setAdditionalTranslationY(AndroidUtilities.dp(51)); + frameLayout.addView(undoView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM | Gravity.LEFT, 8, 0, 8, 8)); + if (screenType == SCREEN_TYPE_PREVIEW) { View shadow = new View(context); shadow.setBackgroundColor(Theme.getColor(Theme.key_dialogShadowLine)); @@ -1668,12 +1887,37 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro }); } + if (screenType == SCREEN_TYPE_ACCENT_COLOR && !Theme.hasCustomWallpaper() && accent.backgroundOverrideColor != 0x100000000L) { + selectColorType(2); + } + themeDescriptions = getThemeDescriptionsInternal(); setCurrentImage(true); + updatePlayAnimationView(false); + + if (showColor) { + showPatternsView(0, true, false); + } return fragmentView; } + private void onColorsRotate() { + if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + backgroundRotation += 45; + while (backgroundRotation >= 360) { + backgroundRotation -= 360; + } + setBackgroundColor(backgroundColor, 0, true, true); + } else { + accent.backgroundRotation += 45; + while (accent.backgroundRotation >= 360) { + accent.backgroundRotation -= 360; + } + Theme.refreshThemeColors(); + } + } + private void selectColorType(int id) { if (getParentActivity() == null || colorType == id || patternViewAnimation != null) { return; @@ -1681,18 +1925,86 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro if (id == 2 && (Theme.hasCustomWallpaper() || accent.backgroundOverrideColor == 0x100000000L)) { AlertDialog.Builder builder = new AlertDialog.Builder(getParentActivity()); builder.setTitle(LocaleController.getString("ChangeChatBackground", R.string.ChangeChatBackground)); - builder.setMessage(LocaleController.getString("ChangeWallpaperToColor", R.string.ChangeWallpaperToColor)); - builder.setPositiveButton(LocaleController.getString("Change", R.string.Change), 
(dialog, which) -> { - if (accent.backgroundOverrideColor == 0x100000000L) { - accent.backgroundOverrideColor = 0; - accent.backgroundGradientOverrideColor = 0; - Theme.refreshThemeColors(); - } - removeBackgroundOverride = true; - Theme.resetCustomWallpaper(true); - selectColorType(2); - }); - builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + if (!Theme.hasCustomWallpaper() || Theme.isCustomWallpaperColor()) { + builder.setMessage(LocaleController.getString("ChangeColorToColor", R.string.ChangeColorToColor)); + builder.setPositiveButton(LocaleController.getString("Reset", R.string.Reset), (dialog, which) -> { + if (accent.backgroundOverrideColor == 0x100000000L) { + accent.backgroundOverrideColor = 0; + accent.backgroundGradientOverrideColor1 = 0; + accent.backgroundGradientOverrideColor2 = 0; + accent.backgroundGradientOverrideColor3 = 0; + updatePlayAnimationView(false); + Theme.refreshThemeColors(); + } + removeBackgroundOverride = true; + Theme.resetCustomWallpaper(true); + selectColorType(2); + }); + builder.setNegativeButton(LocaleController.getString("Continue", R.string.Continue), (dialog, which) -> { + if (Theme.isCustomWallpaperColor()) { + accent.backgroundOverrideColor = accent.overrideWallpaper.color; + accent.backgroundGradientOverrideColor1 = accent.overrideWallpaper.gradientColor1; + accent.backgroundGradientOverrideColor2 = accent.overrideWallpaper.gradientColor2; + accent.backgroundGradientOverrideColor3 = accent.overrideWallpaper.gradientColor3; + accent.backgroundRotation = accent.overrideWallpaper.rotation; + accent.patternSlug = accent.overrideWallpaper.slug; + currentIntensity = accent.patternIntensity = accent.overrideWallpaper.intensity; + if (accent.patternSlug != null && !Theme.COLOR_BACKGROUND_SLUG.equals(accent.patternSlug)) { + for (int a = 0, N = patterns.size(); a < N; a++) { + TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) patterns.get(a); + if (wallPaper.pattern) { + if 
(accent.patternSlug.equals(wallPaper.slug)) { + selectedPattern = wallPaper; + break; + } + } + } + } else { + selectedPattern = null; + } + removeBackgroundOverride = true; + checkBoxView[1].setChecked(selectedPattern != null, true); + updatePlayAnimationView(false); + Theme.refreshThemeColors(); + } + Drawable background = backgroundImage.getBackground(); + if (background instanceof MotionBackgroundDrawable) { + MotionBackgroundDrawable drawable = (MotionBackgroundDrawable) background; + drawable.setPatternBitmap(100, null); + if (Theme.getActiveTheme().isDark()) { + if (currentIntensity < 0) { + backgroundImage.getImageReceiver().setGradientBitmap(drawable.getBitmap()); + } + if (intensitySeekBar != null) { + intensitySeekBar.setTwoSided(true); + } + } else if (currentIntensity < 0) { + currentIntensity = -currentIntensity; + } + } + if (intensitySeekBar != null) { + intensitySeekBar.setProgress(currentIntensity); + } + Theme.resetCustomWallpaper(true); + selectColorType(2); + }); + } else { + builder.setMessage(LocaleController.getString("ChangeWallpaperToColor", R.string.ChangeWallpaperToColor)); + builder.setPositiveButton(LocaleController.getString("Change", R.string.Change), (dialog, which) -> { + if (accent.backgroundOverrideColor == 0x100000000L) { + accent.backgroundOverrideColor = 0; + accent.backgroundGradientOverrideColor1 = 0; + accent.backgroundGradientOverrideColor2 = 0; + accent.backgroundGradientOverrideColor3 = 0; + updatePlayAnimationView(false); + Theme.refreshThemeColors(); + } + removeBackgroundOverride = true; + Theme.resetCustomWallpaper(true); + selectColorType(2); + }); + builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); + } showDialog(builder.create()); return; } @@ -1701,29 +2013,53 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro switch (id) { case 1: dropDown.setText(LocaleController.getString("ColorPickerMainColor", R.string.ColorPickerMainColor)); - 
colorPicker.setType(1, hasChanges(1), false, false, false, 0, false); + colorPicker.setType(1, hasChanges(1), false, 1, false, 0, false); colorPicker.setColor(accent.accentColor, 0); break; case 2: dropDown.setText(LocaleController.getString("ColorPickerBackground", R.string.ColorPickerBackground)); int defaultBackground = Theme.getColor(Theme.key_chat_wallpaper); - int defaultGradient = Theme.hasThemeKey(Theme.key_chat_wallpaper_gradient_to) ? Theme.getColor(Theme.key_chat_wallpaper_gradient_to) : 0; + int defaultGradient1 = Theme.hasThemeKey(Theme.key_chat_wallpaper_gradient_to1) ? Theme.getColor(Theme.key_chat_wallpaper_gradient_to1) : 0; + int defaultGradient2 = Theme.hasThemeKey(Theme.key_chat_wallpaper_gradient_to2) ? Theme.getColor(Theme.key_chat_wallpaper_gradient_to2) : 0; + int defaultGradient3 = Theme.hasThemeKey(Theme.key_chat_wallpaper_gradient_to3) ? Theme.getColor(Theme.key_chat_wallpaper_gradient_to3) : 0; - int backgroundGradientOverrideColor = (int) accent.backgroundGradientOverrideColor; - if (backgroundGradientOverrideColor == 0 && accent.backgroundGradientOverrideColor != 0) { - defaultGradient = 0; + int backgroundGradientOverrideColor1 = (int) accent.backgroundGradientOverrideColor1; + if (backgroundGradientOverrideColor1 == 0 && accent.backgroundGradientOverrideColor1 != 0) { + defaultGradient1 = 0; + } + int backgroundGradientOverrideColor2 = (int) accent.backgroundGradientOverrideColor2; + if (backgroundGradientOverrideColor2 == 0 && accent.backgroundGradientOverrideColor2 != 0) { + defaultGradient2 = 0; + } + int backgroundGradientOverrideColor3 = (int) accent.backgroundGradientOverrideColor3; + if (backgroundGradientOverrideColor3 == 0 && accent.backgroundGradientOverrideColor3 != 0) { + defaultGradient3 = 0; } int backgroundOverrideColor = (int) accent.backgroundOverrideColor; - colorPicker.setType(2, hasChanges(2), true, backgroundGradientOverrideColor != 0 || defaultGradient != 0, false, accent.backgroundRotation, false); - 
colorPicker.setColor(backgroundGradientOverrideColor != 0 ? backgroundGradientOverrideColor : defaultGradient, 1); + int count; + if (backgroundGradientOverrideColor1 != 0 || defaultGradient1 != 0) { + if (backgroundGradientOverrideColor3 != 0 || defaultGradient3 != 0) { + count = 4; + } else if (backgroundGradientOverrideColor2 != 0 || defaultGradient2 != 0) { + count = 3; + } else { + count = 2; + } + } else { + count = 1; + } + colorPicker.setType(2, hasChanges(2), true, count, false, accent.backgroundRotation, false); + colorPicker.setColor(backgroundGradientOverrideColor3 != 0 ? backgroundGradientOverrideColor3 : defaultGradient3, 3); + colorPicker.setColor(backgroundGradientOverrideColor2 != 0 ? backgroundGradientOverrideColor2 : defaultGradient2, 2); + colorPicker.setColor(backgroundGradientOverrideColor1 != 0 ? backgroundGradientOverrideColor1 : defaultGradient1, 1); colorPicker.setColor(backgroundOverrideColor != 0 ? backgroundOverrideColor : defaultBackground, 0); messagesAdapter.notifyItemInserted(0); listView2.smoothScrollBy(0, AndroidUtilities.dp(60)); break; case 3: dropDown.setText(LocaleController.getString("ColorPickerMyMessages", R.string.ColorPickerMyMessages)); - colorPicker.setType(2, hasChanges(3), true, accent.myMessagesGradientAccentColor != 0, true, 0, false); + colorPicker.setType(2, hasChanges(3), true, accent.myMessagesGradientAccentColor != 0 ? 2 : 1, true, 0, false); colorPicker.setColor(accent.myMessagesGradientAccentColor, 1); colorPicker.setColor(accent.myMessagesAccentColor != 0 ? 
accent.myMessagesAccentColor : accent.accentColor, 0); break; @@ -1732,7 +2068,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro if (prevType == 2) { messagesAdapter.notifyItemRemoved(0); if (patternLayout[1].getVisibility() == View.VISIBLE) { - showPatternsView(0, true); + showPatternsView(0, true, true); } } if (applyingTheme.isDark()) { @@ -1759,29 +2095,10 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } backgroundImage.setImage(ImageLocation.getForDocument(wallPaper.document), imageFilter, null, null, "jpg", wallPaper.document.size, 1, wallPaper); selectedPattern = wallPaper; - if (screenType == SCREEN_TYPE_ACCENT_COLOR) { - isMotion = checkBoxView[0].isChecked(); - } else { - isMotion = checkBoxView[2].isChecked(); - } + isMotion = checkBoxView[2].isChecked(); updateButtonState(false, true); } - private void updateCheckboxes() { - if (checkBoxView == null || screenType != SCREEN_TYPE_ACCENT_COLOR) { - return; - } - FrameLayout.LayoutParams layoutParams = (FrameLayout.LayoutParams) checkBoxView[1].getLayoutParams(); - checkBoxView[1].setChecked(selectedPattern != null, false); - int offset = (layoutParams.width + AndroidUtilities.dp(9)) / 2; - checkBoxView[1].setTranslationX(selectedPattern != null ? 0 : -offset); - checkBoxView[0].setTranslationX(selectedPattern != null ? 0 : offset); - checkBoxView[0].setChecked(isMotion, false); - checkBoxView[0].setEnabled(selectedPattern != null); - checkBoxView[0].setVisibility(selectedPattern != null ? View.VISIBLE : View.INVISIBLE); - checkBoxView[0].setAlpha(selectedPattern != null ? 
1.0f : 0.0f); - } - private void saveAccentWallpaper() { if (accent == null || TextUtils.isEmpty(accent.patternSlug)) { return; @@ -1794,16 +2111,18 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro Bitmap dst = Bitmap.createBitmap(bitmap.getWidth(), bitmap.getHeight(), Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(dst); - background.setBounds(0, 0, bitmap.getWidth(), bitmap.getHeight()); - background.draw(canvas); + if (!(background instanceof MotionBackgroundDrawable)) { + background.setBounds(0, 0, bitmap.getWidth(), bitmap.getHeight()); + background.draw(canvas); + } Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG); paint.setColorFilter(new PorterDuffColorFilter(patternColor, blendMode)); - paint.setAlpha((int) (255 * currentIntensity)); + paint.setAlpha((int) (255 * Math.abs(currentIntensity))); canvas.drawBitmap(bitmap, 0, 0, paint); FileOutputStream stream = new FileOutputStream(toFile); - dst.compress(Bitmap.CompressFormat.JPEG, 87, stream); + dst.compress(background instanceof MotionBackgroundDrawable ? 
Bitmap.CompressFormat.PNG : Bitmap.CompressFormat.JPEG, 87, stream); stream.close(); } catch (Throwable e) { FileLog.e(e); @@ -1827,21 +2146,34 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro return true; } } - if (backupBackgroundGradientOverrideColor != 0) { - if (backupBackgroundGradientOverrideColor != accent.backgroundGradientOverrideColor) { + if (backupBackgroundGradientOverrideColor1 != 0 || backupBackgroundGradientOverrideColor2 != 0 || backupBackgroundGradientOverrideColor3 != 0) { + if (backupBackgroundGradientOverrideColor1 != accent.backgroundGradientOverrideColor1 || backupBackgroundGradientOverrideColor2 != accent.backgroundGradientOverrideColor2 || backupBackgroundGradientOverrideColor3 != accent.backgroundGradientOverrideColor3) { return true; } } else { - int defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to); - int backgroundGradientOverrideColor = (int) accent.backgroundGradientOverrideColor; - int currentGradient; - if (backgroundGradientOverrideColor == 0 && accent.backgroundGradientOverrideColor != 0) { - currentGradient = 0; - } else { - currentGradient = backgroundGradientOverrideColor == 0 ? 
defaultBackgroundGradient : backgroundGradientOverrideColor; - } - if (currentGradient != defaultBackgroundGradient) { - return true; + for (int a = 0; a < 3; a++) { + int defaultBackgroundGradient; + long backgroundGradientOverrideColorFull; + if (a == 0) { + defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to1); + backgroundGradientOverrideColorFull = accent.backgroundGradientOverrideColor1; + } else if (a == 1) { + defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to2); + backgroundGradientOverrideColorFull = accent.backgroundGradientOverrideColor2; + } else { + defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to3); + backgroundGradientOverrideColorFull = accent.backgroundGradientOverrideColor3; + } + int backgroundGradientOverrideColor = (int) backgroundGradientOverrideColorFull; + int currentGradient; + if (backgroundGradientOverrideColor == 0 && backgroundGradientOverrideColorFull != 0) { + currentGradient = 0; + } else { + currentGradient = backgroundGradientOverrideColor == 0 ? 
defaultBackgroundGradient : backgroundGradientOverrideColor; + } + if (currentGradient != defaultBackgroundGradient) { + return true; + } } } if (accent.backgroundRotation != backupBackgroundRotation) { @@ -1877,7 +2209,10 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro accent.myMessagesAccentColor != backupMyMessagesAccentColor || accent.myMessagesGradientAccentColor != backupMyMessagesGradientAccentColor || accent.backgroundOverrideColor != backupBackgroundOverrideColor || - accent.backgroundGradientOverrideColor != backupBackgroundGradientOverrideColor || + accent.backgroundGradientOverrideColor1 != backupBackgroundGradientOverrideColor1 || + accent.backgroundGradientOverrideColor2 != backupBackgroundGradientOverrideColor2 || + accent.backgroundGradientOverrideColor3 != backupBackgroundGradientOverrideColor3 || + Math.abs(accent.patternIntensity - backupIntensity) > 0.001f || accent.backgroundRotation != backupBackgroundRotation || !accent.patternSlug.equals(selectedPattern != null ? 
selectedPattern.slug : "") || selectedPattern != null && accent.patternMotion != isMotion || @@ -1896,10 +2231,13 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @Override public boolean onFragmentCreate() { - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); - if (screenType == SCREEN_TYPE_ACCENT_COLOR) { + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); + if (screenType == SCREEN_TYPE_ACCENT_COLOR || screenType == SCREEN_TYPE_PREVIEW) { NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didSetNewWallpapper); } + if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND || screenType == SCREEN_TYPE_ACCENT_COLOR) { + Theme.setChangingWallpaper(true); + } if (screenType != SCREEN_TYPE_PREVIEW || accent != null) { if (SharedConfig.getDevicePerformanceClass() == SharedConfig.PERFORMANCE_CLASS_LOW) { int w = Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y); @@ -1927,10 +2265,13 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @Override public void onFragmentDestroy() { - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); if (frameLayout != null && onGlobalLayoutListener != null) { frameLayout.getViewTreeObserver().removeOnGlobalLayoutListener(onGlobalLayoutListener); } + if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND || screenType == SCREEN_TYPE_ACCENT_COLOR) { + AndroidUtilities.runOnUIThread(() -> Theme.setChangingWallpaper(false)); + } if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { if (blurredBitmap != null) { @@ -1938,7 +2279,8 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro blurredBitmap = null; } Theme.applyChatServiceMessageColor(); - } else if (screenType == SCREEN_TYPE_ACCENT_COLOR) { + 
NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewWallpapper); + } else if (screenType == SCREEN_TYPE_ACCENT_COLOR || screenType == SCREEN_TYPE_PREVIEW) { NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetNewWallpapper); } if (screenType != SCREEN_TYPE_PREVIEW || accent != null) { @@ -1949,6 +2291,17 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro super.onFragmentDestroy(); } + @Override + protected void onTransitionAnimationStart(boolean isOpen, boolean backward) { + super.onTransitionAnimationStart(isOpen, backward); + if (!isOpen) { + if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + Theme.applyChatServiceMessageColor(); + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didSetNewWallpapper); + } + } + } + @Override public void onResume() { super.onResume(); @@ -2045,7 +2398,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro @SuppressWarnings("unchecked") @Override public void didReceivedNotification(int id, int account, Object... 
args) { - if (id == NotificationCenter.emojiDidLoad) { + if (id == NotificationCenter.emojiLoaded) { if (listView == null) { return; } @@ -2069,20 +2422,25 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } } } else if (id == NotificationCenter.wallpapersDidLoad) { - ArrayList arrayList = (ArrayList) args[0]; + ArrayList arrayList = (ArrayList) args[0]; patterns.clear(); + patternsDict.clear(); boolean added = false; for (int a = 0, N = arrayList.size(); a < N; a++) { - TLRPC.TL_wallPaper wallPaper = arrayList.get(a); - if (wallPaper.pattern) { - patterns.add(wallPaper); + TLRPC.WallPaper wallPaper = arrayList.get(a); + if (wallPaper instanceof TLRPC.TL_wallPaper && wallPaper.pattern) { + if (wallPaper.document != null && !patternsDict.containsKey(wallPaper.document.id)) { + patterns.add(wallPaper); + patternsDict.put(wallPaper.document.id, wallPaper); + } if (accent != null && accent.patternSlug.equals(wallPaper.slug)) { - selectedPattern = wallPaper; + selectedPattern = (TLRPC.TL_wallPaper) wallPaper; added = true; setCurrentImage(false); updateButtonState(false, false); - updateCheckboxes(); + } else if (accent == null && selectedPattern != null && selectedPattern.slug.equals(wallPaper.slug)) { + added = true; } } } @@ -2094,11 +2452,13 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } long acc = 0; for (int a = 0, N = arrayList.size(); a < N; a++) { - TLRPC.TL_wallPaper wallPaper = arrayList.get(a); - int high_id = (int) (wallPaper.id >> 32); - int lower_id = (int) wallPaper.id; - acc = ((acc * 20261) + 0x80000000L + high_id) % 0x80000000L; - acc = ((acc * 20261) + 0x80000000L + lower_id) % 0x80000000L; + TLRPC.WallPaper wallPaper = arrayList.get(a); + if (wallPaper instanceof TLRPC.TL_wallPaper) { + int high_id = (int) (wallPaper.id >> 32); + int lower_id = (int) wallPaper.id; + acc = ((acc * 20261) + 0x80000000L + high_id) % 0x80000000L; + acc = ((acc * 20261) + 0x80000000L + lower_id) % 
0x80000000L; + } } TLRPC.TL_account_getWallPapers req = new TLRPC.TL_account_getWallPapers(); req.hash = (int) acc; @@ -2106,17 +2466,25 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro if (response instanceof TLRPC.TL_account_wallPapers) { TLRPC.TL_account_wallPapers res = (TLRPC.TL_account_wallPapers) response; patterns.clear(); + patternsDict.clear(); boolean added2 = false; for (int a = 0, N = res.wallpapers.size(); a < N; a++) { + if (!(res.wallpapers.get(a) instanceof TLRPC.TL_wallPaper)) { + continue; + } TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) res.wallpapers.get(a); if (wallPaper.pattern) { - patterns.add(wallPaper); + if (wallPaper.document != null && !patternsDict.containsKey(wallPaper.document.id)) { + patterns.add(wallPaper); + patternsDict.put(wallPaper.document.id, wallPaper); + } if (accent != null && accent.patternSlug.equals(wallPaper.slug)) { selectedPattern = wallPaper; added2 = true; setCurrentImage(false); updateButtonState(false, false); - updateCheckboxes(); + } else if (accent == null && selectedPattern != null && selectedPattern.slug.equals(wallPaper.slug)) { + added2 = true; } } } @@ -2140,7 +2508,6 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro selectedPattern = wallPaper; setCurrentImage(false); updateButtonState(false, false); - updateCheckboxes(); patterns.add(0, selectedPattern); if (patternsAdapter != null) { patternsAdapter.notifyDataSetChanged(); @@ -2163,15 +2530,19 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro return; } Theme.applyPreviousTheme(); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetNewWallpapper); if (screenType == SCREEN_TYPE_ACCENT_COLOR) { - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didSetNewWallpapper); if (editingTheme) { accent.accentColor = backupAccentColor; accent.myMessagesAccentColor = 
backupMyMessagesAccentColor; accent.myMessagesGradientAccentColor = backupMyMessagesGradientAccentColor; accent.backgroundOverrideColor = backupBackgroundOverrideColor; - accent.backgroundGradientOverrideColor = backupBackgroundGradientOverrideColor; + accent.backgroundGradientOverrideColor1 = backupBackgroundGradientOverrideColor1; + accent.backgroundGradientOverrideColor2 = backupBackgroundGradientOverrideColor2; + accent.backgroundGradientOverrideColor3 = backupBackgroundGradientOverrideColor3; accent.backgroundRotation = backupBackgroundRotation; + accent.patternSlug = backupSlug; + accent.patternIntensity = backupIntensity; } Theme.saveThemeAccents(applyingTheme, false, true, false, false); } else { @@ -2200,18 +2571,34 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } else { accent.backgroundOverrideColor = 0; } - if (backupBackgroundGradientOverrideColor != 0) { - accent.backgroundGradientOverrideColor = backupBackgroundGradientOverrideColor; + if (backupBackgroundGradientOverrideColor1 != 0) { + accent.backgroundGradientOverrideColor1 = backupBackgroundGradientOverrideColor1; } else { - accent.backgroundGradientOverrideColor = 0; + accent.backgroundGradientOverrideColor1 = 0; + } + if (backupBackgroundGradientOverrideColor2 != 0) { + accent.backgroundGradientOverrideColor2 = backupBackgroundGradientOverrideColor2; + } else { + accent.backgroundGradientOverrideColor2 = 0; + } + if (backupBackgroundGradientOverrideColor3 != 0) { + accent.backgroundGradientOverrideColor3 = backupBackgroundGradientOverrideColor3; + } else { + accent.backgroundGradientOverrideColor3 = 0; } accent.backgroundRotation = backupBackgroundRotation; if (colorType == 2) { int defaultBackground = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper); - int defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to); - int backgroundGradientOverrideColor = (int) accent.backgroundGradientOverrideColor; + int 
defaultBackgroundGradient1 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to1); + int defaultBackgroundGradient2 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to2); + int defaultBackgroundGradient3 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to3); + int backgroundGradientOverrideColor1 = (int) accent.backgroundGradientOverrideColor1; + int backgroundGradientOverrideColor2 = (int) accent.backgroundGradientOverrideColor2; + int backgroundGradientOverrideColor3 = (int) accent.backgroundGradientOverrideColor3; int backgroundOverrideColor = (int) accent.backgroundOverrideColor; - colorPicker.setColor(backgroundGradientOverrideColor != 0 ? backgroundGradientOverrideColor : defaultBackgroundGradient, 1); + colorPicker.setColor(backgroundGradientOverrideColor3 != 0 ? backgroundGradientOverrideColor3 : defaultBackgroundGradient3, 3); + colorPicker.setColor(backgroundGradientOverrideColor2 != 0 ? backgroundGradientOverrideColor2 : defaultBackgroundGradient2, 2); + colorPicker.setColor(backgroundGradientOverrideColor1 != 0 ? backgroundGradientOverrideColor1 : defaultBackgroundGradient1, 1); colorPicker.setColor(backgroundOverrideColor != 0 ? 
backgroundOverrideColor : defaultBackground, 0); } } @@ -2235,6 +2622,9 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro listView2.invalidateViews(); return; } + if (lastPickedColorNum != -1 && lastPickedColorNum != num) { + applyColorAction.run(); + } lastPickedColor = color; lastPickedColorNum = num; if (applyNow) { @@ -2255,15 +2645,32 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro if (lastPickedColorNum == 0) { accent.backgroundOverrideColor = color; } else { - int defaultGradientColor = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to); - if (color == 0 && defaultGradientColor != 0) { - accent.backgroundGradientOverrideColor = (1L << 32); - } else { - accent.backgroundGradientOverrideColor = color; + if (num == 1) { + int defaultGradientColor = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to1); + if (color == 0 && defaultGradientColor != 0) { + accent.backgroundGradientOverrideColor1 = (1L << 32); + } else { + accent.backgroundGradientOverrideColor1 = color; + } + } else if (num == 2) { + int defaultGradientColor = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to2); + if (color == 0 && defaultGradientColor != 0) { + accent.backgroundGradientOverrideColor2 = (1L << 32); + } else { + accent.backgroundGradientOverrideColor2 = color; + } + } else if (num == 3) { + int defaultGradientColor = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to3); + if (color == 0 && defaultGradientColor != 0) { + accent.backgroundGradientOverrideColor3 = (1L << 32); + } else { + accent.backgroundGradientOverrideColor3 = color; + } } } - Theme.refreshThemeColors(); + Theme.refreshThemeColors(true); colorPicker.setHasChanges(hasChanges(colorType)); + updatePlayAnimationView(true); } else if (colorType == 3) { if (lastPickedColorNum == 0) { accent.myMessagesAccentColor = color; @@ -2409,11 +2816,11 @@ public class ThemePreviewActivity extends 
BaseFragment implements DownloadContro } private void updateMotionButton() { - if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - checkBoxView[selectedPattern != null ? 2 : 0].setVisibility(View.VISIBLE); + if (screenType == SCREEN_TYPE_ACCENT_COLOR || screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { if (selectedPattern == null && currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { checkBoxView[2].setChecked(false, true); } + checkBoxView[selectedPattern != null ? 2 : 0].setVisibility(View.VISIBLE); AnimatorSet animatorSet = new AnimatorSet(); animatorSet.playTogether( ObjectAnimator.ofFloat(checkBoxView[2], View.ALPHA, selectedPattern != null ? 1.0f : 0.0f), @@ -2459,16 +2866,30 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } } - private void showPatternsView(int num, boolean show) { + private void showPatternsView(int num, boolean show, boolean animated) { boolean showMotion = show && num == 1 && selectedPattern != null; if (show) { if (num == 0) { if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { previousBackgroundColor = backgroundColor; - previousBackgroundGradientColor = backgroundGradientColor; + previousBackgroundGradientColor1 = backgroundGradientColor1; + previousBackgroundGradientColor2 = backgroundGradientColor2; + previousBackgroundGradientColor3 = backgroundGradientColor3; previousBackgroundRotation = backupBackgroundRotation; - colorPicker.setType(0, false, true, previousBackgroundGradientColor != 0, false, previousBackgroundRotation, false); - colorPicker.setColor(backgroundGradientColor, 1); + int count; + if (previousBackgroundGradientColor3 != 0) { + count = 4; + } else if (previousBackgroundGradientColor2 != 0) { + count = 3; + } else if (previousBackgroundGradientColor1 != 0) { + count = 2; + } else { + count = 1; + } + colorPicker.setType(0, false, true, count, false, previousBackgroundRotation, false); + colorPicker.setColor(backgroundGradientColor3, 3); + 
colorPicker.setColor(backgroundGradientColor2, 2); + colorPicker.setColor(backgroundGradientColor1, 1); colorPicker.setColor(backgroundColor, 0); } } else { @@ -2486,65 +2907,139 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } } } - if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + if (screenType == SCREEN_TYPE_ACCENT_COLOR || screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { checkBoxView[showMotion ? 2 : 0].setVisibility(View.VISIBLE); } - patternViewAnimation = new AnimatorSet(); - ArrayList animators = new ArrayList<>(); - int otherNum = num == 0 ? 1 : 0; - if (show) { - patternLayout[num].setVisibility(View.VISIBLE); - if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - animators.add(ObjectAnimator.ofFloat(listView2, View.TRANSLATION_Y, -patternLayout[num].getMeasuredHeight() + AndroidUtilities.dp(48))); - animators.add(ObjectAnimator.ofFloat(checkBoxView[2], View.ALPHA, showMotion ? 1.0f : 0.0f)); - animators.add(ObjectAnimator.ofFloat(checkBoxView[0], View.ALPHA, showMotion ? 
0.0f : 1.0f)); - animators.add(ObjectAnimator.ofFloat(backgroundImage, View.ALPHA, 0.0f)); - if (patternLayout[otherNum].getVisibility() == View.VISIBLE) { - animators.add(ObjectAnimator.ofFloat(patternLayout[otherNum], View.ALPHA, 0.0f)); - animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.ALPHA, 0.0f, 1.0f)); - patternLayout[num].setTranslationY(0); - } else { - animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.TRANSLATION_Y, patternLayout[num].getMeasuredHeight(), 0)); - } - } else { - if (num == 1) { - animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.ALPHA, 0.0f, 1.0f)); - } else { - patternLayout[num].setAlpha(1.0f); - animators.add(ObjectAnimator.ofFloat(patternLayout[otherNum], View.ALPHA, 0.0f)); - } - colorPicker.hideKeyboard(); - } - } else { - animators.add(ObjectAnimator.ofFloat(listView2, View.TRANSLATION_Y, 0)); - animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.TRANSLATION_Y, patternLayout[num].getMeasuredHeight())); - animators.add(ObjectAnimator.ofFloat(checkBoxView[0], View.ALPHA, 1.0f)); - animators.add(ObjectAnimator.ofFloat(checkBoxView[2], View.ALPHA, 0.0f)); - animators.add(ObjectAnimator.ofFloat(backgroundImage, View.ALPHA, 1.0f)); + if (num == 1 && !intensitySeekBar.isTwoSided() && currentIntensity < 0) { + currentIntensity = -currentIntensity; + intensitySeekBar.setProgress(currentIntensity); } - patternViewAnimation.playTogether(animators); - patternViewAnimation.addListener(new AnimatorListenerAdapter() { - @Override - public void onAnimationEnd(Animator animation) { - patternViewAnimation = null; - if (show && patternLayout[otherNum].getVisibility() == View.VISIBLE) { - patternLayout[otherNum].setAlpha(1.0f); - patternLayout[otherNum].setVisibility(View.INVISIBLE); - } else if (!show) { - patternLayout[num].setVisibility(View.INVISIBLE); - } - if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - checkBoxView[showMotion ? 
0 : 2].setVisibility(View.INVISIBLE); + if (animated) { + patternViewAnimation = new AnimatorSet(); + ArrayList animators = new ArrayList<>(); + int otherNum = num == 0 ? 1 : 0; + if (show) { + patternLayout[num].setVisibility(View.VISIBLE); + if (screenType == SCREEN_TYPE_ACCENT_COLOR) { + animators.add(ObjectAnimator.ofFloat(listView2, View.TRANSLATION_Y, num == 1 ? -AndroidUtilities.dp(21) : 0)); + animators.add(ObjectAnimator.ofFloat(checkBoxView[2], View.ALPHA, showMotion ? 1.0f : 0.0f)); + animators.add(ObjectAnimator.ofFloat(checkBoxView[0], View.ALPHA, showMotion ? 0.0f : 1.0f)); + if (num == 1) { + animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.ALPHA, 0.0f, 1.0f)); + } else { + patternLayout[num].setAlpha(1.0f); + animators.add(ObjectAnimator.ofFloat(patternLayout[otherNum], View.ALPHA, 0.0f)); + } + colorPicker.hideKeyboard(); + } else if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + animators.add(ObjectAnimator.ofFloat(listView2, View.TRANSLATION_Y, -patternLayout[num].getMeasuredHeight() + AndroidUtilities.dp(48))); + animators.add(ObjectAnimator.ofFloat(checkBoxView[2], View.ALPHA, showMotion ? 1.0f : 0.0f)); + animators.add(ObjectAnimator.ofFloat(checkBoxView[0], View.ALPHA, showMotion ? 
0.0f : 1.0f)); + animators.add(ObjectAnimator.ofFloat(backgroundImage, View.ALPHA, 0.0f)); + if (patternLayout[otherNum].getVisibility() == View.VISIBLE) { + animators.add(ObjectAnimator.ofFloat(patternLayout[otherNum], View.ALPHA, 0.0f)); + animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.ALPHA, 0.0f, 1.0f)); + patternLayout[num].setTranslationY(0); + } else { + animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.TRANSLATION_Y, patternLayout[num].getMeasuredHeight(), 0)); + } } else { if (num == 1) { - patternLayout[otherNum].setAlpha(0.0f); + animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.ALPHA, 0.0f, 1.0f)); + } else { + patternLayout[num].setAlpha(1.0f); + animators.add(ObjectAnimator.ofFloat(patternLayout[otherNum], View.ALPHA, 0.0f)); + } + colorPicker.hideKeyboard(); + } + } else { + animators.add(ObjectAnimator.ofFloat(listView2, View.TRANSLATION_Y, 0)); + animators.add(ObjectAnimator.ofFloat(patternLayout[num], View.TRANSLATION_Y, patternLayout[num].getMeasuredHeight())); + animators.add(ObjectAnimator.ofFloat(checkBoxView[0], View.ALPHA, 1.0f)); + animators.add(ObjectAnimator.ofFloat(checkBoxView[2], View.ALPHA, 0.0f)); + animators.add(ObjectAnimator.ofFloat(backgroundImage, View.ALPHA, 1.0f)); + } + patternViewAnimation.playTogether(animators); + patternViewAnimation.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + patternViewAnimation = null; + if (show && patternLayout[otherNum].getVisibility() == View.VISIBLE) { + patternLayout[otherNum].setAlpha(1.0f); + patternLayout[otherNum].setVisibility(View.INVISIBLE); + } else if (!show) { + patternLayout[num].setVisibility(View.INVISIBLE); + } + if (screenType == SCREEN_TYPE_ACCENT_COLOR || screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + checkBoxView[showMotion ? 
0 : 2].setVisibility(View.INVISIBLE); + } else { + if (num == 1) { + patternLayout[otherNum].setAlpha(0.0f); + } } } + }); + patternViewAnimation.setInterpolator(CubicBezierInterpolator.EASE_OUT); + patternViewAnimation.setDuration(200); + patternViewAnimation.start(); + } else { + int otherNum = num == 0 ? 1 : 0; + if (show) { + patternLayout[num].setVisibility(View.VISIBLE); + if (screenType == SCREEN_TYPE_ACCENT_COLOR) { + listView2.setTranslationY(num == 1 ? -AndroidUtilities.dp(21) : 0); + checkBoxView[2].setAlpha(showMotion ? 1.0f : 0.0f); + checkBoxView[0].setAlpha(showMotion ? 0.0f : 1.0f); + if (num == 1) { + patternLayout[num].setAlpha(1.0f); + } else { + patternLayout[num].setAlpha(1.0f); + patternLayout[otherNum].setAlpha(0.0f); + } + colorPicker.hideKeyboard(); + } else if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + listView2.setTranslationY(-AndroidUtilities.dp(num == 0 ? 343 : 316) + AndroidUtilities.dp(48)); + checkBoxView[2].setAlpha(showMotion ? 1.0f : 0.0f); + checkBoxView[0].setAlpha(showMotion ? 
0.0f : 1.0f); + backgroundImage.setAlpha(0.0f); + if (patternLayout[otherNum].getVisibility() == View.VISIBLE) { + patternLayout[otherNum].setAlpha(0.0f); + patternLayout[num].setAlpha(1.0f); + patternLayout[num].setTranslationY(0); + } else { + patternLayout[num].setTranslationY(0); + } + } else { + if (num == 1) { + patternLayout[num].setAlpha(1.0f); + } else { + patternLayout[num].setAlpha(1.0f); + patternLayout[otherNum].setAlpha(0.0f); + } + colorPicker.hideKeyboard(); + } + } else { + listView2.setTranslationY(0); + patternLayout[num].setTranslationY(patternLayout[num].getMeasuredHeight()); + checkBoxView[0].setAlpha(1.0f); + checkBoxView[2].setAlpha(1.0f); + backgroundImage.setAlpha(1.0f); } - }); - patternViewAnimation.setInterpolator(CubicBezierInterpolator.EASE_OUT); - patternViewAnimation.setDuration(200); - patternViewAnimation.start(); + + if (show && patternLayout[otherNum].getVisibility() == View.VISIBLE) { + patternLayout[otherNum].setAlpha(1.0f); + patternLayout[otherNum].setVisibility(View.INVISIBLE); + } else if (!show) { + patternLayout[num].setVisibility(View.INVISIBLE); + } + if (screenType == SCREEN_TYPE_ACCENT_COLOR || screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + checkBoxView[showMotion ? 
0 : 2].setVisibility(View.INVISIBLE); + } else { + if (num == 1) { + patternLayout[otherNum].setAlpha(0.0f); + } + } + } } private void animateMotionChange() { @@ -2573,39 +3068,169 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro motionAnimation.start(); } - private void setBackgroundColor(int color, int num, boolean applyNow) { + private void updatePlayAnimationView(boolean animated) { + if (Build.VERSION.SDK_INT >= 29) { + int color2 = 0; + float intensity = 0; + if (screenType == SCREEN_TYPE_PREVIEW) { + if (accent != null) { + color2 = (int) accent.backgroundGradientOverrideColor2; + } else { + color2 = Theme.getColor(Theme.key_chat_wallpaper_gradient_to2); + } + } else if (screenType == SCREEN_TYPE_ACCENT_COLOR) { + int defaultBackgroundGradient2 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to2); + int backgroundGradientOverrideColor2 = (int) accent.backgroundGradientOverrideColor2; + if (backgroundGradientOverrideColor2 == 0 && accent.backgroundGradientOverrideColor2 != 0) { + color2 = 0; + } else { + color2 = backgroundGradientOverrideColor2 != 0 ? 
backgroundGradientOverrideColor2 : defaultBackgroundGradient2; + } + } else if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { + WallpapersListActivity.ColorWallpaper colorWallpaper = (WallpapersListActivity.ColorWallpaper) currentWallpaper; + color2 = backgroundGradientColor2; + } + if (color2 != 0 && currentIntensity >= 0) { + backgroundImage.getImageReceiver().setBlendMode(BlendMode.SOFT_LIGHT); + } else { + backgroundImage.getImageReceiver().setBlendMode(null); + } + } + + if (playAnimationView == null) { + return; + } + boolean visible; + if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + visible = backgroundGradientColor1 != 0; + } else if (screenType == SCREEN_TYPE_ACCENT_COLOR) { + int defaultBackgroundGradient1 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to1); + int backgroundGradientOverrideColor1 = (int) accent.backgroundGradientOverrideColor1; + int color1; + if (backgroundGradientOverrideColor1 == 0 && accent.backgroundGradientOverrideColor1 != 0) { + color1 = 0; + } else { + color1 = backgroundGradientOverrideColor1 != 0 ? backgroundGradientOverrideColor1 : defaultBackgroundGradient1; + } + visible = color1 != 0; + } else { + visible = false; + } + boolean wasVisible = playAnimationView.getTag() != null; + playAnimationView.setTag(visible ? 1 : null); + if (wasVisible != visible) { + if (visible) { + playAnimationView.setVisibility(View.VISIBLE); + } + if (playViewAnimator != null) { + playViewAnimator.cancel(); + } + if (animated) { + playViewAnimator = new AnimatorSet(); + playViewAnimator.playTogether( + ObjectAnimator.ofFloat(playAnimationView, View.ALPHA, visible ? 1.0f : 0.0f), + ObjectAnimator.ofFloat(playAnimationView, View.SCALE_X, visible ? 1.0f : 0.0f), + ObjectAnimator.ofFloat(playAnimationView, View.SCALE_Y, visible ? 1.0f : 0.0f), + ObjectAnimator.ofFloat(checkBoxView[0], View.TRANSLATION_X, visible ? 
AndroidUtilities.dp(34) : 0.0f), + ObjectAnimator.ofFloat(checkBoxView[1], View.TRANSLATION_X, visible ? -AndroidUtilities.dp(34) : 0.0f), + ObjectAnimator.ofFloat(checkBoxView[2], View.TRANSLATION_X, visible ? AndroidUtilities.dp(34) : 0.0f)); + playViewAnimator.setDuration(180); + playViewAnimator.addListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + if (playAnimationView.getTag() == null) { + playAnimationView.setVisibility(View.INVISIBLE); + } + playViewAnimator = null; + } + }); + playViewAnimator.setInterpolator(CubicBezierInterpolator.EASE_OUT); + playViewAnimator.start(); + } else { + playAnimationView.setAlpha(visible ? 1.0f : 0.0f); + playAnimationView.setScaleX(visible ? 1.0f : 0.0f); + playAnimationView.setScaleY(visible ? 1.0f : 0.0f); + checkBoxView[0].setTranslationX(visible ? AndroidUtilities.dp(34) : 0.0f); + checkBoxView[1].setTranslationX(visible ? -AndroidUtilities.dp(34) : 0.0f); + checkBoxView[2].setTranslationX(visible ? 
AndroidUtilities.dp(34) : 0.0f); + } + } + } + + private void setBackgroundColor(int color, int num, boolean applyNow, boolean animated) { if (num == 0) { backgroundColor = color; - } else { - backgroundGradientColor = color; + } else if (num == 1) { + backgroundGradientColor1 = color; + } else if (num == 2) { + backgroundGradientColor2 = color; + } else if (num == 3) { + backgroundGradientColor3 = color; } + updatePlayAnimationView(animated); if (checkBoxView != null) { for (int a = 0; a < checkBoxView.length; a++) { if (checkBoxView[a] != null) { - if (num == 0) { - checkBoxView[a].setBackgroundColor(color); - } else { - checkBoxView[a].setBackgroundGradientColor(color); - } + checkBoxView[a].setColor(num, color); } } } - if (backgroundGradientColor != 0) { - GradientDrawable gradientDrawable = new GradientDrawable(BackgroundGradientDrawable.getGradientOrientation(backgroundRotation), new int[]{backgroundColor, backgroundGradientColor}); + if (backgroundGradientColor2 != 0) { + if (intensitySeekBar != null && Theme.getActiveTheme().isDark()) { + intensitySeekBar.setTwoSided(true); + } + Drawable currentBackground = backgroundImage.getBackground(); + MotionBackgroundDrawable motionBackgroundDrawable; + if (currentBackground instanceof MotionBackgroundDrawable) { + motionBackgroundDrawable = (MotionBackgroundDrawable) currentBackground; + } else { + motionBackgroundDrawable = new MotionBackgroundDrawable(); + motionBackgroundDrawable.setParentView(backgroundImage); + if (rotatePreview) { + motionBackgroundDrawable.rotatePreview(); + } + } + motionBackgroundDrawable.setColors(backgroundColor, backgroundGradientColor1, backgroundGradientColor2, backgroundGradientColor3); + backgroundImage.setBackground(motionBackgroundDrawable); + patternColor = motionBackgroundDrawable.getPatternColor(); + checkColor = 0x2D000000; + } else if (backgroundGradientColor1 != 0) { + GradientDrawable gradientDrawable = new 
GradientDrawable(BackgroundGradientDrawable.getGradientOrientation(backgroundRotation), new int[]{backgroundColor, backgroundGradientColor1}); backgroundImage.setBackground(gradientDrawable); - patternColor = AndroidUtilities.getPatternColor(AndroidUtilities.getAverageColor(backgroundColor, backgroundGradientColor)); + patternColor = checkColor = AndroidUtilities.getPatternColor(AndroidUtilities.getAverageColor(backgroundColor, backgroundGradientColor1)); } else { backgroundImage.setBackgroundColor(backgroundColor); - patternColor = AndroidUtilities.getPatternColor(backgroundColor); + patternColor = checkColor = AndroidUtilities.getPatternColor(backgroundColor); } - - if (!Theme.hasThemeKey(Theme.key_chat_serviceBackground)) { - Theme.applyChatServiceMessageColor(new int[]{patternColor, patternColor, patternColor, patternColor}); + if (!Theme.hasThemeKey(Theme.key_chat_serviceBackground) || backgroundImage.getBackground() instanceof MotionBackgroundDrawable) { + Theme.applyChatServiceMessageColor(new int[]{checkColor, checkColor, checkColor, checkColor}, backgroundImage.getBackground()); + } else if (Theme.getCachedWallpaper() instanceof MotionBackgroundDrawable) { + int c = Theme.getColor(Theme.key_chat_serviceBackground); + Theme.applyChatServiceMessageColor(new int[]{c, c, c, c}, backgroundImage.getBackground()); + } + if (playAnimationImageView != null) { + playAnimationImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_serviceText), PorterDuff.Mode.MULTIPLY)); } if (backgroundImage != null) { backgroundImage.getImageReceiver().setColorFilter(new PorterDuffColorFilter(patternColor, blendMode)); - backgroundImage.getImageReceiver().setAlpha(currentIntensity); + backgroundImage.getImageReceiver().setAlpha(Math.abs(currentIntensity)); backgroundImage.invalidate(); + if (Theme.getActiveTheme().isDark() && backgroundImage.getBackground() instanceof MotionBackgroundDrawable) { + if (intensitySeekBar != null) { + 
intensitySeekBar.setTwoSided(true); + } + if (currentIntensity < 0) { + backgroundImage.getImageReceiver().setGradientBitmap(((MotionBackgroundDrawable) backgroundImage.getBackground()).getBitmap()); + } + } else { + backgroundImage.getImageReceiver().setGradientBitmap(null); + if (intensitySeekBar != null) { + intensitySeekBar.setTwoSided(false); + } + } + if (intensitySeekBar != null) { + intensitySeekBar.setProgress(currentIntensity); + } } if (listView2 != null) { listView2.invalidateViews(); @@ -2631,12 +3256,24 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } else if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { WallpapersListActivity.ColorWallpaper wallPaper = (WallpapersListActivity.ColorWallpaper) currentWallpaper; backgroundRotation = wallPaper.gradientRotation; - setBackgroundColor(wallPaper.color, 0, true); - if (wallPaper.gradientColor != 0) { - setBackgroundColor(wallPaper.gradientColor, 1, true); + setBackgroundColor(wallPaper.color, 0, true, false); + if (wallPaper.gradientColor1 != 0) { + setBackgroundColor(wallPaper.gradientColor1, 1, true, false); } + setBackgroundColor(wallPaper.gradientColor2, 2, true, false); + setBackgroundColor(wallPaper.gradientColor3, 3, true, false); if (selectedPattern != null) { backgroundImage.setImage(ImageLocation.getForDocument(selectedPattern.document), imageFilter, null, null, "jpg", selectedPattern.document.size, 1, selectedPattern); + } else if (Theme.DEFAULT_BACKGROUND_SLUG.equals(wallPaper.slug)) { + int w = Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y); + int h = Math.max(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y); + int patternColor; + if (Build.VERSION.SDK_INT >= 29) { + patternColor = 0x57000000; + } else { + patternColor = MotionBackgroundDrawable.getPatternColor(wallPaper.color, wallPaper.gradientColor1, wallPaper.gradientColor2, wallPaper.gradientColor3); + } + 
backgroundImage.setImageBitmap(SvgHelper.getBitmap(R.raw.default_pattern, w, h, patternColor)); } } else if (currentWallpaper instanceof WallpapersListActivity.FileWallpaper) { if (currentWallpaperBitmap != null) { @@ -2674,20 +3311,51 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } int defaultBackground = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper); int backgroundOverrideColor = (int) accent.backgroundOverrideColor; - int color1 = backgroundOverrideColor != 0 ? backgroundOverrideColor : defaultBackground; - int defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to); - int backgroundGradientOverrideColor = (int) accent.backgroundGradientOverrideColor; + int backgroundColor = backgroundOverrideColor != 0 ? backgroundOverrideColor : defaultBackground; + + int defaultBackgroundGradient1 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to1); + int backgroundGradientOverrideColor1 = (int) accent.backgroundGradientOverrideColor1; + int color1; + if (backgroundGradientOverrideColor1 == 0 && accent.backgroundGradientOverrideColor1 != 0) { + color1 = 0; + } else { + color1 = backgroundGradientOverrideColor1 != 0 ? backgroundGradientOverrideColor1 : defaultBackgroundGradient1; + } + int defaultBackgroundGradient2 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to2); + int backgroundGradientOverrideColor2 = (int) accent.backgroundGradientOverrideColor2; int color2; - if (backgroundGradientOverrideColor == 0 && accent.backgroundGradientOverrideColor != 0) { + if (backgroundGradientOverrideColor2 == 0 && accent.backgroundGradientOverrideColor2 != 0) { color2 = 0; } else { - color2 = backgroundGradientOverrideColor != 0 ? backgroundGradientOverrideColor : defaultBackgroundGradient; + color2 = backgroundGradientOverrideColor2 != 0 ? 
backgroundGradientOverrideColor2 : defaultBackgroundGradient2; + } + int defaultBackgroundGradient3 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to3); + int backgroundGradientOverrideColor3 = (int) accent.backgroundGradientOverrideColor3; + int color3; + if (backgroundGradientOverrideColor3 == 0 && accent.backgroundGradientOverrideColor3 != 0) { + color3 = 0; + } else { + color3 = backgroundGradientOverrideColor3 != 0 ? backgroundGradientOverrideColor3 : defaultBackgroundGradient3; } if (!TextUtils.isEmpty(accent.patternSlug) && !Theme.hasCustomWallpaper()) { Drawable backgroundDrawable; if (color2 != 0) { + Drawable currentBackground = backgroundImage.getBackground(); + MotionBackgroundDrawable motionBackgroundDrawable; + if (currentBackground instanceof MotionBackgroundDrawable) { + motionBackgroundDrawable = (MotionBackgroundDrawable) currentBackground; + } else { + motionBackgroundDrawable = new MotionBackgroundDrawable(); + motionBackgroundDrawable.setParentView(backgroundImage); + if (rotatePreview) { + motionBackgroundDrawable.rotatePreview(); + } + } + motionBackgroundDrawable.setColors(backgroundColor, color1, color2, color3); + backgroundDrawable = motionBackgroundDrawable; + } else if (color1 != 0) { final BackgroundGradientDrawable.Orientation orientation = BackgroundGradientDrawable.getGradientOrientation(accent.backgroundRotation); - final BackgroundGradientDrawable backgroundGradientDrawable = new BackgroundGradientDrawable(orientation, new int[]{color1, color2}); + final BackgroundGradientDrawable backgroundGradientDrawable = new BackgroundGradientDrawable(orientation, new int[]{backgroundColor, color1}); final BackgroundGradientDrawable.Listener listener = new BackgroundGradientDrawable.ListenerAdapter() { @Override public void onSizeReady(int width, int height) { @@ -2701,31 +3369,70 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro backgroundGradientDisposable = 
backgroundGradientDrawable.startDithering(BackgroundGradientDrawable.Sizes.ofDeviceScreen(), listener, 100); backgroundDrawable = backgroundGradientDrawable; } else { - backgroundDrawable = new ColorDrawable(color1); + backgroundDrawable = new ColorDrawable(backgroundColor); } backgroundImage.setBackground(backgroundDrawable); if (selectedPattern != null) { backgroundImage.setImage(ImageLocation.getForDocument(selectedPattern.document), imageFilter, null, null, "jpg", selectedPattern.document.size, 1, selectedPattern); } } else { - backgroundImage.setBackground(Theme.getCachedWallpaper()); + Drawable backgroundDrawable = Theme.getCachedWallpaper(); + if (backgroundDrawable != null) { + if (backgroundDrawable instanceof MotionBackgroundDrawable) { + ((MotionBackgroundDrawable) backgroundDrawable).setParentView(backgroundImage); + } + backgroundImage.setBackground(backgroundDrawable); + } } - if (color2 == 0) { - patternColor = AndroidUtilities.getPatternColor(color1); + if (color1 == 0) { + patternColor = checkColor = AndroidUtilities.getPatternColor(backgroundColor); } else { - patternColor = AndroidUtilities.getPatternColor(AndroidUtilities.getAverageColor(color1, color2)); + if (color2 != 0) { + patternColor = MotionBackgroundDrawable.getPatternColor(backgroundColor, color1, color2, color3); + checkColor = 0x2D000000; + } else { + patternColor = checkColor = AndroidUtilities.getPatternColor(AndroidUtilities.getAverageColor(backgroundColor, color1)); + } } if (backgroundImage != null) { backgroundImage.getImageReceiver().setColorFilter(new PorterDuffColorFilter(patternColor, blendMode)); - backgroundImage.getImageReceiver().setAlpha(currentIntensity); + backgroundImage.getImageReceiver().setAlpha(Math.abs(currentIntensity)); backgroundImage.invalidate(); + if (Theme.getActiveTheme().isDark() && backgroundImage.getBackground() instanceof MotionBackgroundDrawable) { + if (intensitySeekBar != null) { + intensitySeekBar.setTwoSided(true); + } + if (currentIntensity < 
0) { + backgroundImage.getImageReceiver().setGradientBitmap(((MotionBackgroundDrawable) backgroundImage.getBackground()).getBitmap()); + } + } else { + backgroundImage.getImageReceiver().setGradientBitmap(null); + if (intensitySeekBar != null) { + intensitySeekBar.setTwoSided(false); + } + } + if (intensitySeekBar != null) { + intensitySeekBar.setProgress(currentIntensity); + } } if (checkBoxView != null) { for (int a = 0; a < checkBoxView.length; a++) { - checkBoxView[a].setBackgroundColor(color1); + checkBoxView[a].setColor(0, backgroundColor); + checkBoxView[a].setColor(1, color1); + checkBoxView[a].setColor(2, color2); + checkBoxView[a].setColor(3, color3); + } + } + if (playAnimationImageView != null) { + playAnimationImageView.setColorFilter(new PorterDuffColorFilter(Theme.getColor(Theme.key_chat_serviceText), PorterDuff.Mode.MULTIPLY)); + } + if (buttonsContainer != null) { + for (int a = 0, N = buttonsContainer.getChildCount(); a < N; a++) { + buttonsContainer.getChildAt(a).invalidate(); } } } + rotatePreview = false; } public static class DialogsAdapter extends RecyclerListView.SelectionAdapter { @@ -3238,16 +3945,6 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro messages.add(messageObject); } } - - message = new TLRPC.TL_message(); - message.message = LocaleController.formatDateChat(date); - message.id = 0; - message.date = date; - messageObject = new MessageObject(currentAccount, message, false, false); - messageObject.type = 10; - messageObject.contentType = 1; - messageObject.isDateObject = true; - messages.add(messageObject); } private boolean hasButtons() { @@ -3292,7 +3989,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro super.onMeasure(MeasureSpec.makeMeasureSpec(MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(60), MeasureSpec.EXACTLY)); } }; - frameLayout.addView(buttonsContainer, 
LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, 34, Gravity.CENTER)); + frameLayout.addView(buttonsContainer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 76, Gravity.CENTER)); view = frameLayout; } view.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.WRAP_CONTENT)); @@ -3321,7 +4018,7 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } else { pinnedBotton = false; } - if (prevType == holder.getItemViewType()) { + if (prevType == holder.getItemViewType() && position + 1 < messages.size()) { MessageObject prevMessage = messages.get(position + 1); pinnedTop = !(prevMessage.messageOwner.reply_markup instanceof TLRPC.TL_replyInlineMarkup) && prevMessage.isOutOwner() == message.isOutOwner() && Math.abs(prevMessage.messageOwner.date - message.messageOwner.date) <= 5 * 60; } else { @@ -3385,8 +4082,8 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } @Override - public int getPatternColor() { - return patternColor; + public int getCheckColor() { + return checkColor; } @Override @@ -3400,12 +4097,32 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } @Override - public int getBackgroundGradientColor() { + public int getBackgroundGradientColor1() { if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { - return backgroundGradientColor; + return backgroundGradientColor1; } - int defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to); - int backgroundGradientOverrideColor = (int) accent.backgroundGradientOverrideColor; + int defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to1); + int backgroundGradientOverrideColor = (int) accent.backgroundGradientOverrideColor1; + return backgroundGradientOverrideColor != 0 ? 
backgroundGradientOverrideColor : defaultBackgroundGradient; + } + + @Override + public int getBackgroundGradientColor2() { + if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + return backgroundGradientColor2; + } + int defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to2); + int backgroundGradientOverrideColor = (int) accent.backgroundGradientOverrideColor2; + return backgroundGradientOverrideColor != 0 ? backgroundGradientOverrideColor : defaultBackgroundGradient; + } + + @Override + public int getBackgroundGradientColor3() { + if (screenType == SCREEN_TYPE_CHANGE_BACKGROUND) { + return backgroundGradientColor3; + } + int defaultBackgroundGradient = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to3); + int backgroundGradientOverrideColor = (int) accent.backgroundGradientOverrideColor3; return backgroundGradientOverrideColor != 0 ? backgroundGradientOverrideColor : defaultBackgroundGradient; } @@ -3416,6 +4133,16 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro } return accent.backgroundRotation; } + + @Override + public float getIntensity() { + return currentIntensity; + } + + @Override + public int getPatternColor() { + return patternColor; + } }); return new RecyclerListView.Holder(view); } @@ -3425,6 +4152,25 @@ public class ThemePreviewActivity extends BaseFragment implements DownloadContro PatternCell view = (PatternCell) holder.itemView; view.setPattern((TLRPC.TL_wallPaper) patterns.get(position)); view.getImageReceiver().setColorFilter(new PorterDuffColorFilter(patternColor, blendMode)); + if (Build.VERSION.SDK_INT >= 29) { + int color2 = 0; + if (screenType == SCREEN_TYPE_ACCENT_COLOR) { + int defaultBackgroundGradient2 = Theme.getDefaultAccentColor(Theme.key_chat_wallpaper_gradient_to2); + int backgroundGradientOverrideColor2 = (int) accent.backgroundGradientOverrideColor2; + if (backgroundGradientOverrideColor2 == 0 && accent.backgroundGradientOverrideColor2 != 
0) { + color2 = 0; + } else { + color2 = backgroundGradientOverrideColor2 != 0 ? backgroundGradientOverrideColor2 : defaultBackgroundGradient2; + } + } else if (currentWallpaper instanceof WallpapersListActivity.ColorWallpaper) { + color2 = backgroundGradientColor2; + } + if (color2 != 0 && currentIntensity >= 0) { + backgroundImage.getImageReceiver().setBlendMode(BlendMode.SOFT_LIGHT); + } else { + view.getImageReceiver().setBlendMode(null); + } + } } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java index 843db0086..e4de3da9c 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java @@ -63,8 +63,7 @@ import org.telegram.messenger.UserObject; import org.telegram.messenger.Utilities; import org.telegram.messenger.voip.EncryptionKeyEmojifier; import org.telegram.messenger.voip.Instance; -import org.telegram.messenger.voip.VideoCameraCapturer; -import org.telegram.messenger.voip.VoIPBaseService; +import org.telegram.messenger.voip.VideoCapturerDevice; import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.AlertDialog; @@ -94,7 +93,7 @@ import org.webrtc.TextureViewRenderer; import java.io.ByteArrayOutputStream; -public class VoIPFragment implements VoIPBaseService.StateListener, NotificationCenter.NotificationCenterDelegate { +public class VoIPFragment implements VoIPService.StateListener, NotificationCenter.NotificationCenterDelegate { private final static int STATE_GONE = 0; private final static int STATE_FULLSCREEN = 1; @@ -110,7 +109,6 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification VoIPToggleButton[] bottomButtons = new VoIPToggleButton[4]; private ViewGroup fragmentView; - private VoIPTextureView callingUserTextureView; private VoIPOverlayBackground overlayBackground; private BackupImageView 
callingUserPhotoView; @@ -131,6 +129,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification private VoIPFloatingLayout callingUserMiniFloatingLayout; private TextureViewRenderer callingUserMiniTextureRenderer; + private VoIPTextureView callingUserTextureView; private VoIPTextureView currentUserTextureView; private AcceptDeclineView acceptDeclineView; @@ -392,7 +391,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification previousState = -1; currentState = VoIPService.getSharedInstance().getCallState(); NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.voipServiceCreated); - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.closeInCallActivity); } @@ -402,7 +401,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification service.unregisterStateListener(this); } NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.voipServiceCreated); - NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiDidLoad); + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.emojiLoaded); NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.closeInCallActivity); } @@ -427,7 +426,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification initRenderers(); VoIPService.getSharedInstance().registerStateListener(this); } - } else if (id == NotificationCenter.emojiDidLoad) { + } else if (id == NotificationCenter.emojiLoaded) { updateKeyView(true); } else if (id == NotificationCenter.closeInCallActivity) { windowView.finish(); @@ -546,9 +545,11 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification 
canvas.drawColor(blackoutColor); } }; - callingUserTextureView = new VoIPTextureView(context, false); + callingUserTextureView = new VoIPTextureView(context, false, true, false, false); callingUserTextureView.renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT); callingUserTextureView.renderer.setEnableHardwareScaler(true); + callingUserTextureView.scaleType = VoIPTextureView.SCALE_TYPE_NONE; + // callingUserTextureView.attachBackgroundRenderer(); frameLayout.addView(callingUserPhotoView); frameLayout.addView(callingUserTextureView); @@ -576,7 +577,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification currentUserCameraFloatingLayout = new VoIPFloatingLayout(context); currentUserCameraFloatingLayout.setRelativePosition(1f, 1f); - currentUserTextureView = new VoIPTextureView(context, true); + currentUserTextureView = new VoIPTextureView(context, true, false); + currentUserTextureView.renderer.setIsCamera(true); + currentUserTextureView.renderer.setUseCameraRotation(true); currentUserCameraFloatingLayout.setOnTapListener(view -> { if (currentUserIsVideo && callingUserIsVideo && System.currentTimeMillis() - lastContentTapTime > 500) { AndroidUtilities.cancelRunOnUIThread(hideUIRunnable); @@ -597,6 +600,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification callingUserMiniTextureRenderer = new TextureViewRenderer(context); callingUserMiniTextureRenderer.setEnableHardwareScaler(true); callingUserMiniTextureRenderer.setIsCamera(false); + callingUserMiniTextureRenderer.setFpsReduction(30); callingUserMiniTextureRenderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT); View backgroundView = new View(context); @@ -841,10 +845,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } private void initRenderers() { - if (VideoCameraCapturer.eglBase == null) { - VideoCameraCapturer.eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); - } - 
currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() { + currentUserTextureView.renderer.init(VideoCapturerDevice.getEglBase().getEglBaseContext(), new RendererCommon.RendererEvents() { @Override public void onFirstFrameRendered() { AndroidUtilities.runOnUIThread(() -> updateViewState()); @@ -856,7 +857,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } }); - callingUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() { + callingUserTextureView.renderer.init(VideoCapturerDevice.getEglBase().getEglBaseContext(), new RendererCommon.RendererEvents() { @Override public void onFirstFrameRendered() { AndroidUtilities.runOnUIThread(() -> updateViewState()); @@ -869,7 +870,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification }, EglBase.CONFIG_PLAIN, new GlRectDrawer()); - callingUserMiniTextureRenderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null); + callingUserMiniTextureRenderer.init(VideoCapturerDevice.getEglBase().getEglBaseContext(), null); } public void switchToPip() { @@ -933,7 +934,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification public void startTransitionFromPiP() { enterFromPiP = true; VoIPService service = VoIPService.getSharedInstance(); - if (service != null && service.getVideoState() == Instance.VIDEO_STATE_ACTIVE) { + if (service != null && service.getVideoState(false) == Instance.VIDEO_STATE_ACTIVE) { callingUserTextureView.setStub(VoIPPiPView.getInstance().callingUserTextureView); currentUserTextureView.setStub(VoIPPiPView.getInstance().currentUserTextureView); } @@ -1177,8 +1178,8 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification VoIPService service = VoIPService.getSharedInstance(); if (service != null) { - callingUserIsVideo = service.getCurrentVideoState() == 
Instance.VIDEO_STATE_ACTIVE; - currentUserIsVideo = service.getVideoState() == Instance.VIDEO_STATE_ACTIVE || service.getVideoState() == Instance.VIDEO_STATE_PAUSED; + callingUserIsVideo = service.getRemoteVideoState() == Instance.VIDEO_STATE_ACTIVE; + currentUserIsVideo = service.getVideoState(false) == Instance.VIDEO_STATE_ACTIVE || service.getVideoState(false) == Instance.VIDEO_STATE_PAUSED; if (currentUserIsVideo && !isVideoCall) { isVideoCall = true; } @@ -1235,19 +1236,17 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification break; case VoIPService.STATE_ESTABLISHED: case VoIPService.STATE_RECONNECTING: - if (previousState != VoIPService.STATE_ESTABLISHED && previousState != VoIPService.STATE_RECONNECTING) { - updateKeyView(animated); - } + updateKeyView(animated); showTimer = true; if (currentState == VoIPService.STATE_RECONNECTING) { showReconnecting = true; } break; - case VoIPBaseService.STATE_ENDED: + case VoIPService.STATE_ENDED: currentUserTextureView.saveCameraLastBitmap(); AndroidUtilities.runOnUIThread(() -> windowView.finish(), 200); break; - case VoIPBaseService.STATE_FAILED: + case VoIPService.STATE_FAILED: statusTextView.setText(LocaleController.getString("VoipFailed", R.string.VoipFailed), false, animated); final VoIPService voipService = VoIPService.getSharedInstance(); final String lastError = voipService != null ? 
voipService.getLastError() : Instance.ERROR_UNKNOWN; @@ -1418,18 +1417,18 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification notificationsLayout.beforeLayoutChanges(); } if ((currentUserIsVideo || callingUserIsVideo) && (currentState == VoIPService.STATE_ESTABLISHED || currentState == VoIPService.STATE_RECONNECTING) && service.getCallDuration() > 500) { - if (service.getCurrentAudioState() == Instance.AUDIO_STATE_MUTED) { + if (service.getRemoteAudioState() == Instance.AUDIO_STATE_MUTED) { notificationsLayout.addNotification(R.drawable.calls_mute_mini, LocaleController.formatString("VoipUserMicrophoneIsOff", R.string.VoipUserMicrophoneIsOff, UserObject.getFirstName(callingUser)), "muted", animated); } else { notificationsLayout.removeNotification("muted"); } - if (service.getCurrentVideoState() == Instance.VIDEO_STATE_INACTIVE) { + if (service.getRemoteVideoState() == Instance.VIDEO_STATE_INACTIVE) { notificationsLayout.addNotification(R.drawable.calls_camera_mini, LocaleController.formatString("VoipUserCameraIsOff", R.string.VoipUserCameraIsOff, UserObject.getFirstName(callingUser)), "video", animated); } else { notificationsLayout.removeNotification("video"); } } else { - if (service.getCurrentAudioState() == Instance.AUDIO_STATE_MUTED) { + if (service.getRemoteAudioState() == Instance.AUDIO_STATE_MUTED) { notificationsLayout.addNotification(R.drawable.calls_mute_mini, LocaleController.formatString("VoipUserMicrophoneIsOff", R.string.VoipUserMicrophoneIsOff, UserObject.getFirstName(callingUser)), "muted", animated); } else { notificationsLayout.removeNotification("muted"); @@ -1668,6 +1667,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } private void updateKeyView(boolean animated) { + if (emojiLoaded) { + return; + } VoIPService service = VoIPService.getSharedInstance(); if (service == null) { return; @@ -1990,10 +1992,10 @@ public class VoIPFragment implements VoIPBaseService.StateListener, 
Notification VoIPService.getSharedInstance().toggleSpeakerphoneOrShowRouteSheet(activity, false); } service.requestVideoCall(); - service.setVideoState(Instance.VIDEO_STATE_ACTIVE); + service.setVideoState(false, Instance.VIDEO_STATE_ACTIVE); } else { currentUserTextureView.saveCameraLastBitmap(); - service.setVideoState(Instance.VIDEO_STATE_INACTIVE); + service.setVideoState(false, Instance.VIDEO_STATE_INACTIVE); } previousState = currentState; @@ -2088,7 +2090,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification if (currentUserIsVideo && (!hasPermissionsToPip || !screenOn)) { VoIPService service = VoIPService.getSharedInstance(); if (service != null) { - service.setVideoState(Instance.VIDEO_STATE_PAUSED); + service.setVideoState(false, Instance.VIDEO_STATE_PAUSED); } } } @@ -2099,8 +2101,8 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } VoIPService service = VoIPService.getSharedInstance(); if (service != null) { - if (service.getVideoState() == Instance.VIDEO_STATE_PAUSED) { - service.setVideoState(Instance.VIDEO_STATE_ACTIVE); + if (service.getVideoState(false) == Instance.VIDEO_STATE_PAUSED) { + service.setVideoState(false, Instance.VIDEO_STATE_ACTIVE); } updateViewState(); } else { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/VoiceMessageEnterTransition.java b/TMessagesProj/src/main/java/org/telegram/ui/VoiceMessageEnterTransition.java index 8e3ffc513..721a29b15 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/VoiceMessageEnterTransition.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/VoiceMessageEnterTransition.java @@ -3,9 +3,7 @@ package org.telegram.ui; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.ValueAnimator; -import android.annotation.SuppressLint; import android.graphics.Canvas; -import android.graphics.Color; import android.graphics.LinearGradient; import android.graphics.Matrix; import 
android.graphics.Paint; @@ -18,8 +16,6 @@ import android.widget.FrameLayout; import androidx.core.graphics.ColorUtils; -import com.google.android.exoplayer2.util.Log; - import org.telegram.messenger.AndroidUtilities; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.Cells.ChatMessageCell; @@ -100,7 +96,7 @@ public class VoiceMessageEnterTransition { int clipBottom = 0; if (getMeasuredHeight() > 0) { clipBottom = (int) (getMeasuredHeight() * (1f - progress) + listViewBottom * progress); - canvas.saveLayerAlpha(0, getMeasuredHeight() - AndroidUtilities.dp(400), getMeasuredWidth(), getMeasuredHeight(), 255, canvas.ALL_SAVE_FLAG); + canvas.saveLayerAlpha(0, getMeasuredHeight() - AndroidUtilities.dp(400), getMeasuredWidth(), getMeasuredHeight(), 255, Canvas.ALL_SAVE_FLAG); } else { canvas.save(); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/WallpapersListActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/WallpapersListActivity.java index 9cecb84c7..504e01f4d 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/WallpapersListActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/WallpapersListActivity.java @@ -18,6 +18,7 @@ import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Bitmap; import android.graphics.Canvas; +import android.graphics.Color; import android.graphics.Paint; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; @@ -42,14 +43,15 @@ import android.widget.TextView; import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.FileLoader; +import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MediaController; import org.telegram.messenger.MessagesController; -import org.telegram.messenger.MessagesStorage; import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import 
org.telegram.messenger.SendMessagesHelper; import org.telegram.messenger.UserConfig; +import org.telegram.messenger.Utilities; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLObject; import org.telegram.tgnet.TLRPC; @@ -65,6 +67,7 @@ import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Cells.WallpaperCell; +import org.telegram.ui.Components.ColorPicker; import org.telegram.ui.Components.CombinedDrawable; import org.telegram.ui.Components.EmptyTextProgressView; import org.telegram.ui.Components.LayoutHelper; @@ -99,7 +102,7 @@ public class WallpapersListActivity extends BaseFragment implements Notification private ColorWallpaper addedColorWallpaper; private FileWallpaper addedFileWallpaper; - private FileWallpaper catsWallpaper; + private ColorWallpaper catsWallpaper; private FileWallpaper themeWallpaper; private RecyclerListView listView; @@ -118,7 +121,9 @@ public class WallpapersListActivity extends BaseFragment implements Notification private String selectedBackgroundSlug = ""; private int selectedColor; - private int selectedGradientColor; + private int selectedGradientColor1; + private int selectedGradientColor2; + private int selectedGradientColor3; private int selectedGradientRotation; private float selectedIntensity; private boolean selectedBackgroundMotion; @@ -126,8 +131,11 @@ public class WallpapersListActivity extends BaseFragment implements Notification private ArrayList allWallPapers = new ArrayList<>(); private HashMap allWallPapersDict = new HashMap<>(); + private HashMap localDict = new HashMap<>(); private ArrayList wallPapers = new ArrayList<>(); + private ArrayList localWallPapers = new ArrayList<>(); private ArrayList patterns = new ArrayList<>(); + private HashMap patternsDict = new HashMap<>(); private boolean loadingWallpapers; private LongSparseArray selectedWallPapers = new LongSparseArray<>(); @@ -136,40 
+144,82 @@ public class WallpapersListActivity extends BaseFragment implements Notification private final static int forward = 3; private final static int delete = 4; - private static final int[] defaultColors = new int[]{ - 0xffffffff, - 0xffd4dfea, - 0xffb3cde1, - 0xff6ab7ea, - 0xff008dd0, - 0xffd3e2da, - 0xffc8e6c9, - 0xffc5e1a5, - 0xff61b06e, - 0xffcdcfaf, - 0xffa7a895, - 0xff7c6f72, - 0xffffd7ae, - 0xffffb66d, - 0xffde8751, - 0xffefd5e0, - 0xffdba1b9, - 0xffffafaf, - 0xfff16a60, - 0xffe8bcea, - 0xff9592ed, - 0xffd9bc60, - 0xffb17e49, - 0xffd5cef7, - 0xffdf506b, - 0xff8bd2cc, - 0xff3c847e, - 0xff22612c, - 0xff244d7c, - 0xff3d3b85, - 0xff65717d, - 0xff18222d, - 0xff000000 + private static final int[][] defaultColorsLight = new int[][]{ + new int[]{0xffdbddbb, 0xff6ba587, 0xffd5d88d, 0xff88b884}, + new int[]{0xff8dc0eb, 0xffb9d1ea, 0xffc6b1ef, 0xffebd7ef}, + new int[]{0xff97beeb, 0xffb1e9ea, 0xffc6b1ef, 0xffefb7dc}, + new int[]{0xff8adbf2, 0xff888dec, 0xffe39fea, 0xff679ced}, + new int[]{0xffb0cdeb, 0xff9fb0ea, 0xffbbead5, 0xffb2e3dd}, + new int[]{0xffdaeac8, 0xffa2b4ff, 0xffeccbff, 0xffb9e2ff}, + new int[]{0xffdceb92, 0xff8fe1d6, 0xff67a3f2, 0xff85d685}, + new int[]{0xffeaa36e, 0xfff0e486, 0xfff29ebf, 0xffe8c06e}, + new int[]{0xffffc3b2, 0xffe2c0ff, 0xffffe7b2, 0xfff8cece}, + new int[]{0xffD3DFEA}, + new int[]{0xffA5C5DB}, + new int[]{0xff6F99C8}, + new int[]{0xffD2E3A9}, + new int[]{0xffA4D48E}, + new int[]{0xff7DBB6E}, + new int[]{0xffE6DDAE}, + new int[]{0xffD5BE91}, + new int[]{0xffCBA479}, + new int[]{0xffEBC0B9}, + new int[]{0xffE0A79D}, + new int[]{0xffC97870}, + new int[]{0xffEBB9C8}, + new int[]{0xffE09DB7}, + new int[]{0xffD27593}, + new int[]{0xffDAC2ED}, + new int[]{0xffD3A5E7}, + new int[]{0xffB587D2}, + new int[]{0xffC2C2ED}, + new int[]{0xffA5A5E7}, + new int[]{0xff7F7FD0}, + new int[]{0xffC2E2ED}, + new int[]{0xffA5D6E7}, + new int[]{0xff7FBAD0}, + new int[]{0xffD6C2B9}, + new int[]{0xff9C8882}, + new int[]{0xff000000} + }; + + private static 
final int[][] defaultColorsDark = new int[][]{ + new int[]{0xff1e3557, 0xff151a36, 0xff1c4352, 0xff2a4541}, + new int[]{0xff1d223f, 0xff1d1832, 0xff1b2943, 0xff141631}, + new int[]{0xff203439, 0xff102028, 0xff1d3c3a, 0xff172635}, + new int[]{0xff1c2731, 0xff1a1c25, 0xff27303b, 0xff1b1b21}, + new int[]{0xff3a1c3a, 0xff24193c, 0xff392e3e, 0xff1a1632}, + new int[]{0xff2c211b, 0xff44332a, 0xff22191f, 0xff3b2d36}, + new int[]{0xff1e3557, 0xff182036, 0xff1c4352, 0xff16263a}, + new int[]{0xff111236, 0xff14424f, 0xff0b2334, 0xff3b315d}, + new int[]{0xff2d4836, 0xff172b19, 0xff364331, 0xff103231}, + new int[]{0xff1D2D3C}, + new int[]{0xff111B26}, + new int[]{0xff0B141E}, + new int[]{0xff1F361F}, + new int[]{0xff131F15}, + new int[]{0xff0E1710}, + new int[]{0xff2F2E27}, + new int[]{0xff2A261F}, + new int[]{0xff191817}, + new int[]{0xff432E30}, + new int[]{0xff2E1C1E}, + new int[]{0xff1F1314}, + new int[]{0xff432E3C}, + new int[]{0xff2E1C28}, + new int[]{0xff1F131B}, + new int[]{0xff3C2E43}, + new int[]{0xff291C2E}, + new int[]{0xff1D1221}, + new int[]{0xff312E43}, + new int[]{0xff1E1C2E}, + new int[]{0xff141221}, + new int[]{0xff2F3F3F}, + new int[]{0xff212D30}, + new int[]{0xff141E20}, + new int[]{0xff272524}, + new int[]{0xff191716}, + new int[]{0xff000000} }; private static final int[] searchColors = new int[]{ @@ -224,36 +274,90 @@ public class WallpapersListActivity extends BaseFragment implements Notification public String slug; public int color; - public int gradientColor; + public int gradientColor1; + public int gradientColor2; + public int gradientColor3; public int gradientRotation; public long patternId; public TLRPC.TL_wallPaper pattern; public float intensity; public File path; public boolean motion; + public boolean isGradient; + public TLRPC.WallPaper parentWallpaper; + public Bitmap defaultCache; + + public String getHash() { + String string = String.valueOf(color) + + gradientColor1 + + gradientColor2 + + gradientColor3 + + gradientRotation + + intensity + 
+ (slug != null ? slug : ""); + return Utilities.MD5(string); + } public ColorWallpaper(String s, int c, int gc, int r) { slug = s; color = c | 0xff000000; - gradientColor = gc == 0 ? 0 : gc | 0xff000000; - gradientRotation = gradientColor != 0 ? r : 45; + gradientColor1 = gc == 0 ? 0 : gc | 0xff000000; + gradientRotation = gradientColor1 != 0 ? r : 0; intensity = 1.0f; } - public ColorWallpaper(String s, int c, int gc, int r, float in, boolean m, File ph) { + public ColorWallpaper(String s, int c, int gc1, int gc2, int gc3) { slug = s; color = c | 0xff000000; - gradientColor = gc == 0 ? 0 : gc | 0xff000000; - gradientRotation = gradientColor != 0 ? r : 45; + gradientColor1 = gc1 == 0 ? 0 : gc1 | 0xff000000; + gradientColor2 = gc2 == 0 ? 0 : gc2 | 0xff000000; + gradientColor3 = gc3 == 0 ? 0 : gc3 | 0xff000000; + intensity = 1.0f; + isGradient = true; + } + + public ColorWallpaper(String s, int c) { + slug = s; + color = c | 0xff000000; + + float[] hsv = new float[3]; + Color.colorToHSV(color, hsv); + if (hsv[0] > 180) { + hsv[0] -= 60; + } else { + hsv[0] += 60; + } + gradientColor1 = Color.HSVToColor(255, hsv); + gradientColor2 = ColorPicker.generateGradientColors(color); + gradientColor3 = ColorPicker.generateGradientColors(gradientColor1); + intensity = 1.0f; + isGradient = true; + } + + public ColorWallpaper(String s, int c, int gc1, int gc2, int gc3, int r, float in, boolean m, File ph) { + slug = s; + color = c | 0xff000000; + gradientColor1 = gc1 == 0 ? 0 : gc1 | 0xff000000; + gradientColor2 = gc2 == 0 ? 0 : gc2 | 0xff000000; + gradientColor3 = gc3 == 0 ? 0 : gc3 | 0xff000000; + gradientRotation = gradientColor1 != 0 ? r : 45; intensity = in; path = ph; motion = m; } public String getUrl() { - String color2 = gradientColor != 0 ? String.format("%02x%02x%02x", (byte) (gradientColor >> 16) & 0xff, (byte) (gradientColor >> 8) & 0xff, (byte) (gradientColor & 0xff)).toLowerCase() : null; + String color2 = gradientColor1 != 0 ? 
String.format("%02x%02x%02x", (byte) (gradientColor1 >> 16) & 0xff, (byte) (gradientColor1 >> 8) & 0xff, (byte) (gradientColor1 & 0xff)).toLowerCase() : null; String color1 = String.format("%02x%02x%02x", (byte) (color >> 16) & 0xff, (byte) (color >> 8) & 0xff, (byte) (color & 0xff)).toLowerCase(); - if (color2 != null) { + String color3 = gradientColor2 != 0 ? String.format("%02x%02x%02x", (byte) (gradientColor2 >> 16) & 0xff, (byte) (gradientColor2 >> 8) & 0xff, (byte) (gradientColor2 & 0xff)).toLowerCase() : null; + String color4 = gradientColor3 != 0 ? String.format("%02x%02x%02x", (byte) (gradientColor3 >> 16) & 0xff, (byte) (gradientColor3 >> 8) & 0xff, (byte) (gradientColor3 & 0xff)).toLowerCase() : null; + if (color2 != null && color3 != null) { + if (color4 != null) { + color1 += "~" + color2 + "~" + color3 + "~" + color4; + } else { + color1 += "~" + color2 + "~" + color3; + } + } else if (color2 != null) { color1 += "-" + color2; if (pattern != null) { color1 += "&rotation=" + AndroidUtilities.getWallpaperRotation(gradientRotation, true); @@ -310,14 +414,20 @@ public class WallpapersListActivity extends BaseFragment implements Notification NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.wallpapersDidLoad); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.didSetNewWallpapper); NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.wallpapersNeedReload); - MessagesStorage.getInstance(currentAccount).getWallpapers(); + getMessagesStorage().getWallpapers(); } else { + boolean darkTheme = Theme.isCurrentThemeNight(); + int[][] defaultColors = darkTheme ? 
defaultColorsDark : defaultColorsLight; for (int a = 0; a < defaultColors.length; a++) { - wallPapers.add(new ColorWallpaper(Theme.COLOR_BACKGROUND_SLUG, defaultColors[a], 0, 45)); + if (defaultColors[a].length == 1) { + wallPapers.add(new ColorWallpaper(Theme.COLOR_BACKGROUND_SLUG, defaultColors[a][0], 0, 45)); + } else { + wallPapers.add(new ColorWallpaper(Theme.COLOR_BACKGROUND_SLUG, defaultColors[a][0], defaultColors[a][1], defaultColors[a][2], defaultColors[a][3])); + } } if (currentType == TYPE_COLOR && patterns.isEmpty()) { NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.wallpapersDidLoad); - MessagesStorage.getInstance(currentAccount).getWallpapers(); + getMessagesStorage().getWallpapers(); } } return super.onFragmentCreate(); @@ -389,20 +499,40 @@ public class WallpapersListActivity extends BaseFragment implements Notification progressDialog.show(); ArrayList ids = new ArrayList<>(); - int[] deleteCount = new int[]{selectedWallPapers.size()}; + int[] deleteCount = new int[]{0}; for (int b = 0; b < selectedWallPapers.size(); b++) { - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) selectedWallPapers.valueAt(b); - + Object object = selectedWallPapers.valueAt(b); + if (object instanceof ColorWallpaper) { + ColorWallpaper colorWallpaper = (ColorWallpaper) object; + if (colorWallpaper.parentWallpaper != null && colorWallpaper.parentWallpaper.id < 0) { + getMessagesStorage().deleteWallpaper(colorWallpaper.parentWallpaper.id); + localWallPapers.remove(colorWallpaper); + localDict.remove(colorWallpaper.getHash()); + } else { + object = colorWallpaper.parentWallpaper; + } + } + if (!(object instanceof TLRPC.WallPaper)) { + continue; + } + deleteCount[0]++; + TLRPC.WallPaper wallPaper = (TLRPC.WallPaper) object; TLRPC.TL_account_saveWallPaper req = new TLRPC.TL_account_saveWallPaper(); req.settings = new TLRPC.TL_wallPaperSettings(); req.unsave = true; - TLRPC.TL_inputWallPaper inputWallPaper = new TLRPC.TL_inputWallPaper(); - 
inputWallPaper.id = wallPaper.id; - inputWallPaper.access_hash = wallPaper.access_hash; - req.wallpaper = inputWallPaper; + if (object instanceof TLRPC.TL_wallPaperNoFile) { + TLRPC.TL_inputWallPaperNoFile inputWallPaper = new TLRPC.TL_inputWallPaperNoFile(); + inputWallPaper.id = wallPaper.id; + req.wallpaper = inputWallPaper; + } else { + TLRPC.TL_inputWallPaper inputWallPaper = new TLRPC.TL_inputWallPaper(); + inputWallPaper.id = wallPaper.id; + inputWallPaper.access_hash = wallPaper.access_hash; + req.wallpaper = inputWallPaper; + } - if (wallPaper.slug.equals(selectedBackgroundSlug)) { + if (wallPaper.slug != null && wallPaper.slug.equals(selectedBackgroundSlug)) { selectedBackgroundSlug = Theme.hasWallpaperFromTheme() ? Theme.THEME_BACKGROUND_SLUG : Theme.DEFAULT_BACKGROUND_SLUG; Theme.getActiveTheme().setOverrideWallpaper(null); Theme.reloadWallpaper(); @@ -411,10 +541,13 @@ public class WallpapersListActivity extends BaseFragment implements Notification ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { deleteCount[0]--; if (deleteCount[0] == 0) { - loadWallpapers(); + loadWallpapers(true); } })); } + if (deleteCount[0] == 0) { + loadWallpapers(true); + } selectedWallPapers.clear(); actionBar.hideActionMode(); actionBar.closeSearchField(); @@ -434,8 +567,15 @@ public class WallpapersListActivity extends BaseFragment implements Notification fragment.setDelegate((fragment1, dids, message, param) -> { StringBuilder fmessage = new StringBuilder(); for (int b = 0; b < selectedWallPapers.size(); b++) { - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) selectedWallPapers.valueAt(b); - String link = AndroidUtilities.getWallPaperUrl(wallPaper); + Object object = selectedWallPapers.valueAt(b); + String link; + if (object instanceof TLRPC.TL_wallPaper) { + link = AndroidUtilities.getWallPaperUrl(object); + } else if (object instanceof ColorWallpaper) { + link = ((ColorWallpaper) 
object).getUrl(); + } else { + continue; + } if (!TextUtils.isEmpty(link)) { if (fmessage.length() > 0) { fmessage.append('\n'); @@ -452,9 +592,11 @@ public class WallpapersListActivity extends BaseFragment implements Notification for (int a = 0; a < dids.size(); a++) { long did = dids.get(a); if (message != null) { - SendMessagesHelper.getInstance(currentAccount).sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(message.toString(), did, null, null, null, true, null, null, null, true, 0, null); + } + if (!TextUtils.isEmpty(fmessage)) { + SendMessagesHelper.getInstance(currentAccount).sendMessage(fmessage.toString(), did, null, null, null, true, null, null, null, true, 0, null); } - SendMessagesHelper.getInstance(currentAccount).sendMessage(fmessage.toString(), did, null, null, null, true, null, null, null, true, 0); } fragment1.finishFragment(); } else { @@ -481,7 +623,7 @@ public class WallpapersListActivity extends BaseFragment implements Notification ChatActivity chatActivity = new ChatActivity(args1); presentFragment(chatActivity, true); - SendMessagesHelper.getInstance(currentAccount).sendMessage(fmessage.toString(), did, null, null, null, true, null, null, null, true, 0); + SendMessagesHelper.getInstance(currentAccount).sendMessage(fmessage.toString(), did, null, null, null, true, null, null, null, true, 0, null); } }); presentFragment(fragment); @@ -617,7 +759,7 @@ public class WallpapersListActivity extends BaseFragment implements Notification progressDialog.setCanCacnel(false); progressDialog.show(); TLRPC.TL_account_resetWallPapers req = new TLRPC.TL_account_resetWallPapers(); - ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(this::loadWallpapers)); + ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> 
loadWallpapers(false))); }); builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null); AlertDialog dialog = builder.create(); @@ -675,7 +817,9 @@ public class WallpapersListActivity extends BaseFragment implements Notification if (overrideWallpaper != null) { selectedBackgroundSlug = overrideWallpaper.slug; selectedColor = overrideWallpaper.color; - selectedGradientColor = overrideWallpaper.gradientColor; + selectedGradientColor1 = overrideWallpaper.gradientColor1; + selectedGradientColor2 = overrideWallpaper.gradientColor2; + selectedGradientColor3 = overrideWallpaper.gradientColor3; selectedGradientRotation = overrideWallpaper.rotation; selectedIntensity = overrideWallpaper.intensity; selectedBackgroundMotion = overrideWallpaper.isMotion; @@ -683,7 +827,9 @@ public class WallpapersListActivity extends BaseFragment implements Notification } else { selectedBackgroundSlug = Theme.hasWallpaperFromTheme() ? Theme.THEME_BACKGROUND_SLUG : Theme.DEFAULT_BACKGROUND_SLUG; selectedColor = 0; - selectedGradientColor = 0; + selectedGradientColor1 = 0; + selectedGradientColor2 = 0; + selectedGradientColor3 = 0; selectedGradientRotation = 45; selectedIntensity = 1.0f; selectedBackgroundMotion = false; @@ -718,12 +864,17 @@ public class WallpapersListActivity extends BaseFragment implements Notification } private boolean onItemLongClick(WallpaperCell view, Object object, int index) { - if (actionBar.isActionModeShowed() || getParentActivity() == null || !(object instanceof TLRPC.TL_wallPaper)) { + Object originalObject = object; + if (object instanceof ColorWallpaper) { + ColorWallpaper colorWallpaper = (ColorWallpaper) object; + object = colorWallpaper.parentWallpaper; + } + if (actionBar.isActionModeShowed() || getParentActivity() == null || !(object instanceof TLRPC.WallPaper)) { return false; } - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) object; + TLRPC.WallPaper wallPaper = (TLRPC.WallPaper) object; 
AndroidUtilities.hideKeyboard(getParentActivity().getCurrentFocus()); - selectedWallPapers.put(wallPaper.id, wallPaper); + selectedWallPapers.put(wallPaper.id, originalObject); selectedMessagesCountTextView.setNumber(1, false); AnimatorSet animatorSet = new AnimatorSet(); ArrayList animators = new ArrayList<>(); @@ -743,14 +894,19 @@ public class WallpapersListActivity extends BaseFragment implements Notification private void onItemClick(WallpaperCell view, Object object, int index) { if (actionBar.isActionModeShowed()) { - if (!(object instanceof TLRPC.TL_wallPaper)) { + Object originalObject = object; + if (object instanceof ColorWallpaper) { + ColorWallpaper colorWallpaper = (ColorWallpaper) object; + object = colorWallpaper.parentWallpaper; + } + if (!(object instanceof TLRPC.WallPaper)) { return; } - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) object; + TLRPC.WallPaper wallPaper = (TLRPC.WallPaper) object; if (selectedWallPapers.indexOfKey(wallPaper.id) >= 0) { selectedWallPapers.remove(wallPaper.id); } else { - selectedWallPapers.put(wallPaper.id, wallPaper); + selectedWallPapers.put(wallPaper.id, originalObject); } if (selectedWallPapers.size() == 0) { actionBar.hideActionMode(); @@ -764,12 +920,13 @@ public class WallpapersListActivity extends BaseFragment implements Notification if (object instanceof TLRPC.TL_wallPaper) { TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) object; if (wallPaper.pattern) { - ColorWallpaper colorWallpaper = new ColorWallpaper(wallPaper.slug, wallPaper.settings.background_color, wallPaper.settings.second_background_color, AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false), wallPaper.settings.intensity / 100.0f, wallPaper.settings.motion, null); + ColorWallpaper colorWallpaper = new ColorWallpaper(wallPaper.slug, wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.third_background_color, wallPaper.settings.fourth_background_color, 
AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false), wallPaper.settings.intensity / 100.0f, wallPaper.settings.motion, null); colorWallpaper.pattern = wallPaper; + colorWallpaper.parentWallpaper = wallPaper; object = colorWallpaper; } } - ThemePreviewActivity wallpaperActivity = new ThemePreviewActivity(object, null); + ThemePreviewActivity wallpaperActivity = new ThemePreviewActivity(object, null, true, false); if (currentType == TYPE_COLOR) { wallpaperActivity.setDelegate(WallpapersListActivity.this::removeSelfFromStack); } @@ -810,27 +967,66 @@ public class WallpapersListActivity extends BaseFragment implements Notification @Override public void didReceivedNotification(int id, int account, Object... args) { if (id == NotificationCenter.wallpapersDidLoad) { - ArrayList arrayList = (ArrayList) args[0]; + ArrayList arrayList = (ArrayList) args[0]; patterns.clear(); + patternsDict.clear(); if (currentType != TYPE_COLOR) { wallPapers.clear(); + localWallPapers.clear(); + localDict.clear(); allWallPapers.clear(); allWallPapersDict.clear(); allWallPapers.addAll(arrayList); } + ArrayList wallPapersToDelete = null; for (int a = 0, N = arrayList.size(); a < N; a++) { - TLRPC.TL_wallPaper wallPaper = arrayList.get(a); - if (wallPaper.pattern) { - patterns.add(wallPaper); + TLRPC.WallPaper wallPaper = arrayList.get(a); + if ("fqv01SQemVIBAAAApND8LDRUhRU".equals(wallPaper.slug)) { + continue; } - if (currentType != TYPE_COLOR && (!wallPaper.pattern || wallPaper.settings != null && wallPaper.settings.background_color != 0)) { + if (wallPaper instanceof TLRPC.TL_wallPaper && !(wallPaper.document instanceof TLRPC.TL_documentEmpty)) { + if (wallPaper.pattern && wallPaper.document != null && !patternsDict.containsKey(wallPaper.document.id)) { + patterns.add(wallPaper); + patternsDict.put(wallPaper.document.id, wallPaper); + } allWallPapersDict.put(wallPaper.slug, wallPaper); - wallPapers.add(wallPaper); + if (currentType != TYPE_COLOR && (!wallPaper.pattern 
|| wallPaper.settings != null && wallPaper.settings.background_color != 0)) { + wallPapers.add(wallPaper); + } + } else if (wallPaper.settings.background_color != 0) { + ColorWallpaper colorWallpaper; + if (wallPaper.settings.second_background_color != 0 && wallPaper.settings.third_background_color != 0) { + colorWallpaper = new ColorWallpaper(null, wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.third_background_color, wallPaper.settings.fourth_background_color); + } else { + colorWallpaper = new ColorWallpaper(null, wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.rotation); + } + colorWallpaper.slug = wallPaper.slug; + colorWallpaper.intensity = wallPaper.settings.intensity / 100.0f; + colorWallpaper.gradientRotation = AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false); + colorWallpaper.parentWallpaper = wallPaper; + if (wallPaper.id < 0) { + String hash = colorWallpaper.getHash(); + if (localDict.containsKey(hash)) { + if (wallPapersToDelete == null) { + wallPapersToDelete = new ArrayList<>(); + } + wallPapersToDelete.add(wallPaper); + continue; + } + localWallPapers.add(colorWallpaper); + localDict.put(hash, colorWallpaper); + } + wallPapers.add(colorWallpaper); + } + } + if (wallPapersToDelete != null) { + for (int a = 0, N = wallPapersToDelete.size(); a < N; a++) { + getMessagesStorage().deleteWallpaper(wallPapersToDelete.get(a).id); } } selectedBackgroundSlug = Theme.getSelectedBackgroundSlug(); fillWallpapersWithCustom(); - loadWallpapers(); + loadWallpapers(false); } else if (id == NotificationCenter.didSetNewWallpapper) { if (listView != null) { listView.invalidateViews(); @@ -839,22 +1035,27 @@ public class WallpapersListActivity extends BaseFragment implements Notification actionBar.closeSearchField(); } } else if (id == NotificationCenter.wallpapersNeedReload) { - MessagesStorage.getInstance(currentAccount).getWallpapers(); + 
getMessagesStorage().getWallpapers(); } } - private void loadWallpapers() { + private void loadWallpapers(boolean force) { long acc = 0; - for (int a = 0, N = allWallPapers.size(); a < N; a++) { - Object object = allWallPapers.get(a); - if (!(object instanceof TLRPC.TL_wallPaper)) { - continue; + if (!force) { + for (int a = 0, N = allWallPapers.size(); a < N; a++) { + Object object = allWallPapers.get(a); + if (!(object instanceof TLRPC.WallPaper)) { + continue; + } + TLRPC.WallPaper wallPaper = (TLRPC.WallPaper) object; + if (wallPaper.id < 0) { + continue; + } + int high_id = (int) (wallPaper.id >> 32); + int lower_id = (int) wallPaper.id; + acc = ((acc * 20261) + 0x80000000L + high_id) % 0x80000000L; + acc = ((acc * 20261) + 0x80000000L + lower_id) % 0x80000000L; } - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) object; - int high_id = (int) (wallPaper.id >> 32); - int lower_id = (int) wallPaper.id; - acc = ((acc * 20261) + 0x80000000L + high_id) % 0x80000000L; - acc = ((acc * 20261) + 0x80000000L + lower_id) % 0x80000000L; } TLRPC.TL_account_getWallPapers req = new TLRPC.TL_account_getWallPapers(); req.hash = (int) acc; @@ -862,28 +1063,50 @@ public class WallpapersListActivity extends BaseFragment implements Notification if (response instanceof TLRPC.TL_account_wallPapers) { TLRPC.TL_account_wallPapers res = (TLRPC.TL_account_wallPapers) response; patterns.clear(); + patternsDict.clear(); if (currentType != TYPE_COLOR) { wallPapers.clear(); allWallPapersDict.clear(); allWallPapers.clear(); allWallPapers.addAll(res.wallpapers); + wallPapers.addAll(localWallPapers); } for (int a = 0, N = res.wallpapers.size(); a < N; a++) { - TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) res.wallpapers.get(a); - allWallPapersDict.put(wallPaper.slug, wallPaper); - if (wallPaper.pattern) { - patterns.add(wallPaper); + TLRPC.WallPaper wallPaper = res.wallpapers.get(a); + if ("fqv01SQemVIBAAAApND8LDRUhRU".equals(wallPaper.slug)) { + continue; } - if (currentType != 
TYPE_COLOR && (!wallPaper.pattern || wallPaper.settings != null && wallPaper.settings.background_color != 0)) { - wallPapers.add(wallPaper); + if (wallPaper instanceof TLRPC.TL_wallPaper && !(wallPaper.document instanceof TLRPC.TL_documentEmpty)) { + allWallPapersDict.put(wallPaper.slug, wallPaper); + if (wallPaper.pattern && wallPaper.document != null && !patternsDict.containsKey(wallPaper.document.id)) { + patterns.add(wallPaper); + patternsDict.put(wallPaper.document.id, wallPaper); + } + if (currentType != TYPE_COLOR && (!wallPaper.pattern || wallPaper.settings != null && wallPaper.settings.background_color != 0)) { + wallPapers.add(wallPaper); + } + } else if (wallPaper.settings.background_color != 0) { + ColorWallpaper colorWallpaper; + if (wallPaper.settings.second_background_color != 0 && wallPaper.settings.third_background_color != 0) { + colorWallpaper = new ColorWallpaper(null, wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.third_background_color, wallPaper.settings.fourth_background_color); + } else { + colorWallpaper = new ColorWallpaper(null, wallPaper.settings.background_color, wallPaper.settings.second_background_color, wallPaper.settings.rotation); + } + colorWallpaper.slug = wallPaper.slug; + colorWallpaper.intensity = wallPaper.settings.intensity / 100.0f; + colorWallpaper.gradientRotation = AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false); + colorWallpaper.parentWallpaper = wallPaper; + wallPapers.add(colorWallpaper); } } fillWallpapersWithCustom(); - MessagesStorage.getInstance(currentAccount).putWallpapers(res.wallpapers, 1); + getMessagesStorage().putWallpapers(res.wallpapers, 1); } if (progressDialog != null) { progressDialog.dismiss(); - listView.smoothScrollToPosition(0); + if (!force) { + listView.smoothScrollToPosition(0); + } } })); ConnectionsManager.getInstance(currentAccount).bindRequestToGuid(reqId, classGuid); @@ -903,69 +1126,135 @@ public class 
WallpapersListActivity extends BaseFragment implements Notification addedFileWallpaper = null; } if (catsWallpaper == null) { - catsWallpaper = new FileWallpaper(Theme.DEFAULT_BACKGROUND_SLUG, R.drawable.background_hd, R.drawable.catstile); + catsWallpaper = new ColorWallpaper(Theme.DEFAULT_BACKGROUND_SLUG, 0xffdbddbb, 0xff6ba587, 0xffd5d88d, 0xff88b884); + catsWallpaper.intensity = 0.34f; + //catsWallpaper.slug = "fqv01SQemVIBAAAApND8LDRUhRU"; } else { wallPapers.remove(catsWallpaper); } if (themeWallpaper != null) { wallPapers.remove(themeWallpaper); } - Object object = allWallPapersDict.get(selectedBackgroundSlug); + Object object = null; + for (int a = 0, N = wallPapers.size(); a < N; a++) { + Object obj = wallPapers.get(a); + if (obj instanceof ColorWallpaper) { + ColorWallpaper colorWallpaper = (ColorWallpaper) obj; + if (colorWallpaper.slug != null) { + colorWallpaper.pattern = (TLRPC.TL_wallPaper) allWallPapersDict.get(colorWallpaper.slug); + } + if ((Theme.COLOR_BACKGROUND_SLUG.equals(colorWallpaper.slug) || colorWallpaper.slug == null || TextUtils.equals(selectedBackgroundSlug, colorWallpaper.slug)) && + selectedColor == colorWallpaper.color && + selectedGradientColor1 == colorWallpaper.gradientColor1 && + selectedGradientColor2 == colorWallpaper.gradientColor2 && + selectedGradientColor3 == colorWallpaper.gradientColor3 && + (selectedGradientColor1 == 0 || selectedGradientRotation == colorWallpaper.gradientRotation)) { + object = colorWallpaper; + break; + } + } else if (obj instanceof TLRPC.TL_wallPaper) { + TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) obj; + if (wallPaper.settings != null && (TextUtils.equals(selectedBackgroundSlug, wallPaper.slug)) && + selectedColor == Theme.getWallpaperColor(wallPaper.settings.background_color) && + selectedGradientColor1 == Theme.getWallpaperColor(wallPaper.settings.second_background_color) && + selectedGradientColor2 == Theme.getWallpaperColor(wallPaper.settings.third_background_color) && + 
selectedGradientColor3 == Theme.getWallpaperColor(wallPaper.settings.fourth_background_color) && + (selectedGradientColor1 == 0 || selectedGradientRotation == AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false)) && + Math.abs(Theme.getThemeIntensity(wallPaper.settings.intensity / 100.0f) - selectedIntensity) <= 0.001f) { + object = wallPaper; + break; + } + } + } TLRPC.TL_wallPaper pattern = null; String slugFinal; - if (object instanceof TLRPC.TL_wallPaper) { + long idFinal; + if (object instanceof TLRPC.WallPaper) { TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) object; Theme.OverrideWallpaperInfo info = Theme.getActiveTheme().overrideWallpaper; - if (wallPaper.settings != null && - (selectedColor != wallPaper.settings.background_color || - selectedGradientColor != wallPaper.settings.second_background_color || - (selectedGradientColor != 0 && selectedGradientRotation != AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false)) && - (wallPaper.settings.intensity - selectedIntensity) > 0.001f)) { + if (wallPaper.settings == null || wallPaper.settings != null && + (selectedColor != Theme.getWallpaperColor(wallPaper.settings.background_color) || + selectedGradientColor1 != Theme.getWallpaperColor(wallPaper.settings.second_background_color) || + selectedGradientColor2 != Theme.getWallpaperColor(wallPaper.settings.third_background_color) || + selectedGradientColor3 != Theme.getWallpaperColor(wallPaper.settings.fourth_background_color) || + (selectedGradientColor1 != 0 && selectedGradientColor2 == 0 && selectedGradientRotation != AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false)) && + Math.abs(Theme.getThemeIntensity(wallPaper.settings.intensity / 100.0f) - selectedIntensity) > 0.001f)) { pattern = wallPaper; object = null; slugFinal = ""; } else { slugFinal = selectedBackgroundSlug; } + idFinal = wallPaper.id; } else { slugFinal = selectedBackgroundSlug; + if (object instanceof ColorWallpaper && 
((ColorWallpaper) object).parentWallpaper != null) { + idFinal = ((ColorWallpaper) object).parentWallpaper.id; + } else { + idFinal = 0; + } } boolean currentThemeDark = Theme.getCurrentTheme().isDark(); - Collections.sort(wallPapers, (o1, o2) -> { - if (o1 instanceof TLRPC.TL_wallPaper && o2 instanceof TLRPC.TL_wallPaper) { - TLRPC.TL_wallPaper wallPaper1 = (TLRPC.TL_wallPaper) o1; - TLRPC.TL_wallPaper wallPaper2 = (TLRPC.TL_wallPaper) o2; - if (slugFinal.equals(wallPaper1.slug)) { - return -1; - } else if (slugFinal.equals(wallPaper2.slug)) { - return 1; + try { + Collections.sort(wallPapers, (o1, o2) -> { + if (o1 instanceof ColorWallpaper) { + o1 = ((ColorWallpaper) o1).parentWallpaper; } - int index1 = allWallPapers.indexOf(wallPaper1); - int index2 = allWallPapers.indexOf(wallPaper2); - if (wallPaper1.dark && wallPaper2.dark || !wallPaper1.dark && !wallPaper2.dark) { - if (index1 > index2) { - return 1; - } else if (index1 < index2) { - return -1; + if (o2 instanceof ColorWallpaper) { + o2 = ((ColorWallpaper) o2).parentWallpaper; + } + if (o1 instanceof TLRPC.WallPaper && o2 instanceof TLRPC.WallPaper) { + TLRPC.WallPaper wallPaper1 = (TLRPC.WallPaper) o1; + TLRPC.WallPaper wallPaper2 = (TLRPC.WallPaper) o2; + if (idFinal != 0) { + if (wallPaper1.id == idFinal) { + return -1; + } else if (wallPaper2.id == idFinal) { + return 1; + } } else { - return 0; + if (slugFinal.equals(wallPaper1.slug)) { + return -1; + } else if (slugFinal.equals(wallPaper2.slug)) { + return 1; + } } - } else if (wallPaper1.dark && !wallPaper2.dark) { - if (currentThemeDark) { - return -1; - } else { - return 1; + if (!currentThemeDark) { + if ("qeZWES8rGVIEAAAARfWlK1lnfiI".equals(wallPaper1.slug)) { + return -1; + } else if ("qeZWES8rGVIEAAAARfWlK1lnfiI".equals(wallPaper2.slug)) { + return 1; + } } - } else { - if (currentThemeDark) { - return 1; + int index1 = allWallPapers.indexOf(wallPaper1); + int index2 = allWallPapers.indexOf(wallPaper2); + if (wallPaper1.dark && wallPaper2.dark 
|| !wallPaper1.dark && !wallPaper2.dark) { + if (index1 > index2) { + return 1; + } else if (index1 < index2) { + return -1; + } else { + return 0; + } + } else if (wallPaper1.dark && !wallPaper2.dark) { + if (currentThemeDark) { + return -1; + } else { + return 1; + } } else { - return -1; + if (currentThemeDark) { + return 1; + } else { + return -1; + } } } - } - return 0; - }); + return 0; + }); + } catch (Exception e) { + FileLog.e(e); + } if (Theme.hasWallpaperFromTheme() && !Theme.isThemeWallpaperPublic()) { if (themeWallpaper == null) { themeWallpaper = new FileWallpaper(Theme.THEME_BACKGROUND_SLUG, -2, -2); @@ -978,27 +1267,37 @@ public class WallpapersListActivity extends BaseFragment implements Notification if (TextUtils.isEmpty(selectedBackgroundSlug) || !Theme.DEFAULT_BACKGROUND_SLUG.equals(selectedBackgroundSlug) && object == null) { if (!Theme.COLOR_BACKGROUND_SLUG.equals(selectedBackgroundSlug) && selectedColor != 0) { if (themeInfo.overrideWallpaper != null) { - addedColorWallpaper = new ColorWallpaper(selectedBackgroundSlug, selectedColor, selectedGradientColor, selectedGradientRotation, selectedIntensity, selectedBackgroundMotion, new File(ApplicationLoader.getFilesDirFixed(), themeInfo.overrideWallpaper.fileName)); + addedColorWallpaper = new ColorWallpaper(selectedBackgroundSlug, selectedColor, selectedGradientColor1, selectedGradientColor2, selectedGradientColor3, selectedGradientRotation, selectedIntensity, selectedBackgroundMotion, new File(ApplicationLoader.getFilesDirFixed(), themeInfo.overrideWallpaper.fileName)); addedColorWallpaper.pattern = pattern; wallPapers.add(0, addedColorWallpaper); } } else if (selectedColor != 0) { - addedColorWallpaper = new ColorWallpaper(selectedBackgroundSlug, selectedColor, selectedGradientColor, selectedGradientRotation); + if (selectedGradientColor1 != 0 && selectedGradientColor2 != 0) { + addedColorWallpaper = new ColorWallpaper(selectedBackgroundSlug, selectedColor, selectedGradientColor1, 
selectedGradientColor2, selectedGradientColor3); + addedColorWallpaper.gradientRotation = selectedGradientRotation; + } else { + addedColorWallpaper = new ColorWallpaper(selectedBackgroundSlug, selectedColor, selectedGradientColor1, selectedGradientRotation); + } wallPapers.add(0, addedColorWallpaper); } else { - if (themeInfo.overrideWallpaper != null) { + if (themeInfo.overrideWallpaper != null && !allWallPapersDict.containsKey(selectedBackgroundSlug)) { addedFileWallpaper = new FileWallpaper(selectedBackgroundSlug, new File(ApplicationLoader.getFilesDirFixed(), themeInfo.overrideWallpaper.fileName), new File(ApplicationLoader.getFilesDirFixed(), themeInfo.overrideWallpaper.originalFileName)); - wallPapers.add(0, addedFileWallpaper); + wallPapers.add(themeWallpaper != null ? 1 : 0, addedFileWallpaper); } } - } else if (selectedColor != 0 && Theme.COLOR_BACKGROUND_SLUG.equals(selectedBackgroundSlug)) { - addedColorWallpaper = new ColorWallpaper(selectedBackgroundSlug, selectedColor, selectedGradientColor, selectedGradientRotation); + } else if (object == null && selectedColor != 0 && Theme.COLOR_BACKGROUND_SLUG.equals(selectedBackgroundSlug)) { + if (selectedGradientColor1 != 0 && selectedGradientColor2 != 0 && selectedGradientColor3 != 0) { + addedColorWallpaper = new ColorWallpaper(selectedBackgroundSlug, selectedColor, selectedGradientColor1, selectedGradientColor2, selectedGradientColor3); + addedColorWallpaper.gradientRotation = selectedGradientRotation; + } else { + addedColorWallpaper = new ColorWallpaper(selectedBackgroundSlug, selectedColor, selectedGradientColor1, selectedGradientRotation); + } wallPapers.add(0, addedColorWallpaper); } - if (Theme.DEFAULT_BACKGROUND_SLUG.equals(selectedBackgroundSlug)) { + if (Theme.DEFAULT_BACKGROUND_SLUG.equals(selectedBackgroundSlug) || wallPapers.isEmpty()) { wallPapers.add(0, catsWallpaper); } else { - wallPapers.add(catsWallpaper); + wallPapers.add(1, catsWallpaper); } updateRows(); } @@ -1215,7 +1514,7 @@ public 
class WallpapersListActivity extends BaseFragment implements Notification TLRPC.TL_contacts_resolvedPeer res = (TLRPC.TL_contacts_resolvedPeer) response; MessagesController.getInstance(currentAccount).putUsers(res.users, false); MessagesController.getInstance(currentAccount).putChats(res.chats, false); - MessagesStorage.getInstance(currentAccount).putUsersAndChats(res.users, res.chats, true, true); + getMessagesStorage().putUsersAndChats(res.users, res.chats, true, true); String str = lastSearchImageString; lastSearchImageString = null; searchImages(str, "", false); @@ -1350,7 +1649,7 @@ public class WallpapersListActivity extends BaseFragment implements Notification view = new WallpaperCell(mContext) { @Override protected void onWallpaperClick(Object wallPaper, int index) { - presentFragment(new ThemePreviewActivity(wallPaper, null)); + presentFragment(new ThemePreviewActivity(wallPaper, null, true, false)); } }; break; @@ -1507,7 +1806,7 @@ public class WallpapersListActivity extends BaseFragment implements Notification if (position == uploadImageRow) { textCell.setTextAndIcon(LocaleController.getString("SelectFromGallery", R.string.SelectFromGallery), R.drawable.profile_photos, true); } else if (position == setColorRow) { - textCell.setTextAndIcon(LocaleController.getString("SetColor", R.string.SetColor), R.drawable.menu_palette, false); + textCell.setTextAndIcon(LocaleController.getString("SetColor", R.string.SetColor), R.drawable.menu_palette, true); } else if (position == resetRow) { textCell.setText(LocaleController.getString("ResetChatBackgrounds", R.string.ResetChatBackgrounds), false); } @@ -1527,35 +1826,57 @@ public class WallpapersListActivity extends BaseFragment implements Notification for (int a = 0; a < columnsCount; a++) { int p = position + a; Object object = p < wallPapers.size() ? 
wallPapers.get(p) : null; - String slugFinal; + Object selectedWallpaper; long id; if (object instanceof TLRPC.TL_wallPaper) { TLRPC.TL_wallPaper wallPaper = (TLRPC.TL_wallPaper) object; Theme.OverrideWallpaperInfo info = Theme.getActiveTheme().overrideWallpaper; - if (selectedBackgroundSlug.equals(wallPaper.slug) && wallPaper.settings != null && - (selectedColor != wallPaper.settings.background_color || - selectedGradientColor != wallPaper.settings.second_background_color || - (selectedGradientColor != 0 && selectedGradientRotation != AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false)) && - (wallPaper.settings.intensity - selectedIntensity) > 0.001f)) { - slugFinal = ""; + if (!selectedBackgroundSlug.equals(wallPaper.slug) || + selectedBackgroundSlug.equals(wallPaper.slug) && wallPaper.settings != null && + (selectedColor != Theme.getWallpaperColor(wallPaper.settings.background_color) || + selectedGradientColor1 != Theme.getWallpaperColor(wallPaper.settings.second_background_color) || + selectedGradientColor2 != Theme.getWallpaperColor(wallPaper.settings.third_background_color) || + selectedGradientColor3 != Theme.getWallpaperColor(wallPaper.settings.fourth_background_color) || + (selectedGradientColor1 != 0 && selectedGradientColor2 == 0 && selectedGradientRotation != AndroidUtilities.getWallpaperRotation(wallPaper.settings.rotation, false)) && + wallPaper.pattern && Math.abs(Theme.getThemeIntensity(wallPaper.settings.intensity / 100.0f) - selectedIntensity) > 0.001f)) { + selectedWallpaper = null; } else { - slugFinal = selectedBackgroundSlug; + selectedWallpaper = wallPaper; } id = wallPaper.id; } else { if (object instanceof ColorWallpaper) { ColorWallpaper colorWallpaper = (ColorWallpaper) object; - if (colorWallpaper.color != selectedColor || colorWallpaper.gradientColor != selectedGradientColor) { - slugFinal = ""; + if (Theme.DEFAULT_BACKGROUND_SLUG.equals(colorWallpaper.slug) && selectedBackgroundSlug.equals(colorWallpaper.slug)) { 
+ selectedWallpaper = object; + } else if (colorWallpaper.color != selectedColor || colorWallpaper.gradientColor1 != selectedGradientColor1 || colorWallpaper.gradientColor2 != selectedGradientColor2 || colorWallpaper.gradientColor3 != selectedGradientColor3 || selectedGradientColor1 != 0 && colorWallpaper.gradientRotation != selectedGradientRotation) { + selectedWallpaper = null; } else { - slugFinal = selectedBackgroundSlug; + if (Theme.COLOR_BACKGROUND_SLUG.equals(selectedBackgroundSlug) && colorWallpaper.slug != null || !Theme.COLOR_BACKGROUND_SLUG.equals(selectedBackgroundSlug) && (!TextUtils.equals(selectedBackgroundSlug, colorWallpaper.slug) || (int) (colorWallpaper.intensity * 100) != (int) (selectedIntensity * 100))) { + selectedWallpaper = null; + } else { + selectedWallpaper = object; + } } + if (colorWallpaper.parentWallpaper != null) { + id = colorWallpaper.parentWallpaper.id; + } else { + id = 0; + } + } else if (object instanceof FileWallpaper) { + FileWallpaper fileWallpaper = (FileWallpaper) object; + if (selectedBackgroundSlug.equals(fileWallpaper.slug)) { + selectedWallpaper = object; + } else { + selectedWallpaper = null; + } + id = 0; } else { - slugFinal = selectedBackgroundSlug; + selectedWallpaper = null; + id = 0; } - id = 0; } - wallpaperCell.setWallpaper(currentType, a, object, slugFinal, null, false); + wallpaperCell.setWallpaper(currentType, a, object, selectedWallpaper, null, false); if (actionBar.isActionModeShowed()) { wallpaperCell.setChecked(a, selectedWallPapers.indexOfKey(id) >= 0, !scrolling); } else { diff --git a/TMessagesProj/src/main/java/org/webrtc/AndroidVideoDecoder.java b/TMessagesProj/src/main/java/org/webrtc/AndroidVideoDecoder.java index 3cd7621b6..bdc8ee86c 100644 --- a/TMessagesProj/src/main/java/org/webrtc/AndroidVideoDecoder.java +++ b/TMessagesProj/src/main/java/org/webrtc/AndroidVideoDecoder.java @@ -540,7 +540,6 @@ class AndroidVideoDecoder implements VideoDecoder, VideoSink { final int vEnd = vPos + uvStride * 
chromaHeight; VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height); - try { //don't crash buffer.limit(yEnd); buffer.position(yPos); @@ -596,13 +595,21 @@ class AndroidVideoDecoder implements VideoDecoder, VideoSink { } // Compare to existing width, height, and save values under the dimension lock. synchronized (dimensionLock) { - if (hasDecodedFirstFrame && (width != newWidth || height != newHeight)) { - stopOnOutputThread(new RuntimeException("Unexpected size change. Configured " + width + "*" - + height + ". New " + newWidth + "*" + newHeight)); - return; + if (newWidth != width || newHeight != height) { + if (hasDecodedFirstFrame) { + stopOnOutputThread(new RuntimeException("Unexpected size change. " + + "Configured " + width + "*" + height + ". " + + "New " + newWidth + "*" + newHeight)); + return; + } else if (newWidth <= 0 || newHeight <= 0) { + Logging.w(TAG, + "Unexpected format dimensions. Configured " + width + "*" + height + ". " + + "New " + newWidth + "*" + newHeight + ". Skip it"); + return; + } + width = newWidth; + height = newHeight; } - width = newWidth; - height = newHeight; } // Note: texture mode ignores colorFormat. 
Hence, if the texture helper is non-null, skip diff --git a/TMessagesProj/src/main/java/org/webrtc/Camera1Session.java b/TMessagesProj/src/main/java/org/webrtc/Camera1Session.java index ab8fc0a5d..9c35917c6 100644 --- a/TMessagesProj/src/main/java/org/webrtc/Camera1Session.java +++ b/TMessagesProj/src/main/java/org/webrtc/Camera1Session.java @@ -322,6 +322,7 @@ class Camera1Session implements CameraSession { private int getFrameOrientation() { int rotation = orientationHelper.getOrientation(); + OrientationHelper.cameraOrientation = rotation; if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { rotation = 360 - rotation; } diff --git a/TMessagesProj/src/main/java/org/webrtc/Camera2Enumerator.java b/TMessagesProj/src/main/java/org/webrtc/Camera2Enumerator.java index 542a23edc..b646226d9 100644 --- a/TMessagesProj/src/main/java/org/webrtc/Camera2Enumerator.java +++ b/TMessagesProj/src/main/java/org/webrtc/Camera2Enumerator.java @@ -123,7 +123,7 @@ public class Camera2Enumerator implements CameraEnumerator { // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a // catch statement with an Exception from a newer API, even if the code is never executed. 
// https://code.google.com/p/android/issues/detail?id=209129 - } catch (/* CameraAccessException */ AndroidException e) { + } catch (/* CameraAccessException */ Throwable e) { Logging.e(TAG, "Camera access exception: " + e); return false; } diff --git a/TMessagesProj/src/main/java/org/webrtc/Camera2Session.java b/TMessagesProj/src/main/java/org/webrtc/Camera2Session.java index c62a5a47b..0cc319d9f 100644 --- a/TMessagesProj/src/main/java/org/webrtc/Camera2Session.java +++ b/TMessagesProj/src/main/java/org/webrtc/Camera2Session.java @@ -415,6 +415,7 @@ class Camera2Session implements CameraSession { private int getFrameOrientation() { int rotation = orientationHelper.getOrientation(); + OrientationHelper.cameraOrientation = rotation; if (isCameraFrontFacing) { rotation = 360 - rotation; } diff --git a/TMessagesProj/src/main/java/org/webrtc/EglBase.java b/TMessagesProj/src/main/java/org/webrtc/EglBase.java index 984ec4ad3..cb1cf2d3f 100644 --- a/TMessagesProj/src/main/java/org/webrtc/EglBase.java +++ b/TMessagesProj/src/main/java/org/webrtc/EglBase.java @@ -21,6 +21,7 @@ import javax.microedition.khronos.egl.EGL10; * and an EGLSurface. */ public interface EglBase { + // EGL wrapper for an actual EGLContext. public interface Context { public final static long NO_CONTEXT = 0; @@ -242,7 +243,7 @@ public interface EglBase { int surfaceHeight(); - void releaseSurface(); + void releaseSurface(boolean background); void release(); @@ -251,7 +252,14 @@ public interface EglBase { // Detach the current EGL context, so that it can be made current on another thread. void detachCurrent(); - void swapBuffers(); + void swapBuffers(boolean background); - void swapBuffers(long presentationTimeStampNs); + void swapBuffers(long presentationTimeStampNs,boolean background); + + // Create EGLSurface from the Android Surface. 
+ void createBackgroundSurface(SurfaceTexture surface); + + void makeBackgroundCurrent(); + + boolean hasBackgroundSurface(); } diff --git a/TMessagesProj/src/main/java/org/webrtc/EglBase10Impl.java b/TMessagesProj/src/main/java/org/webrtc/EglBase10Impl.java index 71bf6106d..e1980554f 100644 --- a/TMessagesProj/src/main/java/org/webrtc/EglBase10Impl.java +++ b/TMessagesProj/src/main/java/org/webrtc/EglBase10Impl.java @@ -13,9 +13,12 @@ package org.webrtc; import android.graphics.Canvas; import android.graphics.Rect; import android.graphics.SurfaceTexture; + import androidx.annotation.Nullable; + import android.view.Surface; import android.view.SurfaceHolder; + import javax.microedition.khronos.egl.EGL10; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.egl.EGLContext; @@ -27,306 +30,392 @@ import javax.microedition.khronos.egl.EGLSurface; * and an EGLSurface. */ class EglBase10Impl implements EglBase10 { - private static final String TAG = "EglBase10Impl"; - // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION. - private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098; + private static final String TAG = "EglBase10Impl"; + // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION. + private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098; - private final EGL10 egl; - private EGLContext eglContext; - @Nullable private EGLConfig eglConfig; - private EGLDisplay eglDisplay; - private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE; + private final EGL10 egl; + private EGLContext eglContext; + @Nullable + private EGLConfig eglConfig; + private EGLDisplay eglDisplay; + private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE; + private EGLSurface eglBackgroundSurface = EGL10.EGL_NO_SURFACE; - // EGL wrapper for an actual EGLContext. - private static class Context implements EglBase10.Context { - private final EGLContext eglContext; + // EGL wrapper for an actual EGLContext. 
+ private static class Context implements EglBase10.Context { + private final EGL10 egl; + private final EGLContext eglContext; + private final EGLConfig eglContextConfig; - @Override - public EGLContext getRawContext() { - return eglContext; + @Override + public EGLContext getRawContext() { + return eglContext; + } + + @Override + public long getNativeEglContext() { + EGLContext previousContext = egl.eglGetCurrentContext(); + EGLDisplay currentDisplay = egl.eglGetCurrentDisplay(); + EGLSurface previousDrawSurface = egl.eglGetCurrentSurface(EGL10.EGL_DRAW); + EGLSurface previousReadSurface = egl.eglGetCurrentSurface(EGL10.EGL_READ); + EGLSurface tempEglSurface = null; + + if (currentDisplay == EGL10.EGL_NO_DISPLAY) { + currentDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + } + + try { + if (previousContext != eglContext) { + int[] surfaceAttribs = {EGL10.EGL_WIDTH, 1, EGL10.EGL_HEIGHT, 1, EGL10.EGL_NONE}; + tempEglSurface = + egl.eglCreatePbufferSurface(currentDisplay, eglContextConfig, surfaceAttribs); + if (!egl.eglMakeCurrent(currentDisplay, tempEglSurface, tempEglSurface, eglContext)) { + throw new RuntimeException( + "Failed to make temporary EGL surface active: " + egl.eglGetError()); + } + } + + return nativeGetCurrentNativeEGLContext(); + } finally { + if (tempEglSurface != null) { + egl.eglMakeCurrent( + currentDisplay, previousDrawSurface, previousReadSurface, previousContext); + egl.eglDestroySurface(currentDisplay, tempEglSurface); + } + } + } + + public Context(EGL10 egl, EGLContext eglContext, EGLConfig eglContextConfig) { + this.egl = egl; + this.eglContext = eglContext; + this.eglContextConfig = eglContextConfig; + } + } + + // Create a new context with the specified config type, sharing data with sharedContext. 
+ public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) { + this.egl = (EGL10) EGLContext.getEGL(); + eglDisplay = getEglDisplay(); + eglConfig = getEglConfig(egl, eglDisplay, configAttributes); + final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes); + Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion); + eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion); } @Override - public long getNativeEglContext() { - // TODO(magjed): Implement. There is no easy way of getting the native context for EGL 1.0. We - // need to make sure to have an EglSurface, then make the context current using that surface, - // and then call into JNI and call the native version of eglGetCurrentContext. Then we need to - // restore the state and return the native context. - return 0 /* EGL_NO_CONTEXT */; + public void createSurface(Surface surface) { + createSurfaceInternal(new FakeSurfaceHolder(surface), false); } - public Context(EGLContext eglContext) { - this.eglContext = eglContext; + // Create EGLSurface from the Android SurfaceTexture. + @Override + public void createSurface(SurfaceTexture surfaceTexture) { + createSurfaceInternal(surfaceTexture, false); + } + + // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture. 
+ private void createSurfaceInternal(Object nativeWindow, boolean background) { + if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) { + throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture"); + } + checkIsNotReleased(); + if (background) { + if (eglBackgroundSurface != EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL10.EGL_NONE}; + eglBackgroundSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs); + if (eglBackgroundSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException( + "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError())); + } + } else { + if (eglSurface != EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL10.EGL_NONE}; + eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException( + "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + } + + // Create dummy 1x1 pixel buffer surface so the context can be made current. 
+ @Override + public void createDummyPbufferSurface() { + createPbufferSurface(1, 1); + } + + @Override + public void createPbufferSurface(int width, int height) { + checkIsNotReleased(); + if (eglSurface != EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE}; + eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x" + + height + ": 0x" + Integer.toHexString(egl.eglGetError())); + } + } + + @Override + public org.webrtc.EglBase.Context getEglBaseContext() { + return new Context(egl, eglContext, eglConfig); + } + + @Override + public boolean hasSurface() { + return eglSurface != EGL10.EGL_NO_SURFACE; + } + + @Override + public int surfaceWidth() { + final int widthArray[] = new int[1]; + egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray); + return widthArray[0]; + } + + @Override + public int surfaceHeight() { + final int heightArray[] = new int[1]; + egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray); + return heightArray[0]; + } + + @Override + public void releaseSurface(boolean background) { + if (background) { + if (eglBackgroundSurface != EGL10.EGL_NO_SURFACE) { + egl.eglDestroySurface(eglDisplay, eglBackgroundSurface); + eglBackgroundSurface = EGL10.EGL_NO_SURFACE; + } + } else { + if (eglSurface != EGL10.EGL_NO_SURFACE) { + egl.eglDestroySurface(eglDisplay, eglSurface); + eglSurface = EGL10.EGL_NO_SURFACE; + } + } + } + + private void checkIsNotReleased() { + if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT + || eglConfig == null) { + throw new RuntimeException("This object has been released"); + } + } + + @Override + public void release() { + checkIsNotReleased(); + releaseSurface(false); + 
releaseSurface(true); + detachCurrent(); + egl.eglDestroyContext(eglDisplay, eglContext); + egl.eglTerminate(eglDisplay); + eglContext = EGL10.EGL_NO_CONTEXT; + eglDisplay = EGL10.EGL_NO_DISPLAY; + eglConfig = null; + } + + @Override + public void makeCurrent() { + checkIsNotReleased(); + if (eglSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't make current"); + } + synchronized (EglBase.lock) { + if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { + throw new RuntimeException( + "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + } + + // Detach the current EGL context, so that it can be made current on another thread. + @Override + public void detachCurrent() { + synchronized (EglBase.lock) { + if (!egl.eglMakeCurrent( + eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) { + throw new RuntimeException( + "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + } + + @Override + public void swapBuffers(boolean background) { + EGLSurface surface = background ? eglBackgroundSurface : eglSurface; + checkIsNotReleased(); + if (surface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't swap buffers"); + } + synchronized (EglBase.lock) { + egl.eglSwapBuffers(eglDisplay, surface); + } + } + + @Override + public void swapBuffers(long timeStampNs, boolean background) { + // Setting presentation time is not supported for EGL 1.0. 
+ swapBuffers(background); + } + + @Override + public void createBackgroundSurface(SurfaceTexture surface) { + createSurfaceInternal(surface, true); + } + + @Override + public void makeBackgroundCurrent() { + checkIsNotReleased(); + if (eglBackgroundSurface == EGL10.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't make current"); + } + synchronized (EglBase.lock) { + if (!egl.eglMakeCurrent(eglDisplay, eglBackgroundSurface, eglBackgroundSurface, eglContext)) { + throw new RuntimeException( + "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); + } + } + } + + @Override + public boolean hasBackgroundSurface() { + return eglBackgroundSurface != EGL10.EGL_NO_SURFACE; + } + + // Return an EGLDisplay, or die trying. + private EGLDisplay getEglDisplay() { + EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); + if (eglDisplay == EGL10.EGL_NO_DISPLAY) { + throw new RuntimeException( + "Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError())); + } + int[] version = new int[2]; + if (!egl.eglInitialize(eglDisplay, version)) { + throw new RuntimeException( + "Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError())); + } + return eglDisplay; + } + + // Return an EGLConfig, or die trying. + private static EGLConfig getEglConfig(EGL10 egl, EGLDisplay eglDisplay, int[] configAttributes) { + EGLConfig[] configs = new EGLConfig[1]; + int[] numConfigs = new int[1]; + if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) { + throw new RuntimeException( + "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError())); + } + if (numConfigs[0] <= 0) { + throw new RuntimeException("Unable to find any matching EGL config"); + } + final EGLConfig eglConfig = configs[0]; + if (eglConfig == null) { + throw new RuntimeException("eglChooseConfig returned null"); + } + return eglConfig; + } + + // Return an EGLConfig, or die trying. 
+ private EGLContext createEglContext(@Nullable EGLContext sharedContext, EGLDisplay eglDisplay, + EGLConfig eglConfig, int openGlesVersion) { + if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) { + throw new RuntimeException("Invalid sharedContext"); + } + int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE}; + EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext; + final EGLContext eglContext; + synchronized (EglBase.lock) { + eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes); + } + if (eglContext == EGL10.EGL_NO_CONTEXT) { + throw new RuntimeException( + "Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError())); + } + return eglContext; } - } - // Create a new context with the specified config type, sharing data with sharedContext. - public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) { - this.egl = (EGL10) EGLContext.getEGL(); - eglDisplay = getEglDisplay(); - eglConfig = getEglConfig(eglDisplay, configAttributes); - final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes); - Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion); - eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion); - } - @Override - public void createSurface(Surface surface) { /** * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface * couldn't actually take a Surface object until API 17. Older versions fortunately just call * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant. 
*/ - class FakeSurfaceHolder implements SurfaceHolder { - private final Surface surface; + private class FakeSurfaceHolder implements SurfaceHolder { + private final Surface surface; - FakeSurfaceHolder(Surface surface) { - this.surface = surface; - } + FakeSurfaceHolder(Surface surface) { + this.surface = surface; + } - @Override - public void addCallback(Callback callback) {} + @Override + public void addCallback(Callback callback) { + } - @Override - public void removeCallback(Callback callback) {} + @Override + public void removeCallback(Callback callback) { + } - @Override - public boolean isCreating() { - return false; - } + @Override + public boolean isCreating() { + return false; + } - @Deprecated - @Override - public void setType(int i) {} + @Deprecated + @Override + public void setType(int i) { + } - @Override - public void setFixedSize(int i, int i2) {} + @Override + public void setFixedSize(int i, int i2) { + } - @Override - public void setSizeFromLayout() {} + @Override + public void setSizeFromLayout() { + } - @Override - public void setFormat(int i) {} + @Override + public void setFormat(int i) { + } - @Override - public void setKeepScreenOn(boolean b) {} + @Override + public void setKeepScreenOn(boolean b) { + } - @Nullable - @Override - public Canvas lockCanvas() { - return null; - } + @Nullable + @Override + public Canvas lockCanvas() { + return null; + } - @Nullable - @Override - public Canvas lockCanvas(Rect rect) { - return null; - } + @Nullable + @Override + public Canvas lockCanvas(Rect rect) { + return null; + } - @Override - public void unlockCanvasAndPost(Canvas canvas) {} + @Override + public void unlockCanvasAndPost(Canvas canvas) { + } - @Nullable - @Override - public Rect getSurfaceFrame() { - return null; - } + @Nullable + @Override + public Rect getSurfaceFrame() { + return null; + } - @Override - public Surface getSurface() { - return surface; - } + @Override + public Surface getSurface() { + return surface; + } } - 
createSurfaceInternal(new FakeSurfaceHolder(surface)); - } - - // Create EGLSurface from the Android SurfaceTexture. - @Override - public void createSurface(SurfaceTexture surfaceTexture) { - createSurfaceInternal(surfaceTexture); - } - - // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture. - private void createSurfaceInternal(Object nativeWindow) { - if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) { - throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture"); - } - checkIsNotReleased(); - if (eglSurface != EGL10.EGL_NO_SURFACE) { - throw new RuntimeException("Already has an EGLSurface"); - } - int[] surfaceAttribs = {EGL10.EGL_NONE}; - eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs); - if (eglSurface == EGL10.EGL_NO_SURFACE) { - throw new RuntimeException( - "Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError())); - } - } - - // Create dummy 1x1 pixel buffer surface so the context can be made current. 
- @Override - public void createDummyPbufferSurface() { - createPbufferSurface(1, 1); - } - - @Override - public void createPbufferSurface(int width, int height) { - checkIsNotReleased(); - if (eglSurface != EGL10.EGL_NO_SURFACE) { - throw new RuntimeException("Already has an EGLSurface"); - } - int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE}; - eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs); - if (eglSurface == EGL10.EGL_NO_SURFACE) { - throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x" - + height + ": 0x" + Integer.toHexString(egl.eglGetError())); - } - } - - @Override - public org.webrtc.EglBase.Context getEglBaseContext() { - return new Context(eglContext); - } - - @Override - public boolean hasSurface() { - return eglSurface != EGL10.EGL_NO_SURFACE; - } - - @Override - public int surfaceWidth() { - final int widthArray[] = new int[1]; - egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray); - return widthArray[0]; - } - - @Override - public int surfaceHeight() { - final int heightArray[] = new int[1]; - egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray); - return heightArray[0]; - } - - @Override - public void releaseSurface() { - if (eglSurface != EGL10.EGL_NO_SURFACE) { - egl.eglDestroySurface(eglDisplay, eglSurface); - eglSurface = EGL10.EGL_NO_SURFACE; - } - } - - private void checkIsNotReleased() { - if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT - || eglConfig == null) { - throw new RuntimeException("This object has been released"); - } - } - - @Override - public void release() { - checkIsNotReleased(); - releaseSurface(); - detachCurrent(); - egl.eglDestroyContext(eglDisplay, eglContext); - egl.eglTerminate(eglDisplay); - eglContext = EGL10.EGL_NO_CONTEXT; - eglDisplay = EGL10.EGL_NO_DISPLAY; - eglConfig = null; - } - - @Override - public void makeCurrent() { - 
checkIsNotReleased(); - if (eglSurface == EGL10.EGL_NO_SURFACE) { - throw new RuntimeException("No EGLSurface - can't make current"); - } - synchronized (EglBase.lock) { - if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { - throw new RuntimeException( - "eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); - } - } - } - - // Detach the current EGL context, so that it can be made current on another thread. - @Override - public void detachCurrent() { - synchronized (EglBase.lock) { - if (!egl.eglMakeCurrent( - eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) { - throw new RuntimeException( - "eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError())); - } - } - } - - @Override - public void swapBuffers() { - checkIsNotReleased(); - if (eglSurface == EGL10.EGL_NO_SURFACE) { - throw new RuntimeException("No EGLSurface - can't swap buffers"); - } - synchronized (EglBase.lock) { - egl.eglSwapBuffers(eglDisplay, eglSurface); - } - } - - @Override - public void swapBuffers(long timeStampNs) { - // Setting presentation time is not supported for EGL 1.0. - swapBuffers(); - } - - // Return an EGLDisplay, or die trying. - private EGLDisplay getEglDisplay() { - EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); - if (eglDisplay == EGL10.EGL_NO_DISPLAY) { - throw new RuntimeException( - "Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError())); - } - int[] version = new int[2]; - if (!egl.eglInitialize(eglDisplay, version)) { - throw new RuntimeException( - "Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError())); - } - return eglDisplay; - } - - // Return an EGLConfig, or die trying. 
- private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) { - EGLConfig[] configs = new EGLConfig[1]; - int[] numConfigs = new int[1]; - if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) { - throw new RuntimeException( - "eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError())); - } - if (numConfigs[0] <= 0) { - throw new RuntimeException("Unable to find any matching EGL config"); - } - final EGLConfig eglConfig = configs[0]; - if (eglConfig == null) { - throw new RuntimeException("eglChooseConfig returned null"); - } - return eglConfig; - } - - // Return an EGLConfig, or die trying. - private EGLContext createEglContext(@Nullable EGLContext sharedContext, EGLDisplay eglDisplay, - EGLConfig eglConfig, int openGlesVersion) { - if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) { - throw new RuntimeException("Invalid sharedContext"); - } - int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE}; - EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext; - final EGLContext eglContext; - synchronized (EglBase.lock) { - eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes); - } - if (eglContext == EGL10.EGL_NO_CONTEXT) { - throw new RuntimeException( - "Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError())); - } - return eglContext; - } + private static native long nativeGetCurrentNativeEGLContext(); } diff --git a/TMessagesProj/src/main/java/org/webrtc/EglBase14.java b/TMessagesProj/src/main/java/org/webrtc/EglBase14.java index 69c89c44d..bdef7207b 100644 --- a/TMessagesProj/src/main/java/org/webrtc/EglBase14.java +++ b/TMessagesProj/src/main/java/org/webrtc/EglBase14.java @@ -11,9 +11,11 @@ package org.webrtc; import android.opengl.EGLContext; +import android.view.Surface; /** EGL 1.4 implementation of EglBase. 
*/ public interface EglBase14 extends EglBase { + interface Context extends EglBase.Context { EGLContext getRawContext(); } diff --git a/TMessagesProj/src/main/java/org/webrtc/EglBase14Impl.java b/TMessagesProj/src/main/java/org/webrtc/EglBase14Impl.java index 4ad2802bf..b6cf59bca 100644 --- a/TMessagesProj/src/main/java/org/webrtc/EglBase14Impl.java +++ b/TMessagesProj/src/main/java/org/webrtc/EglBase14Impl.java @@ -37,6 +37,7 @@ class EglBase14Impl implements EglBase14 { @Nullable private EGLConfig eglConfig; private EGLDisplay eglDisplay; private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE; + private EGLSurface eglSurfaceBackground = EGL14.EGL_NO_SURFACE; // EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation // time stamp on a surface is supported from 18 so we require 18. @@ -81,29 +82,48 @@ class EglBase14Impl implements EglBase14 { // Create EGLSurface from the Android Surface. @Override public void createSurface(Surface surface) { - createSurfaceInternal(surface); + createSurfaceInternal(surface, false); } + // Create EGLSurface from the Android Surface. + @Override + public void createBackgroundSurface(SurfaceTexture surface) { + createSurfaceInternal(surface, true); + } + + // Create EGLSurface from the Android SurfaceTexture. @Override public void createSurface(SurfaceTexture surfaceTexture) { - createSurfaceInternal(surfaceTexture); + createSurfaceInternal(surfaceTexture, false); } // Create EGLSurface from either Surface or SurfaceTexture. 
- private void createSurfaceInternal(Object surface) { + private void createSurfaceInternal(Object surface, boolean background) { if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) { throw new IllegalStateException("Input must be either a Surface or SurfaceTexture"); } checkIsNotReleased(); - if (eglSurface != EGL14.EGL_NO_SURFACE) { - throw new RuntimeException("Already has an EGLSurface"); - } - int[] surfaceAttribs = {EGL14.EGL_NONE}; - eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0); - if (eglSurface == EGL14.EGL_NO_SURFACE) { - throw new RuntimeException( - "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError())); + if (background) { + if (eglSurfaceBackground != EGL14.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL14.EGL_NONE}; + eglSurfaceBackground = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0); + if (eglSurfaceBackground == EGL14.EGL_NO_SURFACE) { + throw new RuntimeException( + "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError())); + } + } else { + if (eglSurface != EGL14.EGL_NO_SURFACE) { + throw new RuntimeException("Already has an EGLSurface"); + } + int[] surfaceAttribs = {EGL14.EGL_NONE}; + eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0); + if (eglSurface == EGL14.EGL_NO_SURFACE) { + throw new RuntimeException( + "Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError())); + } } } @@ -151,10 +171,17 @@ class EglBase14Impl implements EglBase14 { } @Override - public void releaseSurface() { - if (eglSurface != EGL14.EGL_NO_SURFACE) { - EGL14.eglDestroySurface(eglDisplay, eglSurface); - eglSurface = EGL14.EGL_NO_SURFACE; + public void releaseSurface(boolean background) { + if (background) { + if (eglSurfaceBackground != EGL14.EGL_NO_SURFACE) { + 
EGL14.eglDestroySurface(eglDisplay, eglSurfaceBackground); + eglSurfaceBackground = EGL14.EGL_NO_SURFACE; + } + } else { + if (eglSurface != EGL14.EGL_NO_SURFACE) { + EGL14.eglDestroySurface(eglDisplay, eglSurface); + eglSurface = EGL14.EGL_NO_SURFACE; + } } } @@ -168,7 +195,8 @@ class EglBase14Impl implements EglBase14 { @Override public void release() { checkIsNotReleased(); - releaseSurface(); + releaseSurface(false); + releaseSurface(true); detachCurrent(); synchronized (EglBase.lock) { EGL14.eglDestroyContext(eglDisplay, eglContext); @@ -194,6 +222,25 @@ class EglBase14Impl implements EglBase14 { } } + @Override + public void makeBackgroundCurrent() { + checkIsNotReleased(); + if (eglSurfaceBackground == EGL14.EGL_NO_SURFACE) { + throw new RuntimeException("No EGLSurface - can't make current"); + } + synchronized (EglBase.lock) { + if (!EGL14.eglMakeCurrent(eglDisplay, eglSurfaceBackground, eglSurfaceBackground, eglContext)) { + throw new RuntimeException( + "eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError())); + } + } + } + + @Override + public boolean hasBackgroundSurface() { + return eglSurfaceBackground != EGL14.EGL_NO_SURFACE; + } + // Detach the current EGL context, so that it can be made current on another thread. @Override public void detachCurrent() { @@ -207,27 +254,29 @@ class EglBase14Impl implements EglBase14 { } @Override - public void swapBuffers() { + public void swapBuffers(boolean background) { checkIsNotReleased(); - if (eglSurface == EGL14.EGL_NO_SURFACE) { + EGLSurface surface = background ? 
eglSurfaceBackground : eglSurface; + if (surface == EGL14.EGL_NO_SURFACE) { throw new RuntimeException("No EGLSurface - can't swap buffers"); } synchronized (EglBase.lock) { - EGL14.eglSwapBuffers(eglDisplay, eglSurface); + EGL14.eglSwapBuffers(eglDisplay, surface); } } @Override - public void swapBuffers(long timeStampNs) { + public void swapBuffers(long timeStampNs, boolean background) { checkIsNotReleased(); - if (eglSurface == EGL14.EGL_NO_SURFACE) { + EGLSurface surface = background ? eglSurfaceBackground : eglSurface; + if (surface == EGL14.EGL_NO_SURFACE) { throw new RuntimeException("No EGLSurface - can't swap buffers"); } synchronized (EglBase.lock) { // See // https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt - EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs); - EGL14.eglSwapBuffers(eglDisplay, eglSurface); + EGLExt.eglPresentationTimeANDROID(eglDisplay, surface, timeStampNs); + EGL14.eglSwapBuffers(eglDisplay, surface); } } diff --git a/TMessagesProj/src/main/java/org/webrtc/EglRenderer.java b/TMessagesProj/src/main/java/org/webrtc/EglRenderer.java index 678ac65fd..e2b78c611 100644 --- a/TMessagesProj/src/main/java/org/webrtc/EglRenderer.java +++ b/TMessagesProj/src/main/java/org/webrtc/EglRenderer.java @@ -20,8 +20,10 @@ import android.os.Looper; import android.os.Message; import androidx.annotation.Nullable; import android.view.Surface; + +import org.telegram.messenger.FileLog; + import java.nio.ByteBuffer; -import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Iterator; import java.util.concurrent.CountDownLatch; @@ -35,12 +37,12 @@ public class EglRenderer implements VideoSink { private static final String TAG = "EglRenderer"; private static final long LOG_INTERVAL_SEC = 4; - private boolean firstFrameRendered; + public boolean firstFrameRendered; public interface FrameListener { void onFrame(Bitmap frame); } /** Callback for clients to 
be notified about errors encountered during rendering. */ - public static interface ErrorCallback { + public interface ErrorCallback { /** Called if GLES20.GL_OUT_OF_MEMORY is encountered during rendering. */ void onGlOutOfMemory(); } @@ -62,6 +64,11 @@ public class EglRenderer implements VideoSink { private class EglSurfaceCreation implements Runnable { private Object surface; + private final boolean background; + + public EglSurfaceCreation(boolean background) { + this.background = background; + } // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. @SuppressWarnings("NoSynchronizedMethodCheck") @@ -73,17 +80,31 @@ public class EglRenderer implements VideoSink { // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. @SuppressWarnings("NoSynchronizedMethodCheck") public synchronized void run() { - if (surface != null && eglBase != null && !eglBase.hasSurface()) { + if (surface != null && eglBase != null && (background ? !eglBase.hasBackgroundSurface() : !eglBase.hasSurface())) { if (surface instanceof Surface) { eglBase.createSurface((Surface) surface); } else if (surface instanceof SurfaceTexture) { - eglBase.createSurface((SurfaceTexture) surface); + if (background) { + eglBase.createBackgroundSurface((SurfaceTexture) surface); + } else { + eglBase.createSurface((SurfaceTexture) surface); + } + } else { throw new IllegalStateException("Invalid surface: " + surface); } - eglBase.makeCurrent(); - // Necessary for YUV frames with odd width. - GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); + if (!background) { + eglBase.makeCurrent(); + // Necessary for YUV frames with odd width. + GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); + } else { + eglBase.makeBackgroundCurrent(); + // Necessary for YUV frames with odd width. 
+ GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); + if (eglBase.hasSurface()) { + eglBase.makeCurrent(); + } + } } } } @@ -152,8 +173,6 @@ public class EglRenderer implements VideoSink { private int rotation; - // These variables are synchronized on |statisticsLock|. - private final Object statisticsLock = new Object(); // Total number of video frames received in renderFrame() call. private int framesReceived; // Number of video frames dropped by renderFrame() because previous frame has not been rendered @@ -161,8 +180,6 @@ public class EglRenderer implements VideoSink { private int framesDropped; // Number of rendered video frames. private int framesRendered; - // Start time for counting these statistics, or 0 if we haven't started measuring yet. - private long statisticsStartTimeNs; // Time in ns spent in renderFrameOnRenderThread() function. private long renderTimeNs; // Time in ns spent by the render thread in the swapBuffers() function. @@ -172,21 +189,8 @@ public class EglRenderer implements VideoSink { private final GlTextureFrameBuffer bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA); - private final Runnable logStatisticsRunnable = new Runnable() { - @Override - public void run() { - logStatistics(); - synchronized (handlerLock) { - if (renderThreadHandler != null) { - renderThreadHandler.removeCallbacks(logStatisticsRunnable); - renderThreadHandler.postDelayed( - logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC)); - } - } - } - }; - - private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation(); + private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation(false); + private final EglSurfaceCreation eglSurfaceBackgroundCreationRunnable = new EglSurfaceCreation(true); /** * Standard constructor. 
The name will be used for the render thread name and included when @@ -234,7 +238,7 @@ public class EglRenderer implements VideoSink { // Create EGL context on the newly created render thread. It should be possibly to create the // context on this thread and make it current on the render thread, but this causes failure on // some Marvel based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350. - ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> { + renderThreadHandler.post(() -> { // If sharedContext is null, then texture frames are disabled. This is typically for old // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has // caused trouble on some weird devices. @@ -247,10 +251,6 @@ public class EglRenderer implements VideoSink { } }); renderThreadHandler.post(eglSurfaceCreationRunnable); - final long currentTimeNs = System.nanoTime(); - resetStatistics(currentTimeNs); - renderThreadHandler.postDelayed( - logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC)); } } @@ -265,16 +265,32 @@ public class EglRenderer implements VideoSink { } public void createEglSurface(Surface surface) { - createEglSurfaceInternal(surface); + createEglSurfaceInternal(surface, false); } public void createEglSurface(SurfaceTexture surfaceTexture) { - createEglSurfaceInternal(surfaceTexture); + createEglSurfaceInternal(surfaceTexture, false); } - private void createEglSurfaceInternal(Object surface) { - eglSurfaceCreationRunnable.setSurface(surface); - postToRenderThread(eglSurfaceCreationRunnable); + public void createBackgroundSurface(SurfaceTexture surface) { + createEglSurfaceInternal(surface, true); + } + + private void createEglSurfaceInternal(Object surface, boolean background) { + if (background) { + eglSurfaceBackgroundCreationRunnable.setSurface(surface); + synchronized (handlerLock) { + if (renderThreadHandler != null) { + renderThreadHandler.post(eglSurfaceBackgroundCreationRunnable); + } else { + 
FileLog.d("can't create background surface. render thread is null"); + } + } + // postToRenderThread(eglSurfaceBackgroundCreationRunnable); + } else { + eglSurfaceCreationRunnable.setSurface(surface); + postToRenderThread(eglSurfaceCreationRunnable); + } } /** @@ -291,7 +307,6 @@ public class EglRenderer implements VideoSink { logD("Already released"); return; } - renderThreadHandler.removeCallbacks(logStatisticsRunnable); // Release EGL and GL resources on render thread. renderThreadHandler.postAtFrontOfQueue(() -> { // Detach current shader program. @@ -333,20 +348,6 @@ public class EglRenderer implements VideoSink { logD("Releasing done."); } - /** - * Reset the statistics logged in logStatistics(). - */ - private void resetStatistics(long currentTimeNs) { - synchronized (statisticsLock) { - statisticsStartTimeNs = currentTimeNs; - framesReceived = 0; - framesDropped = 0; - framesRendered = 0; - renderTimeNs = 0; - renderSwapBufferTimeNs = 0; - } - } - public void printStackTrace() { synchronized (handlerLock) { final Thread renderThread = @@ -388,9 +389,10 @@ public class EglRenderer implements VideoSink { * Set this to 0 to disable cropping. */ public void setLayoutAspectRatio(float layoutAspectRatio) { - logD("setLayoutAspectRatio: " + layoutAspectRatio); - synchronized (layoutLock) { - this.layoutAspectRatio = layoutAspectRatio; + if (this.layoutAspectRatio != layoutAspectRatio) { + synchronized (layoutLock) { + this.layoutAspectRatio = layoutAspectRatio; + } } } @@ -508,9 +510,6 @@ public class EglRenderer implements VideoSink { // VideoSink interface. 
@Override public void onFrame(VideoFrame frame) { - synchronized (statisticsLock) { - ++framesReceived; - } final boolean dropOldFrame; synchronized (handlerLock) { if (renderThreadHandler == null) { @@ -527,11 +526,6 @@ public class EglRenderer implements VideoSink { renderThreadHandler.post(this ::renderFrameOnRenderThread); } } - if (dropOldFrame) { - synchronized (statisticsLock) { - ++framesDropped; - } - } } public void setRotation(int value) { @@ -543,7 +537,7 @@ public class EglRenderer implements VideoSink { /** * Release EGL surface. This function will block until the EGL surface is released. */ - public void releaseEglSurface(final Runnable completionCallback) { + public void releaseEglSurface(final Runnable completionCallback, boolean background) { // Ensure that the render thread is no longer touching the Surface before returning from this // function. eglSurfaceCreationRunnable.setSurface(null /* surface */); @@ -553,14 +547,18 @@ public class EglRenderer implements VideoSink { renderThreadHandler.postAtFrontOfQueue(() -> { if (eglBase != null) { eglBase.detachCurrent(); - eglBase.releaseSurface(); + eglBase.releaseSurface(background); + } + if (completionCallback != null) { + completionCallback.run(); } - completionCallback.run(); }); return; } } - completionCallback.run(); + if (completionCallback != null) { + completionCallback.run(); + } } /** @@ -579,7 +577,7 @@ public class EglRenderer implements VideoSink { logD("clearSurface"); GLES20.glClearColor(r, g, b, a); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - eglBase.swapBuffers(); + eglBase.swapBuffers(false); } } @@ -588,6 +586,7 @@ public class EglRenderer implements VideoSink { */ public void clearImage() { clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */); + firstFrameRendered = false; } /** @@ -602,6 +601,18 @@ public class EglRenderer implements VideoSink { } } + public void getTexture(GlGenericDrawer.TextureCallback callback) { + synchronized (handlerLock) { + try { + if 
(renderThreadHandler != null) { + renderThreadHandler.post(() -> frameDrawer.getRenderBufferBitmap(drawer, rotation, callback)); + } + } catch (Exception e) { + FileLog.e(e); + } + } + } + /** * Renders and releases |pendingFrame|. */ @@ -632,7 +643,6 @@ public class EglRenderer implements VideoSink { } else { final long currentTimeNs = System.nanoTime(); if (currentTimeNs < nextFrameTimeNs) { - logD("Skipping frame rendering - fps reduction is active."); shouldRenderFrame = false; } else { nextFrameTimeNs += minRenderPeriodNs; @@ -672,29 +682,35 @@ public class EglRenderer implements VideoSink { try { if (shouldRenderFrame) { - GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */); - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */, - eglBase.surfaceWidth(), eglBase.surfaceHeight(), rotate); + eglBase.surfaceWidth(), eglBase.surfaceHeight(), rotate, false); + + if (eglBase.hasBackgroundSurface()) { + eglBase.makeBackgroundCurrent(); + + frameDrawer.drawFrame(frame, drawer, drawMatrix, 0, 0, + eglBase.surfaceWidth(), eglBase.surfaceHeight(), rotate, true); + + if (usePresentationTimeStamp) { + eglBase.swapBuffers(frame.getTimestampNs(), true); + } else { + eglBase.swapBuffers(true); + } + eglBase.makeCurrent(); + } final long swapBuffersStartTimeNs = System.nanoTime(); if (usePresentationTimeStamp) { - eglBase.swapBuffers(frame.getTimestampNs()); + eglBase.swapBuffers(frame.getTimestampNs(), false); } else { - eglBase.swapBuffers(); + eglBase.swapBuffers(false); } + if (!firstFrameRendered) { firstFrameRendered = true; onFirstFrameRendered(); } - - final long currentTimeNs = System.nanoTime(); - synchronized (statisticsLock) { - ++framesRendered; - renderTimeNs += (currentTimeNs - startTimeNs); - renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs); - } } notifyCallbacks(frame, shouldRenderFrame); @@ -755,7 +771,7 @@ public class EglRenderer 
implements VideoSink { GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */, - 0 /* viewportY */, scaledWidth, scaledHeight, false); + 0 /* viewportY */, scaledWidth, scaledHeight, false, false); final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4); GLES20.glViewport(0, 0, scaledWidth, scaledHeight); @@ -771,31 +787,6 @@ public class EglRenderer implements VideoSink { } } - private String averageTimeAsString(long sumTimeNs, int count) { - return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " us"; - } - - private void logStatistics() { - final DecimalFormat fpsFormat = new DecimalFormat("#.0"); - final long currentTimeNs = System.nanoTime(); - synchronized (statisticsLock) { - final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs; - if (elapsedTimeNs <= 0 || (minRenderPeriodNs == Long.MAX_VALUE && framesReceived == 0)) { - return; - } - final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs; - logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms." - + " Frames received: " + framesReceived + "." - + " Dropped: " + framesDropped + "." - + " Rendered: " + framesRendered + "." - + " Render fps: " + fpsFormat.format(renderFps) + "." - + " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "." 
- + " Average swapBuffer time: " - + averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + "."); - resetStatistics(currentTimeNs); - } - } - private void logE(String string, Throwable e) { Logging.e(TAG, name + string, e); } diff --git a/TMessagesProj/src/main/java/org/webrtc/GlGenericDrawer.java b/TMessagesProj/src/main/java/org/webrtc/GlGenericDrawer.java index 50269ae2d..1b6f46188 100644 --- a/TMessagesProj/src/main/java/org/webrtc/GlGenericDrawer.java +++ b/TMessagesProj/src/main/java/org/webrtc/GlGenericDrawer.java @@ -10,13 +10,18 @@ package org.webrtc; +import android.graphics.Bitmap; import android.opengl.GLES11Ext; import android.opengl.GLES20; + +import org.telegram.messenger.FileLog; + import androidx.annotation.Nullable; + +import java.nio.ByteBuffer; import java.nio.FloatBuffer; -import org.webrtc.GlShader; -import org.webrtc.GlUtil; -import org.webrtc.RendererCommon; + +import javax.microedition.khronos.opengles.GL10; /** * Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input @@ -31,18 +36,20 @@ import org.webrtc.RendererCommon; * This class covers the cases for most simple shaders and generates the necessary boiler plate. * Advanced shaders can always implement RendererCommon.GlDrawer directly. */ -class GlGenericDrawer implements RendererCommon.GlDrawer { +public class GlGenericDrawer implements RendererCommon.GlDrawer { /** * The different shader types representing different input sources. YUV here represents three * separate Y, U, V textures. */ - public static enum ShaderType { OES, RGB, YUV } + private static final int OES = 0; + private static final int RGB = 1; + private static final int YUV = 2; /** * The shader callbacks is used to customize behavior for a GlDrawer. It provides a hook to set * uniform variables in the shader before a frame is drawn. 
*/ - public static interface ShaderCallbacks { + public interface ShaderCallbacks { /** * This callback is called when a new shader has been compiled and created. It will be called * for the first frame as well as when the shader type is changed. This callback can be used to @@ -88,15 +95,17 @@ class GlGenericDrawer implements RendererCommon.GlDrawer { 1.0f, 1.0f, // Top right. }); - static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) { + static String createFragmentShaderString(String genericFragmentSource, int shaderType, boolean blur) { final StringBuilder stringBuilder = new StringBuilder(); - if (shaderType == ShaderType.OES) { + if (shaderType == OES) { stringBuilder.append("#extension GL_OES_EGL_image_external : require\n"); } - stringBuilder.append("precision mediump float;\n"); - stringBuilder.append("varying vec2 tc;\n"); + stringBuilder.append("precision highp float;\n"); + if (!blur) { + stringBuilder.append("varying vec2 tc;\n"); + } - if (shaderType == ShaderType.YUV) { + if (shaderType == YUV) { stringBuilder.append("uniform sampler2D y_tex;\n"); stringBuilder.append("uniform sampler2D u_tex;\n"); stringBuilder.append("uniform sampler2D v_tex;\n"); @@ -113,11 +122,37 @@ class GlGenericDrawer implements RendererCommon.GlDrawer { stringBuilder.append("}\n"); stringBuilder.append(genericFragmentSource); } else { - final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D"; + final String samplerName = shaderType == OES ? "samplerExternalOES" : "sampler2D"; stringBuilder.append("uniform ").append(samplerName).append(" tex;\n"); - - // Update the sampling function in-place. 
- stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, ")); + if (blur) { + stringBuilder.append("precision mediump float;\n") + .append("varying vec2 tc;\n") + .append("const mediump vec3 satLuminanceWeighting = vec3(0.2126, 0.7152, 0.0722);\n") + .append("uniform float texelWidthOffset;\n") + .append("uniform float texelHeightOffset;\n") + .append("void main(){\n") + .append("int rad = 3;\n") + .append("int diameter = 2 * rad + 1;\n") + .append("vec4 sampleTex = vec4(0, 0, 0, 0);\n") + .append("vec3 col = vec3(0, 0, 0);\n") + .append("float weightSum = 0.0;\n") + .append("for(int i = 0; i < diameter; i++) {\n") + .append("vec2 offset = vec2(float(i - rad) * texelWidthOffset, float(i - rad) * texelHeightOffset);\n") + .append("sampleTex = vec4(texture2D(tex, tc.st+offset));\n") + .append("float index = float(i);\n") + .append("float boxWeight = float(rad) + 1.0 - abs(index - float(rad));\n") + .append("col += sampleTex.rgb * boxWeight;\n") + .append("weightSum += boxWeight;\n") + .append("}\n") + .append("vec3 result = col / weightSum;\n") + .append("lowp float satLuminance = dot(result.rgb, satLuminanceWeighting);\n") + .append("lowp vec3 greyScaleColor = vec3(satLuminance);\n") + .append("gl_FragColor = vec4(clamp(mix(greyScaleColor, result.rgb, 1.1), 0.0, 1.0), 1.0);\n") + .append("}\n"); + } else { + // Update the sampling function in-place. 
+ stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, ")); + } } return stringBuilder.toString(); @@ -126,11 +161,11 @@ class GlGenericDrawer implements RendererCommon.GlDrawer { private final String genericFragmentSource; private final String vertexShader; private final ShaderCallbacks shaderCallbacks; - @Nullable private ShaderType currentShaderType; - @Nullable private GlShader currentShader; - private int inPosLocation; - private int inTcLocation; - private int texMatrixLocation; + @Nullable private GlShader[][] currentShader = new GlShader[3][3]; + private int[][] inPosLocation = new int[3][3]; + private int[][] inTcLocation = new int[3][3]; + private int[][] texMatrixLocation = new int[3][3]; + private int[][] texelLocation = new int[3][3]; public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) { this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks); @@ -144,28 +179,123 @@ class GlGenericDrawer implements RendererCommon.GlDrawer { } // Visible for testing. - GlShader createShader(ShaderType shaderType) { - return new GlShader( - vertexShader, createFragmentShaderString(genericFragmentSource, shaderType)); + GlShader createShader(int shaderType, boolean blur) { + return new GlShader(vertexShader, createFragmentShaderString(genericFragmentSource, shaderType, blur)); } /** * Draw an OES texture frame with specified texture transformation matrix. Required resources are * allocated at the first call to this function. 
*/ + private int[] renderTexture = new int[2]; + private int[] renderFrameBuffer; + private float[] renderMatrix; + + private int[] renderTextureWidth = new int[2]; + private int[] renderTextureHeight = new int[2]; + private float[] textureMatrix; + private float renderTextureDownscale; + + private void ensureRenderTargetCreated(int originalWidth, int originalHeight, int texIndex) { + if (renderFrameBuffer == null) { + renderFrameBuffer = new int[2]; + GLES20.glGenFramebuffers(2, renderFrameBuffer, 0); + GLES20.glGenTextures(2, renderTexture, 0); + for (int a = 0; a < renderTexture.length; a++) { + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, renderTexture[a]); + GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR); + GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR); + GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE); + } + renderMatrix = new float[16]; + android.opengl.Matrix.setIdentityM(renderMatrix, 0); + } + if (renderTextureWidth[texIndex] != originalWidth) { + renderTextureDownscale = Math.max(1.0f, Math.max(originalWidth, originalHeight) / 50f); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, renderTexture[texIndex]); + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, (int) (originalWidth / renderTextureDownscale), (int) (originalHeight / renderTextureDownscale), 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); + renderTextureWidth[texIndex] = originalWidth; + renderTextureHeight[texIndex] = originalHeight; + } + } + public interface TextureCallback { + void run(Bitmap bitmap, int rotation); + } + + public void getRenderBufferBitmap(int baseRotation, TextureCallback callback) { + if (renderFrameBuffer == null || textureMatrix == null) { + callback.run(null, 0); + return; + } + + int rotation; + double Ry = Math.asin(textureMatrix[2]); + if (Ry < 
Math.PI / 2 && Ry > -Math.PI / 2) { + rotation = (int) (-Math.atan(-textureMatrix[1] / textureMatrix[0]) / (Math.PI / 180)); + } else { + rotation = baseRotation; + } + + int viewportW = (int) (renderTextureWidth[0] / renderTextureDownscale); + int viewportH = (int) (renderTextureHeight[0] / renderTextureDownscale); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[0]); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[0], 0); + ByteBuffer buffer = ByteBuffer.allocateDirect(viewportW * viewportH * 4); + GLES20.glReadPixels(0, 0, viewportW, viewportH, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer); + Bitmap bitmap = Bitmap.createBitmap(viewportW, viewportH, Bitmap.Config.ARGB_8888); + bitmap.copyPixelsFromBuffer(buffer); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + callback.run(bitmap, rotation); + } + @Override - public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight, - int viewportX, int viewportY, int viewportWidth, int viewportHeight) { - prepareShader( - ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); - // Bind the texture. - GLES20.glActiveTexture(GLES20.GL_TEXTURE0); - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId); - // Draw the texture. - GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); - // Unbind the texture as a precaution. 
- GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); + public void drawOes(int oesTextureId, int originalWidth, int originalHeight, int rotatedWidth, int rotatedHeight, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight, boolean blur) { + if (blur) { + ensureRenderTargetCreated(originalWidth, originalHeight, 1); + + textureMatrix = texMatrix; + int viewportW = (int) (originalWidth / renderTextureDownscale); + int viewportH = (int) (originalHeight / renderTextureDownscale); + GLES20.glViewport(0, 0, viewportW, viewportH); + prepareShader(OES, renderMatrix, rotatedWidth, rotatedHeight, frameWidth, frameHeight, viewportWidth, viewportHeight, 0); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[1]); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[1], 0); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + + if (rotatedWidth != originalWidth) { + int temp = viewportW; + viewportW = viewportH; + viewportH = temp; + } + + ensureRenderTargetCreated(originalWidth, originalHeight, 0); + prepareShader(RGB, renderMatrix, rotatedWidth != originalWidth ? viewportH : viewportW, rotatedWidth != originalWidth ? 
viewportW : viewportH, frameWidth, frameHeight, viewportWidth, viewportHeight, 1); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, renderTexture[1]); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[0]); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[0], 0); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + + GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); + prepareShader(RGB, texMatrix, rotatedWidth != originalWidth ? viewportH : viewportW, rotatedWidth != originalWidth ? viewportW : viewportH, frameWidth, frameHeight, viewportWidth, viewportHeight, 2); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, renderTexture[0]); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + } else { + prepareShader(OES, texMatrix, rotatedWidth, rotatedHeight, frameWidth, frameHeight, viewportWidth, viewportHeight, 0); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId); + GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); + } } /** @@ -173,18 +303,14 @@ class GlGenericDrawer implements RendererCommon.GlDrawer { * are allocated at the first call to this function. */ @Override - public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, - int viewportX, int viewportY, int viewportWidth, int viewportHeight) { - prepareShader( - ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); - // Bind the texture. - GLES20.glActiveTexture(GLES20.GL_TEXTURE0); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); - // Draw the texture. 
- GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); - // Unbind the texture as a precaution. - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); + public void drawRgb(int textureId, int originalWidth, int originalHeight, int rotatedWidth, int rotatedHeight, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight, boolean blur) { + prepareShader(RGB, texMatrix, rotatedWidth, rotatedHeight, frameWidth, frameHeight, viewportWidth, viewportHeight, 0); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); + GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); } /** @@ -192,43 +318,85 @@ class GlGenericDrawer implements RendererCommon.GlDrawer { * at the first call to this function. */ @Override - public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight, - int viewportX, int viewportY, int viewportWidth, int viewportHeight) { - prepareShader( - ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); - // Bind the textures. - for (int i = 0; i < 3; ++i) { - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); - } - // Draw the textures. - GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); - // Unbind the textures as a precaution. 
- for (int i = 0; i < 3; ++i) { - GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); + public void drawYuv(int[] yuvTextures, int originalWidth, int originalHeight, int rotatedWidth, int rotatedHeight, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight, boolean blur) { + if (blur && originalWidth > 0 && originalHeight > 0) { + textureMatrix = texMatrix; + ensureRenderTargetCreated(originalWidth, originalHeight, 1); + + int viewportW = (int) (originalWidth / renderTextureDownscale); + int viewportH = (int) (originalHeight / renderTextureDownscale); + + GLES20.glViewport(0, 0, viewportW, viewportH); + prepareShader(YUV, renderMatrix, rotatedWidth, rotatedHeight, frameWidth, frameHeight, viewportWidth, viewportHeight, 0); + for (int i = 0; i < 3; ++i) { + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); + } + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[1]); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[1], 0); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + for (int i = 0; i < 3; ++i) { + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); + } + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + + if (rotatedWidth != originalWidth) { + int temp = viewportW; + viewportW = viewportH; + viewportH = temp; + } + + ensureRenderTargetCreated(originalWidth, originalHeight, 0); + prepareShader(RGB, renderMatrix, rotatedWidth != originalWidth ? viewportH : viewportW, rotatedWidth != originalWidth ? 
viewportW : viewportH, frameWidth, frameHeight, viewportWidth, viewportHeight, 1); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, renderTexture[1]); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, renderFrameBuffer[0]); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, renderTexture[0], 0); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + + GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); + prepareShader(RGB, texMatrix, rotatedWidth != originalWidth ? viewportH : viewportW, rotatedWidth != originalWidth ? viewportW : viewportH, frameWidth, frameHeight, viewportWidth, viewportHeight, 2); + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, renderTexture[0]); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + } else { + prepareShader(YUV, texMatrix, rotatedWidth, rotatedHeight, frameWidth, frameHeight, viewportWidth, viewportHeight, 0); + for (int i = 0; i < 3; ++i) { + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]); + } + GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight); + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + for (int i = 0; i < 3; ++i) { + GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0); + } } } - private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth, - int frameHeight, int viewportWidth, int viewportHeight) { + private void prepareShader(int shaderType, float[] texMatrix, int texWidth, int texHeight, int frameWidth, + int frameHeight, int viewportWidth, int viewportHeight, int blurPass) { final GlShader shader; - if (shaderType.equals(currentShaderType)) { - // Same shader type as before, reuse exising shader. 
- shader = currentShader; + + boolean blur = blurPass != 0; + if (currentShader[shaderType][blurPass] != null) { + shader = currentShader[shaderType][blurPass]; } else { - // Allocate new shader. - currentShaderType = shaderType; - if (currentShader != null) { - currentShader.release(); + try { + shader = createShader(shaderType, blur); + } catch (Exception e) { + FileLog.e(e); + return; } - shader = createShader(shaderType); - currentShader = shader; + currentShader[shaderType][blurPass] = shader; shader.useProgram(); // Set input texture units. - if (shaderType == ShaderType.YUV) { + if (shaderType == YUV) { GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0); GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1); GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2); @@ -238,32 +406,38 @@ class GlGenericDrawer implements RendererCommon.GlDrawer { GlUtil.checkNoGLES2Error("Create shader"); shaderCallbacks.onNewShader(shader); - texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME); - inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME); - inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME); + if (blur) { + texelLocation[shaderType][0] = shader.getUniformLocation("texelWidthOffset"); + texelLocation[shaderType][1] = shader.getUniformLocation("texelHeightOffset"); + } + texMatrixLocation[shaderType][blurPass] = shader.getUniformLocation(TEXTURE_MATRIX_NAME); + inPosLocation[shaderType][blurPass] = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME); + inTcLocation[shaderType][blurPass] = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME); } shader.useProgram(); + if (blur) { + GLES20.glUniform1f(texelLocation[shaderType][0], blurPass == 1 ? 1.0f / texWidth : 0); + GLES20.glUniform1f(texelLocation[shaderType][1], blurPass == 2 ? 1.0f / texHeight : 0); + } + // Upload the vertex coordinates. 
- GLES20.glEnableVertexAttribArray(inPosLocation); - GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2, - /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0, - FULL_RECTANGLE_BUFFER); + GLES20.glEnableVertexAttribArray(inPosLocation[shaderType][blurPass]); + GLES20.glVertexAttribPointer(inPosLocation[shaderType][blurPass], /* size= */ 2, + /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0, + FULL_RECTANGLE_BUFFER); // Upload the texture coordinates. - GLES20.glEnableVertexAttribArray(inTcLocation); - GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2, - /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0, - FULL_RECTANGLE_TEXTURE_BUFFER); + GLES20.glEnableVertexAttribArray(inTcLocation[shaderType][blurPass]); + GLES20.glVertexAttribPointer(inTcLocation[shaderType][blurPass], /* size= */ 2, + /* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0, + FULL_RECTANGLE_TEXTURE_BUFFER); // Upload the texture transformation matrix. - GLES20.glUniformMatrix4fv( - texMatrixLocation, 1 /* count= */, false /* transpose= */, texMatrix, 0 /* offset= */); - + GLES20.glUniformMatrix4fv(texMatrixLocation[shaderType][blurPass], 1 /* count= */, false /* transpose= */, texMatrix, 0 /* offset= */); // Do custom per-frame shader preparation. 
- shaderCallbacks.onPrepareShader( - shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); + shaderCallbacks.onPrepareShader(shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight); GlUtil.checkNoGLES2Error("Prepare shader"); } @@ -272,10 +446,17 @@ class GlGenericDrawer implements RendererCommon.GlDrawer { */ @Override public void release() { - if (currentShader != null) { - currentShader.release(); - currentShader = null; - currentShaderType = null; + for (int a = 0; a < currentShader.length; a++) { + for (int b = 0; b < currentShader[a].length; b++) { + if (currentShader[a][b] != null) { + currentShader[a][b].release(); + currentShader[a][b] = null; + } + } + } + if (renderFrameBuffer != null) { + GLES20.glDeleteFramebuffers(2, renderFrameBuffer, 0); + GLES20.glDeleteTextures(2, renderTexture, 0); } } } diff --git a/TMessagesProj/src/main/java/org/webrtc/GlUtil.java b/TMessagesProj/src/main/java/org/webrtc/GlUtil.java index bdafe81fd..7e589cb2f 100644 --- a/TMessagesProj/src/main/java/org/webrtc/GlUtil.java +++ b/TMessagesProj/src/main/java/org/webrtc/GlUtil.java @@ -30,12 +30,12 @@ public class GlUtil { // Assert that no OpenGL ES 2.0 error has been raised. public static void checkNoGLES2Error(String msg) { - int error = GLES20.glGetError(); + /*int error = GLES20.glGetError(); if (error != GLES20.GL_NO_ERROR) { throw error == GLES20.GL_OUT_OF_MEMORY ? 
new GlOutOfMemoryException(msg) : new RuntimeException(msg + ": GLES20 error: " + error); - } + }*/ } public static FloatBuffer createFloatBuffer(float[] coords) { diff --git a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java index a5563d653..fc226d24b 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java @@ -13,6 +13,7 @@ package org.webrtc; import android.media.MediaCodecInfo; import org.telegram.messenger.voip.Instance; +import org.telegram.messenger.voip.VoIPService; import androidx.annotation.Nullable; @@ -22,6 +23,9 @@ public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory { new Predicate() { @Override public boolean test(MediaCodecInfo arg) { + if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().groupCall != null) { + return false; + } if (!MediaCodecUtils.isHardwareAccelerated(arg)) { return false; } diff --git a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoder.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoder.java index 1ff8ce04b..e81048877 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoder.java +++ b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoder.java @@ -402,7 +402,7 @@ class HardwareVideoEncoder implements VideoEncoder { VideoFrame derotatedFrame = new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs()); videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */); - textureEglBase.swapBuffers(videoFrame.getTimestampNs()); + textureEglBase.swapBuffers(videoFrame.getTimestampNs(), false); } catch (RuntimeException e) { Logging.e(TAG, "encodeTexture failed", e); return VideoCodecStatus.ERROR; @@ -564,6 +564,9 @@ class HardwareVideoEncoder implements VideoEncoder { final ByteBuffer 
frameBuffer; if (isKeyFrame && (codecType == VideoCodecMimeType.H264 || codecType == VideoCodecMimeType.H265)) { + if (configBuffer == null) { + configBuffer = ByteBuffer.allocateDirect(info.size); + } Logging.d(TAG, "Prepending config frame of size " + configBuffer.capacity() + " to output buffer with offset " + info.offset + ", size " + info.size); diff --git a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java index 76d55e051..8057dc6c4 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java @@ -20,6 +20,7 @@ import android.media.MediaCodecList; import android.os.Build; import org.telegram.messenger.voip.Instance; +import org.telegram.messenger.voip.VoIPService; import androidx.annotation.Nullable; import java.util.ArrayList; @@ -128,7 +129,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { @Override public VideoCodecInfo[] getSupportedCodecs() { // HW encoding is not supported below Android Kitkat. 
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT || VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().groupCall != null) { return new VideoCodecInfo[0]; } @@ -192,6 +193,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { if (!config.enable_h264_encoder && !config.enable_h265_encoder && !config.enable_vp8_encoder && !config.enable_vp9_encoder) { return false; } + if (VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().groupCall != null) { + return false; + } switch (type) { case VP8: return isHardwareSupportedInCurrentSdkVp8(info); diff --git a/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java b/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java index f959a9a53..40dca27a5 100644 --- a/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java +++ b/TMessagesProj/src/main/java/org/webrtc/MediaCodecUtils.java @@ -17,6 +17,7 @@ import android.media.MediaCodecList; import android.os.Build; import org.telegram.messenger.FileLog; +import org.telegram.messenger.voip.VoIPService; import androidx.annotation.Nullable; @@ -119,6 +120,7 @@ class MediaCodecUtils { case VP8: case VP9: case H265: + case AV1: return new HashMap(); case H264: return H264Utils.getDefaultH264Params(highProfile); diff --git a/TMessagesProj/src/main/java/org/webrtc/NativeCapturerObserver.java b/TMessagesProj/src/main/java/org/webrtc/NativeCapturerObserver.java index b4594128c..a530b7a24 100644 --- a/TMessagesProj/src/main/java/org/webrtc/NativeCapturerObserver.java +++ b/TMessagesProj/src/main/java/org/webrtc/NativeCapturerObserver.java @@ -42,7 +42,7 @@ public class NativeCapturerObserver implements CapturerObserver { public void onFrameCaptured(VideoFrame frame) { final VideoProcessor.FrameAdaptationParameters parameters = nativeAndroidVideoTrackSource.adaptFrame(frame); - if (parameters == null) { + if (parameters == null || 
parameters.cropWidth == 0 || parameters.cropHeight == 0) { // Drop frame. return; } diff --git a/TMessagesProj/src/main/java/org/webrtc/OWNERS b/TMessagesProj/src/main/java/org/webrtc/OWNERS index e6ccc2dda..109bea272 100644 --- a/TMessagesProj/src/main/java/org/webrtc/OWNERS +++ b/TMessagesProj/src/main/java/org/webrtc/OWNERS @@ -1,3 +1,2 @@ -per-file Camera*=sakal@webrtc.org -per-file Histogram.java=sakal@webrtc.org -per-file Metrics.java=sakal@webrtc.org +magjed@webrtc.org +xalep@webrtc.org diff --git a/TMessagesProj/src/main/java/org/webrtc/OrientationHelper.java b/TMessagesProj/src/main/java/org/webrtc/OrientationHelper.java index d3e61497e..976dda797 100644 --- a/TMessagesProj/src/main/java/org/webrtc/OrientationHelper.java +++ b/TMessagesProj/src/main/java/org/webrtc/OrientationHelper.java @@ -11,6 +11,7 @@ public class OrientationHelper { private int rotation; public static volatile int cameraRotation; + public static volatile int cameraOrientation; private int roundOrientation(int orientation, int orientationHistory) { boolean changeOrientation; diff --git a/TMessagesProj/src/main/java/org/webrtc/RendererCommon.java b/TMessagesProj/src/main/java/org/webrtc/RendererCommon.java index 7306f95ad..013a8b17f 100644 --- a/TMessagesProj/src/main/java/org/webrtc/RendererCommon.java +++ b/TMessagesProj/src/main/java/org/webrtc/RendererCommon.java @@ -37,18 +37,18 @@ public class RendererCommon { * input can either be an OES texture, RGB texture, or YUV textures in I420 format. The function * release() must be called manually to free the resources held by this object. */ - public static interface GlDrawer { + public interface GlDrawer { /** * Functions for drawing frames with different sources. The rendering surface target is * implied by the current EGL context of the calling thread and requires no explicit argument. * The coordinates specify the viewport location on the surface target. 
*/ - void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight, - int viewportX, int viewportY, int viewportWidth, int viewportHeight); - void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX, - int viewportY, int viewportWidth, int viewportHeight); - void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight, - int viewportX, int viewportY, int viewportWidth, int viewportHeight); + void drawOes(int oesTextureId, int originalWidth, int originalHeight, int rotatedWidth, int rotatedHeight, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight, boolean blur); + void drawRgb(int textureId, int originalWidth, int originalHeight, int rotatedWidth, int rotatedHeight, float[] texMatrix, int frameWidth, int frameHeight, int viewportX, + int viewportY, int viewportWidth, int viewportHeight, boolean blur); + void drawYuv(int[] yuvTextures, int originalWidth, int originalHeight, int rotatedWidth, int rotatedHeight, float[] texMatrix, int frameWidth, int frameHeight, + int viewportX, int viewportY, int viewportWidth, int viewportHeight, boolean blur); /** * Release all GL resources. This needs to be done manually, otherwise resources may leak. @@ -88,7 +88,7 @@ public class RendererCommon { this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation; } - public Point measure(boolean isCamera, int widthSpec, int heightSpec, int frameWidth, int frameHeight) { + public Point measure(boolean applayRotation, int widthSpec, int heightSpec, int frameWidth, int frameHeight) { // Calculate max allowed layout size. 
final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec); final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec); @@ -105,11 +105,13 @@ public class RendererCommon { final Point layoutSize = getDisplaySize(visibleFraction, frameAspect, maxWidth, maxHeight); // If the measure specification is forcing a specific size - yield. - if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) { - layoutSize.x = maxWidth; - } - if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY || !isCamera && (frameAspect > 1.0f) == (displayAspect > 1.0f)) { - layoutSize.y = maxHeight; + if (!applayRotation) { + if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) { + layoutSize.x = maxWidth; + } + if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY || frameAspect > 1.0f == displayAspect > 1.0f) { + layoutSize.y = maxHeight; + } } return layoutSize; } diff --git a/TMessagesProj/src/main/java/org/webrtc/ScreenCapturerAndroid.java b/TMessagesProj/src/main/java/org/webrtc/ScreenCapturerAndroid.java index 05921a889..8d0c19838 100644 --- a/TMessagesProj/src/main/java/org/webrtc/ScreenCapturerAndroid.java +++ b/TMessagesProj/src/main/java/org/webrtc/ScreenCapturerAndroid.java @@ -21,6 +21,8 @@ import android.media.projection.MediaProjectionManager; import androidx.annotation.Nullable; import android.view.Surface; +import org.telegram.messenger.FileLog; + /** * An implementation of VideoCapturer to capture the screen content as a video stream. * Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. 
We interact with this @@ -106,20 +108,28 @@ public class ScreenCapturerAndroid implements VideoCapturer, VideoSink { @SuppressWarnings("NoSynchronizedMethodCheck") public synchronized void startCapture( final int width, final int height, final int ignoredFramerate) { - checkNotDisposed(); + if (mediaProjection != null || mediaProjectionManager == null) { + return; + } + try { + checkNotDisposed(); - this.width = width; - this.height = height; + this.width = width; + this.height = height; - mediaProjection = mediaProjectionManager.getMediaProjection( - Activity.RESULT_OK, mediaProjectionPermissionResultData); + mediaProjection = mediaProjectionManager.getMediaProjection( + Activity.RESULT_OK, mediaProjectionPermissionResultData); - // Let MediaProjection callback use the SurfaceTextureHelper thread. - mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler()); + // Let MediaProjection callback use the SurfaceTextureHelper thread. + mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler()); - createVirtualDisplay(); - capturerObserver.onCapturerStarted(true); - surfaceTextureHelper.startListening(ScreenCapturerAndroid.this); + createVirtualDisplay(); + capturerObserver.onCapturerStarted(true); + surfaceTextureHelper.startListening(ScreenCapturerAndroid.this); + } catch (Throwable e) { + mediaProjectionCallback.onStop(); + FileLog.e(e); + } } @Override @@ -127,24 +137,21 @@ public class ScreenCapturerAndroid implements VideoCapturer, VideoSink { @SuppressWarnings("NoSynchronizedMethodCheck") public synchronized void stopCapture() { checkNotDisposed(); - ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { - @Override - public void run() { - surfaceTextureHelper.stopListening(); - capturerObserver.onCapturerStopped(); + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), () -> { + surfaceTextureHelper.stopListening(); + 
capturerObserver.onCapturerStopped(); - if (virtualDisplay != null) { - virtualDisplay.release(); - virtualDisplay = null; - } + if (virtualDisplay != null) { + virtualDisplay.release(); + virtualDisplay = null; + } - if (mediaProjection != null) { - // Unregister the callback before stopping, otherwise the callback recursively - // calls this method. - mediaProjection.unregisterCallback(mediaProjectionCallback); - mediaProjection.stop(); - mediaProjection = null; - } + if (mediaProjection != null) { + // Unregister the callback before stopping, otherwise the callback recursively + // calls this method. + mediaProjection.unregisterCallback(mediaProjectionCallback); + mediaProjection.stop(); + mediaProjection = null; } }); } @@ -182,20 +189,21 @@ public class ScreenCapturerAndroid implements VideoCapturer, VideoSink { // Create a new virtual display on the surfaceTextureHelper thread to avoid interference // with frame processing, which happens on the same thread (we serialize events by running // them on the same thread). 
- ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { - @Override - public void run() { - virtualDisplay.release(); - createVirtualDisplay(); - } + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), () -> { + virtualDisplay.release(); + createVirtualDisplay(); }); } private void createVirtualDisplay() { surfaceTextureHelper.setTextureSize(width, height); - virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height, - VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()), - null /* callback */, null /* callback handler */); + try { + virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height, + VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()), + null /* callback */, null /* callback handler */); + } catch (Throwable e) { + FileLog.e(e); + } } // This is called on the internal looper thread of {@Code SurfaceTextureHelper}. 
diff --git a/TMessagesProj/src/main/java/org/webrtc/SurfaceEglRenderer.java b/TMessagesProj/src/main/java/org/webrtc/SurfaceEglRenderer.java index 7a6db1588..9ff875db1 100644 --- a/TMessagesProj/src/main/java/org/webrtc/SurfaceEglRenderer.java +++ b/TMessagesProj/src/main/java/org/webrtc/SurfaceEglRenderer.java @@ -115,7 +115,7 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Cal public void surfaceDestroyed(SurfaceHolder holder) { ThreadUtils.checkIsOnMainThread(); final CountDownLatch completionLatch = new CountDownLatch(1); - releaseEglSurface(completionLatch::countDown); + releaseEglSurface(completionLatch::countDown, false); ThreadUtils.awaitUninterruptibly(completionLatch); } diff --git a/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java b/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java index a70c1da2c..9329b678e 100644 --- a/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java +++ b/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java @@ -8,7 +8,10 @@ import android.os.Looper; import android.view.TextureView; import android.view.View; +import androidx.annotation.NonNull; + import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.voip.VoIPService; import java.util.concurrent.CountDownLatch; @@ -27,14 +30,59 @@ public class TextureViewRenderer extends TextureView private RendererCommon.RendererEvents rendererEvents; // Accessed only on the main thread. 
- private int rotatedFrameWidth; - private int rotatedFrameHeight; + public int rotatedFrameWidth; + public int rotatedFrameHeight; + + private int videoWidth; + private int videoHeight; private boolean enableFixedSize; private int surfaceWidth; private int surfaceHeight; private boolean isCamera; + private boolean mirror; + private boolean rotateTextureWitchScreen; + private int screenRotation; private OrientationHelper orientationHelper; + private int cameraRotation; + private TextureView backgroundRenderer; + private int maxTextureSize; + + private VideoSink parentSink; + + Runnable updateScreenRunnable; + + public void setBackgroundRenderer(TextureView backgroundRenderer) { + this.backgroundRenderer = backgroundRenderer; + backgroundRenderer.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() { + @Override + public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surfaceTexture, int i, int i1) { + createBackgroundSurface(surfaceTexture); + } + + @Override + public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surfaceTexture, int i, int i1) { + + } + + @Override + public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surfaceTexture) { + ThreadUtils.checkIsOnMainThread(); + eglRenderer.releaseEglSurface(null, true); + return false; + } + + @Override + public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surfaceTexture) { + + } + }); + } + + public void clearFirstFrame() { + eglRenderer.firstFrameRendered = false; + eglRenderer.isFirstFrameRendered = false; + } public static class TextureEglRenderer extends EglRenderer implements TextureView.SurfaceTextureListener { private static final String TAG = "TextureEglRenderer"; @@ -135,7 +183,7 @@ public class TextureViewRenderer extends TextureView public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) { ThreadUtils.checkIsOnMainThread(); final CountDownLatch completionLatch = new CountDownLatch(1); - releaseEglSurface(completionLatch::countDown); + 
releaseEglSurface(completionLatch::countDown, false); ThreadUtils.awaitUninterruptibly(completionLatch); return true; } @@ -162,6 +210,7 @@ public class TextureViewRenderer extends TextureView } rotatedFrameWidth = frame.getRotatedWidth(); rotatedFrameHeight = frame.getRotatedHeight(); + frameRotation = frame.getRotation(); } } @@ -234,13 +283,16 @@ public class TextureViewRenderer extends TextureView * It should be lightweight and must not call removeFrameListener. * @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is * required. - * @param drawer Custom drawer to use for this frame listener. */ public void addFrameListener( EglRenderer.FrameListener listener, float scale, RendererCommon.GlDrawer drawerParam) { eglRenderer.addFrameListener(listener, scale, drawerParam); } + public void getRenderBufferBitmap(GlGenericDrawer.TextureCallback callback) { + eglRenderer.getTexture(callback); + } + /** * Register a callback to be invoked when a new video frame has been received. This version uses * the drawer of the EglRenderer that was passed in init. @@ -264,11 +316,13 @@ public class TextureViewRenderer extends TextureView orientationHelper = new OrientationHelper() { @Override protected void onOrientationUpdate(int orientation) { - updateRotation(); + if (!isCamera) { + updateRotation(); + } } }; orientationHelper.start(); - } + } } /** @@ -324,7 +378,16 @@ public class TextureViewRenderer extends TextureView * Set if the video stream should be mirrored or not. 
*/ public void setMirror(final boolean mirror) { - eglRenderer.setMirror(mirror); + if (this.mirror != mirror) { + this.mirror = mirror; + if (rotateTextureWitchScreen) { + onRotationChanged(); + } else { + eglRenderer.setMirror(mirror); + } + updateSurfaceSize(); + requestLayout(); + } } /** @@ -371,18 +434,20 @@ public class TextureViewRenderer extends TextureView @Override protected void onMeasure(int widthSpec, int heightSpec) { ThreadUtils.checkIsOnMainThread(); - Point size = videoLayoutMeasure.measure(isCamera, widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight); - setMeasuredDimension(size.x, size.y); - if (!isCamera) { - updateRotation(); + if (!isCamera && rotateTextureWitchScreen) { + updateVideoSizes(); + } + Point size; + if (maxTextureSize > 0) { + size = videoLayoutMeasure.measure(isCamera, MeasureSpec.makeMeasureSpec(Math.min(maxTextureSize, MeasureSpec.getSize(widthSpec)), MeasureSpec.getMode(widthSpec)), MeasureSpec.makeMeasureSpec(Math.min(maxTextureSize, MeasureSpec.getSize(heightSpec)), MeasureSpec.getMode(heightSpec)), rotatedFrameWidth, rotatedFrameHeight); + } else { + size = videoLayoutMeasure.measure(isCamera, widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight); + } + setMeasuredDimension(size.x, size.y); + // logD("onMeasure(). New size: " + size.x + "x" + size.y); + if (rotatedFrameWidth != 0 && rotatedFrameHeight != 0) { + eglRenderer.setLayoutAspectRatio(getMeasuredWidth() / (float) getMeasuredHeight()); } - logD("onMeasure(). 
New size: " + size.x + "x" + size.y); - } - - @Override - protected void onLayout(boolean changed, int left, int top, int right, int bottom) { - ThreadUtils.checkIsOnMainThread(); - eglRenderer.setLayoutAspectRatio((right - left) / (float) (bottom - top)); updateSurfaceSize(); } @@ -435,6 +500,11 @@ public class TextureViewRenderer extends TextureView @Override public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) { + if (parentSink instanceof VoIPService.ProxyVideoSink) { + VoIPService.ProxyVideoSink proxyVideoSink = (VoIPService.ProxyVideoSink) parentSink; + proxyVideoSink.removeTarget(this); + proxyVideoSink.removeBackground(this); + } eglRenderer.onSurfaceTextureDestroyed(surfaceTexture); return true; } @@ -457,6 +527,12 @@ public class TextureViewRenderer extends TextureView */ public void clearImage() { eglRenderer.clearImage(); + eglRenderer.isFirstFrameRendered = false; + } + + @Override + public void setParentSink(VideoSink parent) { + parentSink = parent; } @Override @@ -470,34 +546,148 @@ public class TextureViewRenderer extends TextureView return eglRenderer.isFirstFrameRendered; } + int textureRotation; @Override public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) { if (rendererEvents != null) { rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation); } - if (isCamera) { - eglRenderer.setRotation(-OrientationHelper.cameraRotation); + textureRotation = rotation; + int rotatedWidth, rotatedHeight; + + if (rotateTextureWitchScreen) { + if (isCamera) { + onRotationChanged(); + } + if (useCameraRotation) { + rotatedWidth = screenRotation == 0 ? videoHeight : videoWidth; + rotatedHeight = screenRotation == 0 ? videoWidth : videoHeight; + } else { + rotatedWidth = textureRotation == 0 || textureRotation == 180 || textureRotation == -180 ? videoWidth : videoHeight; + rotatedHeight = textureRotation == 0 || textureRotation == 180 || textureRotation == -180 ? 
videoHeight : videoWidth; + } + } else { + if (isCamera) { + eglRenderer.setRotation(-OrientationHelper.cameraRotation); + } + rotation -= OrientationHelper.cameraOrientation; + rotatedWidth = rotation == 0 || rotation == 180 || rotation == -180 ? videoWidth : videoHeight; + rotatedHeight = rotation == 0 || rotation == 180 || rotation == -180? videoHeight : videoWidth; } - int rotatedWidth = rotation == 0 || rotation == 180 ? videoWidth : videoHeight; - int rotatedHeight = rotation == 0 || rotation == 180 ? videoHeight : videoWidth; // run immediately if possible for ui thread tests - postOrRun(() -> { - rotatedFrameWidth = rotatedWidth; - rotatedFrameHeight = rotatedHeight; - updateSurfaceSize(); + synchronized (eglRenderer.layoutLock) { + if (updateScreenRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(updateScreenRunnable); + } + postOrRun(updateScreenRunnable = () -> { + updateScreenRunnable = null; + this.videoWidth = videoWidth; + this.videoHeight = videoHeight; + + rotatedFrameWidth = rotatedWidth; + rotatedFrameHeight = rotatedHeight; + + updateSurfaceSize(); + requestLayout(); + }); + } + } + + public void setScreenRotation(int screenRotation) { + this.screenRotation = screenRotation; + onRotationChanged(); + updateVideoSizes(); + } + + private void updateVideoSizes() { + if (videoHeight != 0 && videoWidth != 0) { + int rotatedWidth; + int rotatedHeight; + if (rotateTextureWitchScreen) { + if (useCameraRotation) { + rotatedWidth = screenRotation == 0 ? videoHeight : videoWidth; + rotatedHeight = screenRotation == 0 ? videoWidth : videoHeight; + } else { + rotatedWidth = textureRotation == 0 || textureRotation == 180 || textureRotation == -180 ? videoWidth : videoHeight; + rotatedHeight = textureRotation == 0 || textureRotation == 180 || textureRotation == -180 ? 
videoHeight : videoWidth; + } + } else { + int rotation = textureRotation; + rotation -= OrientationHelper.cameraOrientation; + rotatedWidth = rotation == 0 || rotation == 180 || rotation == -180 ? videoWidth : videoHeight; + rotatedHeight = rotation == 0 || rotation == 180 || rotation == -180 ? videoHeight : videoWidth; + + } + if (rotatedFrameWidth != rotatedWidth || rotatedFrameHeight != rotatedHeight) { + synchronized (eglRenderer.layoutLock) { + if (updateScreenRunnable != null) { + AndroidUtilities.cancelRunOnUIThread(updateScreenRunnable); + } + postOrRun(updateScreenRunnable = () -> { + updateScreenRunnable = null; + + rotatedFrameWidth = rotatedWidth; + rotatedFrameHeight = rotatedHeight; + + updateSurfaceSize(); + requestLayout(); + }); + } + } + } + } + + public void setRotateTextureWitchScreen(boolean rotateTextureWitchScreen) { + if (this.rotateTextureWitchScreen != rotateTextureWitchScreen) { + this.rotateTextureWitchScreen = rotateTextureWitchScreen; requestLayout(); - }); + } + } + + boolean useCameraRotation; + + public void setUseCameraRotation(boolean useCameraRotation) { + if (this.useCameraRotation != useCameraRotation) { + this.useCameraRotation = useCameraRotation; + onRotationChanged(); + updateVideoSizes(); + } + } + private void onRotationChanged() { + int rotation = useCameraRotation ? OrientationHelper.cameraOrientation : 0; + if (mirror) { + rotation = 360 - rotation; + } + int r = -rotation; + if (useCameraRotation) { + if (screenRotation == 1) { + r += mirror ? 90 : -90; + } else if (screenRotation == 3) { + r += mirror ? 
270 : -270; + } + } + + eglRenderer.setRotation(r); + eglRenderer.setMirror(mirror); } private void postOrRun(Runnable r) { if (Thread.currentThread() == Looper.getMainLooper().getThread()) { r.run(); } else { - post(r); + AndroidUtilities.runOnUIThread(r); } } private void logD(String string) { Logging.d(TAG, resourceName + ": " + string); } + + public void createBackgroundSurface(SurfaceTexture bluSurfaceTexturerRenderer) { + eglRenderer.createBackgroundSurface(bluSurfaceTexturerRenderer); + } + + public void setMaxTextureSize(int maxTextureSize) { + this.maxTextureSize = maxTextureSize; + } } diff --git a/TMessagesProj/src/main/java/org/webrtc/VideoCodecMimeType.java b/TMessagesProj/src/main/java/org/webrtc/VideoCodecMimeType.java index 47f4f955c..1f206476a 100644 --- a/TMessagesProj/src/main/java/org/webrtc/VideoCodecMimeType.java +++ b/TMessagesProj/src/main/java/org/webrtc/VideoCodecMimeType.java @@ -15,7 +15,8 @@ enum VideoCodecMimeType { VP8("video/x-vnd.on2.vp8"), VP9("video/x-vnd.on2.vp9"), H264("video/avc"), - H265("video/hevc"); + H265("video/hevc"), + AV1("video/av01"); private final String mimeType; diff --git a/TMessagesProj/src/main/java/org/webrtc/VideoFrameDrawer.java b/TMessagesProj/src/main/java/org/webrtc/VideoFrameDrawer.java index 1563c88cc..89fe33c3d 100644 --- a/TMessagesProj/src/main/java/org/webrtc/VideoFrameDrawer.java +++ b/TMessagesProj/src/main/java/org/webrtc/VideoFrameDrawer.java @@ -30,25 +30,31 @@ public class VideoFrameDrawer { * transformationMatrix) */ public static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer, - Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY, - int viewportWidth, int viewportHeight) { + Matrix renderMatrix, int rotatedWidth, int rotatedHeight, int frameWidth, int frameHeight, int viewportX, int viewportY, + int viewportWidth, int viewportHeight, boolean blur) { Matrix finalMatrix = new Matrix(buffer.getTransformMatrix()); 
finalMatrix.preConcat(renderMatrix); float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix); switch (buffer.getType()) { case OES: - drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX, - viewportY, viewportWidth, viewportHeight); + drawer.drawOes(buffer.getTextureId(), buffer.getWidth(), buffer.getHeight(), rotatedWidth, rotatedHeight, finalGlMatrix, frameWidth, frameHeight, viewportX, + viewportY, viewportWidth, viewportHeight, blur); break; case RGB: - drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX, - viewportY, viewportWidth, viewportHeight); + drawer.drawRgb(buffer.getTextureId(), buffer.getWidth(), buffer.getHeight(), rotatedWidth, rotatedHeight, finalGlMatrix, frameWidth, frameHeight, viewportX, + viewportY, viewportWidth, viewportHeight, blur); break; default: throw new RuntimeException("Unknown texture type."); } } + public void getRenderBufferBitmap(RendererCommon.GlDrawer drawer, int rotation, GlGenericDrawer.TextureCallback callback) { + if (drawer instanceof GlGenericDrawer) { + ((GlGenericDrawer) drawer).getRenderBufferBitmap(rotation, callback); + } + } + /** * Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies. @@ -160,7 +166,7 @@ public class VideoFrameDrawer { // Multiply with the width and height to get the positions in terms of pixels. for (int i = 0; i < 3; ++i) { - dstPoints[i * 2 + 0] *= frameWidth; + dstPoints[i * 2] *= frameWidth; dstPoints[i * 2 + 1] *= frameHeight; } @@ -174,6 +180,7 @@ public class VideoFrameDrawer { // textures. 
@Nullable private VideoFrame lastI420Frame; private final Matrix renderMatrix = new Matrix(); + private final Matrix renderRotateMatrix = new Matrix(); public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) { drawFrame(frame, drawer, null /* additionalRenderMatrix */); @@ -182,12 +189,12 @@ public class VideoFrameDrawer { public void drawFrame( VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) { drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */, - frame.getRotatedWidth(), frame.getRotatedHeight(), false); + frame.getRotatedWidth(), frame.getRotatedHeight(), false, false); } public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer, @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth, - int viewportHeight, boolean rotate) { + int viewportHeight, boolean rotate, boolean blur) { final int width = rotate ? frame.getRotatedHeight() : frame.getRotatedWidth(); final int height = rotate ? frame.getRotatedWidth() : frame.getRotatedHeight(); calculateTransformedRenderSize(width, height, additionalRenderMatrix); @@ -204,14 +211,15 @@ public class VideoFrameDrawer { } renderMatrix.preRotate(frame.getRotation()); renderMatrix.preTranslate(-0.5f, -0.5f); + renderRotateMatrix.set(renderMatrix); if (additionalRenderMatrix != null) { renderMatrix.preConcat(additionalRenderMatrix); } if (isTextureFrame) { lastI420Frame = null; - drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth, - renderHeight, viewportX, viewportY, viewportWidth, viewportHeight); + drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, frame.getRotatedWidth(), frame.getRotatedHeight(), renderWidth, + renderHeight, viewportX, viewportY, viewportWidth, viewportHeight, blur); } else { // Only upload the I420 data to textures once per frame, if we are called multiple times // with the same frame. 
@@ -222,9 +230,9 @@ public class VideoFrameDrawer { i420Buffer.release(); } - drawer.drawYuv(yuvUploader.getYuvTextures(), + drawer.drawYuv(yuvUploader.getYuvTextures(), frame.getBuffer().getWidth(), frame.getBuffer().getHeight(), frame.getRotatedWidth(), frame.getRotatedHeight(), RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth, - renderHeight, viewportX, viewportY, viewportWidth, viewportHeight); + renderHeight, viewportX, viewportY, viewportWidth, viewportHeight, blur); } } diff --git a/TMessagesProj/src/main/java/org/webrtc/VideoSink.java b/TMessagesProj/src/main/java/org/webrtc/VideoSink.java index 5a0a6c719..cff0d83dd 100644 --- a/TMessagesProj/src/main/java/org/webrtc/VideoSink.java +++ b/TMessagesProj/src/main/java/org/webrtc/VideoSink.java @@ -20,4 +20,8 @@ public interface VideoSink { * when the reference is no longer needed. */ @CalledByNative void onFrame(VideoFrame frame); + + default void setParentSink(VideoSink parent) { + + } } diff --git a/TMessagesProj/src/main/java/org/webrtc/YuvConverter.java b/TMessagesProj/src/main/java/org/webrtc/YuvConverter.java index 0e2d5055f..cb33d2f53 100644 --- a/TMessagesProj/src/main/java/org/webrtc/YuvConverter.java +++ b/TMessagesProj/src/main/java/org/webrtc/YuvConverter.java @@ -13,6 +13,8 @@ package org.webrtc; import android.graphics.Matrix; import android.opengl.GLES20; import java.nio.ByteBuffer; + +import org.telegram.messenger.FileLog; import org.webrtc.VideoFrame.I420Buffer; import org.webrtc.VideoFrame.TextureBuffer; @@ -173,37 +175,41 @@ public class YuvConverter { renderMatrix.preScale(1f, -1f); renderMatrix.preTranslate(-0.5f, -0.5f); - i420TextureFrameBuffer.setSize(viewportWidth, totalHeight); + try { + i420TextureFrameBuffer.setSize(viewportWidth, totalHeight); - // Bind our framebuffer. - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, i420TextureFrameBuffer.getFrameBufferId()); - GlUtil.checkNoGLES2Error("glBindFramebuffer"); + // Bind our framebuffer. 
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, i420TextureFrameBuffer.getFrameBufferId()); + GlUtil.checkNoGLES2Error("glBindFramebuffer"); - // Draw Y. - shaderCallbacks.setPlaneY(); - VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight, - /* viewportX= */ 0, /* viewportY= */ 0, viewportWidth, - /* viewportHeight= */ frameHeight); + // Draw Y. + shaderCallbacks.setPlaneY(); + VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight, frameWidth, frameHeight, + /* viewportX= */ 0, /* viewportY= */ 0, viewportWidth, + /* viewportHeight= */ frameHeight, false); - // Draw U. - shaderCallbacks.setPlaneU(); - VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight, - /* viewportX= */ 0, /* viewportY= */ frameHeight, viewportWidth / 2, - /* viewportHeight= */ uvHeight); + // Draw U. + shaderCallbacks.setPlaneU(); + VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight, frameWidth, frameHeight, + /* viewportX= */ 0, /* viewportY= */ frameHeight, viewportWidth / 2, + /* viewportHeight= */ uvHeight, false); - // Draw V. - shaderCallbacks.setPlaneV(); - VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight, - /* viewportX= */ viewportWidth / 2, /* viewportY= */ frameHeight, viewportWidth / 2, - /* viewportHeight= */ uvHeight); + // Draw V. 
+ shaderCallbacks.setPlaneV(); + VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight, frameWidth, frameHeight, + /* viewportX= */ viewportWidth / 2, /* viewportY= */ frameHeight, viewportWidth / 2, + /* viewportHeight= */ uvHeight, false); - GLES20.glReadPixels(0, 0, i420TextureFrameBuffer.getWidth(), i420TextureFrameBuffer.getHeight(), - GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, i420ByteBuffer); + GLES20.glReadPixels(0, 0, i420TextureFrameBuffer.getWidth(), i420TextureFrameBuffer.getHeight(), + GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, i420ByteBuffer); - GlUtil.checkNoGLES2Error("YuvConverter.convert"); + GlUtil.checkNoGLES2Error("YuvConverter.convert"); - // Restore normal framebuffer. - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + // Restore normal framebuffer. + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + } catch (Exception e) { + FileLog.e(e); + } // Prepare Y, U, and V ByteBuffer slices. final int yPos = 0; diff --git a/TMessagesProj/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java b/TMessagesProj/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java index 08edde3f6..ae5858a70 100644 --- a/TMessagesProj/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/TMessagesProj/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java @@ -49,12 +49,14 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { private boolean useStereoInput; private boolean useStereoOutput; private AudioAttributes audioAttributes; + private boolean useLowLatency; private Builder(Context context) { this.context = context; this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); this.inputSampleRate = WebRtcAudioManager.getSampleRate(audioManager); this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager); + this.useLowLatency = false; } public Builder setScheduler(ScheduledExecutorService scheduler) { @@ -195,6 +197,14 @@ public class JavaAudioDeviceModule 
implements AudioDeviceModule { return this; } + /** + * Control if the low-latency mode should be used. The default is disabled. + */ + public Builder setUseLowLatency(boolean useLowLatency) { + this.useLowLatency = useLowLatency; + return this; + } + /** * Set custom {@link AudioAttributes} to use. */ @@ -225,6 +235,12 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { } Logging.d(TAG, "HW AEC will not be used."); } + // Low-latency mode was introduced in API version 26, see + // https://developer.android.com/reference/android/media/AudioTrack#PERFORMANCE_MODE_LOW_LATENCY + final int MIN_LOW_LATENCY_SDK_VERSION = 26; + if (useLowLatency && Build.VERSION.SDK_INT >= MIN_LOW_LATENCY_SDK_VERSION) { + Logging.d(TAG, "Low latency mode will be used."); + } ScheduledExecutorService executor = this.scheduler; if (executor == null) { executor = WebRtcAudioRecord.newDefaultScheduler(); @@ -232,8 +248,8 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); - final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack( - context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback); + final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, + audioAttributes, audioTrackErrorCallback, audioTrackStateCallback, useLowLatency); return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput); } diff --git a/TMessagesProj/src/main/java/org/webrtc/audio/LowLatencyAudioBufferManager.java b/TMessagesProj/src/main/java/org/webrtc/audio/LowLatencyAudioBufferManager.java new file mode 100644 index 000000000..70c625ab4 --- /dev/null +++ 
b/TMessagesProj/src/main/java/org/webrtc/audio/LowLatencyAudioBufferManager.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.audio; + +import android.media.AudioTrack; +import android.os.Build; +import org.webrtc.Logging; + +// Lowers the buffer size if no underruns are detected for 100 ms. Once an +// underrun is detected, the buffer size is increased by 10 ms and it will not +// be lowered further. The buffer size will never be increased more than +// 5 times, to avoid the possibility of the buffer size increasing without +// bounds. +class LowLatencyAudioBufferManager { + private static final String TAG = "LowLatencyAudioBufferManager"; + // The underrun count that was valid during the previous call to maybeAdjustBufferSize(). Used to + // detect increases in the value. + private int prevUnderrunCount; + // The number of ticks to wait without an underrun before decreasing the buffer size. + private int ticksUntilNextDecrease; + // Indicate if we should continue to decrease the buffer size. + private boolean keepLoweringBufferSize; + // How often the buffer size was increased. + private int bufferIncreaseCounter; + + public LowLatencyAudioBufferManager() { + this.prevUnderrunCount = 0; + this.ticksUntilNextDecrease = 10; + this.keepLoweringBufferSize = true; + this.bufferIncreaseCounter = 0; + } + + public void maybeAdjustBufferSize(AudioTrack audioTrack) { + if (Build.VERSION.SDK_INT >= 26) { + final int underrunCount = audioTrack.getUnderrunCount(); + if (underrunCount > prevUnderrunCount) { + // Don't increase buffer more than 5 times. 
Continuing to increase the buffer size + // could be harmful on low-power devices that regularly experience underruns under + // normal conditions. + if (bufferIncreaseCounter < 5) { + // Underrun detected, increase buffer size by 10ms. + final int currentBufferSize = audioTrack.getBufferSizeInFrames(); + final int newBufferSize = currentBufferSize + audioTrack.getPlaybackRate() / 100; + Logging.d(TAG, + "Underrun detected! Increasing AudioTrack buffer size from " + currentBufferSize + + " to " + newBufferSize); + audioTrack.setBufferSizeInFrames(newBufferSize); + bufferIncreaseCounter++; + } + // Stop trying to lower the buffer size. + keepLoweringBufferSize = false; + prevUnderrunCount = underrunCount; + ticksUntilNextDecrease = 10; + } else if (keepLoweringBufferSize) { + ticksUntilNextDecrease--; + if (ticksUntilNextDecrease <= 0) { + // No underrun seen for 100 ms, try to lower the buffer size by 10ms. + final int bufferSize10ms = audioTrack.getPlaybackRate() / 100; + // Never go below a buffer size of 10ms. 
+ final int currentBufferSize = audioTrack.getBufferSizeInFrames(); + final int newBufferSize = Math.max(bufferSize10ms, currentBufferSize - bufferSize10ms); + if (newBufferSize != currentBufferSize) { + Logging.d(TAG, + "Lowering AudioTrack buffer size from " + currentBufferSize + " to " + + newBufferSize); + audioTrack.setBufferSizeInFrames(newBufferSize); + } + ticksUntilNextDecrease = 10; + } + } + } + } +} diff --git a/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java b/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java index 196346fb6..fc1bb1108 100644 --- a/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -19,7 +19,6 @@ import android.media.AudioTrack; import android.os.Build; import android.os.Process; import androidx.annotation.Nullable; -import java.lang.Thread; import java.nio.ByteBuffer; import org.webrtc.CalledByNative; import org.webrtc.Logging; @@ -27,6 +26,7 @@ import org.webrtc.ThreadUtils; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback; +import org.webrtc.audio.LowLatencyAudioBufferManager; class WebRtcAudioTrack { private static final String TAG = "WebRtcAudioTrackExternal"; @@ -80,6 +80,8 @@ class WebRtcAudioTrack { // Can be used to ensure that the speaker is fully muted. 
private volatile boolean speakerMute; private byte[] emptyBytes; + private boolean useLowLatency; + private int initialBufferSizeInFrames; private final @Nullable AudioTrackErrorCallback errorCallback; private final @Nullable AudioTrackStateCallback stateCallback; @@ -92,9 +94,11 @@ class WebRtcAudioTrack { */ private class AudioTrackThread extends Thread { private volatile boolean keepAlive = true; + private LowLatencyAudioBufferManager bufferManager; public AudioTrackThread(String name) { super(name); + bufferManager = new LowLatencyAudioBufferManager(); } @Override @@ -134,6 +138,9 @@ class WebRtcAudioTrack { reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten); } } + if (useLowLatency) { + bufferManager.maybeAdjustBufferSize(audioTrack); + } // The byte buffer must be rewinded since byteBuffer.position() is // increased at each call to AudioTrack.write(). If we don't do this, // next call to AudioTrack.write() will fail. @@ -164,12 +171,12 @@ class WebRtcAudioTrack { @CalledByNative WebRtcAudioTrack(Context context, AudioManager audioManager) { this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, - null /* stateCallback */); + null /* stateCallback */, false /* useLowLatency */); } WebRtcAudioTrack(Context context, AudioManager audioManager, @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, - @Nullable AudioTrackStateCallback stateCallback) { + @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency) { threadChecker.detachThread(); this.context = context; this.audioManager = audioManager; @@ -177,6 +184,7 @@ class WebRtcAudioTrack { this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.volumeLogger = new VolumeLogger(audioManager); + this.useLowLatency = useLowLatency; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); } @@ -218,6 +226,13 @@ class WebRtcAudioTrack { return -1; } + // Don't use low-latency mode when a 
bufferSizeFactor > 1 is used. When bufferSizeFactor > 1 + // we want to use a larger buffer to prevent underruns. However, low-latency mode would + // decrease the buffer size, which makes the bufferSizeFactor have no effect. + if (bufferSizeFactor > 1.0) { + useLowLatency = false; + } + // Ensure that prevision audio session was stopped correctly before trying // to create a new AudioTrack. if (audioTrack != null) { @@ -228,7 +243,11 @@ class WebRtcAudioTrack { // Create an AudioTrack object and initialize its associated audio buffer. // The size of this buffer determines how long an AudioTrack can play // before running out of data. - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + // On API level 26 or higher, we can use a low latency mode. + audioTrack = createAudioTrackOnOreoOrHigher( + sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes); + } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { // If we are on API level 21 or higher, it is possible to use a special AudioTrack // constructor that uses AudioAttributes and AudioFormat as input. It allows us to // supersede the notion of stream types for defining the behavior of audio playback, @@ -255,6 +274,11 @@ class WebRtcAudioTrack { releaseAudioResources(); return -1; } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + initialBufferSizeInFrames = audioTrack.getBufferSizeInFrames(); + } else { + initialBufferSizeInFrames = -1; + } logMainParameters(); logMainParametersExtended(); return minBufferSizeInBytes; @@ -382,22 +406,16 @@ class WebRtcAudioTrack { + "max gain: " + AudioTrack.getMaxVolume()); } - // Creates and AudioTrack instance using AudioAttributes and AudioFormat as input. - // It allows certain platforms or routing policies to use this information for more - // refined volume or routing decisions. 
- @TargetApi(Build.VERSION_CODES.LOLLIPOP) - private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz, - int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) { - Logging.d(TAG, "createAudioTrackOnLollipopOrHigher"); - // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control - // performance when Android O is supported. Add some logging in the mean time. + private static void logNativeOutputSampleRate(int requestedSampleRateInHz) { final int nativeOutputSampleRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL); Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate); - if (sampleRateInHz != nativeOutputSampleRate) { + if (requestedSampleRateInHz != nativeOutputSampleRate) { Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native"); } + } + private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes overrideAttributes) { AudioAttributes.Builder attributesBuilder = new AudioAttributes.Builder() .setUsage(DEFAULT_USAGE) @@ -411,12 +429,26 @@ class WebRtcAudioTrack { attributesBuilder.setContentType(overrideAttributes.getContentType()); } - attributesBuilder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy()) - .setFlags(overrideAttributes.getFlags()); + attributesBuilder.setFlags(overrideAttributes.getFlags()); + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + attributesBuilder = applyAttributesOnQOrHigher(attributesBuilder, overrideAttributes); + } } + return attributesBuilder.build(); + } + + // Creates and AudioTrack instance using AudioAttributes and AudioFormat as input. + // It allows certain platforms or routing policies to use this information for more + // refined volume or routing decisions. 
+ @TargetApi(Build.VERSION_CODES.LOLLIPOP) + private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz, + int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) { + Logging.d(TAG, "createAudioTrackOnLollipopOrHigher"); + logNativeOutputSampleRate(sampleRateInHz); // Create an audio track where the audio usage is for VoIP and the content type is speech. - return new AudioTrack(attributesBuilder.build(), + return new AudioTrack(getAudioAttributes(overrideAttributes), new AudioFormat.Builder() .setEncoding(AudioFormat.ENCODING_PCM_16BIT) .setSampleRate(sampleRateInHz) @@ -425,6 +457,38 @@ class WebRtcAudioTrack { bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE); } + // Creates and AudioTrack instance using AudioAttributes and AudioFormat as input. + // Use the low-latency mode to improve audio latency. Note that the low-latency mode may + // prevent effects (such as AEC) from working. Assuming AEC is working, the delay changes + // that happen in low-latency mode during the call will cause the AEC to perform worse. + // The behavior of the low-latency mode may be device dependent, use at your own risk. + @TargetApi(Build.VERSION_CODES.O) + private static AudioTrack createAudioTrackOnOreoOrHigher(int sampleRateInHz, int channelConfig, + int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) { + Logging.d(TAG, "createAudioTrackOnOreoOrHigher"); + logNativeOutputSampleRate(sampleRateInHz); + + // Create an audio track where the audio usage is for VoIP and the content type is speech. 
+ return new AudioTrack.Builder() + .setAudioAttributes(getAudioAttributes(overrideAttributes)) + .setAudioFormat(new AudioFormat.Builder() + .setEncoding(AudioFormat.ENCODING_PCM_16BIT) + .setSampleRate(sampleRateInHz) + .setChannelMask(channelConfig) + .build()) + .setBufferSizeInBytes(bufferSizeInBytes) + .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY) + .setTransferMode(AudioTrack.MODE_STREAM) + .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE) + .build(); + } + + @TargetApi(Build.VERSION_CODES.Q) + private static AudioAttributes.Builder applyAttributesOnQOrHigher( + AudioAttributes.Builder builder, AudioAttributes overrideAttributes) { + return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy()); + } + @SuppressWarnings("deprecation") // Deprecated in API level 25. private static AudioTrack createAudioTrackOnLowerThanLollipop( int sampleRateInHz, int channelConfig, int bufferSizeInBytes) { @@ -449,6 +513,11 @@ class WebRtcAudioTrack { return -1; } + @CalledByNative + private int getInitialBufferSizeInFrames() { + return initialBufferSizeInFrames; + } + private void logBufferCapacityInFrames() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { Logging.d(TAG, diff --git a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java index 60d087511..dfe8dc849 100644 --- a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java +++ b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioTrack.java @@ -153,6 +153,7 @@ public class WebRtcAudioTrack { try { nativeGetPlayoutData(sizeInBytes, nativeAudioTrack); } catch (Throwable e) { + keepAlive = false; continue; } // Write data until all data has been written to the audio sink. 
diff --git a/TMessagesProj/src/main/res/drawable-hdpi/background_hd.jpg b/TMessagesProj/src/main/res/drawable-hdpi/background_hd.jpg deleted file mode 100644 index b21f280ba..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/background_hd.jpg and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/bg_rotate_large.png b/TMessagesProj/src/main/res/drawable-hdpi/bg_rotate_large.png new file mode 100644 index 000000000..15833be6d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/bg_rotate_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/corner_in_bl.png b/TMessagesProj/src/main/res/drawable-hdpi/corner_in_bl.png deleted file mode 100755 index 3216380b1..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/corner_in_bl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/corner_in_br.png b/TMessagesProj/src/main/res/drawable-hdpi/corner_in_br.png deleted file mode 100755 index c45255775..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/corner_in_br.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/corner_in_tl.png b/TMessagesProj/src/main/res/drawable-hdpi/corner_in_tl.png deleted file mode 100755 index d51b8bd28..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/corner_in_tl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/corner_in_tr.png b/TMessagesProj/src/main/res/drawable-hdpi/corner_in_tr.png deleted file mode 100755 index ea708a3ea..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/corner_in_tr.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/corner_out_bl.png b/TMessagesProj/src/main/res/drawable-hdpi/corner_out_bl.png deleted file mode 100755 index d3c36763f..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/corner_out_bl.png and /dev/null differ diff --git 
a/TMessagesProj/src/main/res/drawable-hdpi/corner_out_br.png b/TMessagesProj/src/main/res/drawable-hdpi/corner_out_br.png deleted file mode 100755 index a314e7886..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/corner_out_br.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/corner_out_tl.png b/TMessagesProj/src/main/res/drawable-hdpi/corner_out_tl.png deleted file mode 100755 index 0803c64a8..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/corner_out_tl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/corner_out_tr.png b/TMessagesProj/src/main/res/drawable-hdpi/corner_out_tr.png deleted file mode 100755 index 5c21ae517..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/corner_out_tr.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_minimize.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_minimize.png new file mode 100644 index 000000000..144563a28 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_minimize.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_pin.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_pin.png new file mode 100644 index 000000000..324b74ba9 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_pin.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_unpin.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_unpin.png new file mode 100644 index 000000000..40e27cf9d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_calls_unpin.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_noise_off.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_noise_off.png new file mode 100644 index 000000000..1732af939 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_noise_off.png differ diff --git 
a/TMessagesProj/src/main/res/drawable-hdpi/msg_noise_on.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_noise_on.png new file mode 100644 index 000000000..827c22e48 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_noise_on.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_pin_filled.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_pin_filled.png new file mode 100644 index 000000000..5f02413c5 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_pin_filled.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_screencast.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_screencast.png new file mode 100644 index 000000000..be4b41d9d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_screencast.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_screencast_off.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_screencast_off.png new file mode 100644 index 000000000..e4e497e79 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_screencast_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_sendfile.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_sendfile.png new file mode 100644 index 000000000..d04c56d8c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_sendfile.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_share_filled.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_share_filled.png index 074f6e208..5e70648b7 100644 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/msg_share_filled.png and b/TMessagesProj/src/main/res/drawable-hdpi/msg_share_filled.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_bluetooth.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_bluetooth.png new file mode 100644 index 000000000..38107aa68 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_bluetooth.png 
differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_headphones.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_headphones.png new file mode 100644 index 000000000..b36c5e075 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_headphones.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_phone.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_phone.png new file mode 100644 index 000000000..8e900055c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_phone.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_speaker.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_speaker.png new file mode 100644 index 000000000..14c3ff992 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_voice_speaker.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/screencast_big.png b/TMessagesProj/src/main/res/drawable-hdpi/screencast_big.png new file mode 100644 index 000000000..b8bdd80a2 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/screencast_big.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/system_loader.png b/TMessagesProj/src/main/res/drawable-hdpi/system_loader.png deleted file mode 100755 index e22e72c70..000000000 Binary files a/TMessagesProj/src/main/res/drawable-hdpi/system_loader.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/themes_addcolor.png b/TMessagesProj/src/main/res/drawable-hdpi/themes_addcolor.png new file mode 100644 index 000000000..7ec146e29 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/themes_addcolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/themes_deletecolor.png b/TMessagesProj/src/main/res/drawable-hdpi/themes_deletecolor.png new file mode 100644 index 000000000..99e41157b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/themes_deletecolor.png 
differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/themes_swapcolor.png b/TMessagesProj/src/main/res/drawable-hdpi/themes_swapcolor.png new file mode 100644 index 000000000..d6320af0e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/themes_swapcolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/voicechat_screencast.png b/TMessagesProj/src/main/res/drawable-hdpi/voicechat_screencast.png new file mode 100644 index 000000000..520cd426f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/voicechat_screencast.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/background_hd.jpg b/TMessagesProj/src/main/res/drawable-mdpi/background_hd.jpg deleted file mode 100644 index dd7ca0176..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/background_hd.jpg and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/bg_rotate_large.png b/TMessagesProj/src/main/res/drawable-mdpi/bg_rotate_large.png new file mode 100644 index 000000000..740b8e677 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/bg_rotate_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/corner_in_bl.png b/TMessagesProj/src/main/res/drawable-mdpi/corner_in_bl.png deleted file mode 100755 index 6dd5135e4..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/corner_in_bl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/corner_in_br.png b/TMessagesProj/src/main/res/drawable-mdpi/corner_in_br.png deleted file mode 100755 index 46f6c1f7d..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/corner_in_br.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/corner_in_tl.png b/TMessagesProj/src/main/res/drawable-mdpi/corner_in_tl.png deleted file mode 100755 index 2de60d50f..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/corner_in_tl.png and /dev/null differ diff --git 
a/TMessagesProj/src/main/res/drawable-mdpi/corner_in_tr.png b/TMessagesProj/src/main/res/drawable-mdpi/corner_in_tr.png deleted file mode 100755 index 6976e6c42..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/corner_in_tr.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/corner_out_bl.png b/TMessagesProj/src/main/res/drawable-mdpi/corner_out_bl.png deleted file mode 100755 index 66ee2a578..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/corner_out_bl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/corner_out_br.png b/TMessagesProj/src/main/res/drawable-mdpi/corner_out_br.png deleted file mode 100755 index 081baba38..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/corner_out_br.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/corner_out_tl.png b/TMessagesProj/src/main/res/drawable-mdpi/corner_out_tl.png deleted file mode 100755 index a55c0f20f..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/corner_out_tl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/corner_out_tr.png b/TMessagesProj/src/main/res/drawable-mdpi/corner_out_tr.png deleted file mode 100755 index 8720eeb3b..000000000 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/corner_out_tr.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_minimize.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_minimize.png new file mode 100644 index 000000000..31b6960b9 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_minimize.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_pin.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_pin.png new file mode 100644 index 000000000..6afc609fb Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_pin.png differ diff --git 
a/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_unpin.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_unpin.png new file mode 100644 index 000000000..b1c5f67ba Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_calls_unpin.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_noise_off.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_noise_off.png new file mode 100644 index 000000000..ba8373447 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_noise_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_noise_on.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_noise_on.png new file mode 100644 index 000000000..d40136697 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_noise_on.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_pin_filled.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_pin_filled.png new file mode 100644 index 000000000..c6f98f7be Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_pin_filled.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_screencast.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_screencast.png new file mode 100644 index 000000000..acf7039f6 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_screencast.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_screencast_off.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_screencast_off.png new file mode 100644 index 000000000..acfad0d34 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_screencast_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_sendfile.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_sendfile.png new file mode 100644 index 000000000..60d059be8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_sendfile.png differ diff --git 
a/TMessagesProj/src/main/res/drawable-mdpi/msg_share_filled.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_share_filled.png index fbaf8442b..61b009aea 100644 Binary files a/TMessagesProj/src/main/res/drawable-mdpi/msg_share_filled.png and b/TMessagesProj/src/main/res/drawable-mdpi/msg_share_filled.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_bluetooth.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_bluetooth.png new file mode 100644 index 000000000..137ac98f8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_bluetooth.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_headphones.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_headphones.png new file mode 100644 index 000000000..00d51d260 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_headphones.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_phone.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_phone.png new file mode 100644 index 000000000..9433e1845 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_phone.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_speaker.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_speaker.png new file mode 100644 index 000000000..95277fe3d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_voice_speaker.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/screencast_big.png b/TMessagesProj/src/main/res/drawable-mdpi/screencast_big.png new file mode 100644 index 000000000..a84d92028 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/screencast_big.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/system_loader.png b/TMessagesProj/src/main/res/drawable-mdpi/system_loader.png deleted file mode 100755 index c02f0fcc0..000000000 Binary files 
a/TMessagesProj/src/main/res/drawable-mdpi/system_loader.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/themes_addcolor.png b/TMessagesProj/src/main/res/drawable-mdpi/themes_addcolor.png new file mode 100644 index 000000000..37e045e1f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/themes_addcolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/themes_deletecolor.png b/TMessagesProj/src/main/res/drawable-mdpi/themes_deletecolor.png new file mode 100644 index 000000000..147bba895 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/themes_deletecolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/themes_swapcolor.png b/TMessagesProj/src/main/res/drawable-mdpi/themes_swapcolor.png new file mode 100644 index 000000000..cdf3380d5 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/themes_swapcolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/voicechat_screencast.png b/TMessagesProj/src/main/res/drawable-mdpi/voicechat_screencast.png new file mode 100644 index 000000000..02d2b7234 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/voicechat_screencast.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/background_hd.jpg b/TMessagesProj/src/main/res/drawable-xhdpi/background_hd.jpg deleted file mode 100644 index 66c14324f..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/background_hd.jpg and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/bg_rotate_large.png b/TMessagesProj/src/main/res/drawable-xhdpi/bg_rotate_large.png new file mode 100644 index 000000000..eb7f1890a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/bg_rotate_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_bl.png b/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_bl.png deleted file mode 100755 index d43d37e08..000000000 Binary 
files a/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_bl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_br.png b/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_br.png deleted file mode 100755 index 1820d84b4..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_br.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_tl.png b/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_tl.png deleted file mode 100755 index f5434afe5..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_tl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_tr.png b/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_tr.png deleted file mode 100755 index dfd156b2a..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/corner_in_tr.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_bl.png b/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_bl.png deleted file mode 100755 index d839cf8c8..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_bl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_br.png b/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_br.png deleted file mode 100755 index ccc4b29db..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_br.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_tl.png b/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_tl.png deleted file mode 100755 index 46e5690c0..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_tl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_tr.png b/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_tr.png deleted file mode 100755 index 1eb986546..000000000 Binary files 
a/TMessagesProj/src/main/res/drawable-xhdpi/corner_out_tr.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_minimize.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_minimize.png new file mode 100644 index 000000000..a5e3e8b5b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_minimize.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_pin.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_pin.png new file mode 100644 index 000000000..e79778c78 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_pin.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_unpin.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_unpin.png new file mode 100644 index 000000000..0217717b9 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_calls_unpin.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_noise_off.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_noise_off.png new file mode 100644 index 000000000..112c073d8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_noise_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_noise_on.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_noise_on.png new file mode 100644 index 000000000..adef0e890 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_noise_on.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_pin_filled.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_pin_filled.png new file mode 100644 index 000000000..603efc89a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_pin_filled.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_screencast.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_screencast.png new file mode 100644 index 000000000..d1cfbb249 Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-xhdpi/msg_screencast.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_screencast_off.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_screencast_off.png new file mode 100644 index 000000000..137e6c217 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_screencast_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_sendfile.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_sendfile.png new file mode 100644 index 000000000..15937deba Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_sendfile.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_share_filled.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_share_filled.png index a6cf62444..a7347fd35 100644 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/msg_share_filled.png and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_share_filled.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_bluetooth.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_bluetooth.png new file mode 100644 index 000000000..008c73a8e Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_bluetooth.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_headphones.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_headphones.png new file mode 100644 index 000000000..b648fee77 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_headphones.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_phone.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_phone.png new file mode 100644 index 000000000..cb7bbe9a1 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_phone.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_speaker.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_speaker.png new 
file mode 100644 index 000000000..64da766a8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_voice_speaker.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/screencast_big.png b/TMessagesProj/src/main/res/drawable-xhdpi/screencast_big.png new file mode 100644 index 000000000..b9c3b78aa Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/screencast_big.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/system_loader.png b/TMessagesProj/src/main/res/drawable-xhdpi/system_loader.png deleted file mode 100755 index 71485eb94..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xhdpi/system_loader.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/themes_addcolor.png b/TMessagesProj/src/main/res/drawable-xhdpi/themes_addcolor.png new file mode 100644 index 000000000..ed562a18b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/themes_addcolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/themes_deletecolor.png b/TMessagesProj/src/main/res/drawable-xhdpi/themes_deletecolor.png new file mode 100644 index 000000000..d7578160c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/themes_deletecolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/themes_swapcolor.png b/TMessagesProj/src/main/res/drawable-xhdpi/themes_swapcolor.png new file mode 100644 index 000000000..89dc0178a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/themes_swapcolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_screencast.png b/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_screencast.png new file mode 100644 index 000000000..dccd75644 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/voicechat_screencast.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/background_hd.jpg 
b/TMessagesProj/src/main/res/drawable-xxhdpi/background_hd.jpg deleted file mode 100644 index 022fb0010..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/background_hd.jpg and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/bg_rotate_large.png b/TMessagesProj/src/main/res/drawable-xxhdpi/bg_rotate_large.png new file mode 100644 index 000000000..c252100ce Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/bg_rotate_large.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_bl.png b/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_bl.png deleted file mode 100755 index ff9cce560..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_bl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_br.png b/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_br.png deleted file mode 100755 index 0ea1e3263..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_br.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_tl.png b/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_tl.png deleted file mode 100755 index 5aabc50f5..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_tl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_tr.png b/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_tr.png deleted file mode 100755 index 87bbb3b08..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_in_tr.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_bl.png b/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_bl.png deleted file mode 100755 index 7e9f321e8..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_bl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_br.png 
b/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_br.png deleted file mode 100755 index 3706b2b6d..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_br.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_tl.png b/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_tl.png deleted file mode 100755 index f8044c90b..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_tl.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_tr.png b/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_tr.png deleted file mode 100755 index 76a6a50e1..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/corner_out_tr.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_minimize.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_minimize.png new file mode 100644 index 000000000..866a0f854 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_minimize.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_pin.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_pin.png new file mode 100644 index 000000000..9bfc2da50 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_pin.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_unpin.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_unpin.png new file mode 100644 index 000000000..bbed5a697 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_calls_unpin.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_noise_off.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_noise_off.png new file mode 100644 index 000000000..fa67d6680 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_noise_off.png differ diff --git 
a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_noise_on.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_noise_on.png new file mode 100644 index 000000000..cdcf732f8 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_noise_on.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_pin_filled.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_pin_filled.png new file mode 100644 index 000000000..c45b60b2d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_pin_filled.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_screencast.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_screencast.png new file mode 100644 index 000000000..87a776d30 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_screencast.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_screencast_off.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_screencast_off.png new file mode 100644 index 000000000..a1ba164ab Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_screencast_off.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_sendfile.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_sendfile.png new file mode 100644 index 000000000..47146a079 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_sendfile.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_share_filled.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_share_filled.png index 7cf43a91f..c18a5d02b 100644 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_share_filled.png and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_share_filled.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_bluetooth.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_bluetooth.png new file mode 100644 index 000000000..71534ec01 Binary files /dev/null and 
b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_bluetooth.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_headphones.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_headphones.png new file mode 100644 index 000000000..f1c2f2c78 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_headphones.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_phone.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_phone.png new file mode 100644 index 000000000..39c535e03 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_phone.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_speaker.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_speaker.png new file mode 100644 index 000000000..ec2ad3e8a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_voice_speaker.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/screencast_big.png b/TMessagesProj/src/main/res/drawable-xxhdpi/screencast_big.png new file mode 100644 index 000000000..7510cc20a Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/screencast_big.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/system_loader.png b/TMessagesProj/src/main/res/drawable-xxhdpi/system_loader.png deleted file mode 100755 index fa7341741..000000000 Binary files a/TMessagesProj/src/main/res/drawable-xxhdpi/system_loader.png and /dev/null differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/themes_addcolor.png b/TMessagesProj/src/main/res/drawable-xxhdpi/themes_addcolor.png new file mode 100644 index 000000000..3df55c64d Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/themes_addcolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/themes_deletecolor.png b/TMessagesProj/src/main/res/drawable-xxhdpi/themes_deletecolor.png new file mode 100644 index 
000000000..517fb792f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/themes_deletecolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/themes_swapcolor.png b/TMessagesProj/src/main/res/drawable-xxhdpi/themes_swapcolor.png new file mode 100644 index 000000000..5eb667ef3 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/themes_swapcolor.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_screencast.png b/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_screencast.png new file mode 100644 index 000000000..16373ebf2 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/voicechat_screencast.png differ diff --git a/TMessagesProj/src/main/res/raw/camera_flip.json b/TMessagesProj/src/main/res/raw/camera_flip.json new file mode 100644 index 000000000..385b073e3 --- /dev/null +++ b/TMessagesProj/src/main/res/raw/camera_flip.json @@ -0,0 +1 @@ +{"v":"5.5.7","meta":{"g":"LottieFiles AE 0.1.20","a":"","k":"","d":"","tc":""},"fr":60,"ip":0,"op":40,"w":32,"h":32,"nm":"Comp 
1","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Shape","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.313],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":0,"s":[0]},{"i":{"x":[0],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":18,"s":[180]},{"i":{"x":[0.295],"y":[1]},"o":{"x":[0.233],"y":[0]},"t":21,"s":[180]},{"t":39,"s":[360]}],"ix":10},"p":{"a":0,"k":[15.991,15.997,0],"ix":2},"a":{"a":0,"k":[0,0,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":0,"s":[33.333,33.333,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":11,"s":[32,32,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":18,"s":[33.333,33.333,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.167,0.167,0.167],"y":[0,0,0]},"t":21,"s":[33.333,33.333,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":32,"s":[32,32,100]},{"t":39,"s":[33.333,33.333,100]}],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-0.91,-0.75],[-0.09,-0.12],[0,0],[0.46,-0.38],[0.25,0],[0,0],[-11.5,0],[-4.74,6.85],[-1.49,-1.03],[1.03,-1.48],[10.66,0],[3.36,14.12],[0,0],[0,0.59],[-0.16,0.19],[0,0]],"o":[[0.11,0.09],[0,0],[0.37,0.46],[-0.19,0.15],[0,0],[3.2,10.46],[8.49,0],[1.03,-1.48],[1.49,1.03],[-5.96,8.6],[-15.15,0],[0,0],[-0.59,0],[0,-0.25],[0,0],[0.74,-0.92]],"v":[[-26.403,-5.744],[-26.093,-5.434],[-17.003,5.726],[-17.163,7.236],[-17.833,7.466],[-24.453,7.476],[-0.023,25.546],[20.987,14.526],[25.547,13.706],[26.377,18.256],[-0.023,32.086],[-31.233,7.476],[-37.663,7.466],[-38.733,6.406],[-38.493,5.726],[-29.403,-5.434]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ind":1,"ty":"sh","ix":2,"ks":{"a":0,"k":{"i":[[-10.94,0],[-3.36,-14.12],[0,0],[-0.2,-0.16],[0.29,-0.45],[0,0],[0,0],[0.12,-0.1],[0.76,0.8],[0,0],[0,0],[0,0.24],[-0.53,0.05],[0,0],[0,0],[11.5,0],[4.7,-7.14],[1.51,0.99],[-0.99,1.51]],"o":[[15.15,0],[0,0],[0.25,0],[0.42,0.34],[0,0],[0,0],[-0.09,0.11],[-0.87,0.7],[0,0],[0,0],[-0.15,-0.19],[0,-0.55],[0,0],[0,0],[-3.2,-10.46],[-8.71,0],[-0.99,1.51],[-1.52,-0.99],[5.89,-8.97]],"v":[[-0.016,-32.09],[31.194,-7.47],[37.664,-7.47],[38.344,-7.23],[38.564,-5.83],[38.494,-5.73],[29.404,5.44],[29.094,5.75],[26.204,5.56],[26.094,5.44],[17.004,-5.73],[16.764,-6.4],[17.714,-7.46],[17.834,-7.47],[24.414,-7.47],[-0.016,-25.54],[-21.376,-14.02],[-25.906,-13.08],[-26.846,-17.61]],"c":true},"ix":2},"nm":"Path 2","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"mm","mm":1,"nm":"Merge Paths 1","mn":"ADBE Vector Filter - Merge","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Shape","np":4,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":40,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/default_pattern.tgv b/TMessagesProj/src/main/res/raw/default_pattern.tgv new file mode 100644 index 000000000..05984b97a --- /dev/null +++ b/TMessagesProj/src/main/res/raw/default_pattern.tgv @@ -0,0 +1,4649 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/TMessagesProj/src/main/res/raw/utyan_call.tgs b/TMessagesProj/src/main/res/raw/utyan_call.tgs new file mode 100644 index 000000000..a50e28dad --- /dev/null +++ b/TMessagesProj/src/main/res/raw/utyan_call.tgs @@ -0,0 +1 @@ 
+{"tgs":1,"v":"5.5.2.2","fr":60,"ip":0,"op":180,"w":512,"h":512,"nm":"_025_PHONE_OUT","ddd":0,"assets":[{"id":"comp_0","layers":[{"ddd":0,"ind":1,"ty":4,"nm":"wing_bl2","parent":3,"sr":1,"ks":{"p":{"a":0,"k":[-1.234,34.254,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.33,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[-12.775,-5.53],[-0.651,13.123]],"o":[[12.775,5.53],[0.959,-19.334]],"v":[[-13.32,12.82],[10.279,-13.774]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.67,"y":0},"t":80,"s":[{"i":[[-12.775,-5.53],[-2.465,19.2]],"o":[[12.775,5.53],[2.465,-19.2]],"v":[[-15.016,8.182],[15.016,-10.324]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-12.775,-5.53],[-2.465,19.2]],"o":[[12.775,5.53],[2.465,-19.2]],"v":[[-15.016,8.182],[15.016,-10.324]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-12.775,-5.53],[-2.465,19.2]],"o":[[12.775,5.53],[2.465,-19.2]],"v":[[-14.016,10.182],[14.016,-11.824]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-12.775,-5.53],[-2.465,19.2]],"o":[[12.775,5.53],[2.465,-19.2]],"v":[[-14.016,10.182],[14.016,-11.824]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[-12.775,-5.53],[-0.651,13.123]],"o":[[12.775,5.53],[0.959,-19.334]],"v":[[-13.32,12.82],[10.279,-13.774]],"c":false}]},{"i":{"x":0.33,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[-12.775,-5.53],[-0.651,13.123]],"o":[[12.775,5.53],[0.959,-19.334]],"v":[[-13.32,12.82],[10.279,-13.774]],"c":false}]},{"t":260,"s":[{"i":[[-12.775,-5.53],[-2.465,19.2]],"o":[[12.775,5.53],[2.465,-19.2]],"v":[[-15.016,8.182],[15.016,-10.324]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.988235294819,0.933333337307,0.129411771894,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 
1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[5]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[0]},{"t":240,"s":[0]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]}
,"o":{"x":[0.333],"y":[0]},"t":168,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":192,"s":[95]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[100]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[95]},{"t":240,"s":[95]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"wing_bl1","parent":3,"sr":1,"ks":{"p":{"a":0,"k":[-2.819,-16.153,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.33,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[5.243,-6.389],[0.763,-21.174]],"o":[[-6.167,7.515],[-0.335,9.298]],"v":[[5.348,-23.138],[-11.408,22.909]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.67,"y":0},"t":80,"s":[{"i":[[5.009,-6.574],[0.411,-9.295]],"o":[[-5.009,6.574],[-0.411,9.295]],"v":[[6.408,-23.909],[-6.408,23.909]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[5.009,-6.574],[0.411,-9.295]],"o":[[-5.009,6.574],[-0.411,9.295]],"v":[[6.408,-23.909],[-6.408,23.909]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-1.163,-8.183],[10.23,-6.352]],"o":[[1.914,13.465],[-7.905,4.908]],"v":[[2.908,-23.409],[-11.408,22.909]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-1.163,-8.183],[10.23,-6.352]],"o":[[1.914,13.465],[-7.905,4.908]],"v":[[2.908,-23.409],[-11.408,22.909]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[5.243,-6.389],[0.763,-21.174]],"o":[[-6.167,7.515],[-0.335,9.298]],"v":[[5.348,-23.138],[-11.408,22.909]],"c":false}]},{"i":{"x":0.33,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[5.243,-6.389],[0.763,-21.174]],"o":[[-6.167,7.515],[-0.335,9.298]],"v":[[5.348,-23.138],[-11.408,22.909]],"c":false}]},{"t":260,"s":[{"i":[[5.009,-6.574],[0.4
11,-9.295]],"o":[[-5.009,6.574],[-0.411,9.295]],"v":[[6.408,-23.909],[-6.408,23.909]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":60,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[0]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":84,"s":[5]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[5]},{"t":240,"s":[0]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":60,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[95]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":84,"s":[100]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[100]},{"i":
{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[100]},{"t":240,"s":[95]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"wing","parent":24,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.33],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":40,"s":[-6.315]},{"i":{"x":[0.701],"y":[1]},"o":{"x":[0.67],"y":[0]},"t":80,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":140,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":160,"s":[-6.315]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":176,"s":[-6.315]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":186,"s":[-6.315]},{"i":{"x":[0.33],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":220,"s":[-6.315]},{"t":260,"s":[0]}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":40,"s":[-97.698,17.018,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":80,"s":[-97.698,17.018,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[-97.698,17.018,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":160,"s":[-78.056,1.74,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[-78.056,1.74,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":186,"s":[-97.698,17.018,0],"to":[0,0,0]
,"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[-97.698,17.018,0],"to":[0,0,0],"ti":[0,0,0]},{"t":260,"s":[-97.698,17.018,0]}]},"a":{"a":0,"k":[-25,28,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.33,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[-11.831,-6.015],[-6.572,16.156],[-8.466,14.625],[11.068,-21.76],[0,0]],"o":[[20.336,10.338],[10.396,-25.556],[15.782,-27.263],[-11.082,21.788],[0,0]],"v":[[-21.159,60.991],[19.41,32.922],[17.931,-42.907],[-10.049,-38.706],[-26.717,4.327]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.67,"y":0},"t":80,"s":[{"i":[[-11.831,-6.015],[-6.572,16.156],[0.433,16.893],[9.096,-16.893],[0,0]],"o":[[20.336,10.338],[10.396,-25.556],[-0.433,-16.893],[-9.096,16.893],[0,0]],"v":[[-25.535,55.895],[22.098,39.88],[15.601,-53.248],[-9.089,-35.489],[-21.217,7.827]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-11.831,-6.015],[-6.572,16.156],[0.433,16.893],[9.096,-16.893],[0,0]],"o":[[20.336,10.338],[10.396,-25.556],[-0.433,-16.893],[-9.096,16.893],[0,0]],"v":[[-25.535,55.895],[22.098,39.88],[15.601,-53.248],[-9.089,-35.489],[-21.217,7.827]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-11.831,-6.015],[-6.572,16.156],[8.636,14.525],[-1.249,-19.133],[0,0]],"o":[[20.336,10.338],[10.396,-25.556],[-12.098,-20.348],[1.592,24.393],[0,0]],"v":[[-25.535,55.895],[22.098,39.88],[7.601,-52.248],[-9.089,-33.989],[-26.717,4.327]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-11.831,-6.015],[-6.572,16.156],[8.636,14.525],[-1.249,-19.133],[0,0]],"o":[[20.336,10.338],[10.396,-25.556],[-12.098,-20.348],[1.592,24.393],[0,0]],"v":[[-25.535,55.895],[22.098,39.88],[7.601,-52.248],[-9.089,-33.989],[-26.717,4.327]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[-11.831,-6.015],[-6.572,16.156],[-8.466,14.625],[11.068,-21.76],[0,0]],"o":[[20.336,10.338],[10.396,-25.556],[15.78
2,-27.263],[-11.082,21.788],[0,0]],"v":[[-21.159,60.991],[19.41,32.922],[17.931,-42.907],[-10.049,-38.706],[-26.717,4.327]],"c":false}]},{"i":{"x":0.33,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[-11.831,-6.015],[-6.572,16.156],[-8.466,14.625],[11.068,-21.76],[0,0]],"o":[[20.336,10.338],[10.396,-25.556],[15.782,-27.263],[-11.082,21.788],[0,0]],"v":[[-21.159,60.991],[19.41,32.922],[17.931,-42.907],[-10.049,-38.706],[-26.717,4.327]],"c":false}]},{"t":260,"s":[{"i":[[-11.831,-6.015],[-6.572,16.156],[0.433,16.893],[9.096,-16.893],[0,0]],"o":[[20.336,10.338],[10.396,-25.556],[-0.433,-16.893],[-9.096,16.893],[0,0]],"v":[[-25.535,55.895],[22.098,39.88],[15.601,-53.248],[-9.089,-35.489],[-21.217,7.827]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.980392158031,0.564705908298,0.086274512112,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.835294127464,0.152941182256,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Layer 
17","parent":10,"sr":1,"ks":{"p":{"a":0,"k":[48.489,78.912,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[-21.789,-2.405],[-8.079,5.11]],"o":[[12.114,1.337],[2.716,-1.718]],"v":[[-15.368,2.735],[15.368,-3.129]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[{"i":[[-21.897,-1.025],[-7.74,5.61]],"o":[[12.174,0.57],[2.602,-1.886]],"v":[[-6.33,7.685],[22.384,-6.608]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-21.897,-1.025],[-7.74,5.61]],"o":[[12.174,0.57],[2.602,-1.886]],"v":[[-6.33,7.685],[22.384,-6.608]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-21.897,-1.025],[-7.74,5.61]],"o":[[12.174,0.57],[2.602,-1.886]],"v":[[-9.922,2.75],[20.382,-5.042]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-21.897,-1.025],[-7.74,5.61]],"o":[[12.174,0.57],[2.602,-1.886]],"v":[[-9.922,2.75],[20.382,-5.042]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[-21.789,-2.405],[-8.079,5.11]],"o":[[12.114,1.337],[2.716,-1.718]],"v":[[-15.368,2.735],[15.368,-3.129]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[-21.789,-2.405],[-8.079,5.11]],"o":[[12.114,1.337],[2.716,-1.718]],"v":[[-15.368,2.735],[15.368,-3.129]],"c":false}]},{"t":260,"s":[{"i":[[-21.897,-1.025],[-7.74,5.61]],"o":[[12.174,0.57],[2.602,-1.886]],"v":[[-6.33,7.685],[22.384,-6.608]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.152941182256,0.819607853889,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Layer 
12","parent":10,"sr":1,"ks":{"p":{"a":0,"k":[51.603,58.988,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[-16.088,-4.257],[-7.804,-16.305]],"o":[[16.088,4.257],[7.804,16.305]],"v":[[-21.908,-23.278],[28.694,17.479]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[{"i":[[-10.602,12.343],[15.602,-19.43]],"o":[[21.476,-25.002],[-11.066,13.781]],"v":[[-24.738,-19.925],[29.738,16.925]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-10.602,12.343],[15.602,-19.43]],"o":[[21.476,-25.002],[-11.066,13.781]],"v":[[-24.738,-19.925],[29.738,16.925]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-11.8,11.203],[5.397,-13.317]],"o":[[17.873,-16.968],[-6.639,16.38]],"v":[[-24.738,-19.925],[29.738,16.925]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-11.8,11.203],[5.397,-13.317]],"o":[[17.873,-16.968],[-6.639,16.38]],"v":[[-24.738,-19.925],[29.738,16.925]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[-16.088,-4.257],[-7.804,-16.305]],"o":[[16.088,4.257],[7.804,16.305]],"v":[[-21.908,-23.278],[28.694,17.479]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[-16.088,-4.257],[-7.804,-16.305]],"o":[[16.088,4.257],[7.804,16.305]],"v":[[-21.908,-23.278],[28.694,17.479]],"c":false}]},{"t":260,"s":[{"i":[[-10.602,12.343],[15.602,-19.43]],"o":[[21.476,-25.002],[-11.066,13.781]],"v":[[-24.738,-19.925],[29.738,16.925]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Layer 11","parent":10,"sr":1,"ks":{"p":{"a":0,"k":[-57.45,-51.469,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[-15.503,-4.94],[-0.553,-12.992]],"o":[[10.479,3.339],[0.751,17.658]],"v":[[-20.879,-30.222],[25.161,25.462]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[{"i":[[-14.357,7.656],[16.556,-14.237]],"o":[[21.98,-11.721],[-13.401,11.524]],"v":[[-20.175,-27.771],[25.861,26.792]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-14.357,7.656],[16.556,-14.237]],"o":[[21.98,-11.721],[-13.401,11.524]],"v":[[-20.175,-27.771],[25.861,26.792]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-14.63,7.122],[12.669,-12.277]],"o":[[19.709,-9.594],[-12.692,12.3]],"v":[[-21.52,-29.342],[24.52,26.342]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-14.63,7.122],[12.669,-12.277]],"o":[[19.709,-9.594],[-12.692,12.3]],"v":[[-21.52,-29.342],[24.52,26.342]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[-15.503,-4.94],[-0.553,-12.992]],"o":[[10.479,3.339],[0.751,17.658]],"v":[[-20.879,-30.222],[25.161,25.462]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[-15.503,-4.94],[-0.553,-12.992]],"o":[[10.479,3.339],[0.751,17.658]],"v":[[-20.879,-30.222],[25.161,25.462]],"c":false}]},{"t":260,"s":[{"i":[[-14.357,7.656],[16.556,-14.237]],"o":[[21.98,-11.721],[-13.401,11.524]],"v":[[-20.175,-27.771],[25.861,26.792]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 
1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Layer 16","parent":10,"sr":1,"ks":{"p":{"a":0,"k":[78.946,60.002,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[-1.361,1.528],[-1.058,2.866]],"o":[[1.873,-2.102],[4.116,-11.147]],"v":[[-2.212,3.733],[2.212,-3.733]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[{"i":[[-1.103,1.724],[-0.592,2.997]],"o":[[1.517,-2.371],[2.302,-11.657]],"v":[[4.683,-0.307],[7.872,-8.378]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-1.103,1.724],[-0.592,2.997]],"o":[[1.517,-2.371],[2.302,-11.657]],"v":[[4.683,-0.307],[7.872,-8.378]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-1.103,1.724],[-0.592,2.997]],"o":[[1.517,-2.371],[2.302,-11.657]],"v":[[4.683,-0.307],[7.872,-8.378]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-1.103,1.724],[-0.592,2.997]],"o":[[1.517,-2.371],[2.302,-11.657]],"v":[[4.683,-0.307],[7.872,-8.378]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[-1.361,1.528],[-1.058,2.866]],"o":[[1.873,-2.102],[4.116,-11.147]],"v":[[-2.212,3.733],[2.212,-3.733]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[-1.361,1.528],[-1.058,2.866]],"o":[[1.873,-2.102],[4.116,-11.147]],"v":[[-2.212,3.733],[2.212,-3.733]],"c":false}]},{"t":260,"s":[{"i":[[-1.103,1.724],[-0.592,2.997]],"o":[[1.517,-2.371],[2.302,-11.657]],"v":[[4.683,-0.307],[7.872,-8.378]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.152941182256,0.819607853889,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 
1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":8,"ty":4,"nm":"Layer 14","parent":10,"sr":1,"ks":{"p":{"a":0,"k":[-62.225,-81.826,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[0.875,-0.546],[1.107,-0.875]],"o":[[-1.141,0.712],[-0.297,0.235]],"v":[[1.69,-1.19],[-1.69,1.19]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[{"i":[[1.693,-0.73],[1.107,-0.875]],"o":[[-1.235,0.533],[-0.297,0.235]],"v":[[4.486,1.903],[3.108,2.718]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[1.693,-0.73],[1.107,-0.875]],"o":[[-1.235,0.533],[-0.297,0.235]],"v":[[4.486,1.903],[3.108,2.718]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[0.875,-0.546],[1.107,-0.875]],"o":[[-1.141,0.712],[-0.297,0.235]],"v":[[4.69,-2.69],[1.31,-0.31]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[0.875,-0.546],[1.107,-0.875]],"o":[[-1.141,0.712],[-0.297,0.235]],"v":[[4.69,-2.69],[1.31,-0.31]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[0.875,-0.546],[1.107,-0.875]],"o":[[-1.141,0.712],[-0.297,0.235]],"v":[[1.69,-1.19],[-1.69,1.19]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[0.875,-0.546],[1.107,-0.875]],"o":[[-1.141,0.712],[-0.297,0.235]],"v":[[1.69,-1.19],[-1.69,1.19]],"c":false}]},{"t":260,"s":[{"i":[[1.693,-0.73],[1.107,-0.875]],"o":[[-1.235,0.533],[-0.297,0.235]],"v":[[4.486,1.903],[3.108,2.718]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 
1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":9,"ty":4,"nm":"Layer 13","parent":10,"sr":1,"ks":{"p":{"a":0,"k":[-80.005,-44.128,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[0.901,-2.015],[-5.189,-14.357]],"o":[[-3.956,8.852],[10.193,28.201]],"v":[[1.524,-17.418],[1.782,17.418]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[{"i":[[1.568,-1.554],[-4.38,-14.624]],"o":[[-8.976,8.899],[8.603,28.726]],"v":[[6.369,-22.829],[-4.016,12.964]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[1.568,-1.554],[-4.38,-14.624]],"o":[[-8.976,8.899],[8.603,28.726]],"v":[[6.369,-22.829],[-4.016,12.964]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[1.012,-1.962],[-4.38,-14.624]],"o":[[-4.444,8.618],[8.603,28.726]],"v":[[3.475,-21.376],[1.788,13.421]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[1.012,-1.962],[-4.38,-14.624]],"o":[[-4.444,8.618],[8.603,28.726]],"v":[[3.475,-21.376],[1.788,13.421]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[0.901,-2.015],[-5.189,-14.357]],"o":[[-3.956,8.852],[10.193,28.201]],"v":[[1.524,-17.418],[1.782,17.418]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[0.901,-2.015],[-5.189,-14.357]],"o":[[-3.956,8.852],[10.193,28.201]],"v":[[1.524,-17.418],[1.782,17.418]],"c":false}]},{"t":260,"s":[{"i":[[1.568,-1.554],[-4.38,-14.624]],"o":[[-8.976,8.899],[8.603,28.726]],"v":[[6.369,-22.829],[-4.016,12.964]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 
1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false},{"ty":"tm","s":{"a":0,"k":0},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":40,"s":[56]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":80,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":140,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":160,"s":[46]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":176,"s":[46]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":186,"s":[56]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":220,"s":[56]},{"t":260,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":10,"ty":4,"nm":"trubka","parent":3,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.33],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":40,"s":[11.145]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":80,"s":[26.649]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":140,"s":[26.649]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":160,"s":[1.785]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":176,"s":[1.785]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":186,"s":[11.145]},{"i":{"x":[0.33],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":220,"s":[11.145]},{"t":260,"s":[26.649]}]},"p":{"a":1,"k":[{"i":{"x":0.33,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[14.909,-100.669,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.167,"y":0.167},"t":80,"s":[24.824,-98.469,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[24.824,-98.469,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":160,"s":[14.909,-100.669,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":
176,"s":[14.909,-100.669,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":186,"s":[14.909,-100.669,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.33,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[14.909,-100.669,0],"to":[0,0,0],"ti":[0,0,0]},{"t":260,"s":[24.824,-98.469,0]}]},"a":{"a":0,"k":[-53.429,-55.456,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[0,-23.04],[-26.769,-26.595],[-29.036,0],[-11.431,14.817],[13.487,1.686],[7.024,-20.511],[22.603,22.927],[-19.219,6.582],[1.686,13.487],[19.668,-15.173]],"o":[[0,30.168],[27.161,26.984],[23.04,0],[15.173,-19.668],[-13.487,-1.686],[-6.913,20.185],[-22.603,-22.927],[20.511,-7.024],[-1.686,-13.487],[-14.817,11.431]],"v":[[-93.151,-44.059],[-42.717,42.616],[43.958,93.049],[90.039,68.323],[47.193,17.733],[25.277,41.897],[-24.091,24.466],[-40.734,-21.463],[-16.57,-43.38],[-68.425,-90.14]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[{"i":[[-0.93,-23.021],[-26.769,-26.595],[-19.147,0.753],[-11.492,14.77],[13.436,2.054],[3.721,-9.895],[22.603,22.927],[-6.346,1.999],[-2.712,4.76],[34.633,-29.999]],"o":[[1,24.755],[27.161,26.984],[23.022,-0.906],[22.395,-28.783],[-18.623,-2.847],[-4.432,11.788],[-22.603,-22.927],[14.439,-4.549],[13.641,-23.946],[-14.145,12.253]],"v":[[-98.925,-37.793],[-43.828,43.735],[39.07,97.748],[86.317,72.061],[57.603,19.359],[21.954,47.494],[-20.098,19.094],[-46.508,-15.197],[-23.844,-33.113],[-79.199,-78.873]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-0.93,-23.021],[-26.769,-26.595],[-19.147,0.753],[-11.492,14.77],[13.436,2.054],[3.721,-9.895],[22.603,22.927],[-6.346,1.999],[-2.712,4.76],[34.633,-29.999]],"o":[[1,24.755],[27.161,26.984],[23.022,-0.906],[22.395,-28.783],[-18.623,-2.847],[-4.432,11.788],[-22.603,-22.927],[14.439,-4.549],[13.641,-23.946],[-14.145,12.253]],"v":[[-98.925,-37.793],[-43.828,43.735],[39.07,97.748],[86.
317,72.061],[57.603,19.359],[21.954,47.494],[-20.098,19.094],[-46.508,-15.197],[-23.844,-33.113],[-79.199,-78.873]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[0,-23.04],[-26.769,-26.595],[-29.036,0],[-11.283,14.93],[13.479,1.751],[3.961,-9.802],[22.603,22.927],[-19.421,5.96],[-2.161,5.034],[32.663,-26.765]],"o":[[0,30.168],[27.161,26.984],[23.04,0],[20.2,-26.728],[-16.455,-2.138],[-7.994,19.781],[-22.603,-22.927],[14.472,-4.441],[7.308,-17.025],[-14.475,11.861]],"v":[[-93.151,-44.059],[-42.717,42.616],[43.958,93.049],[88.539,69.823],[53.693,18.233],[25.277,41.897],[-24.091,24.466],[-40.734,-21.463],[-18.07,-39.38],[-73.425,-85.14]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[0,-23.04],[-26.769,-26.595],[-29.036,0],[-11.283,14.93],[13.479,1.751],[3.961,-9.802],[22.603,22.927],[-19.421,5.96],[-2.161,5.034],[32.663,-26.765]],"o":[[0,30.168],[27.161,26.984],[23.04,0],[20.2,-26.728],[-16.455,-2.138],[-7.994,19.781],[-22.603,-22.927],[14.472,-4.441],[7.308,-17.025],[-14.475,11.861]],"v":[[-93.151,-44.059],[-42.717,42.616],[43.958,93.049],[88.539,69.823],[53.693,18.233],[25.277,41.897],[-24.091,24.466],[-40.734,-21.463],[-18.07,-39.38],[-73.425,-85.14]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[0,-23.04],[-26.769,-26.595],[-29.036,0],[-11.431,14.817],[13.487,1.686],[7.024,-20.511],[22.603,22.927],[-19.219,6.582],[1.686,13.487],[19.668,-15.173]],"o":[[0,30.168],[27.161,26.984],[23.04,0],[15.173,-19.668],[-13.487,-1.686],[-6.913,20.185],[-22.603,-22.927],[20.511,-7.024],[-1.686,-13.487],[-14.817,11.431]],"v":[[-93.151,-44.059],[-42.717,42.616],[43.958,93.049],[90.039,68.323],[47.193,17.733],[25.277,41.897],[-24.091,24.466],[-40.734,-21.463],[-16.57,-43.38],[-68.425,-90.14]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[0,-23.04],[-26.769,-26.595],[-29.036,0],[-11.431,14.817],[13.487,1.686],[7.024,-20.511],[22.603,22.927],[-19.219,6.582],[1.686,
13.487],[19.668,-15.173]],"o":[[0,30.168],[27.161,26.984],[23.04,0],[15.173,-19.668],[-13.487,-1.686],[-6.913,20.185],[-22.603,-22.927],[20.511,-7.024],[-1.686,-13.487],[-14.817,11.431]],"v":[[-93.151,-44.059],[-42.717,42.616],[43.958,93.049],[90.039,68.323],[47.193,17.733],[25.277,41.897],[-24.091,24.466],[-40.734,-21.463],[-16.57,-43.38],[-68.425,-90.14]],"c":true}]},{"t":260,"s":[{"i":[[-0.93,-23.021],[-26.769,-26.595],[-19.147,0.753],[-11.492,14.77],[13.436,2.054],[3.721,-9.895],[22.603,22.927],[-6.346,1.999],[-2.712,4.76],[34.633,-29.999]],"o":[[1,24.755],[27.161,26.984],[23.022,-0.906],[22.395,-28.783],[-18.623,-2.847],[-4.432,11.788],[-22.603,-22.927],[14.439,-4.549],[13.641,-23.946],[-14.145,12.253]],"v":[[-98.925,-37.793],[-43.828,43.735],[39.07,97.748],[86.317,72.061],[57.603,19.359],[21.954,47.494],[-20.098,19.094],[-46.508,-15.197],[-23.844,-33.113],[-79.199,-78.873]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.031372550875,0.607843160629,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":12},"lc":2,"lj":2,"bm":0,"nm":"Stroke 2","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":11,"ty":4,"nm":"wire","sr":1,"ks":{"p":{"a":0,"k":[455.818,402.702,0]},"a":{"a":0,"k":[90,11,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":140,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":150,"s":[98,105,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.296,0.296,0.296],"y":[0,0,0]},"t":164,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":176,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":182,"s":[98,105,100]},{"t":190,"s":[100,100,100]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":40,"s":[{"i":[[0,0],[0,0],[0.484,-0.718],[0,0],[-1.059,0.385],[0,0],[0.825,-0.897],[0,0],[-1.143,0.399],[0,0],[0.722,-0.914],[0,0],[-0.992,0.518],[0,0],[0.582,-0.965],[0,0],[-0.731,0.775],[0,0],[0.169,-0.935],[0,0],[-0.842,0.917],[0,0],[0.149,-0.971],[0,0],[-0.585,1.181],[0,0],[-0.288,-1.015],[0,0],[-0.425,1.176],[0,0],[-0.363,-1.033],[0.207,-0.445],[-1.602,6.262],[-0.308,-1.159],[0,0],[-0.367,1.096],[0,0],[-0.238,-1.167],[0,0],[-0.373,1.134],[0,0],[-0.292,-1.125],[0,0],[-0.364,1.103],[0,0],[-0.317,-1.1],[0,0],[-0.354,1.093],[0,0],[-0.305,-1.117],[0,0],[-0.251,1.197],[0,0],[-0.38,-1.123],[0,0],[-0.242,1.161],[0,0],[-0.341,-1.163],[0,0],[-0.245,1.188],[0,0],[-0.443,-1.056],[0,0],[-0.133,1.194],[0,0],[-0.543,-1.004],[0,0],[0,1.209],[0,0],[-0.658,-0.923],[0,0],[0.29,1.243],[0,0],[-0.882,-0.539],[0,0],[0.799,1.021],[0,0],[-0.971,0],[0,0],[1.116,0.537],[0,0],[0.029,0.018],[9.439,0.371]],"o":[[0,0],[0.859,0.111],[0,0],[-0.63,0.935],[0,0],[1.146,-0.417],[0,0],[-0.819,0.891],[0,0],[1.1,-0.383],[0,0],[-0.564,0.966],[0,0],[0.999,-0.522],[0,0],[-0.461,1.022],[0,0],[0.744,-0.591],[0,0],[-0.499,1.14],[0,0]
,[0.837,-0.513],[0,0],[-0.2,1.303],[0,0],[0.468,-0.946],[0,0],[0.104,1.246],[0,0],[0.372,-1.03],[2.16,6.144],[-0.193,0.413],[0.297,-1.162],[0,0],[0.297,1.116],[0,0],[0.378,-1.13],[0,0],[0.239,1.17],[0,0],[0.363,-1.104],[0,0],[0.292,1.124],[0,0],[0.359,-1.088],[0,0],[0.318,1.104],[0,0],[0.356,-1.101],[0,0],[0.322,1.18],[0,0],[0.243,-1.16],[0,0],[0.381,1.123],[0,0],[0.248,-1.186],[0,0],[0.342,1.164],[0,0],[0.231,-1.121],[0,0],[0.465,1.107],[0,0],[0.126,-1.134],[0,0],[0.575,1.063],[0,0],[0,-1.134],[0,0],[0.741,1.039],[0,0],[-0.235,-1.006],[0,0],[1.107,0.676],[0,0],[-0.598,-0.764],[0,0],[1.238,0],[0,0],[-0.031,-0.015],[-0.667,-0.4],[-9.85,-0.387]],"v":[[-150.646,-24.645],[-129.089,-18.987],[-128.272,-17.182],[-137.059,-4.137],[-135.696,-2.392],[-117.224,-9.114],[-115.969,-7.233],[-130.775,8.87],[-129.535,10.757],[-110.163,4.003],[-108.866,5.823],[-120.149,24.254],[-118.669,25.805],[-102.455,17.334],[-100.983,18.898],[-108.24,34.932],[-106.738,35.907],[-92.279,24.453],[-90.625,25.443],[-98.157,43.638],[-96.336,44.825],[-81.496,30.755],[-79.753,31.913],[-82.348,48.86],[-80.174,49.546],[-72.959,31.643],[-70.815,31.84],[-67.58,52.703],[-65.324,53.002],[-59.199,36.051],[-57.007,36.054],[-51.601,53.902],[-47.436,37.233],[-45.184,37.226],[-40.922,53.244],[-38.692,53.314],[-32.987,36.259],[-30.741,36.396],[-26.936,55.021],[-24.689,55.152],[-18.928,37.647],[-16.694,37.719],[-11.735,56.84],[-9.501,56.912],[-3.016,37.267],[-0.791,37.31],[5.616,58.078],[7.843,58.113],[14.943,37.602],[17.174,37.654],[22.237,56.204],[24.501,56.137],[28.296,38.053],[30.539,37.918],[36.486,55.465],[38.729,55.329],[42.457,37.466],[44.714,37.376],[49.778,54.636],[52.036,54.543],[55.609,37.182],[57.823,36.966],[65.064,54.213],[67.295,53.891],[69.214,36.617],[71.395,36.191],[79.726,51.595],[81.915,51.041],[81.915,32.692],[84.028,32.016],[91.992,43.194],[94.075,42.254],[90.816,28.279],[92.557,27.021],[105.441,34.896],[106.965,33.185],[98.241,22.039],[99.158,20.156],[114.674,20.156],[115.179,17.943],[108.583
,14.766],[108.493,14.717],[90.184,9.136]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":80,"s":[{"i":[[0,0],[0,0],[1.109,-0.214],[0,0],[-1.632,-0.07],[0,0],[1.672,-0.217],[0,0],[-1.75,-0.083],[0,0],[1.549,-0.248],[0,0],[-1.461,0.603],[0,0],[0.795,-1.043],[0,0],[-0.948,1.014],[0,0],[-0.236,-1.062],[0,0],[-0.943,1.146],[0,0],[0.08,-0.99],[0,0],[-0.69,1.243],[0,0],[-0.567,-0.992],[0,0],[-0.62,1.176],[0,0],[-0.427,-1.033],[0.207,-0.445],[-1.602,6.262],[-0.308,-1.159],[0,0],[-0.367,1.096],[0,0],[-0.238,-1.167],[0,0],[-0.373,1.134],[0,0],[-0.292,-1.125],[0,0],[-0.364,1.103],[0,0],[-0.317,-1.1],[0,0],[-0.354,1.093],[0,0],[-0.305,-1.117],[0,0],[-0.251,1.197],[0,0],[-0.38,-1.123],[0,0],[-0.242,1.161],[0,0],[-0.341,-1.163],[0,0],[-0.245,1.188],[0,0],[-0.443,-1.056],[0,0],[-0.133,1.194],[0,0],[-0.543,-1.004],[0,0],[0,1.209],[0,0],[-0.658,-0.923],[0,0],[0.29,1.243],[0,0],[-0.882,-0.539],[0,0],[0.799,1.021],[0,0],[-0.971,0],[0,0],[1.116,0.537],[0,0],[0.029,0.018],[9.439,0.371]],"o":[[0,0],[1.039,0.249],[0,0],[-1.444,0.279],[0,0],[1.765,0.076],[0,0],[-1.661,0.215],[0,0],[1.683,0.08],[0,0],[-0.764,0.534],[0,0],[1.471,-0.607],[0,0],[-0.534,1.147],[0,0],[0.845,-0.905],[0,0],[-0.35,1.293],[0,0],[1.155,-0.597],[0,0],[-0.108,1.328],[0,0],[0.553,-0.996],[0,0],[0.151,1.246],[0,0],[0.543,-1.03],[2.538,6.144],[-0.193,0.413],[0.297,-1.162],[0,0],[0.297,1.116],[0,0],[0.378,-1.13],[0,0],[0.239,1.17],[0,0],[0.363,-1.104],[0,0],[0.292,1.124],[0,0],[0.359,-1.087],[0,0],[0.318,1.104],[0,0],[0.356,-1.101],[0,0],[0.322,1.18],[0,0],[0.243,-1.16],[0,0],[0.381,1.123],[0,0],[0.248,-1.186],[0,0],[0.342,1.164],[0,0],[0.231,-1.121],[0,0],[0.465,1.107],[0,0],[0.126,-1.134],[0,0],[0.575,1.063],[0,0],[0,-1.134],[0,0],[0.741,1.039],[0,0],[-0.235,-1.006],[0,0],[1.107,0.676],[0,0],[-0.598,-0.764],[0,0],[1.238,0],[0,0],[-0.031,-0.015],[-0.667,-0.4],[-9.85,-0.387]],"v":[[-171.818,2.863],[-147.666,10.424],[-147.818,11.433],[-167.968,15.327],[-167.371,16.435],[-138.91,17.656],[-138.546,18.802],[-168
.551,22.692],[-168.21,23.836],[-138.559,25.251],[-138.1,26.378],[-152.87,36.103],[-150.487,37.121],[-126.608,27.263],[-124.224,28.8],[-132.325,46.672],[-129.729,47.339],[-113.282,29.733],[-110.17,30.185],[-114.983,50.927],[-111.831,51.712],[-93.924,34.322],[-91.203,35.316],[-92.605,52.595],[-89.319,53.074],[-80.724,32.659],[-77.553,32.648],[-71.714,53.703],[-68.421,54.002],[-59.483,37.051],[-56.802,37.054],[-51.601,53.902],[-47.436,37.233],[-45.184,37.226],[-40.922,53.244],[-38.692,53.314],[-32.987,36.259],[-30.741,36.396],[-26.936,55.021],[-24.689,55.152],[-18.928,37.647],[-16.694,37.719],[-11.735,56.84],[-9.501,56.912],[-3.016,37.267],[-0.791,37.31],[5.616,58.078],[7.843,58.113],[14.943,37.602],[17.174,37.654],[22.237,56.204],[24.501,56.137],[28.296,38.053],[30.539,37.918],[36.486,55.465],[38.729,55.329],[42.457,37.466],[44.714,37.376],[49.778,54.636],[52.036,54.543],[55.609,37.182],[57.823,36.966],[65.064,54.213],[67.295,53.891],[69.214,36.617],[71.395,36.191],[79.726,51.595],[81.915,51.041],[81.915,32.692],[84.028,32.016],[91.992,43.194],[94.075,42.254],[90.816,28.279],[92.557,27.021],[105.441,34.896],[106.965,33.185],[98.241,22.039],[99.158,20.156],[114.674,20.156],[115.179,17.943],[108.583,14.766],[108.493,14.717],[90.184,9.136]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[0,0],[0,0],[1.109,-0.214],[0,0],[-1.632,-0.07],[0,0],[1.672,-0.217],[0,0],[-1.75,-0.083],[0,0],[1.549,-0.248],[0,0],[-1.461,0.603],[0,0],[0.795,-1.043],[0,0],[-0.948,1.014],[0,0],[-0.236,-1.062],[0,0],[-0.943,1.146],[0,0],[0.08,-0.99],[0,0],[-0.69,1.243],[0,0],[-0.567,-0.992],[0,0],[-0.62,1.176],[0,0],[-0.427,-1.033],[0.207,-0.445],[-1.602,6.262],[-0.308,-1.159],[0,0],[-0.367,1.096],[0,0],[-0.238,-1.167],[0,0],[-0.373,1.134],[0,0],[-0.292,-1.125],[0,0],[-0.364,1.103],[0,0],[-0.317,-1.1],[0,0],[-0.354,1.093],[0,0],[-0.305,-1.117],[0,0],[-0.251,1.197],[0,0],[-0.38,-1.123],[0,0],[-0.242,1.161],[0,0],[-0.341,-1.163],[0,0],[-0.245,1.188],[0,0],[-0.443,-1.056],[0,
0],[-0.133,1.194],[0,0],[-0.543,-1.004],[0,0],[0,1.209],[0,0],[-0.658,-0.923],[0,0],[0.29,1.243],[0,0],[-0.882,-0.539],[0,0],[0.799,1.021],[0,0],[-0.971,0],[0,0],[1.116,0.537],[0,0],[0.029,0.018],[9.439,0.371]],"o":[[0,0],[1.039,0.249],[0,0],[-1.444,0.279],[0,0],[1.765,0.076],[0,0],[-1.661,0.215],[0,0],[1.683,0.08],[0,0],[-0.764,0.534],[0,0],[1.471,-0.607],[0,0],[-0.534,1.147],[0,0],[0.845,-0.905],[0,0],[-0.35,1.293],[0,0],[1.155,-0.597],[0,0],[-0.108,1.328],[0,0],[0.553,-0.996],[0,0],[0.151,1.246],[0,0],[0.543,-1.03],[2.538,6.144],[-0.193,0.413],[0.297,-1.162],[0,0],[0.297,1.116],[0,0],[0.378,-1.13],[0,0],[0.239,1.17],[0,0],[0.363,-1.104],[0,0],[0.292,1.124],[0,0],[0.359,-1.087],[0,0],[0.318,1.104],[0,0],[0.356,-1.101],[0,0],[0.322,1.18],[0,0],[0.243,-1.16],[0,0],[0.381,1.123],[0,0],[0.248,-1.186],[0,0],[0.342,1.164],[0,0],[0.231,-1.121],[0,0],[0.465,1.107],[0,0],[0.126,-1.134],[0,0],[0.575,1.063],[0,0],[0,-1.134],[0,0],[0.741,1.039],[0,0],[-0.235,-1.006],[0,0],[1.107,0.676],[0,0],[-0.598,-0.764],[0,0],[1.238,0],[0,0],[-0.031,-0.015],[-0.667,-0.4],[-9.85,-0.387]],"v":[[-171.818,2.863],[-147.666,10.424],[-147.818,11.433],[-167.968,15.327],[-167.371,16.435],[-138.91,17.656],[-138.546,18.802],[-168.551,22.692],[-168.21,23.836],[-138.559,25.251],[-138.1,26.378],[-152.87,36.103],[-150.487,37.121],[-126.608,27.263],[-124.224,28.8],[-132.325,46.672],[-129.729,47.339],[-113.282,29.733],[-110.17,30.185],[-114.983,50.927],[-111.831,51.712],[-93.924,34.322],[-91.203,35.316],[-92.605,52.595],[-89.319,53.074],[-80.724,32.659],[-77.553,32.648],[-71.714,53.703],[-68.421,54.002],[-59.483,37.051],[-56.802,37.054],[-51.601,53.902],[-47.436,37.233],[-45.184,37.226],[-40.922,53.244],[-38.692,53.314],[-32.987,36.259],[-30.741,36.396],[-26.936,55.021],[-24.689,55.152],[-18.928,37.647],[-16.694,37.719],[-11.735,56.84],[-9.501,56.912],[-3.016,37.267],[-0.791,37.31],[5.616,58.078],[7.843,58.113],[14.943,37.602],[17.174,37.654],[22.237,56.204],[24.501,56.137],[28.296,38.053],[30.539,37.918]
,[36.486,55.465],[38.729,55.329],[42.457,37.466],[44.714,37.376],[49.778,54.636],[52.036,54.543],[55.609,37.182],[57.823,36.966],[65.064,54.213],[67.295,53.891],[69.214,36.617],[71.395,36.191],[79.726,51.595],[81.915,51.041],[81.915,32.692],[84.028,32.016],[91.992,43.194],[94.075,42.254],[90.816,28.279],[92.557,27.021],[105.441,34.896],[106.965,33.185],[98.241,22.039],[99.158,20.156],[114.674,20.156],[115.179,17.943],[108.583,14.766],[108.493,14.717],[90.184,9.136]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[0,0],[0,0],[0.658,-0.563],[0,0],[-1.124,0.088],[0,0],[1.036,-0.644],[0,0],[-1.208,-0.093],[0,0],[1.028,-0.548],[0,0],[-1.165,-0.083],[0,0],[1.046,-0.539],[0,0],[-1.204,-0.033],[0,0],[0.827,-0.687],[0,0],[-1.233,0.459],[0,0],[0.336,-0.933],[0,0],[-0.809,1.056],[0,0],[-0.088,-1.061],[0,0],[-0.425,1.176],[0,0],[-0.363,-1.033],[0.207,-0.445],[-1.602,6.262],[-0.308,-1.159],[0,0],[-0.367,1.096],[0,0],[-0.238,-1.167],[0,0],[-0.373,1.134],[0,0],[-0.292,-1.125],[0,0],[-0.364,1.103],[0,0],[-0.317,-1.1],[0,0],[-0.353,1.093],[0,0],[-0.305,-1.117],[0,0],[-0.251,1.197],[0,0],[-0.38,-1.123],[0,0],[-0.242,1.161],[0,0],[-0.341,-1.163],[0,0],[-0.245,1.188],[0,0],[-0.443,-1.056],[0,0],[-0.133,1.194],[0,0],[-0.543,-1.004],[0,0],[0,1.209],[0,0],[-0.658,-0.923],[0,0],[0.29,1.243],[0,0],[-0.882,-0.539],[0,0],[0.799,1.021],[0,0],[-0.971,0],[0,0],[1.116,0.537],[0,0],[0.029,0.018],[9.439,0.371]],"o":[[0,0],[0.798,0.337],[0,0],[-0.857,0.733],[0,0],[1.216,-0.095],[0,0],[-1.108,0.488],[0,0],[1.162,0.09],[0,0],[-1.031,0.55],[0,0],[1.174,0.084],[0,0],[-1.071,0.552],[0,0],[1.074,0.029],[0,0],[-1.018,0.834],[0,0],[0.929,-0.346],[0,0],[-0.451,1.252],[0,0],[0.648,-0.845],[0,0],[0.104,1.246],[0,0],[0.372,-1.03],[2.16,6.144],[-0.193,0.413],[0.297,-1.162],[0,0],[0.297,1.116],[0,0],[0.378,-1.13],[0,0],[0.239,1.17],[0,0],[0.363,-1.104],[0,0],[0.292,1.124],[0,0],[0.359,-1.088],[0,0],[0.318,1.104],[0,0],[0.356,-1.101],[0,0],[0.322,1.18],[0,0],[0.243,-1.16],[0,0],[0.381
,1.123],[0,0],[0.248,-1.186],[0,0],[0.342,1.164],[0,0],[0.231,-1.121],[0,0],[0.465,1.107],[0,0],[0.126,-1.134],[0,0],[0.575,1.063],[0,0],[0,-1.134],[0,0],[0.74,1.039],[0,0],[-0.235,-1.006],[0,0],[1.107,0.676],[0,0],[-0.598,-0.764],[0,0],[1.238,0],[0,0],[-0.031,-0.015],[-0.667,-0.4],[-9.85,-0.387]],"v":[[-126.083,-54.465],[-106.816,-43.243],[-106.512,-41.285],[-118.472,-31.061],[-117.624,-29.015],[-98.02,-30.553],[-97.313,-28.404],[-113.443,-18.864],[-113.064,-16.637],[-92.6,-15.058],[-92.142,-12.869],[-111.874,-2.347],[-111.409,-0.158],[-92.355,1.206],[-91.905,3.403],[-109.147,12.286],[-108.645,14.485],[-87.749,15.053],[-87.036,17.113],[-101.181,29.061],[-100.037,31.053],[-80.058,23.621],[-78.556,25.108],[-84.422,41.389],[-82.402,42.492],[-72.534,29.612],[-70.449,30.224],[-68.746,50.703],[-66.49,51.002],[-60.366,34.051],[-58.174,34.054],[-51.601,53.902],[-47.436,37.233],[-45.184,37.226],[-40.922,53.244],[-38.692,53.314],[-32.987,36.259],[-30.741,36.396],[-26.936,55.021],[-24.689,55.152],[-18.928,37.647],[-16.694,37.719],[-11.735,56.84],[-9.501,56.912],[-3.016,37.267],[-0.791,37.31],[5.616,58.078],[7.843,58.113],[14.943,37.602],[17.174,37.654],[22.237,56.204],[24.501,56.137],[28.296,38.053],[30.539,37.918],[36.486,55.465],[38.729,55.329],[42.457,37.466],[44.714,37.376],[49.778,54.636],[52.036,54.543],[55.609,37.182],[57.823,36.966],[65.064,54.213],[67.295,53.891],[69.214,36.617],[71.395,36.191],[79.726,51.595],[81.915,51.041],[81.915,32.692],[84.028,32.016],[91.992,43.194],[94.075,42.254],[90.816,28.279],[92.557,27.021],[105.441,34.896],[106.965,33.185],[98.241,22.039],[99.158,20.156],[114.674,20.156],[115.179,17.943],[108.583,14.766],[108.494,14.717],[90.184,9.136]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[0,0],[0,0],[0.658,-0.563],[0,0],[-1.124,0.088],[0,0],[1.036,-0.644],[0,0],[-1.208,-0.093],[0,0],[1.028,-0.548],[0,0],[-1.165,-0.083],[0,0],[1.046,-0.539],[0,0],[-1.204,-0.033],[0,0],[0.827,-0.687],[0,0],[-1.233,0.459],[0,0],[0.
336,-0.933],[0,0],[-0.809,1.056],[0,0],[-0.088,-1.061],[0,0],[-0.425,1.176],[0,0],[-0.363,-1.033],[0.207,-0.445],[-1.602,6.262],[-0.308,-1.159],[0,0],[-0.367,1.096],[0,0],[-0.238,-1.167],[0,0],[-0.373,1.134],[0,0],[-0.292,-1.125],[0,0],[-0.364,1.103],[0,0],[-0.317,-1.1],[0,0],[-0.353,1.093],[0,0],[-0.305,-1.117],[0,0],[-0.251,1.197],[0,0],[-0.38,-1.123],[0,0],[-0.242,1.161],[0,0],[-0.341,-1.163],[0,0],[-0.245,1.188],[0,0],[-0.443,-1.056],[0,0],[-0.133,1.194],[0,0],[-0.543,-1.004],[0,0],[0,1.209],[0,0],[-0.658,-0.923],[0,0],[0.29,1.243],[0,0],[-0.882,-0.539],[0,0],[0.799,1.021],[0,0],[-0.971,0],[0,0],[1.116,0.537],[0,0],[0.029,0.018],[9.439,0.371]],"o":[[0,0],[0.798,0.337],[0,0],[-0.857,0.733],[0,0],[1.216,-0.095],[0,0],[-1.108,0.488],[0,0],[1.162,0.09],[0,0],[-1.031,0.55],[0,0],[1.174,0.084],[0,0],[-1.071,0.552],[0,0],[1.074,0.029],[0,0],[-1.018,0.834],[0,0],[0.929,-0.346],[0,0],[-0.451,1.252],[0,0],[0.648,-0.845],[0,0],[0.104,1.246],[0,0],[0.372,-1.03],[2.16,6.144],[-0.193,0.413],[0.297,-1.162],[0,0],[0.297,1.116],[0,0],[0.378,-1.13],[0,0],[0.239,1.17],[0,0],[0.363,-1.104],[0,0],[0.292,1.124],[0,0],[0.359,-1.088],[0,0],[0.318,1.104],[0,0],[0.356,-1.101],[0,0],[0.322,1.18],[0,0],[0.243,-1.16],[0,0],[0.381,1.123],[0,0],[0.248,-1.186],[0,0],[0.342,1.164],[0,0],[0.231,-1.121],[0,0],[0.465,1.107],[0,0],[0.126,-1.134],[0,0],[0.575,1.063],[0,0],[0,-1.134],[0,0],[0.74,1.039],[0,0],[-0.235,-1.006],[0,0],[1.107,0.676],[0,0],[-0.598,-0.764],[0,0],[1.238,0],[0,0],[-0.031,-0.015],[-0.667,-0.4],[-9.85,-0.387]],"v":[[-126.083,-54.465],[-106.816,-43.243],[-106.512,-41.285],[-118.472,-31.061],[-117.624,-29.015],[-98.02,-30.553],[-97.313,-28.404],[-113.443,-18.864],[-113.064,-16.637],[-92.6,-15.058],[-92.142,-12.869],[-111.874,-2.347],[-111.409,-0.158],[-92.355,1.206],[-91.905,3.403],[-109.147,12.286],[-108.645,14.485],[-87.749,15.053],[-87.036,17.113],[-101.181,29.061],[-100.037,31.053],[-80.058,23.621],[-78.556,25.108],[-84.422,41.389],[-82.402,42.492],[-72.534,29.612],[-70.449,30
.224],[-68.746,50.703],[-66.49,51.002],[-60.366,34.051],[-58.174,34.054],[-51.601,53.902],[-47.436,37.233],[-45.184,37.226],[-40.922,53.244],[-38.692,53.314],[-32.987,36.259],[-30.741,36.396],[-26.936,55.021],[-24.689,55.152],[-18.928,37.647],[-16.694,37.719],[-11.735,56.84],[-9.501,56.912],[-3.016,37.267],[-0.791,37.31],[5.616,58.078],[7.843,58.113],[14.943,37.602],[17.174,37.654],[22.237,56.204],[24.501,56.137],[28.296,38.053],[30.539,37.918],[36.486,55.465],[38.729,55.329],[42.457,37.466],[44.714,37.376],[49.778,54.636],[52.036,54.543],[55.609,37.182],[57.823,36.966],[65.064,54.213],[67.295,53.891],[69.214,36.617],[71.395,36.191],[79.726,51.595],[81.915,51.041],[81.915,32.692],[84.028,32.016],[91.992,43.194],[94.075,42.254],[90.816,28.279],[92.557,27.021],[105.441,34.896],[106.965,33.185],[98.241,22.039],[99.158,20.156],[114.674,20.156],[115.179,17.943],[108.583,14.766],[108.494,14.717],[90.184,9.136]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[0,0],[0,0],[0.484,-0.718],[0,0],[-1.059,0.385],[0,0],[0.825,-0.897],[0,0],[-1.143,0.399],[0,0],[0.722,-0.914],[0,0],[-0.992,0.518],[0,0],[0.582,-0.965],[0,0],[-0.731,0.775],[0,0],[0.169,-0.935],[0,0],[-0.842,0.917],[0,0],[0.149,-0.971],[0,0],[-0.585,1.181],[0,0],[-0.288,-1.015],[0,0],[-0.425,1.176],[0,0],[-0.363,-1.033],[0.207,-0.445],[-1.602,6.262],[-0.308,-1.159],[0,0],[-0.367,1.096],[0,0],[-0.238,-1.167],[0,0],[-0.373,1.134],[0,0],[-0.292,-1.125],[0,0],[-0.364,1.103],[0,0],[-0.317,-1.1],[0,0],[-0.354,1.093],[0,0],[-0.305,-1.117],[0,0],[-0.251,1.197],[0,0],[-0.38,-1.123],[0,0],[-0.242,1.161],[0,0],[-0.341,-1.163],[0,0],[-0.245,1.188],[0,0],[-0.443,-1.056],[0,0],[-0.133,1.194],[0,0],[-0.543,-1.004],[0,0],[0,1.209],[0,0],[-0.658,-0.923],[0,0],[0.29,1.243],[0,0],[-0.882,-0.539],[0,0],[0.799,1.021],[0,0],[-0.971,0],[0,0],[1.116,0.537],[0,0],[0.029,0.018],[9.439,0.371]],"o":[[0,0],[0.859,0.111],[0,0],[-0.63,0.935],[0,0],[1.146,-0.417],[0,0],[-0.819,0.891],[0,0],[1.1,-0.383],[0,0],[-0.564,0.9
66],[0,0],[0.999,-0.522],[0,0],[-0.461,1.022],[0,0],[0.744,-0.591],[0,0],[-0.499,1.14],[0,0],[0.837,-0.513],[0,0],[-0.2,1.303],[0,0],[0.468,-0.946],[0,0],[0.104,1.246],[0,0],[0.372,-1.03],[2.16,6.144],[-0.193,0.413],[0.297,-1.162],[0,0],[0.297,1.116],[0,0],[0.378,-1.13],[0,0],[0.239,1.17],[0,0],[0.363,-1.104],[0,0],[0.292,1.124],[0,0],[0.359,-1.088],[0,0],[0.318,1.104],[0,0],[0.356,-1.101],[0,0],[0.322,1.18],[0,0],[0.243,-1.16],[0,0],[0.381,1.123],[0,0],[0.248,-1.186],[0,0],[0.342,1.164],[0,0],[0.231,-1.121],[0,0],[0.465,1.107],[0,0],[0.126,-1.134],[0,0],[0.575,1.063],[0,0],[0,-1.134],[0,0],[0.741,1.039],[0,0],[-0.235,-1.006],[0,0],[1.107,0.676],[0,0],[-0.598,-0.764],[0,0],[1.238,0],[0,0],[-0.031,-0.015],[-0.667,-0.4],[-9.85,-0.387]],"v":[[-150.646,-24.645],[-129.089,-18.987],[-128.272,-17.182],[-137.059,-4.137],[-135.696,-2.392],[-117.224,-9.114],[-115.969,-7.233],[-130.775,8.87],[-129.535,10.757],[-110.163,4.003],[-108.866,5.823],[-120.149,24.254],[-118.669,25.805],[-102.455,17.334],[-100.983,18.898],[-108.24,34.932],[-106.738,35.907],[-92.279,24.453],[-90.625,25.443],[-98.157,43.638],[-96.336,44.825],[-81.496,30.755],[-79.753,31.913],[-82.348,48.86],[-80.174,49.546],[-72.959,31.643],[-70.815,31.84],[-67.58,52.703],[-65.324,53.002],[-59.199,36.051],[-57.007,36.054],[-51.601,53.902],[-47.436,37.233],[-45.184,37.226],[-40.922,53.244],[-38.692,53.314],[-32.987,36.259],[-30.741,36.396],[-26.936,55.021],[-24.689,55.152],[-18.928,37.647],[-16.694,37.719],[-11.735,56.84],[-9.501,56.912],[-3.016,37.267],[-0.791,37.31],[5.616,58.078],[7.843,58.113],[14.943,37.602],[17.174,37.654],[22.237,56.204],[24.501,56.137],[28.296,38.053],[30.539,37.918],[36.486,55.465],[38.729,55.329],[42.457,37.466],[44.714,37.376],[49.778,54.636],[52.036,54.543],[55.609,37.182],[57.823,36.966],[65.064,54.213],[67.295,53.891],[69.214,36.617],[71.395,36.191],[79.726,51.595],[81.915,51.041],[81.915,32.692],[84.028,32.016],[91.992,43.194],[94.075,42.254],[90.816,28.279],[92.557,27.021],[105.441,34.896]
,[106.965,33.185],[98.241,22.039],[99.158,20.156],[114.674,20.156],[115.179,17.943],[108.583,14.766],[108.493,14.717],[90.184,9.136]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[0,0],[0,0],[0.484,-0.718],[0,0],[-1.059,0.385],[0,0],[0.825,-0.897],[0,0],[-1.143,0.399],[0,0],[0.722,-0.914],[0,0],[-0.992,0.518],[0,0],[0.582,-0.965],[0,0],[-0.731,0.775],[0,0],[0.169,-0.935],[0,0],[-0.842,0.917],[0,0],[0.149,-0.971],[0,0],[-0.585,1.181],[0,0],[-0.288,-1.015],[0,0],[-0.425,1.176],[0,0],[-0.363,-1.033],[0.207,-0.445],[-1.602,6.262],[-0.308,-1.159],[0,0],[-0.367,1.096],[0,0],[-0.238,-1.167],[0,0],[-0.373,1.134],[0,0],[-0.292,-1.125],[0,0],[-0.364,1.103],[0,0],[-0.317,-1.1],[0,0],[-0.354,1.093],[0,0],[-0.305,-1.117],[0,0],[-0.251,1.197],[0,0],[-0.38,-1.123],[0,0],[-0.242,1.161],[0,0],[-0.341,-1.163],[0,0],[-0.245,1.188],[0,0],[-0.443,-1.056],[0,0],[-0.133,1.194],[0,0],[-0.543,-1.004],[0,0],[0,1.209],[0,0],[-0.658,-0.923],[0,0],[0.29,1.243],[0,0],[-0.882,-0.539],[0,0],[0.799,1.021],[0,0],[-0.971,0],[0,0],[1.116,0.537],[0,0],[0.029,0.018],[9.439,0.371]],"o":[[0,0],[0.859,0.111],[0,0],[-0.63,0.935],[0,0],[1.146,-0.417],[0,0],[-0.819,0.891],[0,0],[1.1,-0.383],[0,0],[-0.564,0.966],[0,0],[0.999,-0.522],[0,0],[-0.461,1.022],[0,0],[0.744,-0.591],[0,0],[-0.499,1.14],[0,0],[0.837,-0.513],[0,0],[-0.2,1.303],[0,0],[0.468,-0.946],[0,0],[0.104,1.246],[0,0],[0.372,-1.03],[2.16,6.144],[-0.193,0.413],[0.297,-1.162],[0,0],[0.297,1.116],[0,0],[0.378,-1.13],[0,0],[0.239,1.17],[0,0],[0.363,-1.104],[0,0],[0.292,1.124],[0,0],[0.359,-1.088],[0,0],[0.318,1.104],[0,0],[0.356,-1.101],[0,0],[0.322,1.18],[0,0],[0.243,-1.16],[0,0],[0.381,1.123],[0,0],[0.248,-1.186],[0,0],[0.342,1.164],[0,0],[0.231,-1.121],[0,0],[0.465,1.107],[0,0],[0.126,-1.134],[0,0],[0.575,1.063],[0,0],[0,-1.134],[0,0],[0.741,1.039],[0,0],[-0.235,-1.006],[0,0],[1.107,0.676],[0,0],[-0.598,-0.764],[0,0],[1.238,0],[0,0],[-0.031,-0.015],[-0.667,-0.4],[-9.85,-0.387]],"v":[[-150.646,-24.645],[-129.089,-18.987
],[-128.272,-17.182],[-137.059,-4.137],[-135.696,-2.392],[-117.224,-9.114],[-115.969,-7.233],[-130.775,8.87],[-129.535,10.757],[-110.163,4.003],[-108.866,5.823],[-120.149,24.254],[-118.669,25.805],[-102.455,17.334],[-100.983,18.898],[-108.24,34.932],[-106.738,35.907],[-92.279,24.453],[-90.625,25.443],[-98.157,43.638],[-96.336,44.825],[-81.496,30.755],[-79.753,31.913],[-82.348,48.86],[-80.174,49.546],[-72.959,31.643],[-70.815,31.84],[-67.58,52.703],[-65.324,53.002],[-59.199,36.051],[-57.007,36.054],[-51.601,53.902],[-47.436,37.233],[-45.184,37.226],[-40.922,53.244],[-38.692,53.314],[-32.987,36.259],[-30.741,36.396],[-26.936,55.021],[-24.689,55.152],[-18.928,37.647],[-16.694,37.719],[-11.735,56.84],[-9.501,56.912],[-3.016,37.267],[-0.791,37.31],[5.616,58.078],[7.843,58.113],[14.943,37.602],[17.174,37.654],[22.237,56.204],[24.501,56.137],[28.296,38.053],[30.539,37.918],[36.486,55.465],[38.729,55.329],[42.457,37.466],[44.714,37.376],[49.778,54.636],[52.036,54.543],[55.609,37.182],[57.823,36.966],[65.064,54.213],[67.295,53.891],[69.214,36.617],[71.395,36.191],[79.726,51.595],[81.915,51.041],[81.915,32.692],[84.028,32.016],[91.992,43.194],[94.075,42.254],[90.816,28.279],[92.557,27.021],[105.441,34.896],[106.965,33.185],[98.241,22.039],[99.158,20.156],[114.674,20.156],[115.179,17.943],[108.583,14.766],[108.493,14.717],[90.184,9.136]],"c":false}]},{"t":260,"s":[{"i":[[0,0],[0,0],[1.109,-0.214],[0,0],[-1.632,-0.07],[0,0],[1.672,-0.217],[0,0],[-1.75,-0.083],[0,0],[1.549,-0.248],[0,0],[-1.461,0.603],[0,0],[0.795,-1.043],[0,0],[-0.948,1.014],[0,0],[-0.236,-1.062],[0,0],[-0.943,1.146],[0,0],[0.08,-0.99],[0,0],[-0.69,1.243],[0,0],[-0.567,-0.992],[0,0],[-0.62,1.176],[0,0],[-0.427,-1.033],[0.207,-0.445],[-1.602,6.262],[-0.308,-1.159],[0,0],[-0.367,1.096],[0,0],[-0.238,-1.167],[0,0],[-0.373,1.134],[0,0],[-0.292,-1.125],[0,0],[-0.364,1.103],[0,0],[-0.317,-1.1],[0,0],[-0.354,1.093],[0,0],[-0.305,-1.117],[0,0],[-0.251,1.197],[0,0],[-0.38,-1.123],[0,0],[-0.242,1.161],[0,0],[-0.341,-1.16
3],[0,0],[-0.245,1.188],[0,0],[-0.443,-1.056],[0,0],[-0.133,1.194],[0,0],[-0.543,-1.004],[0,0],[0,1.209],[0,0],[-0.658,-0.923],[0,0],[0.29,1.243],[0,0],[-0.882,-0.539],[0,0],[0.799,1.021],[0,0],[-0.971,0],[0,0],[1.116,0.537],[0,0],[0.029,0.018],[9.439,0.371]],"o":[[0,0],[1.039,0.249],[0,0],[-1.444,0.279],[0,0],[1.765,0.076],[0,0],[-1.661,0.215],[0,0],[1.683,0.08],[0,0],[-0.764,0.534],[0,0],[1.471,-0.607],[0,0],[-0.534,1.147],[0,0],[0.845,-0.905],[0,0],[-0.35,1.293],[0,0],[1.155,-0.597],[0,0],[-0.108,1.328],[0,0],[0.553,-0.996],[0,0],[0.151,1.246],[0,0],[0.543,-1.03],[2.538,6.144],[-0.193,0.413],[0.297,-1.162],[0,0],[0.297,1.116],[0,0],[0.378,-1.13],[0,0],[0.239,1.17],[0,0],[0.363,-1.104],[0,0],[0.292,1.124],[0,0],[0.359,-1.087],[0,0],[0.318,1.104],[0,0],[0.356,-1.101],[0,0],[0.322,1.18],[0,0],[0.243,-1.16],[0,0],[0.381,1.123],[0,0],[0.248,-1.186],[0,0],[0.342,1.164],[0,0],[0.231,-1.121],[0,0],[0.465,1.107],[0,0],[0.126,-1.134],[0,0],[0.575,1.063],[0,0],[0,-1.134],[0,0],[0.741,1.039],[0,0],[-0.235,-1.006],[0,0],[1.107,0.676],[0,0],[-0.598,-0.764],[0,0],[1.238,0],[0,0],[-0.031,-0.015],[-0.667,-0.4],[-9.85,-0.387]],"v":[[-171.818,2.863],[-147.666,10.424],[-147.818,11.433],[-167.968,15.327],[-167.371,16.435],[-138.91,17.656],[-138.546,18.802],[-168.551,22.692],[-168.21,23.836],[-138.559,25.251],[-138.1,26.378],[-152.87,36.103],[-150.487,37.121],[-126.608,27.263],[-124.224,28.8],[-132.325,46.672],[-129.729,47.339],[-113.282,29.733],[-110.17,30.185],[-114.983,50.927],[-111.831,51.712],[-93.924,34.322],[-91.203,35.316],[-92.605,52.595],[-89.319,53.074],[-80.724,32.659],[-77.553,32.648],[-71.714,53.703],[-68.421,54.002],[-59.483,37.051],[-56.802,37.054],[-51.601,53.902],[-47.436,37.233],[-45.184,37.226],[-40.922,53.244],[-38.692,53.314],[-32.987,36.259],[-30.741,36.396],[-26.936,55.021],[-24.689,55.152],[-18.928,37.647],[-16.694,37.719],[-11.735,56.84],[-9.501,56.912],[-3.016,37.267],[-0.791,37.31],[5.616,58.078],[7.843,58.113],[14.943,37.602],[17.174,37.654],[22.237,56.204
],[24.501,56.137],[28.296,38.053],[30.539,37.918],[36.486,55.465],[38.729,55.329],[42.457,37.466],[44.714,37.376],[49.778,54.636],[52.036,54.543],[55.609,37.182],[57.823,36.966],[65.064,54.213],[67.295,53.891],[69.214,36.617],[71.395,36.191],[79.726,51.595],[81.915,51.041],[81.915,32.692],[84.028,32.016],[91.992,43.194],[94.075,42.254],[90.816,28.279],[92.557,27.021],[105.441,34.896],[106.965,33.185],[98.241,22.039],[99.158,20.156],[114.674,20.156],[115.179,17.943],[108.583,14.766],[108.493,14.717],[90.184,9.136]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.188235294118,0.352941176471,0.556862745098,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":12,"ty":4,"nm":"mouth","parent":14,"sr":1,"ks":{"p":{"a":0,"k":[-0.965,19.089,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[{"i":[[-6.122,1.718],[25.054,6.122],[-33.043,2.462],[-7.73,2.258]],"o":[[-21.956,7.17],[0,0],[16.534,-1.232],[4.812,-1.406]],"v":[[50.525,-10.398],[-41.482,-1.739],[2.389,-0.393],[35.031,-5.696]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":90,"s":[{"i":[[-6.122,1.718],[25.054,6.122],[-33.043,2.462],[-7.73,2.258]],"o":[[-21.956,7.17],[0,0],[16.534,-1.232],[4.812,-1.406]],"v":[[50.525,-10.398],[-41.482,-1.739],[2.389,-0.393],[35.031,-5.696]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-6.122,1.718],[25.054,6.122],[-33.043,2.462],[-7.73,2.258]],"o":[[-21.956,7.17],[0,0],[16.534,-1.232],[4.812,-1.406]],"v":[[50.525,-10.398],[-41.482,-1.739],[2.389,-0.393],[35.031,-5.696]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-6.4
94,2.602],[41.331,7.94],[-44.504,-0.993],[11.457,17.17]],"o":[[-12.924,4.514],[0,0],[12.499,0.279],[2.244,0.314]],"v":[[41.718,-0.323],[-50.899,2.18],[7.479,24.295],[30.865,3.876]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-6.494,2.602],[41.331,7.94],[-44.504,-0.993],[11.457,17.17]],"o":[[-12.924,4.514],[0,0],[12.499,0.279],[2.244,0.314]],"v":[[41.718,-0.323],[-50.899,2.18],[7.479,24.295],[30.865,3.876]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[-5.045,0.389],[39.612,-1.565],[-44.407,3.089],[-9.453,0.407]],"o":[[-21.744,0.761],[0,0],[16.561,-1.152],[5.884,-0.254]],"v":[[55.635,-2.947],[-50.899,2.18],[2.11,-0.062],[39.608,-2.064]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[-5.045,0.389],[39.612,-1.565],[-44.407,3.089],[-9.453,0.407]],"o":[[-21.744,0.761],[0,0],[16.561,-1.152],[5.884,-0.254]],"v":[[55.635,-2.947],[-50.899,2.18],[2.11,-0.062],[39.608,-2.064]],"c":true}]},{"t":240,"s":[{"i":[[-6.122,1.718],[25.054,6.122],[-33.043,2.462],[-7.73,2.258]],"o":[[-21.956,7.17],[0,0],[16.534,-1.232],[4.812,-1.406]],"v":[[50.525,-10.398],[-41.482,-1.739],[2.389,-0.393],[35.031,-5.696]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.490196078431,0.035294117647,0.035294117647,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.490196079016,0.035294119269,0.035294119269,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":13,"ty":4,"nm":"beak_bl","parent":14,"sr":1,"ks":{"r":{"a":0,"k":10.231},"p":{"a":0,"k":[-13.059,-13.839,0]},"a":{"a":0,"k":[67.737,53.693,0]},"s":{"a":0,"k":[106.133,93.494,100]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[{"i":[[0,0],[0.468,20.066]],"o":[[11.422,-4.401],[0,0]],"v":[[27.474,52.871],[41.066,12.197]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":90,"s":[{"i":[[0,0],[0.468,20.066]],"o":[[11.422,-4.401],[0,0]],"v":[[27.474,52.871],[41.066,12.197]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[0,0],[0.468,20.066]],"o":[[11.422,-4.401],[0,0]],"v":[[27.474,52.871],[41.066,12.197]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[0,0],[-3.221,23.383]],"o":[[11.422,-4.401],[0,0]],"v":[[14.851,64.288],[42.551,17.335]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[0,0],[-3.221,23.383]],"o":[[11.422,-4.401],[0,0]],"v":[[14.851,64.288],[42.551,17.335]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[0,0],[-3.221,23.383]],"o":[[11.422,-4.401],[0,0]],"v":[[14.851,64.288],[42.551,17.335]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[0,0],[-3.221,23.383]],"o":[[11.422,-4.401],[0,0]],"v":[[14.851,64.288],[42.551,17.335]],"c":false}]},{"t":240,"s":[{"i":[[0,0],[0.468,20.066]],"o":[[11.422,-4.401],[0,0]],"v":[[27.474,52.871],[41.066,12.197]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,0.705882352941,0.517647058824,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[33.59,24.06]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[20]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[10]},{"t":240,"s":[20]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[80]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[90]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[70]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[80]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[90]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[70]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[80]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[80]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[90]},{"i":
{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[70]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[80]},{"t":240,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":-57,"op":253,"st":60,"bm":0},{"ddd":0,"ind":14,"ty":4,"nm":"beak","parent":21,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":140,"s":[14.119]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":160,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":176,"s":[-10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":186,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":220,"s":[0]},{"t":240,"s":[14.119]}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[74.249,77.513,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":160,"s":[81.749,23.013,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[81.749,23.013,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":186,"s":[74.249,77.513,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":220,"s":[74.249,77.513,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[74.249,77.513,0]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[{"i":[[-14.377,-2.739],[-26.409,8.024],[4.175,5.349],[12.663,-0.33],[20.156,-0.988],[33.421,-3.495]],"o":[[29.727,4.957],[8.713,-2.73],[4.016,-5.578],[-31.771,1.224],[-20.142,0.987],[-12.777,1.336]],"v":[[-45.2,30.292],[41.359,23.856],[51.385,9.608],[40.079,-0.316],[-0.79,-37.412],[-46.037,5.835]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":90,"s":[{"i":[[-14.377,-2.739],[-26.409,8.024],[4.175,5.349],[12.663,-0.33],[20.156,-0.988],[33.421,-3.495]],"o":[[29.727,4.957],[8.713,-2.73],[
4.016,-5.578],[-31.771,1.224],[-20.142,0.987],[-12.777,1.336]],"v":[[-45.2,30.292],[41.359,23.856],[51.385,9.608],[40.079,-0.316],[-0.79,-37.412],[-46.037,5.835]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-14.377,-2.739],[-26.409,8.024],[4.175,5.349],[12.663,-0.33],[20.156,-0.988],[33.421,-3.495]],"o":[[29.727,4.957],[8.713,-2.73],[4.016,-5.578],[-31.771,1.224],[-20.142,0.987],[-12.777,1.336]],"v":[[-45.2,30.292],[41.359,23.856],[51.385,9.608],[40.079,-0.316],[-0.79,-37.412],[-46.037,5.835]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":160,"s":[{"i":[[-12.023,-8.479],[-9.206,15.424],[5.947,2.701],[8.977,-0.868],[20.156,-0.988],[33.421,-3.495]],"o":[[47.28,33.343],[5.135,-8.604],[35.547,-6.803],[-35.875,3.469],[-20.142,0.987],[-12.777,1.336]],"v":[[-56.012,36.038],[40.632,45.045],[36.799,20.924],[50.239,4.012],[0.259,-33.803],[-56.458,9.366]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-12.023,-8.479],[-9.206,15.424],[5.947,2.701],[8.977,-0.868],[20.156,-0.988],[33.421,-3.495]],"o":[[47.28,33.343],[5.135,-8.604],[35.547,-6.803],[-35.875,3.469],[-20.142,0.987],[-12.777,1.336]],"v":[[-56.012,36.038],[40.632,45.045],[36.799,20.924],[50.239,4.012],[0.259,-33.803],[-56.458,9.366]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":186,"s":[{"i":[[-14.708,0.316],[-37.783,3.619],[-0.874,5.808],[12.536,2.612],[20.156,-0.988],[33.421,-3.495]],"o":[[32.102,-0.689],[8.984,-0.86],[0.788,-5.236],[-36.67,-7.641],[-20.142,0.987],[-12.777,1.336]],"v":[[-55.13,33.568],[49.594,29.072],[66.256,17.769],[50.239,4.012],[0.259,-33.803],[-56.458,9.366]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[-14.708,0.316],[-37.783,3.619],[-0.874,5.808],[12.536,2.612],[20.156,-0.988],[33.421,-3.495]],"o":[[32.102,-0.689],[8.984,-0.86],[0.788,-5.236],[-36.67,-7.641],[-20.142,0.987],[-12.777,1.336]],"v":[[-55.13,33.568],[49.594,29.072],[66.256,17.769],[50.239,4.012],[0.259,-33.80
3],[-56.458,9.366]],"c":true}]},{"t":240,"s":[{"i":[[-14.377,-2.739],[-26.409,8.024],[4.175,5.349],[12.663,-0.33],[20.156,-0.988],[33.421,-3.495]],"o":[[29.727,4.957],[8.713,-2.73],[4.016,-5.578],[-31.771,1.224],[-20.142,0.987],[-12.777,1.336]],"v":[[-45.2,30.292],[41.359,23.856],[51.385,9.608],[40.079,-0.316],[-0.79,-37.412],[-46.037,5.835]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.811764717102,0.207843139768,0.007843137719,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.364705890417,0.121568627656,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":15,"ty":4,"nm":"eye_bl","parent":16,"sr":1,"ks":{"p":{"a":0,"k":[-4.538,-12.619,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[-2.536,2.306],[2.002,2.202],[2.536,-2.306],[-2.002,-2.202]],"o":[[2.536,-2.306],[-2.001,-2.202],[-2.536,2.306],[2.001,2.202]],"v":[[3.624,3.986],[4.592,-4.175],[-3.624,-3.986],[-4.592,4.175]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":98,"s":[{"i":[[-2.521,1.173],[2.016,1.059],[2.521,-1.173],[-2.016,-1.059]],"o":[[2.521,-1.173],[-2.016,-1.059],[-2.521,1.173],[2.016,1.059]],"v":[[4.268,26.317],[5.181,22.275],[-3.033,22.481],[-3.946,26.523]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":104,"s":[{"i":[[-2.536,2.306],[2.002,2.202],[2.536,-2.306],[-2.002,-2.202]],"o":[[2.536,-2.306],[-2.001,-2.202],[-2.536,2.306],[2.001,2.202]],"v":[[3.624,3.986],[4.592,-4.175],[-3.624,-3.986],[-4.592,4.175]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-2.536,2.306],[2.002,2.202],[2.536,-2.306],[-2
.002,-2.202]],"o":[[2.536,-2.306],[-2.001,-2.202],[-2.536,2.306],[2.001,2.202]],"v":[[3.624,3.986],[4.592,-4.175],[-3.624,-3.986],[-4.592,4.175]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":148,"s":[{"i":[[-2.521,1.173],[2.016,1.059],[2.521,-1.173],[-2.016,-1.059]],"o":[[2.521,-1.173],[-2.016,-1.059],[-2.521,1.173],[2.016,1.059]],"v":[[4.268,26.317],[5.181,22.275],[-3.033,22.481],[-3.946,26.523]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0},"t":156,"s":[{"i":[[-2.536,2.306],[2.002,2.202],[2.536,-2.306],[-2.002,-2.202]],"o":[[2.536,-2.306],[-2.001,-2.202],[-2.536,2.306],[2.001,2.202]],"v":[[3.624,3.986],[4.592,-4.175],[-3.624,-3.986],[-4.592,4.175]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-2.536,2.306],[2.002,2.202],[2.536,-2.306],[-2.002,-2.202]],"o":[[2.536,-2.306],[-2.001,-2.202],[-2.536,2.306],[2.001,2.202]],"v":[[3.624,3.986],[4.592,-4.175],[-3.624,-3.986],[-4.592,4.175]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[-2.521,1.173],[2.016,1.059],[2.521,-1.173],[-2.016,-1.059]],"o":[[2.521,-1.173],[-2.016,-1.059],[-2.521,1.173],[2.016,1.059]],"v":[[4.268,26.317],[5.181,22.275],[-3.033,22.481],[-3.946,26.523]],"c":true}]},{"t":188,"s":[{"i":[[-2.536,2.306],[2.002,2.202],[2.536,-2.306],[-2.002,-2.202]],"o":[[2.536,-2.306],[-2.001,-2.202],[-2.536,2.306],[2.001,2.202]],"v":[[3.624,3.986],[4.592,-4.175],[-3.624,-3.986],[-4.592,4.175]],"c":true}]}]},"nm":"Path 
1","hd":false},{"ty":"fl","c":{"a":1,"k":[{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":92,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":98,"s":[0,0,0,1]},{"i":{"x":[-0.196],"y":[1]},"o":{"x":[1],"y":[0]},"t":104,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":140,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":148,"s":[0,0,0,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":156,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":176,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":182,"s":[0,0,0,1]},{"t":188,"s":[1,1,1,1]}]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":16,"ty":4,"nm":"eye","parent":14,"sr":1,"ks":{"p":{"a":0,"k":[-63.933,-27.035,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[3.869,-13.862],[-11.79,-2.869],[-3.869,13.862],[11.79,2.869]],"o":[[-3.869,13.862],[11.79,2.869],[3.869,-13.862],[-11.79,-2.869]],"v":[[-21.348,-5.195],[-7.006,25.1],[21.348,5.195],[7.006,-25.1]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":98,"s":[{"i":[[3.685,-6.839],[-12.015,1.693],[5.561,6.567],[12.865,-2.538]],"o":[[-4.4,8.166],[12.015,-1.693],[-5.473,-6.464],[-15.472,3.052]],"v":[[-23.788,2.618],[-1.494,18.142],[19.724,-3.514],[-0.496,6.258]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":104,"s":[{"i":[[3.869,-13.862],[-11.79,-2.869],[-3.869,13.862],[11.79,2.869]],"o":[[-3.869,13.862],[11.79,2.869],[3.869,-13.862],[-11.79,-2.869]],"v":[[-21.348,-5.195],[-7.006,25.1],[21.348,5.195],[7.006,-25.1]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[3.869,-13.862],[-11.79,-2.869
],[-3.869,13.862],[11.79,2.869]],"o":[[-3.869,13.862],[11.79,2.869],[3.869,-13.862],[-11.79,-2.869]],"v":[[-21.348,-5.195],[-7.006,25.1],[21.348,5.195],[7.006,-25.1]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":148,"s":[{"i":[[3.685,-6.839],[-12.015,1.693],[5.561,6.567],[12.865,-2.538]],"o":[[-4.4,8.166],[12.015,-1.693],[-5.473,-6.464],[-15.472,3.052]],"v":[[-23.788,2.618],[-1.494,18.142],[19.724,-3.514],[-0.496,6.258]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0},"t":156,"s":[{"i":[[3.869,-13.862],[-11.79,-2.869],[-3.869,13.862],[11.79,2.869]],"o":[[-3.869,13.862],[11.79,2.869],[3.869,-13.862],[-11.79,-2.869]],"v":[[-21.348,-5.195],[-7.006,25.1],[21.348,5.195],[7.006,-25.1]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[3.869,-13.862],[-11.79,-2.869],[-3.869,13.862],[11.79,2.869]],"o":[[-3.869,13.862],[11.79,2.869],[3.869,-13.862],[-11.79,-2.869]],"v":[[-21.348,-5.195],[-7.006,25.1],[21.348,5.195],[7.006,-25.1]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[3.685,-6.839],[-12.015,1.693],[5.561,6.567],[12.865,-2.538]],"o":[[-4.4,8.166],[12.015,-1.693],[-5.473,-6.464],[-15.472,3.052]],"v":[[-23.788,2.618],[-1.494,18.142],[19.724,-3.514],[-0.496,6.258]],"c":true}]},{"t":188,"s":[{"i":[[3.869,-13.862],[-11.79,-2.869],[-3.869,13.862],[11.79,2.869]],"o":[[-3.869,13.862],[11.79,2.869],[3.869,-13.862],[-11.79,-2.869]],"v":[[-21.348,-5.195],[-7.006,25.1],[21.348,5.195],[7.006,-25.1]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0,0,0,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":17,"ty":4,"nm":"eye_bl","parent":18,"sr":1,"ks":{"p":{"a":0,"k":[-5.074,-9.191,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[1.051,-3.263],[-2.832,-0.912],[-1.051,3.263],[2.832,0.912]],"o":[[-1.051,3.263],[2.832,0.912],[1.051,-3.263],[-2.832,-0.912]],"v":[[-5.128,-1.652],[-1.903,5.908],[5.128,1.652],[1.903,-5.908]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":98,"s":[{"i":[[1.172,-0.479],[-2.734,-0.765],[-1.172,0.479],[2.734,0.765]],"o":[[-1.172,0.479],[2.734,0.765],[1.172,-0.479],[-2.734,-0.765]],"v":[[-3.94,14.767],[-1.111,17.019],[5.961,17.537],[3.132,15.285]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":104,"s":[{"i":[[1.051,-3.263],[-2.832,-0.912],[-1.051,3.263],[2.832,0.912]],"o":[[-1.051,3.263],[2.832,0.912],[1.051,-3.263],[-2.832,-0.912]],"v":[[-5.128,-1.652],[-1.903,5.908],[5.128,1.652],[1.903,-5.908]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[1.051,-3.263],[-2.832,-0.912],[-1.051,3.263],[2.832,0.912]],"o":[[-1.051,3.263],[2.832,0.912],[1.051,-3.263],[-2.832,-0.912]],"v":[[-5.128,-1.652],[-1.903,5.908],[5.128,1.652],[1.903,-5.908]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":148,"s":[{"i":[[1.172,-0.479],[-2.734,-0.765],[-1.172,0.479],[2.734,0.765]],"o":[[-1.172,0.479],[2.734,0.765],[1.172,-0.479],[-2.734,-0.765]],"v":[[-3.94,14.767],[-1.111,17.019],[5.961,17.537],[3.132,15.285]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":156,"s":[{"i":[[1.051,-3.263],[-2.832,-0.912],[-1.051,3.263],[2.832,0.912]],"o":[[-1.051,3.263],[2.832,0.912],[1.051,-3.263],[-2.832,-0.912]],"v":[[-5.128,-1.652],[-1.903,5.908],[5.128,1.652],[1.903,-5.908]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[1.051,-3.263],[-2.832,-0.912],[-1.051,3.263],[2.832,0.912]],"o":[[-1.051,3.263],[2.832,0.912],[1.0
51,-3.263],[-2.832,-0.912]],"v":[[-5.128,-1.652],[-1.903,5.908],[5.128,1.652],[1.903,-5.908]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[1.172,-0.479],[-2.734,-0.765],[-1.172,0.479],[2.734,0.765]],"o":[[-1.172,0.479],[2.734,0.765],[1.172,-0.479],[-2.734,-0.765]],"v":[[-3.94,14.767],[-1.111,17.019],[5.961,17.537],[3.132,15.285]],"c":true}]},{"t":188,"s":[{"i":[[1.051,-3.263],[-2.832,-0.912],[-1.051,3.263],[2.832,0.912]],"o":[[-1.051,3.263],[2.832,0.912],[1.051,-3.263],[-2.832,-0.912]],"v":[[-5.128,-1.652],[-1.903,5.908],[5.128,1.652],[1.903,-5.908]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"fl","c":{"a":1,"k":[{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":92,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":98,"s":[0,0,0,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[0.542],"y":[0]},"t":104,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":140,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":148,"s":[0,0,0,1]},{"i":{"x":[0.421],"y":[1]},"o":{"x":[1],"y":[0]},"t":156,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":176,"s":[1,1,1,1]},{"i":{"x":[0],"y":[1]},"o":{"x":[1],"y":[0]},"t":182,"s":[0,0,0,1]},{"t":188,"s":[1,1,1,1]}]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":18,"ty":4,"nm":"eye","parent":14,"sr":1,"ks":{"p":{"a":0,"k":[48.862,-31.636,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":92,"s":[{"i":[[-0.631,-12.354],[9.109,0.088],[0.631,12.354],[-9.109,-0.088]],"o":[[0.631,12.354],[-9.109,-0.088],[-0.631,-12.354],[9.109,0.088]],"v":[[16.493,0.16],[1.142,22.369],[-16.493,-0.16],[-1.142,-22.369]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":98,"s":[{"i":[[-2.777,-5.755],[8.988,1.483],[-5.589,4.128],[-8.854,-2.142]],"o":[[2.741,5.68],[-11.086,-1.829],[4.432,-3.273],[8.854,2.142]],"v":[[17.063,4.043],[-2.015,14.046],[-15,-3.713],[1.194,3.89]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":104,"s":[{"i":[[-0.631,-12.354],[9.109,0.088],[0.631,12.354],[-9.109,-0.088]],"o":[[0.631,12.354],[-9.109,-0.088],[-0.631,-12.354],[9.109,0.088]],"v":[[16.493,0.16],[1.142,22.369],[-16.493,-0.16],[-1.142,-22.369]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[-0.631,-12.354],[9.109,0.088],[0.631,12.354],[-9.109,-0.088]],"o":[[0.631,12.354],[-9.109,-0.088],[-0.631,-12.354],[9.109,0.088]],"v":[[16.493,0.16],[1.142,22.369],[-16.493,-0.16],[-1.142,-22.369]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":148,"s":[{"i":[[-2.777,-5.755],[8.988,1.483],[-5.589,4.128],[-8.854,-2.142]],"o":[[2.741,5.68],[-11.086,-1.829],[4.432,-3.273],[8.854,2.142]],"v":[[17.063,4.043],[-2.015,14.046],[-15,-3.713],[1.194,3.89]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":156,"s":[{"i":[[-0.631,-12.354],[9.109,0.088],[0.631,12.354],[-9.109,-0.088]],"o":[[0.631,12.354],[-9.109,-0.088],[-0.631,-12.354],[9.109,0.088]],"v":[[16.493,0.16],[1.142,22.369],[-16.493,-0.16],[-1.142,-22.369]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":176,"s":[{"i":[[-0.631,-12.354],[9.109,0.088],[0.631,12.354],[-9.109,-0.088]],"o":[[0.631,12.354],[-
9.109,-0.088],[-0.631,-12.354],[9.109,0.088]],"v":[[16.493,0.16],[1.142,22.369],[-16.493,-0.16],[-1.142,-22.369]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":182,"s":[{"i":[[-2.777,-5.755],[8.988,1.483],[-5.589,4.128],[-8.854,-2.142]],"o":[[2.741,5.68],[-11.086,-1.829],[4.432,-3.273],[8.854,2.142]],"v":[[17.063,4.043],[-2.015,14.046],[-15,-3.713],[1.194,3.89]],"c":true}]},{"t":188,"s":[{"i":[[-0.631,-12.354],[9.109,0.088],[0.631,12.354],[-9.109,-0.088]],"o":[[0.631,12.354],[-9.109,-0.088],[-0.631,-12.354],[9.109,0.088]],"v":[[16.493,0.16],[1.142,22.369],[-16.493,-0.16],[-1.142,-22.369]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0,0,0,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":19,"ty":4,"nm":"head_bl2","parent":21,"sr":1,"ks":{"p":{"a":0,"k":[114.382,15.666,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,-33.73],[17.983,-17.442]],"o":[[26.634,24.131],[0,27.048],[0,0]],"v":[[-21.657,-79.355],[21.657,11.96],[-7.051,79.355]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.988235294819,0.933333337307,0.129411771894,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":228,"s":[0]},{"t":240,"s":[5]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[100]},{"i":{"x":[0.6
67],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":204,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":228,"s":[95]},{"t":240,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":20,"ty":4,"nm":"head_bl1","parent":21,"sr":1,"ks":{"p":{"a":0,"k":[-74.783,-60.203,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[-25.262,15.235],[-13.13,3.349]],"o":[[10.91,-25.636],[11.292,-6.809],[0,0]],"v":[[-46.414,39.575],[9.622,-24.124],[46.414,-39.575]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[5]
},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[0]},{"t":240,"s":[0]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":192,"s":[95]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[100]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[95]},{"t":240,"s":[95]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 
1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":21,"ty":4,"nm":"head","parent":24,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.19],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":68,"s":[5.9]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.81],"y":[0]},"t":98,"s":[11.733]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":126,"s":[5.9]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":140,"s":[5.9]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":160,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":220,"s":[0]},{"t":240,"s":[5.9]}]},"p":{"a":1,"k":[{"i":{"x":0.19,"y":0.19},"o":{"x":0.333,"y":0.333},"t":68,"s":[19,-47.125,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.81,"y":0.81},"t":98,"s":[19,-47.125,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":126,"s":[19,-47.125,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[19,-47.125,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":160,"s":[4,-52.125,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[4,-52.125,0],"to":[0,0,0],"ti":[0,0,0]},{"t":240,"s":[19,-47.125,0]}]},"a":{"a":0,"k":[0,117.338,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.19,"y":1},"o":{"x":0.333,"y":0},"t":68,"s":[{"i":[[0,0],[0,35.142],[83.51,0],[0,-74.654],[-25.672,-21.266]],"o":[[25.997,-21.434],[0,-74.654],[-83.509,0],[0,35.237],[0,0]],"v":[[110.113,117.338],[151.206,30.805],[0,-117.338],[-151.206,30.805],[-110.361,117.131]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.81,"y":0},"t":98,"s":[{"i":[[0,0],[1.849,35.093],[83.394,-4.394],[-3.928,-74.55],[-26.755,-19.886]],"o":[[24.833,-22.772],[-3.928,-74.55],[-83.393,4.394],[1.854,35.188],[0,0]],"v":[[114.911,111.636],[151.396,23.061],[-7.396,-116.921],[-150.598,38.971],[-105.268,123.029]],"c":false}]},{"t":126,"s":[{"i":[[0,0],[0,35.142],[83.51,0],[0,-74
.654],[-25.672,-21.266]],"o":[[25.997,-21.434],[0,-74.654],[-83.509,0],[0,35.237],[0,0]],"v":[[110.113,117.338],[151.206,30.805],[0,-117.338],[-151.206,30.805],[-110.361,117.131]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.980392158031,0.564705908298,0.086274512112,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.835294127464,0.152941182256,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":22,"ty":4,"nm":"body_bl2","parent":24,"sr":1,"ks":{"p":{"a":0,"k":[-116.874,-10.809,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[7.711,-7.121],[0,0]],"o":[[-7.711,7.121],[0,0]],"v":[[10.542,-14.213],[-4.807,16.686]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":158,"s":[{"i":[[6.257,-8.427],[0,0]],"o":[[-6.257,8.427],[0,0]],"v":[[4.678,-16.604],[-4.678,16.604]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[6.257,-8.427],[0,0]],"o":[[-6.257,8.427],[0,0]],"v":[[4.678,-16.604],[-4.678,16.604]],"c":false}]},{"t":238,"s":[{"i":[[7.711,-7.121],[0,0]],"o":[[-7.711,7.121],[0,0]],"v":[[10.542,-14.213],[-4.807,16.686]],"c":false}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[5]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[0]},{"t":240,"s":[0]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":192,"s":[95]},{"i":{"x":[0.83
3],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[100]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[95]},{"t":240,"s":[95]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":23,"ty":4,"nm":"body_bl1","parent":24,"sr":1,"ks":{"p":{"a":0,"k":[129.602,22.994,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[-5.341,-14.489],[20.902,-15.203]],"o":[[5.341,14.489],[-20.902,15.203]],"v":[[-1.236,-30.87],[-6.22,30.87]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.988235294819,0.933333337307,0.129411771894,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":8},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":204,"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,
"s":[5]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[0]},{"t":240,"s":[5]}]},"e":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":60,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":72,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":84,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":108,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":120,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":132,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":144,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":156,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":168,"s":[95]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":180,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":192,"s":[95]},{"i":{"x":[0.833],"y":[1]},"o":{"x":[0.167],"y":[0]},"t":204,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":216,"s":[100]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":228,"s":[95]},{"t":240,"s":[100]}]},"o":{"a":0,"k":0},"m":1,"nm":"Trim Paths 
1","hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":24,"ty":4,"nm":"body","sr":1,"ks":{"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[194.553,429.954,0],"to":[-2.5,0,0],"ti":[2.5,0,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":158,"s":[179.553,429.954,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[179.553,429.954,0],"to":[2.5,0,0],"ti":[-2.5,0,0]},{"t":238,"s":[194.553,429.954,0]}]},"a":{"a":0,"k":[0,86.598,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":60,"s":[100,100,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":68,"s":[101,99,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":86,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":96,"s":[100,100,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":104,"s":[101,99,100]},{"i":{"x":[0.833,0.833,0.833],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":122,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":140,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":147,"s":[103,97,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":154,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":176,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":183,"s":[103,97,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":190,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":220,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667
],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":227,"s":[103,97,100]},{"t":234,"s":[100,100,100]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":140,"s":[{"i":[[63.785,0],[0,63.567],[-2.142,5.663],[-77.751,0],[-5.54,-88.014]],"o":[[-63.462,0],[0,-17.548],[27.506,-72.737],[78.802,0],[4.023,63.919]],"v":[[5.206,86.588],[-149.431,19.033],[-134.558,-0.119],[5.206,-86.598],[149.292,24.938]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":158,"s":[{"i":[[63.785,0],[2.905,59.651],[-1.087,5.956],[-77.751,0],[-5.54,-88.014]],"o":[[-63.462,0],[-0.744,-15.278],[13.006,-71.237],[78.802,0],[4.023,63.919]],"v":[[5.206,86.588],[-139.809,29.922],[-134.058,-2.619],[5.206,-86.598],[149.292,24.938]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":220,"s":[{"i":[[63.785,0],[2.905,59.651],[-1.087,5.956],[-77.751,0],[-5.54,-88.014]],"o":[[-63.462,0],[-0.744,-15.278],[13.006,-71.237],[78.802,0],[4.023,63.919]],"v":[[5.206,86.588],[-139.809,29.922],[-134.058,-2.619],[5.206,-86.598],[149.292,24.938]],"c":true}]},{"t":238,"s":[{"i":[[63.785,0],[0,63.567],[-2.142,5.663],[-77.751,0],[-5.54,-88.014]],"o":[[-63.462,0],[0,-17.548],[27.506,-72.737],[78.802,0],[4.023,63.919]],"v":[[5.206,86.588],[-149.431,19.033],[-134.558,-0.119],[5.206,-86.598],[149.292,24.938]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.980392158031,0.564705908298,0.086274512112,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":1,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.835294127464,0.152941182256,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":25,"ty":4,"nm":"wing2","parent":24,"sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":192,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":196.617,"s":[-54.109]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":201.23,"s":[-38.585]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":208,"s":[-38.585]},{"t":214,"s":[0]}]},"p":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[102.664,-22.683,0],"to":[1.667,2.5,0],"ti":[-8.283,1.423,0]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":64,"s":[112.664,-7.683,0],"to":[-4.467,-1.423,0],"ti":[-18.533,-0.327,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":68,"s":[112.664,-22.683,0],"to":[16.533,1.827,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":78,"s":[112.664,-7.683,0],"to":[-4.167,-2.5,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":96,"s":[102.664,-22.683,0],"to":[0,0,0],"ti":[-1.667,-2.5,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":104,"s":[112.664,-7.683,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":114,"s":[112.664,-7.683,0],"to":[-1.667,-2.5,0],"ti":[1.667,2.5,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":126,"s":[102.664,-22.683,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":192,"s":[102.664,-22.683,0],"to":[1.945,-0.515,0],"ti":[-1.945,0.515,0]},{"i":{"x":0.667,"y":0.667},"o":{"x":0.333,"y":0.333},"t":196.617,"s":[114.334,-25.77,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":209.385,"s":[114.334,-25.77,0],"to":[-1.945,0.515,0],"ti":[1.945,-0.515,0]},{"t":214,"s":[102.664,-22.683,0]}]},"a":{"a":0,"k":[-32,-5,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":60,"s":[{"i":[[0,0],[-7.039,-17.988],[18.408,10.905],[7.447
,-2.826]],"o":[[30.491,13.562],[7.039,17.988],[-18.408,-10.905],[0,0]],"v":[[-28.001,-39.517],[39.258,25.478],[10.523,31.697],[-40.859,15.839]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":64,"s":[{"i":[[0,0],[-27.975,-7.152],[21.284,2.182],[12.642,19.488]],"o":[[17.284,26.843],[18.714,4.784],[-18.24,-1.87],[0,0]],"v":[[-28.001,-39.517],[38.758,29.978],[15.523,46.697],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":68,"s":[{"i":[[0,0],[-27.83,7.698],[19.554,-8.685],[12.642,19.488]],"o":[[17.284,26.843],[18.617,-5.15],[-16.757,7.443],[0,0]],"v":[[-35.501,-27.017],[25.334,12.599],[13.48,38.655],[-48.359,28.339]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":78,"s":[{"i":[[0,0],[-28.779,-2.353],[21.348,-1.423],[12.642,19.488]],"o":[[17.284,26.843],[19.252,1.574],[-18.295,1.219],[0,0]],"v":[[-28.001,-39.517],[57.908,6.686],[37.809,27.068],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":90,"s":[{"i":[[0,0],[-17.475,17.598],[16.096,-14.096],[12.892,19.488]],"o":[[30.491,13.562],[13.611,-13.706],[-18.99,16.63],[0,0]],"v":[[-28.001,-39.517],[41.258,-13.772],[38.523,15.947],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":96,"s":[{"i":[[0,0],[-7.039,-17.988],[18.408,10.905],[7.447,-2.826]],"o":[[30.491,13.562],[7.039,17.988],[-18.408,-10.905],[0,0]],"v":[[-28.001,-39.517],[39.258,25.478],[10.523,31.697],[-40.859,15.839]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.333,"y":0},"t":104,"s":[{"i":[[0,0],[-27.975,-7.152],[21.284,2.182],[12.642,19.488]],"o":[[17.284,26.843],[18.714,4.784],[-18.24,-1.87],[0,0]],"v":[[-28.001,-39.517],[38.758,29.978],[15.523,46.697],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.167,"y":0.167},"t":110,"s":[{"i":[[0,0],[-27.83,7.698],[19.554,-8.685],[12.642,19.488]],"o":[[17.284,26.843],[18.617,-5.15],[-16.757,7.443],[0,0]],"v":[[-28.001,-39.517],[18.834,-4.901],[6.98,21.155],[-40.859,15.839]
],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":114,"s":[{"i":[[0,0],[-28.779,-2.353],[21.348,-1.423],[12.642,19.488]],"o":[[17.284,26.843],[19.252,1.574],[-18.295,1.219],[0,0]],"v":[[-28.001,-39.517],[57.908,6.686],[37.809,27.068],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":126,"s":[{"i":[[0,0],[-7.039,-17.988],[18.408,10.905],[7.447,-2.826]],"o":[[30.491,13.562],[7.039,17.988],[-18.408,-10.905],[0,0]],"v":[[-28.001,-39.517],[39.258,25.478],[10.523,31.697],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":193.539,"s":[{"i":[[0,0],[-7.039,-17.988],[18.408,10.905],[7.447,-2.826]],"o":[[30.491,13.562],[7.039,17.988],[-18.408,-10.905],[0,0]],"v":[[-28.001,-39.517],[39.258,25.478],[10.523,31.697],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":198.154,"s":[{"i":[[0,0],[-15.626,-11.355],[21.383,-0.74],[16.702,12.346]],"o":[[17.301,32.755],[15.626,11.355],[-21.383,0.74],[0,0]],"v":[[-28.001,-39.517],[41.778,9.355],[20.928,30.084],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":204,"s":[{"i":[[0,0],[-7.039,-17.988],[18.408,10.905],[7.447,-2.826]],"o":[[30.491,13.562],[7.039,17.988],[-18.408,-10.905],[0,0]],"v":[[-28.001,-39.517],[39.258,25.478],[10.523,31.697],[-40.859,15.839]],"c":false}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":210,"s":[{"i":[[0,0],[-7.039,-17.988],[18.408,10.905],[7.447,-2.826]],"o":[[30.491,13.562],[7.039,17.988],[-18.408,-10.905],[0,0]],"v":[[-28.001,-39.517],[39.258,25.478],[10.523,31.697],[-40.859,15.839]],"c":false}]},{"t":215.5390625,"s":[{"i":[[0,0],[-7.039,-17.988],[18.408,10.905],[7.447,-2.826]],"o":[[30.491,13.562],[7.039,17.988],[-18.408,-10.905],[0,0]],"v":[[-28.001,-39.517],[39.258,25.478],[10.523,31.697],[-40.859,15.839]],"c":false}]}]},"nm":"Path 
1","hd":false},{"ty":"st","c":{"a":0,"k":[0.980392158031,0.564705908298,0.086274512112,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":10},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,0.835294127464,0.152941182256,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":26,"ty":4,"nm":"phone_thng","parent":29,"sr":1,"ks":{"p":{"a":0,"k":[147.674,117.929,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[-15.226,-11.572],[4.111,11.572],[15.226,-5.938]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.352941185236,0.439215689898,0.478431373835,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.549019634724,0.72549021244,0.827450990677,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":27,"ty":4,"nm":"phone_crl","parent":29,"sr":1,"ks":{"p":{"a":0,"k":[127.664,101.788,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[7.15,0],[1.909,-7.15],[-7.15,0],[-1.909,7.15]],"o":[[-7.15,0],[-1.909,7.15],[7.15,0],[1.909,-7.15]],"v":[[3.457,-12.947],[-12.947,0],[-3.457,12.947],[12.947,0]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 
1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.031372550875,0.607843160629,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":28,"ty":0,"nm":"disk_dots 2 Comp 1","parent":29,"refId":"comp_1","sr":1,"ks":{"r":{"a":0,"k":30.711},"p":{"a":0,"k":[127,103,0]},"a":{"a":0,"k":[256,256,0]},"s":{"a":0,"k":[81.476,105.29,100]}},"ao":0,"w":512,"h":512,"ip":0,"op":300,"st":0,"bm":0},{"ddd":0,"ind":29,"ty":4,"nm":"phone","sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":200,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":206,"s":[-2]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":212,"s":[2]},{"t":220,"s":[0]}]},"p":{"a":0,"k":[386.133,414.448,0]},"a":{"a":0,"k":[130.133,158.448,0]},"s":{"a":1,"k":[{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":200,"s":[100,100,100]},{"i":{"x":[0.667,0.667,0.667],"y":[1,1,1]},"o":{"x":[0.333,0.333,0.333],"y":[0,0,0]},"t":208,"s":[103,97,100]},{"t":214,"s":[100,100,100]}]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[77.191,144.464],[149.222,144.464]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.152941182256,0.819607853889,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[181.551,79.48],[177.084,127.136]],"c":false}},"nm":"Path 
1","hd":false},{"ty":"st","c":{"a":0,"k":[0.152941182256,0.819607853889,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 2","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[18.933,0],[5.055,-18.933],[-18.933,0],[-5.055,18.933]],"o":[[-18.933,0],[-5.055,18.933],[18.933,0],[5.055,-18.933]],"v":[[136.816,67.507],[93.382,101.788],[118.511,136.069],[161.945,101.788]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0.611764729023,0.843137264252,1,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 3","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[191.733,66.392],[203.056,66.392],[214.898,119.785],[185.105,137.184]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.035294119269,0.525490224361,0.800000011921,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
4","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0],[0,0]],"v":[[191.733,66.392],[173.75,66.392],[95.905,66.392],[45.367,137.184],[185.105,137.184]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.031372550875,0.607843160629,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 5","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[185.105,158.448],[214.898,141.05],[214.898,119.785],[185.105,137.184]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.035294119269,0.525490224361,0.800000011921,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 6","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[185.105,158.448],[45.367,158.448],[45.367,137.184],[185.105,137.184]],"c":true}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.031372550875,0.607843160629,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 
1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 7","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":200,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[130.997,66.345],[122.953,66.41],[122.981,56.095],[131.025,56.03]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":206,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[130.726,81.862],[122.681,81.927],[122.71,71.612],[130.754,71.548]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[130.726,81.862],[122.681,81.927],[122.71,71.612],[130.754,71.548]],"c":true}]},{"t":220,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[130.997,66.345],[122.953,66.41],[122.981,56.095],[131.025,56.03]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.031372550875,0.607843160629,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 
10","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":200,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[181.855,66.345],[173.779,66.392],[173.759,56.078],[181.835,56.03]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":206,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[181.584,81.862],[173.508,81.91],[173.488,71.595],[181.564,71.548]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[181.584,81.862],[173.508,81.91],[173.488,71.595],[181.564,71.548]],"c":true}]},{"t":220,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[181.855,66.345],[173.779,66.392],[173.759,56.078],[181.835,56.03]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.031372550875,0.607843160629,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":200,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[185.029,52.539],[162.926,52.578],[155.699,67.211],[185.029,67.172]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":206,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[184.758,68.056],[162.655,68.095],[155.428,82.728],[184.758,82.689]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[184.758,68.056],[162.655,68.095],[155.428,82.728],[184.758,82.689]],"c":true}]},{"t":220,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[185.029,52.539],[162.926,52.578],[155.699,67.211],[185.029,67.172]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 
1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.152941182256,0.819607853889,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 8","bm":0,"hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":1,"k":[{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":200,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[134.578,52.539],[112.474,52.578],[105.248,67.211],[134.578,67.172]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":206,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[134.306,68.056],[112.203,68.095],[104.976,82.728],[134.306,82.689]],"c":true}]},{"i":{"x":0.667,"y":1},"o":{"x":0.333,"y":0},"t":212,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[134.306,68.056],[112.203,68.095],[104.976,82.728],[134.306,82.689]],"c":true}]},{"t":220,"s":[{"i":[[0,0],[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0],[0,0]],"v":[[134.578,52.539],[112.474,52.578],[105.248,67.211],[134.578,67.172]],"c":true}]}]},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.427450984716,0.666666686535,1]},"o":{"a":0,"k":100},"w":{"a":0,"k":7},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","hd":false},{"ty":"fl","c":{"a":0,"k":[0.152941182256,0.819607853889,1,1]},"o":{"a":0,"k":100},"r":1,"bm":0,"nm":"Fill 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 9","bm":0,"hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 11","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0}]},{"id":"comp_1","layers":[{"ddd":0,"ind":1,"ty":4,"nm":"disk_dots 
2","sr":1,"ks":{"r":{"a":1,"k":[{"i":{"x":[0.448],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":64,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.804],"y":[0]},"t":80,"s":[230.593]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":96,"s":[0]},{"i":{"x":[0.304],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":104,"s":[0]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.873],"y":[0]},"t":118,"s":[135.97]},{"t":164,"s":[0]}]},"p":{"a":0,"k":[257.167,256,0]}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ks":{"a":0,"k":{"i":[[3.625,-4.311],[7.619,0],[0,13.692],[-13.692,0],[-2.243,-0.667]],"o":[[-4.548,5.408],[-13.692,0],[0,-13.692],[2.46,0],[10.243,3.047]],"v":[[17.817,15.947],[-1.167,24.792],[-25.959,0],[-1.167,-24.792],[5.913,-23.767]],"c":false}},"nm":"Path 1","hd":false},{"ty":"st","c":{"a":0,"k":[0,0.282352941176,0.439215686275,1]},"o":{"a":0,"k":100},"w":{"a":1,"k":[{"i":{"x":[0.304],"y":[1]},"o":{"x":[0.333],"y":[0]},"t":104,"s":[10]},{"i":{"x":[0.667],"y":[1]},"o":{"x":[0.873],"y":[0]},"t":118,"s":[12]},{"t":164,"s":[10]}]},"lc":2,"lj":2,"bm":0,"d":[{"n":"d","nm":"dash","v":{"a":0,"k":0.2}},{"n":"g","nm":"gap","v":{"a":0,"k":16.5}},{"n":"o","nm":"offset","v":{"a":0,"k":0}}],"nm":"Stroke 1","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0]},"a":{"a":0,"k":[0,0]},"s":{"a":0,"k":[100,100]},"r":{"a":0,"k":0},"o":{"a":0,"k":100},"sk":{"a":0,"k":0},"sa":{"a":0,"k":0},"nm":"Transform"}],"nm":"Group 1","bm":0,"hd":false}],"ip":0,"op":300,"st":0,"bm":0}]}],"layers":[{"ddd":0,"ind":1,"ty":0,"nm":"_025_PHONE","refId":"comp_0","sr":1,"ks":{"p":{"a":0,"k":[256,256,0]},"a":{"a":0,"k":[256,256,0]},"s":{"a":0,"k":[110,110,100]}},"ao":0,"w":512,"h":512,"ip":0,"op":180,"st":-60,"bm":0}]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/raw/voice_mini.json b/TMessagesProj/src/main/res/raw/voice_mini.json new file mode 100644 index 000000000..987f9255a --- /dev/null +++ b/TMessagesProj/src/main/res/raw/voice_mini.json @@ -0,0 +1 @@ +{"v":"4.8.0","meta":{"g":"LottieFiles AE 
","a":"","k":"","d":"","tc":""},"fr":60,"ip":0,"op":172,"w":60,"h":60,"nm":"ALL 2","ddd":0,"assets":[{"id":"comp_0","layers":[{"ddd":0,"ind":1,"ty":0,"nm":"Pre-comp 2","refId":"comp_1","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[30,30,0],"to":[0,0,0],"ti":[0,0,0]},{"t":10,"s":[28.282,29.857,0]}],"ix":2},"a":{"a":0,"k":[256,256,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[13.6,13.6,100]},{"t":10,"s":[13.5,13.5,100]}],"ix":6}},"ao":0,"w":512,"h":512,"ip":0,"op":37,"st":0,"bm":0}]},{"id":"comp_1","layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Head 2","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[0.613,-101.537,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":12,"s":[11.613,-50.537,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":26,"s":[11.613,-86.537,0],"to":[0,0,0],"ti":[0,0,0]},{"t":36,"s":[11.113,-83.037,0]}],"ix":2},"a":{"a":0,"k":[11.613,-84.537,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[0,-17.328],[25.612,0],[0,17.328],[-25.612,0]],"o":[[0,17.328],[-25.612,0],[0,-17.328],[25.612,0]],"v":[[54.039,-62.942],[7.664,-31.567],[-38.711,-62.942],[7.664,-94.317]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[0,-23.543],[25.081,0],[0,23.543],[-25.081,0]],"o":[[0,23.543],[-25.081,0],[0,-23.543],[25.081,0]],"v":[[55.47,-76.028],[10.057,-33.4],[-35.356,-76.028],[10.057,-118.656]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":14,"s":[{"i":[[0,-24.735],[24.735,0],[0,24.735],[-24.735,0]],"o":[[0,24.735],[-24.735,0],[0,-24.735],[24.735,0]],"v":[[56.4,-84.537],[11.613,-39.75],[-33.174,-
84.537],[11.613,-129.324]],"c":true}]},{"t":25,"s":[{"i":[[0,-26.924],[26.924,0],[0,26.924],[-26.924,0]],"o":[[0,26.924],[-26.924,0],[0,-26.924],[26.924,0]],"v":[[60.363,-84.537],[11.613,-35.787],[-37.137,-84.537],[11.613,-133.287]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":2,"op":37,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Hands 2","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0,"y":1},"o":{"x":0.05,"y":0},"t":9,"s":[11.336,13.774,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.31,"y":0},"t":12,"s":[11.336,18.274,0],"to":[0,0,0],"ti":[0,0,0]},{"t":25,"s":[11.336,11.774,0]}],"ix":2},"a":{"a":0,"k":[11.336,10.774,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":7,"s":[{"i":[[0,0],[0.875,44.36],[0.012,11.61]],"o":[[0,0],[-0.348,-17.658],[-0.016,-14.955]],"v":[[-33.583,109.659],[-40.875,50.64],[-45.012,-17.11]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[-7.48,30.25],[-7.964,9.648]],"o":[[0,0],[2.73,-11.373],[7.228,-11.196]],"v":[[-57.163,97.688],[-57.996,36.651],[-42.036,0.102]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[0,0],[-14.611,30.379],[-23.925,1.795]],"o":[[0,0],[7.389,-12.121],[13.171,-8.112]],"v":[[-77.526,99.758],[-81.889,29.906],[-41.075,4.99]],"c":false}]},{"i":{"x":0.833,"y":0.833},"
o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[0,0],[-27.718,19.589],[-10.136,-1.028]],"o":[[0,0],[21.782,-16.411],[19.114,-5.028]],"v":[[-107.888,77.829],[-93.782,16.161],[-42.114,8.877]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":13,"s":[{"i":[[0,0],[-27.325,4.127],[-14.975,-3.949]],"o":[[0,0],[20.334,-3.734],[35.444,0.066]],"v":[[-136.225,32.858],[-90.834,7.893],[-38.525,10.472]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[0,0],[-27.129,-3.603],[-14.269,-4.157]],"o":[[0,0],[19.61,2.605],[43.609,2.613]],"v":[[-147.393,-5.627],[-89.61,-2.992],[-38.231,9.77]],"c":false}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[0,0],[-38.825,-22.336],[-19.04,-2.789]],"o":[[0,0],[14.675,9.164],[62.76,8.586]],"v":[[-144.801,-73.381],[-91.175,-7.197],[-37.96,10.757]],"c":false}]},{"t":28,"s":[{"i":[[0,0],[-46.458,-35.583],[-19.738,-3.567]],"o":[[0,0],[8.352,6.397],[79.039,14.286]],"v":[[-142.333,-130.341],[-88.062,-16.235],[-40.512,7.89]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":7,"s":[{"i":[[0,0],[0.583,25.777],[4.5,19.11]],"o":[[0,0],[-0.238,-10.518],[-18.41,-78.181]],"v":[[63,101.64],[63.167,54.973],[56,-16.61]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[2.455,25.149],[6.868,13.265]],"o":[[0,0],[-0.66,-9.745],[-29.265,-65.813]],"v":[[68.548,94.42],[70.368,48.723],[56.521,2.29]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[-11.495,10.995],[15.537,39.692],[11.504,1.535]],"o":[[14.034,-13.424],[-3.378,-8.551],[-47.075,-45.52]],"v":[[79.466,115.674],[91.463,38.058],[62.496,12.174]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":13,"s":[{"i":[[-14.54,10.097],[25.244,37.544],[12.466,1.965]],"o":[[15.872,-11.198],[-4.741,-7.952],[-56.007,-35.342]],"v":[[88.294,117.357],[93.089,33.781],[63.701,13.504]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[-16.063,9.648],[30.097,36.471],[11.946,0.457]],"o":[[16.791,-10.086],[-5.422,-7.653],[-60.473,-30.253]],"v":[[92.709,118.199],[93.903,31.642],[61.554,13.67]],"c":false}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[-5.509,3.309],[41.86,36.38],[11.653,0.923]],"o":[[5.759,-3.459],[-7.371,-6.796],[-73.243,-15.702]],"v":[[124.602,127.851],[94.405,26.111],[62.332,13.5]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":25,"s":[{"i":[[0,0],[48,36.333],[11.5,1.167]],"o":[[0,0],[-8.388,-6.35],[-79.91,-8.107]],"v":[[141.25,132.89],[94.667,23.223],[63.5,11.89]],"c":false}]},{"t":28,"s":[{"i":[[0,0],[48,36.333],[11.5,1.167]],"o":[[0,0],[-8.388,-6.35],[-79.91,-8.107]],"v":[[140,128.89],[94.667,23.848],[63.5,11.89]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":12,"op":37,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Body 2","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":12,"s":[266.676,398.378,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":23,"s":[266.676,387.378,0],"to":[0,0,0],"ti":[0,0,0]},{"t":34,"s":[266.676,392.378,0]}],"ix":2},"a":{"a":0,"k":[10.676,136.378,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":12,"s":[{"i":[[-12.288,0.098],[-12.498,-0.987],[0,0],[3.339,-1.222],[4.897,-9.213],[22.369,0],[7.456,13.82],[-1.892,54.808]],"o":[[8.847,-0.071],[35.724,2.82],[24.706,0],[0,0],[-7.456,13.82],[-22.369,0],[-4.897,-9.213],[-12.353,-3.008]],"v":[[-46.407,7.071],[-18.334,9.585],[61.794,9.679],[70.141,28.952],[68.805,121.176],[13.161,136.5],[-42.484,121.176],[-47.047,25.191]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.31,"y":0},"t":23,"s":[{"i":[[-10.8,-2.3],[-8.2,-0.4],[0,0],[3,-1.3],[4.4,-9.8],[20.1,0],[6.7,14.7],[-1.7,58.3]],"o":[[7.9,1.7],[0,0],[22.2,0],[0,0],[-6.7,14.7],[-20.1,0],[-4.4,-9.8],[-11.1,-3.2]],"v":[[-40.2,-2.3],[-15.1,1.5],[56.9,1.6],[64.4,22.1],[63.2,120.2],[13.2,136.5],[-36.8,120.2],[-40.9,18.1]],"c":true}]},{"t":34,"s":[{"i":[[-10.8,-2.3],[-8.2,-0.4],[0,0],[3,-1.3],[4.4,-9.8],[24.175,0.094],[6.7,14.7],[-1.7,58.3]],"o":[[7.9,1.7],[0,0],[22.2,0],[0,0],[-6.669,14.144],[
-20.1,-0.078],[-4.4,-9.8],[-11.1,-3.2]],"v":[[-40.2,-2.3],[-15.1,1.5],[56.9,1.6],[64.4,22.1],[63.044,119.606],[13.2,136.156],[-36.8,120.2],[-40.9,18.1]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":12,"op":37,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"EXAMPLE ON","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[252.75,259.55,0],"ix":2},"a":{"a":0,"k":[0,3.55,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[45.3,0],[0,42]],"o":[[0,42],[-45.3,0],[0,0]],"v":[[82,8.4],[0,84.5],[-82,6.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-25.4,0],[0,0],[0,-25.4],[0,0],[25.4,0],[0,0],[0,25.4],[0,0]],"o":[[0,0],[25.4,0],[0,0],[0,25.4],[0,0],[-25.4,0],[0,0],[0,-25.4]],"v":[[0,-117.5],[0,-117.5],[46,-71.5],[46,4.5],[0,50.5],[0,50.5],[-46,4.5],[-46,-71.5]],"c":true},"ix":2},"nm":"Path 
1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-6.3,0],[0,0],[0,-6.3],[0,0],[6.3,0],[0,0],[0,6.3],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,6.3],[0,0],[-6.3,0],[0,0],[0,-6.3]],"v":[[-1.6,78.5],[1.7,78.5],[13.1,89.9],[13.1,113.2],[1.7,124.6],[-1.6,124.6],[-13,113.2],[-13,89.9]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":1,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Head 
Mic","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[-3.6,-33.55,0],"to":[0,0,0],"ti":[0,0,0]},{"t":12,"s":[9.4,60.45,0]}],"ix":2},"a":{"a":0,"k":[-3.6,-33.55,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[11.45,-8.15],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[25.8,40.525],[-28.33,-5.73],[-49.7,-35.75],[-49.6,-71.5]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[-25.393,-0.391],[0.18,-19.791],[-0.243,-7.895],[14.056,-6.795],[0,0],[0,0],[0,0]],"o":[[28.416,0.506],[0,0],[0.243,14.68],[0,0],[0,0],[0,0],[0.052,-15.482]],"v":[[-6.434,-101.102],[47.056,-73.018],[47.664,4.298],[29.68,40.735],[-22.557,-15.51],[-55.36,-50.751],[-55.066,-73.828]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":6,"s":[{"i":[[-25.388,-0.636],[-1.714,-14.258],[-0.395,-12.85],[18.974,-7.989],[0,0],[0,0],[0,0]],"o":[[30.309,0.824],[0,0],[0.395,16.737],[0,0],[0,0],[0,0],[2.421,-11.434]],"v":[[-4.713,-90.812],[49.978,-73.971],[50.966,4.171],[29.789,44.635],[-23.955,-14.43],[-58.974,-53.419],[-58.496,-75.289]],"c":true}]},{"t":12,"s":[{"i":[[-25.357,-0.966],[8.591,-7],[-0.599,-19.5],[30.67,-1.65],[0,0],[0,0],[0,0]],"o":[[32.818,1.25],[0,0],[0.599,19.5],[0,0],[0,0],[0,0],[7.976,-0.75]],"v":[[-7.528,-77],[56.909,-75.25],[58.407,4],[30.335,46.65],[-22.743,-12.98],[-60.7,-57],[-59.976,-81.5]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - 
Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[0,0],[-11.6,9.325],[-25.4,0],[0,0],[0,0]],"v":[[-49.7,-36.15],[-27.202,-14.502],[-25.25,-12.55],[26.975,39.425],[-3.6,50.4],[-49.6,4.4],[-49.7,-31.65]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[6.435,0.16],[0,28.821],[0,0]],"o":[[0,0],[0,0],[0,0],[-13.044,10.194],[-29.138,-0.769],[0,0],[0,0]],"v":[[-55.579,-50.718],[-28.188,-26.309],[-26.057,-24.356],[32.281,39.246],[-10.39,49.691],[-55.329,4.4],[-55.38,-31.494]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":6,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[6.77,0.261],[0,30.968],[0,0]],"o":[[0,0],[0,0],[0,0],[-13.708,7.528],[-31.483,-1.252],[0,0],[0,0]],"v":[[-59.067,-54.691],[-31.67,-27.138],[-29.427,-25.186],[31.261,43.864],[-14.65,49.247],[-58.924,4.4],[-59.007,-41.593]],"c":true}]},{"t":12,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[7.214,0.396],[0,33.85],[0,0]],"o":[[0,0],[0,0],[0,0],[-14.586,3.95],[-34.597,-1.9],[0,0],[0,0]],"v":[[-60.625,-60.025],[-33.246,-28.252],[-30.856,-26.3],[32.633,46.55],[-17.287,48.65],[-60.625,4.4],[-60.75,-55.15]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - 
Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":1,"op":13,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Arc L","parent":8,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-3.75,84.5,0],"ix":2},"a":{"a":0,"k":[-3.75,84.5,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[0,0],[13.1,0],[0,42]],"o":[[-11,5.1],[-45.3,0],[0,0]],"v":[[33,76.5],[-3.5,84.5],[-85.5,6.5]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":2,"s":[{"i":[[0,0],[17.605,8.684],[-3.033,28.67]],"o":[[-19.95,7.814],[-28.895,-17.316],[0,0]],"v":[[24.45,79.436],[-39.105,77.316],[-76.362,0.644]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[0,0],[11.789,14.347],[-5.217,19.073]],"o":[[-50.375,15.159],[-15.78,-19.203],[0,0]],"v":[[26.125,87.591],[-58.72,68.703],[-69.783,-3.573]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[0,0],[3.96,23.737],[-17.216,17.641]],"o":[[-21.241,-5.148],[-3.54,-27.263],[0,0]],"v":[[-26.759,91.648],[-66.96,42.763],[-43.784,-26.641]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[0,0],[0.701,27.677],[-23.215,14.71]],"o":[[-17.607,-9.204],[-0.405,-15.984],[0,0]],"v":[[-32.893,81.704],[-69.201,29.323],[-41.785,-18.71]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[0,0],[-6.372,14.388],[-16.86,4.436]],"o":[[-9.639,-11.784],[6.608,-14.922],[0,0]],"v":[[-74.929,75.894],[-84.108,31.922],[-41.89,3.314]],"c":false}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":9,"s"
:[{"i":[[0,0],[-14.453,13.82],[-10.072,-1.911]],"o":[[-3.391,-12.713],[16.367,-15.651],[0,0]],"v":[[-102.109,72.713],[-93.047,27.18],[-41.962,18.507]],"c":false}]},{"t":12,"s":[{"i":[[0,0],[-10.081,5.804],[-6.5,-5.25]],"o":[[-1.25,-14.5],[24.75,-14.25],[0,0]],"v":[[-121.75,68.75],[-97.75,28.5],[-42,26.5]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":1,"op":13,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Arc R","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[66.05,51.45,0],"ix":2},"a":{"a":0,"k":[66.05,51.45,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[0,0],[42.3,-15.5]],"o":[[0,15.6],[0,0]],"v":[[78.4,8.4],[28.45,78.125]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":2,"s":[{"i":[[0,0],[45.259,-17.574]],"o":[[5.569,19.742],[0,0]],"v":[[71.436,-1.265],[27.374,78.492]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[0,0],[47.39,-19.067]],"o":[[9.578,22.724],[0,0]],"v":[[66.422,-8.224],[29.61,86.067]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[0,0],[43.341,-23.274]],"o":[[42.63,43.092],[0,0]],"v":[[43.37,-29.592],[33.159,90.274]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[0,0],[32.542,-17.732]],"o":[[46.682,32.959]
,[0,0]],"v":[[44.318,-21.959],[47.958,77.232]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[0,0],[29.55,-17.51]],"o":[[48.487,23.807],[0,0]],"v":[[50.036,1.981],[67.878,99.974]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":8,"s":[{"i":[[0,0],[28.054,-17.399]],"o":[[49.39,19.231],[0,0]],"v":[[56.395,12.201],[71.838,111.344]],"c":false}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[27.199,-17.335]],"o":[[49.907,16.614],[0,0]],"v":[[57.245,18.726],[77.588,118.775]],"c":false}]},{"t":12,"s":[{"i":[[0,0],[26.05,-17.25]],"o":[[47.35,10.35],[0,0]],"v":[[62.15,25.9],[83.95,128.75]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":0,"op":13,"st":0,"bm":0},{"ddd":0,"ind":8,"ty":4,"nm":"Leg","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[252.45,380.6,0],"ix":2},"a":{"a":0,"k":[-3.55,124.6,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[-6.3,0],[0,0],[0,-6.3],[0,0],[6.3,0],[0,0],[0,6.3],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,6.3],[0,0],[-6.3,0],[0,0],[0,-6.3]],"v":[[-5.2,78.5],[-1.9,78.5],[9.5,89.9],[9.5,113.2],[-1.9,124.6],[-5.2,124.6],[-16.6,113.2],[-16.6,89.9]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[-10.717,-0.585],[0,0],[-5.941,-0.906],[0.206,-8.157],[11.865,-1.977],[0,0],[1.529,7.318],[0,0]],"o":[[0,0],[12.64,0.23],[0,0],[-0.717,13.324],[0,0],[-9.832,-0.809],[-1.086,-1.566],[7.288,1.01]],"v":[[-6.991,83.123],[-1.449,83.426],[26.156,78.756],[29.671,97.051],[10.094,121.234],[-4.602,122.184],[-26.436,108.079],[-29.519,79.278]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[-15.856,0],[0,0],[-3.721,-3.379],[3.175,-9.561],[10.154,-0.123],[0,0],[3.074,9.411],[0,0]],"o":[[0,0],[17.039,-0.353],[0,0],[-4.368,11.421],[0,0],[-13.978,-0.72],[-2.523,-3.997],[4.562,-2.465]],"v":[[-1.547,83.311],[6.603,83.311],[48.503,77.629],[40.118,101.079],[14.346,125.123],[-0.309,126.215],[-28.324,109.339],[-40.364,80.189]],"c":true}]},{"t":12,"s":[{"i":[[-24.292,0],[0,0],[0,-6.3],[8.795,-7.101],[15.905,-0.35],[0,0],[7.818,10.05],[0,0]],"o":[[0,0],[24.292,0],[0,0],[-9.351,7.55],[0,0],[-20.755,-1.1],[-4.75,-6.106],[0,-6.3]],"v":[[1.971,106.75],[14.403,106.75],[66.25,108.9],[57.149,125.2],[14.987,135.85],[2.263,135.85],[-43.318,122.45],[-49,110.15]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic 
- Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":1,"op":10,"st":0,"bm":0}]},{"id":"comp_2","layers":[{"ddd":0,"ind":1,"ty":0,"nm":"Pre-comp 1","refId":"comp_3","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[30,30,0],"to":[0,0,0],"ti":[0,0,0]},{"t":10,"s":[28.282,29.857,0]}],"ix":2},"a":{"a":0,"k":[256,256,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[13.6,13.6,100]},{"t":10,"s":[13.5,13.5,100]}],"ix":6}},"ao":0,"w":512,"h":512,"ip":0,"op":37,"st":0,"bm":0}]},{"id":"comp_3","layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Line","parent":2,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[3.687,-6.898,0],"to":[0,0,0],"ti":[0,0,0]},{"t":8,"s":[-3.313,3.102,0]}],"ix":2},"a":{"a":0,"k":[3.6,-7,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-101.6,-111.5],[108.8,97.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[0]},{"t":7,"s":[31]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.3],"y":[0]},"t":0,"s":[100]},{"t":7,"s":[63]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path-10","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":9,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Head Mic","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[-3.6,-33.55,0],"to":[0,0,0],"ti":[0,0,0]},{"t":12,"s":[9.4,60.45,0]}],"ix":2},"a":{"a":0,"k":[-3.6,-33.55,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[6.9,-8],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[31.3,34.4],[-20.33,-17.23],[-49.6,-46.5],[-49.6,-71.5]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[-25.393,-0.391],[0.18,-19.791],[-0.243,-7.895],[14.056,-6.795],[0,0],[0,0],[0,0]],"o":[[28.416,0.506],[0,0],[0.243,14.68],[0,0],[0,0],[0,0],[0.052,-15.482]],"v":[[-6.434,-101.102],[47.056,-73.018],[47.664,4.298],[29.68,40.735],[-22.557,-15.51],[-55.36,-50.751],[-55.066,-73.828]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":6,"s":[{"i":[[-25.388,-0.636],[-1.714,-14.258],[-0.395,-12.85],[18.974,-7.989],[0,0],[0,0],[0,0]],"o":[[30.309,0.824],[0,0],[0.395,16.737],[0,0],[0,0],[0,0],[2.421,-11.434]],"v":[[-4.713,-90.812],[49.978,-73.971],[50.966,4.171],[29.789,44.635],[-23.955,-14.43],[-58.974,-53.419],[-58.496,-75.289]],"c":true}]},{"t":12,"s":[{"i":[[-25.306,-0.966],[8.574,-7],[-0.598,-19.5],[30.609,-1.65],[0,0],[0,0],[0,0]],"o":[[32.753,1.25],[0,0],[0.598,19.5],[0,0],[0,0],[0,0],[5.227,-2.25]],"v":[[-7.633,-77],[56.676,-75.25],[58.171,4],
[29.904,46.4],[-22.818,-12.98],[-60.7,-57],[-59.227,-81]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[0,0],[-5.2,2],[-25.4,0],[0,0],[0,0]],"v":[[-49.6,-15.4],[-40.702,-6.502],[-38.75,-4.55],[13.1,47.3],[-3.6,50.4],[-49.6,4.4],[-49.6,-15.4]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[6.435,0.16],[0,28.821],[0,0]],"o":[[0,0],[0,0],[0,0],[-13.044,10.194],[-29.138,-0.769],[0,0],[0,0]],"v":[[-55.579,-50.718],[-28.188,-26.309],[-26.057,-24.356],[32.281,39.246],[-10.39,49.691],[-55.329,4.4],[-55.38,-31.494]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":6,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[6.77,0.261],[0,30.968],[0,0]],"o":[[0,0],[0,0],[0,0],[-13.708,7.528],[-31.483,-1.252],[0,0],[0,0]],"v":[[-59.067,-54.691],[-31.67,-27.138],[-29.427,-25.186],[31.261,43.864],[-14.65,49.247],[-58.924,4.4],[-59.007,-41.593]],"c":true}]},{"t":12,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[7.199,0.396],[0,33.85],[0,0]],"o":[[0,0],[0,0],[0,0],[-14.557,3.95],[-34.528,-1.9],[0,0],[0,0]],"v":[[-60.625,-60.025],[-33.301,-28.252],[-30.915,-26.3],[32.198,46.3],[-17.623,48.4],[-60.625,4.4],[-60.75,-55.15]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":13,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Arc L","parent":5,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-3.75,84.5,0],"ix":2},"a":{"a":0,"k":[-3.75,84.5,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[0,0],[13.1,0],[0,42]],"o":[[-11,5.1],[-45.3,0],[0,0]],"v":[[33,76.5],[-3.5,84.5],[-85.5,6.5]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":2,"s":[{"i":[[0,0],[17.605,8.684],[-3.033,28.67]],"o":[[-19.95,7.814],[-28.895,-17.316],[0,0]],"v":[[24.45,79.436],[-39.105,77.316],[-76.362,0.644]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[0,0],[11.789,14.347],[-5.217,19.073]],"o":[[-50.375,15.159],[-15.78,-19.203],[0,0]],"v":[[26.125,87.591],[-58.72,68.703],[-69.783,-3.573]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[0,0],[3.96,23.737],[-17.216,17.641]],"o":[[-21.241,-5.148],[-3.54,-27.263],[0,0]],"v":[[-26.759,91.648],[-66.96,42.763],[-43.784,-26.641]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[0,0],[0.701,27.677],[-23.215,14.71]],"o":[[-17.607,-9.204],[-0.405,-15.984],[0,0]],"v":[[-32.893,81.704],[-69.201,29.323],[-41.785,-18.71]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[0,0],[-6.372,14.388],[-16.86,4.436]],"o":[[-9.639,-11.784],[6.
608,-14.922],[0,0]],"v":[[-74.929,75.894],[-84.108,31.922],[-41.89,3.314]],"c":false}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[-14.453,13.82],[-10.072,-1.911]],"o":[[-3.391,-12.713],[16.367,-15.651],[0,0]],"v":[[-102.109,72.713],[-93.047,27.18],[-41.962,18.507]],"c":false}]},{"t":12,"s":[{"i":[[0,0],[-10.081,5.804],[-6.5,-5.25]],"o":[[-1.25,-14.5],[24.75,-14.25],[0,0]],"v":[[-121.75,68.75],[-97.75,28.5],[-42,26.5]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":13,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Arc 
R","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[66.05,51.45,0],"ix":2},"a":{"a":0,"k":[66.05,51.45,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[0,0],[8.7,-12]],"o":[[0,15.6],[0,0]],"v":[[78.4,8.4],[64.7,50.5]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[0,0],[16.64,-12.817]],"o":[[15.078,25.724],[0,0]],"v":[[66.422,-8.224],[58.36,66.317]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":4,"s":[{"i":[[0,0],[38.841,-19.274]],"o":[[42.63,43.092],[0,0]],"v":[[43.37,-29.592],[35.659,89.274]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[0,0],[32.542,-17.732]],"o":[[46.682,32.959],[0,0]],"v":[[44.318,-21.959],[47.958,77.232]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[0,0],[29.55,-17.51]],"o":[[48.487,23.807],[0,0]],"v":[[50.036,1.981],[67.878,99.974]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":8,"s":[{"i":[[0,0],[28.054,-17.399]],"o":[[49.39,19.231],[0,0]],"v":[[56.395,12.201],[71.838,111.344]],"c":false}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[27.199,-17.335]],"o":[[49.907,16.614],[0,0]],"v":[[57.245,18.726],[77.588,118.775]],"c":false}]},{"t":12,"s":[{"i":[[0,0],[26.05,-17.25]],"o":[[47.35,10.35],[0,0]],"v":[[62.15,25.9],[83.95,128.75]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":13,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Leg","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[252.45,380.6,0],"ix":2},"a":{"a":0,"k":[-3.55,124.6,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":0,"s":[{"i":[[-6.3,0],[0,0],[0,-6.3],[0,0],[6.3,0],[0,0],[0,6.3],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,6.3],[0,0],[-6.3,0],[0,0],[0,-6.3]],"v":[[-5.2,78.5],[-1.9,78.5],[9.5,89.9],[9.5,113.2],[-1.9,124.6],[-5.2,124.6],[-16.6,113.2],[-16.6,89.9]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[-10.717,-0.585],[0,0],[-5.941,-0.906],[0.206,-8.157],[11.865,-1.977],[0,0],[1.529,7.318],[0,0]],"o":[[0,0],[12.64,0.23],[0,0],[-0.717,13.324],[0,0],[-9.832,-0.809],[-1.086,-1.566],[7.288,1.01]],"v":[[-6.991,83.123],[-1.449,83.426],[26.156,78.756],[29.671,97.051],[10.094,121.234],[-4.602,122.184],[-26.436,108.079],[-29.519,79.278]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[-15.856,0],[0,0],[-3.721,-3.379],[3.175,-9.561],[10.154,-0.123],[0,0],[3.074,9.411],[0,0]],"o":[[0,0],[17.039,-0.353],[0,0],[-4.368,11.421],[0,0],[-13.978,-0.72],[-2.523,-3.997],[4.562,-2.465]],"v":[[-1.547,83.311],[6.603,83.311],[48.503,77.629],[40.118,101.079],[14.346,125.123],[-0.309,126.215],[-28.324,109.339],[-40.364,80.189]],"c":true}]},{"t":12,"s":[{"i":[[-24.292,0],[0,0],[0,-6.3],[8.795,-7.101],[15.905,-0.35],[0,0],[7.818,10.05],[0,0]],"o":[[0,0],[24.292,0],[0,0],[-9.351,7.55],[0,0],[-20.755,-1.1],[-4.75,-6.106],[0,-6.3]],"v":[[1.971,106.75],[14.
403,106.75],[66.25,108.9],[57.149,125.2],[14.987,135.85],[2.263,135.85],[-43.318,122.45],[-49,110.15]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":10,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Head 2","parent":8,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[0.613,-101.537,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":12,"s":[11.613,-50.537,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":26,"s":[11.613,-86.537,0],"to":[0,0,0],"ti":[0,0,0]},{"t":36,"s":[11.113,-83.037,0]}],"ix":2},"a":{"a":0,"k":[11.613,-84.537,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":3,"s":[{"i":[[0,-17.328],[25.612,0],[0,17.328],[-25.612,0]],"o":[[0,17.328],[-25.612,0],[0,-17.328],[25.612,0]],"v":[[54.039,-62.942],[7.664,-31.567],[-38.711,-62.942],[7.664,-94.317]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[0,-23.543],[25.081,0],[0,23.543],[-25.081,0]],"o":[[0,23.543],[-25.081,0],[0,-23.543],[25.081,0]],"v":[[55.47,-76.028],[10.057,-33.4],[-35.356,-76.028],[10.057,-118.656]],"c":true}]},{"i":{"x":0.39,"y":1},"o":{"x":0.3,"y":0},"t":14,"s":[{"i":[[0,-24.735],[24.735,0],[0,24.735],[-24.735,0]],"o":[[0,24.735],[-24.735,0],[0,-24.735],[24.735,0]],"v":[[56.4,-84.537],[11.613,-39.75],[-33.174,-
84.537],[11.613,-129.324]],"c":true}]},{"t":25,"s":[{"i":[[0,-26.924],[26.924,0],[0,26.924],[-26.924,0]],"o":[[0,26.924],[-26.924,0],[0,-26.924],[26.924,0]],"v":[[60.363,-84.537],[11.613,-35.787],[-37.137,-84.537],[11.613,-133.287]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":2,"op":37,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Hands 2","parent":8,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0,"y":1},"o":{"x":0.05,"y":0},"t":9,"s":[11.336,13.774,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.31,"y":0},"t":12,"s":[11.336,18.274,0],"to":[0,0,0],"ti":[0,0,0]},{"t":25,"s":[11.336,11.774,0]}],"ix":2},"a":{"a":0,"k":[11.336,10.774,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":7,"s":[{"i":[[0,0],[0.875,44.36],[0.012,11.61]],"o":[[0,0],[-0.348,-17.658],[-0.016,-14.955]],"v":[[-33.583,109.659],[-40.875,50.64],[-45.012,-17.11]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[-7.48,30.25],[-7.964,9.648]],"o":[[0,0],[2.73,-11.373],[7.228,-11.196]],"v":[[-57.163,97.688],[-57.996,36.651],[-42.036,0.102]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[0,0],[-14.611,30.379],[-23.925,1.795]],"o":[[0,0],[7.389,-12.121],[13.171,-8.112]],"v":[[-77.526,99.758],[-81.889,29.906],[-41.075,4.99]],"c":false}]},{"i":{"x":0.833,"y":0.833},"
o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[0,0],[-27.718,19.589],[-10.136,-1.028]],"o":[[0,0],[21.782,-16.411],[19.114,-5.028]],"v":[[-107.888,77.829],[-93.782,16.161],[-42.114,8.877]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":13,"s":[{"i":[[0,0],[-27.325,4.127],[-14.975,-3.949]],"o":[[0,0],[20.334,-3.734],[35.444,0.066]],"v":[[-136.225,32.858],[-90.834,7.893],[-38.525,10.472]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[0,0],[-27.129,-3.603],[-14.269,-4.157]],"o":[[0,0],[19.61,2.605],[43.609,2.613]],"v":[[-147.393,-5.627],[-89.61,-2.992],[-38.231,9.77]],"c":false}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[0,0],[-38.825,-22.336],[-19.04,-2.789]],"o":[[0,0],[14.675,9.164],[62.76,8.586]],"v":[[-144.801,-73.381],[-91.175,-7.197],[-37.96,10.757]],"c":false}]},{"t":28,"s":[{"i":[[0,0],[-46.458,-35.583],[-19.738,-3.567]],"o":[[0,0],[8.352,6.397],[79.039,14.286]],"v":[[-142.333,-130.341],[-88.062,-16.235],[-40.512,7.89]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.3,"y":0},"t":7,"s":[{"i":[[0,0],[0.583,25.777],[4.5,19.11]],"o":[[0,0],[-0.238,-10.518],[-18.41,-78.181]],"v":[[63,101.64],[63.167,54.973],[56,-16.61]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[2.455,25.149],[6.868,13.265]],"o":[[0,0],[-0.66,-9.745],[-29.265,-65.813]],"v":[[68.548,94.42],[70.368,48.723],[56.521,2.29]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[-11.495,10.995],[15.537,39.692],[11.504,1.535]],"o":[[14.034,-13.424],[-3.378,-8.551],[-47.075,-45.52]],"v":[[79.466,115.674],[91.463,38.058],[62.496,12.174]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":13,"s":[{"i":[[-14.54,10.097],[25.244,37.544],[12.466,1.965]],"o":[[15.872,-11.198],[-4.741,-7.952],[-56.007,-35.342]],"v":[[88.294,117.357],[93.089,33.781],[63.701,13.504]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[-16.063,9.648],[30.097,36.471],[11.946,0.457]],"o":[[16.791,-10.086],[-5.422,-7.653],[-60.473,-30.253]],"v":[[92.709,118.199],[93.903,31.642],[61.554,13.67]],"c":false}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.167},"t":18,"s":[{"i":[[-5.509,3.309],[41.86,36.38],[11.653,0.923]],"o":[[5.759,-3.459],[-7.371,-6.796],[-73.243,-15.702]],"v":[[124.602,127.851],[94.405,26.111],[62.332,13.5]],"c":false}]},{"i":{"x":0.833,"y":1},"o":{"x":0.167,"y":0},"t":25,"s":[{"i":[[0,0],[48,36.333],[11.5,1.167]],"o":[[0,0],[-8.388,-6.35],[-79.91,-8.107]],"v":[[141.25,132.89],[94.667,23.223],[63.5,11.89]],"c":false}]},{"t":28,"s":[{"i":[[0,0],[48,36.333],[11.5,1.167]],"o":[[0,0],[-8.388,-6.35],[-79.91,-8.107]],"v":[[140,128.89],[94.667,23.848],[63.5,11.89]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":12,"op":37,"st":0,"bm":0},{"ddd":0,"ind":8,"ty":4,"nm":"Body 2","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":12,"s":[266.676,398.378,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.3,"y":0},"t":23,"s":[266.676,387.378,0],"to":[0,0,0],"ti":[0,0,0]},{"t":34,"s":[266.676,392.378,0]}],"ix":2},"a":{"a":0,"k":[10.676,136.378,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":12,"s":[{"i":[[-12.288,0.098],[-12.498,-0.987],[0,0],[3.339,-1.222],[4.897,-9.213],[22.369,0],[7.456,13.82],[-1.892,54.808]],"o":[[8.847,-0.071],[35.724,2.82],[24.706,0],[0,0],[-7.456,13.82],[-22.369,0],[-4.897,-9.213],[-12.353,-3.008]],"v":[[-46.407,7.071],[-18.334,9.585],[61.794,9.679],[70.141,28.952],[68.805,121.176],[13.161,136.5],[-42.484,121.176],[-47.047,25.191]],"c":true}]},{"i":{"x":0.833,"y":1},"o":{"x":0.31,"y":0},"t":23,"s":[{"i":[[-10.8,-2.3],[-8.2,-0.4],[0,0],[3,-1.3],[4.4,-9.8],[20.1,0],[6.7,14.7],[-1.7,58.3]],"o":[[7.9,1.7],[0,0],[22.2,0],[0,0],[-6.7,14.7],[-20.1,0],[-4.4,-9.8],[-11.1,-3.2]],"v":[[-40.2,-2.3],[-15.1,1.5],[56.9,1.6],[64.4,22.1],[63.2,120.2],[13.2,136.5],[-36.8,120.2],[-40.9,18.1]],"c":true}]},{"t":34,"s":[{"i":[[-10.8,-2.3],[-8.2,-0.4],[0,0],[3,-1.3],[4.4,-9.8],[24.175,0.094],[6.7,14.7],[-1.7,58.3]],"o":[[7.9,1.7],[0,0],[22.2,0],[0,0],[-6.669,14.144],[
-20.1,-0.078],[-4.4,-9.8],[-11.1,-3.2]],"v":[[-40.2,-2.3],[-15.1,1.5],[56.9,1.6],[64.4,22.1],[63.044,119.606],[13.2,136.156],[-36.8,120.2],[-40.9,18.1]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":12,"op":37,"st":0,"bm":0}]},{"id":"comp_4","layers":[{"ddd":0,"ind":1,"ty":0,"nm":"Pre-comp 1","refId":"comp_5","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[30,30,0],"ix":2},"a":{"a":0,"k":[256,256,0],"ix":1},"s":{"a":0,"k":[13.6,13.6,100],"ix":6}},"ao":0,"w":512,"h":512,"ip":0,"op":30,"st":0,"bm":0}]},{"id":"comp_5","layers":[{"ddd":0,"ind":1,"ty":3,"nm":"Null 
1","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[254.042,238.57,0],"ix":2},"a":{"a":0,"k":[50,50,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.72,0.72,0.72],"y":[1,1,1]},"o":{"x":[0.28,0.28,0.28],"y":[0,0,0]},"t":0,"s":[100,100,100]},{"i":{"x":[0.72,0.72,0.72],"y":[1,1,1]},"o":{"x":[0.28,0.28,0.28],"y":[0,0,0]},"t":11,"s":[93,93,100]},{"i":{"x":[0.72,0.72,0.72],"y":[1,1,1]},"o":{"x":[0.28,0.28,0.28],"y":[0,0,0]},"t":21,"s":[101,101,100]},{"t":29,"s":[100,100,100]}],"ix":6}},"ao":0,"ip":0,"op":30,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Line","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.4,"y":0},"t":0,"s":[-13.538,-23.613,0],"to":[0,0,0],"ti":[0,0,0]},{"t":17,"s":[3.687,-6.898,0]}],"ix":2},"a":{"a":0,"k":[3.6,-7,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-101.6,-111.5],[108.8,97.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":0,"k":0,"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":0,"s":[0]},{"t":17,"s":[100]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path-10","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":2,"op":30,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Head","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-3.6,-33.55,0],"ix":2},"a":{"a":0,"k":[-3.6,-33.55,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[15.481,-9.962],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[23.519,42.712],[-23.209,-2.257],[-49.7,-27.75],[-49.6,-71.5]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":6,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[6.9,-8],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[31.3,34.4],[-10.034,-21.084],[-49.6,-46.5],[-49.6,-71.5]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[6.9,-8],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[31.3,34.4],[19.623,9.37],[-49.6,-46.5],[-49.6,-71.5]],"c":true}]},{"t":8,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[6.9,-8],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[31.3,34.4],[-20.33,-17.23],[-49.6,-46.5],[-49.6,-71.5]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[0,0],[-12.35,7.7],[-25.4,0],[0,0],[0,0]],"v":[[-49.7,-29.15],[-39.262,-18.975],[-36.972,-16.742],[23.85,42.55],[-3.6,50.4],[-49.6,4.4],[-49.7,-29.15]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":6,"s":[{"i":[[0,0],[-3.331,-2.789],[4.414,24.475],[0,0],[12.582,-0.112],[0,25.4],[0,0]],"o":[[0,0],[4.215,3.528],[21.749,15.62],[-9.968,12.883],[-25.399,0.226],[0,0],[0,0]],"v":[[-49.6,-15.4],[-38.553,-4.468],[-12.248,-24.48],[31.491,33.902],[-3.6,50.4],[-49.6,4.4],[-49.6,-15.4]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[0,0],[-1.647,-1.416],[3.578,20.948],[0,0],[13.582,-0.261],[0,25.4],[0,0]],"o":[[0,0],[7.932,6.822],[10.875,7.81],[-10.683,10.613],[-25.395,0.487],[0,0],[0,0]],"v":[[-49.6,-15.4],[-6.153,27.653],[21.197,9.349],[31.475,35.139],[-3.6,50.4],[-49.6,4.4],[-49.6,-15.4]],"c":true}]},{"t":8,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[0,0],[-5.2,2],[-25.4,0],[0,0],[0,0]],"v":[[-49.6,-15.4],[-40.702,-6.502],[-38.75,-4.55],[13.1,47.3],[-3.6,50.4],[-49.6,4.4],[-49.6,-15.4]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":30,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Arc 
L","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-3.75,84.5,0],"ix":2},"a":{"a":0,"k":[-3.75,84.5,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[13.1,0],[0,42]],"o":[[-11,5.1],[-45.3,0],[0,0]],"v":[[33,76.5],[-3.5,84.5],[-85.5,6.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":30,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Arc R","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[66.05,51.45,0],"ix":2},"a":{"a":0,"k":[66.05,51.45,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":8,"s":[{"i":[[0,0],[34.576,-19.595]],"o":[[0,15.6],[0,0]],"v":[[78.4,8.4],[38.027,74.567]],"c":false}]},{"t":9,"s":[{"i":[[0,0],[8.7,-12]],"o":[[0,15.6],[0,0]],"v":[[78.4,8.4],[64.7,50.5]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":30,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Leg","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[48.408,192.03,0],"ix":2},"a":{"a":0,"k":[-3.55,124.6,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-6.3,0],[0,0],[0,-6.3],[0,0],[6.3,0],[0,0],[0,6.3],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,6.3],[0,0],[-6.3,0],[0,0],[0,-6.3]],"v":[[-5.2,78.5],[-1.9,78.5],[9.5,89.9],[9.5,113.2],[-1.9,124.6],[-5.2,124.6],[-16.6,113.2],[-16.6,89.9]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":30,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"EXAMPLE ON","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[48.708,70.98,0],"ix":2},"a":{"a":0,"k":[0,3.55,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[45.3,0],[0,42]],"o":[[0,42],[-45.3,0],[0,0]],"v":[[82,8.4],[0,84.5],[-82,6.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":20.888,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-25.4,0],[0,0],[0,-25.4],[0,0],[25.4,0],[0,0],[0,25.4],[0,0]],"o":[[0,0],[25.4,0],[0,0],[0,25.4],[0,0],[-25.4,0],[0,0],[0,-25.4]],"v":[[0,-117.5],[0,-117.5],[46,-71.5],[46,4.5],[0,50.5],[0,50.5],[-46,4.5],[-46,-71.5]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-6.3,0],[0,0],[0,-6.3],[0,0],[6.3,0],[0,0],[0,6.3],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,6.3],[0,0],[-6.3,0],[0,0],[0,-6.3]],"v":[[-1.6,78.5],[1.7,78.5],[13.1,89.9],[13.1,113.2],[1.7,124.6],[-1.6,124.6],[-13,113.2],[-13,89.9]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - 
Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":5,"st":0,"bm":0}]},{"id":"comp_6","layers":[{"ddd":0,"ind":1,"ty":0,"nm":"Pre-comp 1","refId":"comp_7","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[30,30,0],"ix":2},"a":{"a":0,"k":[256,256,0],"ix":1},"s":{"a":0,"k":[13.6,13.6,100],"ix":6}},"ao":0,"w":512,"h":512,"ip":0,"op":33,"st":0,"bm":0}]},{"id":"comp_7","layers":[{"ddd":0,"ind":1,"ty":3,"nm":"Null 1","sr":1,"ks":{"o":{"a":0,"k":0,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[254.062,238.57,0],"ix":2},"a":{"a":0,"k":[50,50,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[100,100,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":12,"s":[93,93,100]},{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":22,"s":[101,101,100]},{"t":32,"s":[100,100,100]}],"ix":6}},"ao":0,"ip":0,"op":33,"st":0,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Line","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.4,"y":0},"t":2,"s":[3.687,-6.898,0],"to":[0,0,0],"ti":[0,0,0]},{"t":23,"s":[12.601,2.016,0]}],"ix":2},"a":{"a":0,"k":[3.6,-7,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-101.6,-111.5],[108.8,97.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.3],"y":[1]},"o":{"x":[0.4],"y":[0]},"t":2,"s":[0]},{"t":23,"s":[100]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path-10","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":21,"st":0,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Head","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-3.6,-33.55,0],"ix":2},"a":{"a":0,"k":[-3.6,-33.55,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":8,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[6.9,-8],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[31.3,34.4],[-20.33,-17.23],[-49.6,-46.5],[-49.6,-71.5]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[5.665,-6.576],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[32.133,31.549],[-27.809,-30.43],[-49.62,-42.75],[-49.6,-71.5]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[5.665,-6.576],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[32.133,31.549],[-4.565,-6.652],[-49.62,-42.75],[-49.6,-71.5]],"c"
:true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[5.665,-6.576],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[32.133,31.549],[17.977,14.816],[-49.62,-42.75],[-49.6,-71.5]],"c":true}]},{"t":12,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[15.481,-9.962],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[23.519,42.712],[-23.209,-2.257],[-49.7,-27.75],[-49.6,-71.5]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":8,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[0,0],[-5.2,2],[-25.4,0],[0,0],[0,0]],"v":[[-49.6,-15.4],[-40.702,-6.502],[-38.75,-4.55],[13.1,47.3],[-3.6,50.4],[-49.6,4.4],[-49.6,-15.4]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":9,"s":[{"i":[[0,0],[0,0],[-11.683,-16.357],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[4.734,6.628],[-6.63,3.14],[-25.4,0],[0,0],[0,0]],"v":[[-49.575,-44.964],[-29.795,-33.421],[-38.394,-6.988],[15.25,46.35],[-3.6,50.4],[-49.6,4.4],[-49.575,-44.167]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[0,0],[0,0],[-11.683,-16.357],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[4.734,6.628],[-6.63,3.14],[-25.4,0],[0,0],[0,0]],"v":[[-49.575,-44.964],[-6.551,-9.643],[-15.151,16.79],[15.25,46.35],[-3.6,50.4],[-49.6,4.4],[-49.575,-44.167]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":11,"s":[{"i":[[0,0],[0,0],[-11.683,-16.357],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[4.734,6.628],[-6.63,3.14],[-25.4,0],[0,0],[0,0]],"v":[[-49.575,-44.964],[15.99,11.825],[7.391,38.258],[15.25,46.35],[-3.6,50.4],[-49.6,4.4],[-49.575,-44.167]],"c":true}]},{"t":12,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[0,0],[-12.35,7.7],[-25.4,0],[0,0],[0,0]],"v":[[-49.7,-29.15],[-39.262,-18.975],[-36.972,-16.742],[23.85,42.55],[-3.6,50.4],[-49.6,4.4],[-49.7,-29.15]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - 
Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":33,"st":0,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Arc L","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-3.75,84.5,0],"ix":2},"a":{"a":0,"k":[-3.75,84.5,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[13.1,0],[0,42]],"o":[[-11,5.1],[-45.3,0],[0,0]],"v":[[33,76.5],[-3.5,84.5],[-85.5,6.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":33,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Arc R","parent":4,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[66.05,51.45,0],"ix":2},"a":{"a":0,"k":[66.05,51.45,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":13,"s":[{"i":[[0,0],[8.7,-12]],"o":[[0,15.6],[0,0]],"v":[[78.4,8.4],[64.7,50.5]],"c":false}]},{"t":14,"s":[{"i":[[0,0],[36.503,-16.928]],"o":[[0,15.6],[0,0]],"v":[[78.4,8.4],[34.73,75.676]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":33,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Leg","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[48.408,192.03,0],"ix":2},"a":{"a":0,"k":[-3.55,124.6,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-6.3,0],[0,0],[0,-6.3],[0,0],[6.3,0],[0,0],[0,6.3],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,6.3],[0,0],[-6.3,0],[0,0],[0,-6.3]],"v":[[-5.2,78.5],[-1.9,78.5],[9.5,89.9],[9.5,113.2],[-1.9,124.6],[-5.2,124.6],[-16.6,113.2],[-16.6,89.9]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":33,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"EXAMPLE 
ON","parent":1,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[48.708,70.98,0],"ix":2},"a":{"a":0,"k":[0,3.55,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[45.3,0],[0,42]],"o":[[0,42],[-45.3,0],[0,0]],"v":[[82,8.4],[0,84.5],[-82,6.5]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":20.888,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-25.4,0],[0,0],[0,-25.4],[0,0],[25.4,0],[0,0],[0,25.4],[0,0]],"o":[[0,0],[25.4,0],[0,0],[0,25.4],[0,0],[-25.4,0],[0,0],[0,-25.4]],"v":[[0,-117.5],[0,-117.5],[46,-71.5],[46,4.5],[0,50.5],[0,50.5],[-46,4.5],[-46,-71.5]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[-6.3,0],[0,0],[0,-6.3],[0,0],[6.3,0],[0,0],[0,6.3],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,6.3],[0,0],[-6.3,0],[0,0],[0,-6.3]],"v":[[-1.6,78.5],[1.7,78.5],[13.1,89.9],[13.1,113.2],[1.7,124.6],[-1.6,124.6],[-13,113.2],[-13,89.9]],"c":true},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false}],"ip":14,"op":33,"st":0,"bm":0}]},{"id":"comp_8","layers":[{"ddd":0,"ind":1,"ty":0,"nm":"Pre-comp 2","refId":"comp_9","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.7,"y":1},"o":{"x":0.3,"y":0},"t":0,"s":[28.282,29.857,0],"to":[0,0,0],"ti":[0,0,0]},{"t":10,"s":[30,30,0]}],"ix":2},"a":{"a":0,"k":[256,256,0],"ix":1},"s":{"a":1,"k":[{"i":{"x":[0.7,0.7,0.7],"y":[1,1,1]},"o":{"x":[0.3,0.3,0.3],"y":[0,0,0]},"t":0,"s":[13.5,13.5,100]},{"t":10,"s":[13.6,13.6,100]}],"ix":6}},"ao":0,"w":512,"h":512,"ip":0,"op":36,"st":0,"bm":0}]},{"id":"comp_9","layers":[{"ddd":0,"ind":1,"ty":4,"nm":"Head 
2","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.5,"y":0},"t":0,"s":[11.613,-83.537,0],"to":[0,0,0],"ti":[0,0,0]},{"t":14,"s":[13.613,-4.037,0]}],"ix":2},"a":{"a":0,"k":[11.613,-84.537,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.5,"y":0},"t":0,"s":[{"i":[[0,-26.924],[26.924,0],[0,26.924],[-26.924,0]],"o":[[0,26.924],[-26.924,0],[0,-26.924],[26.924,0]],"v":[[60.363,-84.537],[11.613,-35.787],[-37.137,-84.537],[11.613,-133.287]],"c":true}]},{"t":14,"s":[{"i":[[0,-25.609],[26.234,0],[0,25.609],[-26.234,0]],"o":[[0,25.609],[-26.234,0],[0,-25.609],[26.234,0]],"v":[[55.5,-92.894],[8,-46.525],[-39.5,-92.894],[8,-139.262]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":13,"st":-2,"bm":0},{"ddd":0,"ind":2,"ty":4,"nm":"Hands 
2","parent":3,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[11.336,11.274,0],"ix":2},"a":{"a":0,"k":[11.336,10.774,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.818},"o":{"x":0.5,"y":0},"t":0,"s":[{"i":[[0,0],[-46.458,-35.583],[-24.488,-5.64]],"o":[[0,0],[8.352,6.397],[78.271,18.027]],"v":[[-141.771,-129.716],[-87.875,-16.36],[-40.512,7.89]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.182},"t":2.742,"s":[{"i":[[0,0],[-36.01,-10.9],[-24.155,1.341]],"o":[[0,0],[17.49,5.6],[69.436,6.533]],"v":[[-154.359,-38.55],[-101.24,-1.379],[-38.096,3.379]],"c":false}]},{"i":{"x":0.833,"y":0.818},"o":{"x":0.167,"y":0.167},"t":4.572,"s":[{"i":[[0,0],[-22.495,10.354],[-22.973,-2.409]],"o":[[0,0],[18.597,-8.552],[52.89,-5.679]],"v":[[-130.223,30.529],[-97.344,4.391],[-37.533,-6.68]],"c":false}]},{"i":{"x":0.833,"y":0.818},"o":{"x":0.167,"y":0.182},"t":5.486,"s":[{"i":[[0,0],[-15.738,20.981],[-22.382,-4.284]],"o":[[0,0],[16.262,-20.519],[44.618,-11.784]],"v":[[-110.655,59.818],[-92.396,11.026],[-37.251,-11.709]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.182},"t":9,"s":[{"i":[[0,0],[15.091,34.278],[-16.072,17.731]],"o":[[0,0],[-7.208,-22.817],[16.812,-10.179]],"v":[[-13.011,129.282],[-62.232,89.104],[-50.644,19.509]],"c":false}]},{"i":{"x":0.833,"y":0.818},"o":{"x":0.167,"y":0.167},"t":10,"s":[{"i":[[0,0],[19.6,34.344],[-11.896,20.735]],"o":[[0,0],[-10.53,-23.142],[12.875,-9.952]],"v":[[0.934,129.625],[-59.674,93.522],[-55.966,25.06]],"c":false}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.182},"t":11,"s":[{"i":[[0,0],[24.108,34.41],[-7.719,23.739]],"o":[[0,0],[-13.853,-23.467],[8.939,-9.725]],"v":[[1.88,129.968],[-57.117,97.94],[-61.289,30.611]],"c":false}]},{"t":14,"s":[{"i":[[0,0],[25.209,29.401],[-7.488,20.11]],"o":[[0,0],[-15.125,-17.64],[3.974,-10.674]],"v":[[6.417,130.659],[-53.875,103.64],[-63.512,36.89]],"c
":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.818},"o":{"x":0.5,"y":0},"t":0,"s":[{"i":[[0,0],[48,36.333],[11.5,1.167]],"o":[[0,0],[-8.388,-6.35],[-79.91,-8.107]],"v":[[140.75,128.89],[94.542,23.473],[63.5,11.89]],"c":false}]},{"i":{"x":0.833,"y":0.818},"o":{"x":0.167,"y":0.182},"t":2.742,"s":[{"i":[[-0.107,1.301],[43.705,35.181],[12.499,4.424]],"o":[[1.452,-17.63],[-7.425,-6.513],[-75.279,-8.976]],"v":[[133.797,130.851],[92.055,22.64],[62.256,6.25]],"c":false}]},{"i":{"x":0.833,"y":0.818},"o":{"x":0.167,"y":0.182},"t":5.486,"s":[{"i":[[-6.276,1.09],[19.852,26.915],[21.224,10.921]],"o":[[6.157,-12.218],[-8.148,-12.585],[-54.801,-12.818]],"v":[[109.709,105.725],[104.015,23.092],[59.642,-11.414]],"c":false}]},{"i":{"x":0.833,"y":0.818},"o":{"x":0.167,"y":0.182},"t":9,"s":[{"i":[[-14.16,0.82],[-3.078,22.988],[16.441,25.833]],"o":[[22.052,-4.736],[2.47,-14.595],[-28.631,-17.728]],"v":[[32.732,128.976],[84.858,79.012],[68.64,15.393]],"c":false}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.182},"t":11,"s":[{"i":[[-18.622,0.668],[-13.293,19.932],[12.19,32.742]],"o":[[37.47,-1.344],[5.361,-8.677],[-13.817,-20.508]],"v":[[7.53,130.546],[77.418,98.03],[80.302,30.109]],"c":false}]},{"t":14,"s":[{"i":[[-19.233,1.202],[-15.167,22.777],[7,26.11]],"o":[[30.25,-1.89],[5.831,-8.757],[-6.192,-23.095]],"v":[[11.75,130.39],[78.167,98.723],[84,36.89]],"c":false}]}],"ix":2},"nm":"Path 
1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":12,"st":-2,"bm":0},{"ddd":0,"ind":3,"ty":4,"nm":"Body 2","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.3,"y":1},"o":{"x":0.5,"y":0},"t":0,"s":[266.426,392.378,0],"to":[0,0,0],"ti":[0,0,0]},{"t":14,"s":[253.676,337.378,0]}],"ix":2},"a":{"a":0,"k":[10.676,136.378,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.5,"y":0},"t":0,"s":[{"i":[[-10.795,-2.32],[-8.2,-0.399],[0,0],[3,-1.298],[6.05,-16.428],[20.1,0],[6.7,14.68],[-1.7,58.223]],"o":[[7.9,1.698],[0,0],[22.2,0],[0,0],[-7.825,16.029],[-20.1,0],[-4.4,-9.787],[-11.1,-3.196]],"v":[[-40.2,-2.303],[-15.1,1.492],[56.9,1.592],[64.619,22.065],[63.2,120.034],[13.2,136.312],[-36.8,120.034],[-40.9,18.07]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":5,"s":[{"i":[[-7.729,4.405],[-14.608,0.316],[-11.697,-7.483],[-0.238,-10.621],[7.036,-14.499],[18.581,0],[6.908,15.026],[-1.237,40.257]],"o":[[7.771,-9.845],[25.241,-0.704],[10.053,2.017],[0,0],[-6.792,14.491],[-18.581,0],[-5.829,-11.041],[-0.133,-10.892]],"v":[[-35.485,-12.023],[0.894,-25.934],[52.233,-18.385],[61.774,26.253],[58.326,106.281],[12.028,125.952],[-35.105,105.616],[-40.582,23.524]],"c":true}]},{"i":{"x":0.3,"y":1},"o":{"x":0.167,"y":0.167},"t":7,"s":[{"i":[[-4.018,5.73],[-11.874,0.331],[-0.482,-11.186],[1.315,-6.931],[7.788,-13.027],[17.423,0],[
7.066,15.289],[-0.885,26.553]],"o":[[3.929,-18.735],[44.497,-1.242],[7.668,7.255],[0,0],[-6.004,13.318],[-17.423,0],[-6.92,-11.998],[0.031,-13.41]],"v":[[-38.5,-13.268],[1.196,-38.483],[57.046,-13.793],[59.603,29.448],[54.608,95.788],[11.134,118.049],[-33.812,94.617],[-40.339,27.685]],"c":true}]},{"t":14,"s":[{"i":[[-0.661,9.718],[-13.678,-0.71],[-0.721,-16.729],[0.48,-9.722],[8.649,-11.342],[16.096,0],[7.247,15.59],[-0.48,10.861]],"o":[[1.962,-28.86],[25.305,1.314],[0.467,10.849],[0,0],[-5.101,11.975],[-16.096,0],[-8.168,-13.094],[0.521,-13.007]],"v":[[-37.657,-18.701],[9.27,-58.29],[57.118,-21.416],[57.118,33.106],[50.351,83.775],[10.111,109],[-32.332,82.023],[-37.817,32.45]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":13,"st":-2,"bm":0},{"ddd":0,"ind":4,"ty":4,"nm":"Line","parent":5,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[3.687,-6.898,0],"ix":2},"a":{"a":0,"k":[3.6,-7,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.1,"y":0},"t":8,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-87.744,-97.736],[94.944,83.736]],"c":false}]},{"t":24,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[-101.6,-111.5],[108.8,97.5]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - 
Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.2],"y":[0]},"t":8.143,"s":[65]},{"t":23.857421875,"s":[0]}],"ix":1},"e":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.2],"y":[0]},"t":8,"s":[22]},{"t":23.857421875,"s":[100]}],"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Path-10","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":15,"op":36,"st":0,"bm":0},{"ddd":0,"ind":5,"ty":4,"nm":"Head Mic","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[-3.6,-33.55,0],"ix":2},"a":{"a":0,"k":[-3.6,-33.55,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.32,"y":0},"t":12,"s":[{"i":[[-26.211,0],[-0.75,-24.287],[0,0],[8.887,-9.825],[0,0],[0,0],[0,0]],"o":[[26.211,0],[0,0],[0,11.606],[0,0],[0,0],[0,0],[-1,-21.796]],"v":[[-3.735,-109.75],[45.281,-65.463],[43.991,8.35],[34.343,45.154],[-28.222,-7.666],[-50.958,-26.52],[-50.969,-65.204]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[3.254,-6.84],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.715,-109.619],[43.599,-65.808],[42.504,7.127],[37.527,30.34],[-24.609,-26.91],[-49.585,-52.154],[-50.153,-65.37]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[2.173,-4.255],[0,0],[0,0],[0,0]],"o
":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.699,-110.688],[43.436,-66.58],[42.49,6.771],[35.692,29.42],[-20.918,-24.458],[-49.597,-54.517],[-50.078,-66.202]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0.167},"t":17,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[2.511,-3.584],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.668,-112.826],[43.111,-68.124],[42.462,6.058],[35.521,27.954],[-13.534,-19.556],[-49.621,-59.243],[-49.928,-67.865]],"c":true}]},{"t":25,"s":[{"i":[[-25.4,0],[0,-25.4],[0,0],[6.9,-8],[0,0],[0,0],[0,0]],"o":[[25.4,0],[0,0],[0,11.4],[0,0],[0,0],[0,0],[0,-25.4]],"v":[[-3.6,-117.5],[42.4,-71.5],[42.4,4.5],[31.3,34.4],[-20.33,-17.23],[-49.6,-46.5],[-49.6,-71.5]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.32,"y":0},"t":12,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[6.088,0],[-0.503,31.164],[0,0]],"o":[[0,0],[0,0],[0,0],[-10.319,13.723],[-26.211,0],[0,0],[0,0]],"v":[[-50.984,-30.364],[-31.444,-15.166],[-26.333,-11.224],[33.943,44.851],[-3.477,59],[-50.945,10.019],[-50.958,-21.446]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":13,"s":[{"i":[[0,0],[-0.064,2.378],[0,0],[0,0],[6.019,0],[-0.47,30.673],[0,0]],"o":[[0,0],[0.089,-3.285],[0,0],[-10.163,17.808],[-25.911,0],[0,0],[0,0]],"v":[[-50.431,-27.196],[-50.235,-36.056],[-34.964,-26.612],[35.415,38.984],[-3.485,57.977],[-50.411,9.433],[-50.391,-18.196]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":14,"s":[{"i":[[0,0],[-0.174,6.439],[0,0],[0,0],[5.9,0],[-0.415,29.833],[0,0]],"o":[[0,0],[0.241,-8.895],[0,0],[-9.897,24.787],[-25.4,0],[0,0],[0,0]],"v":[[-49.489,-21.787],[-49.835,-35.855],[-49.705,-52.894],[37.928,28.963],[-3.499,56.229],[-49.499,8.433],[-49.422,-12.646]],"c":true}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[-0.361,29.258],[0,0]],"o":[[0,0],[0,0],[0,0],[-5.945,8.024],[-25.4,0],[0,0],[0,0]],"v":[[-49.614,-15.443],[-16.569,15.355],[-4.062,27.479],[16.726,47.226],[-3.512,55.472],[-49.512,7.909],[-49.493,-12.332]],"c":true}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0.167},"t":17,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[-0.253,28.106],[0,0]],"o":[[0,0],[0,0],[0,0],[-4.54,4.12],[-25.4,0],[0,0],[0,0]],"v":[[-49.647,-16.255],[-37.036,-2.726],[-33.526,0.224],[15.322,49.25],[-3.538,53.958],[-49.538,6.862],[-49.634,-11.706]],"c":true}]},{"t":25,"s":[{"i":[[0,0],[0,0],[0,0],[0,0],[5.9,0],[0,25.4],[0,0]],"o":[[0,0],[0,0],[0,0],[-5.2,2],[-25.4,0],[0,0],[0,0]],"v":[[-49.6,-15.4],[-40.702,-6.502],[-38.75,-4.55],[13.1,47.3],[-3.6,50.4],[-49.6,4.4],[-49.6,-15.4]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE 
Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":2,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false}],"ip":12,"op":36,"st":0,"bm":0},{"ddd":0,"ind":6,"ty":4,"nm":"Arc L","parent":8,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":1,"k":[{"i":{"x":0.5,"y":1},"o":{"x":0.05,"y":0},"t":15,"s":[-3.781,80.5,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":25,"s":[-3.781,89.75,0],"to":[0,0,0],"ti":[0,0,0]},{"t":33,"s":[-3.75,84.5,0]}],"ix":2},"a":{"a":0,"k":[-3.75,84.5,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.1,"y":0},"t":12,"s":[{"i":[[0,0],[13.1,0],[-24.469,59.5]],"o":[[-11,5.1],[-45.3,0],[0,0]],"v":[[32,72.5],[-6,80],[-74,-20]],"c":false}]},{"t":26,"s":[{"i":[[0,0],[13.1,0],[0,42]],"o":[[-11,5.1],[-45.3,0],[0,0]],"v":[[33,76.5],[-3.5,84.5],[-85.5,6.5]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":12,"op":36,"st":0,"bm":0},{"ddd":0,"ind":7,"ty":4,"nm":"Arc 
R","parent":6,"sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[66.05,51.45,0],"ix":2},"a":{"a":0,"k":[66.05,51.45,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.833,"y":0.833},"o":{"x":0.1,"y":0},"t":12,"s":[{"i":[[0,0],[39.331,-15]],"o":[[9.631,29.1],[0,0]],"v":[[68.4,-20.1],[39.2,68.5]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":15,"s":[{"i":[[0,0],[38.51,-16.677]],"o":[[5.914,24.45],[0,0]],"v":[[72.925,-10.242],[39.451,70.103]],"c":false}]},{"i":{"x":0.833,"y":0.833},"o":{"x":0.167,"y":0.167},"t":16,"s":[{"i":[[0,0],[10.82,-9.511]],"o":[[4.75,22.994],[0,0]],"v":[[74.342,-7.154],[65.462,49.587]],"c":false}]},{"i":{"x":0.4,"y":1},"o":{"x":0.167,"y":0.167},"t":19,"s":[{"i":[[0,0],[13.536,-11.424]],"o":[[1.257,18.625],[0,0]],"v":[[78.594,2.109],[61.495,50.04]],"c":false}]},{"t":26,"s":[{"i":[[0,0],[8.7,-12]],"o":[[0,15.6],[0,0]],"v":[[78.4,8.4],[64.7,50.5]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[1,1,1,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":22,"ix":5},"lc":2,"lj":1,"ml":4,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector 
Group","hd":false}],"ip":12,"op":36,"st":0,"bm":0},{"ddd":0,"ind":8,"ty":4,"nm":"Leg","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[252.45,380.6,0],"ix":2},"a":{"a":0,"k":[-3.55,124.6,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.1,"y":0},"t":11,"s":[{"i":[[-11.972,0],[0,0],[0,-2.631],[0,0],[11.972,0],[0,0],[0,2.631],[0,0]],"o":[[0,0],[11.972,0],[0,0],[0,2.631],[0,0],[-11.972,0],[0,0],[0,-2.631]],"v":[[-6.686,71.5],[-0.414,71.5],[21.25,76.26],[21.25,85.99],[-0.414,90.75],[-6.686,90.75],[-28.35,85.99],[-28.35,76.26]],"c":true}]},{"i":{"x":0.69,"y":1},"o":{"x":0.31,"y":0},"t":20,"s":[{"i":[[-6.3,0],[0,0],[0,-4.763],[0,0],[6.3,0],[0,0],[0,4.763],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,4.763],[0,0],[-6.3,0],[0,0],[0,-4.763]],"v":[[-5.2,89.75],[-1.9,89.75],[9.5,98.368],[9.5,115.982],[-1.9,124.6],[-5.2,124.6],[-16.6,115.982],[-16.6,98.368]],"c":true}]},{"t":33,"s":[{"i":[[-6.3,0],[0,0],[0,-6.3],[0,0],[6.3,0],[0,0],[0,6.3],[0,0]],"o":[[0,0],[6.3,0],[0,0],[0,6.3],[0,0],[-6.3,0],[0,0],[0,-6.3]],"v":[[-5.2,78.5],[-1.9,78.5],[9.5,89.9],[9.5,113.2],[-1.9,124.6],[-5.2,124.6],[-16.6,113.2],[-16.6,89.9]],"c":true}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"fl","c":{"a":0,"k":[1,1,1,1],"ix":4},"o":{"a":0,"k":100,"ix":5},"r":1,"bm":0,"nm":"Fill 1","mn":"ADBE Vector Graphic - Fill","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Rectangle","np":2,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false}],"ip":11,"op":36,"st":0,"bm":0}]}],"layers":[{"ddd":0,"ind":1,"ty":0,"nm":"Unmute to Rise 
Hand","refId":"comp_0","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[30,30,0],"ix":2},"a":{"a":0,"k":[30,30,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"w":60,"h":60,"ip":136,"op":173,"st":136,"bm":0},{"ddd":0,"ind":2,"ty":0,"nm":"Mute to Rise Hand","refId":"comp_2","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[30,30,0],"ix":2},"a":{"a":0,"k":[30,30,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"w":60,"h":60,"ip":99,"op":136,"st":99,"bm":0},{"ddd":0,"ind":3,"ty":0,"nm":"Mute","refId":"comp_4","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[30,30,0],"ix":2},"a":{"a":0,"k":[30,30,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"w":60,"h":60,"ip":69,"op":99,"st":69,"bm":0},{"ddd":0,"ind":4,"ty":0,"nm":"Unmute Android","refId":"comp_6","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[30,30,0],"ix":2},"a":{"a":0,"k":[30,30,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"w":60,"h":60,"ip":36,"op":69,"st":36,"bm":0},{"ddd":0,"ind":5,"ty":0,"nm":"Raise Hand to Mute","refId":"comp_8","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":0,"k":0,"ix":10},"p":{"a":0,"k":[30,30,0],"ix":2},"a":{"a":0,"k":[30,30,0],"ix":1},"s":{"a":0,"k":[100,100,100],"ix":6}},"ao":0,"w":60,"h":60,"ip":0,"op":36,"st":0,"bm":0}],"markers":[]} \ No newline at end of file diff --git a/TMessagesProj/src/main/res/values/strings.xml b/TMessagesProj/src/main/res/values/strings.xml index e6a5bad1c..7bde1e6aa 100644 --- a/TMessagesProj/src/main/res/values/strings.xml +++ b/TMessagesProj/src/main/res/values/strings.xml @@ -303,6 +303,7 @@ Chat imported successfully! Save space on your device by importing more chats to Telegram. Importing messages and media... + Importing stickers... Import messages and media to... Or choose one of your contacts @@ -1198,6 +1199,10 @@ Messages forwarded to **%1$s**. Message forwarded to **%1$s**. 
Messages forwarded to **%1$s**. + Background shared to **Saved Messages**. + Background shared to **%1$s**. + Background shared to **%1$s**. + Background shared to **%1$s**. %1$s set the self-destruct timer to %2$s You set the self-destruct timer to %1$s @@ -1608,6 +1613,22 @@ Are you sure you want to delete the selected sticker sets? Archive %1$s Are you sure you want to archive the selected sticker sets? + IMPORT %1$s + Processing stickers... + REMOVE FROM IMPORT + Remove from import + Enter name + Please choose a name for your set. + You can use a-z, 0-9 and underscores. + Sorry, this link is invalid. + Link must have at least 5 characters. + Link must not exceed 32 characters. + Checking link... + This link is available. + Sorry, this link is already taken. + Importing stickers + Stickers imported successfully! + Import more stickers to Telegram and share them with your friends. Theme Auto-Night Mode @@ -1717,6 +1738,7 @@ RESET ALL RESET Are you sure you want to change your chat wallpaper to a color? + Do you want to use your currently selected background or reset it to the default for this theme? Share theme Edit colors Reset to default @@ -1823,7 +1845,7 @@ Color: Brightness Hex color code - Color + Colors Pattern Choose pattern Intensity @@ -2008,6 +2030,7 @@ Telegram for Android %1$s Debug Menu Send Logs + Send Last Logs Clear Logs Enable Logs Disable Logs @@ -2118,6 +2141,37 @@ This code can be used to allow someone to log in to your Telegram account.\n\nTo confirm Telegram login, please go to Settings > Devices > Scan QR and scan the code. Telegram needs access to your camera so that you can scan QR codes. Set Profile Photo + Update Telegram + Version %1$s • %2$s + Downloading %1$d%%... + Update Now + Download Now + Remind me later + **What\'s new?**\n\n• Bug fixes and improvements. + Permanent + Link is no longer active + Cancel Forward + Select another chat + Do you want to cancel forwarding or to forward messages to a different chat? 
+ Chat list swipe gesture + Pin + Read + Archive + Mute + Delete + Change Folder + Unmute + Mute + Delete + Read + Unread + Unpin + Pin + Cancel Forwarding + Bio updated. + Name updated. + Channel title updated. + Channel description updated. Local Database Clear local database @@ -3388,7 +3442,8 @@ Cancelled Video Call Declined Video Call %1$s (%2$s) - You haven\'t made any calls yet. + No recent calls + Your recent voice and video calls\nwill appear here. **%1$s**\'s app is using an incompatible protocol. They need to update their app before you can call them. **%1$s**\'s app does not support calls. They need to update their app before you can call them. Sorry, **%1$s** is using an old version of Telegram that doesn\'t support video calls. @@ -3396,6 +3451,7 @@ Please rate the quality of your Telegram call Telegram needs access to your microphone so that you can make calls. Telegram needs access to your microphone and camera so that you can make video calls. + Telegram needs access to your camera so that you can make video calls. Add an optional comment Call Back Call Again @@ -3406,6 +3462,7 @@ Earpiece Headset Speaker + Phone Bluetooth Output Devices RETURN TO CALL @@ -3584,6 +3641,48 @@ Late by Share We will notify you when it starts. + **%1$s** joined the voice chat. + **%1$s** joined the voice chat. + Active voice chats + Recent calls + Share screen + Stop screen sharing + this is you + Open Chat + Add Photo + Add Bio + Edit Name + Bio + You can add a few lines about yourself. Everyone will see this text. 
+ Tap to add a Bio + Edit Bio + Add description + Edit description + tap to add photo or bio + tap to add bio + tap to add photo + tap to add description + Tap to add photo or description + Edit title + Title + Set New Photo + Camera + Video Preview + Video from your camera will be shared with other members of this voice chat + Share Camera Video + Preview + Pin + Unpin + Audio + Select audio output + Video on Pause + You are sharing\nyour screen + You are sharing your screen + STOP SHARING + Noise suppression + Enabled + Disabled + Video is only available\nfor the first %1$s Manage Invite Links You can create additional invite links that have a limited time or number of uses @@ -3640,6 +3739,12 @@ Voice call %1$s Video call %1$s + un1 are speaking + un1 is speaking + un1 are speaking + un1 are speaking + un1 are speaking + un1 are speaking Delete %1$d backgrounds Delete background Delete %1$d backgrounds @@ -4193,6 +4298,18 @@ %s peoples can join via this link %s peoples can join via this link %s peoples can join via this link + %1$d Dialogs pinned + Dialog pinned + %1$d Dialogs pinned + %1$d Dialogs pinned + %1$d Dialogs pinned + %1$d Dialogs pinned + %1$d Dialogs unpinned + Dialog unpinned + %1$d Dialogs unpinned + %1$d Dialogs unpinned + %1$d Dialogs unpinned + %1$d Dialogs unpinned Group Channel @@ -4366,47 +4483,5 @@ \'Remind today at\' HH:mm \'Remind on\' MMM d \'at\' HH:mm \'Remind on\' MMM d yyyy \'at\' HH:mm - Permanent - Link is no longer active - Cancel Forward - Select another chat - Do you want to cancel forwarding or to forward messages to a different chat? - Chat list swipe gesture - Pin - Read - Archive - Mute - Delete - Change Folder - Unmute - Mute - Delete - Read - Unread - Unpin - Pin - Cancel Forwarding - this is you - Open Chat - Add Photo - Add Bio - Edit Name - Bio - You can add a few lines about yourself. Everyone will see this text. 
- Tap to add a Bio - Edit Bio - Add description - Edit description - tap to add photo or bio - tap to add bio - tap to add photo - tap to add description - Tap to add photo or description - Edit title - Title - Set New Photo - Bio updated. - Name updated. - Channel title updated. - Channel description updated. + Menu diff --git a/build.gradle b/build.gradle index e15fedd39..e14be25e1 100644 --- a/build.gradle +++ b/build.gradle @@ -1,12 +1,11 @@ // Top-level build file where you can add configuration options common to all sub-projects/modules. buildscript { repositories { - jcenter() mavenCentral() google() } dependencies { - classpath 'com.android.tools.build:gradle:4.1.2' + classpath 'com.android.tools.build:gradle:4.2.1' classpath 'com.google.gms:google-services:4.3.5' } }