Update to 7.8.0 (2359)

DrKLO 2021-06-25 03:43:10 +03:00
parent a5939ccb34
commit e8d88e56e4
1875 changed files with 110771 additions and 43473 deletions

View file: Dockerfile

@@ -1,10 +1,10 @@
-FROM gradle:6.5.0-jdk8
+FROM gradle:6.7.1-jdk8
 ENV ANDROID_SDK_URL https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip
 ENV ANDROID_API_LEVEL android-30
 ENV ANDROID_BUILD_TOOLS_VERSION 30.0.3
 ENV ANDROID_HOME /usr/local/android-sdk-linux
-ENV ANDROID_NDK_VERSION 21.1.6352462
+ENV ANDROID_NDK_VERSION 21.4.7075529
 ENV ANDROID_VERSION 30
 ENV ANDROID_NDK_HOME ${ANDROID_HOME}/ndk/${ANDROID_NDK_VERSION}/
 ENV PATH ${PATH}:${ANDROID_HOME}/tools:${ANDROID_HOME}/platform-tools

View file: TMessagesProj/build.gradle

@@ -3,7 +3,6 @@ apply plugin: 'com.android.application'
 repositories {
     mavenCentral()
     google()
-    jcenter()
 }
 configurations {
@@ -16,7 +15,7 @@ configurations.all {
 }
 dependencies {
-    implementation 'androidx.core:core:1.3.2'
+    implementation 'androidx.core:core:1.5.0'
     implementation 'androidx.palette:palette:1.0.0'
     implementation 'androidx.exifinterface:exifinterface:1.3.2'
     implementation 'androidx.dynamicanimation:dynamicanimation:1.0.0'
@@ -25,16 +24,16 @@ dependencies {
     compileOnly 'org.checkerframework:checker-qual:2.5.2'
     compileOnly 'org.checkerframework:checker-compat-qual:2.5.0'
-    implementation 'com.google.firebase:firebase-messaging:21.1.0'
-    implementation 'com.google.firebase:firebase-config:20.0.4'
-    implementation 'com.google.firebase:firebase-datatransport:17.0.11'
-    implementation 'com.google.firebase:firebase-appindexing:19.2.0'
-    implementation 'com.google.android.gms:play-services-maps:17.0.0'
+    implementation 'com.google.firebase:firebase-messaging:22.0.0'
+    implementation 'com.google.firebase:firebase-config:21.0.0'
+    implementation 'com.google.firebase:firebase-datatransport:18.0.0'
+    implementation 'com.google.firebase:firebase-appindexing:20.0.0'
+    implementation 'com.google.android.gms:play-services-maps:17.0.1'
     implementation 'com.google.android.gms:play-services-auth:19.0.0'
     implementation 'com.google.android.gms:play-services-vision:16.2.0'
-    implementation 'com.google.android.gms:play-services-wearable:17.0.0'
+    implementation 'com.google.android.gms:play-services-wearable:17.1.0'
     implementation 'com.google.android.gms:play-services-location:18.0.0'
-    implementation 'com.google.android.gms:play-services-wallet:18.1.2'
+    implementation 'com.google.android.gms:play-services-wallet:18.1.3'
     implementation 'com.googlecode.mp4parser:isoparser:1.0.6'
     implementation 'com.stripe:stripe-android:2.0.2'
     implementation files('libs/libgsaverification-client.aar')
@@ -45,7 +44,7 @@ dependencies {
 android {
     compileSdkVersion 30
     buildToolsVersion '30.0.3'
-    ndkVersion "21.1.6352462"
+    ndkVersion "21.4.7075529"
     defaultConfig.applicationId = "org.telegram.messenger"
@@ -300,7 +299,7 @@ android {
         }
     }
-    defaultConfig.versionCode = 2293
+    defaultConfig.versionCode = 2359
     applicationVariants.all { variant ->
         variant.outputs.all { output ->
@@ -319,7 +318,7 @@ android {
     defaultConfig {
         minSdkVersion 16
         targetSdkVersion 29
-        versionName "7.7.2"
+        versionName "7.8.0"
         vectorDrawables.generatedDensities = ['mdpi', 'hdpi', 'xhdpi', 'xxhdpi']

View file: TMessagesProj/jni/CMakeLists.txt

@@ -399,7 +399,7 @@ target_compile_definitions(sqlite PUBLIC
 #voip
 include(${CMAKE_HOME_DIRECTORY}/voip/CMakeLists.txt)
-set(NATIVE_LIB "tmessages.38")
+set(NATIVE_LIB "tmessages.39")
 #tmessages
 add_library(${NATIVE_LIB} SHARED
@@ -669,7 +669,7 @@ target_include_directories(${NATIVE_LIB} PUBLIC
         lz4)
 target_link_libraries(${NATIVE_LIB}
-        -Wl,--whole-archive voipandroid -Wl,--no-whole-archive
+        -Wl,--whole-archive rnnoise voipandroid -Wl,--no-whole-archive
         tgvoip
         tgcalls
         tgcalls_tp
@@ -692,6 +692,7 @@ target_link_libraries(${NATIVE_LIB}
         log
         z
         GLESv2
+        EGL
         android
         OpenSLES
         cpufeatures)

View file: TMessagesProj/jni/audio.c

@@ -302,8 +302,8 @@ int initRecorder(const char *path, opus_int32 sampleRate) {
     inopt.gain = 0;
     inopt.endianness = 0;
     inopt.copy_comments = 0;
-    inopt.rawmode = 1;
-    inopt.ignorelength = 1;
+    inopt.rawmode = 0;
+    inopt.ignorelength = 0;
     inopt.samplesize = 16;
     inopt.channels = 1;
     inopt.skip = 0;
@@ -332,7 +332,7 @@ int initRecorder(const char *path, opus_int32 sampleRate) {
     _packet = malloc(max_frame_bytes);
     result = opus_encoder_ctl(_encoder, OPUS_SET_BITRATE(bitrate));
-    result = opus_encoder_ctl(_encoder, OPUS_SET_COMPLEXITY(10));
+    //result = opus_encoder_ctl(_encoder, OPUS_SET_COMPLEXITY(10));
     if (result != OPUS_OK) {
         LOGE("Error OPUS_SET_BITRATE returned: %s", opus_strerror(result));
         return 0;

View file: libavcodec/bytestream.h

@@ -0,0 +1,376 @@
/*
* Bytestream functions
* copyright (c) 2006 Baptiste Coudurier <baptiste.coudurier@free.fr>
* Copyright (c) 2012 Aneesh Dogra (lionaneesh) <lionaneesh@gmail.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_BYTESTREAM_H
#define AVCODEC_BYTESTREAM_H
#include <stdint.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/intreadwrite.h"
typedef struct GetByteContext {
const uint8_t *buffer, *buffer_end, *buffer_start;
} GetByteContext;
typedef struct PutByteContext {
uint8_t *buffer, *buffer_end, *buffer_start;
int eof;
} PutByteContext;
#define DEF(type, name, bytes, read, write) \
static av_always_inline type bytestream_get_ ## name(const uint8_t **b) \
{ \
(*b) += bytes; \
return read(*b - bytes); \
} \
static av_always_inline void bytestream_put_ ## name(uint8_t **b, \
const type value) \
{ \
write(*b, value); \
(*b) += bytes; \
} \
static av_always_inline void bytestream2_put_ ## name ## u(PutByteContext *p, \
const type value) \
{ \
bytestream_put_ ## name(&p->buffer, value); \
} \
static av_always_inline void bytestream2_put_ ## name(PutByteContext *p, \
const type value) \
{ \
if (!p->eof && (p->buffer_end - p->buffer >= bytes)) { \
write(p->buffer, value); \
p->buffer += bytes; \
} else \
p->eof = 1; \
} \
static av_always_inline type bytestream2_get_ ## name ## u(GetByteContext *g) \
{ \
return bytestream_get_ ## name(&g->buffer); \
} \
static av_always_inline type bytestream2_get_ ## name(GetByteContext *g) \
{ \
if (g->buffer_end - g->buffer < bytes) { \
g->buffer = g->buffer_end; \
return 0; \
} \
return bytestream2_get_ ## name ## u(g); \
} \
static av_always_inline type bytestream2_peek_ ## name(GetByteContext *g) \
{ \
if (g->buffer_end - g->buffer < bytes) \
return 0; \
return read(g->buffer); \
}
DEF(uint64_t, le64, 8, AV_RL64, AV_WL64)
DEF(unsigned int, le32, 4, AV_RL32, AV_WL32)
DEF(unsigned int, le24, 3, AV_RL24, AV_WL24)
DEF(unsigned int, le16, 2, AV_RL16, AV_WL16)
DEF(uint64_t, be64, 8, AV_RB64, AV_WB64)
DEF(unsigned int, be32, 4, AV_RB32, AV_WB32)
DEF(unsigned int, be24, 3, AV_RB24, AV_WB24)
DEF(unsigned int, be16, 2, AV_RB16, AV_WB16)
DEF(unsigned int, byte, 1, AV_RB8 , AV_WB8)
#if AV_HAVE_BIGENDIAN
# define bytestream2_get_ne16 bytestream2_get_be16
# define bytestream2_get_ne24 bytestream2_get_be24
# define bytestream2_get_ne32 bytestream2_get_be32
# define bytestream2_get_ne64 bytestream2_get_be64
# define bytestream2_get_ne16u bytestream2_get_be16u
# define bytestream2_get_ne24u bytestream2_get_be24u
# define bytestream2_get_ne32u bytestream2_get_be32u
# define bytestream2_get_ne64u bytestream2_get_be64u
# define bytestream2_put_ne16 bytestream2_put_be16
# define bytestream2_put_ne24 bytestream2_put_be24
# define bytestream2_put_ne32 bytestream2_put_be32
# define bytestream2_put_ne64 bytestream2_put_be64
# define bytestream2_peek_ne16 bytestream2_peek_be16
# define bytestream2_peek_ne24 bytestream2_peek_be24
# define bytestream2_peek_ne32 bytestream2_peek_be32
# define bytestream2_peek_ne64 bytestream2_peek_be64
#else
# define bytestream2_get_ne16 bytestream2_get_le16
# define bytestream2_get_ne24 bytestream2_get_le24
# define bytestream2_get_ne32 bytestream2_get_le32
# define bytestream2_get_ne64 bytestream2_get_le64
# define bytestream2_get_ne16u bytestream2_get_le16u
# define bytestream2_get_ne24u bytestream2_get_le24u
# define bytestream2_get_ne32u bytestream2_get_le32u
# define bytestream2_get_ne64u bytestream2_get_le64u
# define bytestream2_put_ne16 bytestream2_put_le16
# define bytestream2_put_ne24 bytestream2_put_le24
# define bytestream2_put_ne32 bytestream2_put_le32
# define bytestream2_put_ne64 bytestream2_put_le64
# define bytestream2_peek_ne16 bytestream2_peek_le16
# define bytestream2_peek_ne24 bytestream2_peek_le24
# define bytestream2_peek_ne32 bytestream2_peek_le32
# define bytestream2_peek_ne64 bytestream2_peek_le64
#endif
static av_always_inline void bytestream2_init(GetByteContext *g,
const uint8_t *buf,
int buf_size)
{
av_assert0(buf_size >= 0);
g->buffer = buf;
g->buffer_start = buf;
g->buffer_end = buf + buf_size;
}
static av_always_inline void bytestream2_init_writer(PutByteContext *p,
uint8_t *buf,
int buf_size)
{
av_assert0(buf_size >= 0);
p->buffer = buf;
p->buffer_start = buf;
p->buffer_end = buf + buf_size;
p->eof = 0;
}
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
{
return g->buffer_end - g->buffer;
}
static av_always_inline int bytestream2_get_bytes_left_p(PutByteContext *p)
{
return p->buffer_end - p->buffer;
}
static av_always_inline void bytestream2_skip(GetByteContext *g,
unsigned int size)
{
g->buffer += FFMIN(g->buffer_end - g->buffer, size);
}
static av_always_inline void bytestream2_skipu(GetByteContext *g,
unsigned int size)
{
g->buffer += size;
}
static av_always_inline void bytestream2_skip_p(PutByteContext *p,
unsigned int size)
{
int size2;
if (p->eof)
return;
size2 = FFMIN(p->buffer_end - p->buffer, size);
if (size2 != size)
p->eof = 1;
p->buffer += size2;
}
static av_always_inline int bytestream2_tell(GetByteContext *g)
{
return (int)(g->buffer - g->buffer_start);
}
static av_always_inline int bytestream2_tell_p(PutByteContext *p)
{
return (int)(p->buffer - p->buffer_start);
}
static av_always_inline int bytestream2_size(GetByteContext *g)
{
return (int)(g->buffer_end - g->buffer_start);
}
static av_always_inline int bytestream2_size_p(PutByteContext *p)
{
return (int)(p->buffer_end - p->buffer_start);
}
static av_always_inline int bytestream2_seek(GetByteContext *g,
int offset,
int whence)
{
switch (whence) {
case SEEK_CUR:
offset = av_clip(offset, -(g->buffer - g->buffer_start),
g->buffer_end - g->buffer);
g->buffer += offset;
break;
case SEEK_END:
offset = av_clip(offset, -(g->buffer_end - g->buffer_start), 0);
g->buffer = g->buffer_end + offset;
break;
case SEEK_SET:
offset = av_clip(offset, 0, g->buffer_end - g->buffer_start);
g->buffer = g->buffer_start + offset;
break;
default:
return AVERROR(EINVAL);
}
return bytestream2_tell(g);
}
static av_always_inline int bytestream2_seek_p(PutByteContext *p,
int offset,
int whence)
{
p->eof = 0;
switch (whence) {
case SEEK_CUR:
if (p->buffer_end - p->buffer < offset)
p->eof = 1;
offset = av_clip(offset, -(p->buffer - p->buffer_start),
p->buffer_end - p->buffer);
p->buffer += offset;
break;
case SEEK_END:
if (offset > 0)
p->eof = 1;
offset = av_clip(offset, -(p->buffer_end - p->buffer_start), 0);
p->buffer = p->buffer_end + offset;
break;
case SEEK_SET:
if (p->buffer_end - p->buffer_start < offset)
p->eof = 1;
offset = av_clip(offset, 0, p->buffer_end - p->buffer_start);
p->buffer = p->buffer_start + offset;
break;
default:
return AVERROR(EINVAL);
}
return bytestream2_tell_p(p);
}
static av_always_inline unsigned int bytestream2_get_buffer(GetByteContext *g,
uint8_t *dst,
unsigned int size)
{
int size2 = FFMIN(g->buffer_end - g->buffer, size);
memcpy(dst, g->buffer, size2);
g->buffer += size2;
return size2;
}
static av_always_inline unsigned int bytestream2_get_bufferu(GetByteContext *g,
uint8_t *dst,
unsigned int size)
{
memcpy(dst, g->buffer, size);
g->buffer += size;
return size;
}
static av_always_inline unsigned int bytestream2_put_buffer(PutByteContext *p,
const uint8_t *src,
unsigned int size)
{
int size2;
if (p->eof)
return 0;
size2 = FFMIN(p->buffer_end - p->buffer, size);
if (size2 != size)
p->eof = 1;
memcpy(p->buffer, src, size2);
p->buffer += size2;
return size2;
}
static av_always_inline unsigned int bytestream2_put_bufferu(PutByteContext *p,
const uint8_t *src,
unsigned int size)
{
memcpy(p->buffer, src, size);
p->buffer += size;
return size;
}
static av_always_inline void bytestream2_set_buffer(PutByteContext *p,
const uint8_t c,
unsigned int size)
{
int size2;
if (p->eof)
return;
size2 = FFMIN(p->buffer_end - p->buffer, size);
if (size2 != size)
p->eof = 1;
memset(p->buffer, c, size2);
p->buffer += size2;
}
static av_always_inline void bytestream2_set_bufferu(PutByteContext *p,
const uint8_t c,
unsigned int size)
{
memset(p->buffer, c, size);
p->buffer += size;
}
static av_always_inline unsigned int bytestream2_get_eof(PutByteContext *p)
{
return p->eof;
}
static av_always_inline unsigned int bytestream2_copy_bufferu(PutByteContext *p,
GetByteContext *g,
unsigned int size)
{
memcpy(p->buffer, g->buffer, size);
p->buffer += size;
g->buffer += size;
return size;
}
static av_always_inline unsigned int bytestream2_copy_buffer(PutByteContext *p,
GetByteContext *g,
unsigned int size)
{
int size2;
if (p->eof)
return 0;
size = FFMIN(g->buffer_end - g->buffer, size);
size2 = FFMIN(p->buffer_end - p->buffer, size);
if (size2 != size)
p->eof = 1;
return bytestream2_copy_bufferu(p, g, size2);
}
static av_always_inline unsigned int bytestream_get_buffer(const uint8_t **b,
uint8_t *dst,
unsigned int size)
{
memcpy(dst, *b, size);
(*b) += size;
return size;
}
static av_always_inline void bytestream_put_buffer(uint8_t **b,
const uint8_t *src,
unsigned int size)
{
memcpy(*b, src, size);
(*b) += size;
}
#endif /* AVCODEC_BYTESTREAM_H */
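
The point of the checked bytestream2_* readers above is that a read past the
end of the buffer returns 0 and clamps the position instead of overrunning,
which is what the NAL-splitting code added later in this commit relies on. A
minimal usage sketch (illustrative only; count_boxes is a hypothetical helper,
not part of the commit):

    /* Walks MP4-style boxes (32-bit big-endian size + 4-byte type tag)
     * without ever reading out of bounds. */
    static int count_boxes(const uint8_t *buf, int len)
    {
        GetByteContext bc;
        int n = 0;
        bytestream2_init(&bc, buf, len);
        while (bytestream2_get_bytes_left(&bc) >= 8) {
            unsigned size = bytestream2_get_be32(&bc);
            bytestream2_skip(&bc, 4);            /* box type tag */
            if (size < 8)
                break;                           /* malformed box */
            bytestream2_skip(&bc, size - 8);     /* clamped, cannot overrun */
            n++;
        }
        return n;
    }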

View file: libavcodec/get_bits.h

@@ -0,0 +1,673 @@
/*
* Copyright (c) 2004 Michael Niedermayer <michaelni@gmx.at>
* Copyright (c) 2016 Alexandra Hájková
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* bitstream reader API header.
*/
#ifndef AVCODEC_GET_BITS_H
#define AVCODEC_GET_BITS_H
#include <stdint.h>
#ifndef NEG_USR32
# define NEG_USR32(a,s) (((uint32_t)(a))>>(32-(s)))
#endif
/*
* Safe bitstream reading:
* optionally, the get_bits API can check to ensure that we
* don't read past input buffer boundaries. This is protected
* with CONFIG_SAFE_BITSTREAM_READER at the global level, and
* then below that with UNCHECKED_BITSTREAM_READER at the per-
* decoder level. This means that decoders that check internally
* can "#define UNCHECKED_BITSTREAM_READER 1" to disable
* overread checks.
* Boundary checking causes a minor performance penalty so for
* applications that don't want/need this, it can be disabled
* globally using "#define CONFIG_SAFE_BITSTREAM_READER 0".
*/
#ifndef UNCHECKED_BITSTREAM_READER
#define UNCHECKED_BITSTREAM_READER !CONFIG_SAFE_BITSTREAM_READER
#endif
#ifndef CACHED_BITSTREAM_READER
#define CACHED_BITSTREAM_READER 0
#endif
typedef struct GetBitContext {
const uint8_t *buffer, *buffer_end;
#if CACHED_BITSTREAM_READER
uint64_t cache;
unsigned bits_left;
#endif
int index;
int size_in_bits;
int size_in_bits_plus8;
} GetBitContext;
static inline unsigned int get_bits(GetBitContext *s, int n);
static inline void skip_bits(GetBitContext *s, int n);
static inline unsigned int show_bits(GetBitContext *s, int n);
/* Bitstream reader API docs:
* name
* arbitrary name which is used as prefix for the internal variables
*
* gb
* getbitcontext
*
* OPEN_READER(name, gb)
* load gb into local variables
*
* CLOSE_READER(name, gb)
* store local vars in gb
*
* UPDATE_CACHE(name, gb)
* Refill the internal cache from the bitstream.
* After this call at least MIN_CACHE_BITS will be available.
*
* GET_CACHE(name, gb)
* Will output the contents of the internal cache,
* next bit is MSB of 32 or 64 bits (FIXME 64 bits).
*
* SHOW_UBITS(name, gb, num)
* Will return the next num bits.
*
* SHOW_SBITS(name, gb, num)
* Will return the next num bits and do sign extension.
*
* SKIP_BITS(name, gb, num)
* Will skip over the next num bits.
* Note, this is equivalent to SKIP_CACHE; SKIP_COUNTER.
*
* SKIP_CACHE(name, gb, num)
* Will remove the next num bits from the cache (note SKIP_COUNTER
* MUST be called before UPDATE_CACHE / CLOSE_READER).
*
* SKIP_COUNTER(name, gb, num)
* Will increment the internal bit counter (see SKIP_CACHE & SKIP_BITS).
*
* LAST_SKIP_BITS(name, gb, num)
* Like SKIP_BITS, to be used if next call is UPDATE_CACHE or CLOSE_READER.
*
* BITS_LEFT(name, gb)
* Return the number of bits left
*
* For examples see get_bits, show_bits, skip_bits, get_vlc.
*/
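/* Putting the macros together, the canonical checked-read sequence (this is
 * exactly the pattern get_bits() below expands to; shown here as a sketch):
 *
 *     OPEN_READER(re, gb);
 *     UPDATE_CACHE(re, gb);                // >= MIN_CACHE_BITS now valid
 *     unsigned v = SHOW_UBITS(re, gb, n);  // peek n bits, n <= 25
 *     LAST_SKIP_BITS(re, gb, n);           // consume them
 *     CLOSE_READER(re, gb);                // write the index back to gb
 */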
#if CACHED_BITSTREAM_READER
# define MIN_CACHE_BITS 64
#elif defined LONG_BITSTREAM_READER
# define MIN_CACHE_BITS 32
#else
# define MIN_CACHE_BITS 25
#endif
#if !CACHED_BITSTREAM_READER
#define OPEN_READER_NOSIZE(name, gb) \
unsigned int name ## _index = (gb)->index; \
unsigned int av_unused name ## _cache
#if UNCHECKED_BITSTREAM_READER
#define OPEN_READER(name, gb) OPEN_READER_NOSIZE(name, gb)
#define BITS_AVAILABLE(name, gb) 1
#else
#define OPEN_READER(name, gb) \
OPEN_READER_NOSIZE(name, gb); \
unsigned int name ## _size_plus8 = (gb)->size_in_bits_plus8
#define BITS_AVAILABLE(name, gb) name ## _index < name ## _size_plus8
#endif
#define CLOSE_READER(name, gb) (gb)->index = name ## _index
# ifdef LONG_BITSTREAM_READER
# define UPDATE_CACHE_LE(name, gb) name ## _cache = \
AV_RL64((gb)->buffer + (name ## _index >> 3)) >> (name ## _index & 7)
# define UPDATE_CACHE_BE(name, gb) name ## _cache = \
AV_RB64((gb)->buffer + (name ## _index >> 3)) >> (32 - (name ## _index & 7))
#else
# define UPDATE_CACHE_LE(name, gb) name ## _cache = \
AV_RL32((gb)->buffer + (name ## _index >> 3)) >> (name ## _index & 7)
# define UPDATE_CACHE_BE(name, gb) name ## _cache = \
AV_RB32((gb)->buffer + (name ## _index >> 3)) << (name ## _index & 7)
#endif
#ifdef BITSTREAM_READER_LE
# define UPDATE_CACHE(name, gb) UPDATE_CACHE_LE(name, gb)
# define SKIP_CACHE(name, gb, num) name ## _cache >>= (num)
#else
# define UPDATE_CACHE(name, gb) UPDATE_CACHE_BE(name, gb)
# define SKIP_CACHE(name, gb, num) name ## _cache <<= (num)
#endif
#if UNCHECKED_BITSTREAM_READER
# define SKIP_COUNTER(name, gb, num) name ## _index += (num)
#else
# define SKIP_COUNTER(name, gb, num) \
name ## _index = FFMIN(name ## _size_plus8, name ## _index + (num))
#endif
#define BITS_LEFT(name, gb) ((int)((gb)->size_in_bits - name ## _index))
#define SKIP_BITS(name, gb, num) \
do { \
SKIP_CACHE(name, gb, num); \
SKIP_COUNTER(name, gb, num); \
} while (0)
#define LAST_SKIP_BITS(name, gb, num) SKIP_COUNTER(name, gb, num)
#define SHOW_UBITS_LE(name, gb, num) zero_extend(name ## _cache, num)
#define SHOW_SBITS_LE(name, gb, num) sign_extend(name ## _cache, num)
#define SHOW_UBITS_BE(name, gb, num) NEG_USR32(name ## _cache, num)
#define SHOW_SBITS_BE(name, gb, num) NEG_SSR32(name ## _cache, num)
#ifdef BITSTREAM_READER_LE
# define SHOW_UBITS(name, gb, num) SHOW_UBITS_LE(name, gb, num)
# define SHOW_SBITS(name, gb, num) SHOW_SBITS_LE(name, gb, num)
#else
# define SHOW_UBITS(name, gb, num) SHOW_UBITS_BE(name, gb, num)
# define SHOW_SBITS(name, gb, num) SHOW_SBITS_BE(name, gb, num)
#endif
#define GET_CACHE(name, gb) ((uint32_t) name ## _cache)
#endif
static inline int get_bits_count(const GetBitContext *s)
{
#if CACHED_BITSTREAM_READER
return s->index - s->bits_left;
#else
return s->index;
#endif
}
#if CACHED_BITSTREAM_READER
static inline void refill_32(GetBitContext *s, int is_le)
{
#if !UNCHECKED_BITSTREAM_READER
if (s->index >> 3 >= s->buffer_end - s->buffer)
return;
#endif
if (is_le)
s->cache = (uint64_t)AV_RL32(s->buffer + (s->index >> 3)) << s->bits_left | s->cache;
else
s->cache = s->cache | (uint64_t)AV_RB32(s->buffer + (s->index >> 3)) << (32 - s->bits_left);
s->index += 32;
s->bits_left += 32;
}
static inline void refill_64(GetBitContext *s, int is_le)
{
#if !UNCHECKED_BITSTREAM_READER
if (s->index >> 3 >= s->buffer_end - s->buffer)
return;
#endif
if (is_le)
s->cache = AV_RL64(s->buffer + (s->index >> 3));
else
s->cache = AV_RB64(s->buffer + (s->index >> 3));
s->index += 64;
s->bits_left = 64;
}
static inline uint64_t get_val(GetBitContext *s, unsigned n, int is_le)
{
uint64_t ret;
av_assert2(n>0 && n<=63);
if (is_le) {
ret = s->cache & ((UINT64_C(1) << n) - 1);
s->cache >>= n;
} else {
ret = s->cache >> (64 - n);
s->cache <<= n;
}
s->bits_left -= n;
return ret;
}
static inline unsigned show_val(const GetBitContext *s, unsigned n)
{
#ifdef BITSTREAM_READER_LE
return s->cache & ((UINT64_C(1) << n) - 1);
#else
return s->cache >> (64 - n);
#endif
}
#endif
/**
* Skips the specified number of bits.
* @param n the number of bits to skip,
* For the UNCHECKED_BITSTREAM_READER this must not cause the distance
* from the start to overflow int32_t. Staying within the bitstream + padding
* is sufficient, too.
*/
static inline void skip_bits_long(GetBitContext *s, int n)
{
#if CACHED_BITSTREAM_READER
skip_bits(s, n);
#else
#if UNCHECKED_BITSTREAM_READER
s->index += n;
#else
s->index += av_clip(n, -s->index, s->size_in_bits_plus8 - s->index);
#endif
#endif
}
#if CACHED_BITSTREAM_READER
static inline void skip_remaining(GetBitContext *s, unsigned n)
{
#ifdef BITSTREAM_READER_LE
s->cache >>= n;
#else
s->cache <<= n;
#endif
s->bits_left -= n;
}
#endif
/**
* Read 1-25 bits.
*/
static inline unsigned int get_bits(GetBitContext *s, int n)
{
register unsigned int tmp;
#if CACHED_BITSTREAM_READER
av_assert2(n>0 && n<=32);
if (n > s->bits_left) {
#ifdef BITSTREAM_READER_LE
refill_32(s, 1);
#else
refill_32(s, 0);
#endif
if (s->bits_left < 32)
s->bits_left = n;
}
#ifdef BITSTREAM_READER_LE
tmp = get_val(s, n, 1);
#else
tmp = get_val(s, n, 0);
#endif
#else
OPEN_READER(re, s);
av_assert2(n>0 && n<=25);
UPDATE_CACHE(re, s);
tmp = SHOW_UBITS(re, s, n);
LAST_SKIP_BITS(re, s, n);
CLOSE_READER(re, s);
#endif
av_assert2(tmp < UINT64_C(1) << n);
return tmp;
}
static inline void skip_bits(GetBitContext *s, int n)
{
#if CACHED_BITSTREAM_READER
if (n < s->bits_left)
skip_remaining(s, n);
else {
n -= s->bits_left;
s->cache = 0;
s->bits_left = 0;
if (n >= 64) {
unsigned skip = (n / 8) * 8;
n -= skip;
s->index += skip;
}
#ifdef BITSTREAM_READER_LE
refill_64(s, 1);
#else
refill_64(s, 0);
#endif
if (n)
skip_remaining(s, n);
}
#else
OPEN_READER(re, s);
LAST_SKIP_BITS(re, s, n);
CLOSE_READER(re, s);
#endif
}
static inline unsigned int get_bits1(GetBitContext *s)
{
#if CACHED_BITSTREAM_READER
if (!s->bits_left)
#ifdef BITSTREAM_READER_LE
refill_64(s, 1);
#else
refill_64(s, 0);
#endif
#ifdef BITSTREAM_READER_LE
return get_val(s, 1, 1);
#else
return get_val(s, 1, 0);
#endif
#else
unsigned int index = s->index;
uint8_t result = s->buffer[index >> 3];
#ifdef BITSTREAM_READER_LE
result >>= index & 7;
result &= 1;
#else
result <<= index & 7;
result >>= 8 - 1;
#endif
#if !UNCHECKED_BITSTREAM_READER
if (s->index < s->size_in_bits_plus8)
#endif
index++;
s->index = index;
return result;
#endif
}
static inline void skip_bits1(GetBitContext *s)
{
skip_bits(s, 1);
}
/**
* Read 0-32 bits.
*/
static inline unsigned int get_bits_long(GetBitContext *s, int n)
{
av_assert2(n>=0 && n<=32);
if (!n) {
return 0;
#if CACHED_BITSTREAM_READER
}
return get_bits(s, n);
#else
} else if (n <= MIN_CACHE_BITS) {
return get_bits(s, n);
} else {
#ifdef BITSTREAM_READER_LE
unsigned ret = get_bits(s, 16);
return ret | (get_bits(s, n - 16) << 16);
#else
unsigned ret = get_bits(s, 16) << (n - 16);
return ret | get_bits(s, n - 16);
#endif
}
#endif
}
/**
* Read 0-64 bits.
*/
static inline uint64_t get_bits64(GetBitContext *s, int n)
{
if (n <= 32) {
return get_bits_long(s, n);
} else {
#ifdef BITSTREAM_READER_LE
uint64_t ret = get_bits_long(s, 32);
return ret | (uint64_t) get_bits_long(s, n - 32) << 32;
#else
uint64_t ret = (uint64_t) get_bits_long(s, n - 32) << 32;
return ret | get_bits_long(s, 32);
#endif
}
}
static inline int check_marker(void *logctx, GetBitContext *s, const char *msg)
{
int bit = get_bits1(s);
if (!bit)
av_log(logctx, AV_LOG_INFO, "Marker bit missing at %d of %d %s\n",
get_bits_count(s) - 1, s->size_in_bits, msg);
return bit;
}
static inline int init_get_bits_xe(GetBitContext *s, const uint8_t *buffer,
int bit_size, int is_le)
{
int buffer_size;
int ret = 0;
if (bit_size >= INT_MAX - FFMAX(7, AV_INPUT_BUFFER_PADDING_SIZE*8) || bit_size < 0 || !buffer) {
bit_size = 0;
buffer = NULL;
ret = AVERROR_INVALIDDATA;
}
buffer_size = (bit_size + 7) >> 3;
s->buffer = buffer;
s->size_in_bits = bit_size;
s->size_in_bits_plus8 = bit_size + 8;
s->buffer_end = buffer + buffer_size;
s->index = 0;
#if CACHED_BITSTREAM_READER
s->cache = 0;
s->bits_left = 0;
refill_64(s, is_le);
#endif
return ret;
}
/**
* Initialize GetBitContext.
* @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes
* larger than the actual read bits because some optimized bitstream
* readers read 32 or 64 bit at once and could read over the end
* @param bit_size the size of the buffer in bits
* @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow.
*/
static inline int init_get_bits(GetBitContext *s, const uint8_t *buffer,
int bit_size)
{
#ifdef BITSTREAM_READER_LE
return init_get_bits_xe(s, buffer, bit_size, 1);
#else
return init_get_bits_xe(s, buffer, bit_size, 0);
#endif
}
/**
* Initialize GetBitContext.
* @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes
* larger than the actual read bits because some optimized bitstream
* readers read 32 or 64 bit at once and could read over the end
* @param byte_size the size of the buffer in bytes
* @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow.
*/
static inline int init_get_bits8(GetBitContext *s, const uint8_t *buffer,
int byte_size)
{
if (byte_size > INT_MAX / 8 || byte_size < 0)
byte_size = -1;
return init_get_bits(s, buffer, byte_size * 8);
}
static inline int init_get_bits8_le(GetBitContext *s, const uint8_t *buffer,
int byte_size)
{
if (byte_size > INT_MAX / 8 || byte_size < 0)
byte_size = -1;
return init_get_bits_xe(s, buffer, byte_size * 8, 1);
}
static inline const uint8_t *align_get_bits(GetBitContext *s)
{
int n = -get_bits_count(s) & 7;
if (n)
skip_bits(s, n);
return s->buffer + (s->index >> 3);
}
/**
* If the vlc code is invalid and max_depth=1, then no bits will be removed.
* If the vlc code is invalid and max_depth>1, then the number of bits removed
* is undefined.
*/
#define GET_VLC(code, name, gb, table, bits, max_depth) \
do { \
int n, nb_bits; \
unsigned int index; \
\
index = SHOW_UBITS(name, gb, bits); \
code = table[index][0]; \
n = table[index][1]; \
\
if (max_depth > 1 && n < 0) { \
LAST_SKIP_BITS(name, gb, bits); \
UPDATE_CACHE(name, gb); \
\
nb_bits = -n; \
\
index = SHOW_UBITS(name, gb, nb_bits) + code; \
code = table[index][0]; \
n = table[index][1]; \
if (max_depth > 2 && n < 0) { \
LAST_SKIP_BITS(name, gb, nb_bits); \
UPDATE_CACHE(name, gb); \
\
nb_bits = -n; \
\
index = SHOW_UBITS(name, gb, nb_bits) + code; \
code = table[index][0]; \
n = table[index][1]; \
} \
} \
SKIP_BITS(name, gb, n); \
} while (0)
#define GET_RL_VLC(level, run, name, gb, table, bits, \
max_depth, need_update) \
do { \
int n, nb_bits; \
unsigned int index; \
\
index = SHOW_UBITS(name, gb, bits); \
level = table[index].level; \
n = table[index].len; \
\
if (max_depth > 1 && n < 0) { \
SKIP_BITS(name, gb, bits); \
if (need_update) { \
UPDATE_CACHE(name, gb); \
} \
\
nb_bits = -n; \
\
index = SHOW_UBITS(name, gb, nb_bits) + level; \
level = table[index].level; \
n = table[index].len; \
if (max_depth > 2 && n < 0) { \
LAST_SKIP_BITS(name, gb, nb_bits); \
if (need_update) { \
UPDATE_CACHE(name, gb); \
} \
nb_bits = -n; \
\
index = SHOW_UBITS(name, gb, nb_bits) + level; \
level = table[index].level; \
n = table[index].len; \
} \
} \
run = table[index].run; \
SKIP_BITS(name, gb, n); \
} while (0)
static inline int decode012(GetBitContext *gb)
{
int n;
n = get_bits1(gb);
if (n == 0)
return 0;
else
return get_bits1(gb) + 1;
}
static inline int decode210(GetBitContext *gb)
{
if (get_bits1(gb))
return 0;
else
return 2 - get_bits1(gb);
}
static inline int get_bits_left(GetBitContext *gb)
{
return gb->size_in_bits - get_bits_count(gb);
}
static inline int skip_1stop_8data_bits(GetBitContext *gb)
{
if (get_bits_left(gb) <= 0)
return AVERROR_INVALIDDATA;
while (get_bits1(gb)) {
skip_bits(gb, 8);
if (get_bits_left(gb) <= 0)
return AVERROR_INVALIDDATA;
}
return 0;
}
static inline unsigned int show_bits_long(GetBitContext *s, int n)
{
if (n <= MIN_CACHE_BITS) {
return show_bits(s, n);
} else {
GetBitContext gb = *s;
return get_bits_long(&gb, n);
}
}
#endif /* AVCODEC_GET_BITS_H */
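
As the init_get_bits() documentation above notes, the buffer handed to the
reader must carry AV_INPUT_BUFFER_PADDING_SIZE zero bytes of padding, because
the optimized paths fetch 32 or 64 bits at once. A short sketch of the API in
use (parse_demo_header and its field layout are hypothetical, for illustration
only):

    static int parse_demo_header(const uint8_t *buf, int buf_size,
                                 unsigned *version, uint64_t *id)
    {
        GetBitContext gb;
        if (init_get_bits8(&gb, buf, buf_size) < 0)  /* buf must be padded */
            return AVERROR_INVALIDDATA;
        *version = get_bits(&gb, 3);                 /* 1-25 bits per call */
        skip_bits1(&gb);                             /* reserved flag bit */
        *id = get_bits64(&gb, 40);                   /* wide field via _64 */
        if (get_bits_left(&gb) < 0)                  /* overread detected */
            return AVERROR_INVALIDDATA;
        return 0;
    }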

View file: libavcodec/golomb.h

@@ -0,0 +1,478 @@
/*
* exp golomb vlc stuff
* Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
* Copyright (c) 2004 Alex Beregszaszi
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* @brief
* exp golomb vlc stuff
* @author Michael Niedermayer <michaelni@gmx.at> and Alex Beregszaszi
*/
#ifndef AVCODEC_GOLOMB_H
#define AVCODEC_GOLOMB_H
#include <stdint.h>
#include "get_bits.h"
#define INVALID_VLC 0x80000000
extern const uint8_t ff_golomb_vlc_len[512];
extern const uint8_t ff_ue_golomb_vlc_code[512];
extern const int8_t ff_se_golomb_vlc_code[512];
extern const uint8_t ff_ue_golomb_len[256];
extern const uint8_t ff_interleaved_golomb_vlc_len[256];
extern const uint8_t ff_interleaved_ue_golomb_vlc_code[256];
extern const int8_t ff_interleaved_se_golomb_vlc_code[256];
extern const uint8_t ff_interleaved_dirac_golomb_vlc_code[256];
/**
* Read an unsigned Exp-Golomb code in the range 0 to 8190.
*
* @returns the read value or a negative error code.
*/
static inline int get_ue_golomb(GetBitContext *gb)
{
unsigned int buf;
#if CACHED_BITSTREAM_READER
buf = show_bits_long(gb, 32);
if (buf >= (1 << 27)) {
buf >>= 32 - 9;
skip_bits_long(gb, ff_golomb_vlc_len[buf]);
return ff_ue_golomb_vlc_code[buf];
} else {
int log = 2 * av_log2(buf) - 31;
buf >>= log;
buf--;
skip_bits_long(gb, 32 - log);
return buf;
}
#else
OPEN_READER(re, gb);
UPDATE_CACHE(re, gb);
buf = GET_CACHE(re, gb);
if (buf >= (1 << 27)) {
buf >>= 32 - 9;
LAST_SKIP_BITS(re, gb, ff_golomb_vlc_len[buf]);
CLOSE_READER(re, gb);
return ff_ue_golomb_vlc_code[buf];
} else {
int log = 2 * av_log2(buf) - 31;
LAST_SKIP_BITS(re, gb, 32 - log);
CLOSE_READER(re, gb);
if (log < 7) {
av_log(NULL, AV_LOG_ERROR, "Invalid UE golomb code\n");
return AVERROR_INVALIDDATA;
}
buf >>= log;
buf--;
return buf;
}
#endif
}
/**
* Read an unsigned Exp-Golomb code in the range 0 to UINT32_MAX-1.
*/
static inline unsigned get_ue_golomb_long(GetBitContext *gb)
{
unsigned buf, log;
buf = show_bits_long(gb, 32);
log = 31 - av_log2(buf);
skip_bits_long(gb, log);
return get_bits_long(gb, log + 1) - 1;
}
/**
* read unsigned exp golomb code, constrained to a max of 31.
* the return value is undefined if the stored value exceeds 31.
*/
static inline int get_ue_golomb_31(GetBitContext *gb)
{
unsigned int buf;
#if CACHED_BITSTREAM_READER
buf = show_bits_long(gb, 32);
buf >>= 32 - 9;
skip_bits_long(gb, ff_golomb_vlc_len[buf]);
#else
OPEN_READER(re, gb);
UPDATE_CACHE(re, gb);
buf = GET_CACHE(re, gb);
buf >>= 32 - 9;
LAST_SKIP_BITS(re, gb, ff_golomb_vlc_len[buf]);
CLOSE_READER(re, gb);
#endif
return ff_ue_golomb_vlc_code[buf];
}
static inline unsigned get_interleaved_ue_golomb(GetBitContext *gb)
{
uint32_t buf;
#if CACHED_BITSTREAM_READER
buf = show_bits_long(gb, 32);
if (buf & 0xAA800000) {
buf >>= 32 - 8;
skip_bits_long(gb, ff_interleaved_golomb_vlc_len[buf]);
return ff_interleaved_ue_golomb_vlc_code[buf];
} else {
unsigned ret = 1;
do {
buf >>= 32 - 8;
skip_bits_long(gb, FFMIN(ff_interleaved_golomb_vlc_len[buf], 8));
if (ff_interleaved_golomb_vlc_len[buf] != 9) {
ret <<= (ff_interleaved_golomb_vlc_len[buf] - 1) >> 1;
ret |= ff_interleaved_dirac_golomb_vlc_code[buf];
break;
}
ret = (ret << 4) | ff_interleaved_dirac_golomb_vlc_code[buf];
buf = show_bits_long(gb, 32);
} while (get_bits_left(gb) > 0);
return ret - 1;
}
#else
OPEN_READER(re, gb);
UPDATE_CACHE(re, gb);
buf = GET_CACHE(re, gb);
if (buf & 0xAA800000) {
buf >>= 32 - 8;
LAST_SKIP_BITS(re, gb, ff_interleaved_golomb_vlc_len[buf]);
CLOSE_READER(re, gb);
return ff_interleaved_ue_golomb_vlc_code[buf];
} else {
unsigned ret = 1;
do {
buf >>= 32 - 8;
LAST_SKIP_BITS(re, gb,
FFMIN(ff_interleaved_golomb_vlc_len[buf], 8));
if (ff_interleaved_golomb_vlc_len[buf] != 9) {
ret <<= (ff_interleaved_golomb_vlc_len[buf] - 1) >> 1;
ret |= ff_interleaved_dirac_golomb_vlc_code[buf];
break;
}
ret = (ret << 4) | ff_interleaved_dirac_golomb_vlc_code[buf];
UPDATE_CACHE(re, gb);
buf = GET_CACHE(re, gb);
} while (ret<0x8000000U && BITS_AVAILABLE(re, gb));
CLOSE_READER(re, gb);
return ret - 1;
}
#endif
}
/**
* read unsigned truncated exp golomb code.
*/
static inline int get_te0_golomb(GetBitContext *gb, int range)
{
av_assert2(range >= 1);
if (range == 1)
return 0;
else if (range == 2)
return get_bits1(gb) ^ 1;
else
return get_ue_golomb(gb);
}
/**
* read unsigned truncated exp golomb code.
*/
static inline int get_te_golomb(GetBitContext *gb, int range)
{
av_assert2(range >= 1);
if (range == 2)
return get_bits1(gb) ^ 1;
else
return get_ue_golomb(gb);
}
/**
* read signed exp golomb code.
*/
static inline int get_se_golomb(GetBitContext *gb)
{
unsigned int buf;
#if CACHED_BITSTREAM_READER
buf = show_bits_long(gb, 32);
if (buf >= (1 << 27)) {
buf >>= 32 - 9;
skip_bits_long(gb, ff_golomb_vlc_len[buf]);
return ff_se_golomb_vlc_code[buf];
} else {
int log = 2 * av_log2(buf) - 31;
buf >>= log;
skip_bits_long(gb, 32 - log);
if (buf & 1)
buf = -(buf >> 1);
else
buf = (buf >> 1);
return buf;
}
#else
OPEN_READER(re, gb);
UPDATE_CACHE(re, gb);
buf = GET_CACHE(re, gb);
if (buf >= (1 << 27)) {
buf >>= 32 - 9;
LAST_SKIP_BITS(re, gb, ff_golomb_vlc_len[buf]);
CLOSE_READER(re, gb);
return ff_se_golomb_vlc_code[buf];
} else {
int log = av_log2(buf), sign;
LAST_SKIP_BITS(re, gb, 31 - log);
UPDATE_CACHE(re, gb);
buf = GET_CACHE(re, gb);
buf >>= log;
LAST_SKIP_BITS(re, gb, 32 - log);
CLOSE_READER(re, gb);
sign = -(buf & 1);
buf = ((buf >> 1) ^ sign) - sign;
return buf;
}
#endif
}
static inline int get_se_golomb_long(GetBitContext *gb)
{
unsigned int buf = get_ue_golomb_long(gb);
int sign = (buf & 1) - 1;
return ((buf >> 1) ^ sign) + 1;
}
static inline int get_interleaved_se_golomb(GetBitContext *gb)
{
unsigned int buf;
#if CACHED_BITSTREAM_READER
buf = show_bits_long(gb, 32);
if (buf & 0xAA800000) {
buf >>= 32 - 8;
skip_bits_long(gb, ff_interleaved_golomb_vlc_len[buf]);
return ff_interleaved_se_golomb_vlc_code[buf];
} else {
int log;
skip_bits(gb, 8);
buf |= 1 | show_bits(gb, 24);
if ((buf & 0xAAAAAAAA) == 0)
return INVALID_VLC;
for (log = 31; (buf & 0x80000000) == 0; log--)
buf = (buf << 2) - ((buf << log) >> (log - 1)) + (buf >> 30);
skip_bits_long(gb, 63 - 2 * log - 8);
return (signed) (((((buf << log) >> log) - 1) ^ -(buf & 0x1)) + 1) >> 1;
}
#else
OPEN_READER(re, gb);
UPDATE_CACHE(re, gb);
buf = GET_CACHE(re, gb);
if (buf & 0xAA800000) {
buf >>= 32 - 8;
LAST_SKIP_BITS(re, gb, ff_interleaved_golomb_vlc_len[buf]);
CLOSE_READER(re, gb);
return ff_interleaved_se_golomb_vlc_code[buf];
} else {
int log;
LAST_SKIP_BITS(re, gb, 8);
UPDATE_CACHE(re, gb);
buf |= 1 | (GET_CACHE(re, gb) >> 8);
if ((buf & 0xAAAAAAAA) == 0)
return INVALID_VLC;
for (log = 31; (buf & 0x80000000) == 0; log--)
buf = (buf << 2) - ((buf << log) >> (log - 1)) + (buf >> 30);
LAST_SKIP_BITS(re, gb, 63 - 2 * log - 8);
CLOSE_READER(re, gb);
return (signed) (((((buf << log) >> log) - 1) ^ -(buf & 0x1)) + 1) >> 1;
}
#endif
}
static inline int dirac_get_se_golomb(GetBitContext *gb)
{
uint32_t ret = get_interleaved_ue_golomb(gb);
if (ret) {
int sign = -get_bits1(gb);
ret = (ret ^ sign) - sign;
}
return ret;
}
/**
* read unsigned golomb rice code (ffv1).
*/
static inline int get_ur_golomb(GetBitContext *gb, int k, int limit,
int esc_len)
{
unsigned int buf;
int log;
#if CACHED_BITSTREAM_READER
buf = show_bits_long(gb, 32);
log = av_log2(buf);
if (log > 31 - limit) {
buf >>= log - k;
buf += (30 - log) << k;
skip_bits_long(gb, 32 + k - log);
return buf;
} else {
skip_bits_long(gb, limit);
buf = get_bits_long(gb, esc_len);
return buf + limit - 1;
}
#else
OPEN_READER(re, gb);
UPDATE_CACHE(re, gb);
buf = GET_CACHE(re, gb);
log = av_log2(buf);
if (log > 31 - limit) {
buf >>= log - k;
buf += (30U - log) << k;
LAST_SKIP_BITS(re, gb, 32 + k - log);
CLOSE_READER(re, gb);
return buf;
} else {
LAST_SKIP_BITS(re, gb, limit);
UPDATE_CACHE(re, gb);
buf = SHOW_UBITS(re, gb, esc_len);
LAST_SKIP_BITS(re, gb, esc_len);
CLOSE_READER(re, gb);
return buf + limit - 1;
}
#endif
}
#ifdef TRACE
static inline int get_ue(GetBitContext *s, const char *file, const char *func,
int line)
{
int show = show_bits(s, 24);
int pos = get_bits_count(s);
int i = get_ue_golomb(s);
int len = get_bits_count(s) - pos;
int bits = show >> (24 - len);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d ue @%5d in %s %s:%d\n",
bits, len, i, pos, file, func, line);
return i;
}
static inline int get_se(GetBitContext *s, const char *file, const char *func,
int line)
{
int show = show_bits(s, 24);
int pos = get_bits_count(s);
int i = get_se_golomb(s);
int len = get_bits_count(s) - pos;
int bits = show >> (24 - len);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d se @%5d in %s %s:%d\n",
bits, len, i, pos, file, func, line);
return i;
}
static inline int get_te(GetBitContext *s, int r, char *file, const char *func,
int line)
{
int show = show_bits(s, 24);
int pos = get_bits_count(s);
int i = get_te0_golomb(s, r);
int len = get_bits_count(s) - pos;
int bits = show >> (24 - len);
av_log(NULL, AV_LOG_DEBUG, "%5d %2d %3d te @%5d in %s %s:%d\n",
bits, len, i, pos, file, func, line);
return i;
}
#define get_ue_golomb(a) get_ue(a, __FILE__, __func__, __LINE__)
#define get_se_golomb(a) get_se(a, __FILE__, __func__, __LINE__)
#define get_te_golomb(a, r) get_te(a, r, __FILE__, __func__, __LINE__)
#define get_te0_golomb(a, r) get_te(a, r, __FILE__, __func__, __LINE__)
#endif /* TRACE */
#endif /* AVCODEC_GOLOMB_H */
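
Exp-Golomb ("ue(v)"/"se(v)") is the variable-length code H.264 headers are
built from: value v is coded as v+1 in binary, prefixed by one leading zero
per extra bit, so 0 -> "1", 1 -> "010", 2 -> "011", 3 -> "00100". A sketch of
the calling pattern (read_sps_prefix is hypothetical and only loosely modeled
on the start of an H.264 SPS; the real parser added at the end of this commit
branches on earlier fields):

    static void read_sps_prefix(GetBitContext *gb)
    {
        skip_bits(gb, 24);                   /* profile_idc, flags, level_idc */
        int sps_id = get_ue_golomb_31(gb);   /* ue(v), value must stay < 32 */
        int log2_max_frame_num_minus4 = get_ue_golomb(gb);  /* ue(v) */
        int delta = get_se_golomb(gb);       /* se(v): 0, 1, -1, 2, -2, ... */
        (void)sps_id; (void)log2_max_frame_num_minus4; (void)delta;
    }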

View file: libavcodec/vlc.h

@@ -0,0 +1,81 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VLC_H
#define AVCODEC_VLC_H
#include <stdint.h>
#define VLC_TYPE int16_t
typedef struct VLC {
int bits;
VLC_TYPE (*table)[2]; ///< code, bits
int table_size, table_allocated;
} VLC;
typedef struct RL_VLC_ELEM {
int16_t level;
int8_t len;
uint8_t run;
} RL_VLC_ELEM;
#define init_vlc(vlc, nb_bits, nb_codes, \
bits, bits_wrap, bits_size, \
codes, codes_wrap, codes_size, \
flags) \
ff_init_vlc_sparse(vlc, nb_bits, nb_codes, \
bits, bits_wrap, bits_size, \
codes, codes_wrap, codes_size, \
NULL, 0, 0, flags)
int ff_init_vlc_sparse(VLC *vlc, int nb_bits, int nb_codes,
const void *bits, int bits_wrap, int bits_size,
const void *codes, int codes_wrap, int codes_size,
const void *symbols, int symbols_wrap, int symbols_size,
int flags);
void ff_free_vlc(VLC *vlc);
#define INIT_VLC_LE 2
#define INIT_VLC_USE_NEW_STATIC 4
#define INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, h, i, j, static_size) \
do { \
static VLC_TYPE table[static_size][2]; \
(vlc)->table = table; \
(vlc)->table_allocated = static_size; \
ff_init_vlc_sparse(vlc, bits, a, b, c, d, e, f, g, h, i, j, \
INIT_VLC_USE_NEW_STATIC); \
} while (0)
#define INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, h, i, j, static_size) \
do { \
static VLC_TYPE table[static_size][2]; \
(vlc)->table = table; \
(vlc)->table_allocated = static_size; \
ff_init_vlc_sparse(vlc, bits, a, b, c, d, e, f, g, h, i, j, \
INIT_VLC_USE_NEW_STATIC | INIT_VLC_LE); \
} while (0)
#define INIT_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \
INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size)
#define INIT_LE_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \
INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size)
#endif /* AVCODEC_VLC_H */
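
Note that only the declaration of ff_init_vlc_sparse() travels in this header;
its implementation is not part of the file. A hedged sketch of building a
static table for a hypothetical 4-symbol prefix code ("0", "10", "110",
"111"), run once at init time:

    static VLC demo_vlc;
    static const uint8_t demo_lens[4]  = { 1, 2, 3, 3 };
    static const uint8_t demo_codes[4] = { 0, 2, 6, 7 };

    static void demo_vlc_init(void)
    {
        /* 3-bit primary lookup: 8 entries cover the longest code. */
        INIT_VLC_STATIC(&demo_vlc, 3, 4, demo_lens, 1, 1, demo_codes, 1, 1, 8);
    }

Decoding against demo_vlc.table would then go through the GET_VLC macro from
get_bits.h.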

View file: libavutil/intmath.h

@@ -0,0 +1,165 @@
/*
* Copyright (c) 2010 Mans Rullgard <mans@mansr.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_INTMATH_H
#define AVUTIL_INTMATH_H
#include <stdint.h>
#include "config.h"
#include "attributes.h"
#if ARCH_ARM
# include "arm/intmath.h"
#endif
#if ARCH_X86
# include "x86/intmath.h"
#endif
#if HAVE_FAST_CLZ
#if AV_GCC_VERSION_AT_LEAST(3,4)
#ifndef ff_log2
# define ff_log2(x) (31 - __builtin_clz((x)|1))
# ifndef ff_log2_16bit
# define ff_log2_16bit av_log2
# endif
#endif /* ff_log2 */
#endif /* AV_GCC_VERSION_AT_LEAST(3,4) */
#endif
extern const uint8_t ff_log2_tab[256];
#ifndef ff_log2
#define ff_log2 ff_log2_c
static av_always_inline av_const int ff_log2_c(unsigned int v)
{
int n = 0;
if (v & 0xffff0000) {
v >>= 16;
n += 16;
}
if (v & 0xff00) {
v >>= 8;
n += 8;
}
n += ff_log2_tab[v];
return n;
}
#endif
#ifndef ff_log2_16bit
#define ff_log2_16bit ff_log2_16bit_c
static av_always_inline av_const int ff_log2_16bit_c(unsigned int v)
{
int n = 0;
if (v & 0xff00) {
v >>= 8;
n += 8;
}
n += ff_log2_tab[v];
return n;
}
#endif
#define av_log2 ff_log2
#define av_log2_16bit ff_log2_16bit
/**
* @addtogroup lavu_math
* @{
*/
#if HAVE_FAST_CLZ
#if AV_GCC_VERSION_AT_LEAST(3,4)
#ifndef ff_ctz
#define ff_ctz(v) __builtin_ctz(v)
#endif
#ifndef ff_ctzll
#define ff_ctzll(v) __builtin_ctzll(v)
#endif
#ifndef ff_clz
#define ff_clz(v) __builtin_clz(v)
#endif
#endif
#endif
#ifndef ff_ctz
#define ff_ctz ff_ctz_c
/**
* Trailing zero bit count.
*
* @param v input value. If v is 0, the result is undefined.
* @return the number of trailing 0-bits
*/
/* We use the De-Bruijn method outlined in:
* http://supertech.csail.mit.edu/papers/debruijn.pdf. */
static av_always_inline av_const int ff_ctz_c(int v)
{
static const uint8_t debruijn_ctz32[32] = {
0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
};
return debruijn_ctz32[(uint32_t)((v & -v) * 0x077CB531U) >> 27];
}
#endif
#ifndef ff_ctzll
#define ff_ctzll ff_ctzll_c
/* We use the De-Bruijn method outlined in:
* http://supertech.csail.mit.edu/papers/debruijn.pdf. */
static av_always_inline av_const int ff_ctzll_c(long long v)
{
static const uint8_t debruijn_ctz64[64] = {
0, 1, 2, 53, 3, 7, 54, 27, 4, 38, 41, 8, 34, 55, 48, 28,
62, 5, 39, 46, 44, 42, 22, 9, 24, 35, 59, 56, 49, 18, 29, 11,
63, 52, 6, 26, 37, 40, 33, 47, 61, 45, 43, 21, 23, 58, 17, 10,
51, 25, 36, 32, 60, 20, 57, 16, 50, 31, 19, 15, 30, 14, 13, 12
};
return debruijn_ctz64[(uint64_t)((v & -v) * 0x022FDD63CC95386DU) >> 58];
}
#endif
#ifndef ff_clz
#define ff_clz ff_clz_c
static av_always_inline av_const unsigned ff_clz_c(unsigned x)
{
unsigned i = sizeof(x) * 8;
while (x) {
x >>= 1;
i--;
}
return i;
}
#endif
#if AV_GCC_VERSION_AT_LEAST(3,4)
#ifndef av_parity
#define av_parity __builtin_parity
#endif
#endif
/**
* @}
*/
#endif /* AVUTIL_INTMATH_H */
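
Two worked values as a quick sanity check on the bit-scan helpers, and why
they matter in this commit:

    /* av_log2(0x1000) == 12 -- index of the highest set bit (0x1000 = 2^12)
     * ff_ctz(0x28)    == 3  -- trailing zero bits (0x28 = 101000 binary)
     *
     * get_bit_length() in the NAL-splitting code later in this commit strips
     * the RBSP stop bit with size -= ff_ctz(v) + 1: a final byte v == 0x80
     * has ff_ctz(v) == 7, so the stop bit and its trailing zeros are all
     * trimmed. */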

View file: TMessagesProj/jni/gifvideo.cpp

@@ -10,12 +10,18 @@
 #include <libyuv.h>
 #include <tgnet/FileLog.h>
 #include "tgnet/ConnectionsManager.h"
+#include "voip/webrtc/common_video/h264/sps_parser.h"
+#include "voip/webrtc/common_video/h264/h264_common.h"
 #include "c_utils.h"
 extern "C" {
 #include <libavformat/avformat.h>
 #include <libavformat/isom.h>
+#include <libavcodec/bytestream.h>
+#include <libavcodec/get_bits.h>
+#include <libavcodec/golomb.h>
 #include <libavutil/eval.h>
+#include <libavutil/intmath.h>
 #include <libswscale/swscale.h>
 }
@@ -35,6 +41,53 @@ jmethodID jclass_AnimatedFileDrawableStream_cancel;
 jmethodID jclass_AnimatedFileDrawableStream_isFinishedLoadingFile;
 jmethodID jclass_AnimatedFileDrawableStream_getFinishedFilePath;
typedef struct H2645NAL {
uint8_t *rbsp_buffer;
int size;
const uint8_t *data;
int size_bits;
int raw_size;
const uint8_t *raw_data;
int type;
int temporal_id;
int nuh_layer_id;
int skipped_bytes;
int skipped_bytes_pos_size;
int *skipped_bytes_pos;
int ref_idc;
GetBitContext gb;
} H2645NAL;
typedef struct H2645RBSP {
uint8_t *rbsp_buffer;
AVBufferRef *rbsp_buffer_ref;
int rbsp_buffer_alloc_size;
int rbsp_buffer_size;
} H2645RBSP;
typedef struct H2645Packet {
H2645NAL *nals;
H2645RBSP rbsp;
int nb_nals;
int nals_allocated;
unsigned nal_buffer_size;
} H2645Packet;
void ff_h2645_packet_uninit(H2645Packet *pkt) {
int i;
for (i = 0; i < pkt->nals_allocated; i++) {
av_freep(&pkt->nals[i].skipped_bytes_pos);
}
av_freep(&pkt->nals);
pkt->nals_allocated = pkt->nal_buffer_size = 0;
if (pkt->rbsp.rbsp_buffer_ref) {
av_buffer_unref(&pkt->rbsp.rbsp_buffer_ref);
pkt->rbsp.rbsp_buffer = NULL;
} else
av_freep(&pkt->rbsp.rbsp_buffer);
pkt->rbsp.rbsp_buffer_alloc_size = pkt->rbsp.rbsp_buffer_size = 0;
}
 typedef struct VideoInfo {
     ~VideoInfo() {
@@ -88,6 +141,7 @@ typedef struct VideoInfo {
             fd = -1;
         }
+        ff_h2645_packet_uninit(&h2645Packet);
         av_packet_unref(&orig_pkt);
         video_stream_idx = -1;
@@ -108,6 +162,13 @@ typedef struct VideoInfo {
     bool stopped = false;
     bool seeking = false;
+    int firstWidth = 0;
+    int firstHeight = 0;
+    bool dropFrames = false;
+    H2645Packet h2645Packet = {nullptr};
     int32_t dst_linesize[1];
     struct SwsContext *sws_ctx = nullptr;
@@ -121,12 +182,24 @@ typedef struct VideoInfo {
     int64_t last_seek_p = 0;
 };
+void custom_log(void *ptr, int level, const char* fmt, va_list vl){
+    va_list vl2;
+    char line[1024];
+    static int print_prefix = 1;
+    va_copy(vl2, vl);
+    av_log_format_line(ptr, level, fmt, vl2, line, sizeof(line), &print_prefix);
+    va_end(vl2);
+    LOGE(line);
+}
 int open_codec_context(int *stream_idx, AVCodecContext **dec_ctx, AVFormatContext *fmt_ctx, enum AVMediaType type) {
     int ret, stream_index;
     AVStream *st;
     AVCodec *dec = NULL;
     AVDictionary *opts = NULL;
     ret = av_find_best_stream(fmt_ctx, type, -1, -1, NULL, 0);
     if (ret < 0) {
         LOGE("can't find %s stream in input file", av_get_media_type_string(type));
@@ -159,19 +232,481 @@ int open_codec_context(int *stream_idx, AVCodecContext **dec_ctx, AVFormatContext *fmt_ctx, enum AVMediaType type) {
         }
         *stream_idx = stream_index;
     }
     return 0;
 }
#define MAX_MBPAIR_SIZE (256*1024)
int ff_h2645_extract_rbsp(const uint8_t *src, int length, H2645RBSP *rbsp, H2645NAL *nal)
{
int i, si, di;
uint8_t *dst;
nal->skipped_bytes = 0;
#define STARTCODE_TEST \
if (i + 2 < length && src[i + 1] == 0 && src[i + 2] <= 3) { \
if (src[i + 2] != 3 && src[i + 2] != 0) { \
/* startcode, so we must be past the end */ \
length = i; \
} \
break; \
}
for (i = 0; i + 1 < length; i += 2) {
if (src[i])
continue;
if (i > 0 && src[i - 1] == 0)
i--;
STARTCODE_TEST;
}
if (i > length)
i = length;
nal->rbsp_buffer = &rbsp->rbsp_buffer[rbsp->rbsp_buffer_size];
dst = nal->rbsp_buffer;
memcpy(dst, src, i);
si = di = i;
while (si + 2 < length) {
if (src[si + 2] > 3) {
dst[di++] = src[si++];
dst[di++] = src[si++];
} else if (src[si] == 0 && src[si + 1] == 0 && src[si + 2] != 0) {
if (src[si + 2] == 3) {
dst[di++] = 0;
dst[di++] = 0;
si += 3;
if (nal->skipped_bytes_pos) {
nal->skipped_bytes++;
if (nal->skipped_bytes_pos_size < nal->skipped_bytes) {
nal->skipped_bytes_pos_size *= 2;
av_reallocp_array(&nal->skipped_bytes_pos,
nal->skipped_bytes_pos_size,
sizeof(*nal->skipped_bytes_pos));
if (!nal->skipped_bytes_pos) {
nal->skipped_bytes_pos_size = 0;
return AVERROR(ENOMEM);
}
}
if (nal->skipped_bytes_pos)
nal->skipped_bytes_pos[nal->skipped_bytes-1] = di - 1;
}
continue;
} else // next start code
goto nsc;
}
dst[di++] = src[si++];
}
while (si < length)
dst[di++] = src[si++];
nsc:
memset(dst + di, 0, AV_INPUT_BUFFER_PADDING_SIZE);
nal->data = dst;
nal->size = di;
nal->raw_data = src;
nal->raw_size = si;
rbsp->rbsp_buffer_size += si;
return si;
}
static inline int get_nalsize(int nal_length_size, const uint8_t *buf, int buf_size, int *buf_index) {
int i, nalsize = 0;
if (*buf_index >= buf_size - nal_length_size) {
return AVERROR(EAGAIN);
}
for (i = 0; i < nal_length_size; i++)
nalsize = ((unsigned)nalsize << 8) | buf[(*buf_index)++];
if (nalsize <= 0 || nalsize > buf_size - *buf_index) {
return AVERROR_INVALIDDATA;
}
return nalsize;
}
static int find_next_start_code(const uint8_t *buf, const uint8_t *next_avc) {
int i = 0;
if (buf + 3 >= next_avc)
return next_avc - buf;
while (buf + i + 3 < next_avc) {
if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 1)
break;
i++;
}
return i + 3;
}
static int get_bit_length(H2645NAL *nal, int skip_trailing_zeros) {
int size = nal->size;
int v;
while (skip_trailing_zeros && size > 0 && nal->data[size - 1] == 0)
size--;
if (!size)
return 0;
v = nal->data[size - 1];
if (size > INT_MAX / 8)
return AVERROR(ERANGE);
size *= 8;
/* remove the stop bit and following trailing zeros,
* or nothing for damaged bitstreams */
if (v)
size -= ff_ctz(v) + 1;
return size;
}
static void alloc_rbsp_buffer(H2645RBSP *rbsp, unsigned int size) {
int min_size = size;
if (size > INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE)
goto fail;
size += AV_INPUT_BUFFER_PADDING_SIZE;
if (rbsp->rbsp_buffer_alloc_size >= size &&
(!rbsp->rbsp_buffer_ref || av_buffer_is_writable(rbsp->rbsp_buffer_ref))) {
memset(rbsp->rbsp_buffer + min_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
return;
}
size = FFMIN(size + size / 16 + 32, INT_MAX);
if (rbsp->rbsp_buffer_ref)
av_buffer_unref(&rbsp->rbsp_buffer_ref);
else
av_free(rbsp->rbsp_buffer);
rbsp->rbsp_buffer = (uint8_t *) av_mallocz(size);
if (!rbsp->rbsp_buffer)
goto fail;
rbsp->rbsp_buffer_alloc_size = size;
return;
fail:
rbsp->rbsp_buffer_alloc_size = 0;
if (rbsp->rbsp_buffer_ref) {
av_buffer_unref(&rbsp->rbsp_buffer_ref);
rbsp->rbsp_buffer = NULL;
} else
av_freep(&rbsp->rbsp_buffer);
return;
}
static int h264_parse_nal_header(H2645NAL *nal) {
GetBitContext *gb = &nal->gb;
if (get_bits1(gb) != 0)
return AVERROR_INVALIDDATA;
nal->ref_idc = get_bits(gb, 2);
nal->type = get_bits(gb, 5);
return 1;
}
int ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, int is_nalff, int nal_length_size) {
GetByteContext bc;
int consumed, ret = 0;
int next_avc = is_nalff ? 0 : length;
int64_t padding = MAX_MBPAIR_SIZE;
bytestream2_init(&bc, buf, length);
alloc_rbsp_buffer(&pkt->rbsp, length + padding);
if (!pkt->rbsp.rbsp_buffer)
return AVERROR(ENOMEM);
pkt->rbsp.rbsp_buffer_size = 0;
pkt->nb_nals = 0;
while (bytestream2_get_bytes_left(&bc) >= 4) {
H2645NAL *nal;
int extract_length = 0;
int skip_trailing_zeros = 1;
if (bytestream2_tell(&bc) == next_avc) {
int i = 0;
extract_length = get_nalsize(nal_length_size, bc.buffer, bytestream2_get_bytes_left(&bc), &i);
if (extract_length < 0)
return extract_length;
bytestream2_skip(&bc, nal_length_size);
next_avc = bytestream2_tell(&bc) + extract_length;
} else {
int buf_index;
buf_index = find_next_start_code(bc.buffer, buf + next_avc);
bytestream2_skip(&bc, buf_index);
if (!bytestream2_get_bytes_left(&bc)) {
if (pkt->nb_nals > 0) {
return 0;
} else {
return AVERROR_INVALIDDATA;
}
}
extract_length = FFMIN(bytestream2_get_bytes_left(&bc), next_avc - bytestream2_tell(&bc));
if (bytestream2_tell(&bc) >= next_avc) {
bytestream2_skip(&bc, next_avc - bytestream2_tell(&bc));
continue;
}
}
if (pkt->nals_allocated < pkt->nb_nals + 1) {
int new_size = pkt->nals_allocated + 1;
void *tmp;
if (new_size >= INT_MAX / sizeof(*pkt->nals))
return AVERROR(ENOMEM);
tmp = av_fast_realloc(pkt->nals, &pkt->nal_buffer_size, new_size * sizeof(*pkt->nals));
if (!tmp)
return AVERROR(ENOMEM);
pkt->nals = (H2645NAL *) tmp;
memset(pkt->nals + pkt->nals_allocated, 0, sizeof(*pkt->nals));
nal = &pkt->nals[pkt->nb_nals];
nal->skipped_bytes_pos_size = 1024;
nal->skipped_bytes_pos = (int *) av_malloc_array(nal->skipped_bytes_pos_size, sizeof(*nal->skipped_bytes_pos));
if (!nal->skipped_bytes_pos)
return AVERROR(ENOMEM);
pkt->nals_allocated = new_size;
}
nal = &pkt->nals[pkt->nb_nals];
consumed = ff_h2645_extract_rbsp(bc.buffer, extract_length, &pkt->rbsp, nal);
if (consumed < 0)
return consumed;
pkt->nb_nals++;
bytestream2_skip(&bc, consumed);
/* see commit 3566042a0 */
if (bytestream2_get_bytes_left(&bc) >= 4 &&
bytestream2_peek_be32(&bc) == 0x000001E0)
skip_trailing_zeros = 0;
nal->size_bits = get_bit_length(nal, skip_trailing_zeros);
ret = init_get_bits(&nal->gb, nal->data, nal->size_bits);
if (ret < 0)
return ret;
ret = h264_parse_nal_header(nal);
if (ret <= 0 || nal->size <= 0 || nal->size_bits <= 0) {
pkt->nb_nals--;
}
}
return 0;
}
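ff_h2645_extract_rbsp, which the loop above calls but this section does not show, copies each NAL into the shared rbsp buffer while undoing H.264's emulation-prevention escaping: every 00 00 03 byte triple carries a guard byte 0x03 that must be dropped. A minimal sketch of that unescaping under an illustrative signature:

static size_t unescape_rbsp(const uint8_t *src, size_t len, uint8_t *dst) {
size_t di = 0;
for (size_t si = 0; si < len; si++) {
if (si + 2 < len && src[si] == 0 && src[si + 1] == 0 && src[si + 2] == 3) {
dst[di++] = 0; // keep the two zero bytes
dst[di++] = 0;
si += 2; // and skip the 0x03 guard byte
} else {
dst[di++] = src[si];
}
}
return di; // number of RBSP bytes written
}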
#define MAX_SPS_COUNT 32
const uint8_t ff_zigzag_direct[64] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,
27, 20, 13, 6, 7, 14, 21, 28,
35, 42, 49, 56, 57, 50, 43, 36,
29, 22, 15, 23, 30, 37, 44, 51,
58, 59, 52, 45, 38, 31, 39, 46,
53, 60, 61, 54, 47, 55, 62, 63
};
const uint8_t ff_zigzag_scan[16+1] = {
0 + 0 * 4, 1 + 0 * 4, 0 + 1 * 4, 0 + 2 * 4,
1 + 1 * 4, 2 + 0 * 4, 3 + 0 * 4, 2 + 1 * 4,
1 + 2 * 4, 0 + 3 * 4, 1 + 3 * 4, 2 + 2 * 4,
3 + 1 * 4, 3 + 2 * 4, 2 + 3 * 4, 3 + 3 * 4,
};
static int decode_scaling_list(GetBitContext *gb, uint8_t *factors, int size) {
int i, last = 8, next = 8;
const uint8_t *scan = size == 16 ? ff_zigzag_scan : ff_zigzag_direct;
if (get_bits1(gb)) { // scaling list present; otherwise keep the preset values
for (i = 0; i < size; i++) {
if (next) {
int v = get_se_golomb(gb);
if (v < -128 || v > 127) {
return AVERROR_INVALIDDATA;
}
next = (last + v) & 0xff;
}
if (!i && !next) { /* matrix not written, we use the preset one */
break;
}
last = factors[scan[i]] = next ? next : last;
}
}
return 0;
}
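The scaling list is delta-coded: each transmitted signed Exp-Golomb value v updates next = (last + v) & 0xff, a first-entry v that drives next to zero selects the preset matrix, and once next reaches zero on a later entry the remainder of the list repeats the last value. A short trace of those rules (the values are illustrative only):

static void trace_scaling_run(void) {
const int deltas[4] = {8, 4, -20, 0 /* ignored once next == 0 */};
uint8_t factors[16] = {0};
const uint8_t scan[4] = {0, 1, 4, 8}; // first four ff_zigzag_scan positions
int last = 8, next = 8;
for (int i = 0; i < 4; i++) {
if (next) next = (last + deltas[i]) & 0xff;
last = factors[scan[i]] = next ? next : last; // yields 16, 20, then 20 repeated
}
}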
static int decode_scaling_matrices(GetBitContext *gb, int chroma_format_idc, uint8_t(*scaling_matrix4)[16], uint8_t(*scaling_matrix8)[64]) {
int ret = 0;
if (get_bits1(gb)) {
ret |= decode_scaling_list(gb, scaling_matrix4[0], 16); // Intra, Y
ret |= decode_scaling_list(gb, scaling_matrix4[1], 16); // Intra, Cr
ret |= decode_scaling_list(gb, scaling_matrix4[2], 16); // Intra, Cb
ret |= decode_scaling_list(gb, scaling_matrix4[3], 16); // Inter, Y
ret |= decode_scaling_list(gb, scaling_matrix4[4], 16); // Inter, Cr
ret |= decode_scaling_list(gb, scaling_matrix4[5], 16); // Inter, Cb
ret |= decode_scaling_list(gb, scaling_matrix8[0], 64); // Intra, Y
ret |= decode_scaling_list(gb, scaling_matrix8[3], 64); // Inter, Y
if (chroma_format_idc == 3) {
ret |= decode_scaling_list(gb, scaling_matrix8[1], 64); // Intra, Cr
ret |= decode_scaling_list(gb, scaling_matrix8[4], 64); // Inter, Cr
ret |= decode_scaling_list(gb, scaling_matrix8[2], 64); // Intra, Cb
ret |= decode_scaling_list(gb, scaling_matrix8[5], 64); // Inter, Cb
}
if (!ret)
ret = 1; // 1 = matrices parsed, 0 = seq_scaling_matrix absent, <0 = error
}
return ret;
}
int ff_h264_decode_seq_parameter_set(GetBitContext *gb, int &width, int &height) {
int profile_idc, level_idc, constraint_set_flags = 0;
unsigned int sps_id;
int i;
int ret;
profile_idc = get_bits(gb, 8);
constraint_set_flags |= get_bits1(gb) << 0;
constraint_set_flags |= get_bits1(gb) << 1;
constraint_set_flags |= get_bits1(gb) << 2;
constraint_set_flags |= get_bits1(gb) << 3;
constraint_set_flags |= get_bits1(gb) << 4;
constraint_set_flags |= get_bits1(gb) << 5;
skip_bits(gb, 2); // reserved_zero_2bits
level_idc = get_bits(gb, 8);
sps_id = get_ue_golomb_31(gb);
if (sps_id >= MAX_SPS_COUNT) {
return false;
}
if (profile_idc == 100 || // High profile
profile_idc == 110 || // High10 profile
profile_idc == 122 || // High422 profile
profile_idc == 244 || // High444 Predictive profile
profile_idc == 44 || // Cavlc444 profile
profile_idc == 83 || // Scalable Constrained High profile (SVC)
profile_idc == 86 || // Scalable High Intra profile (SVC)
profile_idc == 118 || // Stereo High profile (MVC)
profile_idc == 128 || // Multiview High profile (MVC)
profile_idc == 138 || // Multiview Depth High profile (MVCD)
profile_idc == 144) { // old High444 profile
int chroma_format_idc = get_ue_golomb_31(gb);
if (chroma_format_idc > 3U) {
return false;
} else if (chroma_format_idc == 3) {
int residual_color_transform_flag = get_bits1(gb);
if (residual_color_transform_flag) {
return false;
}
}
int bit_depth_luma = get_ue_golomb(gb) + 8;
int bit_depth_chroma = get_ue_golomb(gb) + 8;
if (bit_depth_chroma != bit_depth_luma) {
return false;
}
if (bit_depth_luma < 8 || bit_depth_luma > 14 || bit_depth_chroma < 8 || bit_depth_chroma > 14) {
return false;
}
get_bits1(gb); // qpprime_y_zero_transform_bypass_flag
uint8_t scaling_matrix4[6][16];
uint8_t scaling_matrix8[6][64];
ret = decode_scaling_matrices(gb, chroma_format_idc, scaling_matrix4, scaling_matrix8);
if (ret < 0)
return false;
}
get_ue_golomb(gb); // log2_max_frame_num_minus4
int poc_type = get_ue_golomb_31(gb);
if (poc_type == 0) {
unsigned t = get_ue_golomb(gb); // log2_max_pic_order_cnt_lsb_minus4
if (t > 12) {
return false;
}
} else if (poc_type == 1) {
get_bits1(gb); // delta_pic_order_always_zero_flag
int offset_for_non_ref_pic = get_se_golomb_long(gb);
int offset_for_top_to_bottom_field = get_se_golomb_long(gb);
if (offset_for_non_ref_pic == INT32_MIN || offset_for_top_to_bottom_field == INT32_MIN) {
return false;
}
int poc_cycle_length = get_ue_golomb(gb);
if ((unsigned) poc_cycle_length >= 256) {
return false;
}
for (i = 0; i < poc_cycle_length; i++) {
int offset_for_ref_frame = get_se_golomb_long(gb);
if (offset_for_ref_frame == INT32_MIN) {
return false;
}
}
} else if (poc_type != 2) {
return false;
}
get_ue_golomb_31(gb); // max_num_ref_frames
get_bits1(gb); // gaps_in_frame_num_value_allowed_flag
int mb_width = get_ue_golomb(gb) + 1; // pic_width_in_mbs_minus1 + 1
int mb_height = get_ue_golomb(gb) + 1; // pic_height_in_map_units_minus1 + 1
if (width == 0 || height == 0) {
width = mb_width;
height = mb_height;
}
return mb_width != width || mb_height != height; // true when the SPS dimensions no longer match the first frame
}
int decode_packet(VideoInfo *info, int *got_frame) {
int ret = 0;
int decoded = info->pkt.size;
*got_frame = 0;
if (info->pkt.stream_index == info->video_stream_idx) {
-ret = avcodec_decode_video2(info->video_dec_ctx, info->frame, got_frame, &info->pkt);
-if (ret != 0) {
-return ret;
+if (info->video_stream->codecpar->codec_id == AV_CODEC_ID_H264 && decoded > 0) {
+ff_h2645_packet_split(&info->h2645Packet, info->pkt.data, info->pkt.size, 1, 4);
+for (int i = 0; i < info->h2645Packet.nb_nals; i++) {
+H2645NAL *nal = &info->h2645Packet.nals[i];
+switch (nal->type) {
+case 7: {
+GetBitContext tmp_gb = nal->gb;
+info->dropFrames = ff_h264_decode_seq_parameter_set(&tmp_gb, info->firstWidth, info->firstHeight);
+}
+}
+}
+}
+if (!info->dropFrames) {
+ret = avcodec_decode_video2(info->video_dec_ctx, info->frame, got_frame, &info->pkt);
+if (ret != 0) {
+return ret;
+}
}
}
@@ -585,7 +1120,7 @@ extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDr
if (got_frame) {
info->has_decoded_frames = true;
bool finished = false;
-if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
+if (info->frame->format == AV_PIX_FMT_YUV444P || info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
int64_t pkt_pts = info->frame->best_effort_timestamp;
if (pkt_pts >= pts) {
finished = true;
@@ -620,9 +1155,9 @@ static inline void writeFrameToBitmap(JNIEnv *env, VideoInfo *info, jintArray da
wantedHeight = bitmapHeight;
}
-void *pixels;
-if (AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0) {
-if (wantedWidth == info->frame->width && wantedHeight == info->frame->height || wantedWidth == info->frame->height && wantedHeight == info->frame->width) {
+if (wantedWidth == info->frame->width && wantedHeight == info->frame->height || wantedWidth == info->frame->height && wantedHeight == info->frame->width) {
+void *pixels;
+if (AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0) {
if (info->sws_ctx == nullptr) {
if (info->frame->format > AV_PIX_FMT_NONE && info->frame->format < AV_PIX_FMT_NB) {
info->sws_ctx = sws_getContext(info->frame->width, info->frame->height, (AVPixelFormat) info->frame->format, bitmapWidth, bitmapHeight, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
@@ -631,7 +1166,9 @@ static inline void writeFrameToBitmap(JNIEnv *env, VideoInfo *info, jintArray da
}
}
if (info->sws_ctx == nullptr || ((intptr_t) pixels) % 16 != 0) {
-if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_YUVJ420P) {
+if (info->frame->format == AV_PIX_FMT_YUV444P) {
+libyuv::H444ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight);
+} else if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_YUVJ420P) {
if (info->frame->colorspace == AVColorSpace::AVCOL_SPC_BT709) {
libyuv::H420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight);
} else {
@@ -706,7 +1243,7 @@ extern "C" JNIEXPORT int JNICALL Java_org_telegram_ui_Components_AnimatedFileDra
}
if (got_frame) {
bool finished = false;
-if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
+if (info->frame->format == AV_PIX_FMT_YUV444P || info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
int64_t pkt_pts = info->frame->best_effort_timestamp;
bool isLastPacket = false;
if (info->pkt.size == 0) {
@@ -800,7 +1337,7 @@ extern "C" JNIEXPORT jint JNICALL Java_org_telegram_ui_Components_AnimatedFileDr
}
if (got_frame) {
//LOGD("decoded frame with w = %d, h = %d, format = %d", info->frame->width, info->frame->height, info->frame->format);
-if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
+if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P || info->frame->format == AV_PIX_FMT_YUV444P) {
writeFrameToBitmap(env, info, data, bitmap, stride);
}
info->has_decoded_frames = true;
@@ -815,6 +1352,7 @@ extern "C" JNIEXPORT jint JNICALL Java_org_telegram_ui_Components_AnimatedFileDr
}
extern "C" jint videoOnJNILoad(JavaVM *vm, JNIEnv *env) {
+//av_log_set_callback(custom_log);
jclass_AnimatedFileDrawableStream = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/AnimatedFileDrawableStream"));
if (jclass_AnimatedFileDrawableStream == 0) {
return JNI_FALSE;

View file

@@ -1,14 +1,17 @@
#include <jni.h>
-#include <stdio.h>
-#include <setjmp.h>
-#include <stdlib.h>
-#include <string.h>
-#include <math.h>
+#include <cstdio>
+#include <csetjmp>
+#include <cstdlib>
+#include <cstring>
+#include <cmath>
#include <unistd.h>
#include <android/bitmap.h>
#include <string>
#include <mozjpeg/java/org_libjpegturbo_turbojpeg_TJ.h>
#include <mozjpeg/jpeglib.h>
+#include <tgnet/FileLog.h>
+#include <vector>
+#include <algorithm>
#include "libwebp/webp/decode.h"
#include "libwebp/webp/encode.h"
#include "mozjpeg/turbojpeg.h"
@@ -150,7 +153,7 @@ static void fastBlurMore(int32_t w, int32_t h, int32_t stride, uint8_t *pix, int
}
static void fastBlur(int32_t w, int32_t h, int32_t stride, uint8_t *pix, int32_t radius) {
-if (pix == NULL) {
+if (pix == nullptr) {
return;
}
const int32_t r1 = radius + 1;
@@ -173,7 +176,7 @@ static void fastBlur(int32_t w, int32_t h, int32_t stride, uint8_t *pix, int32_t
}
uint64_t *rgb = new uint64_t[w * h];
-if (rgb == NULL) {
+if (rgb == nullptr) {
return;
}
@@ -450,11 +453,11 @@ JNIEXPORT int Java_org_telegram_messenger_Utilities_needInvert(JNIEnv *env, jcla
return 0;
}
-void *pixels = 0;
+void *pixels = nullptr;
if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) {
return 0;
}
-if (pixels == NULL) {
+if (pixels == nullptr) {
return 0;
}
uint8_t *pix = (uint8_t *) pixels;
@@ -516,7 +519,7 @@ JNIEXPORT void Java_org_telegram_messenger_Utilities_blurBitmap(JNIEnv *env, jcl
return;
}
-void *pixels = 0;
+void *pixels = nullptr;
if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) {
return;
}
@@ -642,7 +645,7 @@ JNIEXPORT jint Java_org_telegram_messenger_Utilities_pinBitmap(JNIEnv *env, jcla
}
JNIEXPORT void Java_org_telegram_messenger_Utilities_unpinBitmap(JNIEnv *env, jclass clazz, jobject bitmap) {
-if (bitmap == NULL) {
+if (bitmap == nullptr) {
return;
}
AndroidBitmap_unlockPixels(env, bitmap);
@@ -680,7 +683,7 @@ JNIEXPORT jboolean Java_org_telegram_messenger_Utilities_loadWebpImage(JNIEnv *e
return 0;
}
-void *bitmapPixels = 0;
+void *bitmapPixels = nullptr;
if (AndroidBitmap_lockPixels(env, outputBitmap, &bitmapPixels) != ANDROID_BITMAP_RESUT_SUCCESS) {
env->ThrowNew(jclass_RuntimeException, "Failed to lock Bitmap pixels");
return 0;
@@ -723,7 +726,7 @@ JNIEXPORT void Java_org_telegram_messenger_Utilities_stackBlurBitmap(JNIEnv *env
int h = info.height;
int stride = info.stride;
-unsigned char *pixels = 0;
+unsigned char *pixels = nullptr;
AndroidBitmap_lockPixels(env, bitmap, (void **) &pixels);
if (!pixels) {
return;
@@ -1166,4 +1169,129 @@ JNIEXPORT jint Java_org_telegram_messenger_Utilities_saveProgressiveJpeg(JNIEnv
return outSize;*/
}
std::vector<std::pair<float, float>> gatherPositions(std::vector<std::pair<float, float>> list, int phase) {
std::vector<std::pair<float, float>> result(4);
for (int i = 0; i < 4; i++) {
int pos = phase + i * 2;
while (pos >= 8) {
pos -= 8;
}
result[i] = list[pos];
result[i].second = 1.0f - result[i].second;
}
return result;
}
static float *pixelCache = nullptr;
JNIEXPORT void Java_org_telegram_messenger_Utilities_generateGradient(JNIEnv *env, jclass clazz, jobject bitmap, jboolean unpin, jint phase, jfloat progress, jint width, jint height, jint stride, jintArray colors) {
if (!bitmap) {
return;
}
if (!width || !height) {
return;
}
uint8_t *pixels = nullptr;
if (AndroidBitmap_lockPixels(env, bitmap, (void **) &pixels) < 0) {
return;
}
std::vector<std::pair<float, float>> positions{
{0.80f, 0.10f},
{0.60f, 0.20f},
{0.35f, 0.25f},
{0.25f, 0.60f},
{0.20f, 0.90f},
{0.40f, 0.80f},
{0.65f, 0.75f},
{0.75f, 0.40f}
};
int32_t previousPhase = phase + 1;
if (previousPhase > 7) {
previousPhase = 0;
}
std::vector<std::pair<float, float>> previous = gatherPositions(positions, previousPhase);
std::vector<std::pair<float, float>> current = gatherPositions(positions, phase);
auto colorsArray = (uint8_t *) env->GetIntArrayElements(colors, nullptr);
/*float *newPixelCache = nullptr;
if (pixelCache == nullptr) {
newPixelCache = new float[width * height * 2];
}*/
float directPixelY;
float centerDistanceY;
float centerDistanceY2;
int32_t colorsCount = colorsArray[12] == 0 ? 3 : 4;
for (int y = 0; y < height; y++) {
//if (pixelCache == nullptr) {
directPixelY = (float) y / (float) height;
centerDistanceY = directPixelY - 0.5f;
centerDistanceY2 = centerDistanceY * centerDistanceY;
//}
uint32_t offset = y * stride;
for (int x = 0; x < width; x++) {
float pixelX;
float pixelY;
/*if (pixelCache != nullptr) {
pixelX = pixelCache[(y * width + x) * 2];
pixelY = pixelCache[(y * width + x) * 2 + 1];
} else {*/
float directPixelX = (float) x / (float) width;
float centerDistanceX = directPixelX - 0.5f;
float centerDistance = sqrtf(centerDistanceX * centerDistanceX + centerDistanceY2);
float swirlFactor = 0.35f * centerDistance;
float theta = swirlFactor * swirlFactor * 0.8f * 8.0f;
float sinTheta = sinf(theta);
float cosTheta = cosf(theta);
pixelX = /*newPixelCache[(y * width + x) * 2] =*/ std::max(0.0f, std::min(1.0f, 0.5f + centerDistanceX * cosTheta - centerDistanceY * sinTheta));
pixelY = /*newPixelCache[(y * width + x) * 2 + 1] =*/ std::max(0.0f, std::min(1.0f, 0.5f + centerDistanceX * sinTheta + centerDistanceY * cosTheta));
//}
float distanceSum = 0.0f;
float r = 0.0f;
float g = 0.0f;
float b = 0.0f;
for (int i = 0; i < colorsCount; i++) {
float colorX = previous[i].first + (current[i].first - previous[i].first) * progress;
float colorY = previous[i].second + (current[i].second - previous[i].second) * progress;
float distanceX = pixelX - colorX;
float distanceY = pixelY - colorY;
float distance = std::max(0.0f, 0.9f - sqrtf(distanceX * distanceX + distanceY * distanceY));
distance = distance * distance * distance * distance;
distanceSum += distance;
r = r + distance * ((float) colorsArray[i * 4] / 255.0f);
g = g + distance * ((float) colorsArray[i * 4 + 1] / 255.0f);
b = b + distance * ((float) colorsArray[i * 4 + 2] / 255.0f);
}
pixels[offset + x * 4] = (uint8_t) (b / distanceSum * 255.0f);
pixels[offset + x * 4 + 1] = (uint8_t) (g / distanceSum * 255.0f);
pixels[offset + x * 4 + 2] = (uint8_t) (r / distanceSum * 255.0f);
pixels[offset + x * 4 + 3] = 0xff;
}
}
/*if (newPixelCache != nullptr) {
delete [] pixelCache;
pixelCache = newPixelCache;
}*/
env->ReleaseIntArrayElements(colors, (jint *) colorsArray, JNI_ABORT);
if (unpin) {
AndroidBitmap_unlockPixels(env, bitmap);
}
}
}
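generateGradient warps every pixel with a small swirl around the bitmap center, then mixes the three or four anchor colors with a steeply falling inverse-distance weight and renormalizes the result. A condensed sketch of just the per-pixel weighting step, assuming coordinates already normalized to [0, 1] (the struct and function names are illustrative):

#include <algorithm>
#include <cmath>
struct AnchorColor { float x, y, r, g, b; };
static void blendPixel(float px, float py, const AnchorColor *anchors, int n, float out[3]) {
float sum = 0.0f, r = 0.0f, g = 0.0f, b = 0.0f;
for (int i = 0; i < n; i++) {
float dx = px - anchors[i].x;
float dy = py - anchors[i].y;
float w = std::max(0.0f, 0.9f - sqrtf(dx * dx + dy * dy)); // distant anchors clamp to zero
w = w * w * w * w; // the fourth power sharpens each color's falloff
sum += w;
r += w * anchors[i].r;
g += w * anchors[i].g;
b += w * anchors[i].b;
}
// the anchor layout keeps sum > 0 everywhere on the unit square, as in the code above
out[0] = r / sum;
out[1] = g / sum;
out[2] = b / sum;
}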

View file

@@ -45,19 +45,19 @@ typedef struct LottieInfo {
char *compressBuffer = nullptr;
const char *buffer = nullptr;
bool firstFrame = false;
-int bufferSize;
-int compressBound;
-int firstFrameSize;
+int bufferSize = 0;
+int compressBound = 0;
+int firstFrameSize = 0;
volatile uint32_t framesAvailableInCache = 0;
};
-JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *env, jclass clazz, jstring src, jint w, jint h, jintArray data, jboolean precache, jintArray colorReplacement, jboolean limitFps) {
-LottieInfo *info = new LottieInfo();
+JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *env, jclass clazz, jstring src, jstring json, jint w, jint h, jintArray data, jboolean precache, jintArray colorReplacement, jboolean limitFps) {
+auto info = new LottieInfo();
std::map<int32_t, int32_t> *colors = nullptr;
int color = 0;
if (colorReplacement != nullptr) {
-jint *arr = env->GetIntArrayElements(colorReplacement, 0);
+jint *arr = env->GetIntArrayElements(colorReplacement, nullptr);
if (arr != nullptr) {
jsize len = env->GetArrayLength(colorReplacement);
colors = new std::map<int32_t, int32_t>();
@@ -71,10 +71,18 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *e
}
}
-char const *srcString = env->GetStringUTFChars(src, 0);
+char const *srcString = env->GetStringUTFChars(src, nullptr);
info->path = srcString;
-info->animation = rlottie::Animation::loadFromFile(info->path, colors);
-if (srcString != 0) {
+if (json != nullptr) {
+char const *jsonString = env->GetStringUTFChars(json, nullptr);
+if (jsonString) {
+info->animation = rlottie::Animation::loadFromData(jsonString, info->path, colors);
+env->ReleaseStringUTFChars(json, jsonString);
+}
+} else {
+info->animation = rlottie::Animation::loadFromFile(info->path, colors);
+}
+if (srcString) {
env->ReleaseStringUTFChars(src, srcString);
}
if (info->animation == nullptr) {
@@ -91,7 +99,7 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *e
info->precache = precache;
if (info->precache) {
info->cacheFile = info->path;
-std::string::size_type index = info->cacheFile.find_last_of("/");
+std::string::size_type index = info->cacheFile.find_last_of('/');
if (index != std::string::npos) {
std::string dir = info->cacheFile.substr(0, index) + "/acache";
mkdir(dir.c_str(), 0777);
@@ -119,13 +127,13 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *e
info->maxFrameSize = maxFrameSize;
fread(&(info->imageSize), sizeof(uint32_t), 1, precacheFile);
info->fileOffset = 9;
-utimensat(0, info->cacheFile.c_str(), NULL, 0);
+utimensat(0, info->cacheFile.c_str(), nullptr, 0);
}
fclose(precacheFile);
}
}
-jint *dataArr = env->GetIntArrayElements(data, 0);
+jint *dataArr = env->GetIntArrayElements(data, nullptr);
if (dataArr != nullptr) {
dataArr[0] = (jint) info->frameCount;
dataArr[1] = (jint) info->animation->frameRate();
@@ -138,7 +146,7 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_create(JNIEnv *e
JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_createWithJson(JNIEnv *env, jclass clazz, jstring json, jstring name, jintArray data, jintArray colorReplacement) {
std::map<int32_t, int32_t> *colors = nullptr;
if (colorReplacement != nullptr) {
-jint *arr = env->GetIntArrayElements(colorReplacement, 0);
+jint *arr = env->GetIntArrayElements(colorReplacement, nullptr);
if (arr != nullptr) {
jsize len = env->GetArrayLength(colorReplacement);
colors = new std::map<int32_t, int32_t>();
@@ -149,15 +157,15 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_createWithJson(J
}
}
-LottieInfo *info = new LottieInfo();
-char const *jsonString = env->GetStringUTFChars(json, 0);
-char const *nameString = env->GetStringUTFChars(name, 0);
+auto info = new LottieInfo();
+char const *jsonString = env->GetStringUTFChars(json, nullptr);
+char const *nameString = env->GetStringUTFChars(name, nullptr);
info->animation = rlottie::Animation::loadFromData(jsonString, nameString, colors);
-if (jsonString != 0) {
+if (jsonString) {
env->ReleaseStringUTFChars(json, jsonString);
}
-if (nameString != 0) {
+if (nameString) {
env->ReleaseStringUTFChars(name, nameString);
}
if (info->animation == nullptr) {
@@ -167,7 +175,7 @@ JNIEXPORT jlong Java_org_telegram_ui_Components_RLottieDrawable_createWithJson(J
info->frameCount = info->animation->totalFrame();
info->fps = (int) info->animation->frameRate();
-jint *dataArr = env->GetIntArrayElements(data, 0);
+jint *dataArr = env->GetIntArrayElements(data, nullptr);
if (dataArr != nullptr) {
dataArr[0] = (int) info->frameCount;
dataArr[1] = (int) info->animation->frameRate();
@@ -181,7 +189,7 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_destroy(JNIEnv *e
if (!ptr) {
return;
}
-LottieInfo *info = (LottieInfo *) (intptr_t) ptr;
+auto info = (LottieInfo *) (intptr_t) ptr;
delete info;
}
@@ -189,10 +197,10 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_setLayerColor(JNI
if (!ptr || layer == nullptr) {
return;
}
-LottieInfo *info = (LottieInfo *) (intptr_t) ptr;
-char const *layerString = env->GetStringUTFChars(layer, 0);
+auto info = (LottieInfo *) (intptr_t) ptr;
+char const *layerString = env->GetStringUTFChars(layer, nullptr);
info->animation->setValue<Property::Color>(layerString, Color(((color) & 0xff) / 255.0f, ((color >> 8) & 0xff) / 255.0f, ((color >> 16) & 0xff) / 255.0f));
-if (layerString != 0) {
+if (layerString) {
env->ReleaseStringUTFChars(layer, layerString);
}
}
@@ -201,9 +209,9 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_replaceColors(JNI
if (!ptr || colorReplacement == nullptr) {
return;
}
-LottieInfo *info = (LottieInfo *) (intptr_t) ptr;
-jint *arr = env->GetIntArrayElements(colorReplacement, 0);
+auto info = (LottieInfo *) (intptr_t) ptr;
+jint *arr = env->GetIntArrayElements(colorReplacement, nullptr);
if (arr != nullptr) {
jsize len = env->GetArrayLength(colorReplacement);
for (int32_t a = 0; a < len / 2; a++) {
@@ -240,7 +248,7 @@ void CacheWriteThreadProc() {
lk.unlock();
if (task != nullptr) {
-uint32_t size = (uint32_t) LZ4_compress_default(task->buffer, task->compressBuffer, task->bufferSize, task->compressBound);
+auto size = (uint32_t) LZ4_compress_default(task->buffer, task->compressBuffer, task->bufferSize, task->compressBound);
if (task->firstFrame) {
task->firstFrameSize = size;
task->fileOffset = 9 + sizeof(uint32_t) + task->firstFrameSize;
@@ -262,7 +270,7 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_createCache(JNIEn
if (ptr == NULL) {
return;
}
-LottieInfo *info = (LottieInfo *) (intptr_t) ptr;
+auto info = (LottieInfo *) (intptr_t) ptr;
FILE *cacheFile = fopen(info->cacheFile.c_str(), "r+");
if (cacheFile != nullptr) {
@@ -288,8 +296,8 @@ JNIEXPORT void Java_org_telegram_ui_Components_RLottieDrawable_createCache(JNIEn
info->imageSize = (uint32_t) w * h * 4;
info->compressBound = LZ4_compressBound(info->bufferSize);
info->compressBuffer = new char[info->compressBound];
-uint8_t *firstBuffer = new uint8_t[info->bufferSize];
-uint8_t *secondBuffer = new uint8_t[info->bufferSize];
+auto firstBuffer = new uint8_t[info->bufferSize];
+auto secondBuffer = new uint8_t[info->bufferSize];
//long time = ConnectionsManager::getInstance(0).getCurrentTimeMonotonicMillis();
Surface surface1((uint32_t *) firstBuffer, (size_t) w, (size_t) h, (size_t) w * 4);
@@ -337,7 +345,7 @@ JNIEXPORT jint Java_org_telegram_ui_Components_RLottieDrawable_getFrame(JNIEnv *
if (!ptr || bitmap == nullptr) {
return 0;
}
-LottieInfo *info = (LottieInfo *) (intptr_t) ptr;
+auto info = (LottieInfo *) (intptr_t) ptr;
int framesPerUpdate = !info->limitFps || info->fps < 60 ? 1 : 2;
int framesAvailableInCache = info->framesAvailableInCache;

View file

@@ -60,7 +60,7 @@ private:
std::shared_ptr<LOTModel> mModel;
std::unique_ptr<LOTCompItem> mCompItem;
SharedRenderTask mTask;
-std::atomic<bool> mRenderInProgress;
+std::atomic<bool> mRenderInProgress{false};
};
void AnimationImpl::setValue(const std::string &keypath, LOTVariant &&value)
@@ -141,9 +141,7 @@ std::unique_ptr<Animation> Animation::loadFromData(
animation->d->init(loader.model());
return animation;
}
-if (colorReplacement != nullptr) {
-delete colorReplacement;
-}
+delete colorReplacement;
return nullptr;
}
@@ -161,9 +159,7 @@ std::unique_ptr<Animation> Animation::loadFromFile(const std::string &path, std:
animation->d->init(loader.model());
return animation;
}
-if (colorReplacement != nullptr) {
-delete colorReplacement;
-}
+delete colorReplacement;
return nullptr;
}

View file

@@ -679,63 +679,69 @@ static void
gray_render_conic( RAS_ARG_ const SW_FT_Vector* control,
const SW_FT_Vector* to )
{
-SW_FT_Vector bez_stack[16 * 2 + 1]; /* enough to accommodate bisections */
-SW_FT_Vector* arc = bez_stack;
-TPos dx, dy;
-int draw, split;
-arc[0].x = UPSCALE( to->x );
-arc[0].y = UPSCALE( to->y );
-arc[1].x = UPSCALE( control->x );
-arc[1].y = UPSCALE( control->y );
-arc[2].x = ras.x;
-arc[2].y = ras.y;
-/* short-cut the arc that crosses the current band */
-if ( ( TRUNC( arc[0].y ) >= ras.max_ey &&
-TRUNC( arc[1].y ) >= ras.max_ey &&
-TRUNC( arc[2].y ) >= ras.max_ey ) ||
-( TRUNC( arc[0].y ) < ras.min_ey &&
-TRUNC( arc[1].y ) < ras.min_ey &&
-TRUNC( arc[2].y ) < ras.min_ey ) )
-{
-ras.x = arc[0].x;
-ras.y = arc[0].y;
-return;
-}
-dx = SW_FT_ABS( arc[2].x + arc[0].x - 2 * arc[1].x );
-dy = SW_FT_ABS( arc[2].y + arc[0].y - 2 * arc[1].y );
-if ( dx < dy )
-dx = dy;
-/* We can calculate the number of necessary bisections because */
-/* each bisection predictably reduces deviation exactly 4-fold. */
-/* Even 32-bit deviation would vanish after 16 bisections. */
-draw = 1;
-while ( dx > ONE_PIXEL / 4 )
-{
-dx >>= 2;
-draw <<= 1;
-}
-/* We use decrement counter to count the total number of segments */
-/* to draw starting from 2^level. Before each draw we split as */
-/* many times as there are trailing zeros in the counter. */
-do
-{
-split = draw & ( -draw ); /* isolate the rightmost 1-bit */
-while ( ( split >>= 1 ) )
-{
-gray_split_conic( arc );
-arc += 2;
-}
-gray_render_line( RAS_VAR_ arc[0].x, arc[0].y );
-arc -= 2;
-} while ( --draw );
+TPos dx, dy;
+TPos min, max, y;
+int top, level;
+int* levels;
+SW_FT_Vector* arc;
+levels = ras.lev_stack;
+arc = ras.bez_stack;
+arc[0].x = UPSCALE(to->x);
+arc[0].y = UPSCALE(to->y);
+arc[1].x = UPSCALE(control->x);
+arc[1].y = UPSCALE(control->y);
+arc[2].x = ras.x;
+arc[2].y = ras.y;
+top = 0;
+dx = SW_FT_ABS(arc[2].x + arc[0].x - 2 * arc[1].x);
+dy = SW_FT_ABS(arc[2].y + arc[0].y - 2 * arc[1].y);
+if (dx < dy) dx = dy;
+if (dx < ONE_PIXEL / 4) goto Draw;
+/* short-cut the arc that crosses the current band */
+min = max = arc[0].y;
+y = arc[1].y;
+if (y < min) min = y;
+if (y > max) max = y;
+y = arc[2].y;
+if (y < min) min = y;
+if (y > max) max = y;
+if (TRUNC(min) >= ras.max_ey || TRUNC(max) < ras.min_ey) goto Draw;
+level = 0;
+do {
+dx >>= 2;
+level++;
+} while (dx > ONE_PIXEL / 4);
+levels[0] = level;
+do {
+level = levels[top];
+if (level > 0) {
+gray_split_conic(arc);
+arc += 2;
+top++;
+if (top + 1 > 32) return;
+levels[top] = levels[top - 1] = level - 1;
+continue;
+}
+Draw:
+gray_render_line(RAS_VAR_ arc[0].x, arc[0].y);
+top--;
+arc -= 2;
+} while (top >= 0);
}
static void
@@ -809,7 +815,7 @@ gray_render_cubic( RAS_ARG_ const SW_FT_Vector* control1,
/* with each split, control points quickly converge towards */
/* chord trisection points and the vanishing distances below */
/* indicate when the segment is flat enough to draw */
-if (num < 0 || num >= count) {
+if (num < 0 || num + 7 >= count) {
return;
}
if ( SW_FT_ABS( 2 * arc[0].x - 3 * arc[1].x + arc[3].x ) > ONE_PIXEL / 2 ||
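The gray_render_conic rewrite drops the old decrement-counter scheme in favor of an explicit level stack (ras.lev_stack): each entry records how many further bisections its sub-arc still needs, and a sub-arc is flushed as a line segment once its level reaches zero. The split depth follows from each bisection cutting the deviation dx by exactly 4; a standalone restatement of that computation (the function name is illustrative):

static int conic_split_depth(long dx, long one_pixel) {
// quarter the deviation once per bisection until it is at most a quarter pixel
int level = 0;
do {
dx >>= 2;
level++;
} while (dx > one_pixel / 4);
return level;
}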

View file

@@ -1172,7 +1172,7 @@ UserProfilePhoto *UserProfilePhoto::TLdeserialize(NativeByteBuffer *stream, uint
case 0x4f11bae1:
result = new TL_userProfilePhotoEmpty();
break;
-case 0xcc656077:
+case 0x82d1f706:
result = new TL_userProfilePhoto();
break;
default:
@@ -1192,8 +1192,6 @@ void TL_userProfilePhoto::readParams(NativeByteBuffer *stream, int32_t instanceN
flags = stream->readInt32(&error);
has_video = (flags & 1) != 0;
photo_id = stream->readInt64(&error);
-photo_small = std::unique_ptr<FileLocation>(FileLocation::TLdeserialize(stream, stream->readUint32(&error), instanceNum, error));
-photo_big = std::unique_ptr<FileLocation>(FileLocation::TLdeserialize(stream, stream->readUint32(&error), instanceNum, error));
if ((flags & 2) != 0) {
stripped_thumb = std::unique_ptr<ByteArray>(stream->readByteArray(&error));
}
@@ -1205,8 +1203,6 @@ void TL_userProfilePhoto::serializeToStream(NativeByteBuffer *stream) {
flags = has_video ? (flags | 1) : (flags &~ 1);
stream->writeInt32(flags);
stream->writeInt64(photo_id);
-photo_small->serializeToStream(stream);
-photo_big->serializeToStream(stream);
if ((flags & 2) != 0) {
stream->writeByteArray(stripped_thumb.get());
}

View file

@@ -255,8 +255,6 @@ public:
int32_t flags;
bool has_video;
int64_t photo_id;
-std::unique_ptr<FileLocation> photo_small;
-std::unique_ptr<FileLocation> photo_big;
std::unique_ptr<ByteArray> stripped_thumb;
int32_t dc_id;
@@ -274,7 +272,7 @@ public:
class TL_userProfilePhoto : public UserProfilePhoto {
public:
-static const uint32_t constructor = 0xcc656077;
+static const uint32_t constructor = 0x82d1f706;
void readParams(NativeByteBuffer *stream, int32_t instanceNum, bool &error);
void serializeToStream(NativeByteBuffer *stream);

View file

@@ -6,6 +6,7 @@
* Copyright Nikolai Kudashov, 2015-2018.
*/
+#include <cassert>
#include <unistd.h>
#include <fcntl.h>
#include <cerrno>

View file

@@ -6,7 +6,8 @@
* Copyright Nikolai Kudashov, 2015-2018.
*/
-#include <stdlib.h>
+#include <cassert>
+#include <cstdlib>
#include <sys/eventfd.h>
#include <unistd.h>
#include <chrono>
@@ -16,7 +17,7 @@
#include <openssl/rand.h>
#include <zlib.h>
#include <string>
-#include <inttypes.h>
+#include <cinttypes>
#include "ConnectionsManager.h"
#include "FileLog.h"
#include "EventObject.h"
@@ -3453,14 +3454,6 @@ void ConnectionsManager::setIpStrategy(uint8_t value) {
});
}
-void ConnectionsManager::setMtProtoVersion(int version) {
-mtProtoVersion = version;
-}
-int32_t ConnectionsManager::getMtProtoVersion() {
-return mtProtoVersion;
-}
int64_t ConnectionsManager::checkProxy(std::string address, uint16_t port, std::string username, std::string password, std::string secret, onRequestTimeFunc requestTimeFunc, jobject ptr1) {
ProxyCheckInfo *proxyCheckInfo = new ProxyCheckInfo();
proxyCheckInfo->address = address;

View file

@@ -71,8 +71,6 @@ public:
void updateDcSettings(uint32_t datacenterId, bool workaround);
void setPushConnectionEnabled(bool value);
void applyDnsConfig(NativeByteBuffer *buffer, std::string phone, int32_t date);
-void setMtProtoVersion(int version);
-int32_t getMtProtoVersion();
int64_t checkProxy(std::string address, uint16_t port, std::string username, std::string password, std::string secret, onRequestTimeFunc requestTimeFunc, jobject ptr1);
#ifdef ANDROID
@@ -227,7 +225,6 @@ private:
int32_t currentUserId = 0;
bool registeredForInternalPush = false;
bool pushConnectionEnabled = true;
-int32_t mtProtoVersion = 2;
std::map<uint32_t, std::vector<std::unique_ptr<NetworkMessage>>> genericMessagesToDatacenters;
std::map<uint32_t, std::vector<std::unique_ptr<NetworkMessage>>> genericMediaMessagesToDatacenters;

View file

@@ -1158,7 +1158,7 @@ NativeByteBuffer *Datacenter::createRequestsData(std::vector<std::unique_ptr<Net
if (pfsInit) {
mtProtoVersion = 1;
} else {
-mtProtoVersion = ConnectionsManager::getInstance(instanceNum).getMtProtoVersion();
+mtProtoVersion = 2;
}
uint32_t messageSize = messageBody->getObjectSize();
uint32_t additionalSize = (32 + messageSize) % 16;
@@ -1235,43 +1235,29 @@ bool Datacenter::decryptServerResponse(int64_t keyId, uint8_t *key, uint8_t *dat
if (authKey == nullptr) {
return false;
}
-bool error = false;
-if (authKeyId != keyId) {
-error = true;
-}
+bool error = authKeyId != keyId;
thread_local static uint8_t messageKey[96];
-int mtProtoVersion = ConnectionsManager::getInstance(instanceNum).getMtProtoVersion();
-generateMessageKey(instanceNum, authKey->bytes, key, messageKey + 32, true, mtProtoVersion);
+generateMessageKey(instanceNum, authKey->bytes, key, messageKey + 32, true, 2);
aesIgeEncryption(data, messageKey + 32, messageKey + 64, false, false, length);
uint32_t messageLength;
memcpy(&messageLength, data + 28, sizeof(uint32_t));
-uint32_t paddingLength = (int32_t) length - (messageLength + 32);
-if (messageLength > length - 32) {
-error = true;
-} else if (paddingLength < 12 || paddingLength > 1024) {
-error = true;
-}
-messageLength += 32;
-if (messageLength > length) {
-messageLength = length;
-}
-switch (mtProtoVersion) {
-case 2: {
-SHA256_Init(&sha256Ctx);
-SHA256_Update(&sha256Ctx, authKey->bytes + 88 + 8, 32);
-SHA256_Update(&sha256Ctx, data, length);
-SHA256_Final(messageKey, &sha256Ctx);
-break;
-}
-default: {
-SHA1(data, messageLength, messageKey + 4);
-break;
-}
-}
-return memcmp(messageKey + 8, key, 16) == 0 && !error;
+uint32_t paddingLength = length - (messageLength + 32);
+error |= (messageLength > length - 32);
+error |= (paddingLength < 12);
+error |= (paddingLength > 1024);
+SHA256_Init(&sha256Ctx);
+SHA256_Update(&sha256Ctx, authKey->bytes + 88 + 8, 32);
+SHA256_Update(&sha256Ctx, data, length);
+SHA256_Final(messageKey, &sha256Ctx);
+for (uint32_t i = 0; i < 16; i++) {
+error |= (messageKey[i + 8] != key[i]);
+}
+return !error;
}
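The simplified decryptServerResponse pins the check to MTProto 2.0: msg_key must equal bytes 8..23 of SHA-256(auth_key fragment + decrypted data), with the fragment taken at offset 88 + 8 for the server-to-client direction. A self-contained restatement of that comparison, assuming the same OpenSSL SHA256_* API the file already uses (the function name is illustrative):

#include <openssl/sha.h>
#include <cstring>
static bool msgKeyMatches(const uint8_t *authKey, const uint8_t *plaintext, size_t length, const uint8_t *msgKey) {
uint8_t digest[SHA256_DIGEST_LENGTH];
SHA256_CTX ctx;
SHA256_Init(&ctx);
SHA256_Update(&ctx, authKey + 88 + 8, 32); // 32-byte auth key fragment, x = 8 for server -> client
SHA256_Update(&ctx, plaintext, length);    // the full decrypted payload, padding included
SHA256_Final(digest, &ctx);
return memcmp(digest + 8, msgKey, 16) == 0; // msg_key is the middle 16 bytes of the hash
}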
bool Datacenter::hasPermanentAuthKey() {

View file

@@ -849,7 +849,7 @@ void Handshake::processHandshakeResponse(TLObject *message, int64_t messageId) {
inner->temp_session_id = connection->getSessionId();
NetworkMessage *networkMessage = new NetworkMessage();
-networkMessage->message = std::unique_ptr<TL_message>(new TL_message());
+networkMessage->message = std::make_unique<TL_message>();
networkMessage->message->msg_id = authKeyPendingMessageId = messageId;
networkMessage->message->bytes = inner->getObjectSize();
networkMessage->message->body = std::unique_ptr<TLObject>(inner);

View file

@@ -47,7 +47,7 @@ set_target_properties(tgvoip PROPERTIES
target_compile_definitions(tgvoip PUBLIC
HAVE_PTHREAD __STDC_LIMIT_MACROS BSD=1 USE_KISS_FFT TGVOIP_NO_VIDEO NULL=0 SOCKLEN_T=socklen_t LOCALE_NOT_USED _LARGEFILE_SOURCE=1 _FILE_OFFSET_BITS=64 restrict= __EMX__ OPUS_BUILD FIXED_POINT USE_ALLOCA HAVE_LRINT HAVE_LRINTF)
target_compile_definitions(tgvoip PUBLIC
-RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux)
+RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux)
target_include_directories(tgvoip PUBLIC
./
voip
@@ -76,6 +76,50 @@ elseif(${ANDROID_ABI} STREQUAL "x86_64")
HAVE_SSE2)
endif()
#rnnoise
add_library(rnnoise STATIC
voip/rnnoise/src/celt_lpc.c
voip/rnnoise/src/denoise.c
voip/rnnoise/src/kiss_fft.c
voip/rnnoise/src/pitch.c
voip/rnnoise/src/rnn_data.c
voip/rnnoise/src/rnn_reader.c
voip/rnnoise/src/rnn.c)
target_compile_options(rnnoise PUBLIC
-Wall -finline-functions -fno-strict-aliasing -O3 -frtti -Wno-unknown-pragmas -funroll-loops -fexceptions -fno-math-errno)
set_target_properties(rnnoise PROPERTIES
ANDROID_ARM_MODE arm)
target_compile_definitions(rnnoise PRIVATE
HAVE_PTHREAD __STDC_LIMIT_MACROS BSD=1 USE_KISS_FFT NULL=0 SOCKLEN_T=socklen_t LOCALE_NOT_USED _LARGEFILE_SOURCE=1 _FILE_OFFSET_BITS=64 restrict= __EMX__ OPUS_BUILD USE_ALLOCA HAVE_LRINT HAVE_LRINTF
_celt_autocorr=rnnoise__celt_autocorr
celt_fir=rnnoise_celt_fir
celt_iir=rnnoise_celt_iir
_celt_lpc=rnnoise__celt_lpc
celt_pitch_xcorr=rnnoise_celt_pitch_xcorr
compute_band_corr=rnnoise_compute_band_corr
compute_band_energy=rnnoise_compute_band_energy
compute_dense=rnnoise_compute_dense
compute_gru=rnnoise_compute_gru
compute_rnn=rnnoise_compute_rnn
interp_band_gain=rnnoise_interp_band_gain
opus_fft_alloc=rnnoise_opus_fft_alloc
opus_fft_alloc_arch_c=rnnoise_opus_fft_alloc_arch_c
opus_fft_alloc_twiddles=rnnoise_opus_fft_alloc_twiddles
opus_fft_c=rnnoise_opus_fft_c
opus_fft_free=rnnoise_opus_fft_free
opus_fft_free_arch_c=rnnoise_opus_fft_free_arch_c
opus_fft_impl=rnnoise_opus_fft_impl
opus_ifft_c=rnnoise_opus_ifft_c
pitch_downsample=rnnoise_pitch_downsample
pitch_filter=rnnoise_pitch_filter
pitch_search=rnnoise_pitch_search
remove_doubling=rnnoise_remove_doubling)
target_compile_definitions(rnnoise PUBLIC
RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux)
target_include_directories(rnnoise PUBLIC
voip/rnnoise/include)
#tgcalls_tp
add_library(tgcalls_tp STATIC
third_party/rnnoise/src/rnn_vad_weights.cc
@@ -427,7 +471,7 @@ target_compile_options(tgcalls_tp PUBLIC
set_target_properties(tgcalls_tp PROPERTIES
ANDROID_ARM_MODE arm)
target_compile_definitions(tgcalls_tp PUBLIC
-HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux HAVE_WEBRTC_VIDEO __ANDROID__)
+RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE __Userspace__ SCTP_SIMPLE_ALLOCATOR SCTP_PROCESS_LEVEL_LOCKS __Userspace_os_Linux HAVE_WEBRTC_VIDEO __ANDROID__)
target_include_directories(tgcalls_tp PUBLIC
./
voip
@@ -738,15 +782,18 @@ add_library(tgcalls STATIC
voip/tgcalls/ThreadLocalObject.cpp
voip/tgcalls/VideoCaptureInterface.cpp
voip/tgcalls/VideoCaptureInterfaceImpl.cpp
+voip/tgcalls/JsonConfig.cpp
voip/tgcalls/AudioDeviceHelper.cpp
voip/tgcalls/reference/InstanceImplReference.cpp
voip/tgcalls/legacy/InstanceImplLegacy.cpp
voip/tgcalls/group/GroupNetworkManager.cpp
voip/tgcalls/group/GroupInstanceCustomImpl.cpp
+voip/tgcalls/group/GroupJoinPayloadInternal.cpp
voip/tgcalls/group/StreamingPart.cpp
+voip/tgcalls/third-party/json11.cpp
voip/webrtc/rtc_base/async_invoker.cc
+voip/webrtc/rtc_base/system_time.cc
+voip/webrtc/rtc_base/async_resolver.cc
voip/webrtc/rtc_base/async_packet_socket.cc
voip/webrtc/rtc_base/async_resolver_interface.cc
voip/webrtc/rtc_base/async_socket.cc
@@ -762,9 +809,12 @@ add_library(tgcalls STATIC
voip/webrtc/rtc_base/data_rate_limiter.cc
voip/webrtc/rtc_base/event.cc
voip/webrtc/rtc_base/event_tracer.cc
+voip/webrtc/rtc_base/boringssl_certificate.cc
+voip/webrtc/rtc_base/boringssl_identity.cc
voip/webrtc/rtc_base/experiments/alr_experiment.cc
voip/webrtc/rtc_base/experiments/balanced_degradation_settings.cc
voip/webrtc/rtc_base/experiments/cpu_speed_experiment.cc
+voip/webrtc/rtc_base/experiments/encoder_info_settings.cc
voip/webrtc/rtc_base/experiments/field_trial_list.cc
voip/webrtc/rtc_base/experiments/field_trial_parser.cc
voip/webrtc/rtc_base/experiments/field_trial_units.cc
@@ -812,6 +862,7 @@ add_library(tgcalls STATIC
voip/webrtc/rtc_base/openssl_certificate.cc
voip/webrtc/rtc_base/openssl_digest.cc
voip/webrtc/rtc_base/openssl_identity.cc
+voip/webrtc/rtc_base/openssl_key_pair.cc
voip/webrtc/rtc_base/openssl_session_cache.cc
voip/webrtc/rtc_base/openssl_stream_adapter.cc
voip/webrtc/rtc_base/openssl_utility.cc
@@ -845,7 +896,7 @@ add_library(tgcalls STATIC
voip/webrtc/rtc_base/strings/string_format.cc
voip/webrtc/rtc_base/synchronization/mutex.cc
voip/webrtc/rtc_base/synchronization/yield.cc
-voip/webrtc/rtc_base/synchronization/sequence_checker.cc
+voip/webrtc/rtc_base/synchronization/sequence_checker_internal.cc
voip/webrtc/rtc_base/synchronization/yield_policy.cc
voip/webrtc/rtc_base/system/file_wrapper.cc
voip/webrtc/rtc_base/system/thread_registry.cc
@@ -866,7 +917,7 @@ add_library(tgcalls STATIC
voip/webrtc/rtc_base/zero_memory.cc
voip/webrtc/rtc_base/callback_list.cc
voip/webrtc/rtc_base/deprecated/recursive_critical_section.cc
-voip/webrtc/rtc_base/deprecated/signal_thread.cc
+voip/webrtc/rtc_base/internal/default_socket_server.cc
voip/webrtc/api/audio/audio_frame.cc
voip/webrtc/api/audio/channel_layout.cc
voip/webrtc/api/audio/echo_canceller3_config.cc
@@ -973,14 +1024,12 @@ add_library(tgcalls STATIC
voip/webrtc/api/video/nv12_buffer.cc
voip/webrtc/api/video/video_source_interface.cc
voip/webrtc/api/video/video_stream_decoder_create.cc
-voip/webrtc/api/video/video_stream_encoder_create.cc
voip/webrtc/api/video/video_timing.cc
voip/webrtc/api/video_codecs/builtin_video_decoder_factory.cc
voip/webrtc/api/video_codecs/builtin_video_encoder_factory.cc
voip/webrtc/api/video_codecs/sdp_video_format.cc
voip/webrtc/api/video_codecs/video_codec.cc
voip/webrtc/api/video_codecs/video_decoder.cc
-voip/webrtc/api/video_codecs/video_decoder_factory.cc
voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc
voip/webrtc/api/video_codecs/video_encoder.cc
voip/webrtc/api/video_codecs/video_encoder_config.cc
@@ -989,11 +1038,12 @@ add_library(tgcalls STATIC
voip/webrtc/api/video_codecs/vp8_temporal_layers.cc
voip/webrtc/api/video_codecs/vp8_temporal_layers_factory.cc
voip/webrtc/api/video_codecs/spatial_layer.cc
+voip/webrtc/api/video_codecs/h264_profile_level_id.cc
+voip/webrtc/api/video_codecs/vp9_profile.cc
voip/webrtc/pc/audio_rtp_receiver.cc
voip/webrtc/pc/audio_track.cc
voip/webrtc/pc/channel.cc
voip/webrtc/pc/channel_manager.cc
-voip/webrtc/pc/composite_rtp_transport.cc
voip/webrtc/pc/data_channel_controller.cc
voip/webrtc/pc/data_channel_utils.cc
voip/webrtc/pc/dtls_srtp_transport.cc
@@ -1021,7 +1071,6 @@ add_library(tgcalls STATIC
voip/webrtc/pc/rtp_media_utils.cc
voip/webrtc/pc/rtp_parameters_conversion.cc
voip/webrtc/pc/rtp_receiver.cc
-voip/webrtc/pc/rtp_data_channel.cc
voip/webrtc/pc/rtp_sender.cc
voip/webrtc/pc/rtp_transceiver.cc
voip/webrtc/pc/rtp_transport.cc
@@ -1051,6 +1100,7 @@ add_library(tgcalls STATIC
voip/webrtc/pc/sdp_offer_answer.cc
voip/webrtc/pc/transceiver_list.cc
voip/webrtc/pc/usage_pattern.cc
+voip/webrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc
voip/webrtc/media/base/adapted_video_track_source.cc
voip/webrtc/media/base/codec.cc
voip/webrtc/media/base/h264_profile_level_id.cc
@@ -1058,18 +1108,15 @@ add_library(tgcalls STATIC
voip/webrtc/media/base/media_constants.cc
voip/webrtc/media/base/media_engine.cc
voip/webrtc/media/base/rid_description.cc
-voip/webrtc/media/base/rtp_data_engine.cc
voip/webrtc/media/base/rtp_utils.cc
-voip/webrtc/media/base/sdp_fmtp_utils.cc
+voip/webrtc/media/base/sdp_video_format_utils.cc
voip/webrtc/media/base/stream_params.cc
voip/webrtc/media/base/turn_utils.cc
voip/webrtc/media/base/video_adapter.cc
voip/webrtc/media/base/video_broadcaster.cc
voip/webrtc/media/base/video_common.cc
voip/webrtc/media/base/video_source_base.cc
-voip/webrtc/media/base/vp9_profile.cc
voip/webrtc/media/engine/adm_helpers.cc
-voip/webrtc/media/engine/constants.cc
voip/webrtc/media/engine/encoder_simulcast_proxy.cc
voip/webrtc/media/engine/internal_decoder_factory.cc
voip/webrtc/media/engine/internal_encoder_factory.cc
@@ -1082,8 +1129,9 @@ add_library(tgcalls STATIC
voip/webrtc/media/engine/webrtc_media_engine_defaults.cc
voip/webrtc/media/engine/webrtc_video_engine.cc
voip/webrtc/media/engine/webrtc_voice_engine.cc
-voip/webrtc/media/sctp/noop.cc
-voip/webrtc/media/sctp/sctp_transport.cc
+voip/webrtc/media/sctp/dcsctp_transport.cc
+voip/webrtc/media/sctp/sctp_transport_factory.cc
+voip/webrtc/media/sctp/usrsctp_transport.cc
voip/webrtc/system_wrappers/source/clock.cc
voip/webrtc/system_wrappers/source/cpu_features.cc
voip/webrtc/system_wrappers/source/cpu_info.cc
@@ -1376,9 +1424,9 @@ add_library(tgcalls STATIC
voip/webrtc/modules/audio_processing/agc/loudness_histogram.cc
voip/webrtc/modules/audio_processing/agc/utility.cc
voip/webrtc/modules/audio_processing/agc2/adaptive_agc.cc
+voip/webrtc/modules/audio_processing/agc2/cpu_features.cc
voip/webrtc/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc
voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc
-voip/webrtc/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc
voip/webrtc/modules/audio_processing/agc2/agc2_testing_common.cc
voip/webrtc/modules/audio_processing/agc2/biquad_filter.cc
voip/webrtc/modules/audio_processing/agc2/compute_interpolated_gain_curve.cc voip/webrtc/modules/audio_processing/agc2/compute_interpolated_gain_curve.cc
@ -1394,8 +1442,8 @@ add_library(tgcalls STATIC
voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc voip/webrtc/modules/audio_processing/agc2/signal_classifier.cc
voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc voip/webrtc/modules/audio_processing/agc2/vad_with_level.cc
voip/webrtc/modules/audio_processing/agc2/vector_float_frame.cc voip/webrtc/modules/audio_processing/agc2/vector_float_frame.cc
voip/webrtc/modules/audio_processing/agc2/saturation_protector_buffer.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/common.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/features_extraction.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/lp_residual.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/pitch_search.cc
@ -1403,6 +1451,8 @@ add_library(tgcalls STATIC
voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc voip/webrtc/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc
voip/webrtc/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc
voip/webrtc/modules/audio_processing/audio_buffer.cc voip/webrtc/modules/audio_processing/audio_buffer.cc
voip/webrtc/modules/audio_processing/audio_processing_impl.cc voip/webrtc/modules/audio_processing/audio_processing_impl.cc
voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc voip/webrtc/modules/audio_processing/audio_processing_builder_impl.cc
@ -1459,6 +1509,9 @@ add_library(tgcalls STATIC
voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc
voip/webrtc/modules/audio_processing/voice_detection.cc voip/webrtc/modules/audio_processing/voice_detection.cc
voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc
voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc
voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc
voip/webrtc/modules/congestion_controller/remb_throttler.cc
voip/webrtc/modules/congestion_controller/pcc/bitrate_controller.cc voip/webrtc/modules/congestion_controller/pcc/bitrate_controller.cc
voip/webrtc/modules/congestion_controller/pcc/monitor_interval.cc voip/webrtc/modules/congestion_controller/pcc/monitor_interval.cc
voip/webrtc/modules/congestion_controller/pcc/pcc_factory.cc voip/webrtc/modules/congestion_controller/pcc/pcc_factory.cc
@ -1483,6 +1536,7 @@ add_library(tgcalls STATIC
voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc voip/webrtc/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc
voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc voip/webrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.cc
voip/webrtc/modules/congestion_controller/goog_cc/inter_arrival_delta.cc
voip/webrtc/modules/pacing/bitrate_prober.cc voip/webrtc/modules/pacing/bitrate_prober.cc
voip/webrtc/modules/pacing/interval_budget.cc voip/webrtc/modules/pacing/interval_budget.cc
voip/webrtc/modules/pacing/paced_sender.cc voip/webrtc/modules/pacing/paced_sender.cc
@ -1492,6 +1546,8 @@ add_library(tgcalls STATIC
voip/webrtc/modules/pacing/task_queue_paced_sender.cc voip/webrtc/modules/pacing/task_queue_paced_sender.cc
voip/webrtc/modules/rtp_rtcp/include/report_block_data.cc voip/webrtc/modules/rtp_rtcp/include/report_block_data.cc
voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.cc voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.cc
voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc
voip/webrtc/modules/rtp_rtcp/source/capture_clock_offset_updater.cc
voip/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.cc voip/webrtc/modules/rtp_rtcp/source/active_decode_targets_helper.cc
voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_receiver.cc
voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_sender.cc voip/webrtc/modules/rtp_rtcp/source/absolute_capture_time_sender.cc
@ -1586,6 +1642,7 @@ add_library(tgcalls STATIC
voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc
voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc voip/webrtc/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc
voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc
voip/webrtc/modules/utility/source/helpers_android.cc voip/webrtc/modules/utility/source/helpers_android.cc
voip/webrtc/modules/utility/source/jvm_android.cc voip/webrtc/modules/utility/source/jvm_android.cc
voip/webrtc/modules/utility/source/process_thread_impl.cc voip/webrtc/modules/utility/source/process_thread_impl.cc
@ -1636,6 +1693,7 @@ add_library(tgcalls STATIC
voip/webrtc/modules/video_coding/timing.cc voip/webrtc/modules/video_coding/timing.cc
voip/webrtc/modules/video_coding/unique_timestamp_counter.cc voip/webrtc/modules/video_coding/unique_timestamp_counter.cc
voip/webrtc/modules/video_coding/utility/decoded_frames_history.cc voip/webrtc/modules/video_coding/utility/decoded_frames_history.cc
voip/webrtc/modules/video_coding/utility/qp_parser.cc
voip/webrtc/modules/video_coding/utility/frame_dropper.cc voip/webrtc/modules/video_coding/utility/frame_dropper.cc
voip/webrtc/modules/video_coding/utility/framerate_controller.cc voip/webrtc/modules/video_coding/utility/framerate_controller.cc
voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc
@ -1650,8 +1708,8 @@ add_library(tgcalls STATIC
voip/webrtc/modules/video_coding/video_coding_impl.cc voip/webrtc/modules/video_coding/video_coding_impl.cc
voip/webrtc/modules/video_coding/video_receiver.cc voip/webrtc/modules/video_coding/video_receiver.cc
voip/webrtc/modules/video_coding/video_receiver2.cc voip/webrtc/modules/video_coding/video_receiver2.cc
voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.cc
voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc voip/webrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
voip/webrtc/modules/video_coding/codecs/vp8/libvpx_interface.cc
voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
voip/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc voip/webrtc/modules/video_coding/codecs/vp8/screenshare_layers.cc
@ -1659,21 +1717,21 @@ add_library(tgcalls STATIC
voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc voip/webrtc/modules/video_coding/codecs/vp9/svc_config.cc
voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc
voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc
voip/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc voip/webrtc/modules/video_coding/svc/create_scalability_structure.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc voip/webrtc/modules/video_coding/svc/scalability_structure_full_svc.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc voip/webrtc/modules/video_coding/svc/scalability_structure_key_svc.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_l1t2.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_l1t3.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_l2t1h.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc voip/webrtc/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_l3t1.cc voip/webrtc/modules/video_coding/svc/scalability_structure_simulcast.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_l3t3.cc
voip/webrtc/modules/video_coding/svc/scalability_structure_s2t1.cc
voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc voip/webrtc/modules/video_coding/svc/scalable_video_controller_no_layering.cc
voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc voip/webrtc/modules/video_coding/svc/svc_rate_allocator.cc
voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc
voip/webrtc/modules/video_coding/rtp_frame_id_only_ref_finder.cc
voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc
voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc
voip/webrtc/modules/video_coding/rtp_generic_ref_finder.cc
voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc
voip/webrtc/modules/video_processing/util/denoiser_filter.cc voip/webrtc/modules/video_processing/util/denoiser_filter.cc
voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc
voip/webrtc/modules/video_processing/util/noise_estimation.cc voip/webrtc/modules/video_processing/util/noise_estimation.cc
@ -1682,6 +1740,7 @@ add_library(tgcalls STATIC
voip/webrtc/call/adaptation/resource_adaptation_processor_interface.cc voip/webrtc/call/adaptation/resource_adaptation_processor_interface.cc
voip/webrtc/call/adaptation/video_source_restrictions.cc voip/webrtc/call/adaptation/video_source_restrictions.cc
voip/webrtc/call/audio_receive_stream.cc voip/webrtc/call/audio_receive_stream.cc
voip/webrtc/call/version.cc
voip/webrtc/call/audio_send_stream.cc voip/webrtc/call/audio_send_stream.cc
voip/webrtc/call/audio_state.cc voip/webrtc/call/audio_state.cc
voip/webrtc/call/bitrate_allocator.cc voip/webrtc/call/bitrate_allocator.cc
@ -1792,7 +1851,6 @@ add_library(tgcalls STATIC
voip/webrtc/p2p/base/ice_controller_interface.cc voip/webrtc/p2p/base/ice_controller_interface.cc
voip/webrtc/p2p/base/ice_credentials_iterator.cc voip/webrtc/p2p/base/ice_credentials_iterator.cc
voip/webrtc/p2p/base/ice_transport_internal.cc voip/webrtc/p2p/base/ice_transport_internal.cc
voip/webrtc/p2p/base/mdns_message.cc
voip/webrtc/p2p/base/p2p_constants.cc voip/webrtc/p2p/base/p2p_constants.cc
voip/webrtc/p2p/base/p2p_transport_channel.cc voip/webrtc/p2p/base/p2p_transport_channel.cc
voip/webrtc/p2p/base/packet_transport_internal.cc voip/webrtc/p2p/base/packet_transport_internal.cc
@ -1857,6 +1915,7 @@ add_library(tgcalls STATIC
voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc
voip/webrtc/video/adaptation/balanced_constraint.cc voip/webrtc/video/adaptation/balanced_constraint.cc
voip/webrtc/video/adaptation/bitrate_constraint.cc voip/webrtc/video/adaptation/bitrate_constraint.cc
voip/webrtc/video/adaptation/pixel_limit_resource.cc
voip/webrtc/video/buffered_frame_decryptor.cc voip/webrtc/video/buffered_frame_decryptor.cc
voip/webrtc/video/call_stats.cc voip/webrtc/video/call_stats.cc
voip/webrtc/video/encoder_bitrate_adjuster.cc voip/webrtc/video/encoder_bitrate_adjuster.cc
@ -1916,6 +1975,7 @@ add_library(tgcalls STATIC
voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc voip/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
voip/webrtc/modules/remote_bitrate_estimator/packet_arrival_map.cc
voip/webrtc/sdk/media_constraints.cc voip/webrtc/sdk/media_constraints.cc
voip/webrtc/stats/rtc_stats_report.cc voip/webrtc/stats/rtc_stats_report.cc
voip/webrtc/stats/rtc_stats.cc voip/webrtc/stats/rtc_stats.cc
@ -1939,12 +1999,13 @@ target_compile_options(tgcalls PUBLIC
set_target_properties(tgcalls PROPERTIES set_target_properties(tgcalls PROPERTIES
ANDROID_ARM_MODE arm) ANDROID_ARM_MODE arm)
target_compile_definitions(tgcalls PUBLIC target_compile_definitions(tgcalls PUBLIC
WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO)
target_include_directories(tgcalls PUBLIC target_include_directories(tgcalls PUBLIC
./ ./
voip voip
boringssl/include boringssl/include
voip/tgcalls voip/tgcalls
voip/rnnoise/include
voip/webrtc voip/webrtc
opus/include opus/include
opus/silk opus/silk
@ -2043,6 +2104,8 @@ add_library(voipandroid STATIC
voip/webrtc/sdk/android/native_api/video/wrapper.cc voip/webrtc/sdk/android/native_api/video/wrapper.cc
voip/webrtc/sdk/android/native_api/network_monitor/network_monitor.cc voip/webrtc/sdk/android/native_api/network_monitor/network_monitor.cc
voip/webrtc/sdk/android/src/jni/android_histogram.cc voip/webrtc/sdk/android/src/jni/android_histogram.cc
voip/webrtc/sdk/android/src/jni/av1_codec.cc
voip/webrtc/sdk/android/src/jni/egl_base_10_impl.cc
voip/webrtc/sdk/android/src/jni/android_metrics.cc voip/webrtc/sdk/android/src/jni/android_metrics.cc
voip/webrtc/sdk/android/src/jni/android_network_monitor.cc voip/webrtc/sdk/android/src/jni/android_network_monitor.cc
voip/webrtc/sdk/android/src/jni/android_video_track_source.cc voip/webrtc/sdk/android/src/jni/android_video_track_source.cc
@ -2116,7 +2179,7 @@ target_compile_options(voipandroid PUBLIC
set_target_properties(voipandroid PROPERTIES set_target_properties(voipandroid PROPERTIES
ANDROID_ARM_MODE arm) ANDROID_ARM_MODE arm)
target_compile_definitions(voipandroid PUBLIC target_compile_definitions(voipandroid PUBLIC
WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO) RTC_DISABLE_TRACE_EVENTS WEBRTC_OPUS_SUPPORT_120MS_PTIME=1 BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0 ABSL_ALLOCATOR_NOTHROW=1 WEBRTC_NS_FLOAT HAVE_PTHREAD RTC_ENABLE_VP9 WEBRTC_POSIX WEBRTC_LINUX WEBRTC_ANDROID NDEBUG WEBRTC_HAVE_USRSCTP WEBRTC_HAVE_SCTP WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_USE_BUILTIN_ISAC_FLOAT WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 HAVE_NETINET_IN_H WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE HAVE_WEBRTC_VIDEO)
target_include_directories(voipandroid PUBLIC target_include_directories(voipandroid PUBLIC
./ ./
voip voip


@ -13,6 +13,8 @@
#include <voip/tgcalls/group/GroupInstanceCustomImpl.h> #include <voip/tgcalls/group/GroupInstanceCustomImpl.h>
#include <memory> #include <memory>
#include <utility>
#include <map>
#include "pc/video_track.h" #include "pc/video_track.h"
#include "legacy/InstanceImplLegacy.h" #include "legacy/InstanceImplLegacy.h"
@ -23,10 +25,6 @@
#include "libtgvoip/os/android/JNIUtilities.h" #include "libtgvoip/os/android/JNIUtilities.h"
#include "tgcalls/VideoCaptureInterface.h" #include "tgcalls/VideoCaptureInterface.h"
#include "rapidjson/document.h"
#include "rapidjson/stringbuffer.h"
#include "rapidjson/writer.h"
using namespace tgcalls; using namespace tgcalls;
const auto RegisterTag = Register<InstanceImpl>(); const auto RegisterTag = Register<InstanceImpl>();
@ -39,13 +37,48 @@ jclass FinalStateClass;
jclass NativeInstanceClass; jclass NativeInstanceClass;
jmethodID FinalStateInitMethod; jmethodID FinalStateInitMethod;
class RequestMediaChannelDescriptionTaskJava : public RequestMediaChannelDescriptionTask {
public:
RequestMediaChannelDescriptionTaskJava(std::shared_ptr<PlatformContext> platformContext,
std::function<void(std::vector<MediaChannelDescription> &&)> callback) :
_platformContext(std::move(platformContext)),
_callback(std::move(callback)) {
}
void call(JNIEnv *env, jintArray audioSsrcs) {
std::vector<MediaChannelDescription> descriptions;
jint *ssrcsArr = env->GetIntArrayElements(audioSsrcs, nullptr);
jsize size = env->GetArrayLength(audioSsrcs);
for (int i = 0; i < size; i++) {
MediaChannelDescription description;
description.type = MediaChannelDescription::Type::Audio;
description.audioSsrc = ssrcsArr[i];
descriptions.push_back(description);
}
env->ReleaseIntArrayElements(audioSsrcs, ssrcsArr, JNI_ABORT);
_callback(std::move(descriptions));
}
private:
void cancel() override {
/*tgvoip::jni::DoWithJNI([&](JNIEnv *env) {
jobject globalRef = ((AndroidContext *) _platformContext.get())->getJavaInstance();
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onCancelRequestMediaChannelDescription", "(J)V"), _timestamp);
});*/
}
std::shared_ptr<PlatformContext> _platformContext;
std::function<void(std::vector<MediaChannelDescription> &&)> _callback;
};
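// A sketch of the round trip implied above (see onMediaDescriptionAvailable
// further down): the task's raw pointer travels to Java as a jlong through
// onParticipantDescriptionsRequired(J[I)V, Java resolves the ssrcs, then calls
// back into native code, which recovers the task and completes it:
//
//   auto task = reinterpret_cast<RequestMediaChannelDescriptionTaskJava *>(taskPtr);
//   task->call(env, ssrcs);
//
// The task outlives the trip because AndroidContext::descriptionTasks holds a
// shared_ptr to it until the answer comes back.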
class BroadcastPartTaskJava : public BroadcastPartTask { class BroadcastPartTaskJava : public BroadcastPartTask {
public: public:
BroadcastPartTaskJava(std::shared_ptr<PlatformContext> platformContext, BroadcastPartTaskJava(std::shared_ptr<PlatformContext> platformContext,
std::function<void(BroadcastPart &&)> callback, std::function<void(BroadcastPart &&)> callback,
int64_t timestamp) : int64_t timestamp) :
_platformContext(platformContext), _platformContext(std::move(platformContext)),
_callback(callback), _callback(std::move(callback)),
_timestamp(timestamp) { _timestamp(timestamp) {
} }
@ -71,7 +104,6 @@ private:
}); });
} }
private:
std::shared_ptr<PlatformContext> _platformContext; std::shared_ptr<PlatformContext> _platformContext;
std::function<void(BroadcastPart &&)> _callback; std::function<void(BroadcastPart &&)> _callback;
int64_t _timestamp; int64_t _timestamp;
@ -113,9 +145,24 @@ public:
return (jbyteArray) env->GetObjectField(obj, env->GetFieldID(clazz, name, "[B")); return (jbyteArray) env->GetObjectField(obj, env->GetFieldID(clazz, name, "[B"));
} }
jintArray getIntArrayField(const char *name) {
return (jintArray) env->GetObjectField(obj, env->GetFieldID(clazz, name, "[I"));
}
jstring getStringField(const char *name) { jstring getStringField(const char *name) {
return (jstring) env->GetObjectField(obj, env->GetFieldID(clazz, name, "Ljava/lang/String;")); return (jstring) env->GetObjectField(obj, env->GetFieldID(clazz, name, "Ljava/lang/String;"));
} }
jobjectArray getObjectArrayField(const char *name) {
return (jobjectArray) env->GetObjectField(obj, env->GetFieldID(clazz, name, "[Ljava/lang/Object;"));
}
};
struct SetVideoSink {
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink;
VideoChannelDescription::Quality quality;
std::string endpointId;
std::vector<MediaSsrcGroup> ssrcGroups;
}; };
struct InstanceHolder { struct InstanceHolder {
@ -123,6 +170,7 @@ struct InstanceHolder {
std::unique_ptr<GroupInstanceCustomImpl> groupNativeInstance; std::unique_ptr<GroupInstanceCustomImpl> groupNativeInstance;
std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture; std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
std::shared_ptr<PlatformContext> _platformContext; std::shared_ptr<PlatformContext> _platformContext;
std::map<std::string, SetVideoSink> remoteGroupSinks;
}; };
jlong getInstanceHolderId(JNIEnv *env, jobject obj) { jlong getInstanceHolderId(JNIEnv *env, jobject obj) {
@ -262,7 +310,7 @@ jobject asJavaFinalState(JNIEnv *env, const FinalState &finalState) {
return env->NewObject(FinalStateClass, FinalStateInitMethod, persistentState, debugLog, trafficStats, isRatingSuggested); return env->NewObject(FinalStateClass, FinalStateInitMethod, persistentState, debugLog, trafficStats, isRatingSuggested);
} }
jobject asJavaFingerprint(JNIEnv *env, std::string hash, std::string setup, std::string fingerprint) { jobject asJavaFingerprint(JNIEnv *env, const std::string& hash, const std::string& setup, const std::string& fingerprint) {
jstring hashStr = env->NewStringUTF(hash.c_str()); jstring hashStr = env->NewStringUTF(hash.c_str());
jstring setupStr = env->NewStringUTF(setup.c_str()); jstring setupStr = env->NewStringUTF(setup.c_str());
jstring fingerprintStr = env->NewStringUTF(fingerprint.c_str()); jstring fingerprintStr = env->NewStringUTF(fingerprint.c_str());
@ -292,16 +340,24 @@ void initWebRTC(JNIEnv *env) {
FinalStateInitMethod = env->GetMethodID(FinalStateClass, "<init>", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V"); FinalStateInitMethod = env->GetMethodID(FinalStateClass, "<init>", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V");
} }
JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGroupNativeInstance(JNIEnv *env, jclass clazz, jobject instanceObj, jstring logFilePath, jboolean highQuality) { JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGroupNativeInstance(JNIEnv *env, jclass clazz, jobject instanceObj, jstring logFilePath, jboolean highQuality, jlong videoCapturer, jboolean screencast, jboolean noiseSuppression) {
initWebRTC(env); initWebRTC(env);
std::shared_ptr<PlatformContext> platformContext = std::make_shared<AndroidContext>(env, instanceObj); std::shared_ptr<VideoCaptureInterface> videoCapture = videoCapturer ? std::shared_ptr<VideoCaptureInterface>(reinterpret_cast<VideoCaptureInterface *>(videoCapturer)) : nullptr;
std::shared_ptr<PlatformContext> platformContext;
if (videoCapture) {
platformContext = videoCapture->getPlatformContext();
((AndroidContext *) platformContext.get())->setJavaInstance(env, instanceObj);
} else {
platformContext = std::make_shared<AndroidContext>(env, instanceObj, screencast);
}
GroupInstanceDescriptor descriptor = { GroupInstanceDescriptor descriptor = {
.threads = StaticThreads::getThreads(), .threads = StaticThreads::getThreads(),
.config = { .config = {
.need_log = false, .need_log = true,
//.logPath = tgvoip::jni::JavaStringToStdString(env, logFilePath), .logPath = {tgvoip::jni::JavaStringToStdString(env, logFilePath)},
}, },
.networkStateUpdated = [platformContext](GroupNetworkState state) { .networkStateUpdated = [platformContext](GroupNetworkState state) {
tgvoip::jni::DoWithJNI([platformContext, state](JNIEnv *env) { tgvoip::jni::DoWithJNI([platformContext, state](JNIEnv *env) {
@ -321,8 +377,8 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGrou
jboolean boolFill[size]; jboolean boolFill[size];
for (int a = 0; a < size; a++) { for (int a = 0; a < size; a++) {
intFill[a] = update.updates[a].ssrc; intFill[a] = update.updates[a].ssrc;
floatFill[a] = update.updates[a].value.level; floatFill[a] = update.updates[a].value.isMuted ? 0 : update.updates[a].value.level;
boolFill[a] = update.updates[a].value.voice; boolFill[a] = !update.updates[a].value.isMuted && update.updates[a].value.voice;
} }
env->SetIntArrayRegion(intArray, 0, size, intFill); env->SetIntArrayRegion(intArray, 0, size, intFill);
env->SetFloatArrayRegion(floatArray, 0, size, floatFill); env->SetFloatArrayRegion(floatArray, 0, size, floatFill);
@ -335,22 +391,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGrou
env->DeleteLocalRef(boolArray); env->DeleteLocalRef(boolArray);
}); });
}, },
.participantDescriptionsRequired = [platformContext](std::vector<uint32_t> const &update) { .videoCapture = videoCapture,
tgvoip::jni::DoWithJNI([platformContext, update](JNIEnv *env) {
unsigned int size = update.size();
jintArray intArray = env->NewIntArray(size);
jint intFill[size];
for (int a = 0; a < size; a++) {
intFill[a] = update[a];
}
env->SetIntArrayRegion(intArray, 0, size, intFill);
jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance();
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onParticipantDescriptionsRequired", "([I)V"), intArray);
env->DeleteLocalRef(intArray);
});
},
.requestBroadcastPart = [](std::shared_ptr<PlatformContext> platformContext, int64_t timestamp, int64_t duration, std::function<void(BroadcastPart &&)> callback) -> std::shared_ptr<BroadcastPartTask> { .requestBroadcastPart = [](std::shared_ptr<PlatformContext> platformContext, int64_t timestamp, int64_t duration, std::function<void(BroadcastPart &&)> callback) -> std::shared_ptr<BroadcastPartTask> {
std::shared_ptr<BroadcastPartTask> task = std::make_shared<BroadcastPartTaskJava>(platformContext, callback, timestamp); std::shared_ptr<BroadcastPartTask> task = std::make_shared<BroadcastPartTaskJava>(platformContext, callback, timestamp);
((AndroidContext *) platformContext.get())->streamTask = task; ((AndroidContext *) platformContext.get())->streamTask = task;
@ -360,61 +401,44 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGrou
}); });
return task; return task;
}, },
.videoContentType = screencast ? VideoContentType::Screencast : VideoContentType::Generic,
.initialEnableNoiseSuppression = (bool) noiseSuppression,
.requestMediaChannelDescriptions = [platformContext](std::vector<uint32_t> const &ssrcs, std::function<void(std::vector<MediaChannelDescription> &&)> callback) -> std::shared_ptr<RequestMediaChannelDescriptionTask> {
std::shared_ptr<RequestMediaChannelDescriptionTaskJava> task = std::make_shared<RequestMediaChannelDescriptionTaskJava>(platformContext, callback);
((AndroidContext *) platformContext.get())->descriptionTasks.push_back(task);
tgvoip::jni::DoWithJNI([platformContext, ssrcs, task](JNIEnv *env) {
unsigned int size = ssrcs.size();
jintArray intArray = env->NewIntArray(size);
jint intFill[size];
for (int a = 0; a < size; a++) {
intFill[a] = ssrcs[a];
}
env->SetIntArrayRegion(intArray, 0, size, intFill);
jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance();
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onParticipantDescriptionsRequired", "(J[I)V"), (jlong) task.get(), intArray);
env->DeleteLocalRef(intArray);
});
return task;
},
.platformContext = platformContext .platformContext = platformContext
}; };
auto *holder = new InstanceHolder; auto *holder = new InstanceHolder;
holder->groupNativeInstance = std::make_unique<GroupInstanceCustomImpl>(std::move(descriptor)); holder->groupNativeInstance = std::make_unique<GroupInstanceCustomImpl>(std::move(descriptor));
holder->_platformContext = platformContext; holder->_platformContext = platformContext;
holder->_videoCapture = videoCapture;
return reinterpret_cast<jlong>(holder); return reinterpret_cast<jlong>(holder);
} }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setJoinResponsePayload(JNIEnv *env, jobject obj, jstring ufrag, jstring pwd, jobjectArray fingerprints, jobjectArray candidates) { JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setJoinResponsePayload(JNIEnv *env, jobject obj, jstring payload) {
InstanceHolder *instance = getInstanceHolder(env, obj); InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->groupNativeInstance == nullptr) { if (instance->groupNativeInstance == nullptr) {
return; return;
} }
std::vector<GroupJoinPayloadFingerprint> fingerprintsArray;
std::vector<GroupJoinResponseCandidate> candidatesArray;
instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeRtc, true); instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeRtc, true);
jsize size = env->GetArrayLength(fingerprints); instance->groupNativeInstance->setJoinResponsePayload(tgvoip::jni::JavaStringToStdString(env, payload));
for (int i = 0; i < size; i++) {
JavaObject fingerprintObject(env, env->GetObjectArrayElement(fingerprints, i));
fingerprintsArray.push_back(
{
.hash = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("hash")),
.setup = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("setup")),
.fingerprint = tgvoip::jni::JavaStringToStdString(env, fingerprintObject.getStringField("fingerprint"))
});
}
size = env->GetArrayLength(candidates);
for (int i = 0; i < size; i++) {
JavaObject candidateObject(env, env->GetObjectArrayElement(candidates, i));
candidatesArray.push_back(
{
.port = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("port")),
.protocol = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("protocol")),
.network = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("network")),
.generation = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("generation")),
.id = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("id")),
.component = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("component")),
.foundation = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("foundation")),
.priority = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("priority")),
.ip = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("ip")),
.type = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("type")),
.tcpType = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("tcpType")),
.relAddr = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("relAddr")),
.relPort = tgvoip::jni::JavaStringToStdString(env, candidateObject.getStringField("relPort")),
});
}
std::vector<tgcalls::GroupParticipantDescription> participants;
instance->groupNativeInstance->setJoinResponsePayload(
{
.ufrag = tgvoip::jni::JavaStringToStdString(env, ufrag),
.pwd = tgvoip::jni::JavaStringToStdString(env, pwd),
.fingerprints = fingerprintsArray,
.candidates = candidatesArray,
}, std::move(participants));
} }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_prepareForStream(JNIEnv *env, jobject obj) { JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_prepareForStream(JNIEnv *env, jobject obj) {
@ -425,86 +449,108 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_prepareFo
instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeBroadcast, true); instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeBroadcast, true);
} }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_resetGroupInstance(JNIEnv *env, jobject obj, jboolean disconnect) { void onEmitJoinPayload(const std::shared_ptr<PlatformContext>& platformContext, const GroupJoinPayload& payload) {
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance();
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onEmitJoinPayload", "(Ljava/lang/String;I)V"), env->NewStringUTF(payload.json.c_str()), (jint) payload.audioSsrc);
}
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_resetGroupInstance(JNIEnv *env, jobject obj, jboolean set, jboolean disconnect) {
InstanceHolder *instance = getInstanceHolder(env, obj); InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->groupNativeInstance == nullptr) { if (instance->groupNativeInstance == nullptr) {
return; return;
} }
instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeNone, !disconnect); if (set) {
instance->groupNativeInstance->setConnectionMode(GroupConnectionMode::GroupConnectionModeNone, !disconnect);
}
std::shared_ptr<PlatformContext> platformContext = instance->_platformContext; std::shared_ptr<PlatformContext> platformContext = instance->_platformContext;
instance->groupNativeInstance->emitJoinPayload([platformContext](const GroupJoinPayload& payload) { instance->groupNativeInstance->emitJoinPayload([platformContext](const GroupJoinPayload& payload) {
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); onEmitJoinPayload(platformContext, payload);
jobjectArray array = env->NewObjectArray(payload.fingerprints.size(), FingerprintClass, 0);
for (int a = 0; a < payload.fingerprints.size(); a++) {
env->SetObjectArrayElement(array, a, asJavaFingerprint(env, payload.fingerprints[a].hash, payload.fingerprints[a].setup, payload.fingerprints[a].fingerprint));
}
jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance();
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onEmitJoinPayload", "(Ljava/lang/String;Ljava/lang/String;[Lorg/telegram/messenger/voip/Instance$Fingerprint;I)V"), env->NewStringUTF(payload.ufrag.c_str()), env->NewStringUTF(payload.pwd.c_str()), array, (jint) payload.ssrc);
}); });
} }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_addParticipants(JNIEnv *env, jobject obj, jintArray ssrcs, jobjectArray array) { void broadcastRequestedSinks(InstanceHolder *instance) {
std::vector<VideoChannelDescription> descriptions;
for (auto & remoteGroupSink : instance->remoteGroupSinks) {
VideoChannelDescription description;
description.endpointId = remoteGroupSink.second.endpointId;
description.ssrcGroups = remoteGroupSink.second.ssrcGroups;
description.maxQuality = remoteGroupSink.second.quality;
descriptions.push_back(std::move(description));
}
instance->groupNativeInstance->setRequestedVideoChannels(std::move(descriptions));
}
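// Design note: sink changes are not diffed. addIncomingVideoOutput,
// removeIncomingVideoOutput and setVideoEndpointQuality all rebuild the full
// requested-channel list from remoteGroupSinks and hand the complete set to
// setRequestedVideoChannels() again, so the group instance always sees the
// whole desired state.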
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setNoiseSuppressionEnabled(JNIEnv *env, jobject obj, jboolean enabled) {
InstanceHolder *instance = getInstanceHolder(env, obj); InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->groupNativeInstance == nullptr) { if (instance->groupNativeInstance == nullptr) {
return; return;
} }
instance->groupNativeInstance->setIsNoiseSuppressionEnabled(enabled);
}
rapidjson::Document d;
jint *ssrcsArr = env->GetIntArrayElements(ssrcs, 0); JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_addIncomingVideoOutput(JNIEnv *env, jobject obj, jint quality, jstring endpointId, jobjectArray ssrcGroups, jobject remoteSink) {
InstanceHolder *instance = getInstanceHolder(env, obj);
jsize size = env->GetArrayLength(array); if (instance->groupNativeInstance == nullptr) {
std::vector<tgcalls::GroupParticipantDescription> participants; return 0;
for (int i = 0; i < size; i++) { }
GroupParticipantDescription participantDescription; SetVideoSink sink;
participantDescription.audioSsrc = ssrcsArr[i]; std::string endpointIdStr = tgvoip::jni::JavaStringToStdString(env, endpointId);
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> ptr = webrtc::JavaToNativeVideoSink(env, remoteSink);
jstring str = (jstring) env->GetObjectArrayElement(array, i); sink.sink = ptr;
if (str != nullptr) { sink.endpointId = endpointIdStr;
std::string json = tgvoip::jni::JavaStringToStdString(env, str); if (ssrcGroups) {
d.Parse(json); for (int i = 0, size = env->GetArrayLength(ssrcGroups); i < size; i++) {
participantDescription.endpointId = d["endpoint"].GetString(); JavaObject javaObject(env, env->GetObjectArrayElement(ssrcGroups, i));
MediaSsrcGroup ssrcGroup;
for (const auto &group : d["ssrc-groups"].GetArray()) { ssrcGroup.semantics = tgvoip::jni::JavaStringToStdString(env, javaObject.getStringField("semantics"));
tgcalls::GroupJoinPayloadVideoSourceGroup groupDesc; jintArray ssrcsArray = javaObject.getIntArrayField("ssrcs");
groupDesc.semantics = group["semantics"].GetString(); jint *elements = env->GetIntArrayElements(ssrcsArray, nullptr);
for (const auto &source : group["sources"].GetArray()) { for (int j = 0, size2 = env->GetArrayLength(ssrcsArray); j < size2; j++) {
groupDesc.ssrcs.push_back(source.GetUint()); ssrcGroup.ssrcs.push_back(elements[j]);
}
participantDescription.videoSourceGroups.push_back(std::move(groupDesc));
} }
env->ReleaseIntArrayElements(ssrcsArray, elements, JNI_ABORT);
sink.ssrcGroups.push_back(std::move(ssrcGroup));
}
}
sink.quality = (VideoChannelDescription::Quality) quality;
instance->remoteGroupSinks[endpointIdStr] = std::move(sink);
broadcastRequestedSinks(instance);
instance->groupNativeInstance->addIncomingVideoOutput(endpointIdStr, ptr);
return reinterpret_cast<intptr_t>(ptr.get());
}
for (const auto &extDict : d["rtp-hdrexts"].GetArray()) { JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_removeIncomingVideoOutput(JNIEnv *env, jobject obj, jlong nativeRemoteSink) {
participantDescription.videoExtensionMap.emplace_back(extDict["id"].GetUint(), extDict["uri"].GetString()); InstanceHolder *instance = getInstanceHolder(env, obj);
} if (instance->groupNativeInstance == nullptr) {
return;
for (const auto &payload : d["payload-types"].GetArray()) { }
tgcalls::GroupJoinPayloadVideoPayloadType parsedPayload; if (nativeRemoteSink == 0) {
parsedPayload.id = payload["id"].GetUint(); instance->remoteGroupSinks.clear();
parsedPayload.clockrate = payload["clockrate"].GetUint(); } else {
parsedPayload.channels = payload["channels"].GetUint(); for (auto iter = instance->remoteGroupSinks.begin(); iter != instance->remoteGroupSinks.end(); iter++) {
parsedPayload.name = payload["name"].GetString(); if (reinterpret_cast<intptr_t>(iter->second.sink.get()) == nativeRemoteSink) {
instance->remoteGroupSinks.erase(iter);
for (const auto &fb : payload["rtcp-fbs"].GetArray()) { break;
tgcalls::GroupJoinPayloadVideoPayloadFeedbackType parsedFeedback;
parsedFeedback.type = fb["type"].GetString();
if (fb.HasMember("subtype")) {
parsedFeedback.subtype = fb["subtype"].GetString();
}
parsedPayload.feedbackTypes.push_back(std::move(parsedFeedback));
}
for (const auto &fb : payload["parameters"].GetObject()) {
parsedPayload.parameters.emplace_back(fb.name.GetString(), fb.value.GetString());
}
participantDescription.videoPayloadTypes.push_back(std::move(parsedPayload));
} }
} }
participants.push_back(std::move(participantDescription));
} }
env->ReleaseIntArrayElements(ssrcs, ssrcsArr, JNI_ABORT); broadcastRequestedSinks(instance);
instance->groupNativeInstance->addParticipants(std::move(participants)); }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setVideoEndpointQuality(JNIEnv *env, jobject obj, jstring endpointId, jint quality) {
InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->groupNativeInstance == nullptr) {
return;
}
broadcastRequestedSinks(instance);
auto sink = instance->remoteGroupSinks.find(tgvoip::jni::JavaStringToStdString(env, endpointId));
if (sink == instance->remoteGroupSinks.end()) {
return;
}
sink->second.quality = (VideoChannelDescription::Quality) quality;
broadcastRequestedSinks(instance);
} }
JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNativeInstance(JNIEnv *env, jclass clazz, jstring version, jobject instanceObj, jobject config, jstring persistentStateFilePath, jobjectArray endpoints, jobject proxyClass, jint networkType, jobject encryptionKey, jobject remoteSink, jlong videoCapturer, jfloat aspectRatio) { JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNativeInstance(JNIEnv *env, jclass clazz, jstring version, jobject instanceObj, jobject config, jstring persistentStateFilePath, jobjectArray endpoints, jobject proxyClass, jint networkType, jobject encryptionKey, jobject remoteSink, jlong videoCapturer, jfloat aspectRatio) {
@ -527,7 +573,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
platformContext = videoCapture->getPlatformContext(); platformContext = videoCapture->getPlatformContext();
((AndroidContext *) platformContext.get())->setJavaInstance(env, instanceObj); ((AndroidContext *) platformContext.get())->setJavaInstance(env, instanceObj);
} else { } else {
platformContext = std::make_shared<AndroidContext>(env, instanceObj); platformContext = std::make_shared<AndroidContext>(env, instanceObj, false);
} }
Descriptor descriptor = { Descriptor descriptor = {
@ -541,8 +587,8 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
.enableNS = configObject.getBooleanField("enableNs") == JNI_TRUE, .enableNS = configObject.getBooleanField("enableNs") == JNI_TRUE,
.enableAGC = configObject.getBooleanField("enableAgc") == JNI_TRUE, .enableAGC = configObject.getBooleanField("enableAgc") == JNI_TRUE,
.enableVolumeControl = true, .enableVolumeControl = true,
.logPath = tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("logPath")), .logPath = {tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("logPath"))},
.statsLogPath = tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("statsLogPath")), .statsLogPath = {tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("statsLogPath"))},
.maxApiLayer = configObject.getIntField("maxApiLayer"), .maxApiLayer = configObject.getIntField("maxApiLayer"),
.enableHighBitrateVideo = true, .enableHighBitrateVideo = true,
.preferredVideoCodecs = {cricket::kVp9CodecName} .preferredVideoCodecs = {cricket::kVp9CodecName}
@ -625,7 +671,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
if (!env->IsSameObject(proxyClass, nullptr)) { if (!env->IsSameObject(proxyClass, nullptr)) {
JavaObject proxyObject(env, proxyClass); JavaObject proxyObject(env, proxyClass);
descriptor.proxy = std::unique_ptr<Proxy>(new Proxy); descriptor.proxy = std::make_unique<Proxy>();
descriptor.proxy->host = tgvoip::jni::JavaStringToStdString(env, proxyObject.getStringField("host")); descriptor.proxy->host = tgvoip::jni::JavaStringToStdString(env, proxyObject.getStringField("host"));
descriptor.proxy->port = static_cast<uint16_t>(proxyObject.getIntField("port")); descriptor.proxy->port = static_cast<uint16_t>(proxyObject.getIntField("port"));
descriptor.proxy->login = tgvoip::jni::JavaStringToStdString(env, proxyObject.getStringField("login")); descriptor.proxy->login = tgvoip::jni::JavaStringToStdString(env, proxyObject.getStringField("login"));
@ -742,7 +788,7 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNativ
if (instance->nativeInstance == nullptr) { if (instance->nativeInstance == nullptr) {
return; return;
} }
instance->nativeInstance->stop([instance](FinalState finalState) { instance->nativeInstance->stop([instance](const FinalState& finalState) {
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
jobject globalRef = ((AndroidContext *) instance->_platformContext.get())->getJavaInstance(); jobject globalRef = ((AndroidContext *) instance->_platformContext.get())->getJavaInstance();
const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, globalRef).getStringField("persistentStateFilePath")); const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, globalRef).getStringField("persistentStateFilePath"));
@ -767,12 +813,12 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onStreamP
if (instance->groupNativeInstance == nullptr) { if (instance->groupNativeInstance == nullptr) {
return; return;
} }
AndroidContext *context = (AndroidContext *) instance->_platformContext.get(); auto context = (AndroidContext *) instance->_platformContext.get();
std::shared_ptr<BroadcastPartTask> streamTask = context->streamTask; std::shared_ptr<BroadcastPartTask> streamTask = context->streamTask;
BroadcastPartTaskJava *task = (BroadcastPartTaskJava *) streamTask.get(); auto task = (BroadcastPartTaskJava *) streamTask.get();
if (task != nullptr) { if (task != nullptr) {
if (byteBuffer != nullptr) { if (byteBuffer != nullptr) {
uint8_t *buf = (uint8_t *) env->GetDirectBufferAddress(byteBuffer); auto buf = (uint8_t *) env->GetDirectBufferAddress(byteBuffer);
task->call(ts, responseTs, BroadcastPart::Status::Success, buf, size); task->call(ts, responseTs, BroadcastPart::Status::Success, buf, size);
} else { } else {
task->call(ts, responseTs, size == 0 ? BroadcastPart::Status::NotReady : BroadcastPart::Status::ResyncNeeded, nullptr, 0); task->call(ts, responseTs, size == 0 ? BroadcastPart::Status::NotReady : BroadcastPart::Status::ResyncNeeded, nullptr, 0);
@ -780,45 +826,79 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onStreamP
} }
} }
JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_createVideoCapturer(JNIEnv *env, jclass clazz, jobject localSink, jboolean front) { JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onMediaDescriptionAvailable(JNIEnv *env, jobject obj, jlong taskPtr, jintArray ssrcs) {
InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->groupNativeInstance == nullptr) {
return;
}
auto task = reinterpret_cast<RequestMediaChannelDescriptionTaskJava *>(taskPtr);
task->call(env, ssrcs);
auto context = (AndroidContext *) instance->_platformContext.get();
for (auto iter = context->descriptionTasks.begin(); iter != context->descriptionTasks.end(); iter++) {
if (reinterpret_cast<intptr_t>(iter->get()) == taskPtr) {
context->descriptionTasks.erase(iter);
break;
}
}
}
JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_createVideoCapturer(JNIEnv *env, jclass clazz, jobject localSink, jint type) {
initWebRTC(env); initWebRTC(env);
std::unique_ptr<VideoCaptureInterface> capture = tgcalls::VideoCaptureInterface::Create(StaticThreads::getThreads(), front ? "front" : "back", std::make_shared<AndroidContext>(env, nullptr)); std::unique_ptr<VideoCaptureInterface> capture;
if (type == 0 || type == 1) {
capture = tgcalls::VideoCaptureInterface::Create(StaticThreads::getThreads(), type == 1 ? "front" : "back", std::make_shared<AndroidContext>(env, nullptr, false));
} else {
capture = tgcalls::VideoCaptureInterface::Create(StaticThreads::getThreads(), "screen", std::make_shared<AndroidContext>(env, nullptr, true));
}
capture->setOutput(webrtc::JavaToNativeVideoSink(env, localSink)); capture->setOutput(webrtc::JavaToNativeVideoSink(env, localSink));
capture->setState(VideoState::Active); capture->setState(VideoState::Active);
return reinterpret_cast<intptr_t>(capture.release()); return reinterpret_cast<intptr_t>(capture.release());
} }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_activateVideoCapturer(JNIEnv *env, jobject obj, jlong videoCapturer) {
InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->nativeInstance) {
instance->nativeInstance->setVideoCapture(nullptr);
} else if (instance->groupNativeInstance) {
instance->groupNativeInstance->setVideoSource(nullptr);
}
auto capturer = reinterpret_cast<VideoCaptureInterface *>(videoCapturer);
capturer->setState(VideoState::Active);
}
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_destroyVideoCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer) { JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_destroyVideoCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer) {
VideoCaptureInterface *capturer = reinterpret_cast<VideoCaptureInterface *>(videoCapturer); auto capturer = reinterpret_cast<VideoCaptureInterface *>(videoCapturer);
delete capturer; delete capturer;
} }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_switchCameraCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer, jboolean front) { JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_switchCameraCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer, jboolean front) {
VideoCaptureInterface *capturer = reinterpret_cast<VideoCaptureInterface *>(videoCapturer); auto capturer = reinterpret_cast<VideoCaptureInterface *>(videoCapturer);
capturer->switchToDevice(front ? "front" : "back"); capturer->switchToDevice(front ? "front" : "back");
} }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setVideoStateCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer, jint videoState) { JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setVideoStateCapturer(JNIEnv *env, jclass clazz, jlong videoCapturer, jint videoState) {
VideoCaptureInterface *capturer = reinterpret_cast<VideoCaptureInterface *>(videoCapturer); auto capturer = reinterpret_cast<VideoCaptureInterface *>(videoCapturer);
capturer->setState(static_cast<VideoState>(videoState)); capturer->setState(static_cast<VideoState>(videoState));
} }
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_switchCamera(JNIEnv *env, jobject obj, jboolean front) { JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_switchCamera(JNIEnv *env, jobject obj, jboolean front) {
InstanceHolder *instance = getInstanceHolder(env, obj); InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->nativeInstance == nullptr) {
return;
}
if (instance->_videoCapture == nullptr) { if (instance->_videoCapture == nullptr) {
return; return;
} }
instance->_videoCapture->switchToDevice(front ? "front" : "back"); instance->_videoCapture->switchToDevice(front ? "front" : "back");
} }
JNIEXPORT jboolean JNICALL Java_org_telegram_messenger_voip_NativeInstance_hasVideoCapturer(JNIEnv *env, jobject obj) {
InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->_videoCapture == nullptr) {
return JNI_FALSE;
}
return JNI_TRUE;
}
JNIEXPORT void Java_org_telegram_messenger_voip_NativeInstance_setVideoState(JNIEnv *env, jobject obj, jint state) { JNIEXPORT void Java_org_telegram_messenger_voip_NativeInstance_setVideoState(JNIEnv *env, jobject obj, jint state) {
InstanceHolder *instance = getInstanceHolder(env, obj); InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->nativeInstance == nullptr) {
return;
}
if (instance->_videoCapture == nullptr) { if (instance->_videoCapture == nullptr) {
return; return;
} }
@ -827,16 +907,33 @@ JNIEXPORT void Java_org_telegram_messenger_voip_NativeInstance_setVideoState(JNI
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setupOutgoingVideo(JNIEnv *env, jobject obj, jobject localSink, jboolean front) {
InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->_videoCapture) {
return;
}
instance->_videoCapture = tgcalls::VideoCaptureInterface::Create(StaticThreads::getThreads(), front ? "front" : "back", instance->_platformContext);
instance->_videoCapture->setOutput(webrtc::JavaToNativeVideoSink(env, localSink));
instance->_videoCapture->setState(VideoState::Active);
if (instance->nativeInstance) {
instance->nativeInstance->setVideoCapture(instance->_videoCapture);
} else if (instance->groupNativeInstance) {
instance->groupNativeInstance->setVideoCapture(instance->_videoCapture);
}
}
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setupOutgoingVideoCreated(JNIEnv *env, jobject obj, jlong videoCapturer) {
if (videoCapturer == 0) {
return;
}
InstanceHolder *instance = getInstanceHolder(env, obj);
if (instance->_videoCapture == nullptr) {
instance->_videoCapture = std::shared_ptr<VideoCaptureInterface>(reinterpret_cast<VideoCaptureInterface *>(videoCapturer));
}
instance->_videoCapture->setState(VideoState::Active);
if (instance->nativeInstance) {
instance->nativeInstance->setVideoCapture(instance->_videoCapture);
} else if (instance->groupNativeInstance) {
instance->groupNativeInstance->setVideoCapture(instance->_videoCapture);
}
}
JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onSignalingDataReceive(JNIEnv *env, jobject obj, jbyteArray value) {
@ -849,7 +946,7 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_onSignali
const size_t size = env->GetArrayLength(value);
auto array = std::vector<uint8_t>(size);
memcpy(&array[0], valueBytes, size);
instance->nativeInstance->receiveSignalingData(array);
env->ReleaseByteArrayElements(value, (jbyte *) valueBytes, JNI_ABORT);
}
View file
@ -0,0 +1 @@
Jean-Marc Valin <jmvalin@jmvalin.ca>
View file
@ -0,0 +1,31 @@
Copyright (c) 2017, Mozilla
Copyright (c) 2007-2017, Jean-Marc Valin
Copyright (c) 2005-2017, Xiph.Org Foundation
Copyright (c) 2003-2004, Mark Borgerding
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.Org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
View file
@ -0,0 +1,21 @@
RNNoise is a noise suppression library based on a recurrent neural network.
To compile, just type:
% ./autogen.sh
% ./configure
% make
Optionally:
% make install
While it is meant to be used as a library, a simple command-line tool is
provided as an example. It operates on RAW 16-bit (machine endian) mono
PCM files sampled at 48 kHz. It can be used as:
./examples/rnnoise_demo <noisy speech> <output denoised>
The output is also a 16-bit raw PCM file.
The latest version of the source is available from
https://gitlab.xiph.org/xiph/rnnoise . The github repository
is a convenience copy.
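A minimal sketch of driving the library directly, mirroring what the demo tool does (the API is declared in rnnoise.h later in this commit; the file names and the absence of error handling are for illustration only):

#include <stdio.h>
#include "rnnoise.h"

int main(void) {
  DenoiseState *st = rnnoise_create(NULL);          /* NULL selects the built-in model */
  const int frame_size = rnnoise_get_frame_size();  /* 480 samples at 48 kHz */
  short pcm[480];                    /* 480 == frame_size for this build */
  float in_buf[480], out_buf[480];
  FILE *fin = fopen("noisy.raw", "rb");
  FILE *fout = fopen("denoised.raw", "wb");
  while (fread(pcm, sizeof(short), frame_size, fin) == (size_t)frame_size) {
    int i;
    for (i = 0; i < frame_size; i++) in_buf[i] = pcm[i];  /* keep the 16-bit scale */
    rnnoise_process_frame(st, out_buf, in_buf);  /* returns a voice probability */
    for (i = 0; i < frame_size; i++) pcm[i] = (short)out_buf[i];
    fwrite(pcm, sizeof(short), frame_size, fout);
  }
  fclose(fin);
  fclose(fout);
  rnnoise_destroy(st);
  return 0;
}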
View file
@ -0,0 +1,114 @@
/* Copyright (c) 2018 Gregor Richards
* Copyright (c) 2017 Mozilla */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef RNNOISE_H
#define RNNOISE_H 1
#include <stdio.h>
#ifdef __cplusplus
extern "C" {
#endif
#ifndef RNNOISE_EXPORT
# if defined(WIN32)
# if defined(RNNOISE_BUILD) && defined(DLL_EXPORT)
# define RNNOISE_EXPORT __declspec(dllexport)
# else
# define RNNOISE_EXPORT
# endif
# elif defined(__GNUC__) && defined(RNNOISE_BUILD)
# define RNNOISE_EXPORT __attribute__ ((visibility ("default")))
# else
# define RNNOISE_EXPORT
# endif
#endif
typedef struct DenoiseState DenoiseState;
typedef struct RNNModel RNNModel;
/**
* Return the size of DenoiseState
*/
RNNOISE_EXPORT int rnnoise_get_size();
/**
* Return the number of samples processed by rnnoise_process_frame at a time
*/
RNNOISE_EXPORT int rnnoise_get_frame_size();
/**
* Initializes a pre-allocated DenoiseState
*
* If model is NULL the default model is used.
*
* See: rnnoise_create() and rnnoise_model_from_file()
*/
RNNOISE_EXPORT int rnnoise_init(DenoiseState *st, RNNModel *model);
/**
* Allocate and initialize a DenoiseState
*
* If model is NULL the default model is used.
*
* The returned pointer MUST be freed with rnnoise_destroy().
*/
RNNOISE_EXPORT DenoiseState *rnnoise_create(RNNModel *model);
/**
* Free a DenoiseState produced by rnnoise_create.
*
* The optional custom model must be freed by rnnoise_model_free() after.
*/
RNNOISE_EXPORT void rnnoise_destroy(DenoiseState *st);
/**
* Denoise a frame of samples
*
* in and out must be at least rnnoise_get_frame_size() large.
*/
RNNOISE_EXPORT float rnnoise_process_frame(DenoiseState *st, float *out, const float *in);
/**
* Load a model from a file
*
* It must be deallocated with rnnoise_model_free()
*/
RNNOISE_EXPORT RNNModel *rnnoise_model_from_file(FILE *f);
/**
* Free a custom model
*
* It must be called after all the DenoiseStates referring to it are freed.
*/
RNNOISE_EXPORT void rnnoise_model_free(RNNModel *model);
#ifdef __cplusplus
}
#endif
#endif
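Read together, the model functions above imply the following ownership pattern; this is a sketch, with "model.bin" as a placeholder path and the early fclose() an assumption that the loader reads the whole file eagerly:

#include <stdio.h>
#include "rnnoise.h"

void denoise_with_custom_model(void) {
  FILE *f = fopen("model.bin", "rb");     /* placeholder path */
  RNNModel *model = rnnoise_model_from_file(f);
  fclose(f);  /* assumption: the loader reads the whole file eagerly */
  DenoiseState *st = rnnoise_create(model);
  /* ... rnnoise_process_frame() calls go here ... */
  rnnoise_destroy(st);        /* free every state first... */
  rnnoise_model_free(model);  /* ...then the model they referred to */
}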
View file
@ -0,0 +1,182 @@
/*Copyright (c) 2003-2004, Mark Borgerding
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.*/
#ifndef KISS_FFT_GUTS_H
#define KISS_FFT_GUTS_H
#define MIN(a,b) ((a)<(b) ? (a):(b))
#define MAX(a,b) ((a)>(b) ? (a):(b))
/* kiss_fft.h
defines kiss_fft_scalar as either short or a float type
and defines
typedef struct { kiss_fft_scalar r; kiss_fft_scalar i; }kiss_fft_cpx; */
#include "kiss_fft.h"
/*
Explanation of macros dealing with complex math:
C_MUL(m,a,b) : m = a*b
C_FIXDIV( c , div ) : if a fixed point impl., c /= div. noop otherwise
C_SUB( res, a,b) : res = a - b
C_SUBFROM( res , a) : res -= a
C_ADDTO( res , a) : res += a
* */
#ifdef FIXED_POINT
#include "arch.h"
#define SAMP_MAX 2147483647
#define TWID_MAX 32767
#define TRIG_UPSCALE 1
#define SAMP_MIN -SAMP_MAX
# define S_MUL(a,b) MULT16_32_Q15(b, a)
# define C_MUL(m,a,b) \
do{ (m).r = SUB32_ovflw(S_MUL((a).r,(b).r) , S_MUL((a).i,(b).i)); \
(m).i = ADD32_ovflw(S_MUL((a).r,(b).i) , S_MUL((a).i,(b).r)); }while(0)
# define C_MULC(m,a,b) \
do{ (m).r = ADD32_ovflw(S_MUL((a).r,(b).r) , S_MUL((a).i,(b).i)); \
(m).i = SUB32_ovflw(S_MUL((a).i,(b).r) , S_MUL((a).r,(b).i)); }while(0)
# define C_MULBYSCALAR( c, s ) \
do{ (c).r = S_MUL( (c).r , s ) ;\
(c).i = S_MUL( (c).i , s ) ; }while(0)
# define DIVSCALAR(x,k) \
(x) = S_MUL( x, (TWID_MAX-((k)>>1))/(k)+1 )
# define C_FIXDIV(c,div) \
do { DIVSCALAR( (c).r , div); \
DIVSCALAR( (c).i , div); }while (0)
#define C_ADD( res, a,b)\
do {(res).r=ADD32_ovflw((a).r,(b).r); (res).i=ADD32_ovflw((a).i,(b).i); \
}while(0)
#define C_SUB( res, a,b)\
do {(res).r=SUB32_ovflw((a).r,(b).r); (res).i=SUB32_ovflw((a).i,(b).i); \
}while(0)
#define C_ADDTO( res , a)\
do {(res).r = ADD32_ovflw((res).r, (a).r); (res).i = ADD32_ovflw((res).i,(a).i);\
}while(0)
#define C_SUBFROM( res , a)\
do {(res).r = ADD32_ovflw((res).r,(a).r); (res).i = SUB32_ovflw((res).i,(a).i); \
}while(0)
#if defined(OPUS_ARM_INLINE_ASM)
#include "arm/kiss_fft_armv4.h"
#endif
#if defined(OPUS_ARM_INLINE_EDSP)
#include "arm/kiss_fft_armv5e.h"
#endif
#if defined(MIPSr1_ASM)
#include "mips/kiss_fft_mipsr1.h"
#endif
#else /* not FIXED_POINT*/
# define S_MUL(a,b) ( (a)*(b) )
#define C_MUL(m,a,b) \
do{ (m).r = (a).r*(b).r - (a).i*(b).i;\
(m).i = (a).r*(b).i + (a).i*(b).r; }while(0)
#define C_MULC(m,a,b) \
do{ (m).r = (a).r*(b).r + (a).i*(b).i;\
(m).i = (a).i*(b).r - (a).r*(b).i; }while(0)
#define C_MUL4(m,a,b) C_MUL(m,a,b)
# define C_FIXDIV(c,div) /* NOOP */
# define C_MULBYSCALAR( c, s ) \
do{ (c).r *= (s);\
(c).i *= (s); }while(0)
#endif
#ifndef CHECK_OVERFLOW_OP
# define CHECK_OVERFLOW_OP(a,op,b) /* noop */
#endif
#ifndef C_ADD
#define C_ADD( res, a,b)\
do { \
CHECK_OVERFLOW_OP((a).r,+,(b).r)\
CHECK_OVERFLOW_OP((a).i,+,(b).i)\
(res).r=(a).r+(b).r; (res).i=(a).i+(b).i; \
}while(0)
#define C_SUB( res, a,b)\
do { \
CHECK_OVERFLOW_OP((a).r,-,(b).r)\
CHECK_OVERFLOW_OP((a).i,-,(b).i)\
(res).r=(a).r-(b).r; (res).i=(a).i-(b).i; \
}while(0)
#define C_ADDTO( res , a)\
do { \
CHECK_OVERFLOW_OP((res).r,+,(a).r)\
CHECK_OVERFLOW_OP((res).i,+,(a).i)\
(res).r += (a).r; (res).i += (a).i;\
}while(0)
#define C_SUBFROM( res , a)\
do {\
CHECK_OVERFLOW_OP((res).r,-,(a).r)\
CHECK_OVERFLOW_OP((res).i,-,(a).i)\
(res).r -= (a).r; (res).i -= (a).i; \
}while(0)
#endif /* C_ADD defined */
#ifdef FIXED_POINT
/*# define KISS_FFT_COS(phase) TRIG_UPSCALE*floor(MIN(32767,MAX(-32767,.5+32768 * cos (phase))))
# define KISS_FFT_SIN(phase) TRIG_UPSCALE*floor(MIN(32767,MAX(-32767,.5+32768 * sin (phase))))*/
# define KISS_FFT_COS(phase) floor(.5+TWID_MAX*cos (phase))
# define KISS_FFT_SIN(phase) floor(.5+TWID_MAX*sin (phase))
# define HALF_OF(x) ((x)>>1)
#elif defined(USE_SIMD)
# define KISS_FFT_COS(phase) _mm_set1_ps( cos(phase) )
# define KISS_FFT_SIN(phase) _mm_set1_ps( sin(phase) )
# define HALF_OF(x) ((x)*_mm_set1_ps(.5f))
#else
# define KISS_FFT_COS(phase) (kiss_fft_scalar) cos(phase)
# define KISS_FFT_SIN(phase) (kiss_fft_scalar) sin(phase)
# define HALF_OF(x) ((x)*.5f)
#endif
#define kf_cexp(x,phase) \
do{ \
(x)->r = KISS_FFT_COS(phase);\
(x)->i = KISS_FFT_SIN(phase);\
}while(0)
#define kf_cexp2(x,phase) \
do{ \
(x)->r = TRIG_UPSCALE*celt_cos_norm((phase));\
(x)->i = TRIG_UPSCALE*celt_cos_norm((phase)-32768);\
}while(0)
#endif /* KISS_FFT_GUTS_H */
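As a concrete reading of the macro table above, this is what C_MUL and kf_cexp reduce to in the float (non-FIXED_POINT) build; the values are illustrative:

kiss_fft_cpx a = { 1.0f, 2.0f };   /* 1 + 2i */
kiss_fft_cpx b = { 3.0f, -1.0f };  /* 3 - i  */
kiss_fft_cpx m, w;
C_MUL(m, a, b);            /* m.r = 1*3 - 2*(-1) = 5, m.i = 1*(-1) + 2*3 = 5 */
kf_cexp(&w, 3.14159265358979 / 4);  /* w ~= 0.7071 + 0.7071i, i.e. e^(i*pi/4) */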
View file
@ -0,0 +1,261 @@
/* Copyright (c) 2003-2008 Jean-Marc Valin
Copyright (c) 2007-2008 CSIRO
Copyright (c) 2007-2009 Xiph.Org Foundation
Written by Jean-Marc Valin */
/**
@file arch.h
@brief Various architecture definitions for CELT
*/
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef ARCH_H
#define ARCH_H
#include "opus_types.h"
#include "common.h"
# if !defined(__GNUC_PREREQ)
# if defined(__GNUC__)&&defined(__GNUC_MINOR__)
# define __GNUC_PREREQ(_maj,_min) \
((__GNUC__<<16)+__GNUC_MINOR__>=((_maj)<<16)+(_min))
# else
# define __GNUC_PREREQ(_maj,_min) 0
# endif
# endif
#define CELT_SIG_SCALE 32768.f
#define celt_fatal(str) _celt_fatal(str, __FILE__, __LINE__);
#ifdef ENABLE_ASSERTIONS
#include <stdio.h>
#include <stdlib.h>
#ifdef __GNUC__
__attribute__((noreturn))
#endif
static OPUS_INLINE void _celt_fatal(const char *str, const char *file, int line)
{
fprintf (stderr, "Fatal (internal) error in %s, line %d: %s\n", file, line, str);
abort();
}
#define celt_assert(cond) {if (!(cond)) {celt_fatal("assertion failed: " #cond);}}
#define celt_assert2(cond, message) {if (!(cond)) {celt_fatal("assertion failed: " #cond "\n" message);}}
#else
#define celt_assert(cond)
#define celt_assert2(cond, message)
#endif
#define IMUL32(a,b) ((a)*(b))
#define MIN16(a,b) ((a) < (b) ? (a) : (b)) /**< Minimum 16-bit value. */
#define MAX16(a,b) ((a) > (b) ? (a) : (b)) /**< Maximum 16-bit value. */
#define MIN32(a,b) ((a) < (b) ? (a) : (b)) /**< Minimum 32-bit value. */
#define MAX32(a,b) ((a) > (b) ? (a) : (b)) /**< Maximum 32-bit value. */
#define IMIN(a,b) ((a) < (b) ? (a) : (b)) /**< Minimum int value. */
#define IMAX(a,b) ((a) > (b) ? (a) : (b)) /**< Maximum int value. */
#define UADD32(a,b) ((a)+(b))
#define USUB32(a,b) ((a)-(b))
/* Set this if opus_int64 is a native type of the CPU. */
/* Assume that all LP64 architectures have fast 64-bit types; also x86_64
(which can be ILP32 for x32) and Win64 (which is LLP64). */
#if defined(__x86_64__) || defined(__LP64__) || defined(_WIN64)
#define OPUS_FAST_INT64 1
#else
#define OPUS_FAST_INT64 0
#endif
#define PRINT_MIPS(file)
#ifdef FIXED_POINT
typedef opus_int16 opus_val16;
typedef opus_int32 opus_val32;
typedef opus_int64 opus_val64;
typedef opus_val32 celt_sig;
typedef opus_val16 celt_norm;
typedef opus_val32 celt_ener;
#define Q15ONE 32767
#define SIG_SHIFT 12
/* Safe saturation value for 32-bit signals. Should be less than
2^31*(1-0.85) to avoid blowing up on DC at deemphasis.*/
#define SIG_SAT (300000000)
#define NORM_SCALING 16384
#define DB_SHIFT 10
#define EPSILON 1
#define VERY_SMALL 0
#define VERY_LARGE16 ((opus_val16)32767)
#define Q15_ONE ((opus_val16)32767)
#define SCALEIN(a) (a)
#define SCALEOUT(a) (a)
#define ABS16(x) ((x) < 0 ? (-(x)) : (x))
#define ABS32(x) ((x) < 0 ? (-(x)) : (x))
static OPUS_INLINE opus_int16 SAT16(opus_int32 x) {
return x > 32767 ? 32767 : x < -32768 ? -32768 : (opus_int16)x;
}
#ifdef FIXED_DEBUG
#include "fixed_debug.h"
#else
#include "fixed_generic.h"
#ifdef OPUS_ARM_PRESUME_AARCH64_NEON_INTR
#include "arm/fixed_arm64.h"
#elif OPUS_ARM_INLINE_EDSP
#include "arm/fixed_armv5e.h"
#elif defined (OPUS_ARM_INLINE_ASM)
#include "arm/fixed_armv4.h"
#elif defined (BFIN_ASM)
#include "fixed_bfin.h"
#elif defined (TI_C5X_ASM)
#include "fixed_c5x.h"
#elif defined (TI_C6X_ASM)
#include "fixed_c6x.h"
#endif
#endif
#else /* FIXED_POINT */
typedef float opus_val16;
typedef float opus_val32;
typedef float opus_val64;
typedef float celt_sig;
typedef float celt_norm;
typedef float celt_ener;
#ifdef FLOAT_APPROX
/* This code should reliably detect NaN/inf even when -ffast-math is used.
Assumes IEEE 754 format. */
static OPUS_INLINE int celt_isnan(float x)
{
union {float f; opus_uint32 i;} in;
in.f = x;
return ((in.i>>23)&0xFF)==0xFF && (in.i&0x007FFFFF)!=0;
}
#else
#ifdef __FAST_MATH__
#error Cannot build libopus with -ffast-math unless FLOAT_APPROX is defined. This could result in crashes on extreme (e.g. NaN) input
#endif
#define celt_isnan(x) ((x)!=(x))
#endif
#define Q15ONE 1.0f
#define NORM_SCALING 1.f
#define EPSILON 1e-15f
#define VERY_SMALL 1e-30f
#define VERY_LARGE16 1e15f
#define Q15_ONE ((opus_val16)1.f)
/* This appears to be the same speed as C99's fabsf() but it's more portable. */
#define ABS16(x) ((float)fabs(x))
#define ABS32(x) ((float)fabs(x))
#define QCONST16(x,bits) (x)
#define QCONST32(x,bits) (x)
#define NEG16(x) (-(x))
#define NEG32(x) (-(x))
#define NEG32_ovflw(x) (-(x))
#define EXTRACT16(x) (x)
#define EXTEND32(x) (x)
#define SHR16(a,shift) (a)
#define SHL16(a,shift) (a)
#define SHR32(a,shift) (a)
#define SHL32(a,shift) (a)
#define PSHR32(a,shift) (a)
#define VSHR32(a,shift) (a)
#define PSHR(a,shift) (a)
#define SHR(a,shift) (a)
#define SHL(a,shift) (a)
#define SATURATE(x,a) (x)
#define SATURATE16(x) (x)
#define ROUND16(a,shift) (a)
#define SROUND16(a,shift) (a)
#define HALF16(x) (.5f*(x))
#define HALF32(x) (.5f*(x))
#define ADD16(a,b) ((a)+(b))
#define SUB16(a,b) ((a)-(b))
#define ADD32(a,b) ((a)+(b))
#define SUB32(a,b) ((a)-(b))
#define ADD32_ovflw(a,b) ((a)+(b))
#define SUB32_ovflw(a,b) ((a)-(b))
#define MULT16_16_16(a,b) ((a)*(b))
#define MULT16_16(a,b) ((opus_val32)(a)*(opus_val32)(b))
#define MAC16_16(c,a,b) ((c)+(opus_val32)(a)*(opus_val32)(b))
#define MULT16_32_Q15(a,b) ((a)*(b))
#define MULT16_32_Q16(a,b) ((a)*(b))
#define MULT32_32_Q31(a,b) ((a)*(b))
#define MAC16_32_Q15(c,a,b) ((c)+(a)*(b))
#define MAC16_32_Q16(c,a,b) ((c)+(a)*(b))
#define MULT16_16_Q11_32(a,b) ((a)*(b))
#define MULT16_16_Q11(a,b) ((a)*(b))
#define MULT16_16_Q13(a,b) ((a)*(b))
#define MULT16_16_Q14(a,b) ((a)*(b))
#define MULT16_16_Q15(a,b) ((a)*(b))
#define MULT16_16_P15(a,b) ((a)*(b))
#define MULT16_16_P13(a,b) ((a)*(b))
#define MULT16_16_P14(a,b) ((a)*(b))
#define MULT16_32_P16(a,b) ((a)*(b))
#define DIV32_16(a,b) (((opus_val32)(a))/(opus_val16)(b))
#define DIV32(a,b) (((opus_val32)(a))/(opus_val32)(b))
#define SCALEIN(a) ((a)*CELT_SIG_SCALE)
#define SCALEOUT(a) ((a)*(1/CELT_SIG_SCALE))
#define SIG2WORD16(x) (x)
#endif /* !FIXED_POINT */
#ifndef GLOBAL_STACK_SIZE
#ifdef FIXED_POINT
#define GLOBAL_STACK_SIZE 120000
#else
#define GLOBAL_STACK_SIZE 120000
#endif
#endif
#endif /* ARCH_H */
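To make the dual build concrete: in the float build the arithmetic macros above collapse to plain operators, while the fixed-point build (via fixed_generic.h, not shown here) treats opus_val16 as a Q15 integer. A small worked example under that assumption:

opus_val16 half = QCONST16(0.5f, 15);     /* float build: 0.5f; fixed build: 16384 */
opus_val16 q = MULT16_16_Q15(half, half); /* float: 0.25f; fixed: (16384*16384)>>15 == 8192, i.e. 0.25 in Q15 */
/* SAT16() (fixed build) clamps a 32-bit intermediate back into [-32768, 32767] */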
View file
@ -0,0 +1,279 @@
/* Copyright (c) 2009-2010 Xiph.Org Foundation
Written by Jean-Marc Valin */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "celt_lpc.h"
#include "arch.h"
#include "common.h"
#include "pitch.h"
void _celt_lpc(
opus_val16 *_lpc, /* out: [0...p-1] LPC coefficients */
const opus_val32 *ac, /* in: [0...p] autocorrelation values */
int p
)
{
int i, j;
opus_val32 r;
opus_val32 error = ac[0];
#ifdef FIXED_POINT
opus_val32 lpc[LPC_ORDER];
#else
float *lpc = _lpc;
#endif
RNN_CLEAR(lpc, p);
if (ac[0] != 0)
{
for (i = 0; i < p; i++) {
/* Sum up this iteration's reflection coefficient */
opus_val32 rr = 0;
for (j = 0; j < i; j++)
rr += MULT32_32_Q31(lpc[j],ac[i - j]);
rr += SHR32(ac[i + 1],3);
r = -SHL32(rr,3)/error;
/* Update LPC coefficients and total error */
lpc[i] = SHR32(r,3);
for (j = 0; j < (i+1)>>1; j++)
{
opus_val32 tmp1, tmp2;
tmp1 = lpc[j];
tmp2 = lpc[i-1-j];
lpc[j] = tmp1 + MULT32_32_Q31(r,tmp2);
lpc[i-1-j] = tmp2 + MULT32_32_Q31(r,tmp1);
}
error = error - MULT32_32_Q31(MULT32_32_Q31(r,r),error);
/* Bail out once we get 30 dB gain */
#ifdef FIXED_POINT
if (error<SHR32(ac[0],10))
break;
#else
if (error<.001f*ac[0])
break;
#endif
}
}
#ifdef FIXED_POINT
for (i=0;i<p;i++)
_lpc[i] = ROUND16(lpc[i],16);
#endif
}
void celt_fir(
const opus_val16 *x,
const opus_val16 *num,
opus_val16 *y,
int N,
int ord)
{
int i,j;
opus_val16 rnum[ord];
for(i=0;i<ord;i++)
rnum[i] = num[ord-i-1];
for (i=0;i<N-3;i+=4)
{
opus_val32 sum[4];
sum[0] = SHL32(EXTEND32(x[i ]), SIG_SHIFT);
sum[1] = SHL32(EXTEND32(x[i+1]), SIG_SHIFT);
sum[2] = SHL32(EXTEND32(x[i+2]), SIG_SHIFT);
sum[3] = SHL32(EXTEND32(x[i+3]), SIG_SHIFT);
xcorr_kernel(rnum, x+i-ord, sum, ord);
y[i ] = ROUND16(sum[0], SIG_SHIFT);
y[i+1] = ROUND16(sum[1], SIG_SHIFT);
y[i+2] = ROUND16(sum[2], SIG_SHIFT);
y[i+3] = ROUND16(sum[3], SIG_SHIFT);
}
for (;i<N;i++)
{
opus_val32 sum = SHL32(EXTEND32(x[i]), SIG_SHIFT);
for (j=0;j<ord;j++)
sum = MAC16_16(sum,rnum[j],x[i+j-ord]);
y[i] = ROUND16(sum, SIG_SHIFT);
}
}
void celt_iir(const opus_val32 *_x,
const opus_val16 *den,
opus_val32 *_y,
int N,
int ord,
opus_val16 *mem)
{
#ifdef SMALL_FOOTPRINT
int i,j;
for (i=0;i<N;i++)
{
opus_val32 sum = _x[i];
for (j=0;j<ord;j++)
{
sum -= MULT16_16(den[j],mem[j]);
}
for (j=ord-1;j>=1;j--)
{
mem[j]=mem[j-1];
}
mem[0] = SROUND16(sum, SIG_SHIFT);
_y[i] = sum;
}
#else
int i,j;
celt_assert((ord&3)==0);
opus_val16 rden[ord];
opus_val16 y[N+ord];
for(i=0;i<ord;i++)
rden[i] = den[ord-i-1];
for(i=0;i<ord;i++)
y[i] = -mem[ord-i-1];
for(;i<N+ord;i++)
y[i]=0;
for (i=0;i<N-3;i+=4)
{
/* Unroll by 4 as if it were an FIR filter */
opus_val32 sum[4];
sum[0]=_x[i];
sum[1]=_x[i+1];
sum[2]=_x[i+2];
sum[3]=_x[i+3];
xcorr_kernel(rden, y+i, sum, ord);
/* Patch up the result to compensate for the fact that this is an IIR */
y[i+ord ] = -SROUND16(sum[0],SIG_SHIFT);
_y[i ] = sum[0];
sum[1] = MAC16_16(sum[1], y[i+ord ], den[0]);
y[i+ord+1] = -SROUND16(sum[1],SIG_SHIFT);
_y[i+1] = sum[1];
sum[2] = MAC16_16(sum[2], y[i+ord+1], den[0]);
sum[2] = MAC16_16(sum[2], y[i+ord ], den[1]);
y[i+ord+2] = -SROUND16(sum[2],SIG_SHIFT);
_y[i+2] = sum[2];
sum[3] = MAC16_16(sum[3], y[i+ord+2], den[0]);
sum[3] = MAC16_16(sum[3], y[i+ord+1], den[1]);
sum[3] = MAC16_16(sum[3], y[i+ord ], den[2]);
y[i+ord+3] = -SROUND16(sum[3],SIG_SHIFT);
_y[i+3] = sum[3];
}
for (;i<N;i++)
{
opus_val32 sum = _x[i];
for (j=0;j<ord;j++)
sum -= MULT16_16(rden[j],y[i+j]);
y[i+ord] = SROUND16(sum,SIG_SHIFT);
_y[i] = sum;
}
for(i=0;i<ord;i++)
mem[i] = _y[N-i-1];
#endif
}
int _celt_autocorr(
const opus_val16 *x, /* in: [0...n-1] samples x */
opus_val32 *ac, /* out: [0...lag-1] ac values */
const opus_val16 *window,
int overlap,
int lag,
int n)
{
opus_val32 d;
int i, k;
int fastN=n-lag;
int shift;
const opus_val16 *xptr;
opus_val16 xx[n];
celt_assert(n>0);
celt_assert(overlap>=0);
if (overlap == 0)
{
xptr = x;
} else {
for (i=0;i<n;i++)
xx[i] = x[i];
for (i=0;i<overlap;i++)
{
xx[i] = MULT16_16_Q15(x[i],window[i]);
xx[n-i-1] = MULT16_16_Q15(x[n-i-1],window[i]);
}
xptr = xx;
}
shift=0;
#ifdef FIXED_POINT
{
opus_val32 ac0;
ac0 = 1+(n<<7);
if (n&1) ac0 += SHR32(MULT16_16(xptr[0],xptr[0]),9);
for(i=(n&1);i<n;i+=2)
{
ac0 += SHR32(MULT16_16(xptr[i],xptr[i]),9);
ac0 += SHR32(MULT16_16(xptr[i+1],xptr[i+1]),9);
}
shift = celt_ilog2(ac0)-30+10;
shift = (shift)/2;
if (shift>0)
{
for(i=0;i<n;i++)
xx[i] = PSHR32(xptr[i], shift);
xptr = xx;
} else
shift = 0;
}
#endif
celt_pitch_xcorr(xptr, xptr, ac, fastN, lag+1);
for (k=0;k<=lag;k++)
{
for (i = k+fastN, d = 0; i < n; i++)
d = MAC16_16(d, xptr[i], xptr[i-k]);
ac[k] += d;
}
#ifdef FIXED_POINT
shift = 2*shift;
if (shift<=0)
ac[0] += SHL32((opus_int32)1, -shift);
if (ac[0] < 268435456)
{
int shift2 = 29 - EC_ILOG(ac[0]);
for (i=0;i<=lag;i++)
ac[i] = SHL32(ac[i], shift2);
shift -= shift2;
} else if (ac[0] >= 536870912)
{
int shift2=1;
if (ac[0] >= 1073741824)
shift2++;
for (i=0;i<=lag;i++)
ac[i] = SHR32(ac[i], shift2);
shift += shift2;
}
#endif
return shift;
}
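_celt_lpc() above is the classic Levinson-Durbin recursion; with the fixed-point macros stripped away, its float path reduces to the standalone sketch below (an illustration, not a drop-in replacement):

#include <stdio.h>

static void lpc_float(float *lpc, const float *ac, int p) {
  int i, j;
  float error = ac[0];
  for (i = 0; i < p; i++) lpc[i] = 0;
  if (ac[0] == 0) return;
  for (i = 0; i < p; i++) {
    /* sum up this iteration's reflection coefficient */
    float rr = 0;
    for (j = 0; j < i; j++) rr += lpc[j] * ac[i - j];
    rr += ac[i + 1];
    float r = -rr / error;
    lpc[i] = r;
    /* update the coefficients symmetrically, then shrink the error */
    for (j = 0; j < (i + 1) >> 1; j++) {
      float tmp1 = lpc[j], tmp2 = lpc[i - 1 - j];
      lpc[j] = tmp1 + r * tmp2;
      lpc[i - 1 - j] = tmp2 + r * tmp1;
    }
    error = error - r * r * error;
    if (error < .001f * ac[0]) break; /* bail out once we get ~30 dB gain */
  }
}

int main(void) {
  float ac[3] = { 1.0f, 0.9f, 0.8f }; /* toy autocorrelation values */
  float lpc[2];
  lpc_float(lpc, ac, 2);
  printf("%f %f\n", lpc[0], lpc[1]);
  return 0;
}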
View file
@ -0,0 +1,59 @@
/* Copyright (c) 2009-2010 Xiph.Org Foundation
Written by Jean-Marc Valin */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef PLC_H
#define PLC_H
#include "arch.h"
#include "common.h"
#if defined(OPUS_X86_MAY_HAVE_SSE4_1)
#include "x86/celt_lpc_sse.h"
#endif
#define LPC_ORDER 24
void _celt_lpc(opus_val16 *_lpc, const opus_val32 *ac, int p);
void celt_fir(
const opus_val16 *x,
const opus_val16 *num,
opus_val16 *y,
int N,
int ord);
void celt_iir(const opus_val32 *x,
const opus_val16 *den,
opus_val32 *y,
int N,
int ord,
opus_val16 *mem);
int _celt_autocorr(const opus_val16 *x, opus_val32 *ac,
const opus_val16 *window, int overlap, int lag, int n);
#endif /* PLC_H */
View file
@ -0,0 +1,48 @@
#ifndef COMMON_H
#define COMMON_H
#include "stdlib.h"
#include "string.h"
#define RNN_INLINE inline
#define OPUS_INLINE inline
/** RNNoise wrapper for malloc(). To do your own dynamic allocation, all you need to do is replace this function and rnnoise_free */
#ifndef OVERRIDE_RNNOISE_ALLOC
static RNN_INLINE void *rnnoise_alloc (size_t size)
{
return malloc(size);
}
#endif
/** RNNoise wrapper for free(). To do your own dynamic allocation, all you need to do is replace this function and rnnoise_alloc */
#ifndef OVERRIDE_RNNOISE_FREE
static RNN_INLINE void rnnoise_free (void *ptr)
{
free(ptr);
}
#endif
/** Copy n elements from src to dst. The 0* term provides compile-time type checking */
#ifndef OVERRIDE_RNN_COPY
#define RNN_COPY(dst, src, n) (memcpy((dst), (src), (n)*sizeof(*(dst)) + 0*((dst)-(src)) ))
#endif
/** Copy n elements from src to dst, allowing overlapping regions. The 0* term
provides compile-time type checking */
#ifndef OVERRIDE_RNN_MOVE
#define RNN_MOVE(dst, src, n) (memmove((dst), (src), (n)*sizeof(*(dst)) + 0*((dst)-(src)) ))
#endif
/** Set n elements of dst to zero */
#ifndef OVERRIDE_RNN_CLEAR
#define RNN_CLEAR(dst, n) (memset((dst), 0, (n)*sizeof(*(dst))))
#endif
#endif
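A note on the `+ 0*((dst)-(src))` term above: pointer subtraction only compiles for compatible pointer types, so RNN_COPY/RNN_MOVE reject mismatched arguments at compile time while the term itself contributes nothing at run time. For example:

float dst[8], src[8];
RNN_COPY(dst, src, 8);    /* fine: float* - float* is well-formed */
/* double bad[8];
   RNN_COPY(dst, bad, 8);    would fail to compile: float* - double* */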
View file
@ -0,0 +1,646 @@
/* Copyright (c) 2018 Gregor Richards
* Copyright (c) 2017 Mozilla */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include "kiss_fft.h"
#include "common.h"
#include <math.h>
#include "rnnoise.h"
#include "pitch.h"
#include "arch.h"
#include "rnn.h"
#include "rnn_data.h"
#define FRAME_SIZE_SHIFT 2
#define FRAME_SIZE (120<<FRAME_SIZE_SHIFT)
#define WINDOW_SIZE (2*FRAME_SIZE)
#define FREQ_SIZE (FRAME_SIZE + 1)
#define PITCH_MIN_PERIOD 60
#define PITCH_MAX_PERIOD 768
#define PITCH_FRAME_SIZE 960
#define PITCH_BUF_SIZE (PITCH_MAX_PERIOD+PITCH_FRAME_SIZE)
#define SQUARE(x) ((x)*(x))
#define NB_BANDS 22
#define CEPS_MEM 8
#define NB_DELTA_CEPS 6
#define NB_FEATURES (NB_BANDS+3*NB_DELTA_CEPS+2)
#ifndef TRAINING
#define TRAINING 0
#endif
/* The built-in model, used if no file is given as input */
extern const struct RNNModel rnnoise_model_orig;
static const opus_int16 eband5ms[] = {
/*0 200 400 600 800 1k 1.2 1.4 1.6 2k 2.4 2.8 3.2 4k 4.8 5.6 6.8 8k 9.6 12k 15.6 20k*/
0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 34, 40, 48, 60, 78, 100
};
typedef struct {
int init;
kiss_fft_state *kfft;
float half_window[FRAME_SIZE];
float dct_table[NB_BANDS*NB_BANDS];
} CommonState;
struct DenoiseState {
float analysis_mem[FRAME_SIZE];
float cepstral_mem[CEPS_MEM][NB_BANDS];
int memid;
float synthesis_mem[FRAME_SIZE];
float pitch_buf[PITCH_BUF_SIZE];
float pitch_enh_buf[PITCH_BUF_SIZE];
float last_gain;
int last_period;
float mem_hp_x[2];
float lastg[NB_BANDS];
RNNState rnn;
};
void compute_band_energy(float *bandE, const kiss_fft_cpx *X) {
int i;
float sum[NB_BANDS] = {0};
for (i=0;i<NB_BANDS-1;i++)
{
int j;
int band_size;
band_size = (eband5ms[i+1]-eband5ms[i])<<FRAME_SIZE_SHIFT;
for (j=0;j<band_size;j++) {
float tmp;
float frac = (float)j/band_size;
tmp = SQUARE(X[(eband5ms[i]<<FRAME_SIZE_SHIFT) + j].r);
tmp += SQUARE(X[(eband5ms[i]<<FRAME_SIZE_SHIFT) + j].i);
sum[i] += (1-frac)*tmp;
sum[i+1] += frac*tmp;
}
}
sum[0] *= 2;
sum[NB_BANDS-1] *= 2;
for (i=0;i<NB_BANDS;i++)
{
bandE[i] = sum[i];
}
}
void compute_band_corr(float *bandE, const kiss_fft_cpx *X, const kiss_fft_cpx *P) {
int i;
float sum[NB_BANDS] = {0};
for (i=0;i<NB_BANDS-1;i++)
{
int j;
int band_size;
band_size = (eband5ms[i+1]-eband5ms[i])<<FRAME_SIZE_SHIFT;
for (j=0;j<band_size;j++) {
float tmp;
float frac = (float)j/band_size;
tmp = X[(eband5ms[i]<<FRAME_SIZE_SHIFT) + j].r * P[(eband5ms[i]<<FRAME_SIZE_SHIFT) + j].r;
tmp += X[(eband5ms[i]<<FRAME_SIZE_SHIFT) + j].i * P[(eband5ms[i]<<FRAME_SIZE_SHIFT) + j].i;
sum[i] += (1-frac)*tmp;
sum[i+1] += frac*tmp;
}
}
sum[0] *= 2;
sum[NB_BANDS-1] *= 2;
for (i=0;i<NB_BANDS;i++)
{
bandE[i] = sum[i];
}
}
void interp_band_gain(float *g, const float *bandE) {
int i;
memset(g, 0, FREQ_SIZE*sizeof(float));
for (i=0;i<NB_BANDS-1;i++)
{
int j;
int band_size;
band_size = (eband5ms[i+1]-eband5ms[i])<<FRAME_SIZE_SHIFT;
for (j=0;j<band_size;j++) {
float frac = (float)j/band_size;
g[(eband5ms[i]<<FRAME_SIZE_SHIFT) + j] = (1-frac)*bandE[i] + frac*bandE[i+1];
}
}
}
CommonState common;
static void check_init() {
int i;
if (common.init) return;
common.kfft = opus_fft_alloc_twiddles(2*FRAME_SIZE, NULL, NULL, NULL, 0);
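/* Half of a power-complementary (Vorbis-style) window,
sin((pi/2) * sin^2(pi*(i+.5)/(2*FRAME_SIZE))); it is applied on both
analysis and synthesis, so the 50%-overlap-add reconstructs the signal. */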
for (i=0;i<FRAME_SIZE;i++)
common.half_window[i] = sin(.5*M_PI*sin(.5*M_PI*(i+.5)/FRAME_SIZE) * sin(.5*M_PI*(i+.5)/FRAME_SIZE));
for (i=0;i<NB_BANDS;i++) {
int j;
for (j=0;j<NB_BANDS;j++) {
common.dct_table[i*NB_BANDS + j] = cos((i+.5)*j*M_PI/NB_BANDS);
if (j==0) common.dct_table[i*NB_BANDS + j] *= sqrt(.5);
}
}
common.init = 1;
}
static void dct(float *out, const float *in) {
int i;
check_init();
for (i=0;i<NB_BANDS;i++) {
int j;
float sum = 0;
for (j=0;j<NB_BANDS;j++) {
sum += in[j] * common.dct_table[j*NB_BANDS + i];
}
out[i] = sum*sqrt(2./22);
}
}
#if 0
static void idct(float *out, const float *in) {
int i;
check_init();
for (i=0;i<NB_BANDS;i++) {
int j;
float sum = 0;
for (j=0;j<NB_BANDS;j++) {
sum += in[j] * common.dct_table[i*NB_BANDS + j];
}
out[i] = sum*sqrt(2./22);
}
}
#endif
static void forward_transform(kiss_fft_cpx *out, const float *in) {
int i;
kiss_fft_cpx x[WINDOW_SIZE];
kiss_fft_cpx y[WINDOW_SIZE];
check_init();
for (i=0;i<WINDOW_SIZE;i++) {
x[i].r = in[i];
x[i].i = 0;
}
opus_fft(common.kfft, x, y, 0);
for (i=0;i<FREQ_SIZE;i++) {
out[i] = y[i];
}
}
static void inverse_transform(float *out, const kiss_fft_cpx *in) {
int i;
kiss_fft_cpx x[WINDOW_SIZE];
kiss_fft_cpx y[WINDOW_SIZE];
check_init();
for (i=0;i<FREQ_SIZE;i++) {
x[i] = in[i];
}
for (;i<WINDOW_SIZE;i++) {
x[i].r = x[WINDOW_SIZE - i].r;
x[i].i = -x[WINDOW_SIZE - i].i;
}
opus_fft(common.kfft, x, y, 0);
/* output in reverse order for IFFT. */
out[0] = WINDOW_SIZE*y[0].r;
for (i=1;i<WINDOW_SIZE;i++) {
out[i] = WINDOW_SIZE*y[WINDOW_SIZE - i].r;
}
}
static void apply_window(float *x) {
int i;
check_init();
for (i=0;i<FRAME_SIZE;i++) {
x[i] *= common.half_window[i];
x[WINDOW_SIZE - 1 - i] *= common.half_window[i];
}
}
int rnnoise_get_size() {
return sizeof(DenoiseState);
}
int rnnoise_get_frame_size() {
return FRAME_SIZE;
}
int rnnoise_init(DenoiseState *st, RNNModel *model) {
memset(st, 0, sizeof(*st));
if (model)
st->rnn.model = model;
else
st->rnn.model = &rnnoise_model_orig;
st->rnn.vad_gru_state = calloc(sizeof(float), st->rnn.model->vad_gru_size);
st->rnn.noise_gru_state = calloc(sizeof(float), st->rnn.model->noise_gru_size);
st->rnn.denoise_gru_state = calloc(sizeof(float), st->rnn.model->denoise_gru_size);
return 0;
}
DenoiseState *rnnoise_create(RNNModel *model) {
DenoiseState *st;
st = malloc(rnnoise_get_size());
rnnoise_init(st, model);
return st;
}
void rnnoise_destroy(DenoiseState *st) {
free(st->rnn.vad_gru_state);
free(st->rnn.noise_gru_state);
free(st->rnn.denoise_gru_state);
free(st);
}
#if TRAINING
int lowpass = FREQ_SIZE;
int band_lp = NB_BANDS;
#endif
static void frame_analysis(DenoiseState *st, kiss_fft_cpx *X, float *Ex, const float *in) {
int i;
float x[WINDOW_SIZE];
RNN_COPY(x, st->analysis_mem, FRAME_SIZE);
for (i=0;i<FRAME_SIZE;i++) x[FRAME_SIZE + i] = in[i];
RNN_COPY(st->analysis_mem, in, FRAME_SIZE);
apply_window(x);
forward_transform(X, x);
#if TRAINING
for (i=lowpass;i<FREQ_SIZE;i++)
X[i].r = X[i].i = 0;
#endif
compute_band_energy(Ex, X);
}
static int compute_frame_features(DenoiseState *st, kiss_fft_cpx *X, kiss_fft_cpx *P,
float *Ex, float *Ep, float *Exp, float *features, const float *in) {
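/* Feature layout (NB_FEATURES == 42): features[0..21] band cepstrum (the
first 6 are replaced below by temporally smoothed sums), [22..27] first
and [28..33] second cepstral deltas, [34..39] DCT of the pitch
correlation, [40] scaled pitch period, [41] spectral variability. */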
int i;
float E = 0;
float *ceps_0, *ceps_1, *ceps_2;
float spec_variability = 0;
float Ly[NB_BANDS];
float p[WINDOW_SIZE];
float pitch_buf[PITCH_BUF_SIZE>>1];
int pitch_index;
float gain;
float *(pre[1]);
float tmp[NB_BANDS];
float follow, logMax;
frame_analysis(st, X, Ex, in);
RNN_MOVE(st->pitch_buf, &st->pitch_buf[FRAME_SIZE], PITCH_BUF_SIZE-FRAME_SIZE);
RNN_COPY(&st->pitch_buf[PITCH_BUF_SIZE-FRAME_SIZE], in, FRAME_SIZE);
pre[0] = &st->pitch_buf[0];
pitch_downsample(pre, pitch_buf, PITCH_BUF_SIZE, 1);
pitch_search(pitch_buf+(PITCH_MAX_PERIOD>>1), pitch_buf, PITCH_FRAME_SIZE,
PITCH_MAX_PERIOD-3*PITCH_MIN_PERIOD, &pitch_index);
pitch_index = PITCH_MAX_PERIOD-pitch_index;
gain = remove_doubling(pitch_buf, PITCH_MAX_PERIOD, PITCH_MIN_PERIOD,
PITCH_FRAME_SIZE, &pitch_index, st->last_period, st->last_gain);
st->last_period = pitch_index;
st->last_gain = gain;
for (i=0;i<WINDOW_SIZE;i++)
p[i] = st->pitch_buf[PITCH_BUF_SIZE-WINDOW_SIZE-pitch_index+i];
apply_window(p);
forward_transform(P, p);
compute_band_energy(Ep, P);
compute_band_corr(Exp, X, P);
for (i=0;i<NB_BANDS;i++) Exp[i] = Exp[i]/sqrt(.001+Ex[i]*Ep[i]);
dct(tmp, Exp);
for (i=0;i<NB_DELTA_CEPS;i++) features[NB_BANDS+2*NB_DELTA_CEPS+i] = tmp[i];
features[NB_BANDS+2*NB_DELTA_CEPS] -= 1.3;
features[NB_BANDS+2*NB_DELTA_CEPS+1] -= 0.9;
features[NB_BANDS+3*NB_DELTA_CEPS] = .01*(pitch_index-300);
logMax = -2;
follow = -2;
for (i=0;i<NB_BANDS;i++) {
Ly[i] = log10(1e-2+Ex[i]);
Ly[i] = MAX16(logMax-7, MAX16(follow-1.5, Ly[i]));
logMax = MAX16(logMax, Ly[i]);
follow = MAX16(follow-1.5, Ly[i]);
E += Ex[i];
}
if (!TRAINING && E < 0.04) {
/* If there's no audio, avoid messing up the state. */
RNN_CLEAR(features, NB_FEATURES);
return 1;
}
dct(features, Ly);
features[0] -= 12;
features[1] -= 4;
ceps_0 = st->cepstral_mem[st->memid];
ceps_1 = (st->memid < 1) ? st->cepstral_mem[CEPS_MEM+st->memid-1] : st->cepstral_mem[st->memid-1];
ceps_2 = (st->memid < 2) ? st->cepstral_mem[CEPS_MEM+st->memid-2] : st->cepstral_mem[st->memid-2];
for (i=0;i<NB_BANDS;i++) ceps_0[i] = features[i];
st->memid++;
for (i=0;i<NB_DELTA_CEPS;i++) {
features[i] = ceps_0[i] + ceps_1[i] + ceps_2[i];
features[NB_BANDS+i] = ceps_0[i] - ceps_2[i];
features[NB_BANDS+NB_DELTA_CEPS+i] = ceps_0[i] - 2*ceps_1[i] + ceps_2[i];
}
/* Spectral variability features. */
if (st->memid == CEPS_MEM) st->memid = 0;
for (i=0;i<CEPS_MEM;i++)
{
int j;
float mindist = 1e15f;
for (j=0;j<CEPS_MEM;j++)
{
int k;
float dist=0;
for (k=0;k<NB_BANDS;k++)
{
float tmp;
tmp = st->cepstral_mem[i][k] - st->cepstral_mem[j][k];
dist += tmp*tmp;
}
if (j!=i)
mindist = MIN32(mindist, dist);
}
spec_variability += mindist;
}
features[NB_BANDS+3*NB_DELTA_CEPS+1] = spec_variability/CEPS_MEM-2.1;
return TRAINING && E < 0.1;
}
static void frame_synthesis(DenoiseState *st, float *out, const kiss_fft_cpx *y) {
float x[WINDOW_SIZE];
int i;
inverse_transform(x, y);
apply_window(x);
for (i=0;i<FRAME_SIZE;i++) out[i] = x[i] + st->synthesis_mem[i];
RNN_COPY(st->synthesis_mem, &x[FRAME_SIZE], FRAME_SIZE);
}
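/* Direct-form II transposed biquad with b0 fixed at 1:
H(z) = (1 + b[0]*z^-1 + b[1]*z^-2) / (1 + a[0]*z^-1 + a[1]*z^-2);
mem[0] and mem[1] are the two state variables. */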
static void biquad(float *y, float mem[2], const float *x, const float *b, const float *a, int N) {
int i;
for (i=0;i<N;i++) {
float xi, yi;
xi = x[i];
yi = x[i] + mem[0];
mem[0] = mem[1] + (b[0]*(double)xi - a[0]*(double)yi);
mem[1] = (b[1]*(double)xi - a[1]*(double)yi);
y[i] = yi;
}
}
void pitch_filter(kiss_fft_cpx *X, const kiss_fft_cpx *P, const float *Ex, const float *Ep,
const float *Exp, const float *g) {
int i;
float r[NB_BANDS];
float rf[FREQ_SIZE] = {0};
for (i=0;i<NB_BANDS;i++) {
#if 0
if (Exp[i]>g[i]) r[i] = 1;
else r[i] = Exp[i]*(1-g[i])/(.001 + g[i]*(1-Exp[i]));
r[i] = MIN16(1, MAX16(0, r[i]));
#else
if (Exp[i]>g[i]) r[i] = 1;
else r[i] = SQUARE(Exp[i])*(1-SQUARE(g[i]))/(.001 + SQUARE(g[i])*(1-SQUARE(Exp[i])));
r[i] = sqrt(MIN16(1, MAX16(0, r[i])));
#endif
r[i] *= sqrt(Ex[i]/(1e-8+Ep[i]));
}
interp_band_gain(rf, r);
for (i=0;i<FREQ_SIZE;i++) {
X[i].r += rf[i]*P[i].r;
X[i].i += rf[i]*P[i].i;
}
float newE[NB_BANDS];
compute_band_energy(newE, X);
float norm[NB_BANDS];
float normf[FREQ_SIZE]={0};
for (i=0;i<NB_BANDS;i++) {
norm[i] = sqrt(Ex[i]/(1e-8+newE[i]));
}
interp_band_gain(normf, norm);
for (i=0;i<FREQ_SIZE;i++) {
X[i].r *= normf[i];
X[i].i *= normf[i];
}
}
float rnnoise_process_frame(DenoiseState *st, float *out, const float *in) {
int i;
kiss_fft_cpx X[FREQ_SIZE];
kiss_fft_cpx P[WINDOW_SIZE];
float x[FRAME_SIZE];
float Ex[NB_BANDS], Ep[NB_BANDS];
float Exp[NB_BANDS];
float features[NB_FEATURES];
float g[NB_BANDS];
float gf[FREQ_SIZE]={1};
float vad_prob = 0;
int silence;
static const float a_hp[2] = {-1.99599, 0.99600};
static const float b_hp[2] = {-2, 1};
biquad(x, st->mem_hp_x, in, b_hp, a_hp, FRAME_SIZE);
silence = compute_frame_features(st, X, P, Ex, Ep, Exp, features, x);
if (!silence) {
compute_rnn(&st->rnn, g, &vad_prob, features);
pitch_filter(X, P, Ex, Ep, Exp, g);
for (i=0;i<NB_BANDS;i++) {
float alpha = .6f;
g[i] = MAX16(g[i], alpha*st->lastg[i]);
st->lastg[i] = g[i];
}
interp_band_gain(gf, g);
#if 1
for (i=0;i<FREQ_SIZE;i++) {
X[i].r *= gf[i];
X[i].i *= gf[i];
}
#endif
}
frame_synthesis(st, out, X);
return vad_prob;
}
#if TRAINING
static float uni_rand() {
return rand()/(double)RAND_MAX-.5;
}
static void rand_resp(float *a, float *b) {
a[0] = .75*uni_rand();
a[1] = .75*uni_rand();
b[0] = .75*uni_rand();
b[1] = .75*uni_rand();
}
int main(int argc, char **argv) {
int i;
int count=0;
static const float a_hp[2] = {-1.99599, 0.99600};
static const float b_hp[2] = {-2, 1};
float a_noise[2] = {0};
float b_noise[2] = {0};
float a_sig[2] = {0};
float b_sig[2] = {0};
float mem_hp_x[2]={0};
float mem_hp_n[2]={0};
float mem_resp_x[2]={0};
float mem_resp_n[2]={0};
float x[FRAME_SIZE];
float n[FRAME_SIZE];
float xn[FRAME_SIZE];
int vad_cnt=0;
int gain_change_count=0;
float speech_gain = 1, noise_gain = 1;
FILE *f1, *f2;
int maxCount;
DenoiseState *st;
DenoiseState *noise_state;
DenoiseState *noisy;
st = rnnoise_create(NULL);
noise_state = rnnoise_create(NULL);
noisy = rnnoise_create(NULL);
if (argc!=4) {
fprintf(stderr, "usage: %s <speech> <noise> <count>\n", argv[0]);
return 1;
}
f1 = fopen(argv[1], "r");
f2 = fopen(argv[2], "r");
maxCount = atoi(argv[3]);
for(i=0;i<150;i++) {
short tmp[FRAME_SIZE];
fread(tmp, sizeof(short), FRAME_SIZE, f2);
}
while (1) {
kiss_fft_cpx X[FREQ_SIZE], Y[FREQ_SIZE], N[FREQ_SIZE], P[WINDOW_SIZE];
float Ex[NB_BANDS], Ey[NB_BANDS], En[NB_BANDS], Ep[NB_BANDS];
float Exp[NB_BANDS];
float Ln[NB_BANDS];
float features[NB_FEATURES];
float g[NB_BANDS];
short tmp[FRAME_SIZE];
float vad=0;
float E=0;
if (count==maxCount) break;
if ((count%1000)==0) fprintf(stderr, "%d\r", count);
if (++gain_change_count > 2821) {
speech_gain = pow(10., (-40+(rand()%60))/20.);
noise_gain = pow(10., (-30+(rand()%50))/20.);
if (rand()%10==0) noise_gain = 0;
noise_gain *= speech_gain;
if (rand()%10==0) speech_gain = 0;
gain_change_count = 0;
rand_resp(a_noise, b_noise);
rand_resp(a_sig, b_sig);
lowpass = FREQ_SIZE * 3000./24000. * pow(50., rand()/(double)RAND_MAX);
for (i=0;i<NB_BANDS;i++) {
if (eband5ms[i]<<FRAME_SIZE_SHIFT > lowpass) {
band_lp = i;
break;
}
}
}
if (speech_gain != 0) {
fread(tmp, sizeof(short), FRAME_SIZE, f1);
if (feof(f1)) {
rewind(f1);
fread(tmp, sizeof(short), FRAME_SIZE, f1);
}
for (i=0;i<FRAME_SIZE;i++) x[i] = speech_gain*tmp[i];
for (i=0;i<FRAME_SIZE;i++) E += tmp[i]*(float)tmp[i];
} else {
for (i=0;i<FRAME_SIZE;i++) x[i] = 0;
E = 0;
}
if (noise_gain!=0) {
fread(tmp, sizeof(short), FRAME_SIZE, f2);
if (feof(f2)) {
rewind(f2);
fread(tmp, sizeof(short), FRAME_SIZE, f2);
}
for (i=0;i<FRAME_SIZE;i++) n[i] = noise_gain*tmp[i];
} else {
for (i=0;i<FRAME_SIZE;i++) n[i] = 0;
}
biquad(x, mem_hp_x, x, b_hp, a_hp, FRAME_SIZE);
biquad(x, mem_resp_x, x, b_sig, a_sig, FRAME_SIZE);
biquad(n, mem_hp_n, n, b_hp, a_hp, FRAME_SIZE);
biquad(n, mem_resp_n, n, b_noise, a_noise, FRAME_SIZE);
for (i=0;i<FRAME_SIZE;i++) xn[i] = x[i] + n[i];
if (E > 1e9f) {
vad_cnt=0;
} else if (E > 1e8f) {
vad_cnt -= 5;
} else if (E > 1e7f) {
vad_cnt++;
} else {
vad_cnt+=2;
}
if (vad_cnt < 0) vad_cnt = 0;
if (vad_cnt > 15) vad_cnt = 15;
if (vad_cnt >= 10) vad = 0;
else if (vad_cnt > 0) vad = 0.5f;
else vad = 1.f;
frame_analysis(st, Y, Ey, x);
frame_analysis(noise_state, N, En, n);
for (i=0;i<NB_BANDS;i++) Ln[i] = log10(1e-2+En[i]);
int silence = compute_frame_features(noisy, X, P, Ex, Ep, Exp, features, xn);
pitch_filter(X, P, Ex, Ep, Exp, g);
//printf("%f %d\n", noisy->last_gain, noisy->last_period);
for (i=0;i<NB_BANDS;i++) {
g[i] = sqrt((Ey[i]+1e-3)/(Ex[i]+1e-3));
if (g[i] > 1) g[i] = 1;
if (silence || i > band_lp) g[i] = -1;
if (Ey[i] < 5e-2 && Ex[i] < 5e-2) g[i] = -1;
if (vad==0 && noise_gain==0) g[i] = -1;
}
count++;
#if 1
fwrite(features, sizeof(float), NB_FEATURES, stdout);
fwrite(g, sizeof(float), NB_BANDS, stdout);
fwrite(Ln, sizeof(float), NB_BANDS, stdout);
fwrite(&vad, sizeof(float), 1, stdout);
#endif
}
fprintf(stderr, "matrix size: %d x %d\n", count, NB_FEATURES + 2*NB_BANDS + 1);
fclose(f1);
fclose(f2);
return 0;
}
#endif
View file
@ -0,0 +1,601 @@
/*Copyright (c) 2003-2004, Mark Borgerding
Lots of modifications by Jean-Marc Valin
Copyright (c) 2005-2007, Xiph.Org Foundation
Copyright (c) 2008, Xiph.Org Foundation, CSIRO
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.*/
/* This code is originally from Mark Borgerding's KISS-FFT but has been
heavily modified to better suit Opus */
#ifndef SKIP_CONFIG_H
# ifdef HAVE_CONFIG_H
# include "config.h"
# endif
#endif
#include "_kiss_fft_guts.h"
#define CUSTOM_MODES
/* The guts header contains all the multiplication and addition macros that are defined for
complex numbers. It also declares the kf_ internal functions.
*/
static void kf_bfly2(
kiss_fft_cpx * Fout,
int m,
int N
)
{
kiss_fft_cpx * Fout2;
int i;
(void)m;
#ifdef CUSTOM_MODES
if (m==1)
{
celt_assert(m==1);
for (i=0;i<N;i++)
{
kiss_fft_cpx t;
Fout2 = Fout + 1;
t = *Fout2;
C_SUB( *Fout2 , *Fout , t );
C_ADDTO( *Fout , t );
Fout += 2;
}
} else
#endif
{
opus_val16 tw;
tw = QCONST16(0.7071067812f, 15);
/* We know that m==4 here because the radix-2 is just after a radix-4 */
celt_assert(m==4);
for (i=0;i<N;i++)
{
kiss_fft_cpx t;
Fout2 = Fout + 4;
t = Fout2[0];
C_SUB( Fout2[0] , Fout[0] , t );
C_ADDTO( Fout[0] , t );
t.r = S_MUL(ADD32_ovflw(Fout2[1].r, Fout2[1].i), tw);
t.i = S_MUL(SUB32_ovflw(Fout2[1].i, Fout2[1].r), tw);
C_SUB( Fout2[1] , Fout[1] , t );
C_ADDTO( Fout[1] , t );
t.r = Fout2[2].i;
t.i = -Fout2[2].r;
C_SUB( Fout2[2] , Fout[2] , t );
C_ADDTO( Fout[2] , t );
t.r = S_MUL(SUB32_ovflw(Fout2[3].i, Fout2[3].r), tw);
t.i = S_MUL(NEG32_ovflw(ADD32_ovflw(Fout2[3].i, Fout2[3].r)), tw);
C_SUB( Fout2[3] , Fout[3] , t );
C_ADDTO( Fout[3] , t );
Fout += 8;
}
}
}
static void kf_bfly4(
kiss_fft_cpx * Fout,
const size_t fstride,
const kiss_fft_state *st,
int m,
int N,
int mm
)
{
int i;
if (m==1)
{
/* Degenerate case where all the twiddles are 1. */
for (i=0;i<N;i++)
{
kiss_fft_cpx scratch0, scratch1;
C_SUB( scratch0 , *Fout, Fout[2] );
C_ADDTO(*Fout, Fout[2]);
C_ADD( scratch1 , Fout[1] , Fout[3] );
C_SUB( Fout[2], *Fout, scratch1 );
C_ADDTO( *Fout , scratch1 );
C_SUB( scratch1 , Fout[1] , Fout[3] );
Fout[1].r = ADD32_ovflw(scratch0.r, scratch1.i);
Fout[1].i = SUB32_ovflw(scratch0.i, scratch1.r);
Fout[3].r = SUB32_ovflw(scratch0.r, scratch1.i);
Fout[3].i = ADD32_ovflw(scratch0.i, scratch1.r);
Fout+=4;
}
} else {
int j;
kiss_fft_cpx scratch[6];
const kiss_twiddle_cpx *tw1,*tw2,*tw3;
const int m2=2*m;
const int m3=3*m;
kiss_fft_cpx * Fout_beg = Fout;
for (i=0;i<N;i++)
{
Fout = Fout_beg + i*mm;
tw3 = tw2 = tw1 = st->twiddles;
/* m is guaranteed to be a multiple of 4. */
for (j=0;j<m;j++)
{
C_MUL(scratch[0],Fout[m] , *tw1 );
C_MUL(scratch[1],Fout[m2] , *tw2 );
C_MUL(scratch[2],Fout[m3] , *tw3 );
C_SUB( scratch[5] , *Fout, scratch[1] );
C_ADDTO(*Fout, scratch[1]);
C_ADD( scratch[3] , scratch[0] , scratch[2] );
C_SUB( scratch[4] , scratch[0] , scratch[2] );
C_SUB( Fout[m2], *Fout, scratch[3] );
tw1 += fstride;
tw2 += fstride*2;
tw3 += fstride*3;
C_ADDTO( *Fout , scratch[3] );
Fout[m].r = ADD32_ovflw(scratch[5].r, scratch[4].i);
Fout[m].i = SUB32_ovflw(scratch[5].i, scratch[4].r);
Fout[m3].r = SUB32_ovflw(scratch[5].r, scratch[4].i);
Fout[m3].i = ADD32_ovflw(scratch[5].i, scratch[4].r);
++Fout;
}
}
}
}
#ifndef RADIX_TWO_ONLY
static void kf_bfly3(
kiss_fft_cpx * Fout,
const size_t fstride,
const kiss_fft_state *st,
int m,
int N,
int mm
)
{
int i;
size_t k;
const size_t m2 = 2*m;
const kiss_twiddle_cpx *tw1,*tw2;
kiss_fft_cpx scratch[5];
kiss_twiddle_cpx epi3;
kiss_fft_cpx * Fout_beg = Fout;
#ifdef FIXED_POINT
/*epi3.r = -16384;*/ /* Unused */
epi3.i = -28378;
#else
epi3 = st->twiddles[fstride*m];
#endif
for (i=0;i<N;i++)
{
Fout = Fout_beg + i*mm;
tw1=tw2=st->twiddles;
/* For non-custom modes, m is guaranteed to be a multiple of 4. */
k=m;
do {
C_MUL(scratch[1],Fout[m] , *tw1);
C_MUL(scratch[2],Fout[m2] , *tw2);
C_ADD(scratch[3],scratch[1],scratch[2]);
C_SUB(scratch[0],scratch[1],scratch[2]);
tw1 += fstride;
tw2 += fstride*2;
Fout[m].r = SUB32_ovflw(Fout->r, HALF_OF(scratch[3].r));
Fout[m].i = SUB32_ovflw(Fout->i, HALF_OF(scratch[3].i));
C_MULBYSCALAR( scratch[0] , epi3.i );
C_ADDTO(*Fout,scratch[3]);
Fout[m2].r = ADD32_ovflw(Fout[m].r, scratch[0].i);
Fout[m2].i = SUB32_ovflw(Fout[m].i, scratch[0].r);
Fout[m].r = SUB32_ovflw(Fout[m].r, scratch[0].i);
Fout[m].i = ADD32_ovflw(Fout[m].i, scratch[0].r);
++Fout;
} while(--k);
}
}
#ifndef OVERRIDE_kf_bfly5
static void kf_bfly5(
kiss_fft_cpx * Fout,
const size_t fstride,
const kiss_fft_state *st,
int m,
int N,
int mm
)
{
kiss_fft_cpx *Fout0,*Fout1,*Fout2,*Fout3,*Fout4;
int i, u;
kiss_fft_cpx scratch[13];
const kiss_twiddle_cpx *tw;
kiss_twiddle_cpx ya,yb;
kiss_fft_cpx * Fout_beg = Fout;
#ifdef FIXED_POINT
ya.r = 10126;
ya.i = -31164;
yb.r = -26510;
yb.i = -19261;
#else
ya = st->twiddles[fstride*m];
yb = st->twiddles[fstride*2*m];
#endif
tw=st->twiddles;
for (i=0;i<N;i++)
{
Fout = Fout_beg + i*mm;
Fout0=Fout;
Fout1=Fout0+m;
Fout2=Fout0+2*m;
Fout3=Fout0+3*m;
Fout4=Fout0+4*m;
/* For non-custom modes, m is guaranteed to be a multiple of 4. */
for ( u=0; u<m; ++u ) {
scratch[0] = *Fout0;
C_MUL(scratch[1] ,*Fout1, tw[u*fstride]);
C_MUL(scratch[2] ,*Fout2, tw[2*u*fstride]);
C_MUL(scratch[3] ,*Fout3, tw[3*u*fstride]);
C_MUL(scratch[4] ,*Fout4, tw[4*u*fstride]);
C_ADD( scratch[7],scratch[1],scratch[4]);
C_SUB( scratch[10],scratch[1],scratch[4]);
C_ADD( scratch[8],scratch[2],scratch[3]);
C_SUB( scratch[9],scratch[2],scratch[3]);
Fout0->r = ADD32_ovflw(Fout0->r, ADD32_ovflw(scratch[7].r, scratch[8].r));
Fout0->i = ADD32_ovflw(Fout0->i, ADD32_ovflw(scratch[7].i, scratch[8].i));
scratch[5].r = ADD32_ovflw(scratch[0].r, ADD32_ovflw(S_MUL(scratch[7].r,ya.r), S_MUL(scratch[8].r,yb.r)));
scratch[5].i = ADD32_ovflw(scratch[0].i, ADD32_ovflw(S_MUL(scratch[7].i,ya.r), S_MUL(scratch[8].i,yb.r)));
scratch[6].r = ADD32_ovflw(S_MUL(scratch[10].i,ya.i), S_MUL(scratch[9].i,yb.i));
scratch[6].i = NEG32_ovflw(ADD32_ovflw(S_MUL(scratch[10].r,ya.i), S_MUL(scratch[9].r,yb.i)));
C_SUB(*Fout1,scratch[5],scratch[6]);
C_ADD(*Fout4,scratch[5],scratch[6]);
scratch[11].r = ADD32_ovflw(scratch[0].r, ADD32_ovflw(S_MUL(scratch[7].r,yb.r), S_MUL(scratch[8].r,ya.r)));
scratch[11].i = ADD32_ovflw(scratch[0].i, ADD32_ovflw(S_MUL(scratch[7].i,yb.r), S_MUL(scratch[8].i,ya.r)));
scratch[12].r = SUB32_ovflw(S_MUL(scratch[9].i,ya.i), S_MUL(scratch[10].i,yb.i));
scratch[12].i = SUB32_ovflw(S_MUL(scratch[10].r,yb.i), S_MUL(scratch[9].r,ya.i));
C_ADD(*Fout2,scratch[11],scratch[12]);
C_SUB(*Fout3,scratch[11],scratch[12]);
++Fout0;++Fout1;++Fout2;++Fout3;++Fout4;
}
}
}
#endif /* OVERRIDE_kf_bfly5 */
#endif
#ifdef CUSTOM_MODES
static
void compute_bitrev_table(
int Fout,
opus_int16 *f,
const size_t fstride,
int in_stride,
opus_int16 * factors,
const kiss_fft_state *st
)
{
const int p=*factors++; /* the radix */
const int m=*factors++; /* stage's fft length/p */
/*printf ("fft %d %d %d %d %d %d\n", p*m, m, p, s2, fstride*in_stride, N);*/
if (m==1)
{
int j;
for (j=0;j<p;j++)
{
*f = Fout+j;
f += fstride*in_stride;
}
} else {
int j;
for (j=0;j<p;j++)
{
compute_bitrev_table( Fout , f, fstride*p, in_stride, factors,st);
f += fstride*in_stride;
Fout += m;
}
}
}
/* facbuf is populated by p1,m1,p2,m2, ...
where
p[i] * m[i] = m[i-1]
m0 = n */
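/* Illustration (not in the original source): for n == 960 the radixes come
out as 4,4,4,3,5 and are then reversed so the radix-4 stages run last,
giving facbuf = { 5,192, 3,64, 4,16, 4,4, 4,1 }; note p[i]*m[i] == m[i-1]
at every stage (5*192 == 960, 3*64 == 192, ...). */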
static
int kf_factor(int n,opus_int16 * facbuf)
{
int p=4;
int i;
int stages=0;
int nbak = n;
/*factor out powers of 4, powers of 2, then any remaining primes */
do {
while (n % p) {
switch (p) {
case 4: p = 2; break;
case 2: p = 3; break;
default: p += 2; break;
}
if (p>32000 || (opus_int32)p*(opus_int32)p > n)
p = n; /* no more factors, skip to end */
}
n /= p;
#ifdef RADIX_TWO_ONLY
if (p!=2 && p != 4)
#else
if (p>5)
#endif
{
return 0;
}
facbuf[2*stages] = p;
if (p==2 && stages > 1)
{
facbuf[2*stages] = 4;
facbuf[2] = 2;
}
stages++;
} while (n > 1);
n = nbak;
/* Reverse the order to get the radix 4 at the end, so we can use the
fast degenerate case. It turns out that reversing the order also
improves the noise behaviour. */
for (i=0;i<stages/2;i++)
{
int tmp;
tmp = facbuf[2*i];
facbuf[2*i] = facbuf[2*(stages-i-1)];
facbuf[2*(stages-i-1)] = tmp;
}
for (i=0;i<stages;i++)
{
n /= facbuf[2*i];
facbuf[2*i+1] = n;
}
return 1;
}
static void compute_twiddles(kiss_twiddle_cpx *twiddles, int nfft)
{
int i;
#ifdef FIXED_POINT
for (i=0;i<nfft;++i) {
opus_val32 phase = -i;
kf_cexp2(twiddles+i, DIV32(SHL32(phase,17),nfft));
}
#else
for (i=0;i<nfft;++i) {
const double pi=3.14159265358979323846264338327;
double phase = ( -2*pi /nfft ) * i;
kf_cexp(twiddles+i, phase );
}
#endif
}
int opus_fft_alloc_arch_c(kiss_fft_state *st) {
(void)st;
return 0;
}
/*
*
* Allocates all necessary storage space for the fft and ifft.
* The return value is a contiguous block of memory. As such,
* It can be freed with free().
* */
kiss_fft_state *opus_fft_alloc_twiddles(int nfft,void * mem,size_t * lenmem,
const kiss_fft_state *base, int arch)
{
kiss_fft_state *st=NULL;
size_t memneeded = sizeof(struct kiss_fft_state); /* twiddle factors*/
if ( lenmem==NULL ) {
st = ( kiss_fft_state*)KISS_FFT_MALLOC( memneeded );
}else{
if (mem != NULL && *lenmem >= memneeded)
st = (kiss_fft_state*)mem;
*lenmem = memneeded;
}
if (st) {
opus_int16 *bitrev;
kiss_twiddle_cpx *twiddles;
st->nfft=nfft;
#ifdef FIXED_POINT
st->scale_shift = celt_ilog2(st->nfft);
if (st->nfft == 1<<st->scale_shift)
st->scale = Q15ONE;
else
st->scale = (1073741824+st->nfft/2)/st->nfft>>(15-st->scale_shift);
#else
st->scale = 1.f/nfft;
#endif
if (base != NULL)
{
st->twiddles = base->twiddles;
st->shift = 0;
while (st->shift < 32 && nfft<<st->shift != base->nfft)
st->shift++;
if (st->shift>=32)
goto fail;
} else {
st->twiddles = twiddles = (kiss_twiddle_cpx*)KISS_FFT_MALLOC(sizeof(kiss_twiddle_cpx)*nfft);
compute_twiddles(twiddles, nfft);
st->shift = -1;
}
if (!kf_factor(nfft,st->factors))
{
goto fail;
}
/* bitrev */
st->bitrev = bitrev = (opus_int16*)KISS_FFT_MALLOC(sizeof(opus_int16)*nfft);
if (st->bitrev==NULL)
goto fail;
compute_bitrev_table(0, bitrev, 1,1, st->factors,st);
/* Initialize architecture specific fft parameters */
if (opus_fft_alloc_arch(st, arch))
goto fail;
}
return st;
fail:
opus_fft_free(st, arch);
return NULL;
}
kiss_fft_state *opus_fft_alloc(int nfft,void * mem,size_t * lenmem, int arch)
{
return opus_fft_alloc_twiddles(nfft, mem, lenmem, NULL, arch);
}
void opus_fft_free_arch_c(kiss_fft_state *st) {
(void)st;
}
void opus_fft_free(const kiss_fft_state *cfg, int arch)
{
if (cfg)
{
opus_fft_free_arch((kiss_fft_state *)cfg, arch);
opus_free((opus_int16*)cfg->bitrev);
if (cfg->shift < 0)
opus_free((kiss_twiddle_cpx*)cfg->twiddles);
opus_free((kiss_fft_state*)cfg);
}
}
#endif /* CUSTOM_MODES */
void opus_fft_impl(const kiss_fft_state *st,kiss_fft_cpx *fout)
{
int m2, m;
int p;
int L;
int fstride[MAXFACTORS];
int i;
int shift;
/* st->shift can be -1 */
shift = st->shift>0 ? st->shift : 0;
fstride[0] = 1;
L=0;
do {
p = st->factors[2*L];
m = st->factors[2*L+1];
fstride[L+1] = fstride[L]*p;
L++;
} while(m!=1);
m = st->factors[2*L-1];
for (i=L-1;i>=0;i--)
{
if (i!=0)
m2 = st->factors[2*i-1];
else
m2 = 1;
switch (st->factors[2*i])
{
case 2:
kf_bfly2(fout, m, fstride[i]);
break;
case 4:
kf_bfly4(fout,fstride[i]<<shift,st,m, fstride[i], m2);
break;
#ifndef RADIX_TWO_ONLY
case 3:
kf_bfly3(fout,fstride[i]<<shift,st,m, fstride[i], m2);
break;
case 5:
kf_bfly5(fout,fstride[i]<<shift,st,m, fstride[i], m2);
break;
#endif
}
m = m2;
}
}
void opus_fft_c(const kiss_fft_state *st,const kiss_fft_cpx *fin,kiss_fft_cpx *fout)
{
int i;
opus_val16 scale;
#ifdef FIXED_POINT
/* Allows us to scale with MULT16_32_Q16(), which is faster than
MULT16_32_Q15() on ARM. */
int scale_shift = st->scale_shift-1;
#endif
scale = st->scale;
celt_assert2 (fin != fout, "In-place FFT not supported");
/* Bit-reverse the input */
for (i=0;i<st->nfft;i++)
{
kiss_fft_cpx x = fin[i];
fout[st->bitrev[i]].r = SHR32(MULT16_32_Q16(scale, x.r), scale_shift);
fout[st->bitrev[i]].i = SHR32(MULT16_32_Q16(scale, x.i), scale_shift);
}
opus_fft_impl(st, fout);
}
void opus_ifft_c(const kiss_fft_state *st,const kiss_fft_cpx *fin,kiss_fft_cpx *fout)
{
int i;
celt_assert2 (fin != fout, "In-place FFT not supported");
/* Bit-reverse the input */
for (i=0;i<st->nfft;i++)
fout[st->bitrev[i]] = fin[i];
for (i=0;i<st->nfft;i++)
fout[i].i = -fout[i].i;
opus_fft_impl(st, fout);
for (i=0;i<st->nfft;i++)
fout[i].i = -fout[i].i;
}
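/* Illustrative round-trip sketch (assumptions: float build, hypothetical
   buffers; not part of the source). opus_fft_c() applies the 1/nfft scale on
   the forward transform while opus_ifft_c() is unnormalized, so the pair
   reproduces the input. */
#if 0
kiss_fft_state *st = opus_fft_alloc(480, NULL, NULL, arch);
kiss_fft_cpx freq[480], back[480];
opus_fft_c(st, time_in, freq);   /* forward, scaled by 1/480 */
opus_ifft_c(st, freq, back);     /* inverse, unscaled: back == time_in */
opus_fft_free(st, arch);
#endif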

View file

@ -0,0 +1,203 @@
/*Copyright (c) 2003-2004, Mark Borgerding
Lots of modifications by Jean-Marc Valin
Copyright (c) 2005-2007, Xiph.Org Foundation
Copyright (c) 2008, Xiph.Org Foundation, CSIRO
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.*/
#ifndef KISS_FFT_H
#define KISS_FFT_H
#include <stdlib.h>
#include <math.h>
#include "arch.h"
#define opus_alloc(x) malloc(x)
#define opus_free(x) free(x)
#ifdef __cplusplus
extern "C" {
#endif
#ifdef USE_SIMD
# include <xmmintrin.h>
# define kiss_fft_scalar __m128
#define KISS_FFT_MALLOC(nbytes) memalign(16,nbytes)
#else
#define KISS_FFT_MALLOC opus_alloc
#endif
#ifdef FIXED_POINT
#include "arch.h"
# define kiss_fft_scalar opus_int32
# define kiss_twiddle_scalar opus_int16
#else
# ifndef kiss_fft_scalar
/* default is float */
# define kiss_fft_scalar float
# define kiss_twiddle_scalar float
# define KF_SUFFIX _celt_single
# endif
#endif
typedef struct {
kiss_fft_scalar r;
kiss_fft_scalar i;
}kiss_fft_cpx;
typedef struct {
kiss_twiddle_scalar r;
kiss_twiddle_scalar i;
}kiss_twiddle_cpx;
#define MAXFACTORS 8
/* e.g. an fft of length 128 has 4 factors
as far as kissfft is concerned
4*4*4*2
*/
typedef struct arch_fft_state{
int is_supported;
void *priv;
} arch_fft_state;
typedef struct kiss_fft_state{
int nfft;
opus_val16 scale;
#ifdef FIXED_POINT
int scale_shift;
#endif
int shift;
opus_int16 factors[2*MAXFACTORS];
const opus_int16 *bitrev;
const kiss_twiddle_cpx *twiddles;
arch_fft_state *arch_fft;
} kiss_fft_state;
#if defined(HAVE_ARM_NE10)
#include "arm/fft_arm.h"
#endif
/*typedef struct kiss_fft_state* kiss_fft_cfg;*/
/**
* opus_fft_alloc
*
* Initialize an FFT (or IFFT) algorithm's cfg/state buffer.
*
* typical usage: kiss_fft_state *st = opus_fft_alloc(1024, NULL, NULL, arch);
*
* The return value from fft_alloc is a cfg buffer used internally
* by the fft routine or NULL.
*
* If lenmem is NULL, then opus_fft_alloc will allocate a cfg buffer using malloc.
* The returned value should be free()d when done to avoid memory leaks.
*
* The state can be placed in a user supplied buffer 'mem':
* If lenmem is not NULL and mem is not NULL and *lenmem is large enough,
* then the function places the cfg in mem and the size used in *lenmem
* and returns mem.
*
* If lenmem is not NULL and ( mem is NULL or *lenmem is not large enough),
* then the function returns NULL and places the minimum cfg
* buffer size in *lenmem.
* */
kiss_fft_state *opus_fft_alloc_twiddles(int nfft,void * mem,size_t * lenmem, const kiss_fft_state *base, int arch);
kiss_fft_state *opus_fft_alloc(int nfft,void * mem,size_t * lenmem, int arch);
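/* A minimal sketch of the user-supplied-buffer path described above
   (hypothetical names, not part of the source); note the twiddle and bitrev
   tables are still heap-allocated internally: */
#if 0
size_t len = 0;
opus_fft_alloc(1024, NULL, &len, arch);   /* returns NULL, stores size in len */
void *mem = malloc(len);
kiss_fft_state *st = opus_fft_alloc(1024, mem, &len, arch);
#endif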
/**
* opus_fft(cfg,in_out_buf)
*
* Perform an FFT on a complex input buffer.
* for a forward FFT,
* fin should be f[0] , f[1] , ... ,f[nfft-1]
* fout will be F[0] , F[1] , ... ,F[nfft-1]
* Note that each element is complex and can be accessed like
f[k].r and f[k].i
* */
void opus_fft_c(const kiss_fft_state *cfg,const kiss_fft_cpx *fin,kiss_fft_cpx *fout);
void opus_ifft_c(const kiss_fft_state *cfg,const kiss_fft_cpx *fin,kiss_fft_cpx *fout);
void opus_fft_impl(const kiss_fft_state *st,kiss_fft_cpx *fout);
void opus_ifft_impl(const kiss_fft_state *st,kiss_fft_cpx *fout);
void opus_fft_free(const kiss_fft_state *cfg, int arch);
void opus_fft_free_arch_c(kiss_fft_state *st);
int opus_fft_alloc_arch_c(kiss_fft_state *st);
#if !defined(OVERRIDE_OPUS_FFT)
/* Is run-time CPU detection enabled on this platform? */
#if defined(OPUS_HAVE_RTCD) && (defined(HAVE_ARM_NE10))
extern int (*const OPUS_FFT_ALLOC_ARCH_IMPL[OPUS_ARCHMASK+1])(
kiss_fft_state *st);
#define opus_fft_alloc_arch(_st, arch) \
((*OPUS_FFT_ALLOC_ARCH_IMPL[(arch)&OPUS_ARCHMASK])(_st))
extern void (*const OPUS_FFT_FREE_ARCH_IMPL[OPUS_ARCHMASK+1])(
kiss_fft_state *st);
#define opus_fft_free_arch(_st, arch) \
((*OPUS_FFT_FREE_ARCH_IMPL[(arch)&OPUS_ARCHMASK])(_st))
extern void (*const OPUS_FFT[OPUS_ARCHMASK+1])(const kiss_fft_state *cfg,
const kiss_fft_cpx *fin, kiss_fft_cpx *fout);
#define opus_fft(_cfg, _fin, _fout, arch) \
((*OPUS_FFT[(arch)&OPUS_ARCHMASK])(_cfg, _fin, _fout))
extern void (*const OPUS_IFFT[OPUS_ARCHMASK+1])(const kiss_fft_state *cfg,
const kiss_fft_cpx *fin, kiss_fft_cpx *fout);
#define opus_ifft(_cfg, _fin, _fout, arch) \
((*OPUS_IFFT[(arch)&OPUS_ARCHMASK])(_cfg, _fin, _fout))
#else /* else for if defined(OPUS_HAVE_RTCD) && (defined(HAVE_ARM_NE10)) */
#define opus_fft_alloc_arch(_st, arch) \
((void)(arch), opus_fft_alloc_arch_c(_st))
#define opus_fft_free_arch(_st, arch) \
((void)(arch), opus_fft_free_arch_c(_st))
#define opus_fft(_cfg, _fin, _fout, arch) \
((void)(arch), opus_fft_c(_cfg, _fin, _fout))
#define opus_ifft(_cfg, _fin, _fout, arch) \
((void)(arch), opus_ifft_c(_cfg, _fin, _fout))
#endif /* end if defined(OPUS_HAVE_RTCD) && (defined(HAVE_ARM_NE10)) */
#endif /* end if !defined(OVERRIDE_OPUS_FFT) */
#ifdef __cplusplus
}
#endif
#endif

View file

@ -0,0 +1,159 @@
/* (C) COPYRIGHT 1994-2002 Xiph.Org Foundation */
/* Modified by Jean-Marc Valin */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/* opus_types.h based on ogg_types.h from libogg */
/**
@file opus_types.h
@brief Opus reference implementation types
*/
#ifndef OPUS_TYPES_H
#define OPUS_TYPES_H
/* Use the real stdint.h if it's there (taken from Paul Hsieh's pstdint.h) */
#if (defined(__STDC__) && __STDC__ && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L) || (defined(__GNUC__) && (defined(_STDINT_H) || defined(_STDINT_H_)) || defined (HAVE_STDINT_H))
#include <stdint.h>
typedef int16_t opus_int16;
typedef uint16_t opus_uint16;
typedef int32_t opus_int32;
typedef uint32_t opus_uint32;
#elif defined(_WIN32)
# if defined(__CYGWIN__)
# include <_G_config.h>
typedef _G_int32_t opus_int32;
typedef _G_uint32_t opus_uint32;
typedef _G_int16 opus_int16;
typedef _G_uint16 opus_uint16;
# elif defined(__MINGW32__)
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
# elif defined(__MWERKS__)
typedef int opus_int32;
typedef unsigned int opus_uint32;
typedef short opus_int16;
typedef unsigned short opus_uint16;
# else
/* MSVC/Borland */
typedef __int32 opus_int32;
typedef unsigned __int32 opus_uint32;
typedef __int16 opus_int16;
typedef unsigned __int16 opus_uint16;
# endif
#elif defined(__MACOS__)
# include <sys/types.h>
typedef SInt16 opus_int16;
typedef UInt16 opus_uint16;
typedef SInt32 opus_int32;
typedef UInt32 opus_uint32;
#elif (defined(__APPLE__) && defined(__MACH__)) /* MacOS X Framework build */
# include <sys/types.h>
typedef int16_t opus_int16;
typedef u_int16_t opus_uint16;
typedef int32_t opus_int32;
typedef u_int32_t opus_uint32;
#elif defined(__BEOS__)
/* Be */
# include <inttypes.h>
typedef int16 opus_int16;
typedef u_int16 opus_uint16;
typedef int32_t opus_int32;
typedef u_int32_t opus_uint32;
#elif defined (__EMX__)
/* OS/2 GCC */
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
#elif defined (DJGPP)
/* DJGPP */
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
#elif defined(R5900)
/* PS2 EE */
typedef int opus_int32;
typedef unsigned opus_uint32;
typedef short opus_int16;
typedef unsigned short opus_uint16;
#elif defined(__SYMBIAN32__)
/* Symbian GCC */
typedef signed short opus_int16;
typedef unsigned short opus_uint16;
typedef signed int opus_int32;
typedef unsigned int opus_uint32;
#elif defined(CONFIG_TI_C54X) || defined (CONFIG_TI_C55X)
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef long opus_int32;
typedef unsigned long opus_uint32;
#elif defined(CONFIG_TI_C6X)
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
#else
/* Give up, take a reasonable guess */
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
#endif
#define opus_int int /* used for counters etc; at least 16 bits */
#define opus_int64 long long
#define opus_int8 signed char
#define opus_uint unsigned int /* used for counters etc; at least 16 bits */
#define opus_uint64 unsigned long long
#define opus_uint8 unsigned char
#endif /* OPUS_TYPES_H */

View file

@ -0,0 +1,526 @@
/* Copyright (c) 2007-2008 CSIRO
Copyright (c) 2007-2009 Xiph.Org Foundation
Written by Jean-Marc Valin */
/**
@file pitch.c
@brief Pitch analysis
*/
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "pitch.h"
#include "common.h"
//#include "modes.h"
//#include "stack_alloc.h"
//#include "mathops.h"
#include "celt_lpc.h"
#include <math.h>
static void find_best_pitch(opus_val32 *xcorr, opus_val16 *y, int len,
int max_pitch, int *best_pitch
#ifdef FIXED_POINT
, int yshift, opus_val32 maxcorr
#endif
)
{
int i, j;
opus_val32 Syy=1;
opus_val16 best_num[2];
opus_val32 best_den[2];
#ifdef FIXED_POINT
int xshift;
xshift = celt_ilog2(maxcorr)-14;
#endif
best_num[0] = -1;
best_num[1] = -1;
best_den[0] = 0;
best_den[1] = 0;
best_pitch[0] = 0;
best_pitch[1] = 1;
for (j=0;j<len;j++)
Syy = ADD32(Syy, SHR32(MULT16_16(y[j],y[j]), yshift));
for (i=0;i<max_pitch;i++)
{
if (xcorr[i]>0)
{
opus_val16 num;
opus_val32 xcorr16;
xcorr16 = EXTRACT16(VSHR32(xcorr[i], xshift));
#ifndef FIXED_POINT
/* Considering the range of xcorr16, this should avoid both underflows
and overflows (inf) when squaring xcorr16 */
xcorr16 *= 1e-12f;
#endif
num = MULT16_16_Q15(xcorr16,xcorr16);
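/* Compare num/Syy against the stored best_num/best_den ratios by
   cross-multiplying, so no division is needed per candidate lag. */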
if (MULT16_32_Q15(num,best_den[1]) > MULT16_32_Q15(best_num[1],Syy))
{
if (MULT16_32_Q15(num,best_den[0]) > MULT16_32_Q15(best_num[0],Syy))
{
best_num[1] = best_num[0];
best_den[1] = best_den[0];
best_pitch[1] = best_pitch[0];
best_num[0] = num;
best_den[0] = Syy;
best_pitch[0] = i;
} else {
best_num[1] = num;
best_den[1] = Syy;
best_pitch[1] = i;
}
}
}
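/* Slide the energy window by one sample: add the sample entering the
   window, drop the one leaving it. */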
Syy += SHR32(MULT16_16(y[i+len],y[i+len]),yshift) - SHR32(MULT16_16(y[i],y[i]),yshift);
Syy = MAX32(1, Syy);
}
}
static void celt_fir5(const opus_val16 *x,
const opus_val16 *num,
opus_val16 *y,
int N,
opus_val16 *mem)
{
int i;
opus_val16 num0, num1, num2, num3, num4;
opus_val32 mem0, mem1, mem2, mem3, mem4;
num0=num[0];
num1=num[1];
num2=num[2];
num3=num[3];
num4=num[4];
mem0=mem[0];
mem1=mem[1];
mem2=mem[2];
mem3=mem[3];
mem4=mem[4];
for (i=0;i<N;i++)
{
opus_val32 sum = SHL32(EXTEND32(x[i]), SIG_SHIFT);
sum = MAC16_16(sum,num0,mem0);
sum = MAC16_16(sum,num1,mem1);
sum = MAC16_16(sum,num2,mem2);
sum = MAC16_16(sum,num3,mem3);
sum = MAC16_16(sum,num4,mem4);
mem4 = mem3;
mem3 = mem2;
mem2 = mem1;
mem1 = mem0;
mem0 = x[i];
y[i] = ROUND16(sum, SIG_SHIFT);
}
mem[0]=mem0;
mem[1]=mem1;
mem[2]=mem2;
mem[3]=mem3;
mem[4]=mem4;
}
void pitch_downsample(celt_sig *x[], opus_val16 *x_lp,
int len, int C)
{
int i;
opus_val32 ac[5];
opus_val16 tmp=Q15ONE;
opus_val16 lpc[4], mem[5]={0,0,0,0,0};
opus_val16 lpc2[5];
opus_val16 c1 = QCONST16(.8f,15);
#ifdef FIXED_POINT
int shift;
opus_val32 maxabs = celt_maxabs32(x[0], len);
if (C==2)
{
opus_val32 maxabs_1 = celt_maxabs32(x[1], len);
maxabs = MAX32(maxabs, maxabs_1);
}
if (maxabs<1)
maxabs=1;
shift = celt_ilog2(maxabs)-10;
if (shift<0)
shift=0;
if (C==2)
shift++;
#endif
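/* 2:1 decimation with a [1 2 1]/4 smoothing kernel centered on the even
   samples (descriptive comment, not in the original source). */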
for (i=1;i<len>>1;i++)
x_lp[i] = SHR32(HALF32(HALF32(x[0][(2*i-1)]+x[0][(2*i+1)])+x[0][2*i]), shift);
x_lp[0] = SHR32(HALF32(HALF32(x[0][1])+x[0][0]), shift);
if (C==2)
{
for (i=1;i<len>>1;i++)
x_lp[i] += SHR32(HALF32(HALF32(x[1][(2*i-1)]+x[1][(2*i+1)])+x[1][2*i]), shift);
x_lp[0] += SHR32(HALF32(HALF32(x[1][1])+x[1][0]), shift);
}
_celt_autocorr(x_lp, ac, NULL, 0,
4, len>>1);
/* Noise floor -40 dB */
#ifdef FIXED_POINT
ac[0] += SHR32(ac[0],13);
#else
ac[0] *= 1.0001f;
#endif
/* Lag windowing */
for (i=1;i<=4;i++)
{
/*ac[i] *= exp(-.5*(2*M_PI*.002*i)*(2*M_PI*.002*i));*/
#ifdef FIXED_POINT
ac[i] -= MULT16_32_Q15(2*i*i, ac[i]);
#else
ac[i] -= ac[i]*(.008f*i)*(.008f*i);
#endif
}
_celt_lpc(lpc, ac, 4);
for (i=0;i<4;i++)
{
tmp = MULT16_16_Q15(QCONST16(.9f,15), tmp);
lpc[i] = MULT16_16_Q15(lpc[i], tmp);
}
/* Add a zero */
lpc2[0] = lpc[0] + QCONST16(.8f,SIG_SHIFT);
lpc2[1] = lpc[1] + MULT16_16_Q15(c1,lpc[0]);
lpc2[2] = lpc[2] + MULT16_16_Q15(c1,lpc[1]);
lpc2[3] = lpc[3] + MULT16_16_Q15(c1,lpc[2]);
lpc2[4] = MULT16_16_Q15(c1,lpc[3]);
celt_fir5(x_lp, lpc2, x_lp, len>>1, mem);
}
void celt_pitch_xcorr(const opus_val16 *_x, const opus_val16 *_y,
opus_val32 *xcorr, int len, int max_pitch)
{
#if 0 /* This is a simple version of the pitch correlation that should work
well on DSPs like Blackfin and TI C5x/C6x */
int i, j;
#ifdef FIXED_POINT
opus_val32 maxcorr=1;
#endif
for (i=0;i<max_pitch;i++)
{
opus_val32 sum = 0;
for (j=0;j<len;j++)
sum = MAC16_16(sum, _x[j], _y[i+j]);
xcorr[i] = sum;
#ifdef FIXED_POINT
maxcorr = MAX32(maxcorr, sum);
#endif
}
#ifdef FIXED_POINT
return maxcorr;
#endif
#else /* Unrolled version of the pitch correlation -- runs faster on x86 and ARM */
int i;
/*The EDSP version requires that max_pitch is at least 1, and that _x is
32-bit aligned.
Since it's hard to put asserts in assembly, put them here.*/
#ifdef FIXED_POINT
opus_val32 maxcorr=1;
#endif
celt_assert(max_pitch>0);
celt_assert((((unsigned char *)_x-(unsigned char *)NULL)&3)==0);
for (i=0;i<max_pitch-3;i+=4)
{
opus_val32 sum[4]={0,0,0,0};
xcorr_kernel(_x, _y+i, sum, len);
xcorr[i]=sum[0];
xcorr[i+1]=sum[1];
xcorr[i+2]=sum[2];
xcorr[i+3]=sum[3];
#ifdef FIXED_POINT
sum[0] = MAX32(sum[0], sum[1]);
sum[2] = MAX32(sum[2], sum[3]);
sum[0] = MAX32(sum[0], sum[2]);
maxcorr = MAX32(maxcorr, sum[0]);
#endif
}
/* In case max_pitch isn't a multiple of 4, do non-unrolled version. */
for (;i<max_pitch;i++)
{
opus_val32 sum;
sum = celt_inner_prod(_x, _y+i, len);
xcorr[i] = sum;
#ifdef FIXED_POINT
maxcorr = MAX32(maxcorr, sum);
#endif
}
#ifdef FIXED_POINT
return maxcorr;
#endif
#endif
}
void pitch_search(const opus_val16 *x_lp, opus_val16 *y,
int len, int max_pitch, int *pitch)
{
int i, j;
int lag;
int best_pitch[2]={0,0};
#ifdef FIXED_POINT
opus_val32 maxcorr;
opus_val32 xmax, ymax;
int shift=0;
#endif
int offset;
celt_assert(len>0);
celt_assert(max_pitch>0);
lag = len+max_pitch;
opus_val16 x_lp4[len>>2];
opus_val16 y_lp4[lag>>2];
opus_val32 xcorr[max_pitch>>1];
/* Downsample by 2 again */
for (j=0;j<len>>2;j++)
x_lp4[j] = x_lp[2*j];
for (j=0;j<lag>>2;j++)
y_lp4[j] = y[2*j];
#ifdef FIXED_POINT
xmax = celt_maxabs16(x_lp4, len>>2);
ymax = celt_maxabs16(y_lp4, lag>>2);
shift = celt_ilog2(MAX32(1, MAX32(xmax, ymax)))-11;
if (shift>0)
{
for (j=0;j<len>>2;j++)
x_lp4[j] = SHR16(x_lp4[j], shift);
for (j=0;j<lag>>2;j++)
y_lp4[j] = SHR16(y_lp4[j], shift);
/* Use double the shift for a MAC */
shift *= 2;
} else {
shift = 0;
}
#endif
/* Coarse search with 4x decimation */
#ifdef FIXED_POINT
maxcorr =
#endif
celt_pitch_xcorr(x_lp4, y_lp4, xcorr, len>>2, max_pitch>>2);
find_best_pitch(xcorr, y_lp4, len>>2, max_pitch>>2, best_pitch
#ifdef FIXED_POINT
, 0, maxcorr
#endif
);
/* Finer search with 2x decimation */
#ifdef FIXED_POINT
maxcorr=1;
#endif
for (i=0;i<max_pitch>>1;i++)
{
opus_val32 sum;
xcorr[i] = 0;
if (abs(i-2*best_pitch[0])>2 && abs(i-2*best_pitch[1])>2)
continue;
#ifdef FIXED_POINT
sum = 0;
for (j=0;j<len>>1;j++)
sum += SHR32(MULT16_16(x_lp[j],y[i+j]), shift);
#else
sum = celt_inner_prod(x_lp, y+i, len>>1);
#endif
xcorr[i] = MAX32(-1, sum);
#ifdef FIXED_POINT
maxcorr = MAX32(maxcorr, sum);
#endif
}
find_best_pitch(xcorr, y, len>>1, max_pitch>>1, best_pitch
#ifdef FIXED_POINT
, shift+1, maxcorr
#endif
);
/* Refine by pseudo-interpolation */
if (best_pitch[0]>0 && best_pitch[0]<(max_pitch>>1)-1)
{
opus_val32 a, b, c;
a = xcorr[best_pitch[0]-1];
b = xcorr[best_pitch[0]];
c = xcorr[best_pitch[0]+1];
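/* Cheap pseudo-interpolation (descriptive comment, not in the original
   source): nudge the lag by +-1 when the correlation is clearly skewed
   toward one neighbour of the peak. */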
if ((c-a) > MULT16_32_Q15(QCONST16(.7f,15),b-a))
offset = 1;
else if ((a-c) > MULT16_32_Q15(QCONST16(.7f,15),b-c))
offset = -1;
else
offset = 0;
} else {
offset = 0;
}
*pitch = 2*best_pitch[0]-offset;
}
#ifdef FIXED_POINT
static opus_val16 compute_pitch_gain(opus_val32 xy, opus_val32 xx, opus_val32 yy)
{
opus_val32 x2y2;
int sx, sy, shift;
opus_val32 g;
opus_val16 den;
if (xy == 0 || xx == 0 || yy == 0)
return 0;
sx = celt_ilog2(xx)-14;
sy = celt_ilog2(yy)-14;
shift = sx + sy;
x2y2 = SHR32(MULT16_16(VSHR32(xx, sx), VSHR32(yy, sy)), 14);
if (shift & 1) {
if (x2y2 < 32768)
{
x2y2 <<= 1;
shift--;
} else {
x2y2 >>= 1;
shift++;
}
}
den = celt_rsqrt_norm(x2y2);
g = MULT16_32_Q15(den, xy);
g = VSHR32(g, (shift>>1)-1);
return EXTRACT16(MIN32(g, Q15ONE));
}
#else
static opus_val16 compute_pitch_gain(opus_val32 xy, opus_val32 xx, opus_val32 yy)
{
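/* Normalized cross-correlation xy/sqrt(xx*yy); the +1 inside the square
   root guards against a zero denominator. */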
return xy/sqrt(1+xx*yy);
}
#endif
static const int second_check[16] = {0, 0, 3, 2, 3, 2, 5, 2, 3, 2, 3, 2, 5, 2, 3, 2};
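/* Illustrative note (not in the original source): when testing a candidate
   sub-period T1 ~= T0/k, a second lag T1b ~= second_check[k]*T0/k is also
   measured; a genuine period correlates at both lags, while a spurious one
   usually fails the second check. E.g. for k=3 the extra lag is 2*T0/3
   (k==2 is handled specially in the loop below, using T0+T1 or T0). */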
opus_val16 remove_doubling(opus_val16 *x, int maxperiod, int minperiod,
int N, int *T0_, int prev_period, opus_val16 prev_gain)
{
int k, i, T, T0;
opus_val16 g, g0;
opus_val16 pg;
opus_val32 xy,xx,yy,xy2;
opus_val32 xcorr[3];
opus_val32 best_xy, best_yy;
int offset;
int minperiod0;
minperiod0 = minperiod;
maxperiod /= 2;
minperiod /= 2;
*T0_ /= 2;
prev_period /= 2;
N /= 2;
x += maxperiod;
if (*T0_>=maxperiod)
*T0_=maxperiod-1;
T = T0 = *T0_;
opus_val32 yy_lookup[maxperiod+1];
dual_inner_prod(x, x, x-T0, N, &xx, &xy);
yy_lookup[0] = xx;
yy=xx;
for (i=1;i<=maxperiod;i++)
{
yy = yy+MULT16_16(x[-i],x[-i])-MULT16_16(x[N-i],x[N-i]);
yy_lookup[i] = MAX32(0, yy);
}
yy = yy_lookup[T0];
best_xy = xy;
best_yy = yy;
g = g0 = compute_pitch_gain(xy, xx, yy);
/* Look for any pitch at T/k */
for (k=2;k<=15;k++)
{
int T1, T1b;
opus_val16 g1;
opus_val16 cont=0;
opus_val16 thresh;
T1 = (2*T0+k)/(2*k);
if (T1 < minperiod)
break;
/* Look for another strong correlation at T1b */
if (k==2)
{
if (T1+T0>maxperiod)
T1b = T0;
else
T1b = T0+T1;
} else
{
T1b = (2*second_check[k]*T0+k)/(2*k);
}
dual_inner_prod(x, &x[-T1], &x[-T1b], N, &xy, &xy2);
xy = HALF32(xy + xy2);
yy = HALF32(yy_lookup[T1] + yy_lookup[T1b]);
g1 = compute_pitch_gain(xy, xx, yy);
if (abs(T1-prev_period)<=1)
cont = prev_gain;
else if (abs(T1-prev_period)<=2 && 5*k*k < T0)
cont = HALF16(prev_gain);
else
cont = 0;
thresh = MAX16(QCONST16(.3f,15), MULT16_16_Q15(QCONST16(.7f,15),g0)-cont);
/* Bias against very high pitch (very short period) to avoid false-positives
due to short-term correlation */
if (T1<3*minperiod)
thresh = MAX16(QCONST16(.4f,15), MULT16_16_Q15(QCONST16(.85f,15),g0)-cont);
else if (T1<2*minperiod)
thresh = MAX16(QCONST16(.5f,15), MULT16_16_Q15(QCONST16(.9f,15),g0)-cont);
if (g1 > thresh)
{
best_xy = xy;
best_yy = yy;
T = T1;
g = g1;
}
}
best_xy = MAX32(0, best_xy);
if (best_yy <= best_xy)
pg = Q15ONE;
else
pg = best_xy/(best_yy+1);
for (k=0;k<3;k++)
xcorr[k] = celt_inner_prod(x, x-(T+k-1), N);
if ((xcorr[2]-xcorr[0]) > MULT16_32_Q15(QCONST16(.7f,15),xcorr[1]-xcorr[0]))
offset = 1;
else if ((xcorr[0]-xcorr[2]) > MULT16_32_Q15(QCONST16(.7f,15),xcorr[1]-xcorr[2]))
offset = -1;
else
offset = 0;
if (pg > g)
pg = g;
*T0_ = 2*T+offset;
if (*T0_<minperiod0)
*T0_=minperiod0;
return pg;
}

View file

@ -0,0 +1,149 @@
/* Copyright (c) 2007-2008 CSIRO
Copyright (c) 2007-2009 Xiph.Org Foundation
Written by Jean-Marc Valin */
/**
@file pitch.h
@brief Pitch analysis
*/
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef PITCH_H
#define PITCH_H
//#include "modes.h"
//#include "cpu_support.h"
#include "arch.h"
void pitch_downsample(celt_sig *x[], opus_val16 *x_lp,
int len, int C);
void pitch_search(const opus_val16 *x_lp, opus_val16 *y,
int len, int max_pitch, int *pitch);
opus_val16 remove_doubling(opus_val16 *x, int maxperiod, int minperiod,
int N, int *T0, int prev_period, opus_val16 prev_gain);
/* OPT: This is the kernel you really want to optimize. It gets used a lot
by the prefilter and by the PLC. */
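/* Illustrative note (not in the original source): the kernel below computes
   four consecutive lags per pass; y_0..y_3 form a rotating register window
   over y, so each loaded sample is reused by all four accumulators. */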
static OPUS_INLINE void xcorr_kernel(const opus_val16 * x, const opus_val16 * y, opus_val32 sum[4], int len)
{
int j;
opus_val16 y_0, y_1, y_2, y_3;
celt_assert(len>=3);
y_3=0; /* gcc doesn't realize that y_3 can't be used uninitialized */
y_0=*y++;
y_1=*y++;
y_2=*y++;
for (j=0;j<len-3;j+=4)
{
opus_val16 tmp;
tmp = *x++;
y_3=*y++;
sum[0] = MAC16_16(sum[0],tmp,y_0);
sum[1] = MAC16_16(sum[1],tmp,y_1);
sum[2] = MAC16_16(sum[2],tmp,y_2);
sum[3] = MAC16_16(sum[3],tmp,y_3);
tmp=*x++;
y_0=*y++;
sum[0] = MAC16_16(sum[0],tmp,y_1);
sum[1] = MAC16_16(sum[1],tmp,y_2);
sum[2] = MAC16_16(sum[2],tmp,y_3);
sum[3] = MAC16_16(sum[3],tmp,y_0);
tmp=*x++;
y_1=*y++;
sum[0] = MAC16_16(sum[0],tmp,y_2);
sum[1] = MAC16_16(sum[1],tmp,y_3);
sum[2] = MAC16_16(sum[2],tmp,y_0);
sum[3] = MAC16_16(sum[3],tmp,y_1);
tmp=*x++;
y_2=*y++;
sum[0] = MAC16_16(sum[0],tmp,y_3);
sum[1] = MAC16_16(sum[1],tmp,y_0);
sum[2] = MAC16_16(sum[2],tmp,y_1);
sum[3] = MAC16_16(sum[3],tmp,y_2);
}
if (j++<len)
{
opus_val16 tmp = *x++;
y_3=*y++;
sum[0] = MAC16_16(sum[0],tmp,y_0);
sum[1] = MAC16_16(sum[1],tmp,y_1);
sum[2] = MAC16_16(sum[2],tmp,y_2);
sum[3] = MAC16_16(sum[3],tmp,y_3);
}
if (j++<len)
{
opus_val16 tmp=*x++;
y_0=*y++;
sum[0] = MAC16_16(sum[0],tmp,y_1);
sum[1] = MAC16_16(sum[1],tmp,y_2);
sum[2] = MAC16_16(sum[2],tmp,y_3);
sum[3] = MAC16_16(sum[3],tmp,y_0);
}
if (j<len)
{
opus_val16 tmp=*x++;
y_1=*y++;
sum[0] = MAC16_16(sum[0],tmp,y_2);
sum[1] = MAC16_16(sum[1],tmp,y_3);
sum[2] = MAC16_16(sum[2],tmp,y_0);
sum[3] = MAC16_16(sum[3],tmp,y_1);
}
}
static OPUS_INLINE void dual_inner_prod(const opus_val16 *x, const opus_val16 *y01, const opus_val16 *y02,
int N, opus_val32 *xy1, opus_val32 *xy2)
{
int i;
opus_val32 xy01=0;
opus_val32 xy02=0;
for (i=0;i<N;i++)
{
xy01 = MAC16_16(xy01, x[i], y01[i]);
xy02 = MAC16_16(xy02, x[i], y02[i]);
}
*xy1 = xy01;
*xy2 = xy02;
}
/*We make sure a C version is always available for cases where the overhead of
vectorization and passing around an arch flag aren't worth it.*/
static OPUS_INLINE opus_val32 celt_inner_prod(const opus_val16 *x,
const opus_val16 *y, int N)
{
int i;
opus_val32 xy=0;
for (i=0;i<N;i++)
xy = MAC16_16(xy, x[i], y[i]);
return xy;
}
void celt_pitch_xcorr(const opus_val16 *_x, const opus_val16 *_y,
opus_val32 *xcorr, int len, int max_pitch);
#endif

View file

@ -0,0 +1,178 @@
/* Copyright (c) 2008-2011 Octasic Inc.
2012-2017 Jean-Marc Valin */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <math.h>
#include "opus_types.h"
#include "common.h"
#include "arch.h"
#include "tansig_table.h"
#include "rnn.h"
#include "rnn_data.h"
#include <stdio.h>
static OPUS_INLINE float tansig_approx(float x)
{
int i;
float y, dy;
float sign=1;
/* Tests are reversed to catch NaNs */
if (!(x<8))
return 1;
if (!(x>-8))
return -1;
#ifndef FIXED_POINT
/* Another check in case of -ffast-math */
if (celt_isnan(x))
return 0;
#endif
if (x<0)
{
x=-x;
sign=-1;
}
i = (int)floor(.5f+25*x);
x -= .04f*i;
y = tansig_table[i];
dy = 1-y*y;
y = y + x*dy*(1 - y*x);
return sign*y;
}
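/* Illustrative note (not in the original source): the table is sampled
   every 0.04 (i = round(25*x)), and the residual x is corrected with
   y + x*dy*(1 - y*x), using tanh'(x) = 1 - tanh(x)^2 for dy. */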
static OPUS_INLINE float sigmoid_approx(float x)
{
return .5 + .5*tansig_approx(.5*x);
}
static OPUS_INLINE float relu(float x)
{
return x < 0 ? 0 : x;
}
void compute_dense(const DenseLayer *layer, float *output, const float *input)
{
int i, j;
int N, M;
int stride;
M = layer->nb_inputs;
N = layer->nb_neurons;
stride = N;
for (i=0;i<N;i++)
{
/* Compute update gate. */
float sum = layer->bias[i];
for (j=0;j<M;j++)
sum += layer->input_weights[j*stride + i]*input[j];
output[i] = WEIGHTS_SCALE*sum;
}
if (layer->activation == ACTIVATION_SIGMOID) {
for (i=0;i<N;i++)
output[i] = sigmoid_approx(output[i]);
} else if (layer->activation == ACTIVATION_TANH) {
for (i=0;i<N;i++)
output[i] = tansig_approx(output[i]);
} else if (layer->activation == ACTIVATION_RELU) {
for (i=0;i<N;i++)
output[i] = relu(output[i]);
} else {
*(int*)0=0;
}
}
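/* Illustrative note (not in the original source): compute_gru() below is a
   standard GRU update,
     z  = sigmoid(W_z x + U_z h + b_z)      (update gate)
     r  = sigmoid(W_r x + U_r h + b_r)      (reset gate)
     h~ = act(W_h x + U_h (r . h) + b_h)    (candidate state)
     h' = z . h + (1 - z) . h~
   with the three gates packed into one weight matrix of stride 3*N. */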
void compute_gru(const GRULayer *gru, float *state, const float *input)
{
int i, j;
int N, M;
int stride;
float z[MAX_NEURONS];
float r[MAX_NEURONS];
float h[MAX_NEURONS];
M = gru->nb_inputs;
N = gru->nb_neurons;
stride = 3*N;
for (i=0;i<N;i++)
{
/* Compute update gate. */
float sum = gru->bias[i];
for (j=0;j<M;j++)
sum += gru->input_weights[j*stride + i]*input[j];
for (j=0;j<N;j++)
sum += gru->recurrent_weights[j*stride + i]*state[j];
z[i] = sigmoid_approx(WEIGHTS_SCALE*sum);
}
for (i=0;i<N;i++)
{
/* Compute reset gate. */
float sum = gru->bias[N + i];
for (j=0;j<M;j++)
sum += gru->input_weights[N + j*stride + i]*input[j];
for (j=0;j<N;j++)
sum += gru->recurrent_weights[N + j*stride + i]*state[j];
r[i] = sigmoid_approx(WEIGHTS_SCALE*sum);
}
for (i=0;i<N;i++)
{
/* Compute output. */
float sum = gru->bias[2*N + i];
for (j=0;j<M;j++)
sum += gru->input_weights[2*N + j*stride + i]*input[j];
for (j=0;j<N;j++)
sum += gru->recurrent_weights[2*N + j*stride + i]*state[j]*r[j];
if (gru->activation == ACTIVATION_SIGMOID) sum = sigmoid_approx(WEIGHTS_SCALE*sum);
else if (gru->activation == ACTIVATION_TANH) sum = tansig_approx(WEIGHTS_SCALE*sum);
else if (gru->activation == ACTIVATION_RELU) sum = relu(WEIGHTS_SCALE*sum);
else *(int*)0=0;
h[i] = z[i]*state[i] + (1-z[i])*sum;
}
for (i=0;i<N;i++)
state[i] = h[i];
}
#define INPUT_SIZE 42
void compute_rnn(RNNState *rnn, float *gains, float *vad, const float *input) {
int i;
float dense_out[MAX_NEURONS];
float noise_input[MAX_NEURONS*3];
float denoise_input[MAX_NEURONS*3];
compute_dense(rnn->model->input_dense, dense_out, input);
compute_gru(rnn->model->vad_gru, rnn->vad_gru_state, dense_out);
compute_dense(rnn->model->vad_output, vad, rnn->vad_gru_state);
for (i=0;i<rnn->model->input_dense_size;i++) noise_input[i] = dense_out[i];
for (i=0;i<rnn->model->vad_gru_size;i++) noise_input[i+rnn->model->input_dense_size] = rnn->vad_gru_state[i];
for (i=0;i<INPUT_SIZE;i++) noise_input[i+rnn->model->input_dense_size+rnn->model->vad_gru_size] = input[i];
compute_gru(rnn->model->noise_gru, rnn->noise_gru_state, noise_input);
for (i=0;i<rnn->model->vad_gru_size;i++) denoise_input[i] = rnn->vad_gru_state[i];
for (i=0;i<rnn->model->noise_gru_size;i++) denoise_input[i+rnn->model->vad_gru_size] = rnn->noise_gru_state[i];
for (i=0;i<INPUT_SIZE;i++) denoise_input[i+rnn->model->vad_gru_size+rnn->model->noise_gru_size] = input[i];
compute_gru(rnn->model->denoise_gru, rnn->denoise_gru_state, denoise_input);
compute_dense(rnn->model->denoise_output, gains, rnn->denoise_gru_state);
}
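/* Illustrative data flow (inferred from the code above, not part of the
   source):
     input(42) -> input_dense -> vad_gru -> vad_output (VAD probability)
     [dense_out | vad_gru_state | input] -> noise_gru
     [vad_gru_state | noise_gru_state | input] -> denoise_gru
         -> denoise_output (per-band gains) */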

View file

@ -0,0 +1,69 @@
/* Copyright (c) 2017 Jean-Marc Valin */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef RNN_H_
#define RNN_H_
#include "rnnoise.h"
#include "opus_types.h"
#define WEIGHTS_SCALE (1.f/256)
#define MAX_NEURONS 128
#define ACTIVATION_TANH 0
#define ACTIVATION_SIGMOID 1
#define ACTIVATION_RELU 2
typedef signed char rnn_weight;
typedef struct {
const rnn_weight *bias;
const rnn_weight *input_weights;
int nb_inputs;
int nb_neurons;
int activation;
} DenseLayer;
typedef struct {
const rnn_weight *bias;
const rnn_weight *input_weights;
const rnn_weight *recurrent_weights;
int nb_inputs;
int nb_neurons;
int activation;
} GRULayer;
typedef struct RNNState RNNState;
void compute_dense(const DenseLayer *layer, float *output, const float *input);
void compute_gru(const GRULayer *gru, float *state, const float *input);
void compute_rnn(RNNState *rnn, float *gains, float *vad, const float *input);
#endif /* RNN_H_ */

File diff suppressed because it is too large

View file

@ -0,0 +1,34 @@
#ifndef RNN_DATA_H
#define RNN_DATA_H
#include "rnn.h"
struct RNNModel {
int input_dense_size;
const DenseLayer *input_dense;
int vad_gru_size;
const GRULayer *vad_gru;
int noise_gru_size;
const GRULayer *noise_gru;
int denoise_gru_size;
const GRULayer *denoise_gru;
int denoise_output_size;
const DenseLayer *denoise_output;
int vad_output_size;
const DenseLayer *vad_output;
};
struct RNNState {
const RNNModel *model;
float *vad_gru_state;
float *noise_gru_state;
float *denoise_gru_state;
};
#endif

View file

@ -0,0 +1,168 @@
/* Copyright (c) 2018 Gregor Richards */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include "rnn.h"
#include "rnn_data.h"
#include "rnnoise.h"
/* Although these values are the same as in rnn.h, we make them separate to
* avoid accidentally burning internal values into a file format */
#define F_ACTIVATION_TANH 0
#define F_ACTIVATION_SIGMOID 1
#define F_ACTIVATION_RELU 2
RNNModel *rnnoise_model_from_file(FILE *f)
{
int i, in;
if (fscanf(f, "rnnoise-nu model file version %d\n", &in) != 1 || in != 1)
return NULL;
RNNModel *ret = calloc(1, sizeof(RNNModel));
if (!ret)
return NULL;
#define ALLOC_LAYER(type, name) \
type *name; \
name = calloc(1, sizeof(type)); \
if (!name) { \
rnnoise_model_free(ret); \
return NULL; \
} \
ret->name = name
ALLOC_LAYER(DenseLayer, input_dense);
ALLOC_LAYER(GRULayer, vad_gru);
ALLOC_LAYER(GRULayer, noise_gru);
ALLOC_LAYER(GRULayer, denoise_gru);
ALLOC_LAYER(DenseLayer, denoise_output);
ALLOC_LAYER(DenseLayer, vad_output);
#define INPUT_VAL(name) do { \
if (fscanf(f, "%d", &in) != 1 || in < 0 || in > 128) { \
rnnoise_model_free(ret); \
return NULL; \
} \
name = in; \
} while (0)
#define INPUT_ACTIVATION(name) do { \
int activation; \
INPUT_VAL(activation); \
switch (activation) { \
case F_ACTIVATION_SIGMOID: \
name = ACTIVATION_SIGMOID; \
break; \
case F_ACTIVATION_RELU: \
name = ACTIVATION_RELU; \
break; \
default: \
name = ACTIVATION_TANH; \
} \
} while (0)
#define INPUT_ARRAY(name, len) do { \
rnn_weight *values = malloc((len) * sizeof(rnn_weight)); \
if (!values) { \
rnnoise_model_free(ret); \
return NULL; \
} \
name = values; \
for (i = 0; i < (len); i++) { \
if (fscanf(f, "%d", &in) != 1) { \
rnnoise_model_free(ret); \
return NULL; \
} \
values[i] = in; \
} \
} while (0)
#define INPUT_DENSE(name) do { \
INPUT_VAL(name->nb_inputs); \
INPUT_VAL(name->nb_neurons); \
ret->name ## _size = name->nb_neurons; \
INPUT_ACTIVATION(name->activation); \
INPUT_ARRAY(name->input_weights, name->nb_inputs * name->nb_neurons); \
INPUT_ARRAY(name->bias, name->nb_neurons); \
} while (0)
#define INPUT_GRU(name) do { \
INPUT_VAL(name->nb_inputs); \
INPUT_VAL(name->nb_neurons); \
ret->name ## _size = name->nb_neurons; \
INPUT_ACTIVATION(name->activation); \
INPUT_ARRAY(name->input_weights, name->nb_inputs * name->nb_neurons * 3); \
INPUT_ARRAY(name->recurrent_weights, name->nb_neurons * name->nb_neurons * 3); \
INPUT_ARRAY(name->bias, name->nb_neurons * 3); \
} while (0)
INPUT_DENSE(input_dense);
INPUT_GRU(vad_gru);
INPUT_GRU(noise_gru);
INPUT_GRU(denoise_gru);
INPUT_DENSE(denoise_output);
INPUT_DENSE(vad_output);
return ret;
}
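/* A minimal usage sketch (hypothetical file name, not part of the source): */
#if 0
FILE *f = fopen("model.rnnn", "r");
RNNModel *model = f ? rnnoise_model_from_file(f) : NULL;
if (f) fclose(f);
/* ... run the model ... */
rnnoise_model_free(model);   /* safe on NULL */
#endif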
void rnnoise_model_free(RNNModel *model)
{
#define FREE_MAYBE(ptr) do { if (ptr) free(ptr); } while (0)
#define FREE_DENSE(name) do { \
if (model->name) { \
free((void *) model->name->input_weights); \
free((void *) model->name->bias); \
free((void *) model->name); \
} \
} while (0)
#define FREE_GRU(name) do { \
if (model->name) { \
free((void *) model->name->input_weights); \
free((void *) model->name->recurrent_weights); \
free((void *) model->name->bias); \
free((void *) model->name); \
} \
} while (0)
if (!model)
return;
FREE_DENSE(input_dense);
FREE_GRU(vad_gru);
FREE_GRU(noise_gru);
FREE_GRU(denoise_gru);
FREE_DENSE(denoise_output);
FREE_DENSE(vad_output);
free(model);
}

View file

@ -0,0 +1,45 @@
/* This file is auto-generated by gen_tables */
static const float tansig_table[201] = {
0.000000f, 0.039979f, 0.079830f, 0.119427f, 0.158649f,
0.197375f, 0.235496f, 0.272905f, 0.309507f, 0.345214f,
0.379949f, 0.413644f, 0.446244f, 0.477700f, 0.507977f,
0.537050f, 0.564900f, 0.591519f, 0.616909f, 0.641077f,
0.664037f, 0.685809f, 0.706419f, 0.725897f, 0.744277f,
0.761594f, 0.777888f, 0.793199f, 0.807569f, 0.821040f,
0.833655f, 0.845456f, 0.856485f, 0.866784f, 0.876393f,
0.885352f, 0.893698f, 0.901468f, 0.908698f, 0.915420f,
0.921669f, 0.927473f, 0.932862f, 0.937863f, 0.942503f,
0.946806f, 0.950795f, 0.954492f, 0.957917f, 0.961090f,
0.964028f, 0.966747f, 0.969265f, 0.971594f, 0.973749f,
0.975743f, 0.977587f, 0.979293f, 0.980869f, 0.982327f,
0.983675f, 0.984921f, 0.986072f, 0.987136f, 0.988119f,
0.989027f, 0.989867f, 0.990642f, 0.991359f, 0.992020f,
0.992631f, 0.993196f, 0.993718f, 0.994199f, 0.994644f,
0.995055f, 0.995434f, 0.995784f, 0.996108f, 0.996407f,
0.996682f, 0.996937f, 0.997172f, 0.997389f, 0.997590f,
0.997775f, 0.997946f, 0.998104f, 0.998249f, 0.998384f,
0.998508f, 0.998623f, 0.998728f, 0.998826f, 0.998916f,
0.999000f, 0.999076f, 0.999147f, 0.999213f, 0.999273f,
0.999329f, 0.999381f, 0.999428f, 0.999472f, 0.999513f,
0.999550f, 0.999585f, 0.999617f, 0.999646f, 0.999673f,
0.999699f, 0.999722f, 0.999743f, 0.999763f, 0.999781f,
0.999798f, 0.999813f, 0.999828f, 0.999841f, 0.999853f,
0.999865f, 0.999875f, 0.999885f, 0.999893f, 0.999902f,
0.999909f, 0.999916f, 0.999923f, 0.999929f, 0.999934f,
0.999939f, 0.999944f, 0.999948f, 0.999952f, 0.999956f,
0.999959f, 0.999962f, 0.999965f, 0.999968f, 0.999970f,
0.999973f, 0.999975f, 0.999977f, 0.999978f, 0.999980f,
0.999982f, 0.999983f, 0.999984f, 0.999986f, 0.999987f,
0.999988f, 0.999989f, 0.999990f, 0.999990f, 0.999991f,
0.999992f, 0.999992f, 0.999993f, 0.999994f, 0.999994f,
0.999994f, 0.999995f, 0.999995f, 0.999996f, 0.999996f,
0.999996f, 0.999997f, 0.999997f, 0.999997f, 0.999997f,
0.999997f, 0.999998f, 0.999998f, 0.999998f, 0.999998f,
0.999998f, 0.999998f, 0.999999f, 0.999999f, 0.999999f,
0.999999f, 0.999999f, 0.999999f, 0.999999f, 0.999999f,
0.999999f, 0.999999f, 0.999999f, 0.999999f, 0.999999f,
1.000000f, 1.000000f, 1.000000f, 1.000000f, 1.000000f,
1.000000f, 1.000000f, 1.000000f, 1.000000f, 1.000000f,
1.000000f,
};

View file

@ -27,7 +27,9 @@ int FormatPriority(const VideoFormat &format, const std::vector<std::string> &pr
static const auto kCodecs = {
    std::string(cricket::kAv1CodecName),
    std::string(cricket::kVp9CodecName),
#ifndef WEBRTC_DISABLE_H265
    std::string(cricket::kH265CodecName),
#endif
    std::string(cricket::kH264CodecName),
    std::string(cricket::kVp8CodecName),
};

View file

@ -11,8 +11,8 @@ namespace {
constexpr auto kSingleMessagePacketSeqBit = (uint32_t(1) << 31);
constexpr auto kMessageRequiresAckSeqBit = (uint32_t(1) << 30);
constexpr auto kMaxAllowedCounter = std::numeric_limits<uint32_t>::max()
    & ~kSingleMessagePacketSeqBit
    & ~kMessageRequiresAckSeqBit;

static_assert(kMaxAllowedCounter < kSingleMessagePacketSeqBit, "bad");
static_assert(kMaxAllowedCounter < kMessageRequiresAckSeqBit, "bad");
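// Illustrative note (not in the original source): a 32-bit seq packs
// [bit 31: single-message packet][bit 30: requires ack][bits 0..29: counter],
// so CounterFromSeq() simply masks off the two flag bits.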
@ -35,143 +35,199 @@ static constexpr uint8_t kAckId = uint8_t(-1);
static constexpr uint8_t kEmptyId = uint8_t(-2);

void AppendSeq(rtc::CopyOnWriteBuffer &buffer, uint32_t seq) {
    const auto bytes = rtc::HostToNetwork32(seq);
    buffer.AppendData(reinterpret_cast<const char*>(&bytes), sizeof(bytes));
}

void WriteSeq(void *bytes, uint32_t seq) {
    *reinterpret_cast<uint32_t*>(bytes) = rtc::HostToNetwork32(seq);
}

uint32_t ReadSeq(const void *bytes) {
    return rtc::NetworkToHost32(*reinterpret_cast<const uint32_t*>(bytes));
}

uint32_t CounterFromSeq(uint32_t seq) {
    return seq & ~kSingleMessagePacketSeqBit & ~kMessageRequiresAckSeqBit;
}

absl::nullopt_t LogError(
    const char *message,
    const std::string &additional = std::string()) {
    RTC_LOG(LS_ERROR) << "ERROR! " << message << additional;
    return absl::nullopt;
}

bool ConstTimeIsDifferent(const void *a, const void *b, size_t size) {
    auto ca = reinterpret_cast<const char*>(a);
    auto cb = reinterpret_cast<const char*>(b);
    volatile auto different = false;
    for (const auto ce = ca + size; ca != ce; ++ca, ++cb) {
        different |= (*ca != *cb);
    }
    return different;
}

} // namespace

EncryptedConnection::EncryptedConnection(
    Type type,
    const EncryptionKey &key,
    std::function<void(int delayMs, int cause)> requestSendService) :
_type(type),
_key(key),
_delayIntervals(DelayIntervalsByType(type)),
_requestSendService(std::move(requestSendService)) {
    assert(_key.value != nullptr);
}
absl::optional<rtc::CopyOnWriteBuffer> EncryptedConnection::encryptRawPacket(rtc::CopyOnWriteBuffer const &buffer) {
    auto seq = ++_counter;

    rtc::ByteBufferWriter writer;
    writer.WriteUInt32(seq);

    auto result = rtc::CopyOnWriteBuffer();
    result.AppendData(writer.Data(), writer.Length());
    result.AppendData(buffer);

    auto encryptedPacket = encryptPrepared(result);

    rtc::CopyOnWriteBuffer encryptedBuffer;
    encryptedBuffer.AppendData(encryptedPacket.bytes.data(), encryptedPacket.bytes.size());
    return encryptedBuffer;
}
absl::optional<rtc::CopyOnWriteBuffer> EncryptedConnection::decryptRawPacket(rtc::CopyOnWriteBuffer const &buffer) {
    if (buffer.size() < 21 || buffer.size() > kMaxIncomingPacketSize) {
        return absl::nullopt;
    }

    const auto x = (_key.isOutgoing ? 8 : 0) + (_type == Type::Signaling ? 128 : 0);
    const auto key = _key.value->data();
    const auto msgKey = reinterpret_cast<const uint8_t*>(buffer.data());
    const auto encryptedData = msgKey + 16;
    const auto dataSize = buffer.size() - 16;

    auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x);

    auto decryptionBuffer = rtc::Buffer(dataSize);
    AesProcessCtr(
        MemorySpan{ encryptedData, dataSize },
        decryptionBuffer.data(),
        std::move(aesKeyIv));

    const auto msgKeyLarge = ConcatSHA256(
        MemorySpan{ key + 88 + x, 32 },
        MemorySpan{ decryptionBuffer.data(), decryptionBuffer.size() });
    if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) {
        return absl::nullopt;
    }

    const auto incomingSeq = ReadSeq(decryptionBuffer.data());
    const auto incomingCounter = CounterFromSeq(incomingSeq);
    if (!registerIncomingCounter(incomingCounter)) {
        // We've received that packet already.
        return absl::nullopt;
    }

    rtc::CopyOnWriteBuffer resultBuffer;
    resultBuffer.AppendData(decryptionBuffer.data() + 4, decryptionBuffer.size() - 4);
    return resultBuffer;
}
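// Illustrative wire layout (inferred from the two methods above, not part of
// the source): [16-byte msgKey | AES-CTR(seq(4) || payload)], minimum 21
// bytes. msgKey is the middle 16 bytes of SHA256(keySlice || plaintext) and
// doubles as the integrity check after decryption; the key slice at
// key + 88 + x depends on direction and connection type.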
auto EncryptedConnection::prepareForSending(const Message &message)
-> absl::optional<EncryptedPacket> {
    const auto messageRequiresAck = absl::visit([](const auto &data) {
        return std::decay_t<decltype(data)>::kRequiresAck;
    }, message.data);

    // If message requires ack, then we can't serialize it as a single
    // message packet, because later it may be sent as a part of big packet.
    const auto singleMessagePacket = !haveAdditionalMessages() && !messageRequiresAck;
    const auto maybeSeq = computeNextSeq(messageRequiresAck, singleMessagePacket);
    if (!maybeSeq) {
        return absl::nullopt;
    }
    const auto seq = *maybeSeq;
    auto serialized = SerializeMessageWithSeq(message, seq, singleMessagePacket);
    if (!enoughSpaceInPacket(serialized, 0)) {
        return LogError("Too large packet: ", std::to_string(serialized.size()));
    }
    const auto notYetAckedCopy = messageRequiresAck
        ? serialized
        : rtc::CopyOnWriteBuffer();
    if (!messageRequiresAck) {
        appendAdditionalMessages(serialized);
        return encryptPrepared(serialized);
    }
    const auto type = uint8_t(serialized.cdata()[4]);
    const auto sendEnqueued = !_myNotYetAckedMessages.empty();
    if (sendEnqueued) {
        // All requiring ack messages should always be sent in order within
        // one packet, starting with the least not-yet-acked one.
        // So if we still have those, we send an empty message with all
        // requiring ack messages that will fit in correct order.
        RTC_LOG(LS_INFO) << logHeader()
            << "Enqueue SEND:type" << type << "#" << CounterFromSeq(seq);
    } else {
        RTC_LOG(LS_INFO) << logHeader()
            << "Add SEND:type" << type << "#" << CounterFromSeq(seq);
        appendAdditionalMessages(serialized);
    }
    _myNotYetAckedMessages.push_back({ notYetAckedCopy, rtc::TimeMillis() });
    if (!sendEnqueued) {
        return encryptPrepared(serialized);
    }
    for (auto &queued : _myNotYetAckedMessages) {
        queued.lastSent = 0;
    }
    return prepareForSendingService(0);
}
auto EncryptedConnection::prepareForSendingService(int cause)
-> absl::optional<EncryptedPacket> {
    if (cause == kServiceCauseAcks) {
        _sendAcksTimerActive = false;
    } else if (cause == kServiceCauseResend) {
        _resendTimerActive = false;
    }
    if (!haveAdditionalMessages()) {
        return absl::nullopt;
    }
    const auto messageRequiresAck = false;
    const auto singleMessagePacket = false;
    const auto seq = computeNextSeq(messageRequiresAck, singleMessagePacket);
    if (!seq) {
        return absl::nullopt;
    }
    auto serialized = SerializeEmptyMessageWithSeq(*seq);
    assert(enoughSpaceInPacket(serialized, 0));

    RTC_LOG(LS_INFO) << logHeader()
        << "SEND:empty#" << CounterFromSeq(*seq);

    appendAdditionalMessages(serialized);
    return encryptPrepared(serialized);
}
bool EncryptedConnection::haveAdditionalMessages() const {
    return !_myNotYetAckedMessages.empty() || !_acksToSendSeqs.empty();
}
absl::optional<uint32_t> EncryptedConnection::computeNextSeq(
    bool messageRequiresAck,
    bool singleMessagePacket) {
    if (messageRequiresAck && _myNotYetAckedMessages.size() >= kNotAckedMessagesLimit) {
        return LogError("Too many not ACKed messages.");
    } else if (_counter == kMaxAllowedCounter) {
        return LogError("Outgoing packet limit reached.");
    }
    return (++_counter)
        | (singleMessagePacket ? kSingleMessagePacketSeqBit : 0)
        | (messageRequiresAck ? kMessageRequiresAckSeqBit : 0);
}
size_t EncryptedConnection::packetLimit() const {
@ -184,337 +240,337 @@ size_t EncryptedConnection::packetLimit() const {
}
bool EncryptedConnection::enoughSpaceInPacket(const rtc::CopyOnWriteBuffer &buffer, size_t amount) const {
    const auto limit = packetLimit();
    return (amount < limit)
        && (16 + buffer.size() + amount <= limit);
}
void EncryptedConnection::appendAcksToSend(rtc::CopyOnWriteBuffer &buffer) {
    auto i = _acksToSendSeqs.begin();
    while ((i != _acksToSendSeqs.end())
        && enoughSpaceInPacket(
            buffer,
            kAckSerializedSize)) {
        RTC_LOG(LS_INFO) << logHeader()
            << "Add ACK#" << CounterFromSeq(*i);
        AppendSeq(buffer, *i);
        buffer.AppendData(&kAckId, 1);
        ++i;
    }
    _acksToSendSeqs.erase(_acksToSendSeqs.begin(), i);
    for (const auto seq : _acksToSendSeqs) {
        RTC_LOG(LS_INFO) << logHeader()
            << "Skip ACK#" << CounterFromSeq(seq)
            << " (no space, length: " << kAckSerializedSize << ", already: " << buffer.size() << ")";
    }
}
size_t EncryptedConnection::fullNotAckedLength() const {
    assert(_myNotYetAckedMessages.size() < kNotAckedMessagesLimit);

    auto result = size_t();
    for (const auto &message : _myNotYetAckedMessages) {
        result += message.data.size();
    }
    return result;
}
void EncryptedConnection::appendAdditionalMessages(rtc::CopyOnWriteBuffer &buffer) {
    appendAcksToSend(buffer);

    if (_myNotYetAckedMessages.empty()) {
        return;
    }
    const auto now = rtc::TimeMillis();
    auto someWereNotAdded = false;
    for (auto &resending : _myNotYetAckedMessages) {
        const auto sent = resending.lastSent;
        const auto when = sent
            ? (sent + _delayIntervals.minDelayBeforeMessageResend)
            : 0;
        assert(resending.data.size() >= 5);
        const auto counter = CounterFromSeq(ReadSeq(resending.data.data()));
        const auto type = uint8_t(resending.data.data()[4]);
        if (when > now) {
            RTC_LOG(LS_INFO) << logHeader()
                << "Skip RESEND:type" << type << "#" << counter
                << " (wait " << (when - now) << "ms).";
            break;
        } else if (enoughSpaceInPacket(buffer, resending.data.size())) {
            RTC_LOG(LS_INFO) << logHeader()
                << "Add RESEND:type" << type << "#" << counter;
            buffer.AppendData(resending.data);
            resending.lastSent = now;
        } else {
            RTC_LOG(LS_INFO) << logHeader()
                << "Skip RESEND:type" << type << "#" << counter
                << " (no space, length: " << resending.data.size() << ", already: " << buffer.size() << ")";
break; break;
} }
} }
if (!_resendTimerActive) { if (!_resendTimerActive) {
_resendTimerActive = true; _resendTimerActive = true;
_requestSendService( _requestSendService(
_delayIntervals.maxDelayBeforeMessageResend, _delayIntervals.maxDelayBeforeMessageResend,
kServiceCauseResend); kServiceCauseResend);
} }
} }
auto EncryptedConnection::encryptPrepared(const rtc::CopyOnWriteBuffer &buffer) auto EncryptedConnection::encryptPrepared(const rtc::CopyOnWriteBuffer &buffer)
-> EncryptedPacket { -> EncryptedPacket {
auto result = EncryptedPacket(); auto result = EncryptedPacket();
result.counter = CounterFromSeq(ReadSeq(buffer.data())); result.counter = CounterFromSeq(ReadSeq(buffer.data()));
result.bytes.resize(16 + buffer.size()); result.bytes.resize(16 + buffer.size());
const auto x = (_key.isOutgoing ? 0 : 8) + (_type == Type::Signaling ? 128 : 0); const auto x = (_key.isOutgoing ? 0 : 8) + (_type == Type::Signaling ? 128 : 0);
const auto key = _key.value->data(); const auto key = _key.value->data();
const auto msgKeyLarge = ConcatSHA256( const auto msgKeyLarge = ConcatSHA256(
MemorySpan{ key + 88 + x, 32 }, MemorySpan{ key + 88 + x, 32 },
MemorySpan{ buffer.data(), buffer.size() }); MemorySpan{ buffer.data(), buffer.size() });
const auto msgKey = result.bytes.data(); const auto msgKey = result.bytes.data();
memcpy(msgKey, msgKeyLarge.data() + 8, 16); memcpy(msgKey, msgKeyLarge.data() + 8, 16);
auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x); auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x);
AesProcessCtr( AesProcessCtr(
MemorySpan{ buffer.data(), buffer.size() }, MemorySpan{ buffer.data(), buffer.size() },
result.bytes.data() + 16, result.bytes.data() + 16,
std::move(aesKeyIv)); std::move(aesKeyIv));
return result; return result;
} }
bool EncryptedConnection::registerIncomingCounter(uint32_t incomingCounter) { bool EncryptedConnection::registerIncomingCounter(uint32_t incomingCounter) {
auto &list = _largestIncomingCounters; auto &list = _largestIncomingCounters;
const auto position = std::lower_bound(list.begin(), list.end(), incomingCounter); const auto position = std::lower_bound(list.begin(), list.end(), incomingCounter);
const auto largest = list.empty() ? 0 : list.back(); const auto largest = list.empty() ? 0 : list.back();
if (position != list.end() && *position == incomingCounter) { if (position != list.end() && *position == incomingCounter) {
// The packet is in the list already. // The packet is in the list already.
return false; return false;
} else if (incomingCounter + kKeepIncomingCountersCount <= largest) { } else if (incomingCounter + kKeepIncomingCountersCount <= largest) {
// The packet is too old. // The packet is too old.
return false; return false;
} }
const auto eraseTill = std::find_if(list.begin(), list.end(), [&](uint32_t counter) { const auto eraseTill = std::find_if(list.begin(), list.end(), [&](uint32_t counter) {
return (counter + kKeepIncomingCountersCount > incomingCounter); return (counter + kKeepIncomingCountersCount > incomingCounter);
}); });
const auto eraseCount = eraseTill - list.begin(); const auto eraseCount = eraseTill - list.begin();
const auto positionIndex = (position - list.begin()) - eraseCount; const auto positionIndex = (position - list.begin()) - eraseCount;
list.erase(list.begin(), eraseTill); list.erase(list.begin(), eraseTill);
assert(positionIndex >= 0 && positionIndex <= list.size()); assert(positionIndex >= 0 && positionIndex <= list.size());
list.insert(list.begin() + positionIndex, incomingCounter); list.insert(list.begin() + positionIndex, incomingCounter);
return true; return true;
} }
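registerIncomingCounter() above is a replay filter: it keeps a sorted window of recent counters, rejects duplicates and anything older than the window, and evicts entries the new counter pushes out. The same logic in standalone form (a sketch; `keep` plays the role of kKeepIncomingCountersCount, which is defined elsewhere in the file):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Sliding-window replay check over a sorted vector of counters.
    bool acceptCounter(std::vector<uint32_t> &window, uint32_t counter, uint32_t keep) {
        if (std::binary_search(window.begin(), window.end(), counter)) {
            return false; // duplicate
        }
        if (!window.empty() && counter + keep <= window.back()) {
            return false; // older than the whole window
        }
        // Evict counters that fall out of the window once `counter` is admitted.
        window.erase(window.begin(), std::find_if(window.begin(), window.end(),
            [&](uint32_t c) { return c + keep > counter; }));
        window.insert(std::lower_bound(window.begin(), window.end(), counter), counter);
        return true;
    }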
auto EncryptedConnection::handleIncomingPacket(const char *bytes, size_t size)
-> absl::optional<DecryptedPacket> {
    if (size < 21 || size > kMaxIncomingPacketSize) {
        return LogError("Bad incoming packet size: ", std::to_string(size));
    }

    const auto x = (_key.isOutgoing ? 8 : 0) + (_type == Type::Signaling ? 128 : 0);
    const auto key = _key.value->data();
    const auto msgKey = reinterpret_cast<const uint8_t*>(bytes);
    const auto encryptedData = msgKey + 16;
    const auto dataSize = size - 16;

    auto aesKeyIv = PrepareAesKeyIv(key, msgKey, x);

    auto decryptionBuffer = rtc::Buffer(dataSize);
    AesProcessCtr(
        MemorySpan{ encryptedData, dataSize },
        decryptionBuffer.data(),
        std::move(aesKeyIv));

    const auto msgKeyLarge = ConcatSHA256(
        MemorySpan{ key + 88 + x, 32 },
        MemorySpan{ decryptionBuffer.data(), decryptionBuffer.size() });
    if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) {
        return LogError("Bad incoming data hash.");
    }

    const auto incomingSeq = ReadSeq(decryptionBuffer.data());
    const auto incomingCounter = CounterFromSeq(incomingSeq);
    if (!registerIncomingCounter(incomingCounter)) {
        // We've received that packet already.
        return LogError("Already handled packet received.", std::to_string(incomingCounter));
    }
    return processPacket(decryptionBuffer, incomingSeq);
}

auto EncryptedConnection::processPacket(
    const rtc::Buffer &fullBuffer,
    uint32_t packetSeq)
-> absl::optional<DecryptedPacket> {
    assert(fullBuffer.size() >= 5);

    auto additionalMessage = false;
    auto firstMessageRequiringAck = true;
    auto newRequiringAckReceived = false;

    auto currentSeq = packetSeq;
    auto currentCounter = CounterFromSeq(currentSeq);
    rtc::ByteBufferReader reader(
        reinterpret_cast<const char*>(fullBuffer.data() + 4), // Skip seq.
        fullBuffer.size() - 4);

    auto result = absl::optional<DecryptedPacket>();
    while (true) {
        const auto type = uint8_t(*reader.Data());
        const auto singleMessagePacket = ((currentSeq & kSingleMessagePacketSeqBit) != 0);
        if (singleMessagePacket && additionalMessage) {
            return LogError("Single message packet bit in not first message.");
        }

        if (type == kEmptyId) {
            if (additionalMessage) {
                return LogError("Empty message should be only the first one in the packet.");
            }
            RTC_LOG(LS_INFO) << logHeader()
                << "Got RECV:empty" << "#" << currentCounter;
            reader.Consume(1);
        } else if (type == kAckId) {
            if (!additionalMessage) {
                return LogError("Ack message must not be the first one in the packet.");
            }
            ackMyMessage(currentSeq);
            reader.Consume(1);
        } else if (auto message = DeserializeMessage(reader, singleMessagePacket)) {
            const auto messageRequiresAck = ((currentSeq & kMessageRequiresAckSeqBit) != 0);
            const auto skipMessage = messageRequiresAck
                ? !registerSentAck(currentCounter, firstMessageRequiringAck)
                : (additionalMessage && !registerIncomingCounter(currentCounter));
            if (messageRequiresAck) {
                firstMessageRequiringAck = false;
                if (!skipMessage) {
                    newRequiringAckReceived = true;
                }
                sendAckPostponed(currentSeq);
                RTC_LOG(LS_INFO) << logHeader()
                    << (skipMessage ? "Repeated RECV:type" : "Got RECV:type") << type << "#" << currentCounter;
            }
            if (!skipMessage) {
                appendReceivedMessage(result, std::move(*message), currentSeq);
            }
        } else {
            return LogError("Could not parse message from packet, type: ", std::to_string(type));
        }
        if (!reader.Length()) {
            break;
        } else if (singleMessagePacket) {
            return LogError("Single message didn't fill the entire packet.");
        } else if (reader.Length() < 5) {
            return LogError("Bad remaining data size: ", std::to_string(reader.Length()));
        }
        const auto success = reader.ReadUInt32(&currentSeq);
        assert(success);
        currentCounter = CounterFromSeq(currentSeq);
        additionalMessage = true;
    }

    if (!_acksToSendSeqs.empty()) {
        if (newRequiringAckReceived) {
            _requestSendService(0, 0);
        } else if (!_sendAcksTimerActive) {
            _sendAcksTimerActive = true;
            _requestSendService(
                _delayIntervals.maxDelayBeforeAckResend,
                kServiceCauseAcks);
        }
    }

    return result;
}

void EncryptedConnection::appendReceivedMessage(
        absl::optional<DecryptedPacket> &to,
        Message &&message,
        uint32_t incomingSeq) {
    auto decrypted = DecryptedMessage{
        std::move(message),
        CounterFromSeq(incomingSeq)
    };
    if (to) {
        to->additional.push_back(std::move(decrypted));
    } else {
        to = DecryptedPacket{ std::move(decrypted) };
    }
}

const char *EncryptedConnection::logHeader() const {
    return (_type == Type::Signaling) ? "(signaling) " : "(transport) ";
}

bool EncryptedConnection::registerSentAck(uint32_t counter, bool firstInPacket) {
    auto &list = _acksSentCounters;

    const auto position = std::lower_bound(list.begin(), list.end(), counter);
    const auto already = (position != list.end()) && (*position == counter);

    const auto was = list;
    if (firstInPacket) {
        list.erase(list.begin(), position);
        if (!already) {
            list.insert(list.begin(), counter);
        }
    } else if (!already) {
        list.insert(position, counter);
    }
    return !already;
}

void EncryptedConnection::sendAckPostponed(uint32_t incomingSeq) {
    auto &list = _acksToSendSeqs;
    const auto already = std::find(list.begin(), list.end(), incomingSeq);
    if (already == list.end()) {
        list.push_back(incomingSeq);
    }
}

void EncryptedConnection::ackMyMessage(uint32_t seq) {
    auto type = uint8_t(0);
    auto &list = _myNotYetAckedMessages;
    for (auto i = list.begin(), e = list.end(); i != e; ++i) {
        assert(i->data.size() >= 5);
        if (ReadSeq(i->data.cdata()) == seq) {
            type = uint8_t(i->data.cdata()[4]);
            list.erase(i);
            break;
        }
    }
    RTC_LOG(LS_INFO) << logHeader()
        << (type ? "Got ACK:type" + std::to_string(type) + "#" : "Repeated ACK#")
        << CounterFromSeq(seq);
}

auto EncryptedConnection::DelayIntervalsByType(Type type) -> DelayIntervals {
    auto result = DelayIntervals();
    const auto signaling = (type == Type::Signaling);

    // Don't resend faster than min delay even if we have a packet we can attach to.
    result.minDelayBeforeMessageResend = signaling ? 3000 : 300;

    // When max delay elapsed we resend anyway, in a dedicated packet.
    result.maxDelayBeforeMessageResend = signaling ? 5000 : 1000;
    result.maxDelayBeforeAckResend = signaling ? 5000 : 1000;

    return result;
}

rtc::CopyOnWriteBuffer EncryptedConnection::SerializeEmptyMessageWithSeq(uint32_t seq) {
    auto result = rtc::CopyOnWriteBuffer(5);
-   const auto bytes = result.data();
+   auto bytes = result.MutableData();
    WriteSeq(bytes, seq);
    bytes[4] = kEmptyId;
    return result;
}

} // namespace tgcalls
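Taken together, prepareForSending()/prepareForSendingService() and handleIncomingPacket() form a reliable, encrypted datagram layer: each packet carries one main message plus piggybacked ACKs and resends. A hypothetical round-trip, assuming two connections sharing one key with opposite isOutgoing flags (aliceKey, bobKey, and someMessage are illustrative placeholders; Message and EncryptionKey are defined elsewhere in tgcalls):

    // The callback is where a real caller would schedule a call to
    // prepareForSendingService(cause) after delayMs milliseconds.
    tgcalls::EncryptedConnection alice(
        tgcalls::EncryptedConnection::Type::Signaling, aliceKey,
        [](int delayMs, int cause) { /* schedule service send */ });
    tgcalls::EncryptedConnection bob(
        tgcalls::EncryptedConnection::Type::Signaling, bobKey,
        [](int delayMs, int cause) { /* schedule service send */ });

    if (const auto packet = alice.prepareForSending(someMessage)) {
        const auto decrypted = bob.handleIncomingPacket(
            reinterpret_cast<const char*>(packet->bytes.data()),
            packet->bytes.size());
        // decrypted->main now holds the message; ACKs queued inside `bob`
        // ride along on its next outgoing packet or a service packet.
    }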

View file

@@ -12,75 +12,78 @@ namespace tgcalls {

class EncryptedConnection final {
public:
    enum class Type : uint8_t {
        Signaling,
        Transport,
    };
    EncryptedConnection(
        Type type,
        const EncryptionKey &key,
        std::function<void(int delayMs, int cause)> requestSendService);

    struct EncryptedPacket {
        std::vector<uint8_t> bytes;
        uint32_t counter = 0;
    };
    absl::optional<EncryptedPacket> prepareForSending(const Message &message);
    absl::optional<EncryptedPacket> prepareForSendingService(int cause);

    struct DecryptedPacket {
        DecryptedMessage main;
        std::vector<DecryptedMessage> additional;
    };
    absl::optional<DecryptedPacket> handleIncomingPacket(const char *bytes, size_t size);

+   absl::optional<rtc::CopyOnWriteBuffer> encryptRawPacket(rtc::CopyOnWriteBuffer const &buffer);
+   absl::optional<rtc::CopyOnWriteBuffer> decryptRawPacket(rtc::CopyOnWriteBuffer const &buffer);

private:
    struct DelayIntervals {
        // In milliseconds.
        int minDelayBeforeMessageResend = 0;
        int maxDelayBeforeMessageResend = 0;
        int maxDelayBeforeAckResend = 0;
    };
    struct MessageForResend {
        rtc::CopyOnWriteBuffer data;
        int64_t lastSent = 0;
    };

    bool enoughSpaceInPacket(const rtc::CopyOnWriteBuffer &buffer, size_t amount) const;
    size_t packetLimit() const;
    size_t fullNotAckedLength() const;
    void appendAcksToSend(rtc::CopyOnWriteBuffer &buffer);
    void appendAdditionalMessages(rtc::CopyOnWriteBuffer &buffer);
    EncryptedPacket encryptPrepared(const rtc::CopyOnWriteBuffer &buffer);
    bool registerIncomingCounter(uint32_t incomingCounter);
    absl::optional<DecryptedPacket> processPacket(const rtc::Buffer &fullBuffer, uint32_t packetSeq);
    bool registerSentAck(uint32_t counter, bool firstInPacket);
    void ackMyMessage(uint32_t counter);
    void sendAckPostponed(uint32_t incomingSeq);
    bool haveAdditionalMessages() const;
    absl::optional<uint32_t> computeNextSeq(bool messageRequiresAck, bool singleMessagePacket);
    void appendReceivedMessage(
        absl::optional<DecryptedPacket> &to,
        Message &&message,
        uint32_t incomingSeq);

    const char *logHeader() const;
    static DelayIntervals DelayIntervalsByType(Type type);
    static rtc::CopyOnWriteBuffer SerializeEmptyMessageWithSeq(uint32_t seq);

    Type _type = Type();
    EncryptionKey _key;
    uint32_t _counter = 0;
    DelayIntervals _delayIntervals;
    std::vector<uint32_t> _largestIncomingCounters;
    std::vector<uint32_t> _ackedIncomingCounters;
    std::vector<uint32_t> _acksToSendSeqs;
    std::vector<uint32_t> _acksSentCounters;
    std::vector<MessageForResend> _myNotYetAckedMessages;
    std::function<void(int delayMs, int cause)> _requestSendService;
    bool _resendTimerActive = false;
    bool _sendAcksTimerActive = false;

};

View file

@@ -14,14 +14,17 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD
public:
    static rtc::scoped_refptr<webrtc::AudioDeviceModule> Create(webrtc::TaskQueueFactory* taskQueueFactory,
                                                                std::shared_ptr<FakeAudioDeviceModule::Renderer> renderer,
+                                                               std::shared_ptr<FakeAudioDeviceModule::Recorder> recorder,
                                                                FakeAudioDeviceModule::Options options) {
        return rtc::scoped_refptr<webrtc::AudioDeviceModule>(
-           new rtc::RefCountedObject<FakeAudioDeviceModuleImpl>(taskQueueFactory, options, std::move(renderer)));
+           new rtc::RefCountedObject<FakeAudioDeviceModuleImpl>(taskQueueFactory, options, std::move(renderer), std::move(recorder)));
    }

    FakeAudioDeviceModuleImpl(webrtc::TaskQueueFactory*, FakeAudioDeviceModule::Options options,
-                             std::shared_ptr<FakeAudioDeviceModule::Renderer> renderer)
-       : num_channels_{options.num_channels}, samples_per_sec_{options.samples_per_sec}, scheduler_(options.scheduler_), renderer_(std::move(renderer)) {
+                             std::shared_ptr<FakeAudioDeviceModule::Renderer> renderer,
+                             std::shared_ptr<FakeAudioDeviceModule::Recorder> recorder)
+       : num_channels_{options.num_channels}, samples_per_sec_{options.samples_per_sec}, scheduler_(options.scheduler_),
+         renderer_(std::move(renderer)), recorder_(std::move(recorder)) {
        if (!scheduler_) {
            scheduler_ = [](auto f) {
                std::thread([f = std::move(f)]() {

@@ -80,14 +83,16 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD
    }

    int32_t RegisterAudioCallback(webrtc::AudioTransport* callback) override {
-       std::unique_lock<std::mutex> lock(mutex_);
+       std::unique_lock<std::mutex> lock(render_mutex_);
        audio_callback_ = callback;
        return 0;
    }

    int32_t StartPlayout() override {
-       std::unique_lock<std::mutex> lock(mutex_);
-       RTC_CHECK(renderer_);
+       std::unique_lock<std::mutex> lock(render_mutex_);
+       if (!renderer_) {
+           return 0;
+       }
        if (rendering_) {
            return 0;
        }

@@ -105,8 +110,8 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD
        }
        need_rendering_ = false;
-       std::unique_lock<std::mutex> lock(mutex_);
-       cond_.wait(lock, [this]{ return !rendering_; });
+       std::unique_lock<std::mutex> lock(render_mutex_);
+       render_cond_.wait(lock, [this]{ return !rendering_; });
        return 0;
    }

@@ -115,20 +120,50 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD
        return rendering_;
    }

-private:
+   int32_t StartRecording() override {
+       std::unique_lock<std::mutex> lock(record_mutex_);
+       if (!recorder_) {
+           return 0;
+       }
+       if (recording_) {
+           return 0;
+       }
+       need_recording_ = true;
+       recording_ = true;
+       scheduler_([this]{
+           return Record() / 1000000.0;
+       });
+       return 0;
+   }
+
+   int32_t StopRecording() override {
+       if (!recording_) {
+           return 0;
+       }
+       need_recording_ = false;
+       std::unique_lock<std::mutex> lock(record_mutex_);
+       record_cond_.wait(lock, [this]{ return !recording_; });
+       return 0;
+   }
+
+   bool Recording() const override {
+       return recording_;
+   }
+
+private:
    int32_t Render() {
-       std::unique_lock<std::mutex> lock(mutex_);
+       std::unique_lock<std::mutex> lock(render_mutex_);
        if (!need_rendering_) {
            rendering_ = false;
-           cond_.notify_all();
+           render_cond_.notify_all();
            return -1;
        }
        size_t samples_out = 0;
        int64_t elapsed_time_ms = -1;
        int64_t ntp_time_ms = -1;
-       size_t bytes_per_sample = 2;
+       size_t bytes_per_sample = 2 * num_channels_;

        RTC_CHECK(audio_callback_);
        if (renderer_) {

@@ -157,31 +192,66 @@ class FakeAudioDeviceModuleImpl : public webrtc::webrtc_impl::AudioDeviceModuleD
        return wait_for_us;
    }

+   int32_t Record() {
+       std::unique_lock<std::mutex> lock(record_mutex_);
+       if (!need_recording_) {
+           recording_ = false;
+           record_cond_.notify_all();
+           return -1;
+       }
+       auto frame = recorder_->Record();
+       if (frame.num_samples != 0) {
+           uint32_t new_mic_level = 0; // out-parameter of RecordedDataIsAvailable()
+           audio_callback_->RecordedDataIsAvailable(frame.audio_samples,
+               frame.num_samples, frame.bytes_per_sample, frame.num_channels,
+               frame.samples_per_sec, 0, 0, 0, false, new_mic_level);
+       }
+       int32_t wait_for_us = -1;
+       if (recorder_) {
+           wait_for_us = recorder_->WaitForUs();
+       }
+       return wait_for_us;
+   }
+
    size_t num_channels_;
    const uint32_t samples_per_sec_;
    size_t samples_per_frame_{0};
    std::function<void(FakeAudioDeviceModule::Task)> scheduler_;
-   mutable std::mutex mutex_;
+   mutable std::mutex render_mutex_;
    std::atomic<bool> need_rendering_{false};
    std::atomic<bool> rendering_{false};
-   std::condition_variable cond_;
+   std::condition_variable render_cond_;
    std::unique_ptr<rtc::PlatformThread> renderThread_;
+   mutable std::mutex record_mutex_;
+   std::atomic<bool> need_recording_{false};
+   std::atomic<bool> recording_{false};
+   std::condition_variable record_cond_;
+   std::unique_ptr<rtc::PlatformThread> recordThread_;
    webrtc::AudioTransport* audio_callback_{nullptr};
    const std::shared_ptr<FakeAudioDeviceModule::Renderer> renderer_;
+   const std::shared_ptr<FakeAudioDeviceModule::Recorder> recorder_;
    std::vector<int16_t> playout_buffer_;
};

std::function<rtc::scoped_refptr<webrtc::AudioDeviceModule>(webrtc::TaskQueueFactory*)> FakeAudioDeviceModule::Creator(
-       std::shared_ptr<Renderer> renderer, Options options) {
+       std::shared_ptr<Renderer> renderer, std::shared_ptr<Recorder> recorder, Options options) {
    bool is_renderer_empty = bool(renderer);
    auto boxed_renderer = std::make_shared<std::shared_ptr<Renderer>>(std::move(renderer));
+   bool is_recorder_empty = bool(recorder);
+   auto boxed_recorder = std::make_shared<std::shared_ptr<Recorder>>(std::move(recorder));
    return
-       [boxed_renderer = std::move(boxed_renderer), is_renderer_empty, options](webrtc::TaskQueueFactory* task_factory) {
+       [boxed_renderer = std::move(boxed_renderer), is_renderer_empty,
+        boxed_recorder = std::move(boxed_recorder), is_recorder_empty, options](webrtc::TaskQueueFactory* task_factory) {
        RTC_CHECK(is_renderer_empty == bool(*boxed_renderer)); // call only once if renderer exists
-       return FakeAudioDeviceModuleImpl::Create(task_factory, std::move(*boxed_renderer), options);
+       RTC_CHECK(is_recorder_empty == bool(*boxed_recorder)); // call only once if recorder exists
+       return FakeAudioDeviceModuleImpl::Create(task_factory, std::move(*boxed_renderer), std::move(*boxed_recorder), options);
    };
}

} // namespace tgcalls
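With the new recorder parameter, a test harness can fake both audio directions through one factory. A sketch, where MyRenderer and MyRecorder are hypothetical implementations of the interfaces from the header below:

    auto creator = tgcalls::FakeAudioDeviceModule::Creator(
        std::make_shared<MyRenderer>(),   // hypothetical Renderer implementation
        std::make_shared<MyRecorder>(),   // hypothetical Recorder implementation
        tgcalls::FakeAudioDeviceModule::Options{});
    // The RTC_CHECKs above mean `creator` may be invoked at most once when a
    // renderer or recorder was supplied (they are moved out on first call):
    rtc::scoped_refptr<webrtc::AudioDeviceModule> adm = creator(taskQueueFactory);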

View file

@@ -32,6 +32,14 @@ class FakeAudioDeviceModule {
            return 10000;
        }
    };
+   class Recorder {
+   public:
+       virtual ~Recorder() = default;
+       virtual AudioFrame Record() = 0;
+       virtual int32_t WaitForUs() {
+           return 10000;
+       }
+   };

    using Task = std::function<double()>;
    struct Options {
        uint32_t samples_per_sec{48000};

@@ -39,6 +47,8 @@ class FakeAudioDeviceModule {
        std::function<void(Task)> scheduler_;
    };
    static std::function<rtc::scoped_refptr<webrtc::AudioDeviceModule>(webrtc::TaskQueueFactory *)> Creator(
-       std::shared_ptr<Renderer> renderer, Options options);
+       std::shared_ptr<Renderer> renderer,
+       std::shared_ptr<Recorder> recorder,
+       Options options);
};

} // namespace tgcalls
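A minimal Recorder that produces 10 ms of mono silence per call, as a sketch. The AudioFrame field names follow the Record() call site in FakeAudioDeviceModuleImpl above and are assumptions; the real struct may differ:

    #include <vector>

    class SilenceRecorder final : public tgcalls::FakeAudioDeviceModule::Recorder {
    public:
        AudioFrame Record() override {
            AudioFrame frame{};
            buffer_.assign(480, 0);            // 480 samples = 10 ms at 48 kHz
            frame.audio_samples = buffer_.data();
            frame.num_samples = 480;
            frame.bytes_per_sample = 2;        // int16_t PCM
            frame.num_channels = 1;
            frame.samples_per_sec = 48000;
            return frame;
        }
    private:
        std::vector<int16_t> buffer_;
    };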

View file

@@ -0,0 +1,173 @@
#include "FakeVideoTrackSource.h"

#include "api/video/i420_buffer.h"
#include "media/base/video_broadcaster.h"
#include "pc/video_track_source.h"
#include "libyuv.h"

#include <atomic>
#include <chrono>
#include <cmath>
#include <cstring>
#include <memory>
#include <thread>
#include <vector>

namespace tgcalls {

int WIDTH = 1280;
int HEIGHT = 720;

class ChessFrameSource : public FrameSource {
public:
    ChessFrameSource() {
        int N = 100;
        frames_.reserve(N);
        for (int i = 0; i < N; i++) {
            frames_.push_back(genFrame(i, N));
        }
    }
    Info info() const override {
        return Info{WIDTH, HEIGHT};
    }
//    webrtc::VideoFrame next_frame() override {
//        i = (i + 1) % frames_.size();
//        return frames_[i].frame;
//    }
    void next_frame_rgb0(char *buf, double *pts) override {
        *pts = 0;
        i = (i + 1) % frames_.size();
        size_t size = WIDTH * HEIGHT * 4;
        memcpy(buf, frames_[i].rgba.get(), size);
    }

private:
    struct Frame {
        webrtc::VideoFrame frame;
        std::unique_ptr<std::uint8_t[]> rgba;
    };
    std::vector<Frame> frames_;
    size_t i = 0;

    Frame genFrame(int i, int n) {
        int width = WIDTH;
        int height = HEIGHT;
        auto bytes_ptr = std::make_unique<std::uint8_t[]>(width * height * 4);
        auto bytes = bytes_ptr.get();
        auto set_rgb = [&](int x, int y, std::uint8_t r, std::uint8_t g, std::uint8_t b) {
            auto dest = bytes + (x * width + y) * 4;
            dest[0] = r;
            dest[1] = g;
            dest[2] = b;
            dest[3] = 0;
        };
        auto angle = (double)i / n * M_PI;
        auto co = cos(angle);
        auto si = sin(angle);
        for (int i = 0; i < height; i++) {
            for (int j = 0; j < width; j++) {
                double sx = (i - height / 2) * 20.0 / HEIGHT;
                double sy = (j - width / 2) * 20.0 / HEIGHT;
                int x, y;
                if (sx * sx + sy * sy < 10) {
                    x = int(floor(sx * co - sy * si));
                    y = int(floor(sx * si + sy * co));
                } else {
                    x = int(floor(sx));
                    y = int(floor(sy));
                }
                std::uint8_t color = ((y & 1) ^ (x & 1)) * 255;
                set_rgb(i, j, color, color, color);
            }
        }
        rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(width, height);
        libyuv::RGBAToI420(bytes, width * 4, buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(),
                           buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(), width, height);
        return Frame{webrtc::VideoFrame::Builder().set_video_frame_buffer(buffer).build(), std::move(bytes_ptr)};
    }
};

webrtc::VideoFrame FrameSource::next_frame() {
    auto info = this->info();
    auto height = info.height;
    auto width = info.width;
    auto bytes_ptr = std::make_unique<std::uint8_t[]>(width * height * 4);
    double pts;
    next_frame_rgb0(reinterpret_cast<char *>(bytes_ptr.get()), &pts);
    rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(width, height);
    libyuv::ABGRToI420(bytes_ptr.get(), width * 4, buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(),
                       buffer->StrideU(), buffer->MutableDataV(), buffer->StrideV(), width, height);
    return webrtc::VideoFrame::Builder().set_timestamp_us(static_cast<int64_t>(pts * 1000000)).set_video_frame_buffer(buffer).build();
}

class FakeVideoSource : public rtc::VideoSourceInterface<webrtc::VideoFrame> {
public:
    FakeVideoSource(std::unique_ptr<FrameSource> source) {
        data_ = std::make_shared<Data>();
        std::thread([data = data_, source = std::move(source)] {
            std::uint32_t step = 0;
            while (!data->flag_) {
                step++;
                std::this_thread::sleep_for(std::chrono::milliseconds(1000 / 30));
                auto frame = source->next_frame();
                frame.set_id(static_cast<std::uint16_t>(step));
                frame.set_timestamp_us(rtc::TimeMicros());
                data->broadcaster_.OnFrame(frame);
            }
        }).detach();
    }
    ~FakeVideoSource() {
        data_->flag_ = true;
    }
    using VideoFrameT = webrtc::VideoFrame;
    void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrameT> *sink, const rtc::VideoSinkWants &wants) override {
        RTC_LOG(WARNING) << "ADD";
        data_->broadcaster_.AddOrUpdateSink(sink, wants);
    }
    // RemoveSink must guarantee that at the time the method returns,
    // there are no current and no future calls to VideoSinkInterface::OnFrame.
    void RemoveSink(rtc::VideoSinkInterface<VideoFrameT> *sink) override {
        RTC_LOG(WARNING) << "REMOVE";
        data_->broadcaster_.RemoveSink(sink);
    }

private:
    struct Data {
        std::atomic<bool> flag_{false};
        rtc::VideoBroadcaster broadcaster_;
    };
    std::shared_ptr<Data> data_;
};

class FakeVideoTrackSourceImpl : public webrtc::VideoTrackSource {
public:
    static rtc::scoped_refptr<FakeVideoTrackSourceImpl> Create(std::unique_ptr<FrameSource> source) {
        return rtc::scoped_refptr<FakeVideoTrackSourceImpl>(new rtc::RefCountedObject<FakeVideoTrackSourceImpl>(std::move(source)));
    }

    explicit FakeVideoTrackSourceImpl(std::unique_ptr<FrameSource> source) : VideoTrackSource(false), source_(std::move(source)) {
    }

protected:
    FakeVideoSource source_;
    rtc::VideoSourceInterface<webrtc::VideoFrame> *source() override {
        return &source_;
    }
};

std::function<webrtc::VideoTrackSourceInterface*()> FakeVideoTrackSource::create(std::unique_ptr<FrameSource> frame_source) {
    auto source = FakeVideoTrackSourceImpl::Create(std::move(frame_source));
    return [source] {
        return source.get();
    };
}

std::unique_ptr<FrameSource> FrameSource::chess() {
    return std::make_unique<ChessFrameSource>();
}

void FrameSource::video_frame_to_rgb0(const webrtc::VideoFrame &src, char *dest) {
    auto buffer = src.video_frame_buffer()->GetI420();
    libyuv::I420ToABGR(buffer->DataY(), buffer->StrideY(), buffer->DataU(),
                       buffer->StrideU(), buffer->DataV(), buffer->StrideV(),
                       reinterpret_cast<uint8_t *>(dest), src.width() * 4, src.width(), src.height());
}

} // namespace tgcalls
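Wiring the chess-pattern source into a usable track source, as a sketch (both factory functions are declared in FakeVideoTrackSource.h below):

    // The returned std::function keeps the source alive via the captured
    // scoped_refptr; each call hands out the same raw pointer.
    auto getSource = tgcalls::FakeVideoTrackSource::create(tgcalls::FrameSource::chess());
    webrtc::VideoTrackSourceInterface *source = getSource();
    // `source` can now be passed to a peer connection factory when creating
    // a video track for tests.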

View file

@@ -0,0 +1,33 @@
#pragma once

#include <cstdint>
#include <functional>
#include <memory>
#include <string>

namespace webrtc {
class VideoTrackSourceInterface;
class VideoFrame;
}

namespace tgcalls {

class FrameSource {
public:
    struct Info {
        int32_t width;
        int32_t height;
    };

    virtual ~FrameSource() = default;

    virtual Info info() const = 0;
    virtual webrtc::VideoFrame next_frame();
    static void video_frame_to_rgb0(const webrtc::VideoFrame &src, char *dest);
    virtual void next_frame_rgb0(char *buf, double *pt_in_seconds) = 0;

    static std::unique_ptr<FrameSource> chess();
    static std::unique_ptr<FrameSource> from_file(std::string path);
};

class FakeVideoTrackSource {
public:
    static std::function<webrtc::VideoTrackSourceInterface*()> create(std::unique_ptr<FrameSource> source);
};

} // namespace tgcalls
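A hypothetical FrameSource that fills every frame with solid gray, showing the next_frame_rgb0() contract used above (the buffer is width * height * 4 bytes of RGB0; the base class converts it to I420 in next_frame()):

    #include <cstring>

    class GrayFrameSource final : public tgcalls::FrameSource {
    public:
        Info info() const override { return Info{320, 240}; }
        void next_frame_rgb0(char *buf, double *pt_in_seconds) override {
            *pt_in_seconds = 0;
            std::memset(buf, 0x80, 320 * 240 * 4); // mid-gray in every channel
        }
    };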

View file

@@ -169,7 +169,7 @@ void InstanceImpl::stop(std::function<void(FinalState)> completion) {
}

int InstanceImpl::GetConnectionMaxLayer() {
-   return 92; // TODO: retrieve from LayerBase
+   return 92;
}

std::vector<std::string> InstanceImpl::GetVersions() {
View file

@@ -1,13 +0,0 @@
#include "JsonConfig.h"

namespace tgcalls {

JsonConfig::JsonConfig(Values values) : _values(values) {
}

Value JsonConfig::getValue(std::string key) {
    return _values[key];
}

} // namespace tgcalls

View file

@@ -1,25 +0,0 @@
#ifndef TGCALLS_JSON_CONFIG_H
#define TGCALLS_JSON_CONFIG_H

#include <string>
#include <map>
#include "absl/types/variant.h"

namespace tgcalls {

typedef absl::variant<int, double, bool, std::string> Value;
typedef std::map<std::string, Value> Values;

class JsonConfig {
public:
    JsonConfig(Values values);
    Value getValue(std::string key);

private:
    Values _values;
};

} // namespace tgcalls

#endif

View file

@@ -29,7 +29,6 @@ void LogSinkImpl::OnLogMessage(const std::string &message) {
    time_t rawTime;
    time(&rawTime);
    struct tm timeinfo;
-   timeval curTime = { 0 };
#ifdef WEBRTC_WIN
    localtime_s(&timeinfo, &rawTime);

@@ -45,14 +44,13 @@ void LogSinkImpl::OnLogMessage(const std::string &message) {
    const auto deltaEpochInMicrosecs = 11644473600000000Ui64;
    full -= deltaEpochInMicrosecs;
    full /= 10;
-   curTime.tv_sec = (long)(full / 1000000UL);
-   curTime.tv_usec = (long)(full % 1000000UL);
+   int32_t milliseconds = (long)(full % 1000000UL) / 1000;
#else
+   timeval curTime = { 0 };
    localtime_r(&rawTime, &timeinfo);
    gettimeofday(&curTime, nullptr);
-#endif
    int32_t milliseconds = curTime.tv_usec / 1000;
+#endif

    auto &stream = _file.is_open() ? (std::ostream&)_file : _data;
    stream

View file

@@ -205,6 +205,7 @@ _platformContext(platformContext) {
        "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/"
        "WebRTC-FlexFEC-03/Enabled/"
        "WebRTC-FlexFEC-03-Advertised/Enabled/"
+       "WebRTC-Turn-AllowSystemPorts/Enabled/"
    );

    PlatformInterface::SharedInstance()->configurePlatformAudio();

@@ -391,7 +392,7 @@ void MediaManager::start() {
    beginStatsTimer(3000);
    if (_audioLevelUpdated != nullptr) {
-       beginLevelsTimer(50);
+       beginLevelsTimer(100);
    }
}

@@ -504,7 +505,7 @@ void MediaManager::beginLevelsTimer(int timeoutMs) {
        float effectiveLevel = fmaxf(strong->_currentAudioLevel, strong->_currentMyAudioLevel);
        strong->_audioLevelUpdated(effectiveLevel);
-       strong->beginLevelsTimer(50);
+       strong->beginLevelsTimer(100);
    }, timeoutMs);
}

@@ -746,7 +747,9 @@ void MediaManager::checkIsReceivingVideoChanged(bool wasReceiving) {
    const auto codecs = {
        cricket::kFlexfecCodecName,
        cricket::kH264CodecName,
+#ifndef WEBRTC_DISABLE_H265
        cricket::kH265CodecName,
+#endif
        cricket::kVp8CodecName,
        cricket::kVp9CodecName,
        cricket::kAv1CodecName,

View file

@@ -10,6 +10,9 @@
#include "rtc_base/task_utils/to_queued_task.h"
#include "p2p/base/ice_credentials_iterator.h"
#include "api/jsep_ice_candidate.h"
+#include "rtc_base/network_monitor_factory.h"
+#include "platform/PlatformInterface.h"

extern "C" {
#include <openssl/sha.h>

@@ -104,6 +107,8 @@ _transportMessageReceived(std::move(transportMessageReceived)),
_sendSignalingMessage(std::move(sendSignalingMessage)),
_localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH)) {
    assert(_thread->IsCurrent());
+   _networkMonitorFactory = PlatformInterface::SharedInstance()->createNetworkMonitorFactory();
}

NetworkManager::~NetworkManager() {

@@ -121,7 +126,7 @@ NetworkManager::~NetworkManager() {
void NetworkManager::start() {
    _socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread));

-   _networkManager = std::make_unique<rtc::BasicNetworkManager>();
+   _networkManager = std::make_unique<rtc::BasicNetworkManager>(_networkMonitorFactory.get());

    if (_enableStunMarking) {
        _turnCustomizer.reset(new TurnCustomizerImpl());

View file

@@ -10,6 +10,7 @@
#include "rtc_base/copy_on_write_buffer.h"
#include "api/candidate.h"
+#include "rtc_base/network_monitor_factory.h"

#include <functional>
#include <memory>

@@ -93,6 +94,7 @@ private:
    std::function<void(DecryptedMessage &&)> _transportMessageReceived;
    std::function<void(Message &&)> _sendSignalingMessage;

+   std::unique_ptr<rtc::NetworkMonitorFactory> _networkMonitorFactory;
    std::unique_ptr<rtc::BasicPacketSocketFactory> _socketFactory;
    std::unique_ptr<rtc::BasicNetworkManager> _networkManager;
    std::unique_ptr<webrtc::TurnCustomizer> _turnCustomizer;

View file

@@ -61,10 +61,12 @@ public:
    explicit ThreadsImpl(size_t i) {
        auto suffix = i == 0 ? "" : "#" + std::to_string(i);
        network_ = create_network("tgc-net" + suffix);
+       network_->DisallowAllInvokes();
        media_ = create("tgc-media" + suffix);
        worker_ = create("tgc-work" + suffix);
        process_ = create("tgc-process" + suffix);
-       shared_module_thread_ = webrtc::SharedModuleThread::Create(webrtc::ProcessThread::Create("tgc-module"), nullptr);
+       worker_->DisallowAllInvokes();
+       worker_->AllowInvokesToThread(network_.get());
    }

    rtc::Thread *getNetworkThread() override {

@@ -80,6 +82,13 @@ public:
        return process_.get();
    }
    rtc::scoped_refptr<webrtc::SharedModuleThread> getSharedModuleThread() override {
+       // This function must be called from a single thread because of the
+       // SharedModuleThread implementation, so we don't bother making it thread-safe.
+       if (!shared_module_thread_) {
+           shared_module_thread_ = webrtc::SharedModuleThread::Create(
+               webrtc::ProcessThread::Create("tgc-module"),
+               [=] { shared_module_thread_ = nullptr; });
+       }
        return shared_module_thread_;
    }

View file

@@ -3,6 +3,7 @@
#include <string>
#include <memory>
+#include <functional>

namespace rtc {
template <typename VideoFrameT>

@@ -24,6 +25,7 @@ enum class VideoState {
    Active,
};

class VideoCaptureInterface {
protected:
    VideoCaptureInterface() = default;

@@ -40,10 +42,22 @@ public:
    virtual void setState(VideoState state) = 0;
    virtual void setPreferredAspectRatio(float aspectRatio) = 0;
    virtual void setOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
+   virtual void setOnFatalError(std::function<void()> error) {
+       // TODO: make this function pure virtual when everybody implements it.
+   }
+   virtual void setOnPause(std::function<void(bool)> pause) {
+       // TODO: make this function pure virtual when everybody implements it.
+   }
+   virtual void setOnIsActiveUpdated(std::function<void(bool)> onIsActiveUpdated) {
+       // TODO: make this function pure virtual when everybody implements it.
+   }
+   virtual void withNativeImplementation(std::function<void(void *)> completion) {
+       completion(nullptr);
+   }
    virtual std::shared_ptr<PlatformContext> getPlatformContext() {
        return nullptr;
    }
};

} // namespace tgcalls

View file

@@ -10,7 +10,7 @@
namespace tgcalls {

VideoCaptureInterfaceObject::VideoCaptureInterfaceObject(std::string deviceId, std::shared_ptr<PlatformContext> platformContext, Threads &threads)
-: _videoSource(PlatformInterface::SharedInstance()->makeVideoSource(threads.getMediaThread(), threads.getWorkerThread())) {
+: _videoSource(PlatformInterface::SharedInstance()->makeVideoSource(threads.getMediaThread(), threads.getWorkerThread(), deviceId == "screen")) {
    _platformContext = platformContext;

    switchToDevice(deviceId);

@@ -26,8 +26,16 @@ webrtc::VideoTrackSourceInterface *VideoCaptureInterfaceObject::source() {
    return _videoSource;
}

+int VideoCaptureInterfaceObject::getRotation() {
+   if (_videoCapturer) {
+       return _videoCapturer->getRotation();
+   } else {
+       return 0;
+   }
+}
+
void VideoCaptureInterfaceObject::switchToDevice(std::string deviceId) {
-   if (_videoCapturer && _currentUncroppedSink) {
+   if (_videoCapturer && _currentUncroppedSink != nullptr) {
        _videoCapturer->setUncroppedOutput(nullptr);
    }
    if (_videoSource) {

@@ -37,24 +45,53 @@ void VideoCaptureInterfaceObject::switchToDevice(std::string deviceId) {
            if (this->_stateUpdated) {
                this->_stateUpdated(state);
            }
+           if (this->_onIsActiveUpdated) {
+               switch (state) {
+                   case VideoState::Active: {
+                       this->_onIsActiveUpdated(true);
+                       break;
+                   }
+                   default: {
+                       this->_onIsActiveUpdated(false);
+                       break;
+                   }
+               }
+           }
        }, [this](PlatformCaptureInfo info) {
            if (this->_shouldBeAdaptedToReceiverAspectRate != info.shouldBeAdaptedToReceiverAspectRate) {
                this->_shouldBeAdaptedToReceiverAspectRate = info.shouldBeAdaptedToReceiverAspectRate;
-               this->updateAspectRateAdaptation();
            }
+           if (this->_rotationUpdated) {
+               this->_rotationUpdated(info.rotation);
+           }
+           this->updateAspectRateAdaptation();
        }, _platformContext, _videoCapturerResolution);
    }
    if (_videoCapturer) {
//      if (_preferredAspectRatio > 0) {
//          _videoCapturer->setPreferredCaptureAspectRatio(_preferredAspectRatio);
//      }
        if (_currentUncroppedSink) {
            _videoCapturer->setUncroppedOutput(_currentUncroppedSink);
        }
+       if (_onFatalError) {
+           _videoCapturer->setOnFatalError(_onFatalError);
+       }
+       if (_onPause) {
+           _videoCapturer->setOnPause(_onPause);
+       }
        _videoCapturer->setState(_state);
    }
}

+void VideoCaptureInterfaceObject::withNativeImplementation(std::function<void(void *)> completion) {
+   if (_videoCapturer) {
+       _videoCapturer->withNativeImplementation(completion);
+   } else {
+       completion(nullptr);
+   }
+}
+
void VideoCaptureInterfaceObject::setState(VideoState state) {
    if (_state != state) {
        _state = state;

@@ -84,10 +121,10 @@ void VideoCaptureInterfaceObject::updateAspectRateAdaptation() {
            float height = (originalWidth > aspectRatio * originalHeight)
                ? originalHeight
                : int(std::round(originalHeight / aspectRatio));
-           PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 30);
+           PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 25);
        } else {
-           PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, _videoCapturerResolution.first, _videoCapturerResolution.second, 30);
+           PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, _videoCapturerResolution.first, _videoCapturerResolution.second, 25);
        }
    }
}

@@ -100,10 +137,31 @@ void VideoCaptureInterfaceObject::setOutput(std::shared_ptr<rtc::VideoSinkInterf
    _currentUncroppedSink = sink;
}

+void VideoCaptureInterfaceObject::setOnFatalError(std::function<void()> error) {
+   if (_videoCapturer) {
+       _videoCapturer->setOnFatalError(error);
+   }
+   _onFatalError = error;
+}
+
+void VideoCaptureInterfaceObject::setOnPause(std::function<void(bool)> pause) {
+   if (_videoCapturer) {
+       _videoCapturer->setOnPause(pause);
+   }
+   _onPause = pause;
+}
+
+void VideoCaptureInterfaceObject::setOnIsActiveUpdated(std::function<void(bool)> onIsActiveUpdated) {
+   _onIsActiveUpdated = onIsActiveUpdated;
+}
+
void VideoCaptureInterfaceObject::setStateUpdated(std::function<void(VideoState)> stateUpdated) {
    _stateUpdated = stateUpdated;
}

+void VideoCaptureInterfaceObject::setRotationUpdated(std::function<void(int)> rotationUpdated) {
+   _rotationUpdated = rotationUpdated;
+}
+
VideoCaptureInterfaceImpl::VideoCaptureInterfaceImpl(std::string deviceId,
    std::shared_ptr<PlatformContext> platformContext, std::shared_ptr<Threads> threads) :
_platformContext(platformContext),

@@ -120,6 +178,12 @@ void VideoCaptureInterfaceImpl::switchToDevice(std::string deviceId) {
    });
}

+void VideoCaptureInterfaceImpl::withNativeImplementation(std::function<void(void *)> completion) {
+   _impl.perform(RTC_FROM_HERE, [completion](VideoCaptureInterfaceObject *impl) {
+       impl->withNativeImplementation(completion);
+   });
+}
+
void VideoCaptureInterfaceImpl::setState(VideoState state) {
    _impl.perform(RTC_FROM_HERE, [state](VideoCaptureInterfaceObject *impl) {
        impl->setState(state);

@@ -131,6 +195,22 @@ void VideoCaptureInterfaceImpl::setPreferredAspectRatio(float aspectRatio) {
        impl->setPreferredAspectRatio(aspectRatio);
    });
}

+void VideoCaptureInterfaceImpl::setOnFatalError(std::function<void()> error) {
+   _impl.perform(RTC_FROM_HERE, [error](VideoCaptureInterfaceObject *impl) {
+       impl->setOnFatalError(error);
+   });
+}
+
+void VideoCaptureInterfaceImpl::setOnPause(std::function<void(bool)> pause) {
+   _impl.perform(RTC_FROM_HERE, [pause](VideoCaptureInterfaceObject *impl) {
+       impl->setOnPause(pause);
+   });
+}
+
+void VideoCaptureInterfaceImpl::setOnIsActiveUpdated(std::function<void(bool)> onIsActiveUpdated) {
+   _impl.perform(RTC_FROM_HERE, [onIsActiveUpdated](VideoCaptureInterfaceObject *impl) {
+       impl->setOnIsActiveUpdated(onIsActiveUpdated);
+   });
+}
+
void VideoCaptureInterfaceImpl::setOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
    _impl.perform(RTC_FROM_HERE, [sink](VideoCaptureInterfaceObject *impl) {

View file

@@ -18,11 +18,17 @@ public:
~VideoCaptureInterfaceObject();
void switchToDevice(std::string deviceId);
void withNativeImplementation(std::function<void(void *)> completion);
void setState(VideoState state);
void setPreferredAspectRatio(float aspectRatio);
void setOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setStateUpdated(std::function<void(VideoState)> stateUpdated);
void setRotationUpdated(std::function<void(int)> rotationUpdated);
void setOnFatalError(std::function<void()> error);
void setOnPause(std::function<void(bool)> pause);
void setOnIsActiveUpdated(std::function<void(bool)> onIsActiveUpdated);
webrtc::VideoTrackSourceInterface *source();
int getRotation();
private:
void updateAspectRateAdaptation();
@@ -33,6 +39,10 @@ private:
std::pair<int, int> _videoCapturerResolution;
std::unique_ptr<VideoCapturerInterface> _videoCapturer;
std::function<void(VideoState)> _stateUpdated;
std::function<void()> _onFatalError;
std::function<void(bool)> _onPause;
std::function<void(bool)> _onIsActiveUpdated;
std::function<void(int)> _rotationUpdated;
VideoState _state = VideoState::Active;
float _preferredAspectRatio = 0.0f;
bool _shouldBeAdaptedToReceiverAspectRate = true;
@@ -44,10 +54,14 @@ public:
virtual ~VideoCaptureInterfaceImpl();
void switchToDevice(std::string deviceId) override;
void withNativeImplementation(std::function<void(void *)> completion) override;
void setState(VideoState state) override;
void setPreferredAspectRatio(float aspectRatio) override;
void setOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override;
void setOnFatalError(std::function<void()> error) override;
void setOnPause(std::function<void(bool)> pause) override;
void setOnIsActiveUpdated(std::function<void(bool)> onIsActiveUpdated) override;
std::shared_ptr<PlatformContext> getPlatformContext() override;
ThreadLocalObject<VideoCaptureInterfaceObject> *object();
View file
@@ -4,6 +4,7 @@
#include "Instance.h"
#include <memory>
#include <functional>
namespace rtc {
template <typename VideoFrameT>
@@ -23,6 +24,16 @@ public:
virtual void setState(VideoState state) = 0;
virtual void setPreferredCaptureAspectRatio(float aspectRatio) = 0;
virtual void setUncroppedOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual int getRotation() = 0;
virtual void setOnFatalError(std::function<void()> error) {
// TODO: make this function pure virtual when everybody implements it.
}
virtual void setOnPause(std::function<void(bool)> pause) {
// TODO: make this function pure virtual when everybody implements it.
}
virtual void withNativeImplementation(std::function<void(void *)> completion) {
completion(nullptr);
}
};
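Editor's note: the new setOnFatalError/setOnPause hooks above are deliberately non-pure, with empty default bodies, so platform backends can adopt them incrementally (the TODO comments say they become pure once every implementation catches up). A standalone illustration of that migration pattern, with hypothetical names:

#include <functional>
#include <iostream>

// A base class evolving its interface without breaking existing subclasses,
// the same way VideoCapturerInterface does above.
class Capturer {
public:
    virtual ~Capturer() = default;
    virtual void start() = 0;                      // original, pure virtual
    virtual void setOnFatalError(std::function<void()> cb) {
        (void)cb; // default no-op until all subclasses implement it
    }
};

class LegacyCapturer : public Capturer {
public:
    void start() override { std::cout << "capturing\n"; }
    // compiles unchanged even though it predates setOnFatalError
};

int main() {
    LegacyCapturer c;
    c.setOnFatalError([] { std::cout << "fatal\n"; });
    c.start();
}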

View file

@@ -25,20 +25,22 @@ public:
void setConnectionMode(GroupConnectionMode connectionMode, bool keepBroadcastIfWasEnabled);
void emitJoinPayload(std::function<void(GroupJoinPayload)> completion);
void emitJoinPayload(std::function<void(GroupJoinPayload const &)> completion);
void setJoinResponsePayload(GroupJoinResponsePayload payload, std::vector<tgcalls::GroupParticipantDescription> &&participants);
void setJoinResponsePayload(std::string const &payload);
void addParticipants(std::vector<GroupParticipantDescription> &&participants);
void removeSsrcs(std::vector<uint32_t> ssrcs);
void removeIncomingVideoSource(uint32_t ssrc);
void setIsMuted(bool isMuted);
void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture, std::function<void(GroupJoinPayload)> completion);
void setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled);
void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture);
void setVideoSource(std::function<webrtc::VideoTrackSourceInterface*()> getVideoSource);
void setAudioOutputDevice(std::string id);
void setAudioInputDevice(std::string id);
void addIncomingVideoOutput(uint32_t ssrc, std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void addIncomingVideoOutput(std::string const &endpointId, std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setVolume(uint32_t ssrc, double volume);
void setFullSizeVideoSsrc(uint32_t ssrc);
void setRequestedVideoChannels(std::vector<VideoChannelDescription> &&requestedVideoChannels);
private:
std::shared_ptr<Threads> _threads;
File diff suppressed because it is too large
View file
@@ -10,10 +10,12 @@
#include "../Instance.h"
#include "../StaticThreads.h"
#include "GroupJoinPayload.h"
namespace webrtc {
class AudioDeviceModule;
class TaskQueueFactory;
class VideoTrackSourceInterface;
}
namespace rtc {
@@ -35,6 +37,7 @@ struct GroupConfig {
struct GroupLevelValue {
float level = 0.;
bool voice = false;
bool isMuted = false;
};
struct GroupLevelUpdate {
@@ -77,6 +80,53 @@ struct GroupNetworkState {
bool isTransitioningFromBroadcastToRtc = false;
};
enum class VideoContentType {
None,
Screencast,
Generic
};
enum class VideoCodecName {
VP8,
VP9
};
class RequestMediaChannelDescriptionTask {
public:
virtual ~RequestMediaChannelDescriptionTask() = default;
virtual void cancel() = 0;
};
struct MediaChannelDescription {
enum class Type {
Audio,
Video
};
Type type = Type::Audio;
uint32_t audioSsrc = 0;
std::string videoInformation;
};
struct MediaSsrcGroup {
std::string semantics;
std::vector<uint32_t> ssrcs;
};
struct VideoChannelDescription {
enum class Quality {
Thumbnail,
Medium,
Full
};
uint32_t audioSsrc = 0;
std::string endpointId;
std::vector<MediaSsrcGroup> ssrcGroups;
Quality minQuality = Quality::Thumbnail;
Quality maxQuality = Quality::Thumbnail;
};
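Editor's note: VideoChannelDescription is what consumers now pass to setRequestedVideoChannels to ask for specific participants at specific qualities. A standalone sketch of building a request; the structs are mirrored locally so it compiles alone, and the ssrc/endpoint values are made up:

#include <cstdint>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Local mirror of the structs above, just for this example.
struct MediaSsrcGroup {
    std::string semantics;
    std::vector<uint32_t> ssrcs;
};
struct VideoChannelDescription {
    enum class Quality { Thumbnail, Medium, Full };
    uint32_t audioSsrc = 0;
    std::string endpointId;
    std::vector<MediaSsrcGroup> ssrcGroups;
    Quality minQuality = Quality::Thumbnail;
    Quality maxQuality = Quality::Thumbnail;
};

int main() {
    std::vector<VideoChannelDescription> request;
    VideoChannelDescription speaker;
    speaker.audioSsrc = 12345;           // illustrative value
    speaker.endpointId = "endpoint-abc"; // illustrative value
    speaker.minQuality = VideoChannelDescription::Quality::Medium;
    speaker.maxQuality = VideoChannelDescription::Quality::Full;
    request.push_back(std::move(speaker));
    // The real call would be instance->setRequestedVideoChannels(std::move(request));
    std::cout << request.size() << " channel(s) requested\n";
}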
struct GroupInstanceDescriptor {
std::shared_ptr<Threads> threads;
GroupConfig config;
@@ -88,82 +138,20 @@ struct GroupInstanceDescriptor {
bool useDummyChannel{true};
bool disableIncomingChannels{false};
std::function<rtc::scoped_refptr<webrtc::AudioDeviceModule>(webrtc::TaskQueueFactory*)> createAudioDeviceModule;
std::shared_ptr<VideoCaptureInterface> videoCapture;
std::shared_ptr<VideoCaptureInterface> videoCapture; // deprecated
std::function<void(std::vector<uint32_t> const &)> incomingVideoSourcesUpdated;
std::function<webrtc::VideoTrackSourceInterface*()> getVideoSource;
std::function<void(std::vector<uint32_t> const &)> participantDescriptionsRequired;
std::function<std::shared_ptr<BroadcastPartTask>(std::shared_ptr<PlatformContext>, int64_t, int64_t, std::function<void(BroadcastPart &&)>)> requestBroadcastPart;
int outgoingAudioBitrateKbit{32};
bool disableOutgoingAudioProcessing{false};
VideoContentType videoContentType{VideoContentType::None};
bool initialEnableNoiseSuppression{false};
std::vector<VideoCodecName> videoCodecPreferences;
std::function<std::shared_ptr<RequestMediaChannelDescriptionTask>(std::vector<uint32_t> const &, std::function<void(std::vector<MediaChannelDescription> &&)>)> requestMediaChannelDescriptions;
int minOutgoingVideoBitrateKbit{100};
std::shared_ptr<PlatformContext> platformContext;
};
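Editor's note: the descriptor now prefers a getVideoSource callback over the deprecated videoCapture pointer, and gains knobs such as videoContentType and videoCodecPreferences. A hedged fragment showing how the new fields might be filled; this is not standalone and assumes the tgcalls headers above:

tgcalls::GroupInstanceDescriptor descriptor;
descriptor.videoContentType = tgcalls::VideoContentType::Generic;
descriptor.initialEnableNoiseSuppression = false;
descriptor.videoCodecPreferences = { tgcalls::VideoCodecName::VP8 };
descriptor.minOutgoingVideoBitrateKbit = 100;
// The deprecated videoCapture field gives way to a lazy source callback:
descriptor.getVideoSource = []() -> webrtc::VideoTrackSourceInterface* {
    return nullptr; // a real client returns its capturer's track source here
};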
struct GroupJoinPayloadFingerprint {
std::string hash;
std::string setup;
std::string fingerprint;
};
struct GroupJoinPayloadVideoSourceGroup {
std::vector<uint32_t> ssrcs;
std::string semantics;
};
struct GroupJoinPayloadVideoPayloadFeedbackType {
std::string type;
std::string subtype;
};
struct GroupJoinPayloadVideoPayloadType {
uint32_t id = 0;
std::string name;
uint32_t clockrate = 0;
uint32_t channels = 0;
std::vector<GroupJoinPayloadVideoPayloadFeedbackType> feedbackTypes;
std::vector<std::pair<std::string, std::string>> parameters;
};
struct GroupJoinPayload {
std::string ufrag;
std::string pwd;
std::vector<GroupJoinPayloadFingerprint> fingerprints;
std::vector<GroupJoinPayloadVideoPayloadType> videoPayloadTypes;
std::vector<std::pair<uint32_t, std::string>> videoExtensionMap;
uint32_t ssrc = 0;
std::vector<GroupJoinPayloadVideoSourceGroup> videoSourceGroups;
};
struct GroupParticipantDescription {
std::string endpointId;
uint32_t audioSsrc = 0;
std::vector<GroupJoinPayloadVideoPayloadType> videoPayloadTypes;
std::vector<std::pair<uint32_t, std::string>> videoExtensionMap;
std::vector<GroupJoinPayloadVideoSourceGroup> videoSourceGroups;
bool isRemoved = false;
};
struct GroupJoinResponseCandidate {
std::string port;
std::string protocol;
std::string network;
std::string generation;
std::string id;
std::string component;
std::string foundation;
std::string priority;
std::string ip;
std::string type;
std::string tcpType;
std::string relAddr;
std::string relPort;
};
struct GroupJoinResponsePayload {
std::string ufrag;
std::string pwd;
std::vector<GroupJoinPayloadFingerprint> fingerprints;
std::vector<GroupJoinResponseCandidate> candidates;
};
template <typename T>
class ThreadLocalObject;
@@ -178,20 +166,22 @@ public:
virtual void setConnectionMode(GroupConnectionMode connectionMode, bool keepBroadcastIfWasEnabled) = 0;
virtual void emitJoinPayload(std::function<void(GroupJoinPayload)> completion) = 0;
virtual void emitJoinPayload(std::function<void(GroupJoinPayload const &)> completion) = 0;
virtual void setJoinResponsePayload(GroupJoinResponsePayload payload, std::vector<tgcalls::GroupParticipantDescription> &&participants) = 0;
virtual void setJoinResponsePayload(std::string const &payload) = 0;
virtual void addParticipants(std::vector<GroupParticipantDescription> &&participants) = 0;
virtual void removeSsrcs(std::vector<uint32_t> ssrcs) = 0;
virtual void removeIncomingVideoSource(uint32_t ssrc) = 0;
virtual void setIsMuted(bool isMuted) = 0;
virtual void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture, std::function<void(GroupJoinPayload)> completion) = 0;
virtual void setIsNoiseSuppressionEnabled(bool isNoiseSuppressionEnabled) = 0;
virtual void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) = 0;
virtual void setVideoSource(std::function<webrtc::VideoTrackSourceInterface*()> getVideoSource) = 0;
virtual void setAudioOutputDevice(std::string id) = 0;
virtual void setAudioInputDevice(std::string id) = 0;
virtual void addIncomingVideoOutput(uint32_t ssrc, std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual void addIncomingVideoOutput(std::string const &endpointId, std::weak_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual void setVolume(uint32_t ssrc, double volume) = 0;
virtual void setFullSizeVideoSsrc(uint32_t ssrc) = 0;
virtual void setRequestedVideoChannels(std::vector<VideoChannelDescription> &&requestedVideoChannels) = 0;
struct AudioDevice {
enum class Type {Input, Output};
View file
@@ -0,0 +1,78 @@
#ifndef TGCALLS_GROUP_JOIN_PAYLOAD_H
#define TGCALLS_GROUP_JOIN_PAYLOAD_H
#include <vector>
#include <string>
#include <stdint.h>
namespace tgcalls {
struct GroupJoinPayloadVideoSourceGroup {
std::vector<uint32_t> ssrcs;
std::string semantics;
};
struct GroupJoinPayloadVideoPayloadType {
struct FeedbackType {
std::string type;
std::string subtype;
};
uint32_t id = 0;
std::string name;
uint32_t clockrate = 0;
uint32_t channels = 0;
std::vector<FeedbackType> feedbackTypes;
std::vector<std::pair<std::string, std::string>> parameters;
};
struct GroupJoinTransportDescription {
struct Fingerprint {
std::string hash;
std::string setup;
std::string fingerprint;
};
struct Candidate {
std::string port;
std::string protocol;
std::string network;
std::string generation;
std::string id;
std::string component;
std::string foundation;
std::string priority;
std::string ip;
std::string type;
std::string tcpType;
std::string relAddr;
std::string relPort;
};
std::string ufrag;
std::string pwd;
std::vector<Fingerprint> fingerprints;
std::vector<Candidate> candidates;
};
struct GroupJoinVideoInformation {
uint32_t serverVideoBandwidthProbingSsrc = 0;
std::string endpointId;
std::vector<GroupJoinPayloadVideoPayloadType> payloadTypes;
std::vector<std::pair<uint32_t, std::string>> extensionMap;
};
struct GroupParticipantVideoInformation {
std::string endpointId;
std::vector<GroupJoinPayloadVideoSourceGroup> ssrcGroups;
};
struct GroupJoinPayload {
uint32_t audioSsrc = 0;
std::string json;
};
}
#endif
View file
@@ -0,0 +1,373 @@
#include "GroupJoinPayloadInternal.h"
#include "third-party/json11.hpp"
#include <sstream>
namespace tgcalls {
namespace {
absl::optional<int32_t> parseInt(json11::Json::object const &object, std::string const &key) {
const auto value = object.find(key);
if (value == object.end() || !value->second.is_number()) {
return absl::nullopt;
}
return value->second.int_value();
}
absl::optional<std::string> parseString(json11::Json::object const &object, std::string const &key) {
const auto value = object.find(key);
if (value == object.end() || !value->second.is_string()) {
return absl::nullopt;
}
return value->second.string_value();
}
template <typename Out>
void splitString(const std::string &s, char delim, Out result) {
std::istringstream iss(s);
std::string item;
while (std::getline(iss, item, delim)) {
*result++ = item;
}
}
std::vector<std::string> splitString(const std::string &s, char delim) {
std::vector<std::string> elems;
splitString(s, delim, std::back_inserter(elems));
return elems;
}
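Editor's note: these two overloads together split a delimited string into tokens; parsePayloadType below uses them to pull "nack pli"-style rtcp-fb values apart into type and subtype. A standalone usage example (the helper is repeated here so the example compiles alone):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Same shape as the helpers above.
std::vector<std::string> splitString(const std::string &s, char delim) {
    std::vector<std::string> elems;
    std::istringstream iss(s);
    std::string item;
    while (std::getline(iss, item, delim)) {
        elems.push_back(item);
    }
    return elems;
}

int main() {
    for (const auto &part : splitString("nack pli", ' ')) {
        std::cout << part << "\n"; // prints "nack" then "pli"
    }
}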
absl::optional<GroupJoinTransportDescription> parseTransportDescription(json11::Json::object const &object) {
GroupJoinTransportDescription result;
if (const auto pwd = parseString(object, "pwd")) {
result.pwd = pwd.value();
} else {
return absl::nullopt;
}
if (const auto ufrag = parseString(object, "ufrag")) {
result.ufrag = ufrag.value();
} else {
return absl::nullopt;
}
const auto fingerprints = object.find("fingerprints");
if (fingerprints == object.end() || !fingerprints->second.is_array()) {
return absl::nullopt;
}
for (const auto &fingerprint : fingerprints->second.array_items()) {
if (!fingerprint.is_object()) {
return absl::nullopt;
}
GroupJoinTransportDescription::Fingerprint parsedFingerprint;
if (const auto hash = parseString(fingerprint.object_items(), "hash")) {
parsedFingerprint.hash = hash.value();
} else {
return absl::nullopt;
}
if (const auto fingerprintValue = parseString(fingerprint.object_items(), "fingerprint")) {
parsedFingerprint.fingerprint = fingerprintValue.value();
} else {
return absl::nullopt;
}
if (const auto setup = parseString(fingerprint.object_items(), "setup")) {
parsedFingerprint.setup = setup.value();
} else {
return absl::nullopt;
}
result.fingerprints.push_back(std::move(parsedFingerprint));
}
const auto candidates = object.find("candidates");
if (candidates == object.end() || !candidates->second.is_array()) {
return absl::nullopt;
}
for (const auto &candidate : candidates->second.array_items()) {
if (!candidate.is_object()) {
return absl::nullopt;
}
GroupJoinTransportDescription::Candidate parsedCandidate;
if (const auto port = parseString(candidate.object_items(), "port")) {
parsedCandidate.port = port.value();
} else {
return absl::nullopt;
}
if (const auto protocol = parseString(candidate.object_items(), "protocol")) {
parsedCandidate.protocol = protocol.value();
} else {
return absl::nullopt;
}
if (const auto network = parseString(candidate.object_items(), "network")) {
parsedCandidate.network = network.value();
} else {
return absl::nullopt;
}
if (const auto generation = parseString(candidate.object_items(), "generation")) {
parsedCandidate.generation = generation.value();
} else {
return absl::nullopt;
}
if (const auto id = parseString(candidate.object_items(), "id")) {
parsedCandidate.id = id.value();
} else {
return absl::nullopt;
}
if (const auto component = parseString(candidate.object_items(), "component")) {
parsedCandidate.component = component.value();
} else {
return absl::nullopt;
}
if (const auto foundation = parseString(candidate.object_items(), "foundation")) {
parsedCandidate.foundation = foundation.value();
} else {
return absl::nullopt;
}
if (const auto priority = parseString(candidate.object_items(), "priority")) {
parsedCandidate.priority = priority.value();
} else {
return absl::nullopt;
}
if (const auto ip = parseString(candidate.object_items(), "ip")) {
parsedCandidate.ip = ip.value();
} else {
return absl::nullopt;
}
if (const auto type = parseString(candidate.object_items(), "type")) {
parsedCandidate.type = type.value();
} else {
return absl::nullopt;
}
if (const auto tcpType = parseString(candidate.object_items(), "tcptype")) {
parsedCandidate.tcpType = tcpType.value();
}
if (const auto relAddr = parseString(candidate.object_items(), "rel-addr")) {
parsedCandidate.relAddr = relAddr.value();
}
if (const auto relPort = parseString(candidate.object_items(), "rel-port")) {
parsedCandidate.relPort = relPort.value();
}
result.candidates.push_back(std::move(parsedCandidate));
}
return result;
}
absl::optional<GroupJoinPayloadVideoPayloadType> parsePayloadType(json11::Json::object const &object) {
GroupJoinPayloadVideoPayloadType result;
if (const auto id = parseInt(object, "id")) {
result.id = (uint32_t)id.value();
} else {
return absl::nullopt;
}
if (const auto name = parseString(object, "name")) {
result.name = name.value();
} else {
return absl::nullopt;
}
if (const auto clockrate = parseInt(object, "clockrate")) {
result.clockrate = (uint32_t)clockrate.value();
} else {
result.clockrate = 0;
}
if (const auto channels = parseInt(object, "channels")) {
result.channels = (uint32_t)channels.value();
} else {
result.channels = 1;
}
const auto parameters = object.find("parameters");
if (parameters != object.end() && parameters->second.is_object()) {
for (const auto &parameter : parameters->second.object_items()) {
if (parameter.second.is_string()) {
result.parameters.push_back(std::make_pair(parameter.first, parameter.second.string_value()));
}
}
}
const auto rtcpFbs = object.find("rtcp-fbs");
if (rtcpFbs != object.end() && rtcpFbs->second.is_array()) {
for (const auto &item : rtcpFbs->second.array_items()) {
if (item.is_object()) {
const auto type = item.object_items().find("type");
if (type != item.object_items().end() && type->second.is_string()) {
GroupJoinPayloadVideoPayloadType::FeedbackType parsedFeedbackType;
const auto typeString = type->second.string_value();
const auto subtype = item.object_items().find("subtype");
if (subtype != item.object_items().end() && subtype->second.is_string()) {
parsedFeedbackType.type = typeString;
parsedFeedbackType.subtype = subtype->second.string_value();
} else {
auto components = splitString(typeString, ' ');
if (components.size() == 1) {
parsedFeedbackType.type = components[0];
} else if (components.size() == 2) {
parsedFeedbackType.type = components[0];
parsedFeedbackType.subtype = components[1];
} else {
continue;
}
}
result.feedbackTypes.push_back(std::move(parsedFeedbackType));
}
}
}
}
return result;
}
absl::optional<GroupJoinVideoInformation> parseVideoInformation(json11::Json::object const &object) {
GroupJoinVideoInformation result;
const auto serverSources = object.find("server_sources");
if (serverSources != object.end() && serverSources->second.is_array()) {
for (const auto &item : serverSources->second.array_items()) {
if (item.is_number()) {
int32_t value = item.int_value();
uint32_t unsignedValue = *(uint32_t *)&value;
result.serverVideoBandwidthProbingSsrc = unsignedValue;
}
}
}
const auto payloadTypes = object.find("payload-types");
if (payloadTypes != object.end() && payloadTypes->second.is_array()) {
for (const auto &payloadType : payloadTypes->second.array_items()) {
if (payloadType.is_object()) {
if (const auto parsedPayloadType = parsePayloadType(payloadType.object_items())) {
result.payloadTypes.push_back(parsedPayloadType.value());
}
}
}
}
const auto rtpHdrexts = object.find("rtp-hdrexts");
if (rtpHdrexts != object.end() && rtpHdrexts->second.is_array()) {
for (const auto &rtpHdrext : rtpHdrexts->second.array_items()) {
if (rtpHdrext.is_object()) {
const auto id = rtpHdrext.object_items().find("id");
if (id == rtpHdrext.object_items().end() || !id->second.is_number()) {
continue;
}
const auto uri = rtpHdrext.object_items().find("uri");
if (uri == rtpHdrext.object_items().end() || !uri->second.is_string()) {
continue;
}
result.extensionMap.push_back(std::make_pair(id->second.int_value(), uri->second.string_value()));
}
}
}
const auto endpointId = object.find("endpoint");
if (endpointId != object.end() && endpointId->second.is_string()) {
result.endpointId = endpointId->second.string_value();
}
return result;
}
}
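Editor's note: parseVideoInformation above and serialize() below move SSRCs between int32_t and uint32_t with *(uint32_t *)&value because json11 only carries signed numbers. A memcpy-based equivalent does the same bit-exact round trip without the type-punning cast; standalone sketch:

#include <cstdint>
#include <cstring>
#include <iostream>

uint32_t toUnsigned(int32_t value) {
    uint32_t result;
    std::memcpy(&result, &value, sizeof(result)); // bit-exact copy
    return result;
}

int32_t toSigned(uint32_t value) {
    int32_t result;
    std::memcpy(&result, &value, sizeof(result));
    return result;
}

int main() {
    uint32_t ssrc = 0x90000001u;          // high-bit SSRCs look negative in JSON
    std::cout << toSigned(ssrc) << "\n";  // -1879048191
    std::cout << toUnsigned(toSigned(ssrc)) << "\n"; // 2415919105 again
}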
std::string GroupJoinInternalPayload::serialize() {
json11::Json::object object;
int32_t signedSsrc = *(int32_t *)&audioSsrc;
object.insert(std::make_pair("ssrc", json11::Json(signedSsrc)));
object.insert(std::make_pair("ufrag", json11::Json(transport.ufrag)));
object.insert(std::make_pair("pwd", json11::Json(transport.pwd)));
json11::Json::array fingerprints;
for (const auto &fingerprint : transport.fingerprints) {
json11::Json::object fingerprintJson;
fingerprintJson.insert(std::make_pair("hash", json11::Json(fingerprint.hash)));
fingerprintJson.insert(std::make_pair("fingerprint", json11::Json(fingerprint.fingerprint)));
fingerprintJson.insert(std::make_pair("setup", json11::Json(fingerprint.setup)));
fingerprints.push_back(json11::Json(std::move(fingerprintJson)));
}
object.insert(std::make_pair("fingerprints", json11::Json(std::move(fingerprints))));
if (videoInformation) {
json11::Json::array ssrcGroups;
for (const auto &ssrcGroup : videoInformation->ssrcGroups) {
json11::Json::object ssrcGroupJson;
json11::Json::array ssrcGroupSources;
for (auto ssrc : ssrcGroup.ssrcs) {
int32_t signedValue = *(int32_t *)&ssrc;
ssrcGroupSources.push_back(json11::Json(signedValue));
}
ssrcGroupJson.insert(std::make_pair("sources", json11::Json(std::move(ssrcGroupSources))));
ssrcGroupJson.insert(std::make_pair("semantics", json11::Json(ssrcGroup.semantics)));
ssrcGroups.push_back(json11::Json(std::move(ssrcGroupJson)));
}
object.insert(std::make_pair("ssrc-groups", json11::Json(std::move(ssrcGroups))));
}
auto json = json11::Json(std::move(object));
return json.dump();
}
absl::optional<GroupJoinResponsePayload> GroupJoinResponsePayload::parse(std::string const &data) {
std::string parsingError;
auto json = json11::Json::parse(std::string(data.begin(), data.end()), parsingError);
if (json.type() != json11::Json::OBJECT) {
return absl::nullopt;
}
tgcalls::GroupJoinResponsePayload result;
const auto transport = json.object_items().find("transport");
if (transport == json.object_items().end() || !transport->second.is_object()) {
return absl::nullopt;
}
if (const auto parsedTransport = parseTransportDescription(transport->second.object_items())) {
result.transport = parsedTransport.value();
} else {
return absl::nullopt;
}
const auto video = json.object_items().find("video");
if (video != json.object_items().end() && video->second.is_object()) {
result.videoInformation = parseVideoInformation(video->second.object_items());
}
return result;
}
}
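Editor's note: the serializer and parser above talk to each other through plain JSON strings. A small standalone round trip with json11, mirroring the calls used in this file (the only assumption is that third-party/json11.hpp is on the include path):

#include "third-party/json11.hpp"
#include <iostream>
#include <string>
#include <utility>

int main() {
    // Build a miniature join payload the same way serialize() does.
    json11::Json::object object;
    object.insert(std::make_pair("ssrc", json11::Json(12345)));
    object.insert(std::make_pair("ufrag", json11::Json(std::string("abcd"))));
    std::string wire = json11::Json(std::move(object)).dump();

    // Parse it back, mirroring GroupJoinResponsePayload::parse.
    std::string error;
    auto parsed = json11::Json::parse(wire, error);
    if (parsed.type() != json11::Json::OBJECT) {
        std::cerr << "parse failed: " << error << "\n";
        return 1;
    }
    std::cout << parsed["ufrag"].string_value() << "\n"; // prints "abcd"
}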
View file
@@ -0,0 +1,32 @@
#ifndef TGCALLS_GROUP_JOIN_PAYLOAD_INTERNAL_H
#define TGCALLS_GROUP_JOIN_PAYLOAD_INTERNAL_H
#include "GroupJoinPayload.h"
#include <vector>
#include <string>
#include <stdint.h>
#include "absl/types/optional.h"
namespace tgcalls {
struct GroupJoinResponsePayload {
GroupJoinTransportDescription transport;
absl::optional<GroupJoinVideoInformation> videoInformation;
static absl::optional<GroupJoinResponsePayload> parse(std::string const &data);
};
struct GroupJoinInternalPayload {
GroupJoinTransportDescription transport;
uint32_t audioSsrc = 0;
absl::optional<GroupParticipantVideoInformation> videoInformation;
std::string serialize();
};
}
#endif
View file
@@ -13,6 +13,8 @@
#include "p2p/base/dtls_transport_factory.h"
#include "pc/dtls_srtp_transport.h"
#include "pc/dtls_transport.h"
#include "media/sctp/sctp_transport_factory.h"
#include "platform/PlatformInterface.h"
#include "StaticThreads.h"
@@ -137,10 +139,10 @@ public:
_dataChannel->OnDataReceived(params, buffer);
}
virtual bool SendData(const cricket::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) override {
virtual bool SendData(int sid, const webrtc::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) override {
assert(_threads->getNetworkThread()->IsCurrent());
return _sctpTransport->SendData(params, payload);
return _sctpTransport->SendData(sid, params, payload);
}
virtual bool ConnectDataChannel(webrtc::SctpDataChannel *data_channel) override {
@@ -199,14 +201,12 @@ webrtc::CryptoOptions GroupNetworkManager::getDefaulCryptoOptions() {
GroupNetworkManager::GroupNetworkManager(
std::function<void(const State &)> stateUpdated,
std::function<void(rtc::CopyOnWriteBuffer const &, bool)> transportMessageReceived,
std::function<void(rtc::CopyOnWriteBuffer const &, int64_t)> rtcpPacketReceived,
std::function<void(bool)> dataChannelStateUpdated,
std::function<void(std::string const &)> dataChannelMessageReceived,
std::shared_ptr<Threads> threads) :
_threads(std::move(threads)),
_stateUpdated(std::move(stateUpdated)),
_transportMessageReceived(std::move(transportMessageReceived)),
_rtcpPacketReceived(std::move(rtcpPacketReceived)),
_dataChannelStateUpdated(dataChannelStateUpdated),
_dataChannelMessageReceived(dataChannelMessageReceived) {
assert(_threads->getNetworkThread()->IsCurrent());
@@ -214,18 +214,18 @@ _dataChannelMessageReceived(dataChannelMessageReceived) {
_localIceParameters = PeerIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH));
_localCertificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(rtc::KT_ECDSA), absl::nullopt);
_networkMonitorFactory = PlatformInterface::SharedInstance()->createNetworkMonitorFactory();
_socketFactory.reset(new rtc::BasicPacketSocketFactory(_threads->getNetworkThread()));
_networkManager = std::make_unique<rtc::BasicNetworkManager>();
_networkManager = std::make_unique<rtc::BasicNetworkManager>(_networkMonitorFactory.get());
_asyncResolverFactory = std::make_unique<webrtc::BasicAsyncResolverFactory>();
_dtlsSrtpTransport = std::make_unique<webrtc::DtlsSrtpTransport>(true);
_dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr);
_dtlsSrtpTransport->SetActiveResetSrtpParams(false);
_dtlsSrtpTransport->SignalDtlsStateChange.connect(this, &GroupNetworkManager::DtlsStateChanged);
_dtlsSrtpTransport->SignalReadyToSend.connect(this, &GroupNetworkManager::DtlsReadyToSend);
_dtlsSrtpTransport->SignalRtpPacketReceived.connect(this, &GroupNetworkManager::RtpPacketReceived_n);
_dtlsSrtpTransport->SignalRtcpPacketReceived.connect(this, &GroupNetworkManager::OnRtcpPacketReceived_n);
resetDtlsSrtpTransport();
}
@@ -281,8 +281,6 @@ void GroupNetworkManager::resetDtlsSrtpTransport() {
this, &GroupNetworkManager::OnTransportWritableState_n);
_dtlsTransport->SignalReceivingState.connect(
this, &GroupNetworkManager::OnTransportReceivingState_n);
_dtlsTransport->SignalDtlsHandshakeError.connect(
this, &GroupNetworkManager::OnDtlsHandshakeError);
_dtlsTransport->SetDtlsRole(rtc::SSLRole::SSL_SERVER);
_dtlsTransport->SetLocalCertificate(_localCertificate);
@@ -293,22 +291,27 @@ void GroupNetworkManager::resetDtlsSrtpTransport() {
void GroupNetworkManager::start() {
_transportChannel->MaybeStartGathering();
/*const auto weak = std::weak_ptr<GroupNetworkManager>(shared_from_this());
_dataChannelInterface.reset(new SctpDataChannelProviderInterfaceImpl(_dtlsTransport.get(), [weak, threads = _threads](bool state) {
assert(threads->getNetworkThread()->IsCurrent());
const auto strong = weak.lock();
if (!strong) {
return;
}
strong->_dataChannelStateUpdated(state);
}, [weak, threads = _threads](std::string const &message) {
assert(threads->getNetworkThread()->IsCurrent());
const auto strong = weak.lock();
if (!strong) {
return;
}
strong->_dataChannelMessageReceived(message);
}));*/
const auto weak = std::weak_ptr<GroupNetworkManager>(shared_from_this());
_dataChannelInterface.reset(new SctpDataChannelProviderInterfaceImpl(
_dtlsTransport.get(),
[weak, threads = _threads](bool state) {
assert(threads->getNetworkThread()->IsCurrent());
const auto strong = weak.lock();
if (!strong) {
return;
}
strong->_dataChannelStateUpdated(state);
},
[weak, threads = _threads](std::string const &message) {
assert(threads->getNetworkThread()->IsCurrent());
const auto strong = weak.lock();
if (!strong) {
return;
}
strong->_dataChannelMessageReceived(message);
},
_threads
));
}
void GroupNetworkManager::stop() {
@@ -317,7 +320,6 @@ void GroupNetworkManager::stop() {
_dtlsTransport->SignalWritableState.disconnect(this);
_dtlsTransport->SignalReceivingState.disconnect(this);
_dtlsTransport->SignalDtlsHandshakeError.disconnect(this);
_dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr);
@@ -416,25 +418,6 @@ void GroupNetworkManager::OnTransportReceivingState_n(rtc::PacketTransportIntern
UpdateAggregateStates_n();
}
void GroupNetworkManager::OnDtlsHandshakeError(rtc::SSLHandshakeError error) {
assert(_threads->getNetworkThread()->IsCurrent());
}
void GroupNetworkManager::DtlsStateChanged() {
UpdateAggregateStates_n();
if (_dtlsTransport->IsDtlsActive()) {
const auto weak = std::weak_ptr<GroupNetworkManager>(shared_from_this());
_threads->getNetworkThread()->PostTask(RTC_FROM_HERE, [weak]() {
const auto strong = weak.lock();
if (!strong) {
return;
}
strong->UpdateAggregateStates_n();
});
}
}
void GroupNetworkManager::DtlsReadyToSend(bool isReadyToSend) {
UpdateAggregateStates_n();
@@ -470,12 +453,6 @@ void GroupNetworkManager::RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, in
}
}
void GroupNetworkManager::OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us) {
if (_rtcpPacketReceived) {
_rtcpPacketReceived(*packet, packet_time_us);
}
}
void GroupNetworkManager::UpdateAggregateStates_n() {
assert(_threads->getNetworkThread()->IsCurrent());
View file
@@ -8,9 +8,10 @@
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/network_monitor_factory.h"
#include "api/candidate.h"
#include "media/base/media_channel.h"
#include "media/sctp/sctp_transport.h"
#include "rtc_base/ssl_fingerprint.h"
#include "pc/sctp_data_channel.h"
#include <functional>
@@ -58,7 +59,6 @@ public:
GroupNetworkManager(
std::function<void(const State &)> stateUpdated,
std::function<void(rtc::CopyOnWriteBuffer const &, bool)> transportMessageReceived,
std::function<void(rtc::CopyOnWriteBuffer const &, int64_t)> rtcpPacketReceived,
std::function<void(bool)> dataChannelStateUpdated,
std::function<void(std::string const &)> dataChannelMessageReceived,
std::shared_ptr<Threads> threads);
@@ -82,11 +82,9 @@ private:
void candidateGatheringState(cricket::IceTransportInternal *transport);
void OnTransportWritableState_n(rtc::PacketTransportInternal *transport);
void OnTransportReceivingState_n(rtc::PacketTransportInternal *transport);
void OnDtlsHandshakeError(rtc::SSLHandshakeError error);
void transportStateChanged(cricket::IceTransportInternal *transport);
void transportReadyToSend(cricket::IceTransportInternal *transport);
void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused);
void DtlsStateChanged();
void DtlsReadyToSend(bool DtlsReadyToSend);
void UpdateAggregateStates_n();
void RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us, bool isUnresolved);
@@ -98,10 +96,10 @@ private:
std::shared_ptr<Threads> _threads;
std::function<void(const GroupNetworkManager::State &)> _stateUpdated;
std::function<void(rtc::CopyOnWriteBuffer const &, bool)> _transportMessageReceived;
std::function<void(rtc::CopyOnWriteBuffer const &, int64_t)> _rtcpPacketReceived;
std::function<void(bool)> _dataChannelStateUpdated;
std::function<void(std::string const &)> _dataChannelMessageReceived;
std::unique_ptr<rtc::NetworkMonitorFactory> _networkMonitorFactory;
std::unique_ptr<rtc::BasicPacketSocketFactory> _socketFactory;
std::unique_ptr<rtc::BasicNetworkManager> _networkManager;
std::unique_ptr<webrtc::TurnCustomizer> _turnCustomizer;
View file
@@ -12,7 +12,6 @@ extern "C" {
#include <string>
#include <set>
#include <map>
#include <stdint.h>
namespace tgcalls {
@@ -310,26 +309,28 @@ private:
}
int ret = 0;
do {
ret = av_read_frame(_inputFormatContext, &_packet);
if (ret < 0) {
_didReadToEnd = true;
return;
}
ret = avcodec_send_packet(_codecContext, &_packet);
if (ret < 0) {
_didReadToEnd = true;
return;
}
int bytesPerSample = av_get_bytes_per_sample(_codecContext->sample_fmt);
if (bytesPerSample != 2 && bytesPerSample != 4) {
_didReadToEnd = true;
return;
}
ret = avcodec_receive_frame(_codecContext, _frame);
} while (ret == AVERROR(EAGAIN));
ret = avcodec_receive_frame(_codecContext, _frame);
if (ret != 0) {
_didReadToEnd = true;
return;
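Editor's note: the new do/while handles decoders that need more than one packet before they can emit audio: avcodec_receive_frame returns AVERROR(EAGAIN) until enough input has been sent, so the loop keeps feeding packets. The same send/receive contract in isolation, as a sketch against FFmpeg's avcodec API with error handling trimmed:

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

// Pull one decoded frame, feeding packets until the decoder stops asking for more.
static bool readOneFrame(AVFormatContext *fmt, AVCodecContext *codec,
                         AVPacket *packet, AVFrame *frame) {
    int ret = 0;
    do {
        if (av_read_frame(fmt, packet) < 0) {
            return false;                 // end of stream
        }
        if (avcodec_send_packet(codec, packet) < 0) {
            return false;                 // decoder rejected the packet
        }
        ret = avcodec_receive_frame(codec, frame);
    } while (ret == AVERROR(EAGAIN));     // needs more input before output
    return ret == 0;
}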
View file
@@ -3,6 +3,7 @@
#include "absl/types/optional.h"
#include <vector>
#include <stdint.h>
namespace tgcalls {
View file
@@ -5,6 +5,9 @@
#include "api/video_codecs/video_encoder_factory.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/media_stream_interface.h"
#include "rtc_base/network_monitor_factory.h"
#include "modules/audio_device/include/audio_device.h"
#include "rtc_base/ref_counted_object.h"
#include <string>
namespace tgcalls {
@@ -16,6 +19,277 @@ class PlatformContext;
struct PlatformCaptureInfo {
bool shouldBeAdaptedToReceiverAspectRate = false;
int rotation = 0;
};
class WrappedAudioDeviceModule : public webrtc::AudioDeviceModule {
public:
virtual void Stop() = 0;
};
class DefaultWrappedAudioDeviceModule : public WrappedAudioDeviceModule {
public:
DefaultWrappedAudioDeviceModule(rtc::scoped_refptr<webrtc::AudioDeviceModule> impl) :
_impl(impl) {
}
virtual ~DefaultWrappedAudioDeviceModule() {
}
virtual void Stop() override {
}
virtual int32_t ActiveAudioLayer(AudioLayer *audioLayer) const override {
return _impl->ActiveAudioLayer(audioLayer);
}
virtual int32_t RegisterAudioCallback(webrtc::AudioTransport *audioCallback) override {
return _impl->RegisterAudioCallback(audioCallback);
}
virtual int32_t Init() override {
return _impl->Init();
}
virtual int32_t Terminate() override {
return _impl->Terminate();
}
virtual bool Initialized() const override {
return _impl->Initialized();
}
virtual int16_t PlayoutDevices() override {
return _impl->PlayoutDevices();
}
virtual int16_t RecordingDevices() override {
return _impl->RecordingDevices();
}
virtual int32_t PlayoutDeviceName(uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override {
return _impl->PlayoutDeviceName(index, name, guid);
}
virtual int32_t RecordingDeviceName(uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override {
return _impl->RecordingDeviceName(index, name, guid);
}
virtual int32_t SetPlayoutDevice(uint16_t index) override {
return _impl->SetPlayoutDevice(index);
}
virtual int32_t SetPlayoutDevice(WindowsDeviceType device) override {
return _impl->SetPlayoutDevice(device);
}
virtual int32_t SetRecordingDevice(uint16_t index) override {
return _impl->SetRecordingDevice(index);
}
virtual int32_t SetRecordingDevice(WindowsDeviceType device) override {
return _impl->SetRecordingDevice(device);
}
virtual int32_t PlayoutIsAvailable(bool *available) override {
return _impl->PlayoutIsAvailable(available);
}
virtual int32_t InitPlayout() override {
return _impl->InitPlayout();
}
virtual bool PlayoutIsInitialized() const override {
return _impl->PlayoutIsInitialized();
}
virtual int32_t RecordingIsAvailable(bool *available) override {
return _impl->RecordingIsAvailable(available);
}
virtual int32_t InitRecording() override {
return _impl->InitRecording();
}
virtual bool RecordingIsInitialized() const override {
return _impl->RecordingIsInitialized();
}
virtual int32_t StartPlayout() override {
return _impl->StartPlayout();
}
virtual int32_t StopPlayout() override {
return _impl->StopPlayout();
}
virtual bool Playing() const override {
return _impl->Playing();
}
virtual int32_t StartRecording() override {
return _impl->StartRecording();
}
virtual int32_t StopRecording() override {
return _impl->StopRecording();
}
virtual bool Recording() const override {
return _impl->Recording();
}
virtual int32_t InitSpeaker() override {
return _impl->InitSpeaker();
}
virtual bool SpeakerIsInitialized() const override {
return _impl->SpeakerIsInitialized();
}
virtual int32_t InitMicrophone() override {
return _impl->InitMicrophone();
}
virtual bool MicrophoneIsInitialized() const override {
return _impl->MicrophoneIsInitialized();
}
virtual int32_t SpeakerVolumeIsAvailable(bool *available) override {
return _impl->SpeakerVolumeIsAvailable(available);
}
virtual int32_t SetSpeakerVolume(uint32_t volume) override {
return _impl->SetSpeakerVolume(volume);
}
virtual int32_t SpeakerVolume(uint32_t* volume) const override {
return _impl->SpeakerVolume(volume);
}
virtual int32_t MaxSpeakerVolume(uint32_t *maxVolume) const override {
return _impl->MaxSpeakerVolume(maxVolume);
}
virtual int32_t MinSpeakerVolume(uint32_t *minVolume) const override {
return _impl->MinSpeakerVolume(minVolume);
}
virtual int32_t MicrophoneVolumeIsAvailable(bool *available) override {
return _impl->MicrophoneVolumeIsAvailable(available);
}
virtual int32_t SetMicrophoneVolume(uint32_t volume) override {
return _impl->SetMicrophoneVolume(volume);
}
virtual int32_t MicrophoneVolume(uint32_t *volume) const override {
return _impl->MicrophoneVolume(volume);
}
virtual int32_t MaxMicrophoneVolume(uint32_t *maxVolume) const override {
return _impl->MaxMicrophoneVolume(maxVolume);
}
virtual int32_t MinMicrophoneVolume(uint32_t *minVolume) const override {
return _impl->MinMicrophoneVolume(minVolume);
}
virtual int32_t SpeakerMuteIsAvailable(bool *available) override {
return _impl->SpeakerMuteIsAvailable(available);
}
virtual int32_t SetSpeakerMute(bool enable) override {
return _impl->SetSpeakerMute(enable);
}
virtual int32_t SpeakerMute(bool *enabled) const override {
return _impl->SpeakerMute(enabled);
}
virtual int32_t MicrophoneMuteIsAvailable(bool *available) override {
return _impl->MicrophoneMuteIsAvailable(available);
}
virtual int32_t SetMicrophoneMute(bool enable) override {
return _impl->SetMicrophoneMute(enable);
}
virtual int32_t MicrophoneMute(bool *enabled) const override {
return _impl->MicrophoneMute(enabled);
}
virtual int32_t StereoPlayoutIsAvailable(bool *available) const override {
return _impl->StereoPlayoutIsAvailable(available);
}
virtual int32_t SetStereoPlayout(bool enable) override {
return _impl->SetStereoPlayout(enable);
}
virtual int32_t StereoPlayout(bool *enabled) const override {
return _impl->StereoPlayout(enabled);
}
virtual int32_t StereoRecordingIsAvailable(bool *available) const override {
return _impl->StereoRecordingIsAvailable(available);
}
virtual int32_t SetStereoRecording(bool enable) override {
return _impl->SetStereoRecording(enable);
}
virtual int32_t StereoRecording(bool *enabled) const override {
return _impl->StereoRecording(enabled);
}
virtual int32_t PlayoutDelay(uint16_t* delayMS) const override {
return _impl->PlayoutDelay(delayMS);
}
virtual bool BuiltInAECIsAvailable() const override {
return _impl->BuiltInAECIsAvailable();
}
virtual bool BuiltInAGCIsAvailable() const override {
return _impl->BuiltInAGCIsAvailable();
}
virtual bool BuiltInNSIsAvailable() const override {
return _impl->BuiltInNSIsAvailable();
}
virtual int32_t EnableBuiltInAEC(bool enable) override {
return _impl->EnableBuiltInAEC(enable);
}
virtual int32_t EnableBuiltInAGC(bool enable) override {
return _impl->EnableBuiltInAGC(enable);
}
virtual int32_t EnableBuiltInNS(bool enable) override {
return _impl->EnableBuiltInNS(enable);
}
virtual int32_t GetPlayoutUnderrunCount() const override {
return _impl->GetPlayoutUnderrunCount();
}
#if defined(WEBRTC_IOS)
virtual int GetPlayoutAudioParameters(webrtc::AudioParameters *params) const override {
return _impl->GetPlayoutAudioParameters(params);
}
virtual int GetRecordAudioParameters(webrtc::AudioParameters *params) const override {
return _impl->GetRecordAudioParameters(params);
}
#endif // WEBRTC_IOS
rtc::scoped_refptr<webrtc::AudioDeviceModule> WrappedInstance() {
return _impl;
}
private:
rtc::scoped_refptr<webrtc::AudioDeviceModule> _impl;
};
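Editor's note: DefaultWrappedAudioDeviceModule is a pure forwarding decorator — every AudioDeviceModule call is delegated to _impl, and the only new behavior is the (here empty) Stop() hook that platform wrappers can override. The pattern in miniature, standalone, with illustrative names:

#include <iostream>
#include <memory>
#include <utility>

struct Device {
    virtual ~Device() = default;
    virtual int init() = 0;
};

// Forwarding wrapper: same interface, delegates everything, adds one hook.
class WrappedDevice : public Device {
public:
    explicit WrappedDevice(std::shared_ptr<Device> impl) : _impl(std::move(impl)) {}
    int init() override { return _impl->init(); } // pure delegation
    virtual void stop() {}                        // extra hook, default no-op
private:
    std::shared_ptr<Device> _impl;
};

struct RealDevice : Device {
    int init() override { std::cout << "init\n"; return 0; }
};

int main() {
    WrappedDevice wrapped(std::make_shared<RealDevice>());
    wrapped.init();
    wrapped.stop();
}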
class PlatformInterface {
@@ -26,12 +300,19 @@ public:
virtual void configurePlatformAudio() {
}
virtual std::unique_ptr<rtc::NetworkMonitorFactory> createNetworkMonitorFactory() {
return nullptr;
}
virtual std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory(std::shared_ptr<PlatformContext> platformContext) = 0;
virtual std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory(std::shared_ptr<PlatformContext> platformContext) = 0;
virtual bool supportsEncoding(const std::string &codecName, std::shared_ptr<PlatformContext> platformContext) = 0;
virtual rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) = 0;
virtual rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread, bool screencapture) = 0;
virtual void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) = 0;
virtual std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, std::string deviceId, std::function<void(VideoState)> stateUpdated, std::function<void(PlatformCaptureInfo)> captureInfoUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) = 0;
virtual rtc::scoped_refptr<WrappedAudioDeviceModule> wrapAudioDeviceModule(rtc::scoped_refptr<webrtc::AudioDeviceModule> module) {
return new rtc::RefCountedObject<DefaultWrappedAudioDeviceModule>(module);
}
};
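Editor's note: wrapAudioDeviceModule's default body hands the wrapper to the caller as new rtc::RefCountedObject<DefaultWrappedAudioDeviceModule>(module) — WebRTC's way of attaching AddRef/Release to an abstract interface so it can live in a scoped_refptr. A standalone stand-in for that idiom (the real rtc::RefCountedObject differs in detail, e.g. its starting count; this is only a sketch):

#include <iostream>
#include <utility>

template <typename T>
class RefCounted : public T {
public:
    template <typename... Args>
    explicit RefCounted(Args &&...args) : T(std::forward<Args>(args)...) {}
    void AddRef() { ++_count; }
    void Release() {
        if (--_count == 0) {
            delete this; // last reference gone
        }
    }
private:
    int _count = 1; // simplified: starts owned by the creator
};

struct Module {
    virtual ~Module() = default;
    virtual void start() { std::cout << "start\n"; }
};

int main() {
    auto *m = new RefCounted<Module>();
    m->start();
    m->Release(); // drops to zero and deletes
}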
View file
@@ -5,22 +5,22 @@
namespace tgcalls {
AndroidContext::AndroidContext(JNIEnv *env, jobject instance) {
AndroidContext::AndroidContext(JNIEnv *env, jobject instance, bool screencast) {
VideoCameraCapturerClass = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/VideoCameraCapturer"));
VideoCapturerDeviceClass = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/VideoCapturerDevice"));
jmethodID initMethodId = env->GetMethodID(VideoCameraCapturerClass, "<init>", "()V");
jmethodID initMethodId = env->GetMethodID(VideoCapturerDeviceClass, "<init>", "(Z)V");
javaCapturer = env->NewGlobalRef(env->NewObject(VideoCameraCapturerClass, initMethodId));
javaCapturer = env->NewGlobalRef(env->NewObject(VideoCapturerDeviceClass, initMethodId, screencast));
javaInstance = env->NewGlobalRef(instance);
}
AndroidContext::~AndroidContext() {
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
jmethodID onDestroyMethodId = env->GetMethodID(VideoCameraCapturerClass, "onDestroy", "()V");
jmethodID onDestroyMethodId = env->GetMethodID(VideoCapturerDeviceClass, "onDestroy", "()V");
env->CallVoidMethod(javaCapturer, onDestroyMethodId);
env->DeleteGlobalRef(javaCapturer);
javaCapturer = nullptr;
env->DeleteGlobalRef(VideoCameraCapturerClass);
env->DeleteGlobalRef(VideoCapturerDeviceClass);
if (javaInstance) {
env->DeleteGlobalRef(javaInstance);
@@ -40,7 +40,7 @@ jobject AndroidContext::getJavaCapturer() {
}
jclass AndroidContext::getJavaCapturerClass() {
return VideoCameraCapturerClass;
return VideoCapturerDeviceClass;
}
} // namespace tgcalls
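Editor's note: the constructor/destructor pair above shows the usual JNI ownership dance — FindClass and NewObject results are promoted to global references so they survive across threads and calls, and every global reference is explicitly deleted on teardown. The same dance in isolation; this sketch assumes a valid JNIEnv and a hypothetical Java class com/example/Foo with a boolean-argument constructor:

#include <jni.h>

// Create a Java object from native code and keep it across JNI calls.
jobject createAndPin(JNIEnv *env) {
    jclass localCls = env->FindClass("com/example/Foo");      // local reference
    jclass cls = (jclass) env->NewGlobalRef(localCls);        // promote to global
    env->DeleteLocalRef(localCls);
    jmethodID ctor = env->GetMethodID(cls, "<init>", "(Z)V"); // boolean-arg ctor
    jobject obj = env->NewGlobalRef(env->NewObject(cls, ctor, (jboolean) JNI_TRUE));
    env->DeleteGlobalRef(cls); // done with the class ref in this sketch
    return obj;                // caller must DeleteGlobalRef eventually
}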
View file
@@ -10,7 +10,7 @@ namespace tgcalls {
class AndroidContext final : public PlatformContext {
public:
AndroidContext(JNIEnv *env, jobject instance);
AndroidContext(JNIEnv *env, jobject instance, bool screencast);
~AndroidContext() override;
jobject getJavaCapturer();
@@ -20,9 +20,10 @@ public:
void setJavaInstance(JNIEnv *env, jobject instance);
std::shared_ptr<BroadcastPartTask> streamTask;
std::vector<std::shared_ptr<RequestMediaChannelDescriptionTask>> descriptionTasks;
private:
jclass VideoCameraCapturerClass = nullptr;
jclass VideoCapturerDeviceClass = nullptr;
jobject javaCapturer = nullptr;
jobject javaInstance = nullptr;
View file
@@ -3,6 +3,8 @@
#include <rtc_base/ssl_adapter.h>
#include <modules/utility/include/jvm_android.h>
#include <sdk/android/src/jni/android_video_track_source.h>
#include <sdk/android/src/jni/pc/android_network_monitor.h>
#include <media/base/media_constants.h>
#include "VideoCapturerInterfaceImpl.h"
@@ -55,10 +57,10 @@ void AndroidInterface::adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSou
}
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread, bool screencapture) {
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
_source = webrtc::CreateJavaVideoSource(env, signalingThread, false, false);
_source[screencapture ? 1 : 0] = webrtc::CreateJavaVideoSource(env, signalingThread, false, false);
return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, _source);
return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, _source[screencapture ? 1 : 0]);
}
bool AndroidInterface::supportsEncoding(const std::string &codecName, std::shared_ptr<PlatformContext> platformContext) {
@@ -84,9 +86,12 @@ bool AndroidInterface::supportsEncoding(const std::string &codecName, std::share
}
std::unique_ptr<VideoCapturerInterface> AndroidInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, std::string deviceId, std::function<void(VideoState)> stateUpdated, std::function<void(PlatformCaptureInfo)> captureInfoUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) {
return std::make_unique<VideoCapturerInterfaceImpl>(_source, deviceId, stateUpdated, platformContext);
return std::make_unique<VideoCapturerInterfaceImpl>(_source[deviceId == "screen" ? 1 : 0], deviceId, stateUpdated, platformContext);
}
std::unique_ptr<rtc::NetworkMonitorFactory> AndroidInterface::createNetworkMonitorFactory() {
return std::make_unique<webrtc::jni::AndroidNetworkMonitorFactory>();
}
std::unique_ptr<PlatformInterface> CreatePlatformInterface() {
return std::make_unique<AndroidInterface>();
View file
@@ -13,12 +13,13 @@ public:
std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory(std::shared_ptr<PlatformContext> platformContext) override;
std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory(std::shared_ptr<PlatformContext> platformContext) override;
bool supportsEncoding(const std::string &codecName, std::shared_ptr<PlatformContext> platformContext) override;
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override;
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread, bool screencapture) override;
void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) override;
std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, std::string deviceId, std::function<void(VideoState)> stateUpdated, std::function<void(PlatformCaptureInfo)> captureInfoUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) override;
std::unique_ptr<rtc::NetworkMonitorFactory> createNetworkMonitorFactory() override;
private:
rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> _source;
rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> _source[2];
std::unique_ptr<webrtc::VideoEncoderFactory> hardwareVideoEncoderFactory;
std::unique_ptr<webrtc::VideoEncoderFactory> softwareVideoEncoderFactory;
@ -1,6 +1,6 @@
#include "VideoCameraCapturer.h" #include "VideoCameraCapturer.h"
#include <stdint.h> #include <cstdint>
#include <memory> #include <memory>
#include <algorithm> #include <algorithm>
@ -13,8 +13,8 @@ namespace tgcalls {
VideoCameraCapturer::VideoCameraCapturer(rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> source, std::string deviceId, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) : _source(source), _stateUpdated(stateUpdated), _platformContext(platformContext) { VideoCameraCapturer::VideoCameraCapturer(rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> source, std::string deviceId, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) : _source(source), _stateUpdated(stateUpdated), _platformContext(platformContext) {
AndroidContext *context = (AndroidContext *) platformContext.get(); AndroidContext *context = (AndroidContext *) platformContext.get();
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "init", "(JZ)V"); jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "init", "(JLjava/lang/String;)V");
env->CallVoidMethod(context->getJavaCapturer(), methodId, (jlong) (intptr_t) this, (jboolean) (deviceId != "back")); env->CallVoidMethod(context->getJavaCapturer(), methodId, (jlong) (intptr_t) this, env->NewStringUTF(deviceId.c_str()));
} }
void VideoCameraCapturer::setState(VideoState state) { void VideoCameraCapturer::setState(VideoState state) {
@ -23,7 +23,7 @@ void VideoCameraCapturer::setState(VideoState state) {
_stateUpdated(_state); _stateUpdated(_state);
} }
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
AndroidContext *context = (AndroidContext *) _platformContext.get(); auto context = (AndroidContext *) _platformContext.get();
jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "onStateChanged", "(JI)V"); jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "onStateChanged", "(JI)V");
env->CallVoidMethod(context->getJavaCapturer(), methodId, (jlong) (intptr_t) this, (jint) state); env->CallVoidMethod(context->getJavaCapturer(), methodId, (jlong) (intptr_t) this, (jint) state);
} }
@ -31,7 +31,7 @@ void VideoCameraCapturer::setState(VideoState state) {
void VideoCameraCapturer::setPreferredCaptureAspectRatio(float aspectRatio) { void VideoCameraCapturer::setPreferredCaptureAspectRatio(float aspectRatio) {
_aspectRatio = aspectRatio; _aspectRatio = aspectRatio;
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
AndroidContext *context = (AndroidContext *) _platformContext.get(); auto context = (AndroidContext *) _platformContext.get();
jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "onAspectRatioRequested", "(F)V"); jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "onAspectRatioRequested", "(F)V");
env->CallVoidMethod(context->getJavaCapturer(), methodId, (jfloat) aspectRatio); env->CallVoidMethod(context->getJavaCapturer(), methodId, (jfloat) aspectRatio);
} }
@ -54,7 +54,7 @@ webrtc::ScopedJavaLocalRef<jobject> VideoCameraCapturer::GetJavaVideoCapturerObs
extern "C" { extern "C" {
JNIEXPORT jobject Java_org_telegram_messenger_voip_VideoCameraCapturer_nativeGetJavaVideoCapturerObserver(JNIEnv *env, jclass clazz, jlong ptr) { JNIEXPORT jobject Java_org_telegram_messenger_voip_VideoCapturerDevice_nativeGetJavaVideoCapturerObserver(JNIEnv *env, jclass clazz, jlong ptr) {
tgcalls::VideoCameraCapturer *capturer = (tgcalls::VideoCameraCapturer *) (intptr_t) ptr; tgcalls::VideoCameraCapturer *capturer = (tgcalls::VideoCameraCapturer *) (intptr_t) ptr;
return capturer->GetJavaVideoCapturerObserver(env).Release(); return capturer->GetJavaVideoCapturerObserver(env).Release();
} }
@ -1,11 +1,13 @@
#include "VideoCapturerInterfaceImpl.h" #include "VideoCapturerInterfaceImpl.h"
#include <memory>
#include "VideoCameraCapturer.h" #include "VideoCameraCapturer.h"
namespace tgcalls { namespace tgcalls {
VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> source, std::string deviceId, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) { VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> source, std::string deviceId, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
_capturer = std::unique_ptr<VideoCameraCapturer>(new VideoCameraCapturer(source, deviceId, stateUpdated, platformContext)); _capturer = std::make_unique<VideoCameraCapturer>(source, deviceId, stateUpdated, platformContext);
} }
void VideoCapturerInterfaceImpl::setState(VideoState state) { void VideoCapturerInterfaceImpl::setState(VideoState state) {
@ -20,4 +22,12 @@ void VideoCapturerInterfaceImpl::setUncroppedOutput(std::shared_ptr<rtc::VideoSi
_capturer->setUncroppedSink(sink); _capturer->setUncroppedSink(sink);
} }
int VideoCapturerInterfaceImpl::getRotation() {
return 0;
}
void VideoCapturerInterfaceImpl::setOnFatalError(std::function<void()> error) {
}
} // namespace tgcalls } // namespace tgcalls
@ -14,6 +14,8 @@ public:
void setState(VideoState state) override; void setState(VideoState state) override;
void setPreferredCaptureAspectRatio(float aspectRatio) override; void setPreferredCaptureAspectRatio(float aspectRatio) override;
void setUncroppedOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override; void setUncroppedOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override;
int getRotation() override;
void setOnFatalError(std::function<void()> error) override;
private: private:
std::unique_ptr<VideoCameraCapturer> _capturer; std::unique_ptr<VideoCameraCapturer> _capturer;
@ -844,11 +844,15 @@ private:
std::vector<webrtc::RtpCodecCapability> codecs; std::vector<webrtc::RtpCodecCapability> codecs;
for (auto &codec : capabilities.codecs) { for (auto &codec : capabilities.codecs) {
#ifndef WEBRTC_DISABLE_H265
if (codec.name == cricket::kH265CodecName) { if (codec.name == cricket::kH265CodecName) {
codecs.insert(codecs.begin(), codec); codecs.insert(codecs.begin(), codec);
} else { } else {
codecs.push_back(codec); codecs.push_back(codec);
} }
#else
codecs.push_back(codec);
#endif
} }
it->SetCodecPreferences(codecs); it->SetCodecPreferences(codecs);
@ -0,0 +1,790 @@
/* Copyright (c) 2013 Dropbox, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "json11.hpp"
#include <cassert>
#include <cmath>
#include <cstdlib>
#include <cstdio>
#include <limits>
namespace json11 {
static const int max_depth = 200;
using std::string;
using std::vector;
using std::map;
using std::make_shared;
using std::initializer_list;
using std::move;
/* Helper for representing null - just a do-nothing struct, plus comparison
* operators so the helpers in JsonValue work. We can't use nullptr_t because
* it may not be orderable.
*/
struct NullStruct {
bool operator==(NullStruct) const { return true; }
bool operator<(NullStruct) const { return false; }
};
/* * * * * * * * * * * * * * * * * * * *
* Serialization
*/
static void dump(NullStruct, string &out) {
out += "null";
}
static void dump(double value, string &out) {
if (std::isfinite(value)) {
char buf[32];
snprintf(buf, sizeof buf, "%.17g", value);
out += buf;
} else {
out += "null";
}
}
static void dump(int value, string &out) {
char buf[32];
snprintf(buf, sizeof buf, "%d", value);
out += buf;
}
static void dump(bool value, string &out) {
out += value ? "true" : "false";
}
static void dump(const string &value, string &out) {
out += '"';
for (size_t i = 0; i < value.length(); i++) {
const char ch = value[i];
if (ch == '\\') {
out += "\\\\";
} else if (ch == '"') {
out += "\\\"";
} else if (ch == '\b') {
out += "\\b";
} else if (ch == '\f') {
out += "\\f";
} else if (ch == '\n') {
out += "\\n";
} else if (ch == '\r') {
out += "\\r";
} else if (ch == '\t') {
out += "\\t";
} else if (static_cast<uint8_t>(ch) <= 0x1f) {
char buf[8];
snprintf(buf, sizeof buf, "\\u%04x", ch);
out += buf;
} else if (static_cast<uint8_t>(ch) == 0xe2 && static_cast<uint8_t>(value[i+1]) == 0x80
&& static_cast<uint8_t>(value[i+2]) == 0xa8) {
out += "\\u2028";
i += 2;
} else if (static_cast<uint8_t>(ch) == 0xe2 && static_cast<uint8_t>(value[i+1]) == 0x80
&& static_cast<uint8_t>(value[i+2]) == 0xa9) {
out += "\\u2029";
i += 2;
} else {
out += ch;
}
}
out += '"';
}
static void dump(const Json::array &values, string &out) {
bool first = true;
out += "[";
for (const auto &value : values) {
if (!first)
out += ", ";
value.dump(out);
first = false;
}
out += "]";
}
static void dump(const Json::object &values, string &out) {
bool first = true;
out += "{";
for (const auto &kv : values) {
if (!first)
out += ", ";
dump(kv.first, out);
out += ": ";
kv.second.dump(out);
first = false;
}
out += "}";
}
void Json::dump(string &out) const {
m_ptr->dump(out);
}
/* * * * * * * * * * * * * * * * * * * *
* Value wrappers
*/
template <Json::Type tag, typename T>
class Value : public JsonValue {
protected:
// Constructors
explicit Value(const T &value) : m_value(value) {}
explicit Value(T &&value) : m_value(move(value)) {}
// Get type tag
Json::Type type() const override {
return tag;
}
// Comparisons
bool equals(const JsonValue * other) const override {
return m_value == static_cast<const Value<tag, T> *>(other)->m_value;
}
bool less(const JsonValue * other) const override {
return m_value < static_cast<const Value<tag, T> *>(other)->m_value;
}
const T m_value;
void dump(string &out) const override { json11::dump(m_value, out); }
};
class JsonDouble final : public Value<Json::NUMBER, double> {
double number_value() const override { return m_value; }
int int_value() const override { return static_cast<int>(m_value); }
bool equals(const JsonValue * other) const override { return m_value == other->number_value(); }
bool less(const JsonValue * other) const override { return m_value < other->number_value(); }
public:
explicit JsonDouble(double value) : Value(value) {}
};
class JsonInt final : public Value<Json::NUMBER, int> {
double number_value() const override { return m_value; }
int int_value() const override { return m_value; }
bool equals(const JsonValue * other) const override { return m_value == other->number_value(); }
bool less(const JsonValue * other) const override { return m_value < other->number_value(); }
public:
explicit JsonInt(int value) : Value(value) {}
};
class JsonBoolean final : public Value<Json::BOOL, bool> {
bool bool_value() const override { return m_value; }
public:
explicit JsonBoolean(bool value) : Value(value) {}
};
class JsonString final : public Value<Json::STRING, string> {
const string &string_value() const override { return m_value; }
public:
explicit JsonString(const string &value) : Value(value) {}
explicit JsonString(string &&value) : Value(move(value)) {}
};
class JsonArray final : public Value<Json::ARRAY, Json::array> {
const Json::array &array_items() const override { return m_value; }
const Json & operator[](size_t i) const override;
public:
explicit JsonArray(const Json::array &value) : Value(value) {}
explicit JsonArray(Json::array &&value) : Value(move(value)) {}
};
class JsonObject final : public Value<Json::OBJECT, Json::object> {
const Json::object &object_items() const override { return m_value; }
const Json & operator[](const string &key) const override;
public:
explicit JsonObject(const Json::object &value) : Value(value) {}
explicit JsonObject(Json::object &&value) : Value(move(value)) {}
};
class JsonNull final : public Value<Json::NUL, NullStruct> {
public:
JsonNull() : Value({}) {}
};
/* * * * * * * * * * * * * * * * * * * *
* Static globals - static-init-safe
*/
struct Statics {
const std::shared_ptr<JsonValue> null = make_shared<JsonNull>();
const std::shared_ptr<JsonValue> t = make_shared<JsonBoolean>(true);
const std::shared_ptr<JsonValue> f = make_shared<JsonBoolean>(false);
const string empty_string;
const vector<Json> empty_vector;
const map<string, Json> empty_map;
Statics() {}
};
static const Statics & statics() {
static const Statics s {};
return s;
}
static const Json & static_null() {
// This has to be separate, not in Statics, because Json() accesses statics().null.
static const Json json_null;
return json_null;
}
/* * * * * * * * * * * * * * * * * * * *
* Constructors
*/
Json::Json() noexcept : m_ptr(statics().null) {}
Json::Json(std::nullptr_t) noexcept : m_ptr(statics().null) {}
Json::Json(double value) : m_ptr(make_shared<JsonDouble>(value)) {}
Json::Json(int value) : m_ptr(make_shared<JsonInt>(value)) {}
Json::Json(bool value) : m_ptr(value ? statics().t : statics().f) {}
Json::Json(const string &value) : m_ptr(make_shared<JsonString>(value)) {}
Json::Json(string &&value) : m_ptr(make_shared<JsonString>(move(value))) {}
Json::Json(const char * value) : m_ptr(make_shared<JsonString>(value)) {}
Json::Json(const Json::array &values) : m_ptr(make_shared<JsonArray>(values)) {}
Json::Json(Json::array &&values) : m_ptr(make_shared<JsonArray>(move(values))) {}
Json::Json(const Json::object &values) : m_ptr(make_shared<JsonObject>(values)) {}
Json::Json(Json::object &&values) : m_ptr(make_shared<JsonObject>(move(values))) {}
/* * * * * * * * * * * * * * * * * * * *
* Accessors
*/
Json::Type Json::type() const { return m_ptr->type(); }
double Json::number_value() const { return m_ptr->number_value(); }
int Json::int_value() const { return m_ptr->int_value(); }
bool Json::bool_value() const { return m_ptr->bool_value(); }
const string & Json::string_value() const { return m_ptr->string_value(); }
const vector<Json> & Json::array_items() const { return m_ptr->array_items(); }
const map<string, Json> & Json::object_items() const { return m_ptr->object_items(); }
const Json & Json::operator[] (size_t i) const { return (*m_ptr)[i]; }
const Json & Json::operator[] (const string &key) const { return (*m_ptr)[key]; }
double JsonValue::number_value() const { return 0; }
int JsonValue::int_value() const { return 0; }
bool JsonValue::bool_value() const { return false; }
const string & JsonValue::string_value() const { return statics().empty_string; }
const vector<Json> & JsonValue::array_items() const { return statics().empty_vector; }
const map<string, Json> & JsonValue::object_items() const { return statics().empty_map; }
const Json & JsonValue::operator[] (size_t) const { return static_null(); }
const Json & JsonValue::operator[] (const string &) const { return static_null(); }
const Json & JsonObject::operator[] (const string &key) const {
auto iter = m_value.find(key);
return (iter == m_value.end()) ? static_null() : iter->second;
}
const Json & JsonArray::operator[] (size_t i) const {
if (i >= m_value.size()) return static_null();
else return m_value[i];
}
/* * * * * * * * * * * * * * * * * * * *
* Comparison
*/
bool Json::operator== (const Json &other) const {
if (m_ptr == other.m_ptr)
return true;
if (m_ptr->type() != other.m_ptr->type())
return false;
return m_ptr->equals(other.m_ptr.get());
}
bool Json::operator< (const Json &other) const {
if (m_ptr == other.m_ptr)
return false;
if (m_ptr->type() != other.m_ptr->type())
return m_ptr->type() < other.m_ptr->type();
return m_ptr->less(other.m_ptr.get());
}
/* * * * * * * * * * * * * * * * * * * *
* Parsing
*/
/* esc(c)
*
* Format char c suitable for printing in an error message.
*/
static inline string esc(char c) {
char buf[12];
if (static_cast<uint8_t>(c) >= 0x20 && static_cast<uint8_t>(c) <= 0x7f) {
snprintf(buf, sizeof buf, "'%c' (%d)", c, c);
} else {
snprintf(buf, sizeof buf, "(%d)", c);
}
return string(buf);
}
static inline bool in_range(long x, long lower, long upper) {
return (x >= lower && x <= upper);
}
namespace {
/* JsonParser
*
* Object that tracks all state of an in-progress parse.
*/
struct JsonParser final {
/* State
*/
const string &str;
size_t i;
string &err;
bool failed;
const JsonParse strategy;
/* fail(msg, err_ret = Json())
*
* Mark this parse as failed.
*/
Json fail(string &&msg) {
return fail(move(msg), Json());
}
template <typename T>
T fail(string &&msg, const T err_ret) {
if (!failed)
err = std::move(msg);
failed = true;
return err_ret;
}
/* consume_whitespace()
*
* Advance until the current character is non-whitespace.
*/
void consume_whitespace() {
while (str[i] == ' ' || str[i] == '\r' || str[i] == '\n' || str[i] == '\t')
i++;
}
/* consume_comment()
*
* Advance comments (c-style inline and multiline).
*/
bool consume_comment() {
bool comment_found = false;
if (str[i] == '/') {
i++;
if (i == str.size())
return fail("unexpected end of input after start of comment", false);
if (str[i] == '/') { // inline comment
i++;
// advance until next line, or end of input
while (i < str.size() && str[i] != '\n') {
i++;
}
comment_found = true;
}
else if (str[i] == '*') { // multiline comment
i++;
if (i > str.size()-2)
return fail("unexpected end of input inside multi-line comment", false);
// advance until closing tokens
while (!(str[i] == '*' && str[i+1] == '/')) {
i++;
if (i > str.size()-2)
return fail(
"unexpected end of input inside multi-line comment", false);
}
i += 2;
comment_found = true;
}
else
return fail("malformed comment", false);
}
return comment_found;
}
/* consume_garbage()
*
* Advance until the current character is non-whitespace and non-comment.
*/
void consume_garbage() {
consume_whitespace();
if(strategy == JsonParse::COMMENTS) {
bool comment_found = false;
do {
comment_found = consume_comment();
if (failed) return;
consume_whitespace();
}
while(comment_found);
}
}
/* get_next_token()
*
* Return the next non-whitespace character. If the end of the input is reached,
* flag an error and return 0.
*/
char get_next_token() {
consume_garbage();
if (failed) return static_cast<char>(0);
if (i == str.size())
return fail("unexpected end of input", static_cast<char>(0));
return str[i++];
}
/* encode_utf8(pt, out)
*
* Encode pt as UTF-8 and add it to out.
*/
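// For example (illustrative): encode_utf8(0x20AC, out) appends the three
// bytes 0xE2 0x82 0xAC, the UTF-8 encoding of U+20AC ("€").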
void encode_utf8(long pt, string & out) {
if (pt < 0)
return;
if (pt < 0x80) {
out += static_cast<char>(pt);
} else if (pt < 0x800) {
out += static_cast<char>((pt >> 6) | 0xC0);
out += static_cast<char>((pt & 0x3F) | 0x80);
} else if (pt < 0x10000) {
out += static_cast<char>((pt >> 12) | 0xE0);
out += static_cast<char>(((pt >> 6) & 0x3F) | 0x80);
out += static_cast<char>((pt & 0x3F) | 0x80);
} else {
out += static_cast<char>((pt >> 18) | 0xF0);
out += static_cast<char>(((pt >> 12) & 0x3F) | 0x80);
out += static_cast<char>(((pt >> 6) & 0x3F) | 0x80);
out += static_cast<char>((pt & 0x3F) | 0x80);
}
}
/* parse_string()
*
* Parse a string, starting at the current position.
*/
string parse_string() {
string out;
long last_escaped_codepoint = -1;
while (true) {
if (i == str.size())
return fail("unexpected end of input in string", "");
char ch = str[i++];
if (ch == '"') {
encode_utf8(last_escaped_codepoint, out);
return out;
}
if (in_range(ch, 0, 0x1f))
return fail("unescaped " + esc(ch) + " in string", "");
// The usual case: non-escaped characters
if (ch != '\\') {
encode_utf8(last_escaped_codepoint, out);
last_escaped_codepoint = -1;
out += ch;
continue;
}
// Handle escapes
if (i == str.size())
return fail("unexpected end of input in string", "");
ch = str[i++];
if (ch == 'u') {
// Extract 4-byte escape sequence
string esc = str.substr(i, 4);
// Explicitly check length of the substring. The following loop
// relies on std::string returning the terminating NUL when
// accessing str[length]. Checking here reduces brittleness.
if (esc.length() < 4) {
return fail("bad \\u escape: " + esc, "");
}
for (size_t j = 0; j < 4; j++) {
if (!in_range(esc[j], 'a', 'f') && !in_range(esc[j], 'A', 'F')
&& !in_range(esc[j], '0', '9'))
return fail("bad \\u escape: " + esc, "");
}
long codepoint = strtol(esc.data(), nullptr, 16);
// JSON specifies that characters outside the BMP shall be encoded as a pair
// of 4-hex-digit \u escapes encoding their surrogate pair components. Check
// whether we're in the middle of such a beast: the previous codepoint was an
// escaped lead (high) surrogate, and this is a trail (low) surrogate.
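// For example (illustrative): the escape pair "\uD83D\uDE00" reaches this
// point with last_escaped_codepoint = 0xD83D and codepoint = 0xDE00, which
// the branch below reassembles into U+1F600.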
if (in_range(last_escaped_codepoint, 0xD800, 0xDBFF)
&& in_range(codepoint, 0xDC00, 0xDFFF)) {
// Reassemble the two halves of the surrogate pair into one astral-plane
// character, per the UTF-16 algorithm.
encode_utf8((((last_escaped_codepoint - 0xD800) << 10)
| (codepoint - 0xDC00)) + 0x10000, out);
last_escaped_codepoint = -1;
} else {
encode_utf8(last_escaped_codepoint, out);
last_escaped_codepoint = codepoint;
}
i += 4;
continue;
}
encode_utf8(last_escaped_codepoint, out);
last_escaped_codepoint = -1;
if (ch == 'b') {
out += '\b';
} else if (ch == 'f') {
out += '\f';
} else if (ch == 'n') {
out += '\n';
} else if (ch == 'r') {
out += '\r';
} else if (ch == 't') {
out += '\t';
} else if (ch == '"' || ch == '\\' || ch == '/') {
out += ch;
} else {
return fail("invalid escape character " + esc(ch), "");
}
}
}
/* parse_number()
*
* Parse a double.
*/
Json parse_number() {
size_t start_pos = i;
if (str[i] == '-')
i++;
// Integer part
if (str[i] == '0') {
i++;
if (in_range(str[i], '0', '9'))
return fail("leading 0s not permitted in numbers");
} else if (in_range(str[i], '1', '9')) {
i++;
while (in_range(str[i], '0', '9'))
i++;
} else {
return fail("invalid " + esc(str[i]) + " in number");
}
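// Fast path (checked below): no fraction, no exponent, and few enough
// characters that the value is guaranteed to fit in an int, so it is parsed
// with atoi and kept integral; anything else falls through to strtod.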
if (str[i] != '.' && str[i] != 'e' && str[i] != 'E'
&& (i - start_pos) <= static_cast<size_t>(std::numeric_limits<int>::digits10)) {
return std::atoi(str.c_str() + start_pos);
}
// Decimal part
if (str[i] == '.') {
i++;
if (!in_range(str[i], '0', '9'))
return fail("at least one digit required in fractional part");
while (in_range(str[i], '0', '9'))
i++;
}
// Exponent part
if (str[i] == 'e' || str[i] == 'E') {
i++;
if (str[i] == '+' || str[i] == '-')
i++;
if (!in_range(str[i], '0', '9'))
return fail("at least one digit required in exponent");
while (in_range(str[i], '0', '9'))
i++;
}
return std::strtod(str.c_str() + start_pos, nullptr);
}
/* expect(str, res)
*
* Expect that 'str' starts at the character that was just read. If it does, advance
* the input and return res. If not, flag an error.
*/
Json expect(const string &expected, Json res) {
assert(i != 0);
i--;
if (str.compare(i, expected.length(), expected) == 0) {
i += expected.length();
return res;
} else {
return fail("parse error: expected " + expected + ", got " + str.substr(i, expected.length()));
}
}
/* parse_json()
*
* Parse a JSON object.
*/
Json parse_json(int depth) {
if (depth > max_depth) {
return fail("exceeded maximum nesting depth");
}
char ch = get_next_token();
if (failed)
return Json();
if (ch == '-' || (ch >= '0' && ch <= '9')) {
i--;
return parse_number();
}
if (ch == 't')
return expect("true", true);
if (ch == 'f')
return expect("false", false);
if (ch == 'n')
return expect("null", Json());
if (ch == '"')
return parse_string();
if (ch == '{') {
map<string, Json> data;
ch = get_next_token();
if (ch == '}')
return data;
while (1) {
if (ch != '"')
return fail("expected '\"' in object, got " + esc(ch));
string key = parse_string();
if (failed)
return Json();
ch = get_next_token();
if (ch != ':')
return fail("expected ':' in object, got " + esc(ch));
data[std::move(key)] = parse_json(depth + 1);
if (failed)
return Json();
ch = get_next_token();
if (ch == '}')
break;
if (ch != ',')
return fail("expected ',' in object, got " + esc(ch));
ch = get_next_token();
}
return data;
}
if (ch == '[') {
vector<Json> data;
ch = get_next_token();
if (ch == ']')
return data;
while (1) {
i--;
data.push_back(parse_json(depth + 1));
if (failed)
return Json();
ch = get_next_token();
if (ch == ']')
break;
if (ch != ',')
return fail("expected ',' in list, got " + esc(ch));
ch = get_next_token();
(void)ch;
}
return data;
}
return fail("expected value, got " + esc(ch));
}
};
} // anonymous namespace
Json Json::parse(const string &in, string &err, JsonParse strategy) {
JsonParser parser { in, 0, err, false, strategy };
Json result = parser.parse_json(0);
// Check for any trailing garbage
parser.consume_garbage();
if (parser.failed)
return Json();
if (parser.i != in.size())
return parser.fail("unexpected trailing " + esc(in[parser.i]));
return result;
}
// Documented in json11.hpp
vector<Json> Json::parse_multi(const string &in,
std::string::size_type &parser_stop_pos,
string &err,
JsonParse strategy) {
JsonParser parser { in, 0, err, false, strategy };
parser_stop_pos = 0;
vector<Json> json_vec;
while (parser.i != in.size() && !parser.failed) {
json_vec.push_back(parser.parse_json(0));
if (parser.failed)
break;
// Check for another object
parser.consume_garbage();
if (parser.failed)
break;
parser_stop_pos = parser.i;
}
return json_vec;
}
/* * * * * * * * * * * * * * * * * * * *
* Shape-checking
*/
bool Json::has_shape(const shape & types, string & err) const {
if (!is_object()) {
err = "expected JSON object, got " + dump();
return false;
}
const auto& obj_items = object_items();
for (auto & item : types) {
const auto it = obj_items.find(item.first);
if (it == obj_items.cend() || it->second.type() != item.second) {
err = "bad type for " + item.first + " in " + dump();
return false;
}
}
return true;
}
} // namespace json11
@ -0,0 +1,232 @@
/* json11
*
* json11 is a tiny JSON library for C++11, providing JSON parsing and serialization.
*
* The core object provided by the library is json11::Json. A Json object represents any JSON
* value: null, bool, number (int or double), string (std::string), array (std::vector), or
* object (std::map).
*
* Json objects act like values: they can be assigned, copied, moved, compared for equality or
* order, etc. There are also helper methods Json::dump, to serialize a Json to a string, and
* Json::parse (static) to parse a std::string as a Json object.
*
* Internally, the various types of Json object are represented by the JsonValue class
* hierarchy.
*
* A note on numbers - JSON specifies the syntax of number formatting but not its semantics,
* so some JSON implementations distinguish between integers and floating-point numbers, while
* some don't. In json11, we choose the latter. Because some JSON implementations (namely
* Javascript itself) treat all numbers as the same type, distinguishing the two leads
* to JSON that will be *silently* changed by a round-trip through those implementations.
* Dangerous! To avoid that risk, json11 stores all numbers as double internally, but also
* provides integer helpers.
*
* Fortunately, double-precision IEEE754 ('double') can precisely store any integer in the
* range +/-2^53, which includes every 'int' on most systems. (Timestamps often use int64
 * or long long to avoid the Y2038 problem; a double storing microseconds since some epoch
* will be exact for +/- 275 years.)
*/
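// A minimal usage sketch of the API described above (illustrative only):
//
//     std::string err;
//     json11::Json obj = json11::Json::parse("{\"k\": [1, 2.5]}", err);
//     int first = obj["k"][0].int_value();        // 1
//     double second = obj["k"][1].number_value(); // 2.5
//     std::string text = obj.dump();              // {"k": [1, 2.5]}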
/* Copyright (c) 2013 Dropbox, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include <string>
#include <vector>
#include <map>
#include <memory>
#include <initializer_list>
#ifdef _MSC_VER
#if _MSC_VER <= 1800 // VS 2013
#ifndef noexcept
#define noexcept throw()
#endif
#ifndef snprintf
#define snprintf _snprintf_s
#endif
#endif
#endif
namespace json11 {
enum JsonParse {
STANDARD, COMMENTS
};
class JsonValue;
class Json final {
public:
// Types
enum Type {
NUL, NUMBER, BOOL, STRING, ARRAY, OBJECT
};
// Array and object typedefs
typedef std::vector<Json> array;
typedef std::map<std::string, Json> object;
// Constructors for the various types of JSON value.
Json() noexcept; // NUL
Json(std::nullptr_t) noexcept; // NUL
Json(double value); // NUMBER
Json(int value); // NUMBER
Json(bool value); // BOOL
Json(const std::string &value); // STRING
Json(std::string &&value); // STRING
Json(const char * value); // STRING
Json(const array &values); // ARRAY
Json(array &&values); // ARRAY
Json(const object &values); // OBJECT
Json(object &&values); // OBJECT
// Implicit constructor: anything with a to_json() function.
template <class T, class = decltype(&T::to_json)>
Json(const T & t) : Json(t.to_json()) {}
// Implicit constructor: map-like objects (std::map, std::unordered_map, etc)
template <class M, typename std::enable_if<
std::is_constructible<std::string, decltype(std::declval<M>().begin()->first)>::value
&& std::is_constructible<Json, decltype(std::declval<M>().begin()->second)>::value,
int>::type = 0>
Json(const M & m) : Json(object(m.begin(), m.end())) {}
// Implicit constructor: vector-like objects (std::list, std::vector, std::set, etc)
template <class V, typename std::enable_if<
std::is_constructible<Json, decltype(*std::declval<V>().begin())>::value,
int>::type = 0>
Json(const V & v) : Json(array(v.begin(), v.end())) {}
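// Illustrative examples of the implicit constructors above (Point is a
// hypothetical type):
//
//     struct Point {
//         int x, y;
//         json11::Json to_json() const { return json11::Json::array { x, y }; }
//     };
//     json11::Json(Point { 1, 2 }).dump();                           // [1, 2]
//     json11::Json(std::map<std::string, int> {{ "a", 1 }}).dump();  // {"a": 1}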
// This prevents Json(some_pointer) from accidentally producing a bool. Use
// Json(bool(some_pointer)) if that behavior is desired.
Json(void *) = delete;
// Accessors
Type type() const;
bool is_null() const { return type() == NUL; }
bool is_number() const { return type() == NUMBER; }
bool is_bool() const { return type() == BOOL; }
bool is_string() const { return type() == STRING; }
bool is_array() const { return type() == ARRAY; }
bool is_object() const { return type() == OBJECT; }
// Return the enclosed value if this is a number, 0 otherwise. Note that json11 does not
// distinguish between integer and non-integer numbers - number_value() and int_value()
// can both be applied to a NUMBER-typed object.
double number_value() const;
int int_value() const;
// Return the enclosed value if this is a boolean, false otherwise.
bool bool_value() const;
// Return the enclosed string if this is a string, "" otherwise.
const std::string &string_value() const;
// Return the enclosed std::vector if this is an array, or an empty vector otherwise.
const array &array_items() const;
// Return the enclosed std::map if this is an object, or an empty map otherwise.
const object &object_items() const;
// Return a reference to arr[i] if this is an array, Json() otherwise.
const Json & operator[](size_t i) const;
// Return a reference to obj[key] if this is an object, Json() otherwise.
const Json & operator[](const std::string &key) const;
// Serialize.
void dump(std::string &out) const;
std::string dump() const {
std::string out;
dump(out);
return out;
}
// Parse. If parse fails, return Json() and assign an error message to err.
static Json parse(const std::string & in,
std::string & err,
JsonParse strategy = JsonParse::STANDARD);
static Json parse(const char * in,
std::string & err,
JsonParse strategy = JsonParse::STANDARD) {
if (in) {
return parse(std::string(in), err, strategy);
} else {
err = "null input";
return nullptr;
}
}
// Parse multiple objects, concatenated or separated by whitespace
static std::vector<Json> parse_multi(
const std::string & in,
std::string::size_type & parser_stop_pos,
std::string & err,
JsonParse strategy = JsonParse::STANDARD);
static inline std::vector<Json> parse_multi(
const std::string & in,
std::string & err,
JsonParse strategy = JsonParse::STANDARD) {
std::string::size_type parser_stop_pos;
return parse_multi(in, parser_stop_pos, err, strategy);
}
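// e.g. (illustrative): parse_multi("{} [1]", err) yields two Json values,
// an empty object followed by a one-element array.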
bool operator== (const Json &rhs) const;
bool operator< (const Json &rhs) const;
bool operator!= (const Json &rhs) const { return !(*this == rhs); }
bool operator<= (const Json &rhs) const { return !(rhs < *this); }
bool operator> (const Json &rhs) const { return (rhs < *this); }
bool operator>= (const Json &rhs) const { return !(*this < rhs); }
/* has_shape(types, err)
*
* Return true if this is a JSON object and, for each item in types, has a field of
* the given type. If not, return false and set err to a descriptive message.
*/
typedef std::initializer_list<std::pair<std::string, Type>> shape;
bool has_shape(const shape & types, std::string & err) const;
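// e.g. (illustrative):
//     std::string err;
//     bool ok = json.has_shape({ { "id", json11::Json::NUMBER },
//                                { "name", json11::Json::STRING } }, err);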
private:
std::shared_ptr<JsonValue> m_ptr;
};
// Internal class hierarchy - JsonValue objects are not exposed to users of this API.
class JsonValue {
protected:
friend class Json;
friend class JsonInt;
friend class JsonDouble;
virtual Json::Type type() const = 0;
virtual bool equals(const JsonValue * other) const = 0;
virtual bool less(const JsonValue * other) const = 0;
virtual void dump(std::string &out) const = 0;
virtual double number_value() const;
virtual int int_value() const;
virtual bool bool_value() const;
virtual const std::string &string_value() const;
virtual const Json::array &array_items() const;
virtual const Json &operator[](size_t i) const;
virtual const Json::object &object_items() const;
virtual const Json &operator[](const std::string &key) const;
virtual ~JsonValue() {}
};
} // namespace json11
File diff suppressed because it is too large
@ -0,0 +1,57 @@
#ifndef TGCALLS_INSTANCEV2_IMPL_H
#define TGCALLS_INSTANCEV2_IMPL_H
#include "Instance.h"
#include "StaticThreads.h"
namespace tgcalls {
class LogSinkImpl;
class Manager;
template <typename T>
class ThreadLocalObject;
class InstanceV2ImplInternal;
class InstanceV2Impl final : public Instance {
public:
explicit InstanceV2Impl(Descriptor &&descriptor);
~InstanceV2Impl() override;
void receiveSignalingData(const std::vector<uint8_t> &data) override;
void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) override;
void setRequestedVideoAspect(float aspect) override;
void setNetworkType(NetworkType networkType) override;
void setMuteMicrophone(bool muteMicrophone) override;
bool supportsVideo() override {
return true;
}
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override;
void setAudioOutputGainControlEnabled(bool enabled) override;
void setEchoCancellationStrength(int strength) override;
void setAudioInputDevice(std::string id) override;
void setAudioOutputDevice(std::string id) override;
void setInputVolume(float level) override;
void setOutputVolume(float level) override;
void setAudioOutputDuckingEnabled(bool enabled) override;
void setIsLowBatteryLevel(bool isLowBatteryLevel) override;
static std::vector<std::string> GetVersions();
static int GetConnectionMaxLayer();
std::string getLastError() override;
std::string getDebugInfo() override;
int64_t getPreferredRelayId() override;
TrafficStats getTrafficStats() override;
PersistentState getPersistentState() override;
void stop(std::function<void(FinalState)> completion) override;
private:
std::shared_ptr<Threads> _threads;
std::unique_ptr<ThreadLocalObject<InstanceV2ImplInternal>> _internal;
std::unique_ptr<LogSinkImpl> _logSink;
};
} // namespace tgcalls
#endif
@ -0,0 +1,565 @@
#include "v2/NativeNetworkingImpl.h"
#include "p2p/base/basic_packet_socket_factory.h"
#include "p2p/client/basic_port_allocator.h"
#include "p2p/base/p2p_transport_channel.h"
#include "p2p/base/basic_async_resolver_factory.h"
#include "api/packet_socket_factory.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/rtc_certificate_generator.h"
#include "p2p/base/ice_credentials_iterator.h"
#include "api/jsep_ice_candidate.h"
#include "p2p/base/dtls_transport.h"
#include "p2p/base/dtls_transport_factory.h"
#include "pc/dtls_srtp_transport.h"
#include "pc/dtls_transport.h"
#include "StaticThreads.h"
namespace tgcalls {
class TurnCustomizerImpl : public webrtc::TurnCustomizer {
public:
TurnCustomizerImpl() {
}
virtual ~TurnCustomizerImpl() {
}
void MaybeModifyOutgoingStunMessage(cricket::PortInterface* port,
cricket::StunMessage* message) override {
message->AddAttribute(std::make_unique<cricket::StunByteStringAttribute>(cricket::STUN_ATTR_SOFTWARE, "Telegram "));
}
bool AllowChannelData(cricket::PortInterface* port, const void *data, size_t size, bool payload) override {
return true;
}
};
class SctpDataChannelProviderInterfaceImpl : public sigslot::has_slots<>, public webrtc::SctpDataChannelProviderInterface, public webrtc::DataChannelObserver {
public:
SctpDataChannelProviderInterfaceImpl(
cricket::DtlsTransport *transportChannel,
bool isOutgoing,
std::function<void(bool)> onStateChanged,
std::function<void(std::string const &)> onMessageReceived,
std::shared_ptr<Threads> threads
) :
_threads(std::move(threads)),
_onStateChanged(onStateChanged),
_onMessageReceived(onMessageReceived) {
assert(_threads->getNetworkThread()->IsCurrent());
_sctpTransportFactory.reset(new cricket::SctpTransportFactory(_threads->getNetworkThread()));
_sctpTransport = _sctpTransportFactory->CreateSctpTransport(transportChannel);
_sctpTransport->SignalReadyToSendData.connect(this, &SctpDataChannelProviderInterfaceImpl::sctpReadyToSendData);
_sctpTransport->SignalDataReceived.connect(this, &SctpDataChannelProviderInterfaceImpl::sctpDataReceived);
webrtc::InternalDataChannelInit dataChannelInit;
dataChannelInit.id = 0;
dataChannelInit.open_handshake_role = isOutgoing ? webrtc::InternalDataChannelInit::kOpener : webrtc::InternalDataChannelInit::kAcker;
_dataChannel = webrtc::SctpDataChannel::Create(
this,
"data",
dataChannelInit,
_threads->getNetworkThread(),
_threads->getNetworkThread()
);
_dataChannel->RegisterObserver(this);
}
virtual ~SctpDataChannelProviderInterfaceImpl() {
assert(_threads->getNetworkThread()->IsCurrent());
_dataChannel->UnregisterObserver();
_dataChannel->Close();
_dataChannel = nullptr;
_sctpTransport = nullptr;
_sctpTransportFactory.reset();
}
void sendDataChannelMessage(std::string const &message) {
assert(_threads->getNetworkThread()->IsCurrent());
if (_isDataChannelOpen) {
RTC_LOG(LS_INFO) << "Outgoing DataChannel message: " << message;
webrtc::DataBuffer buffer(message);
_dataChannel->Send(buffer);
} else {
RTC_LOG(LS_INFO) << "Could not send an outgoing DataChannel message: the channel is not open";
}
}
virtual void OnStateChange() override {
assert(_threads->getNetworkThread()->IsCurrent());
auto state = _dataChannel->state();
bool isDataChannelOpen = state == webrtc::DataChannelInterface::DataState::kOpen;
if (_isDataChannelOpen != isDataChannelOpen) {
_isDataChannelOpen = isDataChannelOpen;
_onStateChanged(_isDataChannelOpen);
}
}
virtual void OnMessage(const webrtc::DataBuffer& buffer) override {
assert(_threads->getNetworkThread()->IsCurrent());
if (!buffer.binary) {
std::string messageText(buffer.data.data(), buffer.data.data() + buffer.data.size());
RTC_LOG(LS_INFO) << "Incoming DataChannel message: " << messageText;
_onMessageReceived(messageText);
}
}
void updateIsConnected(bool isConnected) {
assert(_threads->getNetworkThread()->IsCurrent());
if (isConnected) {
if (!_isSctpTransportStarted) {
_isSctpTransportStarted = true;
_sctpTransport->Start(5000, 5000, 262144);
}
}
}
void sctpReadyToSendData() {
assert(_threads->getNetworkThread()->IsCurrent());
_dataChannel->OnTransportReady(true);
}
void sctpDataReceived(const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) {
assert(_threads->getNetworkThread()->IsCurrent());
_dataChannel->OnDataReceived(params, buffer);
}
virtual bool SendData(const cricket::SendDataParams& params, const rtc::CopyOnWriteBuffer& payload, cricket::SendDataResult* result) override {
assert(_threads->getNetworkThread()->IsCurrent());
return _sctpTransport->SendData(params, payload);
}
virtual bool ConnectDataChannel(webrtc::SctpDataChannel *data_channel) override {
assert(_threads->getNetworkThread()->IsCurrent());
return true;
}
virtual void DisconnectDataChannel(webrtc::SctpDataChannel* data_channel) override {
assert(_threads->getNetworkThread()->IsCurrent());
return;
}
virtual void AddSctpDataStream(int sid) override {
assert(_threads->getNetworkThread()->IsCurrent());
_sctpTransport->OpenStream(sid);
}
virtual void RemoveSctpDataStream(int sid) override {
assert(_threads->getNetworkThread()->IsCurrent());
_threads->getNetworkThread()->Invoke<void>(RTC_FROM_HERE, [this, sid]() {
_sctpTransport->ResetStream(sid);
});
}
virtual bool ReadyToSendData() const override {
assert(_threads->getNetworkThread()->IsCurrent());
return _sctpTransport->ReadyToSendData();
}
private:
std::shared_ptr<Threads> _threads;
std::function<void(bool)> _onStateChanged;
std::function<void(std::string const &)> _onMessageReceived;
std::unique_ptr<cricket::SctpTransportFactory> _sctpTransportFactory;
std::unique_ptr<cricket::SctpTransportInternal> _sctpTransport;
rtc::scoped_refptr<webrtc::SctpDataChannel> _dataChannel;
bool _isSctpTransportStarted = false;
bool _isDataChannelOpen = false;
};
webrtc::CryptoOptions NativeNetworkingImpl::getDefaulCryptoOptions() {
auto options = webrtc::CryptoOptions();
options.srtp.enable_aes128_sha1_80_crypto_cipher = true;
options.srtp.enable_gcm_crypto_suites = true;
return options;
}
NativeNetworkingImpl::NativeNetworkingImpl(Configuration &&configuration) :
_threads(std::move(configuration.threads)),
_isOutgoing(configuration.isOutgoing),
_enableStunMarking(configuration.enableStunMarking),
_enableTCP(configuration.enableTCP),
_enableP2P(configuration.enableP2P),
_rtcServers(configuration.rtcServers),
_stateUpdated(std::move(configuration.stateUpdated)),
_candidateGathered(std::move(configuration.candidateGathered)),
_transportMessageReceived(std::move(configuration.transportMessageReceived)),
_rtcpPacketReceived(std::move(configuration.rtcpPacketReceived)),
_dataChannelStateUpdated(configuration.dataChannelStateUpdated),
_dataChannelMessageReceived(configuration.dataChannelMessageReceived) {
assert(_threads->getNetworkThread()->IsCurrent());
_localIceParameters = PeerIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH));
_localCertificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(rtc::KT_ECDSA), absl::nullopt);
_socketFactory.reset(new rtc::BasicPacketSocketFactory(_threads->getNetworkThread()));
_networkManager = std::make_unique<rtc::BasicNetworkManager>();
_asyncResolverFactory = std::make_unique<webrtc::BasicAsyncResolverFactory>();
_dtlsSrtpTransport = std::make_unique<webrtc::DtlsSrtpTransport>(true);
_dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr);
_dtlsSrtpTransport->SetActiveResetSrtpParams(false);
_dtlsSrtpTransport->SignalReadyToSend.connect(this, &NativeNetworkingImpl::DtlsReadyToSend);
_dtlsSrtpTransport->SignalRtpPacketReceived.connect(this, &NativeNetworkingImpl::RtpPacketReceived_n);
_dtlsSrtpTransport->SignalRtcpPacketReceived.connect(this, &NativeNetworkingImpl::OnRtcpPacketReceived_n);
resetDtlsSrtpTransport();
}
NativeNetworkingImpl::~NativeNetworkingImpl() {
assert(_threads->getNetworkThread()->IsCurrent());
RTC_LOG(LS_INFO) << "NativeNetworkingImpl::~NativeNetworkingImpl()";
_dtlsSrtpTransport.reset();
_dtlsTransport.reset();
_dataChannelInterface.reset();
_transportChannel.reset();
_asyncResolverFactory.reset();
_portAllocator.reset();
_networkManager.reset();
_socketFactory.reset();
}
void NativeNetworkingImpl::resetDtlsSrtpTransport() {
if (_enableStunMarking) {
_turnCustomizer.reset(new TurnCustomizerImpl());
}
_portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), _turnCustomizer.get(), nullptr));
uint32_t flags = _portAllocator->flags();
flags |=
//cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET |
cricket::PORTALLOCATOR_ENABLE_IPV6 |
cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI;
if (!_enableTCP) {
flags |= cricket::PORTALLOCATOR_DISABLE_TCP;
}
if (!_enableP2P) {
flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
flags |= cricket::PORTALLOCATOR_DISABLE_STUN;
uint32_t candidateFilter = _portAllocator->candidate_filter();
candidateFilter &= ~(cricket::CF_REFLEXIVE);
_portAllocator->SetCandidateFilter(candidateFilter);
}
_portAllocator->set_step_delay(cricket::kMinimumStepDelay);
//TODO: figure out the proxy setup
/*if (_proxy) {
rtc::ProxyInfo proxyInfo;
proxyInfo.type = rtc::ProxyType::PROXY_SOCKS5;
proxyInfo.address = rtc::SocketAddress(_proxy->host, _proxy->port);
proxyInfo.username = _proxy->login;
proxyInfo.password = rtc::CryptString(TgCallsCryptStringImpl(_proxy->password));
_portAllocator->set_proxy("t/1.0", proxyInfo);
}*/
_portAllocator->set_flags(flags);
_portAllocator->Initialize();
cricket::ServerAddresses stunServers;
std::vector<cricket::RelayServerConfig> turnServers;
for (auto &server : _rtcServers) {
if (server.isTurn) {
turnServers.push_back(cricket::RelayServerConfig(
rtc::SocketAddress(server.host, server.port),
server.login,
server.password,
cricket::PROTO_UDP
));
} else {
rtc::SocketAddress stunAddress = rtc::SocketAddress(server.host, server.port);
stunServers.insert(stunAddress);
}
}
_portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE, _turnCustomizer.get());
_transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, _portAllocator.get(), _asyncResolverFactory.get(), nullptr));
cricket::IceConfig iceConfig;
iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY;
iceConfig.prioritize_most_likely_candidate_pairs = true;
iceConfig.regather_on_failed_networks_interval = 8000;
_transportChannel->SetIceConfig(iceConfig);
cricket::IceParameters localIceParameters(
_localIceParameters.ufrag,
_localIceParameters.pwd,
false
);
_transportChannel->SetIceParameters(localIceParameters);
_transportChannel->SetIceRole(_isOutgoing ? cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED);
_transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL);
_transportChannel->SignalCandidateGathered.connect(this, &NativeNetworkingImpl::candidateGathered);
_transportChannel->SignalIceTransportStateChanged.connect(this, &NativeNetworkingImpl::transportStateChanged);
_transportChannel->SignalReadPacket.connect(this, &NativeNetworkingImpl::transportPacketReceived);
webrtc::CryptoOptions cryptoOptions = NativeNetworkingImpl::getDefaulCryptoOptions();
_dtlsTransport.reset(new cricket::DtlsTransport(_transportChannel.get(), cryptoOptions, nullptr));
_dtlsTransport->SignalWritableState.connect(
this, &NativeNetworkingImpl::OnTransportWritableState_n);
_dtlsTransport->SignalReceivingState.connect(
this, &NativeNetworkingImpl::OnTransportReceivingState_n);
_dtlsTransport->SetLocalCertificate(_localCertificate);
_dtlsSrtpTransport->SetDtlsTransports(_dtlsTransport.get(), nullptr);
}
void NativeNetworkingImpl::start() {
_transportChannel->MaybeStartGathering();
const auto weak = std::weak_ptr<NativeNetworkingImpl>(shared_from_this());
_dataChannelInterface.reset(new SctpDataChannelProviderInterfaceImpl(
_dtlsTransport.get(),
_isOutgoing,
[weak, threads = _threads](bool state) {
assert(threads->getNetworkThread()->IsCurrent());
const auto strong = weak.lock();
if (!strong) {
return;
}
strong->_dataChannelStateUpdated(state);
},
[weak, threads = _threads](std::string const &message) {
assert(threads->getNetworkThread()->IsCurrent());
const auto strong = weak.lock();
if (!strong) {
return;
}
strong->_dataChannelMessageReceived(message);
},
_threads
));
}
void NativeNetworkingImpl::stop() {
_transportChannel->SignalCandidateGathered.disconnect(this);
_transportChannel->SignalIceTransportStateChanged.disconnect(this);
_transportChannel->SignalReadPacket.disconnect(this);
_dtlsTransport->SignalWritableState.disconnect(this);
_dtlsTransport->SignalReceivingState.disconnect(this);
_dtlsSrtpTransport->SetDtlsTransports(nullptr, nullptr);
_dataChannelInterface.reset();
_dtlsTransport.reset();
_transportChannel.reset();
_portAllocator.reset();
_localIceParameters = PeerIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH));
_localCertificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(rtc::KT_ECDSA), absl::nullopt);
resetDtlsSrtpTransport();
}
PeerIceParameters NativeNetworkingImpl::getLocalIceParameters() {
return _localIceParameters;
}
std::unique_ptr<rtc::SSLFingerprint> NativeNetworkingImpl::getLocalFingerprint() {
auto certificate = _localCertificate;
if (!certificate) {
return nullptr;
}
return rtc::SSLFingerprint::CreateFromCertificate(*certificate);
}
void NativeNetworkingImpl::setRemoteParams(PeerIceParameters const &remoteIceParameters, rtc::SSLFingerprint *fingerprint, std::string const &sslSetup) {
_remoteIceParameters = remoteIceParameters;
cricket::IceParameters parameters(
remoteIceParameters.ufrag,
remoteIceParameters.pwd,
false
);
_transportChannel->SetRemoteIceParameters(parameters);
if (sslSetup == "active") {
_dtlsTransport->SetDtlsRole(rtc::SSLRole::SSL_SERVER);
} else if (sslSetup == "passive") {
_dtlsTransport->SetDtlsRole(rtc::SSLRole::SSL_CLIENT);
} else {
_dtlsTransport->SetDtlsRole(_isOutgoing ? rtc::SSLRole::SSL_CLIENT : rtc::SSLRole::SSL_SERVER);
}
if (fingerprint) {
_dtlsTransport->SetRemoteFingerprint(fingerprint->algorithm, fingerprint->digest.data(), fingerprint->digest.size());
}
}
void NativeNetworkingImpl::addCandidates(std::vector<cricket::Candidate> const &candidates) {
for (const auto &candidate : candidates) {
_transportChannel->AddRemoteCandidate(candidate);
}
}
void NativeNetworkingImpl::sendDataChannelMessage(std::string const &message) {
if (_dataChannelInterface) {
_dataChannelInterface->sendDataChannelMessage(message);
}
}
webrtc::RtpTransport *NativeNetworkingImpl::getRtpTransport() {
return _dtlsSrtpTransport.get();
}
void NativeNetworkingImpl::checkConnectionTimeout() {
const auto weak = std::weak_ptr<NativeNetworkingImpl>(shared_from_this());
_threads->getNetworkThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() {
auto strong = weak.lock();
if (!strong) {
return;
}
int64_t currentTimestamp = rtc::TimeMillis();
const int64_t maxTimeout = 20000;
if (strong->_lastNetworkActivityMs + maxTimeout < currentTimestamp) {
NativeNetworkingImpl::State emitState;
emitState.isReadyToSendData = false;
emitState.isFailed = true;
strong->_stateUpdated(emitState);
}
strong->checkConnectionTimeout();
}, 1000);
}
void NativeNetworkingImpl::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) {
assert(_threads->getNetworkThread()->IsCurrent());
_candidateGathered(candidate);
}
void NativeNetworkingImpl::candidateGatheringState(cricket::IceTransportInternal *transport) {
assert(_threads->getNetworkThread()->IsCurrent());
}
void NativeNetworkingImpl::OnTransportWritableState_n(rtc::PacketTransportInternal *transport) {
assert(_threads->getNetworkThread()->IsCurrent());
UpdateAggregateStates_n();
}
void NativeNetworkingImpl::OnTransportReceivingState_n(rtc::PacketTransportInternal *transport) {
assert(_threads->getNetworkThread()->IsCurrent());
UpdateAggregateStates_n();
}
void NativeNetworkingImpl::DtlsReadyToSend(bool isReadyToSend) {
UpdateAggregateStates_n();
if (isReadyToSend) {
const auto weak = std::weak_ptr<NativeNetworkingImpl>(shared_from_this());
_threads->getNetworkThread()->PostTask(RTC_FROM_HERE, [weak]() {
const auto strong = weak.lock();
if (!strong) {
return;
}
strong->UpdateAggregateStates_n();
});
}
}
void NativeNetworkingImpl::transportStateChanged(cricket::IceTransportInternal *transport) {
UpdateAggregateStates_n();
}
void NativeNetworkingImpl::transportReadyToSend(cricket::IceTransportInternal *transport) {
assert(_threads->getNetworkThread()->IsCurrent());
}
void NativeNetworkingImpl::transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused) {
assert(_threads->getNetworkThread()->IsCurrent());
_lastNetworkActivityMs = rtc::TimeMillis();
}
void NativeNetworkingImpl::RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us, bool isUnresolved) {
if (_transportMessageReceived) {
_transportMessageReceived(*packet, isUnresolved);
}
}
void NativeNetworkingImpl::OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us) {
if (_rtcpPacketReceived) {
_rtcpPacketReceived(*packet, packet_time_us);
}
}
void NativeNetworkingImpl::UpdateAggregateStates_n() {
assert(_threads->getNetworkThread()->IsCurrent());
auto state = _transportChannel->GetIceTransportState();
bool isConnected = false;
switch (state) {
case webrtc::IceTransportState::kConnected:
case webrtc::IceTransportState::kCompleted:
isConnected = true;
break;
default:
break;
}
if (!_dtlsSrtpTransport->IsWritable(false)) {
isConnected = false;
}
if (_isConnected != isConnected) {
_isConnected = isConnected;
NativeNetworkingImpl::State emitState;
emitState.isReadyToSendData = isConnected;
_stateUpdated(emitState);
if (_dataChannelInterface) {
_dataChannelInterface->updateIsConnected(isConnected);
}
}
}
void NativeNetworkingImpl::sctpReadyToSendData() {
}
void NativeNetworkingImpl::sctpDataReceived(const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer) {
}
} // namespace tgcalls
@ -0,0 +1,142 @@
#ifndef TGCALLS_NATIVE_NETWORKING_IMPL_H
#define TGCALLS_NATIVE_NETWORKING_IMPL_H
#ifdef WEBRTC_WIN
// Must be included before other Windows headers; otherwise conflicting
// declarations in them cause compiler errors.
#include <winsock2.h>
#endif // WEBRTC_WIN
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "api/candidate.h"
#include "media/base/media_channel.h"
#include "media/sctp/sctp_transport.h"
#include "pc/sctp_data_channel.h"
#include <functional>
#include <memory>
#include "Message.h"
#include "ThreadLocalObject.h"
#include "Instance.h"
namespace rtc {
class BasicPacketSocketFactory;
class BasicNetworkManager;
class PacketTransportInternal;
struct NetworkRoute;
} // namespace rtc
namespace cricket {
class BasicPortAllocator;
class P2PTransportChannel;
class IceTransportInternal;
class DtlsTransport;
} // namespace cricket
namespace webrtc {
class BasicAsyncResolverFactory;
class TurnCustomizer;
class DtlsSrtpTransport;
class RtpTransport;
} // namespace webrtc
namespace tgcalls {
struct Message;
class SctpDataChannelProviderInterfaceImpl;
class Threads;
class NativeNetworkingImpl : public sigslot::has_slots<>, public std::enable_shared_from_this<NativeNetworkingImpl> {
public:
struct State {
bool isReadyToSendData = false;
bool isFailed = false;
};
struct Configuration {
bool isOutgoing = false;
bool enableStunMarking = false;
bool enableTCP = false;
bool enableP2P = false;
std::vector<RtcServer> rtcServers;
std::function<void(const NativeNetworkingImpl::State &)> stateUpdated;
std::function<void(const cricket::Candidate &)> candidateGathered;
std::function<void(rtc::CopyOnWriteBuffer const &, bool)> transportMessageReceived;
std::function<void(rtc::CopyOnWriteBuffer const &, int64_t)> rtcpPacketReceived;
std::function<void(bool)> dataChannelStateUpdated;
std::function<void(std::string const &)> dataChannelMessageReceived;
std::shared_ptr<Threads> threads;
};
static webrtc::CryptoOptions getDefaulCryptoOptions();
NativeNetworkingImpl(Configuration &&configuration);
~NativeNetworkingImpl();
void start();
void stop();
PeerIceParameters getLocalIceParameters();
std::unique_ptr<rtc::SSLFingerprint> getLocalFingerprint();
void setRemoteParams(PeerIceParameters const &remoteIceParameters, rtc::SSLFingerprint *fingerprint, std::string const &sslSetup);
void addCandidates(std::vector<cricket::Candidate> const &candidates);
void sendDataChannelMessage(std::string const &message);
webrtc::RtpTransport *getRtpTransport();
private:
void resetDtlsSrtpTransport();
void checkConnectionTimeout();
void candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate);
void candidateGatheringState(cricket::IceTransportInternal *transport);
void OnTransportWritableState_n(rtc::PacketTransportInternal *transport);
void OnTransportReceivingState_n(rtc::PacketTransportInternal *transport);
void transportStateChanged(cricket::IceTransportInternal *transport);
void transportReadyToSend(cricket::IceTransportInternal *transport);
void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused);
void DtlsReadyToSend(bool isReadyToSend);
void UpdateAggregateStates_n();
void RtpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us, bool isUnresolved);
void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer *packet, int64_t packet_time_us);
void sctpReadyToSendData();
void sctpDataReceived(const cricket::ReceiveDataParams& params, const rtc::CopyOnWriteBuffer& buffer);
std::shared_ptr<Threads> _threads;
bool _isOutgoing = false;
bool _enableStunMarking = false;
bool _enableTCP = false;
bool _enableP2P = false;
std::vector<RtcServer> _rtcServers;
std::function<void(const NativeNetworkingImpl::State &)> _stateUpdated;
std::function<void(const cricket::Candidate &)> _candidateGathered;
std::function<void(rtc::CopyOnWriteBuffer const &, bool)> _transportMessageReceived;
std::function<void(rtc::CopyOnWriteBuffer const &, int64_t)> _rtcpPacketReceived;
std::function<void(bool)> _dataChannelStateUpdated;
std::function<void(std::string const &)> _dataChannelMessageReceived;
std::unique_ptr<rtc::BasicPacketSocketFactory> _socketFactory;
std::unique_ptr<rtc::BasicNetworkManager> _networkManager;
std::unique_ptr<webrtc::TurnCustomizer> _turnCustomizer;
std::unique_ptr<cricket::BasicPortAllocator> _portAllocator;
std::unique_ptr<webrtc::BasicAsyncResolverFactory> _asyncResolverFactory;
std::unique_ptr<cricket::P2PTransportChannel> _transportChannel;
std::unique_ptr<cricket::DtlsTransport> _dtlsTransport;
std::unique_ptr<webrtc::DtlsSrtpTransport> _dtlsSrtpTransport;
std::unique_ptr<SctpDataChannelProviderInterfaceImpl> _dataChannelInterface;
rtc::scoped_refptr<rtc::RTCCertificate> _localCertificate;
PeerIceParameters _localIceParameters;
absl::optional<PeerIceParameters> _remoteIceParameters;
bool _isConnected = false;
int64_t _lastNetworkActivityMs = 0;
};
} // namespace tgcalls
#endif // TGCALLS_NATIVE_NETWORKING_IMPL_H
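Below is a minimal usage sketch for the interface above, assuming a tgcalls::Threads instance is already available (its factory is not part of this diff) and that construction happens on the thread the implementation expects; the variable names are illustrative.
<pre>
// Hedged sketch: configure and start NativeNetworkingImpl.
// `threads` is an assumed std::shared_ptr<tgcalls::Threads>.
tgcalls::NativeNetworkingImpl::Configuration config;
config.isOutgoing = true;
config.enableP2P = true;
config.stateUpdated = [](tgcalls::NativeNetworkingImpl::State const &state) {
    // state.isReadyToSendData / state.isFailed drive the caller's UI.
};
config.candidateGathered = [](cricket::Candidate const &candidate) {
    // Forward the candidate to the remote peer over the signaling channel.
};
config.threads = threads;

auto networking = std::make_shared<tgcalls::NativeNetworkingImpl>(std::move(config));
networking->start();
// ... and when the call ends:
networking->stop();
</pre>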

View file

@ -0,0 +1,652 @@
#include "v2/Signaling.h"
#include "third-party/json11.hpp"
#include "rtc_base/checks.h"
#include <sstream>
namespace tgcalls {
namespace signaling {
static std::string uint32ToString(uint32_t value) {
std::ostringstream stringStream;
stringStream << value;
return stringStream.str();
}
static uint32_t stringToUInt32(std::string const &string) {
std::stringstream stringStream(string);
uint32_t value = 0;
stringStream >> value;
return value;
}
json11::Json::object SsrcGroup_serialize(SsrcGroup const &ssrcGroup) {
json11::Json::object object;
json11::Json::array ssrcs;
for (auto ssrc : ssrcGroup.ssrcs) {
ssrcs.push_back(json11::Json(uint32ToString(ssrc)));
}
object.insert(std::make_pair("semantics", json11::Json(ssrcGroup.semantics)));
object.insert(std::make_pair("ssrcs", json11::Json(std::move(ssrcs))));
return object;
}
absl::optional<SsrcGroup> SsrcGroup_parse(json11::Json::object const &object) {
SsrcGroup result;
const auto semantics = object.find("semantics");
if (semantics == object.end() || !semantics->second.is_string()) {
return absl::nullopt;
}
result.semantics = semantics->second.string_value();
const auto ssrcs = object.find("ssrcs");
if (ssrcs == object.end() || !ssrcs->second.is_array()) {
return absl::nullopt;
}
for (const auto &ssrc : ssrcs->second.array_items()) {
if (ssrc.is_string()) {
uint32_t parsedSsrc = stringToUInt32(ssrc.string_value());
if (parsedSsrc == 0) {
return absl::nullopt;
}
result.ssrcs.push_back(parsedSsrc);
} else if (ssrc.is_number()) {
uint32_t parsedSsrc = (uint32_t)ssrc.number_value();
result.ssrcs.push_back(parsedSsrc);
} else {
return absl::nullopt;
}
}
return result;
}
json11::Json::object FeedbackType_serialize(FeedbackType const &feedbackType) {
json11::Json::object object;
object.insert(std::make_pair("type", json11::Json(feedbackType.type)));
object.insert(std::make_pair("subtype", json11::Json(feedbackType.subtype)));
return object;
}
absl::optional<FeedbackType> FeedbackType_parse(json11::Json::object const &object) {
FeedbackType result;
const auto type = object.find("type");
if (type == object.end() || !type->second.is_string()) {
return absl::nullopt;
}
result.type = type->second.string_value();
const auto subtype = object.find("subtype");
if (subtype == object.end() || !subtype->second.is_string()) {
return absl::nullopt;
}
result.subtype = subtype->second.string_value();
return result;
}
json11::Json::object RtpExtension_serialize(webrtc::RtpExtension const &rtpExtension) {
json11::Json::object object;
object.insert(std::make_pair("id", json11::Json(rtpExtension.id)));
object.insert(std::make_pair("uri", json11::Json(rtpExtension.uri)));
return object;
}
absl::optional<webrtc::RtpExtension> RtpExtension_parse(json11::Json::object const &object) {
const auto id = object.find("id");
if (id == object.end() || !id->second.is_number()) {
return absl::nullopt;
}
const auto uri = object.find("uri");
if (uri == object.end() || !uri->second.is_string()) {
return absl::nullopt;
}
return webrtc::RtpExtension(uri->second.string_value(), id->second.int_value());
}
json11::Json::object PayloadType_serialize(PayloadType const &payloadType) {
json11::Json::object object;
object.insert(std::make_pair("id", json11::Json((int)payloadType.id)));
object.insert(std::make_pair("name", json11::Json(payloadType.name)));
object.insert(std::make_pair("clockrate", json11::Json((int)payloadType.clockrate)));
object.insert(std::make_pair("channels", json11::Json((int)payloadType.channels)));
json11::Json::array feedbackTypes;
for (const auto &feedbackType : payloadType.feedbackTypes) {
feedbackTypes.push_back(FeedbackType_serialize(feedbackType));
}
object.insert(std::make_pair("feedbackTypes", json11::Json(std::move(feedbackTypes))));
json11::Json::object parameters;
for (const auto &it : payloadType.parameters) {
parameters.insert(std::make_pair(it.first, json11::Json(it.second)));
}
object.insert(std::make_pair("parameters", json11::Json(std::move(parameters))));
return object;
}
absl::optional<PayloadType> PayloadType_parse(json11::Json::object const &object) {
PayloadType result;
const auto id = object.find("id");
if (id == object.end() || !id->second.is_number()) {
return absl::nullopt;
}
result.id = id->second.int_value();
const auto name = object.find("name");
if (name == object.end() || !name->second.is_string()) {
return absl::nullopt;
}
result.name = name->second.string_value();
const auto clockrate = object.find("clockrate");
if (clockrate == object.end() || !clockrate->second.is_number()) {
return absl::nullopt;
}
result.clockrate = clockrate->second.int_value();
const auto channels = object.find("channels");
if (channels != object.end()) {
if (!channels->second.is_number()) {
return absl::nullopt;
}
result.channels = channels->second.int_value();
}
const auto feedbackTypes = object.find("feedbackTypes");
if (feedbackTypes != object.end()) {
if (!feedbackTypes->second.is_array()) {
return absl::nullopt;
}
for (const auto &feedbackType : feedbackTypes->second.array_items()) {
if (!feedbackType.is_object()) {
return absl::nullopt;
}
if (const auto parsedFeedbackType = FeedbackType_parse(feedbackType.object_items())) {
result.feedbackTypes.push_back(parsedFeedbackType.value());
} else {
return absl::nullopt;
}
}
}
const auto parameters = object.find("parameters");
if (parameters != object.end()) {
if (!parameters->second.is_object()) {
return absl::nullopt;
}
for (const auto &item : parameters->second.object_items()) {
if (!item.second.is_string()) {
return absl::nullopt;
}
result.parameters.push_back(std::make_pair(item.first, item.second.string_value()));
}
}
return result;
}
json11::Json::object MediaContent_serialize(MediaContent const &mediaContent) {
json11::Json::object object;
object.insert(std::make_pair("ssrc", json11::Json(uint32ToString(mediaContent.ssrc))));
if (!mediaContent.ssrcGroups.empty()) {
json11::Json::array ssrcGroups;
for (const auto &group : mediaContent.ssrcGroups) {
ssrcGroups.push_back(SsrcGroup_serialize(group));
}
object.insert(std::make_pair("ssrcGroups", json11::Json(std::move(ssrcGroups))));
}
if (!mediaContent.payloadTypes.empty()) {
json11::Json::array payloadTypes;
for (const auto &payloadType : mediaContent.payloadTypes) {
payloadTypes.push_back(PayloadType_serialize(payloadType));
}
object.insert(std::make_pair("payloadTypes", json11::Json(std::move(payloadTypes))));
}
json11::Json::array rtpExtensions;
for (const auto &rtpExtension : mediaContent.rtpExtensions) {
rtpExtensions.push_back(RtpExtension_serialize(rtpExtension));
}
object.insert(std::make_pair("rtpExtensions", json11::Json(std::move(rtpExtensions))));
return object;
}
absl::optional<MediaContent> MediaContent_parse(json11::Json::object const &object) {
MediaContent result;
const auto ssrc = object.find("ssrc");
if (ssrc == object.end()) {
return absl::nullopt;
}
if (ssrc->second.is_string()) {
result.ssrc = stringToUInt32(ssrc->second.string_value());
} else if (ssrc->second.is_number()) {
result.ssrc = (uint32_t)ssrc->second.number_value();
} else {
return absl::nullopt;
}
const auto ssrcGroups = object.find("ssrcGroups");
if (ssrcGroups != object.end()) {
if (!ssrcGroups->second.is_array()) {
return absl::nullopt;
}
for (const auto &ssrcGroup : ssrcGroups->second.array_items()) {
if (!ssrcGroup.is_object()) {
return absl::nullopt;
}
if (const auto parsedSsrcGroup = SsrcGroup_parse(ssrcGroup.object_items())) {
result.ssrcGroups.push_back(parsedSsrcGroup.value());
} else {
return absl::nullopt;
}
}
}
const auto payloadTypes = object.find("payloadTypes");
if (payloadTypes != object.end()) {
if (!payloadTypes->second.is_array()) {
return absl::nullopt;
}
for (const auto &payloadType : payloadTypes->second.array_items()) {
if (!payloadType.is_object()) {
return absl::nullopt;
}
if (const auto parsedPayloadType = PayloadType_parse(payloadType.object_items())) {
result.payloadTypes.push_back(parsedPayloadType.value());
} else {
return absl::nullopt;
}
}
}
const auto rtpExtensions = object.find("rtpExtensions");
if (rtpExtensions != object.end()) {
if (!rtpExtensions->second.is_array()) {
return absl::nullopt;
}
for (const auto &rtpExtension : rtpExtensions->second.array_items()) {
if (!rtpExtension.is_object()) {
return absl::nullopt;
}
if (const auto parsedRtpExtension = RtpExtension_parse(rtpExtension.object_items())) {
result.rtpExtensions.push_back(parsedRtpExtension.value());
} else {
return absl::nullopt;
}
}
}
return result;
}
std::vector<uint8_t> InitialSetupMessage_serialize(const InitialSetupMessage * const message) {
json11::Json::object object;
object.insert(std::make_pair("@type", json11::Json("InitialSetup")));
object.insert(std::make_pair("ufrag", json11::Json(message->ufrag)));
object.insert(std::make_pair("pwd", json11::Json(message->pwd)));
json11::Json::array jsonFingerprints;
for (const auto &fingerprint : message->fingerprints) {
json11::Json::object jsonFingerprint;
jsonFingerprint.insert(std::make_pair("hash", json11::Json(fingerprint.hash)));
jsonFingerprint.insert(std::make_pair("setup", json11::Json(fingerprint.setup)));
jsonFingerprint.insert(std::make_pair("fingerprint", json11::Json(fingerprint.fingerprint)));
jsonFingerprints.emplace_back(std::move(jsonFingerprint));
}
object.insert(std::make_pair("fingerprints", json11::Json(std::move(jsonFingerprints))));
if (const auto audio = message->audio) {
object.insert(std::make_pair("audio", json11::Json(MediaContent_serialize(audio.value()))));
}
if (const auto video = message->video) {
object.insert(std::make_pair("video", json11::Json(MediaContent_serialize(video.value()))));
}
auto json = json11::Json(std::move(object));
std::string result = json.dump();
return std::vector<uint8_t>(result.begin(), result.end());
}
absl::optional<InitialSetupMessage> InitialSetupMessage_parse(json11::Json::object const &object) {
const auto ufrag = object.find("ufrag");
if (ufrag == object.end() || !ufrag->second.is_string()) {
return absl::nullopt;
}
const auto pwd = object.find("pwd");
if (pwd == object.end() || !pwd->second.is_string()) {
return absl::nullopt;
}
const auto fingerprints = object.find("fingerprints");
if (fingerprints == object.end() || !fingerprints->second.is_array()) {
return absl::nullopt;
}
std::vector<DtlsFingerprint> parsedFingerprints;
for (const auto &fingerprintObject : fingerprints->second.array_items()) {
if (!fingerprintObject.is_object()) {
return absl::nullopt;
}
const auto hash = fingerprintObject.object_items().find("hash");
if (hash == fingerprintObject.object_items().end() || !hash->second.is_string()) {
return absl::nullopt;
}
const auto setup = fingerprintObject.object_items().find("setup");
if (setup == fingerprintObject.object_items().end() || !setup->second.is_string()) {
return absl::nullopt;
}
const auto fingerprint = fingerprintObject.object_items().find("fingerprint");
if (fingerprint == fingerprintObject.object_items().end() || !fingerprint->second.is_string()) {
return absl::nullopt;
}
DtlsFingerprint parsedFingerprint;
parsedFingerprint.hash = hash->second.string_value();
parsedFingerprint.setup = setup->second.string_value();
parsedFingerprint.fingerprint = fingerprint->second.string_value();
parsedFingerprints.push_back(std::move(parsedFingerprint));
}
InitialSetupMessage message;
message.ufrag = ufrag->second.string_value();
message.pwd = pwd->second.string_value();
message.fingerprints = std::move(parsedFingerprints);
const auto audio = object.find("audio");
if (audio != object.end()) {
if (!audio->second.is_object()) {
return absl::nullopt;
}
if (const auto parsedAudio = MediaContent_parse(audio->second.object_items())) {
message.audio = parsedAudio.value();
} else {
return absl::nullopt;
}
}
const auto video = object.find("video");
if (video != object.end()) {
if (!video->second.is_object()) {
return absl::nullopt;
}
if (const auto parsedVideo = MediaContent_parse(video->second.object_items())) {
message.video = parsedVideo.value();
} else {
return absl::nullopt;
}
}
return message;
}
json11::Json::object ConnectionAddress_serialize(ConnectionAddress const &connectionAddress) {
json11::Json::object object;
object.insert(std::make_pair("ip", json11::Json(connectionAddress.ip)));
object.insert(std::make_pair("port", json11::Json(connectionAddress.port)));
return object;
}
absl::optional<ConnectionAddress> ConnectionAddress_parse(json11::Json::object const &object) {
const auto ip = object.find("ip");
if (ip == object.end() || !ip->second.is_string()) {
return absl::nullopt;
}
const auto port = object.find("port");
if (port == object.end() || !port->second.is_number()) {
return absl::nullopt;
}
ConnectionAddress address;
address.ip = ip->second.string_value();
address.port = port->second.int_value();
return address;
}
std::vector<uint8_t> CandidatesMessage_serialize(const CandidatesMessage * const message) {
json11::Json::array candidates;
for (const auto &candidate : message->iceCandidates) {
json11::Json::object candidateObject;
candidateObject.insert(std::make_pair("sdpString", json11::Json(candidate.sdpString)));
candidates.emplace_back(std::move(candidateObject));
}
json11::Json::object object;
object.insert(std::make_pair("@type", json11::Json("Candidates")));
object.insert(std::make_pair("candidates", json11::Json(std::move(candidates))));
auto json = json11::Json(std::move(object));
std::string result = json.dump();
return std::vector<uint8_t>(result.begin(), result.end());
}
absl::optional<CandidatesMessage> CandidatesMessage_parse(json11::Json::object const &object) {
const auto candidates = object.find("candidates");
if (candidates == object.end() || !candidates->second.is_array()) {
return absl::nullopt;
}
std::vector<IceCandidate> parsedCandidates;
for (const auto &candidateObject : candidates->second.array_items()) {
if (!candidateObject.is_object()) {
return absl::nullopt;
}
IceCandidate candidate;
const auto sdpString = candidateObject.object_items().find("sdpString");
if (sdpString == candidateObject.object_items().end() || !sdpString->second.is_string()) {
return absl::nullopt;
}
candidate.sdpString = sdpString->second.string_value();
parsedCandidates.push_back(std::move(candidate));
}
CandidatesMessage message;
message.iceCandidates = std::move(parsedCandidates);
return message;
}
std::vector<uint8_t> MediaStateMessage_serialize(const MediaStateMessage * const message) {
json11::Json::object object;
object.insert(std::make_pair("@type", json11::Json("MediaState")));
object.insert(std::make_pair("muted", json11::Json(message->isMuted)));
object.insert(std::make_pair("lowBattery", json11::Json(message->isBatteryLow)));
std::string videoStateValue;
switch (message->videoState) {
case MediaStateMessage::VideoState::Inactive: {
videoStateValue = "inactive";
break;
}
case MediaStateMessage::VideoState::Suspended: {
videoStateValue = "suspended";
break;
}
case MediaStateMessage::VideoState::Active: {
videoStateValue = "active";
break;
}
default: {
RTC_FATAL() << "Unknown videoState";
break;
}
}
object.insert(std::make_pair("videoState", json11::Json(videoStateValue)));
int videoRotationValue = 0;
switch (message->videoRotation) {
case MediaStateMessage::VideoRotation::Rotation0: {
videoRotationValue = 0;
break;
}
case MediaStateMessage::VideoRotation::Rotation90: {
videoRotationValue = 90;
break;
}
case MediaStateMessage::VideoRotation::Rotation180: {
videoRotationValue = 180;
break;
}
case MediaStateMessage::VideoRotation::Rotation270: {
videoRotationValue = 270;
break;
}
default: {
RTC_FATAL() << "Unknown videoRotation";
break;
}
}
object.insert(std::make_pair("videoRotation", json11::Json(videoRotationValue)));
auto json = json11::Json(std::move(object));
std::string result = json.dump();
return std::vector<uint8_t>(result.begin(), result.end());
}
absl::optional<MediaStateMessage> MediaStateMessage_parse(json11::Json::object const &object) {
MediaStateMessage message;
const auto muted = object.find("muted");
if (muted != object.end()) {
if (!muted->second.is_bool()) {
return absl::nullopt;
}
message.isMuted = muted->second.bool_value();
}
const auto lowBattery = object.find("lowBattery");
if (lowBattery != object.end()) {
if (!lowBattery->second.is_bool()) {
return absl::nullopt;
}
message.isBatteryLow = lowBattery->second.bool_value();
}
const auto videoState = object.find("videoState");
if (videoState != object.end()) {
if (!videoState->second.is_string()) {
return absl::nullopt;
}
if (videoState->second.string_value() == "inactive") {
message.videoState = MediaStateMessage::VideoState::Inactive;
} else if (videoState->second.string_value() == "suspended") {
message.videoState = MediaStateMessage::VideoState::Suspended;
} else if (videoState->second.string_value() == "active") {
message.videoState = MediaStateMessage::VideoState::Active;
}
} else {
message.videoState = MediaStateMessage::VideoState::Inactive;
}
const auto videoRotation = object.find("videoRotation");
if (videoRotation != object.end()) {
if (!videoRotation->second.is_number()) {
return absl::nullopt;
}
        if (videoRotation->second.int_value() == 0) {
            message.videoRotation = MediaStateMessage::VideoRotation::Rotation0;
        } else if (videoRotation->second.int_value() == 90) {
            message.videoRotation = MediaStateMessage::VideoRotation::Rotation90;
        } else if (videoRotation->second.int_value() == 180) {
            message.videoRotation = MediaStateMessage::VideoRotation::Rotation180;
        } else if (videoRotation->second.int_value() == 270) {
            message.videoRotation = MediaStateMessage::VideoRotation::Rotation270;
} else {
message.videoRotation = MediaStateMessage::VideoRotation::Rotation0;
}
} else {
message.videoRotation = MediaStateMessage::VideoRotation::Rotation0;
}
return message;
}
std::vector<uint8_t> Message::serialize() const {
if (const auto initialSetup = absl::get_if<InitialSetupMessage>(&data)) {
return InitialSetupMessage_serialize(initialSetup);
} else if (const auto candidates = absl::get_if<CandidatesMessage>(&data)) {
return CandidatesMessage_serialize(candidates);
} else if (const auto mediaState = absl::get_if<MediaStateMessage>(&data)) {
return MediaStateMessage_serialize(mediaState);
} else {
return {};
}
}
absl::optional<Message> Message::parse(const std::vector<uint8_t> &data) {
std::string parsingError;
auto json = json11::Json::parse(std::string(data.begin(), data.end()), parsingError);
if (json.type() != json11::Json::OBJECT) {
return absl::nullopt;
}
auto type = json.object_items().find("@type");
if (type == json.object_items().end()) {
return absl::nullopt;
}
if (!type->second.is_string()) {
return absl::nullopt;
}
if (type->second.string_value() == "InitialSetup") {
auto parsed = InitialSetupMessage_parse(json.object_items());
if (!parsed) {
return absl::nullopt;
}
Message message;
message.data = std::move(parsed.value());
return message;
} else if (type->second.string_value() == "Candidates") {
auto parsed = CandidatesMessage_parse(json.object_items());
if (!parsed) {
return absl::nullopt;
}
Message message;
message.data = std::move(parsed.value());
return message;
} else if (type->second.string_value() == "MediaState") {
auto parsed = MediaStateMessage_parse(json.object_items());
if (!parsed) {
return absl::nullopt;
}
Message message;
message.data = std::move(parsed.value());
return message;
} else {
return absl::nullopt;
}
}
} // namespace signaling
} // namespace tgcalls

View file

@ -0,0 +1,103 @@
#ifndef TGCALLS_SIGNALING_H
#define TGCALLS_SIGNALING_H
#include <string>
#include <vector>
#include "absl/types/variant.h"
#include "absl/types/optional.h"
#include "api/rtp_parameters.h"
namespace tgcalls {
namespace signaling {
struct DtlsFingerprint {
std::string hash;
std::string setup;
std::string fingerprint;
};
struct ConnectionAddress {
std::string ip;
int port = 0;
};
struct IceCandidate {
std::string sdpString;
};
struct SsrcGroup {
std::vector<uint32_t> ssrcs;
std::string semantics;
};
struct FeedbackType {
std::string type;
std::string subtype;
};
struct PayloadType {
uint32_t id = 0;
std::string name;
uint32_t clockrate = 0;
uint32_t channels = 0;
std::vector<FeedbackType> feedbackTypes;
std::vector<std::pair<std::string, std::string>> parameters;
};
struct MediaContent {
uint32_t ssrc = 0;
std::vector<SsrcGroup> ssrcGroups;
std::vector<PayloadType> payloadTypes;
std::vector<webrtc::RtpExtension> rtpExtensions;
};
struct InitialSetupMessage {
std::string ufrag;
std::string pwd;
std::vector<DtlsFingerprint> fingerprints;
absl::optional<MediaContent> audio;
absl::optional<MediaContent> video;
};
struct CandidatesMessage {
std::vector<IceCandidate> iceCandidates;
};
struct MediaStateMessage {
enum class VideoState {
Inactive,
Suspended,
Active
};
enum class VideoRotation {
Rotation0,
Rotation90,
Rotation180,
Rotation270
};
bool isMuted = false;
VideoState videoState = VideoState::Inactive;
VideoRotation videoRotation = VideoRotation::Rotation0;
bool isBatteryLow = false;
};
struct Message {
absl::variant<
InitialSetupMessage,
CandidatesMessage,
MediaStateMessage> data;
std::vector<uint8_t> serialize() const;
static absl::optional<Message> parse(const std::vector<uint8_t> &data);
};
} // namespace signaling
} // namespace tgcalls
#endif // TGCALLS_SIGNALING_H
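A hedged round-trip sketch for the message types above: build a CandidatesMessage, serialize it to JSON bytes, and parse it back (the SDP string is a placeholder).
<pre>
tgcalls::signaling::IceCandidate candidate;
candidate.sdpString = "candidate:..."; // placeholder SDP fragment

tgcalls::signaling::CandidatesMessage candidatesMessage;
candidatesMessage.iceCandidates.push_back(std::move(candidate));

tgcalls::signaling::Message message;
message.data = std::move(candidatesMessage);

std::vector<uint8_t> bytes = message.serialize();
if (const auto parsed = tgcalls::signaling::Message::parse(bytes)) {
    // parsed->data holds a CandidatesMessage again.
}
</pre>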

View file

@ -0,0 +1,22 @@
#include "v2/SignalingEncryption.h"
namespace tgcalls {
SignalingEncryption::SignalingEncryption(EncryptionKey const &encryptionKey) {
_connection.reset(new EncryptedConnection(EncryptedConnection::Type::Signaling, encryptionKey, [](int, int) {
}));
}
SignalingEncryption::~SignalingEncryption() {
}
absl::optional<rtc::CopyOnWriteBuffer> SignalingEncryption::encryptOutgoing(std::vector<uint8_t> const &data) {
return _connection->encryptRawPacket(rtc::CopyOnWriteBuffer(data.data(), data.size()));
}
absl::optional<rtc::CopyOnWriteBuffer> SignalingEncryption::decryptIncoming(std::vector<uint8_t> const &data) {
return _connection->decryptRawPacket(rtc::CopyOnWriteBuffer(data.data(), data.size()));
}
} // namespace tgcalls

View file

@ -0,0 +1,23 @@
#ifndef TGCALLS_SIGNALING_ENCRYPTION_H
#define TGCALLS_SIGNALING_ENCRYPTION_H
#include "Instance.h"
#include "EncryptedConnection.h"
namespace tgcalls {
class SignalingEncryption {
public:
SignalingEncryption(EncryptionKey const &encryptionKey);
~SignalingEncryption();
absl::optional<rtc::CopyOnWriteBuffer> encryptOutgoing(std::vector<uint8_t> const &data);
absl::optional<rtc::CopyOnWriteBuffer> decryptIncoming(std::vector<uint8_t> const &data);
private:
std::unique_ptr<EncryptedConnection> _connection;
};
} // namespace tgcalls
#endif // TGCALLS_SIGNALING_ENCRYPTION_H
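A hedged usage sketch: encryptionKey is an assumed EncryptionKey (declared in Instance.h, not shown in this diff), and the encrypted buffer is what travels over the out-of-band signaling transport.
<pre>
tgcalls::SignalingEncryption signalingEncryption(encryptionKey);

// Sending side: encrypt a serialized signaling::Message.
std::vector<uint8_t> payload = message.serialize();
if (const auto encrypted = signalingEncryption.encryptOutgoing(payload)) {
    // Transmit encrypted->data() / encrypted->size() to the peer.
}

// Receiving side: decrypt, then parse the signaling message.
std::vector<uint8_t> received = /* bytes from the transport */ {};
if (const auto decrypted = signalingEncryption.decryptIncoming(received)) {
    const auto parsed = tgcalls::signaling::Message::parse(
        std::vector<uint8_t>(decrypted->data(), decrypted->data() + decrypted->size()));
}
</pre>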

View file

@ -1,71 +0,0 @@
# Design considerations
The header files in this directory form the API to the WebRTC library
that is intended for client applications' use.
This API is designed to be used on top of a multithreaded runtime.
The public API functions are designed to be called from a single thread*
(the "client thread"), and can do internal dispatching to the thread
where activity needs to happen. Those threads can be passed in by the
client, typically as arguments to factory constructors, or they can be
created by the library if factory constructors that don't take threads
are used.
Many of the functions are designed to be used in an asynchronous manner,
where a function is called to initiate an activity, and a callback will
be called when the activity is completed, or a handler function will
be called on an observer object when interesting events happen.
Note: Often, even functions that look simple (such as information query
functions) need to jump between threads to do their work, which means that
things may happen on other threads between calls. Writing
"increment(x); increment(x)" is therefore not a safe way to increment x by
exactly two: each call may have jumped to a thread that already had a queue
of pending work, so arbitrary other activity can intervene between the two
calls.
(*) The term "thread" is used here to denote any construct that guarantees
sequential execution - other names for such constructs are task runners
and sequenced task queues.
# Client threads and callbacks
At the moment, the API does not give any guarantee on which thread* the
callbacks and events are called on. So it's best to write all callback
and event handlers like this (pseudocode):
<pre>
void ObserverClass::Handler(event) {
if (!called_on_client_thread()) {
dispatch_to_client_thread(bind(handler(event)));
return;
}
// Process event, we're now on the right thread
}
</pre>
In the future, the implementation may change to always call the callbacks
and event handlers on the client thread.
# Implementation considerations
The C++ classes that are part of the public API are also used to derive
classes that form part of the implementation.
This should not directly concern users of the API, but may matter if one
wants to look at how the WebRTC library is implemented, or for legacy code
that directly accesses internal APIs.
Many APIs are defined in terms of a "proxy object", which will do a blocking
dispatch of the function to another thread, and an "implementation object"
which will do the actual
work, but can only be created, invoked and destroyed on its "home thread".
Usually, the classes are named "xxxInterface" (in api/), "xxxProxy" and
"xxx" (not in api/). WebRTC users should only need to depend on the files
in api/. In many cases, the "xxxProxy" and "xxx" classes are subclasses
of "xxxInterface", but this property is an implementation feature only,
and should not be relied upon.
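A hypothetical sketch of this split (class and member names are illustrative, not actual WebRTC types):
<pre>
class FooInterface {
 public:
  virtual ~FooInterface() = default;
  virtual int Count() const = 0;
};

// Proxy: lives on the client thread and blocks while dispatching to the
// implementation object's home thread.
class FooProxy : public FooInterface {
 public:
  int Count() const override {
    return home_thread_->Invoke<int>(RTC_FROM_HERE,
                                     [this] { return impl_->Count(); });
  }

 private:
  rtc::Thread* home_thread_;
  std::unique_ptr<FooInterface> impl_;  // used only on home_thread_
};
</pre>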
The threading properties of these internal APIs are NOT documented in
this note, and need to be understood by inspecting those classes.

View file

@ -2,7 +2,6 @@ crodbro@webrtc.org
deadbeef@webrtc.org deadbeef@webrtc.org
hta@webrtc.org hta@webrtc.org
juberti@webrtc.org juberti@webrtc.org
kwiberg@webrtc.org
magjed@webrtc.org magjed@webrtc.org
perkj@webrtc.org perkj@webrtc.org
tkchin@webrtc.org tkchin@webrtc.org
@ -11,4 +10,16 @@ tommi@webrtc.org
per-file peer_connection*=hbos@webrtc.org per-file peer_connection*=hbos@webrtc.org
per-file DEPS=mbonadei@webrtc.org per-file DEPS=mbonadei@webrtc.org
per-file DEPS=kwiberg@webrtc.org
# Please keep this list in sync with Chromium's //base/metrics/OWNERS and
# send a CL when you notice any difference.
# Even if people in the list below cannot formally grant +1 on WebRTC, it
# is good to get their LGTM before sending the CL to one of the folder OWNERS.
per-file uma_metrics.h=asvitkine@chromium.org
per-file uma_metrics.h=bcwhite@chromium.org
per-file uma_metrics.h=caitlinfischer@google.com
per-file uma_metrics.h=holte@chromium.org
per-file uma_metrics.h=isherman@chromium.org
per-file uma_metrics.h=jwd@chromium.org
per-file uma_metrics.h=mpearson@chromium.org
per-file uma_metrics.h=rkaplow@chromium.org

View file

@ -1,6 +1,6 @@
# How to write code in the `api/` directory # How to write code in the `api/` directory
Mostly, just follow the regular [style guide](../style-guide.md), but: Mostly, just follow the regular [style guide](../g3doc/style-guide.md), but:
* Note that `api/` code is not exempt from the “`.h` and `.cc` files come in * Note that `api/` code is not exempt from the “`.h` and `.cc` files come in
pairs” rule, so if you declare something in `api/path/to/foo.h`, it should be pairs” rule, so if you declare something in `api/path/to/foo.h`, it should be
@ -17,7 +17,7 @@ it from a `.cc` file, so that users of our API headers wont transitively
For headers in `api/` that need to refer to non-public types, forward For headers in `api/` that need to refer to non-public types, forward
declarations are often a lesser evil than including non-public header files. The declarations are often a lesser evil than including non-public header files. The
usual [rules](../style-guide.md#forward-declarations) still apply, though. usual [rules](../g3doc/style-guide.md#forward-declarations) still apply, though.
`.cc` files in `api/` should preferably be kept reasonably small. If a `.cc` files in `api/` should preferably be kept reasonably small. If a
substantial implementation is needed, consider putting it with our non-public substantial implementation is needed, consider putting it with our non-public

View file

@ -0,0 +1,86 @@
/*
* Copyright 2021 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_ASYNC_DNS_RESOLVER_H_
#define API_ASYNC_DNS_RESOLVER_H_
#include <functional>
#include <memory>
#include "rtc_base/socket_address.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
// This interface defines the methods to resolve a hostname asynchronously.
// The AsyncDnsResolverInterface class encapsulates a single name query.
//
// Usage:
// std::unique_ptr<AsyncDnsResolverInterface> resolver =
//     factory->CreateAndResolve(address_to_be_resolved, [r = resolver.get()]() {
//       if (r->result().GetResolvedAddress(AF_INET, &addr)) {
//         // success
//       } else {
//         // failure
//         error = r->result().GetError();
//       }
//       // Release resolver.
//       resolver_list.erase(std::remove_if(resolver_list.begin(),
//                                          resolver_list.end(),
//                                          [r](const auto& refptr) { return refptr.get() == r; }),
//                           resolver_list.end());
//     });
// resolver_list.push_back(std::move(resolver));
class AsyncDnsResolverResult {
public:
virtual ~AsyncDnsResolverResult() = default;
// Returns true iff the address from |Start| was successfully resolved.
// If the address was successfully resolved, sets |addr| to a copy of the
// address from |Start| with the IP address set to the top most resolved
// address of |family| (|addr| will have both hostname and the resolved ip).
virtual bool GetResolvedAddress(int family,
rtc::SocketAddress* addr) const = 0;
// Returns error from resolver.
virtual int GetError() const = 0;
};
class RTC_EXPORT AsyncDnsResolverInterface {
public:
virtual ~AsyncDnsResolverInterface() = default;
// Start address resolution of the hostname in |addr|.
virtual void Start(const rtc::SocketAddress& addr,
std::function<void()> callback) = 0;
virtual const AsyncDnsResolverResult& result() const = 0;
};
// An abstract factory for creating AsyncDnsResolverInterfaces. This allows
// client applications to provide WebRTC with their own mechanism for
// performing DNS resolution.
class AsyncDnsResolverFactoryInterface {
public:
virtual ~AsyncDnsResolverFactoryInterface() = default;
// Creates an AsyncDnsResolver and starts resolving the name. The callback
// will be called when resolution is finished.
// The callback will be called on the thread that the caller runs on.
virtual std::unique_ptr<webrtc::AsyncDnsResolverInterface> CreateAndResolve(
const rtc::SocketAddress& addr,
std::function<void()> callback) = 0;
// Creates an AsyncDnsResolver and does not start it.
// For backwards compatibility, will be deprecated and removed.
// One has to do a separate Start() call on the
// resolver to start name resolution.
virtual std::unique_ptr<webrtc::AsyncDnsResolverInterface> Create() = 0;
};
} // namespace webrtc
#endif // API_ASYNC_DNS_RESOLVER_H_
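For completeness, a hedged, compilable variant of the usage comment at the top of this header; it assumes `factory` points at a concrete AsyncDnsResolverFactoryInterface, that the callback runs asynchronously on the caller's thread (as documented for CreateAndResolve), and that the platform headers providing AF_INET are included.
<pre>
rtc::SocketAddress address("example.org", 3478);  // hypothetical hostname/port
std::unique_ptr<webrtc::AsyncDnsResolverInterface> resolver;
resolver = factory->CreateAndResolve(address, [&resolver]() {
  rtc::SocketAddress resolved;
  if (resolver->result().GetResolvedAddress(AF_INET, &resolved)) {
    // `resolved` now carries the top IPv4 address for the hostname.
  } else {
    int error = resolver->result().GetError();
    (void)error;  // handle the failure
  }
});
</pre>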

View file

@ -153,6 +153,7 @@ bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) {
res = res & Limit(&c->filter.config_change_duration_blocks, 0, 100000); res = res & Limit(&c->filter.config_change_duration_blocks, 0, 100000);
res = res & Limit(&c->filter.initial_state_seconds, 0.f, 100.f); res = res & Limit(&c->filter.initial_state_seconds, 0.f, 100.f);
res = res & Limit(&c->filter.coarse_reset_hangover_blocks, 0, 250000);
res = res & Limit(&c->erle.min, 1.f, 100000.f); res = res & Limit(&c->erle.min, 1.f, 100000.f);
res = res & Limit(&c->erle.max_l, 1.f, 100000.f); res = res & Limit(&c->erle.max_l, 1.f, 100000.f);

View file

@ -43,6 +43,7 @@ struct RTC_EXPORT EchoCanceller3Config {
size_t hysteresis_limit_blocks = 1; size_t hysteresis_limit_blocks = 1;
size_t fixed_capture_delay_samples = 0; size_t fixed_capture_delay_samples = 0;
float delay_estimate_smoothing = 0.7f; float delay_estimate_smoothing = 0.7f;
float delay_estimate_smoothing_delay_found = 0.7f;
float delay_candidate_detection_threshold = 0.2f; float delay_candidate_detection_threshold = 0.2f;
struct DelaySelectionThresholds { struct DelaySelectionThresholds {
int initial; int initial;
@ -86,9 +87,11 @@ struct RTC_EXPORT EchoCanceller3Config {
size_t config_change_duration_blocks = 250; size_t config_change_duration_blocks = 250;
float initial_state_seconds = 2.5f; float initial_state_seconds = 2.5f;
int coarse_reset_hangover_blocks = 25;
bool conservative_initial_phase = false; bool conservative_initial_phase = false;
bool enable_coarse_filter_output_usage = true; bool enable_coarse_filter_output_usage = true;
bool use_linear_filter = true; bool use_linear_filter = true;
bool high_pass_filter_echo_reference = false;
bool export_linear_aec_output = false; bool export_linear_aec_output = false;
} filter; } filter;
@ -107,6 +110,7 @@ struct RTC_EXPORT EchoCanceller3Config {
float default_len = 0.83f; float default_len = 0.83f;
bool echo_can_saturate = true; bool echo_can_saturate = true;
bool bounded_erl = false; bool bounded_erl = false;
bool erle_onset_compensation_in_dominant_nearend = false;
} ep_strength; } ep_strength;
struct EchoAudibility { struct EchoAudibility {

View file

@ -191,6 +191,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string,
&cfg.delay.fixed_capture_delay_samples); &cfg.delay.fixed_capture_delay_samples);
ReadParam(section, "delay_estimate_smoothing", ReadParam(section, "delay_estimate_smoothing",
&cfg.delay.delay_estimate_smoothing); &cfg.delay.delay_estimate_smoothing);
ReadParam(section, "delay_estimate_smoothing_delay_found",
&cfg.delay.delay_estimate_smoothing_delay_found);
ReadParam(section, "delay_candidate_detection_threshold", ReadParam(section, "delay_candidate_detection_threshold",
&cfg.delay.delay_candidate_detection_threshold); &cfg.delay.delay_candidate_detection_threshold);
@ -223,11 +225,15 @@ void Aec3ConfigFromJsonString(absl::string_view json_string,
&cfg.filter.config_change_duration_blocks); &cfg.filter.config_change_duration_blocks);
ReadParam(section, "initial_state_seconds", ReadParam(section, "initial_state_seconds",
&cfg.filter.initial_state_seconds); &cfg.filter.initial_state_seconds);
ReadParam(section, "coarse_reset_hangover_blocks",
&cfg.filter.coarse_reset_hangover_blocks);
ReadParam(section, "conservative_initial_phase", ReadParam(section, "conservative_initial_phase",
&cfg.filter.conservative_initial_phase); &cfg.filter.conservative_initial_phase);
ReadParam(section, "enable_coarse_filter_output_usage", ReadParam(section, "enable_coarse_filter_output_usage",
&cfg.filter.enable_coarse_filter_output_usage); &cfg.filter.enable_coarse_filter_output_usage);
ReadParam(section, "use_linear_filter", &cfg.filter.use_linear_filter); ReadParam(section, "use_linear_filter", &cfg.filter.use_linear_filter);
ReadParam(section, "high_pass_filter_echo_reference",
&cfg.filter.high_pass_filter_echo_reference);
ReadParam(section, "export_linear_aec_output", ReadParam(section, "export_linear_aec_output",
&cfg.filter.export_linear_aec_output); &cfg.filter.export_linear_aec_output);
} }
@ -249,6 +255,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string,
ReadParam(section, "default_len", &cfg.ep_strength.default_len); ReadParam(section, "default_len", &cfg.ep_strength.default_len);
ReadParam(section, "echo_can_saturate", &cfg.ep_strength.echo_can_saturate); ReadParam(section, "echo_can_saturate", &cfg.ep_strength.echo_can_saturate);
ReadParam(section, "bounded_erl", &cfg.ep_strength.bounded_erl); ReadParam(section, "bounded_erl", &cfg.ep_strength.bounded_erl);
ReadParam(section, "erle_onset_compensation_in_dominant_nearend",
&cfg.ep_strength.erle_onset_compensation_in_dominant_nearend);
} }
if (rtc::GetValueFromJsonObject(aec3_root, "echo_audibility", &section)) { if (rtc::GetValueFromJsonObject(aec3_root, "echo_audibility", &section)) {
@ -419,6 +427,8 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) {
<< config.delay.fixed_capture_delay_samples << ","; << config.delay.fixed_capture_delay_samples << ",";
ost << "\"delay_estimate_smoothing\": " ost << "\"delay_estimate_smoothing\": "
<< config.delay.delay_estimate_smoothing << ","; << config.delay.delay_estimate_smoothing << ",";
ost << "\"delay_estimate_smoothing_delay_found\": "
<< config.delay.delay_estimate_smoothing_delay_found << ",";
ost << "\"delay_candidate_detection_threshold\": " ost << "\"delay_candidate_detection_threshold\": "
<< config.delay.delay_candidate_detection_threshold << ","; << config.delay.delay_candidate_detection_threshold << ",";
@ -502,6 +512,8 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) {
<< config.filter.config_change_duration_blocks << ","; << config.filter.config_change_duration_blocks << ",";
ost << "\"initial_state_seconds\": " << config.filter.initial_state_seconds ost << "\"initial_state_seconds\": " << config.filter.initial_state_seconds
<< ","; << ",";
ost << "\"coarse_reset_hangover_blocks\": "
<< config.filter.coarse_reset_hangover_blocks << ",";
ost << "\"conservative_initial_phase\": " ost << "\"conservative_initial_phase\": "
<< (config.filter.conservative_initial_phase ? "true" : "false") << ","; << (config.filter.conservative_initial_phase ? "true" : "false") << ",";
ost << "\"enable_coarse_filter_output_usage\": " ost << "\"enable_coarse_filter_output_usage\": "
@ -509,6 +521,9 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) {
<< ","; << ",";
ost << "\"use_linear_filter\": " ost << "\"use_linear_filter\": "
<< (config.filter.use_linear_filter ? "true" : "false") << ","; << (config.filter.use_linear_filter ? "true" : "false") << ",";
ost << "\"high_pass_filter_echo_reference\": "
<< (config.filter.high_pass_filter_echo_reference ? "true" : "false")
<< ",";
ost << "\"export_linear_aec_output\": " ost << "\"export_linear_aec_output\": "
<< (config.filter.export_linear_aec_output ? "true" : "false"); << (config.filter.export_linear_aec_output ? "true" : "false");
@ -533,8 +548,11 @@ std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) {
ost << "\"echo_can_saturate\": " ost << "\"echo_can_saturate\": "
<< (config.ep_strength.echo_can_saturate ? "true" : "false") << ","; << (config.ep_strength.echo_can_saturate ? "true" : "false") << ",";
ost << "\"bounded_erl\": " ost << "\"bounded_erl\": "
<< (config.ep_strength.bounded_erl ? "true" : "false"); << (config.ep_strength.bounded_erl ? "true" : "false") << ",";
ost << "\"erle_onset_compensation_in_dominant_nearend\": "
<< (config.ep_strength.erle_onset_compensation_in_dominant_nearend
? "true"
: "false");
ost << "},"; ost << "},";
ost << "\"echo_audibility\": {"; ost << "\"echo_audibility\": {";

Some files were not shown because too many files have changed in this diff.