update to 9.4.5

This commit is contained in:
xaxtix 2023-02-25 12:01:39 +04:00
parent 51e9947527
commit 810bc4ae67
1050 changed files with 118411 additions and 42521 deletions

View file

@ -42,14 +42,15 @@ dependencies {
implementation 'com.google.mlkit:language-id:16.1.1'
implementation 'com.android.billingclient:billing:5.1.0'
implementation 'com.google.code.gson:gson:2.10'
implementation 'com.google.guava:guava:31.1-android'
coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:1.1.5'
}
def isWindows = String.valueOf(DefaultNativePlatform.currentOperatingSystem.toFamilyName() == OperatingSystemFamily.WINDOWS)
android {
compileSdkVersion 31
buildToolsVersion '31.0.0'
compileSdkVersion 33
buildToolsVersion '33.0.0'
ndkVersion "21.4.7075529"
sourceSets.main.jniLibs.srcDirs = ['./jni/']

View file

@ -435,7 +435,7 @@ target_include_directories(breakpad PUBLIC
#voip
include(${CMAKE_HOME_DIRECTORY}/voip/CMakeLists.txt)
set(NATIVE_LIB "tmessages.42")
set(NATIVE_LIB "tmessages.43")
#tmessages
add_library(${NATIVE_LIB} SHARED

View file

@ -294,7 +294,7 @@ int initRecorder(const char *path, opus_int32 sampleRate) {
return 0;
}
_fileOs = fopen(path, "wb");
_fileOs = fopen(path, "w");
if (!_fileOs) {
LOGE("error cannot open file: %s", path);
return 0;

View file

@ -2,7 +2,7 @@
# instructions for build
# used
# ffmpeg 4.3.3
# ffmpeg 4.4.3
# lib vpx 1.10.9
# NDK for compile libvpx. Last successful build with 21.1.6352462
# NDK r10e for compile ffmpeg
@ -22,7 +22,7 @@ NDK_r10e="/opt/android/ndk/android-ndk-r10e"
cd ./vpx-android
export ANDROID_NDK=$NDK
sh build-vpx.sh
cd ../..
cd ..
NDK=$NDK_r10e

View file

@ -74,7 +74,7 @@ configure() {
--stl libc++ \
--install-dir=${TOOLCHAIN_PREFIX}
PREFIX=../../../android/${ABI}
PREFIX=../../android/${ABI}
export PATH=${TOOLCHAIN_PREFIX}/bin:$PATH

View file

@ -410,6 +410,11 @@ typedef struct RcOverride{
* Export the AVVideoEncParams structure through frame side data.
*/
#define AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS (1 << 2)
/**
* Decoding only.
* Do not apply film grain, export it instead.
*/
#define AV_CODEC_EXPORT_DATA_FILM_GRAIN (1 << 3)
/**
* Pan Scan area.
@ -508,6 +513,11 @@ typedef struct AVProducerReferenceTime {
*/
#define AV_GET_BUFFER_FLAG_REF (1 << 0)
/**
* The encoder will keep a reference to the packet and may reuse it later.
*/
#define AV_GET_ENCODE_BUFFER_FLAG_REF (1 << 0)
struct AVCodecInternal;
/**
@ -1314,9 +1324,9 @@ typedef struct AVCodecContext {
*
* Some decoders do not support linesizes changing between frames.
*
* If frame multithreading is used and thread_safe_callbacks is set,
* this callback may be called from a different thread, but not from more
* than one at once. Does not need to be reentrant.
* If frame multithreading is used, this callback may be called from a
* different thread, but not from more than one at once. Does not need to be
* reentrant.
*
* @see avcodec_align_dimensions2()
*
@ -1340,6 +1350,7 @@ typedef struct AVCodecContext {
*/
int (*get_buffer2)(struct AVCodecContext *s, AVFrame *frame, int flags);
#if FF_API_OLD_ENCDEC
/**
* If non-zero, the decoded audio and video frames returned from
* avcodec_decode_video2() and avcodec_decode_audio4() are reference-counted
@ -1355,6 +1366,7 @@ typedef struct AVCodecContext {
*/
attribute_deprecated
int refcounted_frames;
#endif
/* - encoding parameters */
float qcompress; ///< amount of qscale change between easy & hard scenes (0.0-1.0)
@ -1614,42 +1626,20 @@ typedef struct AVCodecContext {
#define FF_DEBUG_BITSTREAM 4
#define FF_DEBUG_MB_TYPE 8
#define FF_DEBUG_QP 16
#if FF_API_DEBUG_MV
/**
* @deprecated this option does nothing
*/
#define FF_DEBUG_MV 32
#endif
#define FF_DEBUG_DCT_COEFF 0x00000040
#define FF_DEBUG_SKIP 0x00000080
#define FF_DEBUG_STARTCODE 0x00000100
#define FF_DEBUG_ER 0x00000400
#define FF_DEBUG_MMCO 0x00000800
#define FF_DEBUG_BUGS 0x00001000
#if FF_API_DEBUG_MV
#define FF_DEBUG_VIS_QP 0x00002000
#define FF_DEBUG_VIS_MB_TYPE 0x00004000
#endif
#define FF_DEBUG_BUFFERS 0x00008000
#define FF_DEBUG_THREADS 0x00010000
#define FF_DEBUG_GREEN_MD 0x00800000
#define FF_DEBUG_NOMC 0x01000000
#if FF_API_DEBUG_MV
/**
* debug
* - encoding: Set by user.
* - decoding: Set by user.
*/
int debug_mv;
#define FF_DEBUG_VIS_MV_P_FOR 0x00000001 // visualize forward predicted MVs of P-frames
#define FF_DEBUG_VIS_MV_B_FOR 0x00000002 // visualize forward predicted MVs of B-frames
#define FF_DEBUG_VIS_MV_B_BACK 0x00000004 // visualize backward predicted MVs of B-frames
#endif
/**
* Error recognition; may misdetect some more or less valid parts as errors.
* - encoding: unused
* - encoding: Set by user.
* - decoding: Set by user.
*/
int err_recognition;
@ -1756,14 +1746,12 @@ typedef struct AVCodecContext {
*/
int bits_per_raw_sample;
#if FF_API_LOWRES
/**
* low resolution decoding, 1-> 1/2 size, 2->1/4 size
* - encoding: unused
* - decoding: Set by user.
*/
int lowres;
#endif
#if FF_API_CODED_FRAME
/**
@ -1803,6 +1791,7 @@ typedef struct AVCodecContext {
*/
int active_thread_type;
#if FF_API_THREAD_SAFE_CALLBACKS
/**
* Set by the client if its custom get_buffer() callback can be called
* synchronously from another thread, which allows faster multithreaded decoding.
@ -1810,8 +1799,18 @@ typedef struct AVCodecContext {
* Ignored if the default get_buffer() is used.
* - encoding: Set by user.
* - decoding: Set by user.
*
* @deprecated the custom get_buffer2() callback should always be
* thread-safe. Thread-unsafe get_buffer2() implementations will be
* invalid starting with LIBAVCODEC_VERSION_MAJOR=60; in other words,
* libavcodec will behave as if this field was always set to 1.
* Callers that want to be forward compatible with future libavcodec
* versions should wrap access to this field in
* #if LIBAVCODEC_VERSION_MAJOR < 60
*/
attribute_deprecated
int thread_safe_callbacks;
#endif
/**
* The codec may call this to execute several independent things.
@ -1949,6 +1948,9 @@ typedef struct AVCodecContext {
#define FF_PROFILE_HEVC_MAIN_STILL_PICTURE 3
#define FF_PROFILE_HEVC_REXT 4
#define FF_PROFILE_VVC_MAIN_10 1
#define FF_PROFILE_VVC_MAIN_10_444 33
#define FF_PROFILE_AV1_MAIN 0
#define FF_PROFILE_AV1_HIGH 1
#define FF_PROFILE_AV1_PROFESSIONAL 2
@ -2089,15 +2091,6 @@ typedef struct AVCodecContext {
*/
const AVCodecDescriptor *codec_descriptor;
#if !FF_API_LOWRES
/**
* low resolution decoding, 1-> 1/2 size, 2->1/4 size
* - encoding: unused
* - decoding: Set by user.
*/
int lowres;
#endif
/**
* Current statistics for PTS correction.
* - decoding: maintained and used by libavcodec, not intended to be used by user apps
@ -2148,12 +2141,11 @@ typedef struct AVCodecContext {
*/
int seek_preroll;
#if !FF_API_DEBUG_MV
#if FF_API_DEBUG_MV
/**
* debug motion vectors
* - encoding: Set by user.
* - decoding: Set by user.
* @deprecated unused
*/
attribute_deprecated
int debug_mv;
#define FF_DEBUG_VIS_MV_P_FOR 0x00000001 //visualize forward predicted MVs of P frames
#define FF_DEBUG_VIS_MV_B_FOR 0x00000002 //visualize forward predicted MVs of B frames
@ -2352,6 +2344,44 @@ typedef struct AVCodecContext {
* - encoding: set by user
*/
int export_side_data;
/**
* This callback is called at the beginning of each packet to get a data
* buffer for it.
*
* The following field will be set in the packet before this callback is
* called:
* - size
* This callback must use the above value to calculate the required buffer size,
* which must padded by at least AV_INPUT_BUFFER_PADDING_SIZE bytes.
*
* This callback must fill the following fields in the packet:
* - data: alignment requirements for AVPacket apply, if any. Some architectures and
* encoders may benefit from having aligned data.
* - buf: must contain a pointer to an AVBufferRef structure. The packet's
* data pointer must be contained in it. See: av_buffer_create(), av_buffer_alloc(),
* and av_buffer_ref().
*
* If AV_CODEC_CAP_DR1 is not set then get_encode_buffer() must call
* avcodec_default_get_encode_buffer() instead of providing a buffer allocated by
* some other means.
*
* The flags field may contain a combination of AV_GET_ENCODE_BUFFER_FLAG_ flags.
* They may be used for example to hint what use the buffer may get after being
* created.
* Implementations of this callback may ignore flags they don't understand.
* If AV_GET_ENCODE_BUFFER_FLAG_REF is set in flags then the packet may be reused
* (read and/or written to if it is writable) later by libavcodec.
*
* This callback must be thread-safe, as when frame threading is used, it may
* be called from multiple threads simultaneously.
*
* @see avcodec_default_get_encode_buffer()
*
* - encoding: Set by libavcodec, user can override.
* - decoding: unused
*/
int (*get_encode_buffer)(struct AVCodecContext *s, AVPacket *pkt, int flags);
} AVCodecContext;
#if FF_API_CODEC_GET_SET
@ -2372,12 +2402,10 @@ void av_codec_set_codec_descriptor(AVCodecContext *avctx, co
attribute_deprecated
unsigned av_codec_get_codec_properties(const AVCodecContext *avctx);
#if FF_API_LOWRES
attribute_deprecated
int av_codec_get_lowres(const AVCodecContext *avctx);
attribute_deprecated
void av_codec_set_lowres(AVCodecContext *avctx, int val);
#endif
attribute_deprecated
int av_codec_get_seek_preroll(const AVCodecContext *avctx);
@ -2727,25 +2755,13 @@ const char *avcodec_license(void);
#if FF_API_NEXT
/**
* Register the codec codec and initialize libavcodec.
*
* @warning either this function or avcodec_register_all() must be called
* before any other libavcodec functions.
*
* @see avcodec_register_all()
* @deprecated Calling this function is unnecessary.
*/
attribute_deprecated
void avcodec_register(AVCodec *codec);
/**
* Register all the codecs, parsers and bitstream filters which were enabled at
* configuration time. If you do not call this function you can select exactly
* which formats you want to support, by using the individual registration
* functions.
*
* @see avcodec_register
* @see av_register_codec_parser
* @see av_register_bitstream_filter
* @deprecated Calling this function is unnecessary.
*/
attribute_deprecated
void avcodec_register_all(void);
@ -2789,14 +2805,13 @@ int avcodec_get_context_defaults3(AVCodecContext *s, const AVCodec *codec);
*/
const AVClass *avcodec_get_class(void);
#if FF_API_COPY_CONTEXT
#if FF_API_GET_FRAME_CLASS
/**
* Get the AVClass for AVFrame. It can be used in combination with
* AV_OPT_SEARCH_FAKE_OBJ for examining options.
*
* @see av_opt_find().
* @deprecated This function should not be used.
*/
attribute_deprecated
const AVClass *avcodec_get_frame_class(void);
#endif
/**
* Get the AVClass for AVSubtitleRect. It can be used in combination with
@ -2806,6 +2821,7 @@ const AVClass *avcodec_get_frame_class(void);
*/
const AVClass *avcodec_get_subtitle_rect_class(void);
#if FF_API_COPY_CONTEXT
/**
* Copy the settings of the source AVCodecContext into the destination
* AVCodecContext. The resulting destination codec context will be
@ -2862,7 +2878,6 @@ int avcodec_parameters_to_context(AVCodecContext *codec,
* @ref avcodec_receive_frame()).
*
* @code
* avcodec_register_all();
* av_dict_set(&opts, "b", "2.5M", 0);
* codec = avcodec_find_decoder(AV_CODEC_ID_H264);
* if (!codec)
@ -2926,6 +2941,13 @@ void avsubtitle_free(AVSubtitle *sub);
*/
int avcodec_default_get_buffer2(AVCodecContext *s, AVFrame *frame, int flags);
/**
* The default callback for AVCodecContext.get_encode_buffer(). It is made public so
* it can be called by custom get_encode_buffer() implementations for encoders without
* AV_CODEC_CAP_DR1 set.
*/
int avcodec_default_get_encode_buffer(AVCodecContext *s, AVPacket *pkt, int flags);
/**
* Modify width and height values so that they will result in a memory
* buffer that is acceptable for the codec if you do not use any horizontal
@ -2967,6 +2989,7 @@ int avcodec_enum_to_chroma_pos(int *xpos, int *ypos, enum AVChromaLocation pos);
*/
enum AVChromaLocation avcodec_chroma_pos_to_enum(int xpos, int ypos);
#if FF_API_OLD_ENCDEC
/**
* Decode the audio frame of size avpkt->size from avpkt->data into frame.
*
@ -3073,6 +3096,7 @@ attribute_deprecated
int avcodec_decode_video2(AVCodecContext *avctx, AVFrame *picture,
int *got_picture_ptr,
const AVPacket *avpkt);
#endif
/**
* Decode a subtitle message.
@ -3529,7 +3553,10 @@ typedef struct AVCodecParser {
const uint8_t *buf, int buf_size);
void (*parser_close)(AVCodecParserContext *s);
int (*split)(AVCodecContext *avctx, const uint8_t *buf, int buf_size);
#if FF_API_NEXT
attribute_deprecated
struct AVCodecParser *next;
#endif
} AVCodecParser;
/**
@ -3543,11 +3570,13 @@ typedef struct AVCodecParser {
*/
const AVCodecParser *av_parser_iterate(void **opaque);
#if FF_API_NEXT
attribute_deprecated
AVCodecParser *av_parser_next(const AVCodecParser *c);
attribute_deprecated
void av_register_codec_parser(AVCodecParser *parser);
#endif
AVCodecParserContext *av_parser_init(int codec_id);
/**
@ -3588,14 +3617,18 @@ int av_parser_parse2(AVCodecParserContext *s,
int64_t pts, int64_t dts,
int64_t pos);
#if FF_API_PARSER_CHANGE
/**
* @return 0 if the output buffer is a subset of the input, 1 if it is allocated and must be freed
* @deprecated use AVBitStreamFilter
* @deprecated Use dump_extradata, remove_extra or extract_extradata
* bitstream filters instead.
*/
attribute_deprecated
int av_parser_change(AVCodecParserContext *s,
AVCodecContext *avctx,
uint8_t **poutbuf, int *poutbuf_size,
const uint8_t *buf, int buf_size, int keyframe);
#endif
void av_parser_close(AVCodecParserContext *s);
/**
@ -3608,6 +3641,7 @@ void av_parser_close(AVCodecParserContext *s);
* @{
*/
#if FF_API_OLD_ENCDEC
/**
* Encode a frame of audio.
*
@ -3646,7 +3680,9 @@ void av_parser_close(AVCodecParserContext *s);
* not be used.
* @return 0 on success, negative error code on failure
*
* @deprecated use avcodec_send_frame()/avcodec_receive_packet() instead
* @deprecated use avcodec_send_frame()/avcodec_receive_packet() instead.
* If allowed and required, set AVCodecContext.get_encode_buffer to
* a custom function to pass user supplied output buffers.
*/
attribute_deprecated
int avcodec_encode_audio2(AVCodecContext *avctx, AVPacket *avpkt,
@ -3685,11 +3721,14 @@ int avcodec_encode_audio2(AVCodecContext *avctx, AVPacket *avpkt,
* not be used.
* @return 0 on success, negative error code on failure
*
* @deprecated use avcodec_send_frame()/avcodec_receive_packet() instead
* @deprecated use avcodec_send_frame()/avcodec_receive_packet() instead.
* If allowed and required, set AVCodecContext.get_encode_buffer to
* a custom function to pass user supplied output buffers.
*/
attribute_deprecated
int avcodec_encode_video2(AVCodecContext *avctx, AVPacket *avpkt,
const AVFrame *frame, int *got_packet_ptr);
#endif
int avcodec_encode_subtitle(AVCodecContext *avctx, uint8_t *buf, int buf_size,
const AVSubtitle *sub);
@ -3796,12 +3835,6 @@ void avcodec_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int
*/
unsigned int avcodec_pix_fmt_to_codec_tag(enum AVPixelFormat pix_fmt);
/**
* @deprecated see av_get_pix_fmt_loss()
*/
int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt, enum AVPixelFormat src_pix_fmt,
int has_alpha);
/**
* Find the best pixel format to convert to given a certain source pixel
* format. When converting from one pixel format to another, information loss
@ -3823,15 +3856,24 @@ enum AVPixelFormat avcodec_find_best_pix_fmt_of_list(const enum AVPixelFormat *p
enum AVPixelFormat src_pix_fmt,
int has_alpha, int *loss_ptr);
#if FF_API_AVCODEC_PIX_FMT
/**
* @deprecated see av_get_pix_fmt_loss()
*/
attribute_deprecated
int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt, enum AVPixelFormat src_pix_fmt,
int has_alpha);
/**
* @deprecated see av_find_best_pix_fmt_of_2()
*/
attribute_deprecated
enum AVPixelFormat avcodec_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
attribute_deprecated
enum AVPixelFormat avcodec_find_best_pix_fmt2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
#endif
enum AVPixelFormat avcodec_default_get_format(struct AVCodecContext *s, const enum AVPixelFormat * fmt);

View file

@ -215,7 +215,7 @@ int av_bsf_send_packet(AVBSFContext *ctx, AVPacket *pkt);
int av_bsf_receive_packet(AVBSFContext *ctx, AVPacket *pkt);
/**
* Reset the internal bitstream filter state / flush internal buffers.
* Reset the internal bitstream filter state. Should be called e.g. when seeking.
*/
void av_bsf_flush(AVBSFContext *ctx);

View file

@ -43,9 +43,11 @@
*/
#define AV_CODEC_CAP_DRAW_HORIZ_BAND (1 << 0)
/**
* Codec uses get_buffer() for allocating buffers and supports custom allocators.
* If not set, it might not use get_buffer() at all or use operations that
* assume the buffer was allocated by avcodec_default_get_buffer.
* Codec uses get_buffer() or get_encode_buffer() for allocating buffers and
* supports custom allocators.
* If not set, it might not use get_buffer() or get_encode_buffer() at all, or
* use operations that assume the buffer was allocated by
* avcodec_default_get_buffer2 or avcodec_default_get_encode_buffer.
*/
#define AV_CODEC_CAP_DR1 (1 << 1)
#define AV_CODEC_CAP_TRUNCATED (1 << 3)
@ -113,9 +115,14 @@
*/
#define AV_CODEC_CAP_PARAM_CHANGE (1 << 14)
/**
* Codec supports avctx->thread_count == 0 (auto).
* Codec supports multithreading through a method other than slice- or
* frame-level multithreading. Typically this marks wrappers around
* multithreading-capable external libraries.
*/
#define AV_CODEC_CAP_AUTO_THREADS (1 << 15)
#define AV_CODEC_CAP_OTHER_THREADS (1 << 15)
#if FF_API_AUTO_THREADS
#define AV_CODEC_CAP_AUTO_THREADS AV_CODEC_CAP_OTHER_THREADS
#endif
/**
* Audio encoder supports receiving a different number of samples in each call.
*/
@ -236,7 +243,9 @@ typedef struct AVCodec {
*****************************************************************
*/
int priv_data_size;
#if FF_API_NEXT
struct AVCodec *next;
#endif
/**
* @name Frame-level threading support functions
* @{
@ -257,7 +266,7 @@ typedef struct AVCodec {
const AVCodecDefault *defaults;
/**
* Initialize codec static data, called from avcodec_register().
* Initialize codec static data, called from av_codec_iterate().
*
* This is not intended for time consuming operations as it is
* run for every codec regardless of that codec being used.
@ -271,7 +280,7 @@ typedef struct AVCodec {
* Encode data to an AVPacket.
*
* @param avctx codec context
* @param avpkt output AVPacket (may contain a user-provided buffer)
* @param avpkt output AVPacket
* @param[in] frame AVFrame containing the raw data to be encoded
* @param[out] got_packet_ptr encoder sets to 0 or 1 to indicate that a
* non-empty packet was returned in avpkt.
@ -279,17 +288,26 @@ typedef struct AVCodec {
*/
int (*encode2)(struct AVCodecContext *avctx, struct AVPacket *avpkt,
const struct AVFrame *frame, int *got_packet_ptr);
int (*decode)(struct AVCodecContext *, void *outdata, int *outdata_size, struct AVPacket *avpkt);
/**
* Decode picture or subtitle data.
*
* @param avctx codec context
* @param outdata codec type dependent output struct
* @param[out] got_frame_ptr decoder sets to 0 or 1 to indicate that a
* non-empty frame or subtitle was returned in
* outdata.
* @param[in] avpkt AVPacket containing the data to be decoded
* @return amount of bytes read from the packet on success, negative error
* code on failure
*/
int (*decode)(struct AVCodecContext *avctx, void *outdata,
int *got_frame_ptr, struct AVPacket *avpkt);
int (*close)(struct AVCodecContext *);
/**
* Encode API with decoupled packet/frame dataflow. The API is the
* same as the avcodec_ prefixed APIs (avcodec_send_frame() etc.), except
* that:
* - never called if the codec is closed or the wrong type,
* - if AV_CODEC_CAP_DELAY is not set, drain frames are never sent,
* - only one drain frame is ever passed down,
* Encode API with decoupled frame/packet dataflow. This function is called
* to get one output packet. It should call ff_encode_get_frame() to obtain
* input data.
*/
int (*send_frame)(struct AVCodecContext *avctx, const struct AVFrame *frame);
int (*receive_packet)(struct AVCodecContext *avctx, struct AVPacket *avpkt);
/**
@ -322,7 +340,7 @@ typedef struct AVCodec {
*
* The user can only access this field via avcodec_get_hw_config().
*/
const struct AVCodecHWConfigInternal **hw_configs;
const struct AVCodecHWConfigInternal *const *hw_configs;
/**
* List of supported codec_tags, terminated by FF_CODEC_TAGS_END.

View file

@ -241,6 +241,11 @@ enum AVCodecID {
AV_CODEC_ID_SCREENPRESSO,
AV_CODEC_ID_RSCC,
AV_CODEC_ID_AVS2,
AV_CODEC_ID_PGX,
AV_CODEC_ID_AVS3,
AV_CODEC_ID_MSP2,
AV_CODEC_ID_VVC,
#define AV_CODEC_ID_H266 AV_CODEC_ID_VVC
AV_CODEC_ID_Y41P = 0x8000,
AV_CODEC_ID_AVRP,
@ -295,6 +300,13 @@ enum AVCodecID {
AV_CODEC_ID_MV30,
AV_CODEC_ID_NOTCHLC,
AV_CODEC_ID_PFM,
AV_CODEC_ID_MOBICLIP,
AV_CODEC_ID_PHOTOCD,
AV_CODEC_ID_IPU,
AV_CODEC_ID_ARGO,
AV_CODEC_ID_CRI,
AV_CODEC_ID_SIMBIOSIS_IMX,
AV_CODEC_ID_SGA_VIDEO,
/* various PCM "codecs" */
AV_CODEC_ID_FIRST_AUDIO = 0x10000, ///< A dummy id pointing at the start of audio codecs
@ -335,6 +347,7 @@ enum AVCodecID {
AV_CODEC_ID_PCM_F16LE,
AV_CODEC_ID_PCM_F24LE,
AV_CODEC_ID_PCM_VIDC,
AV_CODEC_ID_PCM_SGA,
/* various ADPCM codecs */
AV_CODEC_ID_ADPCM_IMA_QT = 0x11000,
@ -387,6 +400,7 @@ enum AVCodecID {
AV_CODEC_ID_ADPCM_IMA_ALP,
AV_CODEC_ID_ADPCM_IMA_MTF,
AV_CODEC_ID_ADPCM_IMA_CUNNING,
AV_CODEC_ID_ADPCM_IMA_MOFLEX,
/* AMR */
AV_CODEC_ID_AMR_NB = 0x12000,
@ -502,6 +516,7 @@ enum AVCodecID {
AV_CODEC_ID_MPEGH_3D_AUDIO,
AV_CODEC_ID_SIREN,
AV_CODEC_ID_HCA,
AV_CODEC_ID_FASTAUDIO,
/* subtitle codecs */
AV_CODEC_ID_FIRST_SUBTITLE = 0x17000, ///< A dummy ID pointing at the start of subtitle codecs.

View file

@ -282,6 +282,14 @@ enum AVPacketSideDataType {
*/
AV_PKT_DATA_DOVI_CONF,
/**
* Timecode which conforms to SMPTE ST 12-1:2014. The data is an array of 4 uint32_t
* where the first uint32_t describes how many (1-3) of the other timecodes are used.
* The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum()
* function in libavutil/timecode.h.
*/
AV_PKT_DATA_S12M_TIMECODE,
/**
* The number of side data types.
* This is not part of the public API/ABI in the sense that it may
@ -297,7 +305,11 @@ enum AVPacketSideDataType {
typedef struct AVPacketSideData {
uint8_t *data;
#if FF_API_BUFFER_SIZE_T
int size;
#else
size_t size;
#endif
enum AVPacketSideDataType type;
} AVPacketSideData;
@ -311,10 +323,6 @@ typedef struct AVPacketSideData {
* packets, with no compressed data, containing only side data
* (e.g. to update some stream parameters at the end of encoding).
*
* AVPacket is one of the few structs in FFmpeg, whose size is a part of public
* ABI. Thus it may be allocated on stack and no new fields can be added to it
* without libavcodec and libavformat major bump.
*
* The semantics of data ownership depends on the buf field.
* If it is set, the packet data is dynamically allocated and is
* valid indefinitely until a call to av_packet_unref() reduces the
@ -326,6 +334,12 @@ typedef struct AVPacketSideData {
* The side data is always allocated with av_malloc(), copied by
* av_packet_ref() and freed by av_packet_unref().
*
* sizeof(AVPacket) being a part of the public ABI is deprecated. once
* av_init_packet() is removed, new packets will only be able to be allocated
* with av_packet_alloc(), and new fields may be added to the end of the struct
* with a minor bump.
*
* @see av_packet_alloc
* @see av_packet_ref
* @see av_packet_unref
*/
@ -385,6 +399,14 @@ typedef struct AVPacket {
#endif
} AVPacket;
#if FF_API_INIT_PACKET
attribute_deprecated
typedef struct AVPacketList {
AVPacket pkt;
struct AVPacketList *next;
} AVPacketList;
#endif
#define AV_PKT_FLAG_KEY 0x0001 ///< The packet contains a keyframe
#define AV_PKT_FLAG_CORRUPT 0x0002 ///< The packet content is corrupted
/**
@ -447,6 +469,7 @@ AVPacket *av_packet_clone(const AVPacket *src);
*/
void av_packet_free(AVPacket **pkt);
#if FF_API_INIT_PACKET
/**
* Initialize optional fields of a packet with default values.
*
@ -454,8 +477,16 @@ void av_packet_free(AVPacket **pkt);
* initialized separately.
*
* @param pkt packet
*
* @see av_packet_alloc
* @see av_packet_unref
*
* @deprecated This function is deprecated. Once it's removed,
sizeof(AVPacket) will not be a part of the ABI anymore.
*/
attribute_deprecated
void av_init_packet(AVPacket *pkt);
#endif
/**
* Allocate the payload of a packet and initialize its fields with
@ -546,7 +577,11 @@ void av_free_packet(AVPacket *pkt);
* @return pointer to fresh allocated data or NULL otherwise
*/
uint8_t* av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type,
#if FF_API_BUFFER_SIZE_T
int size);
#else
size_t size);
#endif
/**
* Wrap an existing array as a packet side data.
@ -573,18 +608,27 @@ int av_packet_add_side_data(AVPacket *pkt, enum AVPacketSideDataType type,
* @return 0 on success, < 0 on failure
*/
int av_packet_shrink_side_data(AVPacket *pkt, enum AVPacketSideDataType type,
#if FF_API_BUFFER_SIZE_T
int size);
#else
size_t size);
#endif
/**
* Get side information from packet.
*
* @param pkt packet
* @param type desired side information type
* @param size pointer for side information size to store (optional)
* @param size If supplied, *size will be set to the size of the side data
* or to zero if the desired side data is not present.
* @return pointer to data if present or NULL otherwise
*/
uint8_t* av_packet_get_side_data(const AVPacket *pkt, enum AVPacketSideDataType type,
#if FF_API_BUFFER_SIZE_T
int *size);
#else
size_t *size);
#endif
#if FF_API_MERGE_SD_API
attribute_deprecated
@ -603,7 +647,11 @@ const char *av_packet_side_data_name(enum AVPacketSideDataType type);
* @param size pointer to store the size of the returned data
* @return pointer to data if successful, NULL otherwise
*/
#if FF_API_BUFFER_SIZE_T
uint8_t *av_packet_pack_dictionary(AVDictionary *dict, int *size);
#else
uint8_t *av_packet_pack_dictionary(AVDictionary *dict, size_t *size);
#endif
/**
* Unpack a dictionary from side_data.
*
@ -612,8 +660,12 @@ uint8_t *av_packet_pack_dictionary(AVDictionary *dict, int *size);
* @param dict the metadata storage dictionary
* @return 0 on success, < 0 on failure
*/
#if FF_API_BUFFER_SIZE_T
int av_packet_unpack_dictionary(const uint8_t *data, int size, AVDictionary **dict);
#else
int av_packet_unpack_dictionary(const uint8_t *data, size_t size,
AVDictionary **dict);
#endif
/**
* Convenience function to free all the side data stored.

View file

@ -28,7 +28,7 @@
#include "libavutil/version.h"
#define LIBAVCODEC_VERSION_MAJOR 58
#define LIBAVCODEC_VERSION_MINOR 91
#define LIBAVCODEC_VERSION_MINOR 134
#define LIBAVCODEC_VERSION_MICRO 100
#define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \
@ -51,12 +51,6 @@
* at once through the bump. This improves the git bisect-ability of the change.
*/
#ifndef FF_API_LOWRES
#define FF_API_LOWRES (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_DEBUG_MV
#define FF_API_DEBUG_MV (LIBAVCODEC_VERSION_MAJOR < 58)
#endif
#ifndef FF_API_AVCTX_TIMEBASE
#define FF_API_AVCTX_TIMEBASE (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
@ -144,6 +138,35 @@
#ifndef FF_API_UNUSED_CODEC_CAPS
#define FF_API_UNUSED_CODEC_CAPS (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_AVPRIV_PUT_BITS
#define FF_API_AVPRIV_PUT_BITS (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_OLD_ENCDEC
#define FF_API_OLD_ENCDEC (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_AVCODEC_PIX_FMT
#define FF_API_AVCODEC_PIX_FMT (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_MPV_RC_STRATEGY
#define FF_API_MPV_RC_STRATEGY (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_PARSER_CHANGE
#define FF_API_PARSER_CHANGE (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_THREAD_SAFE_CALLBACKS
#define FF_API_THREAD_SAFE_CALLBACKS (LIBAVCODEC_VERSION_MAJOR < 60)
#endif
#ifndef FF_API_DEBUG_MV
#define FF_API_DEBUG_MV (LIBAVCODEC_VERSION_MAJOR < 60)
#endif
#ifndef FF_API_GET_FRAME_CLASS
#define FF_API_GET_FRAME_CLASS (LIBAVCODEC_VERSION_MAJOR < 60)
#endif
#ifndef FF_API_AUTO_THREADS
#define FF_API_AUTO_THREADS (LIBAVCODEC_VERSION_MAJOR < 60)
#endif
#ifndef FF_API_INIT_PACKET
#define FF_API_INIT_PACKET (LIBAVCODEC_VERSION_MAJOR < 60)
#endif
#endif /* AVCODEC_VERSION_H */

View file

@ -356,7 +356,7 @@ struct AVDeviceCapabilitiesQuery;
* sorting will have '-sort' appended. E.g. artist="The Beatles",
* artist-sort="Beatles, The".
* - Some protocols and demuxers support metadata updates. After a successful
* call to av_read_packet(), AVFormatContext.event_flags or AVStream.event_flags
* call to av_read_frame(), AVFormatContext.event_flags or AVStream.event_flags
* will be updated to indicate if metadata changed. In order to detect metadata
* changes on a stream, you need to loop through all streams in the AVFormatContext
* and check their individual event_flags.
@ -534,7 +534,9 @@ typedef struct AVOutputFormat {
#else
#define ff_const59 const
#endif
#if FF_API_NEXT
ff_const59 struct AVOutputFormat *next;
#endif
/**
* size of private data so that it can be allocated in the wrapper
*/
@ -588,6 +590,7 @@ typedef struct AVOutputFormat {
* @see avdevice_list_devices() for more details.
*/
int (*get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list);
#if LIBAVFORMAT_VERSION_MAJOR < 59
/**
* Initialize device capabilities submodule.
* @see avdevice_capabilities_create() for more details.
@ -598,6 +601,7 @@ typedef struct AVOutputFormat {
* @see avdevice_capabilities_free() for more details.
*/
int (*free_device_capabilities)(struct AVFormatContext *s, struct AVDeviceCapabilitiesQuery *caps);
#endif
enum AVCodecID data_codec; /**< default data codec */
/**
* Initialize format. May allocate data here, and set any AVFormatContext or
@ -679,7 +683,9 @@ typedef struct AVInputFormat {
* New public fields should be added right above.
*****************************************************************
*/
#if FF_API_NEXT
ff_const59 struct AVInputFormat *next;
#endif
/**
* Raw demuxers store their codec ID here.
@ -765,6 +771,7 @@ typedef struct AVInputFormat {
*/
int (*get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list);
#if LIBAVFORMAT_VERSION_MAJOR < 59
/**
* Initialize device capabilities submodule.
* @see avdevice_capabilities_create() for more details.
@ -776,6 +783,7 @@ typedef struct AVInputFormat {
* @see avdevice_capabilities_free() for more details.
*/
int (*free_device_capabilities)(struct AVFormatContext *s, struct AVDeviceCapabilitiesQuery *caps);
#endif
} AVInputFormat;
/**
* @}
@ -973,12 +981,30 @@ typedef struct AVStream {
int nb_side_data;
/**
* Flags for the user to detect events happening on the stream. Flags must
* be cleared by the user once the event has been handled.
* A combination of AVSTREAM_EVENT_FLAG_*.
* Flags indicating events happening on the stream, a combination of
* AVSTREAM_EVENT_FLAG_*.
*
* - demuxing: may be set by the demuxer in avformat_open_input(),
* avformat_find_stream_info() and av_read_frame(). Flags must be cleared
* by the user once the event has been handled.
* - muxing: may be set by the user after avformat_write_header(). to
* indicate a user-triggered event. The muxer will clear the flags for
* events it has handled in av_[interleaved]_write_frame().
*/
int event_flags;
#define AVSTREAM_EVENT_FLAG_METADATA_UPDATED 0x0001 ///< The call resulted in updated metadata.
/**
* - demuxing: the demuxer read new metadata from the file and updated
* AVStream.metadata accordingly
* - muxing: the user updated AVStream.metadata and wishes the muxer to write
* it into the file
*/
#define AVSTREAM_EVENT_FLAG_METADATA_UPDATED 0x0001
/**
* - demuxing: new packets for this stream were read from the file. This
* event is informational only and does not guarantee that new packets
* for this stream will necessarily be returned from av_read_frame().
*/
#define AVSTREAM_EVENT_FLAG_NEW_PACKETS (1 << 1)
/**
* Real base framerate of the stream.
@ -1023,38 +1049,10 @@ typedef struct AVStream {
*****************************************************************
*/
#define MAX_STD_TIMEBASES (30*12+30+3+6)
/**
* Stream information used internally by avformat_find_stream_info()
*/
struct {
int64_t last_dts;
int64_t duration_gcd;
int duration_count;
int64_t rfps_duration_sum;
double (*duration_error)[2][MAX_STD_TIMEBASES];
int64_t codec_info_duration;
int64_t codec_info_duration_fields;
int frame_delay_evidence;
/**
* 0 -> decoder has not been searched for yet.
* >0 -> decoder found
* <0 -> decoder with codec_id == -found_decoder has not been found
*/
int found_decoder;
int64_t last_duration;
/**
* Those are used for average framerate estimation.
*/
int64_t fps_first_dts;
int fps_first_dts_idx;
int64_t fps_last_dts;
int fps_last_dts_idx;
} *info;
#if LIBAVFORMAT_VERSION_MAJOR < 59
// kept for ABI compatibility only, do not access in any way
void *unused;
#endif
int pts_wrap_bits; /**< number of bits in pts (used for wrapping control) */
@ -1085,14 +1083,12 @@ typedef struct AVStream {
enum AVStreamParseType need_parsing;
struct AVCodecParserContext *parser;
/**
* last packet in packet_buffer for this stream when muxing.
*/
struct AVPacketList *last_in_packet_buffer;
AVProbeData probe_data;
#define MAX_REORDER_DELAY 16
int64_t pts_buffer[MAX_REORDER_DELAY+1];
#if LIBAVFORMAT_VERSION_MAJOR < 59
// kept for ABI compatibility only, do not access in any way
void *unused7;
AVProbeData unused6;
int64_t unused5[16+1];
#endif
AVIndexEntry *index_entries; /**< Only used if the format does not
support seeking natively. */
int nb_index_entries;
@ -1105,115 +1101,12 @@ typedef struct AVStream {
*/
int stream_identifier;
/**
* Details of the MPEG-TS program which created this stream.
*/
int program_num;
int pmt_version;
int pmt_stream_idx;
int64_t interleaver_chunk_size;
int64_t interleaver_chunk_duration;
/**
* stream probing state
* -1 -> probing finished
* 0 -> no probing requested
* rest -> perform probing with request_probe being the minimum score to accept.
*/
int request_probe;
/**
* Indicates that everything up to the next keyframe
* should be discarded.
*/
int skip_to_keyframe;
/**
* Number of samples to skip at the start of the frame decoded from the next packet.
*/
int skip_samples;
/**
* If not 0, the number of samples that should be skipped from the start of
* the stream (the samples are removed from packets with pts==0, which also
* assumes negative timestamps do not happen).
* Intended for use with formats such as mp3 with ad-hoc gapless audio
* support.
*/
int64_t start_skip_samples;
/**
* If not 0, the first audio sample that should be discarded from the stream.
* This is broken by design (needs global sample count), but can't be
* avoided for broken by design formats such as mp3 with ad-hoc gapless
* audio support.
*/
int64_t first_discard_sample;
/**
* The sample after last sample that is intended to be discarded after
* first_discard_sample. Works on frame boundaries only. Used to prevent
* early EOF if the gapless info is broken (considered concatenated mp3s).
*/
int64_t last_discard_sample;
/**
* Number of internally decoded frames, used internally in libavformat, do not access
* its lifetime differs from info which is why it is not in that structure.
*/
int nb_decoded_frames;
/**
* Timestamp offset added to timestamps before muxing
*/
int64_t mux_ts_offset;
/**
* Internal data to check for wrapping of the time stamp
*/
int64_t pts_wrap_reference;
/**
* Options for behavior, when a wrap is detected.
*
* Defined by AV_PTS_WRAP_ values.
*
* If correction is enabled, there are two possibilities:
* If the first time stamp is near the wrap point, the wrap offset
* will be subtracted, which will create negative time stamps.
* Otherwise the offset will be added.
*/
int pts_wrap_behavior;
/**
* Internal data to prevent doing update_initial_durations() twice
*/
int update_initial_durations_done;
/**
* Internal data to generate dts from pts
*/
int64_t pts_reorder_error[MAX_REORDER_DELAY+1];
uint8_t pts_reorder_error_count[MAX_REORDER_DELAY+1];
/**
* Internal data to analyze DTS and detect faulty mpeg streams
*/
int64_t last_dts_for_order_check;
uint8_t dts_ordered;
uint8_t dts_misordered;
/**
* Internal data to inject global side data
*/
int inject_global_side_data;
/**
* display aspect ratio (0 if unknown)
* - encoding: unused
* - decoding: Set by libavformat to calculate sample_aspect_ratio internally
*/
AVRational display_aspect_ratio;
#if LIBAVFORMAT_VERSION_MAJOR < 59
// kept for ABI compatibility only, do not access in any way
int unused8;
int unused9;
int unused10;
#endif
/**
* An opaque field for libavformat internal usage.
@ -1292,7 +1185,11 @@ typedef struct AVProgram {
change dynamically at runtime. */
typedef struct AVChapter {
#if FF_API_CHAPTER_ID_INT
int id; ///< unique ID to identify the chapter
#else
int64_t id; ///< unique ID to identify the chapter
#endif
AVRational time_base; ///< time base in which the start/end timestamps are specified
int64_t start, end; ///< chapter start/end time in time_base units
AVDictionary *metadata;
@ -1487,7 +1384,9 @@ typedef struct AVFormatContext {
#define AVFMT_FLAG_MP4A_LATM 0x8000 ///< Deprecated, does nothing.
#endif
#define AVFMT_FLAG_SORT_DTS 0x10000 ///< try to interleave outputted packets by dts (using this flag can slow demuxing down)
#define AVFMT_FLAG_PRIV_OPT 0x20000 ///< Enable use of private options by delaying codec open (this could be made default once all code is converted)
#if FF_API_LAVF_PRIV_OPT
#define AVFMT_FLAG_PRIV_OPT 0x20000 ///< Enable use of private options by delaying codec open (deprecated, will do nothing once av_demuxer_open() is removed)
#endif
#if FF_API_LAVF_KEEPSIDE_FLAG
#define AVFMT_FLAG_KEEP_SIDE_DATA 0x40000 ///< Deprecated, does nothing.
#endif
@ -1645,12 +1544,24 @@ typedef struct AVFormatContext {
int strict_std_compliance;
/**
* Flags for the user to detect events happening on the file. Flags must
* be cleared by the user once the event has been handled.
* A combination of AVFMT_EVENT_FLAG_*.
* Flags indicating events happening on the file, a combination of
* AVFMT_EVENT_FLAG_*.
*
* - demuxing: may be set by the demuxer in avformat_open_input(),
* avformat_find_stream_info() and av_read_frame(). Flags must be cleared
* by the user once the event has been handled.
* - muxing: may be set by the user after avformat_write_header() to
* indicate a user-triggered event. The muxer will clear the flags for
* events it has handled in av_[interleaved]_write_frame().
*/
int event_flags;
#define AVFMT_EVENT_FLAG_METADATA_UPDATED 0x0001 ///< The call resulted in updated metadata.
/**
* - demuxing: the demuxer read new metadata from the file and updated
* AVFormatContext.metadata accordingly
* - muxing: the user updated AVFormatContext.metadata and wishes the muxer to
* write it into the file
*/
#define AVFMT_EVENT_FLAG_METADATA_UPDATED 0x0001
/**
* Maximum number of packets to read while waiting for the first timestamp.
@ -2007,12 +1918,6 @@ void av_format_inject_global_side_data(AVFormatContext *s);
*/
enum AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method(const AVFormatContext* ctx);
typedef struct AVPacketList {
AVPacket pkt;
struct AVPacketList *next;
} AVPacketList;
/**
* @defgroup lavf_core Core functions
* @ingroup libavf
@ -2183,17 +2088,26 @@ int av_stream_add_side_data(AVStream *st, enum AVPacketSideDataType type,
* @return pointer to fresh allocated data or NULL otherwise
*/
uint8_t *av_stream_new_side_data(AVStream *stream,
#if FF_API_BUFFER_SIZE_T
enum AVPacketSideDataType type, int size);
#else
enum AVPacketSideDataType type, size_t size);
#endif
/**
* Get side information from stream.
*
* @param stream stream
* @param type desired side information type
* @param size pointer for side information size to store (optional)
* @param size If supplied, *size will be set to the size of the side data
* or to zero if the desired side data is not present.
* @return pointer to data if present or NULL otherwise
*/
uint8_t *av_stream_get_side_data(const AVStream *stream,
#if FF_API_BUFFER_SIZE_T
enum AVPacketSideDataType type, int *size);
#else
enum AVPacketSideDataType type, size_t *size);
#endif
AVProgram *av_new_program(AVFormatContext *s, int id);
@ -2311,8 +2225,13 @@ int av_probe_input_buffer(AVIOContext *pb, ff_const59 AVInputFormat **fmt,
*/
int avformat_open_input(AVFormatContext **ps, const char *url, ff_const59 AVInputFormat *fmt, AVDictionary **options);
#if FF_API_DEMUXER_OPEN
/**
* @deprecated Use an AVDictionary to pass options to a demuxer.
*/
attribute_deprecated
int av_demuxer_open(AVFormatContext *ic);
#endif
/**
* Read packets of a media file to get stream information. This

View file

@ -32,7 +32,7 @@
// Major bumping may affect Ticket5467, 5421, 5451(compatibility with Chromium)
// Also please add any ticket numbers that you believe might be affected here
#define LIBAVFORMAT_VERSION_MAJOR 58
#define LIBAVFORMAT_VERSION_MINOR 45
#define LIBAVFORMAT_VERSION_MINOR 76
#define LIBAVFORMAT_VERSION_MICRO 100
#define LIBAVFORMAT_VERSION_INT AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, \
@ -106,6 +106,15 @@
#ifndef FF_API_AVIOFORMAT
#define FF_API_AVIOFORMAT (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_DEMUXER_OPEN
#define FF_API_DEMUXER_OPEN (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_CHAPTER_ID_INT
#define FF_API_CHAPTER_ID_INT (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_LAVF_PRIV_OPT
#define FF_API_LAVF_PRIV_OPT (LIBAVFORMAT_VERSION_MAJOR < 60)
#endif
#ifndef FF_API_R_FRAME_RATE

View file

@ -27,8 +27,10 @@
#ifndef AVUTIL_ADLER32_H
#define AVUTIL_ADLER32_H
#include <stddef.h>
#include <stdint.h>
#include "attributes.h"
#include "version.h"
/**
* @defgroup lavu_adler32 Adler-32
@ -38,6 +40,12 @@
* @{
*/
#if FF_API_CRYPTO_SIZE_T
typedef unsigned long AVAdler;
#else
typedef uint32_t AVAdler;
#endif
/**
* Calculate the Adler32 checksum of a buffer.
*
@ -50,8 +58,12 @@
* @param len size of input buffer
* @return updated checksum
*/
unsigned long av_adler32_update(unsigned long adler, const uint8_t *buf,
unsigned int len) av_pure;
AVAdler av_adler32_update(AVAdler adler, const uint8_t *buf,
#if FF_API_CRYPTO_SIZE_T
unsigned int len) av_pure;
#else
size_t len) av_pure;
#endif
/**
* @}

View file

@ -24,6 +24,7 @@
#include <stddef.h>
#include <stdint.h>
#include "attributes.h"
#include "version.h"
/**
* @addtogroup lavu_string
@ -155,10 +156,14 @@ static inline size_t av_strnlen(const char *s, size_t len)
*/
char *av_asprintf(const char *fmt, ...) av_printf_format(1, 2);
#if FF_API_D2STR
/**
* Convert a number to an av_malloced string.
* @deprecated use av_asprintf() with "%f" or a more specific format
*/
attribute_deprecated
char *av_d2str(double d);
#endif
/**
* Unescape the given string until a non escaped terminating char,
@ -319,6 +324,7 @@ enum AVEscapeMode {
AV_ESCAPE_MODE_AUTO, ///< Use auto-selected escaping mode.
AV_ESCAPE_MODE_BACKSLASH, ///< Use backslash escaping.
AV_ESCAPE_MODE_QUOTE, ///< Use single-quote escaping.
AV_ESCAPE_MODE_XML, ///< Use XML non-markup character data escaping.
};
/**
@ -338,6 +344,19 @@ enum AVEscapeMode {
*/
#define AV_ESCAPE_FLAG_STRICT (1 << 1)
/**
* Within AV_ESCAPE_MODE_XML, additionally escape single quotes for single
* quoted attributes.
*/
#define AV_ESCAPE_FLAG_XML_SINGLE_QUOTES (1 << 2)
/**
* Within AV_ESCAPE_MODE_XML, additionally escape double quotes for double
* quoted attributes.
*/
#define AV_ESCAPE_FLAG_XML_DOUBLE_QUOTES (1 << 3)
/**
* Escape string in src, and put the escaped string in an allocated
* string in *dst, which must be freed with av_free().

View file

@ -25,8 +25,11 @@
#ifndef AVUTIL_BUFFER_H
#define AVUTIL_BUFFER_H
#include <stddef.h>
#include <stdint.h>
#include "version.h"
/**
* @defgroup lavu_buffer AVBuffer
* @ingroup lavu_data
@ -90,7 +93,11 @@ typedef struct AVBufferRef {
/**
* Size of data in bytes.
*/
#if FF_API_BUFFER_SIZE_T
int size;
#else
size_t size;
#endif
} AVBufferRef;
/**
@ -98,13 +105,21 @@ typedef struct AVBufferRef {
*
* @return an AVBufferRef of given size or NULL when out of memory
*/
#if FF_API_BUFFER_SIZE_T
AVBufferRef *av_buffer_alloc(int size);
#else
AVBufferRef *av_buffer_alloc(size_t size);
#endif
/**
* Same as av_buffer_alloc(), except the returned buffer will be initialized
* to zero.
*/
#if FF_API_BUFFER_SIZE_T
AVBufferRef *av_buffer_allocz(int size);
#else
AVBufferRef *av_buffer_allocz(size_t size);
#endif
/**
* Always treat the buffer as read-only, even when it has only one
@ -127,7 +142,11 @@ AVBufferRef *av_buffer_allocz(int size);
*
* @return an AVBufferRef referring to data on success, NULL on failure.
*/
#if FF_API_BUFFER_SIZE_T
AVBufferRef *av_buffer_create(uint8_t *data, int size,
#else
AVBufferRef *av_buffer_create(uint8_t *data, size_t size,
#endif
void (*free)(void *opaque, uint8_t *data),
void *opaque, int flags);
@ -195,7 +214,27 @@ int av_buffer_make_writable(AVBufferRef **buf);
* reference to it (i.e. the one passed to this function). In all other cases
* a new buffer is allocated and the data is copied.
*/
#if FF_API_BUFFER_SIZE_T
int av_buffer_realloc(AVBufferRef **buf, int size);
#else
int av_buffer_realloc(AVBufferRef **buf, size_t size);
#endif
/**
* Ensure dst refers to the same data as src.
*
* When *dst is already equivalent to src, do nothing. Otherwise unreference dst
* and replace it with a new reference to src.
*
* @param dst Pointer to either a valid buffer reference or NULL. On success,
* this will point to a buffer reference equivalent to src. On
* failure, dst will be left untouched.
* @param src A buffer reference to replace dst with. May be NULL, then this
* function is equivalent to av_buffer_unref(dst).
* @return 0 on success
* AVERROR(ENOMEM) on memory allocation failure.
*/
int av_buffer_replace(AVBufferRef **dst, AVBufferRef *src);
/**
* @}
@ -246,7 +285,11 @@ typedef struct AVBufferPool AVBufferPool;
* (av_buffer_alloc()).
* @return newly created buffer pool on success, NULL on error.
*/
#if FF_API_BUFFER_SIZE_T
AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size));
#else
AVBufferPool *av_buffer_pool_init(size_t size, AVBufferRef* (*alloc)(size_t size));
#endif
/**
* Allocate and initialize a buffer pool with a more complex allocator.
@ -263,8 +306,13 @@ AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size));
* data. May be NULL.
* @return newly created buffer pool on success, NULL on error.
*/
#if FF_API_BUFFER_SIZE_T
AVBufferPool *av_buffer_pool_init2(int size, void *opaque,
AVBufferRef* (*alloc)(void *opaque, int size),
#else
AVBufferPool *av_buffer_pool_init2(size_t size, void *opaque,
AVBufferRef* (*alloc)(void *opaque, size_t size),
#endif
void (*pool_free)(void *opaque));
/**

View file

@ -71,6 +71,11 @@
#define AV_CH_SURROUND_DIRECT_LEFT 0x0000000200000000ULL
#define AV_CH_SURROUND_DIRECT_RIGHT 0x0000000400000000ULL
#define AV_CH_LOW_FREQUENCY_2 0x0000000800000000ULL
#define AV_CH_TOP_SIDE_LEFT 0x0000001000000000ULL
#define AV_CH_TOP_SIDE_RIGHT 0x0000002000000000ULL
#define AV_CH_BOTTOM_FRONT_CENTER 0x0000004000000000ULL
#define AV_CH_BOTTOM_FRONT_LEFT 0x0000008000000000ULL
#define AV_CH_BOTTOM_FRONT_RIGHT 0x0000010000000000ULL
/** Channel mask value used for AVCodecContext.request_channel_layout
to indicate that the user requests the channel order of the decoder output
@ -110,6 +115,7 @@
#define AV_CH_LAYOUT_OCTAGONAL (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_CENTER|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_HEXADECAGONAL (AV_CH_LAYOUT_OCTAGONAL|AV_CH_WIDE_LEFT|AV_CH_WIDE_RIGHT|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT)
#define AV_CH_LAYOUT_STEREO_DOWNMIX (AV_CH_STEREO_LEFT|AV_CH_STEREO_RIGHT)
#define AV_CH_LAYOUT_22POINT2 (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER|AV_CH_BACK_CENTER|AV_CH_LOW_FREQUENCY_2|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_CENTER|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_SIDE_LEFT|AV_CH_TOP_SIDE_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_BOTTOM_FRONT_CENTER|AV_CH_BOTTOM_FRONT_LEFT|AV_CH_BOTTOM_FRONT_RIGHT)
enum AVMatrixEncoding {
AV_MATRIX_ENCODING_NONE,

View file

@ -80,6 +80,15 @@
*/
#define FFNABS(a) ((a) <= 0 ? (a) : (-(a)))
/**
* Unsigned Absolute value.
* This takes the absolute value of a signed int and returns it as a unsigned.
* This also works with INT_MIN which would otherwise not be representable
* As with many macros, this evaluates its argument twice.
*/
#define FFABSU(a) ((a) <= 0 ? -(unsigned)(a) : (unsigned)(a))
#define FFABS64U(a) ((a) <= 0 ? -(uint64_t)(a) : (uint64_t)(a))
/**
* Comparator.
* For two numerical expressions x and y, gives 1 if x > y, -1 if x < y, and 0
@ -106,8 +115,72 @@
# include "intmath.h"
#endif
/* Pull in unguarded fallback defines at the end of this file. */
#include "common.h"
#ifndef av_ceil_log2
# define av_ceil_log2 av_ceil_log2_c
#endif
#ifndef av_clip
# define av_clip av_clip_c
#endif
#ifndef av_clip64
# define av_clip64 av_clip64_c
#endif
#ifndef av_clip_uint8
# define av_clip_uint8 av_clip_uint8_c
#endif
#ifndef av_clip_int8
# define av_clip_int8 av_clip_int8_c
#endif
#ifndef av_clip_uint16
# define av_clip_uint16 av_clip_uint16_c
#endif
#ifndef av_clip_int16
# define av_clip_int16 av_clip_int16_c
#endif
#ifndef av_clipl_int32
# define av_clipl_int32 av_clipl_int32_c
#endif
#ifndef av_clip_intp2
# define av_clip_intp2 av_clip_intp2_c
#endif
#ifndef av_clip_uintp2
# define av_clip_uintp2 av_clip_uintp2_c
#endif
#ifndef av_mod_uintp2
# define av_mod_uintp2 av_mod_uintp2_c
#endif
#ifndef av_sat_add32
# define av_sat_add32 av_sat_add32_c
#endif
#ifndef av_sat_dadd32
# define av_sat_dadd32 av_sat_dadd32_c
#endif
#ifndef av_sat_sub32
# define av_sat_sub32 av_sat_sub32_c
#endif
#ifndef av_sat_dsub32
# define av_sat_dsub32 av_sat_dsub32_c
#endif
#ifndef av_sat_add64
# define av_sat_add64 av_sat_add64_c
#endif
#ifndef av_sat_sub64
# define av_sat_sub64 av_sat_sub64_c
#endif
#ifndef av_clipf
# define av_clipf av_clipf_c
#endif
#ifndef av_clipd
# define av_clipd av_clipd_c
#endif
#ifndef av_popcount
# define av_popcount av_popcount_c
#endif
#ifndef av_popcount64
# define av_popcount64 av_popcount64_c
#endif
#ifndef av_parity
# define av_parity av_parity_c
#endif
#ifndef av_log2
av_const int av_log2(unsigned v);
@ -303,11 +376,10 @@ static av_always_inline int64_t av_sat_add64_c(int64_t a, int64_t b) {
int64_t tmp;
return !__builtin_add_overflow(a, b, &tmp) ? tmp : (tmp < 0 ? INT64_MAX : INT64_MIN);
#else
if (b >= 0 && a >= INT64_MAX - b)
return INT64_MAX;
if (b <= 0 && a <= INT64_MIN - b)
return INT64_MIN;
return a + b;
int64_t s = a+(uint64_t)b;
if ((int64_t)(a^b | ~s^b) >= 0)
return INT64_MAX ^ (b >> 63);
return s;
#endif
}
@ -534,75 +606,3 @@ static av_always_inline av_const int av_parity_c(uint32_t v)
#endif /* HAVE_AV_CONFIG_H */
#endif /* AVUTIL_COMMON_H */
/*
* The following definitions are outside the multiple inclusion guard
* to ensure they are immediately available in intmath.h.
*/
#ifndef av_ceil_log2
# define av_ceil_log2 av_ceil_log2_c
#endif
#ifndef av_clip
# define av_clip av_clip_c
#endif
#ifndef av_clip64
# define av_clip64 av_clip64_c
#endif
#ifndef av_clip_uint8
# define av_clip_uint8 av_clip_uint8_c
#endif
#ifndef av_clip_int8
# define av_clip_int8 av_clip_int8_c
#endif
#ifndef av_clip_uint16
# define av_clip_uint16 av_clip_uint16_c
#endif
#ifndef av_clip_int16
# define av_clip_int16 av_clip_int16_c
#endif
#ifndef av_clipl_int32
# define av_clipl_int32 av_clipl_int32_c
#endif
#ifndef av_clip_intp2
# define av_clip_intp2 av_clip_intp2_c
#endif
#ifndef av_clip_uintp2
# define av_clip_uintp2 av_clip_uintp2_c
#endif
#ifndef av_mod_uintp2
# define av_mod_uintp2 av_mod_uintp2_c
#endif
#ifndef av_sat_add32
# define av_sat_add32 av_sat_add32_c
#endif
#ifndef av_sat_dadd32
# define av_sat_dadd32 av_sat_dadd32_c
#endif
#ifndef av_sat_sub32
# define av_sat_sub32 av_sat_sub32_c
#endif
#ifndef av_sat_dsub32
# define av_sat_dsub32 av_sat_dsub32_c
#endif
#ifndef av_sat_add64
# define av_sat_add64 av_sat_add64_c
#endif
#ifndef av_sat_sub64
# define av_sat_sub64 av_sat_sub64_c
#endif
#ifndef av_clipf
# define av_clipf av_clipf_c
#endif
#ifndef av_clipd
# define av_clipd av_clipd_c
#endif
#ifndef av_popcount
# define av_popcount av_popcount_c
#endif
#ifndef av_popcount64
# define av_popcount64 av_popcount64_c
#endif
#ifndef av_parity
# define av_parity av_parity_c
#endif

View file

@ -71,6 +71,9 @@
#define AV_CPU_FLAG_VFP_VM (1 << 7) ///< VFPv2 vector mode, deprecated in ARMv7-A and unavailable in various CPUs implementations
#define AV_CPU_FLAG_SETEND (1 <<16)
#define AV_CPU_FLAG_MMI (1 << 0)
#define AV_CPU_FLAG_MSA (1 << 1)
/**
* Return the flags which specify extensions supported by the CPU.
* The returned value is affected by av_force_cpu_flags() if that was used

View file

@ -1,5 +1,5 @@
/* Automatically generated by version.sh, do not manually edit! */
#ifndef AVUTIL_FFVERSION_H
#define AVUTIL_FFVERSION_H
#define FFMPEG_VERSION "4.3.2"
#define FFMPEG_VERSION "4.4.3"
#endif /* AVUTIL_FFVERSION_H */

View file

@ -0,0 +1,168 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_FILM_GRAIN_PARAMS_H
#define AVUTIL_FILM_GRAIN_PARAMS_H
#include "frame.h"
enum AVFilmGrainParamsType {
AV_FILM_GRAIN_PARAMS_NONE = 0,
/**
* The union is valid when interpreted as AVFilmGrainAOMParams (codec.aom)
*/
AV_FILM_GRAIN_PARAMS_AV1,
};
/**
* This structure describes how to handle film grain synthesis for AOM codecs.
*
* @note The struct must be allocated as part of AVFilmGrainParams using
* av_film_grain_params_alloc(). Its size is not a part of the public ABI.
*/
typedef struct AVFilmGrainAOMParams {
/**
* Number of points, and the scale and value for each point of the
* piecewise linear scaling function for the uma plane.
*/
int num_y_points;
uint8_t y_points[14][2 /* value, scaling */];
/**
* Signals whether to derive the chroma scaling function from the luma.
* Not equivalent to copying the luma values and scales.
*/
int chroma_scaling_from_luma;
/**
* If chroma_scaling_from_luma is set to 0, signals the chroma scaling
* function parameters.
*/
int num_uv_points[2 /* cb, cr */];
uint8_t uv_points[2 /* cb, cr */][10][2 /* value, scaling */];
/**
* Specifies the shift applied to the chroma components. For AV1, its within
* [8; 11] and determines the range and quantization of the film grain.
*/
int scaling_shift;
/**
* Specifies the auto-regression lag.
*/
int ar_coeff_lag;
/**
* Luma auto-regression coefficients. The number of coefficients is given by
* 2 * ar_coeff_lag * (ar_coeff_lag + 1).
*/
int8_t ar_coeffs_y[24];
/**
* Chroma auto-regression coefficients. The number of coefficients is given by
* 2 * ar_coeff_lag * (ar_coeff_lag + 1) + !!num_y_points.
*/
int8_t ar_coeffs_uv[2 /* cb, cr */][25];
/**
* Specifies the range of the auto-regressive coefficients. Values of 6,
* 7, 8 and so on represent a range of [-2, 2), [-1, 1), [-0.5, 0.5) and
* so on. For AV1 must be between 6 and 9.
*/
int ar_coeff_shift;
/**
* Signals the down shift applied to the generated gaussian numbers during
* synthesis.
*/
int grain_scale_shift;
/**
* Specifies the luma/chroma multipliers for the index to the component
* scaling function.
*/
int uv_mult[2 /* cb, cr */];
int uv_mult_luma[2 /* cb, cr */];
/**
* Offset used for component scaling function. For AV1 its a 9-bit value
* with a range [-256, 255]
*/
int uv_offset[2 /* cb, cr */];
/**
* Signals whether to overlap film grain blocks.
*/
int overlap_flag;
/**
* Signals to clip to limited color levels after film grain application.
*/
int limit_output_range;
} AVFilmGrainAOMParams;
/**
* This structure describes how to handle film grain synthesis in video
* for specific codecs. Must be present on every frame where film grain is
* meant to be synthesised for correct presentation.
*
* @note The struct must be allocated with av_film_grain_params_alloc() and
* its size is not a part of the public ABI.
*/
typedef struct AVFilmGrainParams {
/**
* Specifies the codec for which this structure is valid.
*/
enum AVFilmGrainParamsType type;
/**
* Seed to use for the synthesis process, if the codec allows for it.
*/
uint64_t seed;
/**
* Additional fields may be added both here and in any structure included.
* If a codec's film grain structure differs slightly over another
* codec's, fields within may change meaning depending on the type.
*/
union {
AVFilmGrainAOMParams aom;
} codec;
} AVFilmGrainParams;
/**
* Allocate an AVFilmGrainParams structure and set its fields to
* default values. The resulting struct can be freed using av_freep().
* If size is not NULL it will be set to the number of bytes allocated.
*
* @return An AVFilmGrainParams filled with default values or NULL
* on failure.
*/
AVFilmGrainParams *av_film_grain_params_alloc(size_t *size);
/**
* Allocate a complete AVFilmGrainParams and add it to the frame.
*
* @param frame The frame which side data is added to.
*
* @return The AVFilmGrainParams structure to be filled by caller.
*/
AVFilmGrainParams *av_film_grain_params_create_side_data(AVFrame *frame);
#endif /* AVUTIL_FILM_GRAIN_PARAMS_H */

View file

@ -162,8 +162,8 @@ enum AVFrameSideDataType {
/**
* Timecode which conforms to SMPTE ST 12-1. The data is an array of 4 uint32_t
* where the first uint32_t describes how many (1-3) of the other timecodes are used.
* The timecode format is described in the av_timecode_get_smpte_from_framenum()
* function in libavutil/timecode.c.
* The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum()
* function in libavutil/timecode.h.
*/
AV_FRAME_DATA_S12M_TIMECODE,
@ -184,6 +184,20 @@ enum AVFrameSideDataType {
* Encoding parameters for a video frame, as described by AVVideoEncParams.
*/
AV_FRAME_DATA_VIDEO_ENC_PARAMS,
/**
* User data unregistered metadata associated with a video frame.
* This is the H.26[45] UDU SEI message, and shouldn't be used for any other purpose
* The data is stored as uint8_t in AVFrameSideData.data which is 16 bytes of
* uuid_iso_iec_11578 followed by AVFrameSideData.size - 16 bytes of user_data_payload_byte.
*/
AV_FRAME_DATA_SEI_UNREGISTERED,
/**
* Film grain parameters for a frame, described by AVFilmGrainParams.
* Must be present for every frame which should have film grain applied.
*/
AV_FRAME_DATA_FILM_GRAIN_PARAMS,
};
enum AVActiveFormatDescription {
@ -206,7 +220,11 @@ enum AVActiveFormatDescription {
typedef struct AVFrameSideData {
enum AVFrameSideDataType type;
uint8_t *data;
#if FF_API_BUFFER_SIZE_T
int size;
#else
size_t size;
#endif
AVDictionary *metadata;
AVBufferRef *buf;
} AVFrameSideData;
@ -899,7 +917,11 @@ AVBufferRef *av_frame_get_plane_buffer(AVFrame *frame, int plane);
*/
AVFrameSideData *av_frame_new_side_data(AVFrame *frame,
enum AVFrameSideDataType type,
#if FF_API_BUFFER_SIZE_T
int size);
#else
size_t size);
#endif
/**
* Add a new side data to a frame from an existing AVBufferRef

View file

@ -27,6 +27,7 @@
#ifndef AVUTIL_HASH_H
#define AVUTIL_HASH_H
#include <stddef.h>
#include <stdint.h>
#include "version.h"

View file

@ -67,6 +67,20 @@ int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane);
*/
int av_image_fill_linesizes(int linesizes[4], enum AVPixelFormat pix_fmt, int width);
/**
* Fill plane sizes for an image with pixel format pix_fmt and height height.
*
* @param size the array to be filled with the size of each image plane
* @param linesizes the array containing the linesize for each
* plane, should be filled by av_image_fill_linesizes()
* @return >= 0 in case of success, a negative error code otherwise
*
* @note The linesize parameters have the type ptrdiff_t here, while they are
* int for av_image_fill_linesizes().
*/
int av_image_fill_plane_sizes(size_t size[4], enum AVPixelFormat pix_fmt,
int height, const ptrdiff_t linesizes[4]);
/**
* Fill plane data pointers for an image with pixel format pix_fmt and
* height height.

View file

@ -112,6 +112,7 @@ typedef struct AVClass {
*/
void* (*child_next)(void *obj, void *prev);
#if FF_API_CHILD_CLASS_NEXT
/**
* Return an AVClass corresponding to the next potential
* AVOptions-enabled child.
@ -120,7 +121,9 @@ typedef struct AVClass {
* child_next iterates over _already existing_ objects, while
* child_class_next iterates over _all possible_ children.
*/
attribute_deprecated
const struct AVClass* (*child_class_next)(const struct AVClass *prev);
#endif
/**
* Category used for visualization (like color)
@ -140,6 +143,21 @@ typedef struct AVClass {
* available since version (52.12)
*/
int (*query_ranges)(struct AVOptionRanges **, void *obj, const char *key, int flags);
/**
* Iterate over the AVClasses corresponding to potential AVOptions-enabled
* children.
*
* @param iter pointer to opaque iteration state. The caller must initialize
* *iter to NULL before the first call.
* @return AVClass for the next AVOptions-enabled child or NULL if there are
* no more such children.
*
* @note The difference between child_next and this is that child_next
* iterates over _already existing_ objects, while child_class_iterate
* iterates over _all possible_ children.
*/
const struct AVClass* (*child_class_iterate)(void **iter);
} AVClass;
/**

View file

@ -0,0 +1,66 @@
/*
* LZO 1x decompression
* copyright (c) 2006 Reimar Doeffinger
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_LZO_H
#define AVUTIL_LZO_H
/**
* @defgroup lavu_lzo LZO
* @ingroup lavu_crypto
*
* @{
*/
#include <stdint.h>
/** @name Error flags returned by av_lzo1x_decode
* @{ */
/// end of the input buffer reached before decoding finished
#define AV_LZO_INPUT_DEPLETED 1
/// decoded data did not fit into output buffer
#define AV_LZO_OUTPUT_FULL 2
/// a reference to previously decoded data was wrong
#define AV_LZO_INVALID_BACKPTR 4
/// a non-specific error in the compressed bitstream
#define AV_LZO_ERROR 8
/** @} */
#define AV_LZO_INPUT_PADDING 8
#define AV_LZO_OUTPUT_PADDING 12
/**
* @brief Decodes LZO 1x compressed data.
* @param out output buffer
* @param outlen size of output buffer, number of bytes left are returned here
* @param in input buffer
* @param inlen size of input buffer, number of bytes left are returned here
* @return 0 on success, otherwise a combination of the error flags above
*
* Make sure all buffers are appropriately padded, in must provide
* AV_LZO_INPUT_PADDING, out must provide AV_LZO_OUTPUT_PADDING additional bytes.
*/
int av_lzo1x_decode(void *out, int *outlen, const void *in, int *inlen);
/**
* @}
*/
#endif /* AVUTIL_LZO_H */

View file

@ -33,6 +33,7 @@
#include "attributes.h"
#include "error.h"
#include "avutil.h"
#include "version.h"
/**
* @addtogroup lavu_mem
@ -49,6 +50,10 @@
* dealing with memory consistently possible on all platforms.
*
* @{
*/
#if FF_API_DECLARE_ALIGNED
/**
*
* @defgroup lavu_mem_macros Alignment Macros
* Helper macros for declaring aligned variables.
@ -125,6 +130,7 @@
/**
* @}
*/
#endif
/**
* @defgroup lavu_mem_attrs Function Attributes

View file

@ -27,6 +27,7 @@
#ifndef AVUTIL_MURMUR3_H
#define AVUTIL_MURMUR3_H
#include <stddef.h>
#include <stdint.h>
#include "version.h"

View file

@ -114,7 +114,7 @@
* libavcodec exports generic options, while its priv_data field exports
* codec-specific options). In such a case, it is possible to set up the
* parent struct to export a child's options. To do that, simply
* implement AVClass.child_next() and AVClass.child_class_next() in the
* implement AVClass.child_next() and AVClass.child_class_iterate() in the
* parent struct's AVClass.
* Assuming that the test_struct from above now also contains a
* child_struct field:
@ -143,23 +143,25 @@
* return t->child_struct;
* return NULL
* }
* const AVClass child_class_next(const AVClass *prev)
* const AVClass child_class_iterate(void **iter)
* {
* return prev ? NULL : &child_class;
* const AVClass *c = *iter ? NULL : &child_class;
* *iter = (void*)(uintptr_t)c;
* return c;
* }
* @endcode
* Putting child_next() and child_class_next() as defined above into
* Putting child_next() and child_class_iterate() as defined above into
* test_class will now make child_struct's options accessible through
* test_struct (again, proper setup as described above needs to be done on
* child_struct right after it is created).
*
* From the above example it might not be clear why both child_next()
* and child_class_next() are needed. The distinction is that child_next()
* iterates over actually existing objects, while child_class_next()
* and child_class_iterate() are needed. The distinction is that child_next()
* iterates over actually existing objects, while child_class_iterate()
* iterates over all possible child classes. E.g. if an AVCodecContext
* was initialized to use a codec which has private options, then its
* child_next() will return AVCodecContext.priv_data and finish
* iterating. OTOH child_class_next() on AVCodecContext.av_class will
* iterating. OTOH child_class_iterate() on AVCodecContext.av_class will
* iterate over all available codecs with private options.
*
* @subsection avoptions_implement_named_constants Named constants
@ -194,7 +196,7 @@
* For enumerating there are basically two cases. The first is when you want to
* get all options that may potentially exist on the struct and its children
* (e.g. when constructing documentation). In that case you should call
* av_opt_child_class_next() recursively on the parent struct's AVClass. The
* av_opt_child_class_iterate() recursively on the parent struct's AVClass. The
* second case is when you have an already initialized struct with all its
* children and you want to get all options that can be actually written or read
* from it. In that case you should call av_opt_child_next() recursively (and
@ -646,13 +648,26 @@ const AVOption *av_opt_next(const void *obj, const AVOption *prev);
*/
void *av_opt_child_next(void *obj, void *prev);
#if FF_API_CHILD_CLASS_NEXT
/**
* Iterate over potential AVOptions-enabled children of parent.
*
* @param prev result of a previous call to this function or NULL
* @return AVClass corresponding to next potential child or NULL
*
* @deprecated use av_opt_child_class_iterate
*/
attribute_deprecated
const AVClass *av_opt_child_class_next(const AVClass *parent, const AVClass *prev);
#endif
/**
* Iterate over potential AVOptions-enabled children of parent.
*
* @param iter a pointer where iteration state is stored.
* @return AVClass corresponding to next potential child or NULL
*/
const AVClass *av_opt_child_class_iterate(const AVClass *parent, void **iter);
/**
* @defgroup opt_set_funcs Option setting functions

View file

@ -147,6 +147,7 @@ typedef struct AVPixFmtDescriptor {
*/
#define AV_PIX_FMT_FLAG_RGB (1 << 5)
#if FF_API_PSEUDOPAL
/**
* The pixel format is "pseudo-paletted". This means that it contains a
* fixed palette in the 2nd plane but the palette is fixed/constant for each
@ -164,6 +165,7 @@ typedef struct AVPixFmtDescriptor {
* before the deprecation, though).
*/
#define AV_PIX_FMT_FLAG_PSEUDOPAL (1 << 6)
#endif
/**
* The pixel format has an alpha channel. This is set on all formats that

View file

@ -358,6 +358,8 @@ enum AVPixelFormat {
AV_PIX_FMT_Y210BE, ///< packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian
AV_PIX_FMT_Y210LE, ///< packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian
AV_PIX_FMT_X2RGB10LE, ///< packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined
AV_PIX_FMT_X2RGB10BE, ///< packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), big-endian, X=unused/undefined
AV_PIX_FMT_NB ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
};
@ -447,6 +449,7 @@ enum AVPixelFormat {
#define AV_PIX_FMT_P016 AV_PIX_FMT_NE(P016BE, P016LE)
#define AV_PIX_FMT_Y210 AV_PIX_FMT_NE(Y210BE, Y210LE)
#define AV_PIX_FMT_X2RGB10 AV_PIX_FMT_NE(X2RGB10BE, X2RGB10LE)
/**
* Chromaticity coordinates of the source primaries.
@ -527,12 +530,60 @@ enum AVColorSpace {
};
/**
* MPEG vs JPEG YUV range.
* Visual content value range.
*
* These values are based on definitions that can be found in multiple
* specifications, such as ITU-T BT.709 (3.4 - Quantization of RGB, luminance
* and colour-difference signals), ITU-T BT.2020 (Table 5 - Digital
* Representation) as well as ITU-T BT.2100 (Table 9 - Digital 10- and 12-bit
* integer representation). At the time of writing, the BT.2100 one is
* recommended, as it also defines the full range representation.
*
* Common definitions:
* - For RGB and luminance planes such as Y in YCbCr and I in ICtCp,
* 'E' is the original value in range of 0.0 to 1.0.
* - For chrominance planes such as Cb,Cr and Ct,Cp, 'E' is the original
* value in range of -0.5 to 0.5.
* - 'n' is the output bit depth.
* - For additional definitions such as rounding and clipping to valid n
* bit unsigned integer range, please refer to BT.2100 (Table 9).
*/
enum AVColorRange {
AVCOL_RANGE_UNSPECIFIED = 0,
AVCOL_RANGE_MPEG = 1, ///< the normal 219*2^(n-8) "MPEG" YUV ranges
AVCOL_RANGE_JPEG = 2, ///< the normal 2^n-1 "JPEG" YUV ranges
/**
* Narrow or limited range content.
*
* - For luminance planes:
*
* (219 * E + 16) * 2^(n-8)
*
* F.ex. the range of 16-235 for 8 bits
*
* - For chrominance planes:
*
* (224 * E + 128) * 2^(n-8)
*
* F.ex. the range of 16-240 for 8 bits
*/
AVCOL_RANGE_MPEG = 1,
/**
* Full range content.
*
* - For RGB and luminance planes:
*
* (2^n - 1) * E
*
* F.ex. the range of 0-255 for 8 bits
*
* - For chrominance planes:
*
* (2^n - 1) * E + 2^(n - 1)
*
* F.ex. the range of 1-255 for 8 bits
*/
AVCOL_RANGE_JPEG = 2,
AVCOL_RANGE_NB ///< Not part of ABI
};

View file

@ -28,6 +28,7 @@
#ifndef AVUTIL_RIPEMD_H
#define AVUTIL_RIPEMD_H
#include <stddef.h>
#include <stdint.h>
#include "attributes.h"

View file

@ -49,9 +49,9 @@ typedef struct {
* Adjust frame number for NTSC drop frame time code.
*
* @param framenum frame number to adjust
* @param fps frame per second, 30 or 60
* @param fps frame per second, multiples of 30
* @return adjusted frame number
* @warning adjustment is only valid in NTSC 29.97 and 59.94
* @warning adjustment is only valid for multiples of NTSC 29.97
*/
int av_timecode_adjust_ntsc_framenum2(int framenum, int fps);
@ -62,14 +62,39 @@ int av_timecode_adjust_ntsc_framenum2(int framenum, int fps);
* @param framenum frame number
* @return the SMPTE binary representation
*
* See SMPTE ST 314M-2005 Sec 4.4.2.2.1 "Time code pack (TC)"
* the format description as follows:
* bits 0-5: hours, in BCD(6bits)
* bits 6: BGF1
* bits 7: BGF2 (NTSC) or FIELD (PAL)
* bits 8-14: minutes, in BCD(7bits)
* bits 15: BGF0 (NTSC) or BGF2 (PAL)
* bits 16-22: seconds, in BCD(7bits)
* bits 23: FIELD (NTSC) or BGF0 (PAL)
* bits 24-29: frames, in BCD(6bits)
* bits 30: drop frame flag (0: non drop, 1: drop)
* bits 31: color frame flag (0: unsync mode, 1: sync mode)
* @note BCD numbers (6 or 7 bits): 4 or 5 lower bits for units, 2 higher bits for tens.
* @note Frame number adjustment is automatically done in case of drop timecode,
* you do NOT have to call av_timecode_adjust_ntsc_framenum2().
* @note The frame number is relative to tc->start.
* @note Color frame (CF), binary group flags (BGF) and biphase mark polarity
* correction (PC) bits are set to zero.
* @note Color frame (CF) and binary group flags (BGF) bits are set to zero.
*/
uint32_t av_timecode_get_smpte_from_framenum(const AVTimecode *tc, int framenum);
/**
* Convert sei info to SMPTE 12M binary representation.
*
* @param rate frame rate in rational form
* @param drop drop flag
* @param hh hour
* @param mm minute
* @param ss second
* @param ff frame number
* @return the SMPTE binary representation
*/
uint32_t av_timecode_get_smpte(AVRational rate, int drop, int hh, int mm, int ss, int ff);
/**
* Load timecode string in buf.
*
@ -84,6 +109,23 @@ uint32_t av_timecode_get_smpte_from_framenum(const AVTimecode *tc, int framenum)
*/
char *av_timecode_make_string(const AVTimecode *tc, char *buf, int framenum);
/**
* Get the timecode string from the SMPTE timecode format.
*
* In contrast to av_timecode_make_smpte_tc_string this function supports 50/60
* fps timecodes by using the field bit.
*
* @param buf destination buffer, must be at least AV_TIMECODE_STR_SIZE long
* @param rate frame rate of the timecode
* @param tcsmpte the 32-bit SMPTE timecode
* @param prevent_df prevent the use of a drop flag when it is known the DF bit
* is arbitrary
* @param skip_field prevent the use of a field flag when it is known the field
* bit is arbitrary (e.g. because it is used as PC flag)
* @return the buf parameter
*/
char *av_timecode_make_smpte_tc_string2(char *buf, AVRational rate, uint32_t tcsmpte, int prevent_df, int skip_field);
/**
* Get the timecode string from the SMPTE timecode format.
*
@ -118,6 +160,23 @@ char *av_timecode_make_mpeg_tc_string(char *buf, uint32_t tc25bit);
*/
int av_timecode_init(AVTimecode *tc, AVRational rate, int flags, int frame_start, void *log_ctx);
/**
* Init a timecode struct from the passed timecode components.
*
* @param log_ctx a pointer to an arbitrary struct of which the first field
* is a pointer to an AVClass struct (used for av_log)
* @param tc pointer to an allocated AVTimecode
* @param rate frame rate in rational form
* @param flags miscellaneous flags such as drop frame, +24 hours, ...
* (see AVTimecodeFlag)
* @param hh hours
* @param mm minutes
* @param ss seconds
* @param ff frames
* @return 0 on success, AVERROR otherwise
*/
int av_timecode_init_from_components(AVTimecode *tc, AVRational rate, int flags, int hh, int mm, int ss, int ff, void *log_ctx);
/**
* Parse timecode representation (hh:mm:ss[:;.]ff).
*

View file

@ -43,6 +43,7 @@ enum AVTXType {
* The stride parameter is ignored.
*/
AV_TX_FLOAT_FFT = 0,
/**
* Standard MDCT with sample data type of float and a scale type of
* float. Length is the frame size, not the window size (which is 2x frame)
@ -51,21 +52,27 @@ enum AVTXType {
* For inverse transforms, the stride specifies the spacing between each
* sample in the input array in bytes. The output will be a flat array.
* Stride must be a non-zero multiple of sizeof(float).
* NOTE: the inverse transform is half-length, meaning the output will not
* contain redundant data. This is what most codecs work with.
*/
AV_TX_FLOAT_MDCT = 1,
/**
* Same as AV_TX_FLOAT_FFT with a data type of AVComplexDouble.
*/
AV_TX_DOUBLE_FFT = 2,
/**
* Same as AV_TX_FLOAT_MDCT with data and scale type of double.
* Stride must be a non-zero multiple of sizeof(double).
*/
AV_TX_DOUBLE_MDCT = 3,
/**
* Same as AV_TX_FLOAT_FFT with a data type of AVComplexInt32.
*/
AV_TX_INT32_FFT = 4,
/**
* Same as AV_TX_FLOAT_MDCT with data type of int32_t and scale type of float.
* Only scale values less than or equal to 1.0 are supported.
@ -91,10 +98,21 @@ enum AVTXType {
*/
typedef void (*av_tx_fn)(AVTXContext *s, void *out, void *in, ptrdiff_t stride);
/**
* Flags for av_tx_init()
*/
enum AVTXFlags {
/**
* Performs an in-place transformation on the input. The output argument
* of av_tn_fn() MUST match the input. May be unsupported or slower for some
* transform types.
*/
AV_TX_INPLACE = 1ULL << 0,
};
/**
* Initialize a transform context with the given configuration
* Currently power of two lengths from 2 to 131072 are supported, along with
* any length decomposable to a power of two and either 3, 5 or 15.
* (i)MDCTs with an odd length are currently not supported.
*
* @param ctx the context to allocate, will be NULL on error
* @param tx pointer to the transform function pointer to set
@ -102,7 +120,7 @@ typedef void (*av_tx_fn)(AVTXContext *s, void *out, void *in, ptrdiff_t stride);
* @param inv whether to do an inverse or a forward transform
* @param len the size of the transform in samples
* @param scale pointer to the value to scale the output if supported by type
* @param flags currently unused
* @param flags a bitmask of AVTXFlags or 0
*
* @return 0 on success, negative error code on failure
*/

View file

@ -79,7 +79,7 @@
*/
#define LIBAVUTIL_VERSION_MAJOR 56
#define LIBAVUTIL_VERSION_MINOR 51
#define LIBAVUTIL_VERSION_MINOR 70
#define LIBAVUTIL_VERSION_MICRO 100
#define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \
@ -129,7 +129,18 @@
#ifndef FF_API_PSEUDOPAL
#define FF_API_PSEUDOPAL (LIBAVUTIL_VERSION_MAJOR < 57)
#endif
#ifndef FF_API_CHILD_CLASS_NEXT
#define FF_API_CHILD_CLASS_NEXT (LIBAVUTIL_VERSION_MAJOR < 57)
#endif
#ifndef FF_API_BUFFER_SIZE_T
#define FF_API_BUFFER_SIZE_T (LIBAVUTIL_VERSION_MAJOR < 57)
#endif
#ifndef FF_API_D2STR
#define FF_API_D2STR (LIBAVUTIL_VERSION_MAJOR < 58)
#endif
#ifndef FF_API_DECLARE_ALIGNED
#define FF_API_DECLARE_ALIGNED (LIBAVUTIL_VERSION_MAJOR < 58)
#endif
/**
* @}

View file

@ -55,6 +55,14 @@ enum AVVideoEncParamsType {
* as AVVideoBlockParams.qp_delta.
*/
AV_VIDEO_ENC_PARAMS_H264,
/*
* MPEG-2-compatible quantizer.
*
* Summing the frame-level qp with the per-block delta_qp gives the
* resulting quantizer for the block.
*/
AV_VIDEO_ENC_PARAMS_MPEG2,
};
/**
@ -153,7 +161,7 @@ AVVideoEncParams *av_video_enc_params_alloc(enum AVVideoEncParamsType type,
/**
* Allocates memory for AVEncodeInfoFrame plus an array of
* {@code nb_blocks} AVEncodeInfoBlock in the given AVFrame {@code frame}
* as AVFrameSideData of type AV_FRAME_DATA_ENCODE_INFO
* as AVFrameSideData of type AV_FRAME_DATA_VIDEO_ENC_PARAMS
* and initializes the variables.
*/
AVVideoEncParams*

View file

@ -29,7 +29,7 @@
#include "libavutil/avutil.h"
#define LIBSWRESAMPLE_VERSION_MAJOR 3
#define LIBSWRESAMPLE_VERSION_MINOR 7
#define LIBSWRESAMPLE_VERSION_MINOR 9
#define LIBSWRESAMPLE_VERSION_MICRO 100
#define LIBSWRESAMPLE_VERSION_INT AV_VERSION_INT(LIBSWRESAMPLE_VERSION_MAJOR, \

View file

@ -27,7 +27,7 @@
#include "libavutil/version.h"
#define LIBSWSCALE_VERSION_MAJOR 5
#define LIBSWSCALE_VERSION_MINOR 7
#define LIBSWSCALE_VERSION_MINOR 9
#define LIBSWSCALE_VERSION_MICRO 100
#define LIBSWSCALE_VERSION_INT AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, \

View file

@ -790,8 +790,14 @@ int readCallback(void *opaque, uint8_t *buf, int buf_size) {
if (attached) {
javaVm->DetachCurrentThread();
}
if (buf_size == 0) {
return AVERROR_EOF;
}
int ret = (int) read(info->fd, buf, (size_t) buf_size);
return ret ? ret : AVERROR_EOF;
if (ret <= 0) {
return AVERROR_EOF;
}
return ret;
}
}
}

View file

@ -1768,6 +1768,12 @@ int32_t ConnectionsManager::sendRequestInternal(TLObject *object, onCompleteFunc
auto request = new Request(instanceNum, lastRequestToken++, connetionType, flags, datacenterId, onComplete, onQuickAck, nullptr);
request->rawRequest = object;
request->rpcRequest = wrapInLayer(object, getDatacenterWithId(datacenterId), request);
auto cancelledIterator = tokensToBeCancelled.find(request->requestToken);
if (cancelledIterator != tokensToBeCancelled.end()) {
tokensToBeCancelled.erase(cancelledIterator);
delete request;
return request->requestToken;
}
requestsQueue.push_back(std::unique_ptr<Request>(request));
if (immediate) {
processRequestQueue(0, 0);
@ -1793,6 +1799,11 @@ int32_t ConnectionsManager::sendRequest(TLObject *object, onCompleteFunc onCompl
auto request = new Request(instanceNum, requestToken, connetionType, flags, datacenterId, onComplete, onQuickAck, nullptr);
request->rawRequest = object;
request->rpcRequest = wrapInLayer(object, getDatacenterWithId(datacenterId), request);
auto cancelledIterator = tokensToBeCancelled.find(request->requestToken);
if (cancelledIterator != tokensToBeCancelled.end()) {
tokensToBeCancelled.erase(cancelledIterator);
delete request;
}
requestsQueue.push_back(std::unique_ptr<Request>(request));
if (immediate) {
processRequestQueue(0, 0);
@ -1834,6 +1845,12 @@ void ConnectionsManager::sendRequest(TLObject *object, onCompleteFunc onComplete
request->ptr3 = ptr3;
request->rpcRequest = wrapInLayer(object, getDatacenterWithId(datacenterId), request);
if (LOGS_ENABLED) DEBUG_D("send request wrapped %p - %s", request->rpcRequest.get(), typeid(*(request->rpcRequest.get())).name());
auto cancelledIterator = tokensToBeCancelled.find(request->requestToken);
if (cancelledIterator != tokensToBeCancelled.end()) {
tokensToBeCancelled.erase(cancelledIterator);
delete request;
return;
}
requestsQueue.push_back(std::unique_ptr<Request>(request));
if (immediate) {
processRequestQueue(0, 0);
@ -1928,6 +1945,10 @@ void ConnectionsManager::removeRequestFromGuid(int32_t requestToken) {
}
bool ConnectionsManager::cancelRequestInternal(int32_t token, int64_t messageId, bool notifyServer, bool removeFromClass) {
if (!tokensToBeCancelled.empty() && (connectionState != ConnectionStateWaitingForNetwork || tokensToBeCancelled.size() > 5000)) {
tokensToBeCancelled.clear();
}
for (auto iter = requestsQueue.begin(); iter != requestsQueue.end(); iter++) {
Request *request = iter->get();
if ((token != 0 && request->requestToken == token) || (messageId != 0 && request->respondsToMessageId(messageId))) {
@ -1958,6 +1979,11 @@ bool ConnectionsManager::cancelRequestInternal(int32_t token, int64_t messageId,
return true;
}
}
if (token != 0 && connectionState == ConnectionStateWaitingForNetwork) {
tokensToBeCancelled.insert(token);
}
return false;
}

View file

@ -15,6 +15,7 @@
#include <sys/epoll.h>
#include <map>
#include <atomic>
#include <unordered_set>
#include "Defines.h"
#ifdef ANDROID
@ -203,6 +204,7 @@ private:
requestsList requestsQueue;
requestsList runningRequests;
std::vector<uint32_t> requestingSaltsForDc;
std::unordered_set<int32_t> tokensToBeCancelled;
int32_t lastPingId = 0;
int64_t lastInvokeAfterMessageId = 0;

View file

@ -687,15 +687,16 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onSignalBarsUpdated", "(I)V"), count);
});
},
.audioLevelUpdated = [platformContext](float level) {
tgvoip::jni::DoWithJNI([platformContext, level](JNIEnv *env) {
.audioLevelsUpdated = [platformContext](float myAudioLevel, float audioLevel) {
tgvoip::jni::DoWithJNI([platformContext, myAudioLevel, audioLevel](JNIEnv *env) {
jintArray intArray = nullptr;
jfloatArray floatArray = env->NewFloatArray(1);
jfloatArray floatArray = env->NewFloatArray(2);
jbooleanArray boolArray = nullptr;
jfloat floatFill[1];
floatFill[0] = level;
env->SetFloatArrayRegion(floatArray, 0, 1, floatFill);
jfloat floatFill[2];
floatFill[0] = myAudioLevel;
floatFill[1] = audioLevel;
env->SetFloatArrayRegion(floatArray, 0, 2, floatFill);
jobject globalRef = ((AndroidContext *) platformContext.get())->getJavaInstance();
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onAudioLevelsUpdated", "([I[F[Z)V"), intArray, floatArray, boolArray);

View file

@ -229,7 +229,7 @@ struct Descriptor {
std::shared_ptr<VideoCaptureInterface> videoCapture;
std::function<void(State)> stateUpdated;
std::function<void(int)> signalBarsUpdated;
std::function<void(float)> audioLevelUpdated;
std::function<void(float, float)> audioLevelsUpdated;
std::function<void(bool)> remoteBatteryLevelIsLowUpdated;
std::function<void(AudioState, VideoState)> remoteMediaStateUpdated;
std::function<void(float)> remotePrefferedAspectRatioUpdated;

View file

@ -98,7 +98,7 @@ _remoteBatteryLevelIsLowUpdated(std::move(descriptor.remoteBatteryLevelIsLowUpda
_remotePrefferedAspectRatioUpdated(std::move(descriptor.remotePrefferedAspectRatioUpdated)),
_signalingDataEmitted(std::move(descriptor.signalingDataEmitted)),
_signalBarsUpdated(std::move(descriptor.signalBarsUpdated)),
_audioLevelUpdated(std::move(descriptor.audioLevelUpdated)),
_audioLevelsUpdated(std::move(descriptor.audioLevelsUpdated)),
_createAudioDeviceModule(std::move(descriptor.createAudioDeviceModule)),
_enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo),
_dataSaving(descriptor.config.dataSaving),
@ -223,7 +223,7 @@ void Manager::start() {
});
}));
bool isOutgoing = _encryptionKey.isOutgoing;
_mediaManager.reset(new ThreadLocalObject<MediaManager>(StaticThreads::getMediaThread(), [weak, isOutgoing, protocolVersion = _protocolVersion, thread, sendSignalingMessage, videoCapture = _videoCapture, mediaDevicesConfig = _mediaDevicesConfig, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, audioLevelUpdated = _audioLevelUpdated, preferredCodecs = _preferredCodecs, createAudioDeviceModule = _createAudioDeviceModule, platformContext = _platformContext]() {
_mediaManager.reset(new ThreadLocalObject<MediaManager>(StaticThreads::getMediaThread(), [weak, isOutgoing, protocolVersion = _protocolVersion, thread, sendSignalingMessage, videoCapture = _videoCapture, mediaDevicesConfig = _mediaDevicesConfig, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, audioLevelsUpdated = _audioLevelsUpdated, preferredCodecs = _preferredCodecs, createAudioDeviceModule = _createAudioDeviceModule, platformContext = _platformContext]() {
return new MediaManager(
StaticThreads::getMediaThread(),
isOutgoing,
@ -241,7 +241,7 @@ void Manager::start() {
});
},
signalBarsUpdated,
audioLevelUpdated,
audioLevelsUpdated,
createAudioDeviceModule,
enableHighBitrateVideo,
preferredCodecs,

View file

@ -69,7 +69,7 @@ private:
std::function<void(float)> _remotePrefferedAspectRatioUpdated;
std::function<void(const std::vector<uint8_t> &)> _signalingDataEmitted;
std::function<void(int)> _signalBarsUpdated;
std::function<void(float)> _audioLevelUpdated;
std::function<void(float, float)> _audioLevelsUpdated;
std::function<rtc::scoped_refptr<webrtc::AudioDeviceModule>(webrtc::TaskQueueFactory*)> _createAudioDeviceModule;
std::function<uint32_t(const Message &)> _sendSignalingMessage;
std::function<void(Message&&)> _sendTransportMessage;

View file

@ -245,7 +245,7 @@ MediaManager::MediaManager(
std::function<void(Message &&)> sendSignalingMessage,
std::function<void(Message &&)> sendTransportMessage,
std::function<void(int)> signalBarsUpdated,
std::function<void(float)> audioLevelUpdated,
std::function<void(float, float)> audioLevelsUpdated,
std::function<rtc::scoped_refptr<webrtc::AudioDeviceModule>(webrtc::TaskQueueFactory*)> createAudioDeviceModule,
bool enableHighBitrateVideo,
std::vector<std::string> preferredCodecs,
@ -256,7 +256,7 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()),
_sendSignalingMessage(std::move(sendSignalingMessage)),
_sendTransportMessage(std::move(sendTransportMessage)),
_signalBarsUpdated(std::move(signalBarsUpdated)),
_audioLevelUpdated(std::move(audioLevelUpdated)),
_audioLevelsUpdated(std::move(audioLevelsUpdated)),
_createAudioDeviceModule(std::move(createAudioDeviceModule)),
_protocolVersion(protocolVersion),
_outgoingVideoState(videoCapture ? VideoState::Active : VideoState::Inactive),
@ -467,7 +467,7 @@ void MediaManager::start() {
}
beginStatsTimer(3000);
if (_audioLevelUpdated != nullptr) {
if (_audioLevelsUpdated != nullptr) {
beginLevelsTimer(100);
}
}
@ -595,8 +595,7 @@ void MediaManager::beginLevelsTimer(int timeoutMs) {
return;
}
float effectiveLevel = fmaxf(strong->_currentAudioLevel, strong->_currentMyAudioLevel);
strong->_audioLevelUpdated(effectiveLevel);
strong->_audioLevelsUpdated(strong->_currentMyAudioLevel, strong->_currentAudioLevel);
strong->beginLevelsTimer(100);
}, webrtc::TimeDelta::Millis(timeoutMs));

View file

@ -47,7 +47,7 @@ public:
std::function<void(Message &&)> sendSignalingMessage,
std::function<void(Message &&)> sendTransportMessage,
std::function<void(int)> signalBarsUpdated,
std::function<void(float)> audioLevelUpdated,
std::function<void(float, float)> audioLevelsUpdated,
std::function<rtc::scoped_refptr<webrtc::AudioDeviceModule>(webrtc::TaskQueueFactory*)> createAudioDeviceModule,
bool enableHighBitrateVideo,
std::vector<std::string> preferredCodecs,
@ -130,7 +130,7 @@ private:
std::function<void(Message &&)> _sendSignalingMessage;
std::function<void(Message &&)> _sendTransportMessage;
std::function<void(int)> _signalBarsUpdated;
std::function<void(float)> _audioLevelUpdated;
std::function<void(float, float)> _audioLevelsUpdated;
std::function<rtc::scoped_refptr<webrtc::AudioDeviceModule>(webrtc::TaskQueueFactory*)> _createAudioDeviceModule;
SSRC _ssrcAudio;

View file

@ -862,7 +862,7 @@ public:
_encryptionKey(std::move(descriptor.encryptionKey)),
_stateUpdated(descriptor.stateUpdated),
_signalBarsUpdated(descriptor.signalBarsUpdated),
_audioLevelUpdated(descriptor.audioLevelUpdated),
_audioLevelsUpdated(descriptor.audioLevelsUpdated),
_remoteBatteryLevelIsLowUpdated(descriptor.remoteBatteryLevelIsLowUpdated),
_remoteMediaStateUpdated(descriptor.remoteMediaStateUpdated),
_remotePrefferedAspectRatioUpdated(descriptor.remotePrefferedAspectRatioUpdated),
@ -2078,7 +2078,7 @@ private:
EncryptionKey _encryptionKey;
std::function<void(State)> _stateUpdated;
std::function<void(int)> _signalBarsUpdated;
std::function<void(float)> _audioLevelUpdated;
std::function<void(float, float)> _audioLevelsUpdated;
std::function<void(bool)> _remoteBatteryLevelIsLowUpdated;
std::function<void(AudioState, VideoState)> _remoteMediaStateUpdated;
std::function<void(float)> _remotePrefferedAspectRatioUpdated;

View file

@ -363,7 +363,7 @@ public:
_encryptionKey(std::move(descriptor.encryptionKey)),
_stateUpdated(descriptor.stateUpdated),
_signalBarsUpdated(descriptor.signalBarsUpdated),
_audioLevelUpdated(descriptor.audioLevelUpdated),
_audioLevelsUpdated(descriptor.audioLevelsUpdated),
_remoteBatteryLevelIsLowUpdated(descriptor.remoteBatteryLevelIsLowUpdated),
_remoteMediaStateUpdated(descriptor.remoteMediaStateUpdated),
_remotePrefferedAspectRatioUpdated(descriptor.remotePrefferedAspectRatioUpdated),
@ -1554,7 +1554,7 @@ private:
EncryptionKey _encryptionKey;
std::function<void(State)> _stateUpdated;
std::function<void(int)> _signalBarsUpdated;
std::function<void(float)> _audioLevelUpdated;
std::function<void(float, float)> _audioLevelsUpdated;
std::function<void(bool)> _remoteBatteryLevelIsLowUpdated;
std::function<void(AudioState, VideoState)> _remoteMediaStateUpdated;
std::function<void(float)> _remotePrefferedAspectRatioUpdated;

View file

@ -1213,7 +1213,7 @@ public:
_encryptionKey(std::move(descriptor.encryptionKey)),
_stateUpdated(descriptor.stateUpdated),
_signalBarsUpdated(descriptor.signalBarsUpdated),
_audioLevelUpdated(descriptor.audioLevelUpdated),
_audioLevelsUpdated(descriptor.audioLevelsUpdated),
_remoteBatteryLevelIsLowUpdated(descriptor.remoteBatteryLevelIsLowUpdated),
_remoteMediaStateUpdated(descriptor.remoteMediaStateUpdated),
_remotePrefferedAspectRatioUpdated(descriptor.remotePrefferedAspectRatioUpdated),
@ -2076,7 +2076,7 @@ private:
EncryptionKey _encryptionKey;
std::function<void(State)> _stateUpdated;
std::function<void(int)> _signalBarsUpdated;
std::function<void(float)> _audioLevelUpdated;
std::function<void(float, float)> _audioLevelsUpdated;
std::function<void(bool)> _remoteBatteryLevelIsLowUpdated;
std::function<void(AudioState, VideoState)> _remoteMediaStateUpdated;
std::function<void(float)> _remotePrefferedAspectRatioUpdated;

View file

@ -16,7 +16,6 @@
#include "common_video/h265/h265_common.h"
#include "common_video/h265/legacy_bit_buffer.h"
#include "rtc_base/bit_buffer.h"
#include "rtc_base/logging.h"
namespace {

View file

@ -443,7 +443,7 @@ std::vector<std::unique_ptr<PacketBuffer::Packet>> PacketBuffer::FindFrames(
}
}
#endif
if (is_h264 || full_frame_found) {
if (is_h264 || is_h265 || full_frame_found) {
const uint16_t end_seq_num = seq_num + 1;
// Use uint16_t type to handle sequence number wrap around case.
uint16_t num_packets = end_seq_num - start_seq_num;

View file

@ -18,6 +18,7 @@
<uses-feature android:name="android.hardware.microphone" android:required="false" />
<uses-feature android:name="android.hardware.camera" android:required="false" />
<uses-feature android:name="android.hardware.camera2" android:required="false" />
<uses-feature android:name="android.hardware.sensor.proximity" android:required="false" />
<uses-permission android:name="android.permission.READ_CLIPBOARD"/>
<uses-permission android:name="android.permission.BLUETOOTH_CONNECT" />

View file

@ -1,8 +1,19 @@
change SimpleExoPlayer.java
last exo player commit:
f72ca833
change ExoPlayerImpl.java
change Player.java
change VideoListener.java
change AspectRatioFrameLayout.java
change DefaultExtractorsFactory.java
add VideoListener.java
add support VideoListener
custom AspectRatioFrameLayout.java
change MediaCodecVideoRenderer.java
add SurfaceNotValidException.java
change MP4Extractor.java - MAXIMUM_READ_AHEAD_BYTES_STREAM to 1MB
add hack with SurfaceNotValidException
change MP4Extractor.java
MAXIMUM_READ_AHEAD_BYTES_STREAM to 1MB

View file

@ -13,16 +13,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source;
package com.google.android.exoplayer2;
import android.util.Pair;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.source.ShuffleOrder;
import com.google.android.exoplayer2.util.Assertions;
/** Abstract base class for the concatenation of one or more {@link Timeline}s. */
/* package */ abstract class AbstractConcatenatedTimeline extends Timeline {
public abstract class AbstractConcatenatedTimeline extends Timeline {
private final int childCount;
private final ShuffleOrder shuffleOrder;
@ -34,7 +32,7 @@ import com.google.android.exoplayer2.util.Assertions;
* @param concatenatedUid UID of a period in a concatenated timeline.
* @return UID of the child timeline this period belongs to.
*/
@SuppressWarnings("nullness:return.type.incompatible")
@SuppressWarnings("nullness:return")
public static Object getChildTimelineUidFromConcatenatedUid(Object concatenatedUid) {
return ((Pair<?, ?>) concatenatedUid).first;
}
@ -45,7 +43,7 @@ import com.google.android.exoplayer2.util.Assertions;
* @param concatenatedUid UID of a period in a concatenated timeline.
* @return UID of the period in the child timeline.
*/
@SuppressWarnings("nullness:return.type.incompatible")
@SuppressWarnings("nullness:return")
public static Object getChildPeriodUidFromConcatenatedUid(Object concatenatedUid) {
return ((Pair<?, ?>) concatenatedUid).second;
}
@ -209,14 +207,14 @@ import com.google.android.exoplayer2.util.Assertions;
}
@Override
public final Period getPeriodByUid(Object uid, Period period) {
Object childUid = getChildTimelineUidFromConcatenatedUid(uid);
Object periodUid = getChildPeriodUidFromConcatenatedUid(uid);
public final Period getPeriodByUid(Object periodUid, Period period) {
Object childUid = getChildTimelineUidFromConcatenatedUid(periodUid);
Object childPeriodUid = getChildPeriodUidFromConcatenatedUid(periodUid);
int childIndex = getChildIndexByChildUid(childUid);
int firstWindowIndexInChild = getFirstWindowIndexByChildIndex(childIndex);
getTimelineByChildIndex(childIndex).getPeriodByUid(periodUid, period);
getTimelineByChildIndex(childIndex).getPeriodByUid(childPeriodUid, period);
period.windowIndex += firstWindowIndexInChild;
period.uid = uid;
period.uid = periodUid;
return period;
}
@ -242,12 +240,12 @@ import com.google.android.exoplayer2.util.Assertions;
return C.INDEX_UNSET;
}
Object childUid = getChildTimelineUidFromConcatenatedUid(uid);
Object periodUid = getChildPeriodUidFromConcatenatedUid(uid);
Object childPeriodUid = getChildPeriodUidFromConcatenatedUid(uid);
int childIndex = getChildIndexByChildUid(childUid);
if (childIndex == C.INDEX_UNSET) {
return C.INDEX_UNSET;
}
int periodIndexInChild = getTimelineByChildIndex(childIndex).getIndexOfPeriod(periodUid);
int periodIndexInChild = getTimelineByChildIndex(childIndex).getIndexOfPeriod(childPeriodUid);
return periodIndexInChild == C.INDEX_UNSET
? C.INDEX_UNSET
: getFirstPeriodIndexByChildIndex(childIndex) + periodIndexInChild;

View file

@ -21,6 +21,7 @@ import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.os.Handler;
import com.google.android.exoplayer2.util.Util;
/* package */ final class AudioBecomingNoisyManager {
@ -46,8 +47,8 @@ import android.os.Handler;
*/
public void setEnabled(boolean enabled) {
if (enabled && !receiverRegistered) {
context.registerReceiver(
receiver, new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY));
Util.registerReceiverNotExported(
context, receiver, new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY));
receiverRegistered = true;
} else if (!enabled && receiverRegistered) {
context.unregisterReceiver(receiver);

View file

@ -15,6 +15,9 @@
*/
package com.google.android.exoplayer2;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.content.Context;
import android.media.AudioFocusRequest;
import android.media.AudioManager;
@ -30,6 +33,7 @@ import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Manages requesting and responding to changes in audio focus. */
@ -58,6 +62,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef({
PLAYER_COMMAND_DO_NOT_PLAY,
PLAYER_COMMAND_WAIT_FOR_CALLBACK,
@ -74,6 +79,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Audio focus state. */
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef({
AUDIO_FOCUS_STATE_NO_FOCUS,
AUDIO_FOCUS_STATE_HAVE_FOCUS,
@ -90,6 +96,47 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Audio focus has been temporarily lost, but playback may continue with reduced volume. */
private static final int AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK = 3;
/**
* Audio focus types. One of {@link #AUDIOFOCUS_NONE}, {@link #AUDIOFOCUS_GAIN}, {@link
* #AUDIOFOCUS_GAIN_TRANSIENT}, {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} or {@link
* #AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef({
AUDIOFOCUS_NONE,
AUDIOFOCUS_GAIN,
AUDIOFOCUS_GAIN_TRANSIENT,
AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK,
AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE
})
private @interface AudioFocusGain {}
/**
* @see AudioManager#AUDIOFOCUS_NONE
*/
@SuppressWarnings("InlinedApi")
private static final int AUDIOFOCUS_NONE = AudioManager.AUDIOFOCUS_NONE;
/**
* @see AudioManager#AUDIOFOCUS_GAIN
*/
private static final int AUDIOFOCUS_GAIN = AudioManager.AUDIOFOCUS_GAIN;
/**
* @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT
*/
private static final int AUDIOFOCUS_GAIN_TRANSIENT = AudioManager.AUDIOFOCUS_GAIN_TRANSIENT;
/**
* @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK
*/
private static final int AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK =
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
/**
* @see AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE
*/
@SuppressWarnings("InlinedApi")
private static final int AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE =
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE;
private static final String TAG = "AudioFocusManager";
private static final float VOLUME_MULTIPLIER_DUCK = 0.2f;
@ -100,8 +147,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Nullable private PlayerControl playerControl;
@Nullable private AudioAttributes audioAttributes;
@AudioFocusState private int audioFocusState;
@C.AudioFocusGain private int focusGain;
private @AudioFocusState int audioFocusState;
private @AudioFocusGain int focusGainToRequest;
private float volumeMultiplier = VOLUME_MULTIPLIER_DEFAULT;
private @MonotonicNonNull AudioFocusRequest audioFocusRequest;
@ -116,7 +163,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
*/
public AudioFocusManager(Context context, Handler eventHandler, PlayerControl playerControl) {
this.audioManager =
(AudioManager) context.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
checkNotNull(
(AudioManager) context.getApplicationContext().getSystemService(Context.AUDIO_SERVICE));
this.playerControl = playerControl;
this.focusListener = new AudioFocusListener(eventHandler);
this.audioFocusState = AUDIO_FOCUS_STATE_NO_FOCUS;
@ -139,9 +187,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public void setAudioAttributes(@Nullable AudioAttributes audioAttributes) {
if (!Util.areEqual(this.audioAttributes, audioAttributes)) {
this.audioAttributes = audioAttributes;
focusGain = convertAudioAttributesToFocusGain(audioAttributes);
focusGainToRequest = convertAudioAttributesToFocusGain(audioAttributes);
Assertions.checkArgument(
focusGain == C.AUDIOFOCUS_GAIN || focusGain == C.AUDIOFOCUS_NONE,
focusGainToRequest == AUDIOFOCUS_GAIN || focusGainToRequest == AUDIOFOCUS_NONE,
"Automatic handling of audio focus is only available for USAGE_MEDIA and USAGE_GAME.");
}
}
@ -153,10 +201,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* @param playbackState The desired playback state.
* @return A {@link PlayerCommand} to execute on the player.
*/
@PlayerCommand
public int updateAudioFocus(boolean playWhenReady, @Player.State int playbackState) {
if (shouldAbandonAudioFocus(playbackState)) {
abandonAudioFocus();
public @PlayerCommand int updateAudioFocus(
boolean playWhenReady, @Player.State int playbackState) {
if (shouldAbandonAudioFocusIfHeld(playbackState)) {
abandonAudioFocusIfHeld();
return playWhenReady ? PLAYER_COMMAND_PLAY_WHEN_READY : PLAYER_COMMAND_DO_NOT_PLAY;
}
return playWhenReady ? requestAudioFocus() : PLAYER_COMMAND_DO_NOT_PLAY;
@ -168,7 +216,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
*/
public void release() {
playerControl = null;
abandonAudioFocus();
abandonAudioFocusIfHeld();
}
// Internal methods.
@ -178,12 +226,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return focusListener;
}
private boolean shouldAbandonAudioFocus(@Player.State int playbackState) {
return playbackState == Player.STATE_IDLE || focusGain != C.AUDIOFOCUS_GAIN;
private boolean shouldAbandonAudioFocusIfHeld(@Player.State int playbackState) {
return playbackState == Player.STATE_IDLE || focusGainToRequest != AUDIOFOCUS_GAIN;
}
@PlayerCommand
private int requestAudioFocus() {
private @PlayerCommand int requestAudioFocus() {
if (audioFocusState == AUDIO_FOCUS_STATE_HAVE_FOCUS) {
return PLAYER_COMMAND_PLAY_WHEN_READY;
}
@ -197,7 +244,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
}
private void abandonAudioFocus() {
private void abandonAudioFocusIfHeld() {
if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
return;
}
@ -212,8 +259,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private int requestAudioFocusDefault() {
return audioManager.requestAudioFocus(
focusListener,
Util.getStreamTypeForAudioUsage(Assertions.checkNotNull(audioAttributes).usage),
focusGain);
Util.getStreamTypeForAudioUsage(checkNotNull(audioAttributes).usage),
focusGainToRequest);
}
@RequiresApi(26)
@ -221,13 +268,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (audioFocusRequest == null || rebuildAudioFocusRequest) {
AudioFocusRequest.Builder builder =
audioFocusRequest == null
? new AudioFocusRequest.Builder(focusGain)
? new AudioFocusRequest.Builder(focusGainToRequest)
: new AudioFocusRequest.Builder(audioFocusRequest);
boolean willPauseWhenDucked = willPauseWhenDucked();
audioFocusRequest =
builder
.setAudioAttributes(Assertions.checkNotNull(audioAttributes).getAudioAttributesV21())
.setAudioAttributes(
checkNotNull(audioAttributes).getAudioAttributesV21().audioAttributes)
.setWillPauseWhenDucked(willPauseWhenDucked)
.setOnAudioFocusChangeListener(focusListener)
.build();
@ -249,7 +297,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private boolean willPauseWhenDucked() {
return audioAttributes != null && audioAttributes.contentType == C.CONTENT_TYPE_SPEECH;
return audioAttributes != null && audioAttributes.contentType == C.AUDIO_CONTENT_TYPE_SPEECH;
}
/**
@ -260,12 +308,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* @param audioAttributes The audio attributes associated with this focus request.
* @return The type of audio focus gain that should be requested.
*/
@C.AudioFocusGain
private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes audioAttributes) {
private static @AudioFocusGain int convertAudioAttributesToFocusGain(
@Nullable AudioAttributes audioAttributes) {
if (audioAttributes == null) {
// Don't handle audio focus. It may be either video only contents or developers
// want to have more finer grained control. (e.g. adding audio focus listener)
return C.AUDIOFOCUS_NONE;
return AUDIOFOCUS_NONE;
}
switch (audioAttributes.usage) {
@ -273,13 +321,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// during the phone call when AUDIOFOCUS_GAIN_TRANSIENT is requested for that.
// Don't request audio focus here.
case C.USAGE_VOICE_COMMUNICATION_SIGNALLING:
return C.AUDIOFOCUS_NONE;
return AUDIOFOCUS_NONE;
// Javadoc says 'AUDIOFOCUS_GAIN: Examples of uses of this focus gain are for music
// playback, for a game or a video player'
case C.USAGE_GAME:
case C.USAGE_MEDIA:
return C.AUDIOFOCUS_GAIN;
return AUDIOFOCUS_GAIN;
// Special usages: USAGE_UNKNOWN shouldn't be used. Request audio focus to prevent
// multiple media playback happen at the same time.
@ -288,13 +336,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
TAG,
"Specify a proper usage in the audio attributes for audio focus"
+ " handling. Using AUDIOFOCUS_GAIN by default.");
return C.AUDIOFOCUS_GAIN;
return AUDIOFOCUS_GAIN;
// Javadoc says 'AUDIOFOCUS_GAIN_TRANSIENT: An example is for playing an alarm, or
// during a VoIP call'
case C.USAGE_ALARM:
case C.USAGE_VOICE_COMMUNICATION:
return C.AUDIOFOCUS_GAIN_TRANSIENT;
return AUDIOFOCUS_GAIN_TRANSIENT;
// Javadoc says 'AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK: Examples are when playing
// driving directions or notifications'
@ -306,28 +354,28 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
case C.USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
case C.USAGE_NOTIFICATION_EVENT:
case C.USAGE_NOTIFICATION_RINGTONE:
return C.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
return AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
// Javadoc says 'AUDIOFOCUS_GAIN_EXCLUSIVE: This is typically used if you are doing
// audio recording or speech recognition'.
// Assistant is considered as both recording and notifying developer
case C.USAGE_ASSISTANT:
if (Util.SDK_INT >= 19) {
return C.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE;
return AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE;
} else {
return C.AUDIOFOCUS_GAIN_TRANSIENT;
return AUDIOFOCUS_GAIN_TRANSIENT;
}
// Special usages:
case C.USAGE_ASSISTANCE_ACCESSIBILITY:
if (audioAttributes.contentType == C.CONTENT_TYPE_SPEECH) {
if (audioAttributes.contentType == C.AUDIO_CONTENT_TYPE_SPEECH) {
// Voice shouldn't be interrupted by other playback.
return C.AUDIOFOCUS_GAIN_TRANSIENT;
return AUDIOFOCUS_GAIN_TRANSIENT;
}
return C.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
return AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
default:
Log.w(TAG, "Unidentified audio usage: " + audioAttributes.usage);
return C.AUDIOFOCUS_NONE;
return AUDIOFOCUS_NONE;
}
}
@ -358,7 +406,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return;
case AudioManager.AUDIOFOCUS_LOSS:
executePlayerCommand(PLAYER_COMMAND_DO_NOT_PLAY);
abandonAudioFocus();
abandonAudioFocusIfHeld();
return;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:

View file

@ -15,18 +15,102 @@
*/
package com.google.android.exoplayer2;
import static androidx.annotation.VisibleForTesting.PROTECTED;
import static java.lang.Math.max;
import static java.lang.Math.min;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;
import java.util.List;
/** Abstract base {@link Player} which implements common implementation independent methods. */
public abstract class BasePlayer implements Player {
protected final Timeline.Window window;
public BasePlayer() {
protected BasePlayer() {
window = new Timeline.Window();
}
@Override
public final void setMediaItem(MediaItem mediaItem) {
setMediaItems(ImmutableList.of(mediaItem));
}
@Override
public final void setMediaItem(MediaItem mediaItem, long startPositionMs) {
setMediaItems(ImmutableList.of(mediaItem), /* startIndex= */ 0, startPositionMs);
}
@Override
public final void setMediaItem(MediaItem mediaItem, boolean resetPosition) {
setMediaItems(ImmutableList.of(mediaItem), resetPosition);
}
@Override
public final void setMediaItems(List<MediaItem> mediaItems) {
setMediaItems(mediaItems, /* resetPosition= */ true);
}
@Override
public final void addMediaItem(int index, MediaItem mediaItem) {
addMediaItems(index, ImmutableList.of(mediaItem));
}
@Override
public final void addMediaItem(MediaItem mediaItem) {
addMediaItems(ImmutableList.of(mediaItem));
}
@Override
public final void addMediaItems(List<MediaItem> mediaItems) {
addMediaItems(/* index= */ Integer.MAX_VALUE, mediaItems);
}
@Override
public final void moveMediaItem(int currentIndex, int newIndex) {
if (currentIndex != newIndex) {
moveMediaItems(/* fromIndex= */ currentIndex, /* toIndex= */ currentIndex + 1, newIndex);
}
}
@Override
public final void removeMediaItem(int index) {
removeMediaItems(/* fromIndex= */ index, /* toIndex= */ index + 1);
}
@Override
public final void clearMediaItems() {
removeMediaItems(/* fromIndex= */ 0, /* toIndex= */ Integer.MAX_VALUE);
}
@Override
public final boolean isCommandAvailable(@Command int command) {
return getAvailableCommands().contains(command);
}
/**
* {@inheritDoc}
*
* <p>BasePlayer and its descendants will return {@code true}.
*/
@Override
public final boolean canAdvertiseSession() {
return true;
}
@Override
public final void play() {
setPlayWhenReady(true);
}
@Override
public final void pause() {
setPlayWhenReady(false);
}
@Override
public final boolean isPlaying() {
return getPlaybackState() == Player.STATE_READY
@ -36,80 +120,254 @@ public abstract class BasePlayer implements Player {
@Override
public final void seekToDefaultPosition() {
seekToDefaultPosition(getCurrentWindowIndex());
seekToDefaultPositionInternal(
getCurrentMediaItemIndex(), Player.COMMAND_SEEK_TO_DEFAULT_POSITION);
}
@Override
public final void seekToDefaultPosition(int windowIndex) {
seekTo(windowIndex, /* positionMs= */ C.TIME_UNSET);
public final void seekToDefaultPosition(int mediaItemIndex) {
seekToDefaultPositionInternal(mediaItemIndex, Player.COMMAND_SEEK_TO_MEDIA_ITEM);
}
@Override
public final void seekBack() {
seekToOffset(-getSeekBackIncrement(), Player.COMMAND_SEEK_BACK);
}
@Override
public final void seekForward() {
seekToOffset(getSeekForwardIncrement(), Player.COMMAND_SEEK_FORWARD);
}
/**
* @deprecated Use {@link #hasPreviousMediaItem()} instead.
*/
@Deprecated
@Override
public final boolean hasPrevious() {
return hasPreviousMediaItem();
}
/**
* @deprecated Use {@link #hasPreviousMediaItem()} instead.
*/
@Deprecated
@Override
public final boolean hasPreviousWindow() {
return hasPreviousMediaItem();
}
@Override
public final boolean hasPreviousMediaItem() {
return getPreviousMediaItemIndex() != C.INDEX_UNSET;
}
/**
* @deprecated Use {@link #seekToPreviousMediaItem()} instead.
*/
@Deprecated
@Override
public final void previous() {
seekToPreviousMediaItem();
}
/**
* @deprecated Use {@link #seekToPreviousMediaItem()} instead.
*/
@Deprecated
@Override
public final void seekToPreviousWindow() {
seekToPreviousMediaItem();
}
@Override
public final void seekToPreviousMediaItem() {
seekToPreviousMediaItemInternal(Player.COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM);
}
@Override
public final void seekToPrevious() {
Timeline timeline = getCurrentTimeline();
if (timeline.isEmpty() || isPlayingAd()) {
return;
}
boolean hasPreviousMediaItem = hasPreviousMediaItem();
if (isCurrentMediaItemLive() && !isCurrentMediaItemSeekable()) {
if (hasPreviousMediaItem) {
seekToPreviousMediaItemInternal(Player.COMMAND_SEEK_TO_PREVIOUS);
}
} else if (hasPreviousMediaItem && getCurrentPosition() <= getMaxSeekToPreviousPosition()) {
seekToPreviousMediaItemInternal(Player.COMMAND_SEEK_TO_PREVIOUS);
} else {
seekToCurrentItem(/* positionMs= */ 0, Player.COMMAND_SEEK_TO_PREVIOUS);
}
}
/**
* @deprecated Use {@link #hasNextMediaItem()} instead.
*/
@Deprecated
@Override
public final boolean hasNext() {
return hasNextMediaItem();
}
/**
* @deprecated Use {@link #hasNextMediaItem()} instead.
*/
@Deprecated
@Override
public final boolean hasNextWindow() {
return hasNextMediaItem();
}
@Override
public final boolean hasNextMediaItem() {
return getNextMediaItemIndex() != C.INDEX_UNSET;
}
/**
* @deprecated Use {@link #seekToNextMediaItem()} instead.
*/
@Deprecated
@Override
public final void next() {
seekToNextMediaItem();
}
/**
* @deprecated Use {@link #seekToNextMediaItem()} instead.
*/
@Deprecated
@Override
public final void seekToNextWindow() {
seekToNextMediaItem();
}
@Override
public final void seekToNextMediaItem() {
seekToNextMediaItemInternal(Player.COMMAND_SEEK_TO_NEXT_MEDIA_ITEM);
}
@Override
public final void seekToNext() {
Timeline timeline = getCurrentTimeline();
if (timeline.isEmpty() || isPlayingAd()) {
return;
}
if (hasNextMediaItem()) {
seekToNextMediaItemInternal(Player.COMMAND_SEEK_TO_NEXT);
} else if (isCurrentMediaItemLive() && isCurrentMediaItemDynamic()) {
seekToDefaultPositionInternal(getCurrentMediaItemIndex(), Player.COMMAND_SEEK_TO_NEXT);
}
}
@Override
public final void seekTo(long positionMs) {
seekTo(getCurrentWindowIndex(), positionMs);
seekToCurrentItem(positionMs, Player.COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM);
}
@Override
public final boolean hasPrevious() {
return getPreviousWindowIndex() != C.INDEX_UNSET;
public final void seekTo(int mediaItemIndex, long positionMs) {
seekTo(
mediaItemIndex,
positionMs,
Player.COMMAND_SEEK_TO_MEDIA_ITEM,
/* isRepeatingCurrentItem= */ false);
}
/**
* Seeks to a position in the specified {@link MediaItem}.
*
* @param mediaItemIndex The index of the {@link MediaItem}.
* @param positionMs The seek position in the specified {@link MediaItem} in milliseconds, or
* {@link C#TIME_UNSET} to seek to the media item's default position.
* @param seekCommand The {@link Player.Command} used to trigger the seek.
* @param isRepeatingCurrentItem Whether this seeks repeats the current item.
*/
@VisibleForTesting(otherwise = PROTECTED)
public abstract void seekTo(
int mediaItemIndex,
long positionMs,
@Player.Command int seekCommand,
boolean isRepeatingCurrentItem);
@Override
public final void previous() {
int previousWindowIndex = getPreviousWindowIndex();
if (previousWindowIndex != C.INDEX_UNSET) {
seekToDefaultPosition(previousWindowIndex);
}
public final void setPlaybackSpeed(float speed) {
setPlaybackParameters(getPlaybackParameters().withSpeed(speed));
}
/**
* @deprecated Use {@link #getCurrentMediaItemIndex()} instead.
*/
@Deprecated
@Override
public final boolean hasNext() {
return getNextWindowIndex() != C.INDEX_UNSET;
}
@Override
public final void next() {
int nextWindowIndex = getNextWindowIndex();
if (nextWindowIndex != C.INDEX_UNSET) {
seekToDefaultPosition(nextWindowIndex);
}
}
@Override
public final void stop() {
stop(/* reset= */ false);
public final int getCurrentWindowIndex() {
return getCurrentMediaItemIndex();
}
/**
* @deprecated Use {@link #getNextMediaItemIndex()} instead.
*/
@Deprecated
@Override
public final int getNextWindowIndex() {
return getNextMediaItemIndex();
}
@Override
public final int getNextMediaItemIndex() {
Timeline timeline = getCurrentTimeline();
return timeline.isEmpty()
? C.INDEX_UNSET
: timeline.getNextWindowIndex(
getCurrentWindowIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled());
getCurrentMediaItemIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled());
}
/**
* @deprecated Use {@link #getPreviousMediaItemIndex()} instead.
*/
@Deprecated
@Override
public final int getPreviousWindowIndex() {
return getPreviousMediaItemIndex();
}
@Override
public final int getPreviousWindowIndex() {
public final int getPreviousMediaItemIndex() {
Timeline timeline = getCurrentTimeline();
return timeline.isEmpty()
? C.INDEX_UNSET
: timeline.getPreviousWindowIndex(
getCurrentWindowIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled());
getCurrentMediaItemIndex(), getRepeatModeForNavigation(), getShuffleModeEnabled());
}
@Override
@Nullable
public final Object getCurrentTag() {
public final MediaItem getCurrentMediaItem() {
Timeline timeline = getCurrentTimeline();
return timeline.isEmpty() ? null : timeline.getWindow(getCurrentWindowIndex(), window).tag;
return timeline.isEmpty()
? null
: timeline.getWindow(getCurrentMediaItemIndex(), window).mediaItem;
}
@Override
public final int getMediaItemCount() {
return getCurrentTimeline().getWindowCount();
}
@Override
public final MediaItem getMediaItemAt(int index) {
return getCurrentTimeline().getWindow(index, window).mediaItem;
}
@Override
@Nullable
public final Object getCurrentManifest() {
Timeline timeline = getCurrentTimeline();
return timeline.isEmpty() ? null : timeline.getWindow(getCurrentWindowIndex(), window).manifest;
return timeline.isEmpty()
? null
: timeline.getWindow(getCurrentMediaItemIndex(), window).manifest;
}
@Override
@ -121,22 +379,63 @@ public abstract class BasePlayer implements Player {
: duration == 0 ? 100 : Util.constrainValue((int) ((position * 100) / duration), 0, 100);
}
/**
* @deprecated Use {@link #isCurrentMediaItemDynamic()} instead.
*/
@Deprecated
@Override
public final boolean isCurrentWindowDynamic() {
Timeline timeline = getCurrentTimeline();
return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isDynamic;
return isCurrentMediaItemDynamic();
}
@Override
public final boolean isCurrentMediaItemDynamic() {
Timeline timeline = getCurrentTimeline();
return !timeline.isEmpty() && timeline.getWindow(getCurrentMediaItemIndex(), window).isDynamic;
}
/**
* @deprecated Use {@link #isCurrentMediaItemLive()} instead.
*/
@Deprecated
@Override
public final boolean isCurrentWindowLive() {
Timeline timeline = getCurrentTimeline();
return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isLive;
return isCurrentMediaItemLive();
}
@Override
public final boolean isCurrentWindowSeekable() {
public final boolean isCurrentMediaItemLive() {
Timeline timeline = getCurrentTimeline();
return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isSeekable;
return !timeline.isEmpty() && timeline.getWindow(getCurrentMediaItemIndex(), window).isLive();
}
@Override
public final long getCurrentLiveOffset() {
Timeline timeline = getCurrentTimeline();
if (timeline.isEmpty()) {
return C.TIME_UNSET;
}
long windowStartTimeMs =
timeline.getWindow(getCurrentMediaItemIndex(), window).windowStartTimeMs;
if (windowStartTimeMs == C.TIME_UNSET) {
return C.TIME_UNSET;
}
return window.getCurrentUnixTimeMs() - window.windowStartTimeMs - getContentPosition();
}
/**
* @deprecated Use {@link #isCurrentMediaItemSeekable()} instead.
*/
@Deprecated
@Override
public final boolean isCurrentWindowSeekable() {
return isCurrentMediaItemSeekable();
}
@Override
public final boolean isCurrentMediaItemSeekable() {
Timeline timeline = getCurrentTimeline();
return !timeline.isEmpty() && timeline.getWindow(getCurrentMediaItemIndex(), window).isSeekable;
}
@Override
@ -144,66 +443,66 @@ public abstract class BasePlayer implements Player {
Timeline timeline = getCurrentTimeline();
return timeline.isEmpty()
? C.TIME_UNSET
: timeline.getWindow(getCurrentWindowIndex(), window).getDurationMs();
: timeline.getWindow(getCurrentMediaItemIndex(), window).getDurationMs();
}
@RepeatMode
private int getRepeatModeForNavigation() {
private @RepeatMode int getRepeatModeForNavigation() {
@RepeatMode int repeatMode = getRepeatMode();
return repeatMode == REPEAT_MODE_ONE ? REPEAT_MODE_OFF : repeatMode;
}
/** Holds a listener reference. */
protected static final class ListenerHolder {
private void seekToCurrentItem(long positionMs, @Player.Command int seekCommand) {
seekTo(
getCurrentMediaItemIndex(), positionMs, seekCommand, /* isRepeatingCurrentItem= */ false);
}
/**
* The listener on which {link #invoke} will execute {@link ListenerInvocation listener
* invocations}.
*/
public final Player.EventListener listener;
private boolean released;
public ListenerHolder(Player.EventListener listener) {
this.listener = listener;
private void seekToOffset(long offsetMs, @Player.Command int seekCommand) {
long positionMs = getCurrentPosition() + offsetMs;
long durationMs = getDuration();
if (durationMs != C.TIME_UNSET) {
positionMs = min(positionMs, durationMs);
}
positionMs = max(positionMs, 0);
seekToCurrentItem(positionMs, seekCommand);
}
/** Prevents any further {@link ListenerInvocation} to be executed on {@link #listener}. */
public void release() {
released = true;
private void seekToDefaultPositionInternal(int mediaItemIndex, @Player.Command int seekCommand) {
seekTo(
mediaItemIndex,
/* positionMs= */ C.TIME_UNSET,
seekCommand,
/* isRepeatingCurrentItem= */ false);
}
private void seekToNextMediaItemInternal(@Player.Command int seekCommand) {
int nextMediaItemIndex = getNextMediaItemIndex();
if (nextMediaItemIndex == C.INDEX_UNSET) {
return;
}
/**
* Executes the given {@link ListenerInvocation} on {@link #listener}. Does nothing if {@link
* #release} has been called on this instance.
*/
public void invoke(ListenerInvocation listenerInvocation) {
if (!released) {
listenerInvocation.invokeListener(listener);
}
}
@Override
public boolean equals(@Nullable Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
return listener.equals(((ListenerHolder) other).listener);
}
@Override
public int hashCode() {
return listener.hashCode();
if (nextMediaItemIndex == getCurrentMediaItemIndex()) {
repeatCurrentMediaItem(seekCommand);
} else {
seekToDefaultPositionInternal(nextMediaItemIndex, seekCommand);
}
}
/** Parameterized invocation of a {@link Player.EventListener} method. */
protected interface ListenerInvocation {
private void seekToPreviousMediaItemInternal(@Player.Command int seekCommand) {
int previousMediaItemIndex = getPreviousMediaItemIndex();
if (previousMediaItemIndex == C.INDEX_UNSET) {
return;
}
if (previousMediaItemIndex == getCurrentMediaItemIndex()) {
repeatCurrentMediaItem(seekCommand);
} else {
seekToDefaultPositionInternal(previousMediaItemIndex, seekCommand);
}
}
/** Executes the invocation on the given {@link Player.EventListener}. */
void invokeListener(Player.EventListener listener);
private void repeatCurrentMediaItem(@Player.Command int seekCommand) {
seekTo(
getCurrentMediaItemIndex(),
/* positionMs= */ C.TIME_UNSET,
seekCommand,
/* isRepeatingCurrentItem= */ true);
}
}

View file

@ -15,49 +15,51 @@
*/
package com.google.android.exoplayer2;
import android.os.Looper;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static java.lang.Math.max;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer.InsufficientCapacityException;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.source.SampleStream.ReadDataResult;
import com.google.android.exoplayer2.source.SampleStream.ReadFlags;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MediaClock;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* An abstract base class suitable for most {@link Renderer} implementations.
*/
/** An abstract base class suitable for most {@link Renderer} implementations. */
public abstract class BaseRenderer implements Renderer, RendererCapabilities {
private final int trackType;
private final @C.TrackType int trackType;
private final FormatHolder formatHolder;
private RendererConfiguration configuration;
@Nullable private RendererConfiguration configuration;
private int index;
private @MonotonicNonNull PlayerId playerId;
private int state;
private SampleStream stream;
private Format[] streamFormats;
@Nullable private SampleStream stream;
@Nullable private Format[] streamFormats;
private long streamOffsetUs;
private long lastResetPositionUs;
private long readingPositionUs;
private boolean streamIsFinal;
private boolean throwRendererExceptionIsExecuting;
/**
* @param trackType The track type that the renderer handles. One of the {@link C}
* {@code TRACK_TYPE_*} constants.
* @param trackType The track type that the renderer handles. One of the {@link C} {@code
* TRACK_TYPE_*} constants.
*/
public BaseRenderer(int trackType) {
public BaseRenderer(@C.TrackType int trackType) {
this.trackType = trackType;
formatHolder = new FormatHolder();
readingPositionUs = C.TIME_END_OF_SOURCE;
}
@Override
public final int getTrackType() {
public final @C.TrackType int getTrackType() {
return trackType;
}
@ -67,8 +69,9 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
}
@Override
public final void setIndex(int index) {
public final void init(int index, PlayerId playerId) {
this.index = index;
this.playerId = playerId;
}
@Override
@ -83,15 +86,22 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
}
@Override
public final void enable(RendererConfiguration configuration, Format[] formats,
SampleStream stream, long positionUs, boolean joining, long offsetUs)
public final void enable(
RendererConfiguration configuration,
Format[] formats,
SampleStream stream,
long positionUs,
boolean joining,
boolean mayRenderStartOfStream,
long startPositionUs,
long offsetUs)
throws ExoPlaybackException {
Assertions.checkState(state == STATE_DISABLED);
this.configuration = configuration;
state = STATE_ENABLED;
onEnabled(joining);
replaceStream(formats, stream, offsetUs);
onPositionReset(positionUs, joining);
onEnabled(joining, mayRenderStartOfStream);
replaceStream(formats, stream, startPositionUs, offsetUs);
resetPosition(positionUs, joining);
}
@Override
@ -102,14 +112,17 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
}
@Override
public final void replaceStream(Format[] formats, SampleStream stream, long offsetUs)
public final void replaceStream(
Format[] formats, SampleStream stream, long startPositionUs, long offsetUs)
throws ExoPlaybackException {
Assertions.checkState(!streamIsFinal);
this.stream = stream;
readingPositionUs = offsetUs;
if (readingPositionUs == C.TIME_END_OF_SOURCE) {
readingPositionUs = startPositionUs;
}
streamFormats = formats;
streamOffsetUs = offsetUs;
onStreamChanged(formats, offsetUs);
onStreamChanged(formats, startPositionUs, offsetUs);
}
@Override
@ -140,18 +153,23 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
@Override
public final void maybeThrowStreamError() throws IOException {
stream.maybeThrowError();
Assertions.checkNotNull(stream).maybeThrowError();
}
@Override
public final void resetPosition(long positionUs) throws ExoPlaybackException {
resetPosition(positionUs, /* joining= */ false);
}
private void resetPosition(long positionUs, boolean joining) throws ExoPlaybackException {
streamIsFinal = false;
lastResetPositionUs = positionUs;
readingPositionUs = positionUs;
onPositionReset(positionUs, false);
onPositionReset(positionUs, joining);
}
@Override
public final void stop() throws ExoPlaybackException {
public final void stop() {
Assertions.checkState(state == STATE_STARTED);
state = STATE_ENABLED;
onStopped();
@ -178,15 +196,15 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
// RendererCapabilities implementation.
@Override
@AdaptiveSupport
public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException {
public @AdaptiveSupport int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException {
return ADAPTIVE_NOT_SUPPORTED;
}
// PlayerMessage.Target implementation.
@Override
public void handleMessage(int what, @Nullable Object object) throws ExoPlaybackException {
public void handleMessage(@MessageType int messageType, @Nullable Object message)
throws ExoPlaybackException {
// Do nothing.
}
@ -194,42 +212,46 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
/**
* Called when the renderer is enabled.
* <p>
* The default implementation is a no-op.
*
* <p>The default implementation is a no-op.
*
* @param joining Whether this renderer is being enabled to join an ongoing playback.
* @param mayRenderStartOfStream Whether this renderer is allowed to render the start of the
* stream even if the state is not {@link #STATE_STARTED} yet.
* @throws ExoPlaybackException If an error occurs.
*/
protected void onEnabled(boolean joining) throws ExoPlaybackException {
protected void onEnabled(boolean joining, boolean mayRenderStartOfStream)
throws ExoPlaybackException {
// Do nothing.
}
/**
* Called when the renderer's stream has changed. This occurs when the renderer is enabled after
* {@link #onEnabled(boolean)} has been called, and also when the stream has been replaced whilst
* the renderer is enabled or started.
* <p>
* The default implementation is a no-op.
* {@link #onEnabled(boolean, boolean)} has been called, and also when the stream has been
* replaced whilst the renderer is enabled or started.
*
* <p>The default implementation is a no-op.
*
* @param formats The enabled formats.
* @param offsetUs The offset that will be added to the timestamps of buffers read via
* {@link #readSource(FormatHolder, DecoderInputBuffer, boolean)} so that decoder input
* buffers have monotonically increasing timestamps.
* @param startPositionUs The start position of the new stream in renderer time (microseconds).
* @param offsetUs The offset that will be added to the timestamps of buffers read via {@link
* #readSource} so that decoder input buffers have monotonically increasing timestamps.
* @throws ExoPlaybackException If an error occurs.
*/
protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
protected void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs)
throws ExoPlaybackException {
// Do nothing.
}
/**
* Called when the position is reset. This occurs when the renderer is enabled after
* {@link #onStreamChanged(Format[], long)} has been called, and also when a position
* discontinuity is encountered.
* <p>
* After a position reset, the renderer's {@link SampleStream} is guaranteed to provide samples
* Called when the position is reset. This occurs when the renderer is enabled after {@link
* #onStreamChanged(Format[], long, long)} has been called, and also when a position discontinuity
* is encountered.
*
* <p>After a position reset, the renderer's {@link SampleStream} is guaranteed to provide samples
* starting from a key frame.
* <p>
* The default implementation is a no-op.
*
* <p>The default implementation is a no-op.
*
* @param positionUs The new playback position in microseconds.
* @param joining Whether this renderer is being enabled to join an ongoing playback.
@ -241,8 +263,8 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
/**
* Called when the renderer is started.
* <p>
* The default implementation is a no-op.
*
* <p>The default implementation is a no-op.
*
* @throws ExoPlaybackException If an error occurs.
*/
@ -252,19 +274,17 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
/**
* Called when the renderer is stopped.
* <p>
* The default implementation is a no-op.
*
* @throws ExoPlaybackException If an error occurs.
* <p>The default implementation is a no-op.
*/
protected void onStopped() throws ExoPlaybackException {
protected void onStopped() {
// Do nothing.
}
/**
* Called when the renderer is disabled.
* <p>
* The default implementation is a no-op.
*
* <p>The default implementation is a no-op.
*/
protected void onDisabled() {
// Do nothing.
@ -281,70 +301,91 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
// Methods to be called by subclasses.
/**
* Returns the position passed to the most recent call to {@link #enable} or {@link
* #resetPosition}.
*/
protected final long getLastResetPositionUs() {
return lastResetPositionUs;
}
/** Returns a clear {@link FormatHolder}. */
protected final FormatHolder getFormatHolder() {
formatHolder.clear();
return formatHolder;
}
/** Returns the formats of the currently enabled stream. */
/**
* Returns the formats of the currently enabled stream.
*
* <p>This method may be called when the renderer is in the following states: {@link
* #STATE_ENABLED}, {@link #STATE_STARTED}.
*/
protected final Format[] getStreamFormats() {
return streamFormats;
return Assertions.checkNotNull(streamFormats);
}
/**
* Returns the configuration set when the renderer was most recently enabled.
*
* <p>This method may be called when the renderer is in the following states: {@link
* #STATE_ENABLED}, {@link #STATE_STARTED}.
*/
protected final RendererConfiguration getConfiguration() {
return configuration;
}
/** Returns a {@link DrmSession} ready for assignment, handling resource management. */
@Nullable
protected final <T extends ExoMediaCrypto> DrmSession<T> getUpdatedSourceDrmSession(
@Nullable Format oldFormat,
Format newFormat,
@Nullable DrmSessionManager<T> drmSessionManager,
@Nullable DrmSession<T> existingSourceSession)
throws ExoPlaybackException {
boolean drmInitDataChanged =
!Util.areEqual(newFormat.drmInitData, oldFormat == null ? null : oldFormat.drmInitData);
if (!drmInitDataChanged) {
return existingSourceSession;
}
@Nullable DrmSession<T> newSourceDrmSession = null;
if (newFormat.drmInitData != null) {
if (drmSessionManager == null) {
throw createRendererException(
new IllegalStateException("Media requires a DrmSessionManager"), newFormat);
}
newSourceDrmSession =
drmSessionManager.acquireSession(
Assertions.checkNotNull(Looper.myLooper()), newFormat.drmInitData);
}
if (existingSourceSession != null) {
existingSourceSession.release();
}
return newSourceDrmSession;
return Assertions.checkNotNull(configuration);
}
/**
* Returns the index of the renderer within the player.
*
* <p>Must only be used after the renderer has been initialized by the player.
*/
protected final int getIndex() {
return index;
}
/**
* Returns the {@link PlayerId} of the player using this renderer.
*
* <p>Must only be used after the renderer has been initialized by the player.
*/
protected final PlayerId getPlayerId() {
return checkNotNull(playerId);
}
/**
* Creates an {@link ExoPlaybackException} of type {@link ExoPlaybackException#TYPE_RENDERER} for
* this renderer.
*
* @param cause The cause of the exception.
* @param format The current format used by the renderer. May be null.
* @param errorCode A {@link PlaybackException.ErrorCode} to identify the cause of the playback
* failure.
* @return The created instance, in which {@link ExoPlaybackException#isRecoverable} is {@code
* false}.
*/
protected final ExoPlaybackException createRendererException(
Exception cause, @Nullable Format format) {
@FormatSupport int formatSupport = RendererCapabilities.FORMAT_HANDLED;
Throwable cause, @Nullable Format format, @PlaybackException.ErrorCode int errorCode) {
return createRendererException(cause, format, /* isRecoverable= */ false, errorCode);
}
/**
* Creates an {@link ExoPlaybackException} of type {@link ExoPlaybackException#TYPE_RENDERER} for
* this renderer.
*
* @param cause The cause of the exception.
* @param format The current format used by the renderer. May be null.
* @param isRecoverable If the error is recoverable by disabling and re-enabling the renderer.
* @param errorCode A {@link PlaybackException.ErrorCode} to identify the cause of the playback
* failure.
* @return The created instance.
*/
protected final ExoPlaybackException createRendererException(
Throwable cause,
@Nullable Format format,
boolean isRecoverable,
@PlaybackException.ErrorCode int errorCode) {
@C.FormatSupport int formatSupport = C.FORMAT_HANDLED;
if (format != null && !throwRendererExceptionIsExecuting) {
// Prevent recursive re-entry from subclass supportsFormat implementations.
throwRendererExceptionIsExecuting = true;
@ -356,7 +397,8 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
throwRendererExceptionIsExecuting = false;
}
}
return ExoPlaybackException.createForRenderer(cause, getIndex(), format, formatSupport);
return ExoPlaybackException.createForRenderer(
cause, getName(), getIndex(), format, formatSupport, isRecoverable, errorCode);
}
/**
@ -364,30 +406,38 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
* {@link C#RESULT_BUFFER_READ} is only returned if {@link #setCurrentStreamFinal()} has been
* called. {@link C#RESULT_NOTHING_READ} is returned otherwise.
*
* <p>This method may be called when the renderer is in the following states: {@link
* #STATE_ENABLED}, {@link #STATE_STARTED}.
*
* @param formatHolder A {@link FormatHolder} to populate in the case of reading a format.
* @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the
* end of the stream. If the end of the stream has been reached, the {@link
* C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer.
* @param formatRequired Whether the caller requires that the format of the stream be read even if
* it's not changing. A sample will never be read if set to true, however it is still possible
* for the end of stream or nothing to be read.
* @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or
* {@link C#RESULT_BUFFER_READ}.
* @param readFlags Flags controlling the behavior of this read operation.
* @return The {@link ReadDataResult result} of the read operation.
* @throws InsufficientCapacityException If the {@code buffer} has insufficient capacity to hold
* the data of a sample being read. The buffer {@link DecoderInputBuffer#timeUs timestamp} and
* flags are populated if this exception is thrown, but the read position is not advanced.
*/
protected final int readSource(
FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired) {
int result = stream.readData(formatHolder, buffer, formatRequired);
protected final @ReadDataResult int readSource(
FormatHolder formatHolder, DecoderInputBuffer buffer, @ReadFlags int readFlags) {
@ReadDataResult
int result = Assertions.checkNotNull(stream).readData(formatHolder, buffer, readFlags);
if (result == C.RESULT_BUFFER_READ) {
if (buffer.isEndOfStream()) {
readingPositionUs = C.TIME_END_OF_SOURCE;
return streamIsFinal ? C.RESULT_BUFFER_READ : C.RESULT_NOTHING_READ;
}
buffer.timeUs += streamOffsetUs;
readingPositionUs = Math.max(readingPositionUs, buffer.timeUs);
readingPositionUs = max(readingPositionUs, buffer.timeUs);
} else if (result == C.RESULT_FORMAT_READ) {
Format format = formatHolder.format;
Format format = Assertions.checkNotNull(formatHolder.format);
if (format.subsampleOffsetUs != Format.OFFSET_SAMPLE_RELATIVE) {
format = format.copyWithSubsampleOffsetUs(format.subsampleOffsetUs + streamOffsetUs);
format =
format
.buildUpon()
.setSubsampleOffsetUs(format.subsampleOffsetUs + streamOffsetUs)
.build();
formatHolder.format = format;
}
}
@ -398,39 +448,23 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
* Attempts to skip to the keyframe before the specified position, or to the end of the stream if
* {@code positionUs} is beyond it.
*
* <p>This method may be called when the renderer is in the following states: {@link
* #STATE_ENABLED}, {@link #STATE_STARTED}.
*
* @param positionUs The position in microseconds.
* @return The number of samples that were skipped.
*/
protected int skipSource(long positionUs) {
return stream.skipData(positionUs - streamOffsetUs);
return Assertions.checkNotNull(stream).skipData(positionUs - streamOffsetUs);
}
/**
* Returns whether the upstream source is ready.
*
* <p>This method may be called when the renderer is in the following states: {@link
* #STATE_ENABLED}, {@link #STATE_STARTED}.
*/
protected final boolean isSourceReady() {
return hasReadStreamToEnd() ? streamIsFinal : stream.isReady();
return hasReadStreamToEnd() ? streamIsFinal : Assertions.checkNotNull(stream).isReady();
}
/**
* Returns whether {@code drmSessionManager} supports the specified {@code drmInitData}, or true
* if {@code drmInitData} is null.
*
* @param drmSessionManager The drm session manager.
* @param drmInitData {@link DrmInitData} of the format to check for support.
* @return Whether {@code drmSessionManager} supports the specified {@code drmInitData}, or
* true if {@code drmInitData} is null.
*/
protected static boolean supportsFormatDrm(@Nullable DrmSessionManager<?> drmSessionManager,
@Nullable DrmInitData drmInitData) {
if (drmInitData == null) {
// Content is unencrypted.
return true;
} else if (drmSessionManager == null) {
// Content is encrypted, but no drm session manager is available.
return false;
}
return drmSessionManager.canAcquireSession(drmInitData);
}
}

View file

@@ -0,0 +1,125 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import android.os.Binder;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Parcel;
import android.os.RemoteException;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
* A {@link Binder} to transfer a list of {@link Bundle Bundles} across processes by splitting the
* list into multiple transactions.
*
* <p>Note: Using this class causes synchronous binder calls in the opposite direction regardless of
* the "oneway" property.
*
* <p>Example usage:
*
* <pre>{@code
* // Sender
* List<Bundle> list = ...;
* IBinder binder = new BundleListRetriever(list);
* Bundle bundle = new Bundle();
* bundle.putBinder("list", binder);
*
* // Receiver
* Bundle bundle = ...; // Received from the sender
* IBinder binder = bundle.getBinder("list");
* List<Bundle> list = BundleListRetriever.getList(binder);
* }</pre>
*/
public final class BundleListRetriever extends Binder {

  // Soft limit of an IPC buffer size: the platform's suggested maximum on API 30+,
  // a conservative 64 KiB on older releases. Each reply parcel is kept under this.
  private static final int SUGGESTED_MAX_IPC_SIZE =
      Util.SDK_INT >= 30 ? IBinder.getSuggestedMaxIpcSizeBytes() : 64 * 1024;

  // Reply protocol: the reply parcel is a run of (REPLY_CONTINUE, Bundle) pairs
  // terminated by either REPLY_BREAK (more items remain — the receiver must issue
  // another transaction) or REPLY_END_OF_LIST (all items have been sent).
  private static final int REPLY_END_OF_LIST = 0;
  private static final int REPLY_CONTINUE = 1;
  private static final int REPLY_BREAK = 2;

  // Immutable snapshot of the bundles to transfer across the binder.
  private final ImmutableList<Bundle> list;

  /** Creates a {@link Binder} to send a list of {@link Bundle Bundles} to another process. */
  public BundleListRetriever(List<Bundle> list) {
    this.list = ImmutableList.copyOf(list);
  }

  @Override
  protected boolean onTransact(int code, Parcel data, @Nullable Parcel reply, int flags)
      throws RemoteException {
    // Only the custom FIRST_CALL_TRANSACTION code is handled here; delegate any
    // other (e.g. system) transaction codes to the base Binder implementation.
    if (code != FIRST_CALL_TRANSACTION) {
      return super.onTransact(code, data, reply, flags);
    }
    if (reply == null) {
      return false;
    }
    int count = list.size();
    // The receiver sends the index of the first bundle it still needs.
    int index = data.readInt();
    // Write as many bundles as fit under the soft IPC size limit.
    while (index < count && reply.dataSize() < SUGGESTED_MAX_IPC_SIZE) {
      reply.writeInt(REPLY_CONTINUE);
      reply.writeBundle(list.get(index));
      index++;
    }
    // Tell the receiver whether another transaction is required.
    reply.writeInt(index < count ? REPLY_BREAK : REPLY_END_OF_LIST);
    return true;
  }

  /**
   * Gets a list of {@link Bundle Bundles} from a {@link BundleListRetriever}.
   *
   * @param binder A binder interface backed by {@link BundleListRetriever}.
   * @return The list of {@link Bundle Bundles}.
   */
  public static ImmutableList<Bundle> getList(IBinder binder) {
    ImmutableList.Builder<Bundle> builder = ImmutableList.builder();
    int index = 0;
    int replyCode = REPLY_CONTINUE;
    // Keep issuing transactions until the sender signals the end of the list.
    while (replyCode != REPLY_END_OF_LIST) {
      Parcel data = Parcel.obtain();
      Parcel reply = Parcel.obtain();
      try {
        // Ask for bundles starting at the first index not yet received.
        data.writeInt(index);
        try {
          binder.transact(FIRST_CALL_TRANSACTION, data, reply, /* flags= */ 0);
        } catch (RemoteException e) {
          // A failed binder transport here is unrecoverable; surface it unchecked.
          throw new RuntimeException(e);
        }
        // Drain this chunk's (REPLY_CONTINUE, Bundle) pairs; the final readInt is
        // the terminator (REPLY_BREAK or REPLY_END_OF_LIST) controlling the loop.
        while ((replyCode = reply.readInt()) == REPLY_CONTINUE) {
          builder.add(checkNotNull(reply.readBundle()));
          index++;
        }
      } finally {
        // Parcels are pooled objects; always recycle them.
        reply.recycle();
        data.recycle();
      }
    }
    return builder.build();
  }
}

View file

@@ -0,0 +1,52 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import android.os.Bundle;
/**
* Interface for classes whose instance can be stored in a {@link Bundle} by {@link #toBundle()} and
* can be restored from the {@link Bundle} by using the static {@code CREATOR} field that implements
* {@link Bundleable.Creator}.
*
* <p>For example, a {@link Bundleable} class {@code Foo} supports the following:
*
* <pre>{@code
* Foo foo = ...;
* Bundle fooBundle = foo.toBundle();
* Foo restoredFoo = Foo.CREATOR.fromBundle(fooBundle);
* assertThat(restoredFoo).isEqualTo(foo);
* }</pre>
*/
public interface Bundleable {

  /** Returns a {@link Bundle} representing the information stored in this object. */
  Bundle toBundle();

  /**
   * Interface for the static {@code CREATOR} field of {@link Bundleable} classes.
   *
   * <p>Implementations are typically exposed as {@code public static final Creator<Foo> CREATOR}
   * on the {@link Bundleable} class itself.
   */
  interface Creator<T extends Bundleable> {

    /**
     * Restores a {@link Bundleable} instance from a {@link Bundle} produced by {@link
     * Bundleable#toBundle()}.
     *
     * <p>It guarantees the compatibility of {@link Bundle} representations produced by different
     * versions of {@link Bundleable#toBundle()} by providing best default values for missing
     * fields. It throws an exception if any essential fields are missing.
     */
    T fromBundle(Bundle bundle);
  }
}

View file

@@ -1,75 +0,0 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import com.google.android.exoplayer2.Player.RepeatMode;
/**
* Dispatches operations to the {@link Player}.
* <p>
* Implementations may choose to suppress (e.g. prevent playback from resuming if audio focus is
* denied) or modify (e.g. change the seek position to prevent a user from seeking past a
* non-skippable advert) operations.
*/
public interface ControlDispatcher {

  /**
   * Dispatches a {@link Player#setPlayWhenReady(boolean)} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param playWhenReady Whether playback should proceed when ready.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchSetPlayWhenReady(Player player, boolean playWhenReady);

  /**
   * Dispatches a {@link Player#seekTo(int, long)} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param windowIndex The index of the window.
   * @param positionMs The seek position in the specified window, or {@link C#TIME_UNSET} to seek
   *     to the window's default position.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchSeekTo(Player player, int windowIndex, long positionMs);

  /**
   * Dispatches a {@link Player#setRepeatMode(int)} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param repeatMode The repeat mode.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchSetRepeatMode(Player player, @RepeatMode int repeatMode);

  /**
   * Dispatches a {@link Player#setShuffleModeEnabled(boolean)} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param shuffleModeEnabled Whether shuffling is enabled.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchSetShuffleModeEnabled(Player player, boolean shuffleModeEnabled);

  /**
   * Dispatches a {@link Player#stop()} operation.
   *
   * @param player The {@link Player} to which the operation should be dispatched.
   * @param reset Whether the player should be reset.
   * @return True if the operation was dispatched. False if suppressed.
   */
  boolean dispatchStop(Player player, boolean reset);
}

View file

@@ -1,55 +0,0 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import com.google.android.exoplayer2.Player.RepeatMode;
/**
* Default {@link ControlDispatcher} that dispatches all operations to the player without
* modification.
*/
public class DefaultControlDispatcher implements ControlDispatcher {

  // Every dispatch method forwards the operation to the player unchanged and
  // returns true, i.e. this dispatcher never suppresses or modifies operations.

  @Override
  public boolean dispatchSetPlayWhenReady(Player player, boolean playWhenReady) {
    player.setPlayWhenReady(playWhenReady);
    return true;
  }

  @Override
  public boolean dispatchSeekTo(Player player, int windowIndex, long positionMs) {
    player.seekTo(windowIndex, positionMs);
    return true;
  }

  @Override
  public boolean dispatchSetRepeatMode(Player player, @RepeatMode int repeatMode) {
    player.setRepeatMode(repeatMode);
    return true;
  }

  @Override
  public boolean dispatchSetShuffleModeEnabled(Player player, boolean shuffleModeEnabled) {
    player.setShuffleModeEnabled(shuffleModeEnabled);
    return true;
  }

  @Override
  public boolean dispatchStop(Player player, boolean reset) {
    player.stop(reset);
    return true;
  }
}

View file

@ -0,0 +1,458 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import static com.google.common.primitives.Longs.max;
import static java.lang.Math.abs;
import static java.lang.Math.max;
import android.os.SystemClock;
import com.google.android.exoplayer2.MediaItem.LiveConfiguration;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
/**
* A {@link LivePlaybackSpeedControl} that adjusts the playback speed using a proportional
* controller.
*
* <p>The control mechanism calculates the adjusted speed as {@code 1.0 + proportionalControlFactor
* x (currentLiveOffsetSec - targetLiveOffsetSec)}. Unit speed (1.0f) is used, if the {@code
* currentLiveOffsetSec} is closer to {@code targetLiveOffsetSec} than the value set with {@link
* Builder#setMaxLiveOffsetErrorMsForUnitSpeed(long)}.
*
* <p>The resulting speed is clamped to a minimum and maximum speed defined by the media, the
* fallback values set with {@link Builder#setFallbackMinPlaybackSpeed(float)} and {@link
* Builder#setFallbackMaxPlaybackSpeed(float)} or the {@link #DEFAULT_FALLBACK_MIN_PLAYBACK_SPEED
* minimum} and {@link #DEFAULT_FALLBACK_MAX_PLAYBACK_SPEED maximum} fallback default values.
*
* <p>When the player rebuffers, the target live offset {@link
* Builder#setTargetLiveOffsetIncrementOnRebufferMs(long) is increased} to adjust to the reduced
* network capabilities. The live playback speed control also {@link
* Builder#setMinPossibleLiveOffsetSmoothingFactor(float) keeps track} of the minimum possible live
* offset to decrease the target live offset again if conditions improve. The minimum possible live
* offset is derived from the current offset and the duration of buffered media.
*/
public final class DefaultLivePlaybackSpeedControl implements LivePlaybackSpeedControl {
/**
* The default minimum factor by which playback can be sped up that should be used if no minimum
* playback speed is defined by the media.
*/
public static final float DEFAULT_FALLBACK_MIN_PLAYBACK_SPEED = 0.97f;
/**
* The default maximum factor by which playback can be sped up that should be used if no maximum
* playback speed is defined by the media.
*/
public static final float DEFAULT_FALLBACK_MAX_PLAYBACK_SPEED = 1.03f;
/**
* The default {@link Builder#setMinUpdateIntervalMs(long) minimum interval} between playback
* speed changes, in milliseconds.
*/
public static final long DEFAULT_MIN_UPDATE_INTERVAL_MS = 1_000;
/**
* The default {@link Builder#setProportionalControlFactor(float) proportional control factor}
* used to adjust the playback speed.
*/
public static final float DEFAULT_PROPORTIONAL_CONTROL_FACTOR = 0.1f;
/**
* The default increment applied to the target live offset each time the player is rebuffering, in
* milliseconds
*/
public static final long DEFAULT_TARGET_LIVE_OFFSET_INCREMENT_ON_REBUFFER_MS = 500;
/**
* The default smoothing factor when smoothing the minimum possible live offset that can be
* achieved during playback.
*/
public static final float DEFAULT_MIN_POSSIBLE_LIVE_OFFSET_SMOOTHING_FACTOR = 0.999f;
/**
* The default maximum difference between the current live offset and the target live offset, in
* milliseconds, for which unit speed (1.0f) is used.
*/
public static final long DEFAULT_MAX_LIVE_OFFSET_ERROR_MS_FOR_UNIT_SPEED = 20;
/** Builder for a {@link DefaultLivePlaybackSpeedControl}. */
public static final class Builder {
private float fallbackMinPlaybackSpeed;
private float fallbackMaxPlaybackSpeed;
private long minUpdateIntervalMs;
private float proportionalControlFactorUs;
private long maxLiveOffsetErrorUsForUnitSpeed;
private long targetLiveOffsetIncrementOnRebufferUs;
private float minPossibleLiveOffsetSmoothingFactor;
/** Creates a builder. */
public Builder() {
fallbackMinPlaybackSpeed = DEFAULT_FALLBACK_MIN_PLAYBACK_SPEED;
fallbackMaxPlaybackSpeed = DEFAULT_FALLBACK_MAX_PLAYBACK_SPEED;
minUpdateIntervalMs = DEFAULT_MIN_UPDATE_INTERVAL_MS;
proportionalControlFactorUs = DEFAULT_PROPORTIONAL_CONTROL_FACTOR / C.MICROS_PER_SECOND;
maxLiveOffsetErrorUsForUnitSpeed =
Util.msToUs(DEFAULT_MAX_LIVE_OFFSET_ERROR_MS_FOR_UNIT_SPEED);
targetLiveOffsetIncrementOnRebufferUs =
Util.msToUs(DEFAULT_TARGET_LIVE_OFFSET_INCREMENT_ON_REBUFFER_MS);
minPossibleLiveOffsetSmoothingFactor = DEFAULT_MIN_POSSIBLE_LIVE_OFFSET_SMOOTHING_FACTOR;
}
/**
* Sets the minimum playback speed that should be used if no minimum playback speed is defined
* by the media.
*
* <p>The default is {@link #DEFAULT_FALLBACK_MIN_PLAYBACK_SPEED}.
*
* @param fallbackMinPlaybackSpeed The fallback minimum factor by which playback can be sped up.
* @return This builder, for convenience.
*/
@CanIgnoreReturnValue
public Builder setFallbackMinPlaybackSpeed(float fallbackMinPlaybackSpeed) {
Assertions.checkArgument(0 < fallbackMinPlaybackSpeed && fallbackMinPlaybackSpeed <= 1f);
this.fallbackMinPlaybackSpeed = fallbackMinPlaybackSpeed;
return this;
}
/**
* Sets the maximum playback speed that should be used if no maximum playback speed is defined
* by the media.
*
* <p>The default is {@link #DEFAULT_FALLBACK_MAX_PLAYBACK_SPEED}.
*
* @param fallbackMaxPlaybackSpeed The fallback maximum factor by which playback can be sped up.
* @return This builder, for convenience.
*/
@CanIgnoreReturnValue
public Builder setFallbackMaxPlaybackSpeed(float fallbackMaxPlaybackSpeed) {
Assertions.checkArgument(fallbackMaxPlaybackSpeed >= 1f);
this.fallbackMaxPlaybackSpeed = fallbackMaxPlaybackSpeed;
return this;
}
/**
* Sets the minimum interval between playback speed changes, in milliseconds.
*
* <p>The default is {@link #DEFAULT_MIN_UPDATE_INTERVAL_MS}.
*
* @param minUpdateIntervalMs The minimum interval between playback speed changes, in
* milliseconds.
* @return This builder, for convenience.
*/
@CanIgnoreReturnValue
public Builder setMinUpdateIntervalMs(long minUpdateIntervalMs) {
Assertions.checkArgument(minUpdateIntervalMs > 0);
this.minUpdateIntervalMs = minUpdateIntervalMs;
return this;
}
/**
 * Sets the proportional control factor used when adjusting the playback speed.
 *
 * <p>Playback is sped up by a factor of {@code 1.0 + proportionalControlFactor x
 * (currentLiveOffsetSec - targetLiveOffsetSec)}.
 *
 * <p>Defaults to {@link #DEFAULT_PROPORTIONAL_CONTROL_FACTOR}.
 *
 * @param proportionalControlFactor The proportional control factor used to adjust the playback
 *     speed.
 * @return This builder, for convenience.
 */
@CanIgnoreReturnValue
public Builder setProportionalControlFactor(float proportionalControlFactor) {
  boolean isPositive = proportionalControlFactor > 0;
  Assertions.checkArgument(isPositive);
  // Store the factor per microsecond, since offsets are tracked in microseconds internally.
  this.proportionalControlFactorUs = proportionalControlFactor / C.MICROS_PER_SECOND;
  return this;
}
/**
 * Sets the largest difference between the current and the target live offset, in milliseconds,
 * for which playback still proceeds at unit speed (1.0f).
 *
 * <p>Defaults to {@link #DEFAULT_MAX_LIVE_OFFSET_ERROR_MS_FOR_UNIT_SPEED}.
 *
 * @param maxLiveOffsetErrorMsForUnitSpeed The maximum live offset error for which unit speed is
 *     used, in milliseconds.
 * @return This builder, for convenience.
 */
@CanIgnoreReturnValue
public Builder setMaxLiveOffsetErrorMsForUnitSpeed(long maxLiveOffsetErrorMsForUnitSpeed) {
  boolean isPositive = maxLiveOffsetErrorMsForUnitSpeed > 0;
  Assertions.checkArgument(isPositive);
  // Convert once here; all internal bookkeeping happens in microseconds.
  this.maxLiveOffsetErrorUsForUnitSpeed = Util.msToUs(maxLiveOffsetErrorMsForUnitSpeed);
  return this;
}
/**
 * Sets by how much the target live offset grows, in milliseconds, each time the player
 * rebuffers.
 *
 * @param targetLiveOffsetIncrementOnRebufferMs The increment applied to the target live offset
 *     when the player is rebuffering, in milliseconds
 * @return This builder, for convenience.
 */
@CanIgnoreReturnValue
public Builder setTargetLiveOffsetIncrementOnRebufferMs(
    long targetLiveOffsetIncrementOnRebufferMs) {
  // Zero is allowed (no growth on rebuffer); negative increments are not.
  boolean isNonNegative = targetLiveOffsetIncrementOnRebufferMs >= 0;
  Assertions.checkArgument(isNonNegative);
  this.targetLiveOffsetIncrementOnRebufferUs =
      Util.msToUs(targetLiveOffsetIncrementOnRebufferMs);
  return this;
}
/**
 * Sets the exponential smoothing factor applied when tracking the minimum possible live offset
 * achievable during playback.
 *
 * <p>The control keeps an estimate of {@code currentLiveOffset - bufferedDuration} (the
 * smallest offset that could be reached) to decide whether the current target offset can be
 * reduced. Because that value fluctuates, it is smoothed over recent samples via {@code
 * smoothedMinPossibleOffset = smoothingFactor x smoothedMinPossibleOffset + (1-smoothingFactor)
 * x currentMinPossibleOffset}.
 *
 * @param minPossibleLiveOffsetSmoothingFactor The smoothing factor. Must be &ge; 0 and &lt; 1.
 * @return This builder, for convenience.
 */
@CanIgnoreReturnValue
public Builder setMinPossibleLiveOffsetSmoothingFactor(
    float minPossibleLiveOffsetSmoothingFactor) {
  // A valid exponential smoothing factor lies in [0, 1).
  boolean inRange =
      minPossibleLiveOffsetSmoothingFactor >= 0 && minPossibleLiveOffsetSmoothingFactor < 1f;
  Assertions.checkArgument(inRange);
  this.minPossibleLiveOffsetSmoothingFactor = minPossibleLiveOffsetSmoothingFactor;
  return this;
}
/** Builds the configured {@link DefaultLivePlaybackSpeedControl} instance. */
public DefaultLivePlaybackSpeedControl build() {
  DefaultLivePlaybackSpeedControl speedControl =
      new DefaultLivePlaybackSpeedControl(
          fallbackMinPlaybackSpeed,
          fallbackMaxPlaybackSpeed,
          minUpdateIntervalMs,
          proportionalControlFactorUs,
          maxLiveOffsetErrorUsForUnitSpeed,
          targetLiveOffsetIncrementOnRebufferUs,
          minPossibleLiveOffsetSmoothingFactor);
  return speedControl;
}
}
// Immutable configuration supplied by the Builder (see Builder setters for semantics).
private final float fallbackMinPlaybackSpeed;
private final float fallbackMaxPlaybackSpeed;
private final long minUpdateIntervalMs;
// Proportional control factor per microsecond (the Builder divides the per-second factor).
private final float proportionalControlFactor;
private final long maxLiveOffsetErrorUsForUnitSpeed;
// Amount added to the current target live offset on each rebuffer, in microseconds.
private final long targetLiveOffsetRebufferDeltaUs;
private final float minPossibleLiveOffsetSmoothingFactor;
// Target offset defined by the media, in microseconds, or C.TIME_UNSET if not applicable.
private long mediaConfigurationTargetLiveOffsetUs;
// Explicit target-offset override, in microseconds, or C.TIME_UNSET if none was set.
private long targetLiveOffsetOverrideUs;
// The ideal (pre-adjustment) target offset derived from media config, override and bounds.
private long idealTargetLiveOffsetUs;
private long minTargetLiveOffsetUs;
private long maxTargetLiveOffsetUs;
// The currently effective target offset, after rebuffer increments and safety adjustments.
private long currentTargetLiveOffsetUs;
private float maxPlaybackSpeed;
private float minPlaybackSpeed;
// Last speed returned by getAdjustedPlaybackSpeed; reused while updates are throttled.
private float adjustedPlaybackSpeed;
// SystemClock.elapsedRealtime() of the last speed update; C.TIME_UNSET forces a recompute.
private long lastPlaybackSpeedUpdateMs;
// Exponentially smoothed minimum achievable live offset and its smoothed absolute deviation.
private long smoothedMinPossibleLiveOffsetUs;
private long smoothedMinPossibleLiveOffsetDeviationUs;
/**
 * Creates the speed control with configuration values pre-converted by the {@link Builder}.
 * All mutable state starts as "unset", so no speed adjustment happens until a live
 * configuration is provided via {@code setLiveConfiguration}.
 */
private DefaultLivePlaybackSpeedControl(
float fallbackMinPlaybackSpeed,
float fallbackMaxPlaybackSpeed,
long minUpdateIntervalMs,
float proportionalControlFactor,
long maxLiveOffsetErrorUsForUnitSpeed,
long targetLiveOffsetRebufferDeltaUs,
float minPossibleLiveOffsetSmoothingFactor) {
this.fallbackMinPlaybackSpeed = fallbackMinPlaybackSpeed;
this.fallbackMaxPlaybackSpeed = fallbackMaxPlaybackSpeed;
this.minUpdateIntervalMs = minUpdateIntervalMs;
this.proportionalControlFactor = proportionalControlFactor;
this.maxLiveOffsetErrorUsForUnitSpeed = maxLiveOffsetErrorUsForUnitSpeed;
this.targetLiveOffsetRebufferDeltaUs = targetLiveOffsetRebufferDeltaUs;
this.minPossibleLiveOffsetSmoothingFactor = minPossibleLiveOffsetSmoothingFactor;
// Initialize all offset and timing state to "unset".
mediaConfigurationTargetLiveOffsetUs = C.TIME_UNSET;
targetLiveOffsetOverrideUs = C.TIME_UNSET;
minTargetLiveOffsetUs = C.TIME_UNSET;
maxTargetLiveOffsetUs = C.TIME_UNSET;
// Until a live configuration arrives, fall back to the builder-provided speed bounds.
minPlaybackSpeed = fallbackMinPlaybackSpeed;
maxPlaybackSpeed = fallbackMaxPlaybackSpeed;
adjustedPlaybackSpeed = 1.0f;
lastPlaybackSpeedUpdateMs = C.TIME_UNSET;
idealTargetLiveOffsetUs = C.TIME_UNSET;
currentTargetLiveOffsetUs = C.TIME_UNSET;
smoothedMinPossibleLiveOffsetUs = C.TIME_UNSET;
smoothedMinPossibleLiveOffsetDeviationUs = C.TIME_UNSET;
}
@Override
public void setLiveConfiguration(LiveConfiguration liveConfiguration) {
// Convert the media-defined offsets to microseconds for internal bookkeeping.
mediaConfigurationTargetLiveOffsetUs = Util.msToUs(liveConfiguration.targetOffsetMs);
minTargetLiveOffsetUs = Util.msToUs(liveConfiguration.minOffsetMs);
maxTargetLiveOffsetUs = Util.msToUs(liveConfiguration.maxOffsetMs);
// Use the media-defined speed bounds when present, otherwise the builder-provided fallbacks.
minPlaybackSpeed =
liveConfiguration.minPlaybackSpeed != C.RATE_UNSET
? liveConfiguration.minPlaybackSpeed
: fallbackMinPlaybackSpeed;
maxPlaybackSpeed =
liveConfiguration.maxPlaybackSpeed != C.RATE_UNSET
? liveConfiguration.maxPlaybackSpeed
: fallbackMaxPlaybackSpeed;
if (minPlaybackSpeed == 1f && maxPlaybackSpeed == 1f) {
// Don't bother calculating adjustments if it's not possible to change the speed.
mediaConfigurationTargetLiveOffsetUs = C.TIME_UNSET;
}
// Re-derive the ideal target offset from the new configuration.
maybeResetTargetLiveOffsetUs();
}
@Override
public void setTargetLiveOffsetOverrideUs(long liveOffsetUs) {
  // Record the override and immediately re-derive the ideal target offset.
  this.targetLiveOffsetOverrideUs = liveOffsetUs;
  maybeResetTargetLiveOffsetUs();
}
@Override
public void notifyRebuffer() {
  if (currentTargetLiveOffsetUs == C.TIME_UNSET) {
    // No active target offset to grow.
    return;
  }
  // Grow the target offset by the configured rebuffer delta, capped at the maximum if set.
  long increasedOffsetUs = currentTargetLiveOffsetUs + targetLiveOffsetRebufferDeltaUs;
  if (maxTargetLiveOffsetUs != C.TIME_UNSET) {
    increasedOffsetUs = Math.min(increasedOffsetUs, maxTargetLiveOffsetUs);
  }
  currentTargetLiveOffsetUs = increasedOffsetUs;
  // Force a speed recompute on the next getAdjustedPlaybackSpeed call.
  lastPlaybackSpeedUpdateMs = C.TIME_UNSET;
}
@Override
public float getAdjustedPlaybackSpeed(long liveOffsetUs, long bufferedDurationUs) {
// Without a target offset there is nothing to adjust towards; play at unit speed.
if (mediaConfigurationTargetLiveOffsetUs == C.TIME_UNSET) {
return 1f;
}
updateSmoothedMinPossibleLiveOffsetUs(liveOffsetUs, bufferedDurationUs);
// Throttle: within the minimum update interval, keep returning the previous speed.
if (lastPlaybackSpeedUpdateMs != C.TIME_UNSET
&& SystemClock.elapsedRealtime() - lastPlaybackSpeedUpdateMs < minUpdateIntervalMs) {
return adjustedPlaybackSpeed;
}
lastPlaybackSpeedUpdateMs = SystemClock.elapsedRealtime();
adjustTargetLiveOffsetUs(liveOffsetUs);
long liveOffsetErrorUs = liveOffsetUs - currentTargetLiveOffsetUs;
if (Math.abs(liveOffsetErrorUs) < maxLiveOffsetErrorUsForUnitSpeed) {
// Close enough to the target: use unit speed.
adjustedPlaybackSpeed = 1f;
} else {
// Proportional control: deviate from unit speed in proportion to the offset error,
// constrained to the allowed [minPlaybackSpeed, maxPlaybackSpeed] range.
float calculatedSpeed = 1f + proportionalControlFactor * liveOffsetErrorUs;
adjustedPlaybackSpeed =
Util.constrainValue(calculatedSpeed, minPlaybackSpeed, maxPlaybackSpeed);
}
return adjustedPlaybackSpeed;
}
@Override
public long getTargetLiveOffsetUs() {
// The currently effective target live offset, in microseconds (C.TIME_UNSET if none).
return currentTargetLiveOffsetUs;
}
/**
 * Recomputes the ideal target live offset from the media configuration, the optional override
 * and the configured bounds. If the ideal value changed, all adjustment state is reset.
 */
private void maybeResetTargetLiveOffsetUs() {
  long newIdealOffsetUs = C.TIME_UNSET;
  if (mediaConfigurationTargetLiveOffsetUs != C.TIME_UNSET) {
    // An explicit override takes precedence over the media-defined target offset.
    newIdealOffsetUs =
        targetLiveOffsetOverrideUs != C.TIME_UNSET
            ? targetLiveOffsetOverrideUs
            : mediaConfigurationTargetLiveOffsetUs;
    // Clamp into [minTargetLiveOffsetUs, maxTargetLiveOffsetUs] where those bounds are set.
    if (minTargetLiveOffsetUs != C.TIME_UNSET) {
      newIdealOffsetUs = Math.max(newIdealOffsetUs, minTargetLiveOffsetUs);
    }
    if (maxTargetLiveOffsetUs != C.TIME_UNSET) {
      newIdealOffsetUs = Math.min(newIdealOffsetUs, maxTargetLiveOffsetUs);
    }
  }
  if (idealTargetLiveOffsetUs == newIdealOffsetUs) {
    // Unchanged: keep the current adjustment state as-is.
    return;
  }
  idealTargetLiveOffsetUs = newIdealOffsetUs;
  currentTargetLiveOffsetUs = newIdealOffsetUs;
  // Restart smoothing and update throttling from scratch for the new target.
  smoothedMinPossibleLiveOffsetUs = C.TIME_UNSET;
  smoothedMinPossibleLiveOffsetDeviationUs = C.TIME_UNSET;
  lastPlaybackSpeedUpdateMs = C.TIME_UNSET;
}
// Updates the exponentially smoothed estimate of the smallest live offset that is currently
// achievable (current offset minus buffered media), plus its smoothed absolute deviation.
private void updateSmoothedMinPossibleLiveOffsetUs(long liveOffsetUs, long bufferedDurationUs) {
long minPossibleLiveOffsetUs = liveOffsetUs - bufferedDurationUs;
if (smoothedMinPossibleLiveOffsetUs == C.TIME_UNSET) {
// First sample: seed the estimate directly, with zero deviation.
smoothedMinPossibleLiveOffsetUs = minPossibleLiveOffsetUs;
smoothedMinPossibleLiveOffsetDeviationUs = 0;
} else {
// Use the maximum here to ensure we keep track of the upper bound of what is safely possible,
// not the average.
smoothedMinPossibleLiveOffsetUs =
max(
minPossibleLiveOffsetUs,
smooth(
smoothedMinPossibleLiveOffsetUs,
minPossibleLiveOffsetUs,
minPossibleLiveOffsetSmoothingFactor));
long minPossibleLiveOffsetDeviationUs =
abs(minPossibleLiveOffsetUs - smoothedMinPossibleLiveOffsetUs);
smoothedMinPossibleLiveOffsetDeviationUs =
smooth(
smoothedMinPossibleLiveOffsetDeviationUs,
minPossibleLiveOffsetDeviationUs,
minPossibleLiveOffsetSmoothingFactor);
}
}
// Moves currentTargetLiveOffsetUs towards a value that is both safely achievable (above the
// smoothed minimum possible offset plus a deviation margin) and as close to the ideal target
// as possible, while limiting how fast the target may move between updates.
private void adjustTargetLiveOffsetUs(long liveOffsetUs) {
// Stay in a safe distance (3 standard deviations = >99%) to the minimum possible live offset.
long safeOffsetUs =
smoothedMinPossibleLiveOffsetUs + 3 * smoothedMinPossibleLiveOffsetDeviationUs;
if (currentTargetLiveOffsetUs > safeOffsetUs) {
// There is room for decreasing the target offset towards the ideal or safe offset (whichever
// is larger). We want to limit the decrease so that the playback speed delta we achieve is
// the same as the maximum delta when slowing down towards the target.
long minUpdateIntervalUs = Util.msToUs(minUpdateIntervalMs);
long decrementToOffsetCurrentSpeedUs =
(long) ((adjustedPlaybackSpeed - 1f) * minUpdateIntervalUs);
long decrementToIncreaseSpeedUs = (long) ((maxPlaybackSpeed - 1f) * minUpdateIntervalUs);
long maxDecrementUs = decrementToOffsetCurrentSpeedUs + decrementToIncreaseSpeedUs;
currentTargetLiveOffsetUs =
max(safeOffsetUs, idealTargetLiveOffsetUs, currentTargetLiveOffsetUs - maxDecrementUs);
} else {
// We'd like to reach a stable condition where the current live offset stays just below the
// safe offset. But don't increase the target offset to more than what would allow us to slow
// down gradually from the current offset.
long offsetWhenSlowingDownNowUs =
liveOffsetUs - (long) (max(0f, adjustedPlaybackSpeed - 1f) / proportionalControlFactor);
currentTargetLiveOffsetUs =
Util.constrainValue(offsetWhenSlowingDownNowUs, currentTargetLiveOffsetUs, safeOffsetUs);
// Never exceed the configured maximum target offset, if one is set.
if (maxTargetLiveOffsetUs != C.TIME_UNSET
&& currentTargetLiveOffsetUs > maxTargetLiveOffsetUs) {
currentTargetLiveOffsetUs = maxTargetLiveOffsetUs;
}
}
}
/**
 * Exponential smoothing step: combines the previous smoothed value (weighted by the smoothing
 * factor) with the new sample (weighted by its complement), truncating to a long.
 */
private static long smooth(long smoothedValue, long newValue, float smoothingFactor) {
  float weightedOld = smoothingFactor * smoothedValue;
  float weightedNew = (1f - smoothingFactor) * newValue;
  return (long) (weightedOld + weightedNew);
}
}

View file

@ -15,30 +15,33 @@
*/
package com.google.android.exoplayer2;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static java.lang.Math.max;
import static java.lang.Math.min;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.trackselection.ExoTrackSelection;
import com.google.android.exoplayer2.upstream.Allocator;
import com.google.android.exoplayer2.upstream.DefaultAllocator;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
/**
* The default {@link LoadControl} implementation.
*/
/** The default {@link LoadControl} implementation. */
public class DefaultLoadControl implements LoadControl {
/**
* The default minimum duration of media that the player will attempt to ensure is buffered at all
* times, in milliseconds. This value is only applied to playbacks without video.
* times, in milliseconds.
*/
public static final int DEFAULT_MIN_BUFFER_MS = 15000;
public static final int DEFAULT_MIN_BUFFER_MS = 50_000;
/**
* The default maximum duration of media that the player will attempt to buffer, in milliseconds.
* For playbacks with video, this is also the default minimum duration of media that the player
* will attempt to ensure is buffered.
*/
public static final int DEFAULT_MAX_BUFFER_MS = 50000;
public static final int DEFAULT_MAX_BUFFER_MS = 50_000;
/**
* The default duration of media that must be buffered for playback to start or resume following a
@ -59,7 +62,7 @@ public class DefaultLoadControl implements LoadControl {
public static final int DEFAULT_TARGET_BUFFER_BYTES = C.LENGTH_UNSET;
/** The default prioritization of buffer time constraints over size constraints. */
public static final boolean DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS = true;
public static final boolean DEFAULT_PRIORITIZE_TIME_OVER_SIZE_THRESHOLDS = false;
/** The default back buffer duration in milliseconds. */
public static final int DEFAULT_BACK_BUFFER_DURATION_MS = 0;
@ -68,10 +71,10 @@ public class DefaultLoadControl implements LoadControl {
public static final boolean DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME = false;
/** A default size in bytes for a video buffer. */
public static final int DEFAULT_VIDEO_BUFFER_SIZE = 500 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
public static final int DEFAULT_VIDEO_BUFFER_SIZE = 2000 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for an audio buffer. */
public static final int DEFAULT_AUDIO_BUFFER_SIZE = 54 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
public static final int DEFAULT_AUDIO_BUFFER_SIZE = 200 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for a text buffer. */
public static final int DEFAULT_TEXT_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
@ -82,16 +85,24 @@ public class DefaultLoadControl implements LoadControl {
/** A default size in bytes for a camera motion buffer. */
public static final int DEFAULT_CAMERA_MOTION_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for an image buffer. */
public static final int DEFAULT_IMAGE_BUFFER_SIZE = 2 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** A default size in bytes for a muxed buffer (e.g. containing video, audio and text). */
public static final int DEFAULT_MUXED_BUFFER_SIZE =
DEFAULT_VIDEO_BUFFER_SIZE + DEFAULT_AUDIO_BUFFER_SIZE + DEFAULT_TEXT_BUFFER_SIZE;
/**
* The buffer size in bytes that will be used as a minimum target buffer in all cases. This is
* also the default target buffer before tracks are selected.
*/
public static final int DEFAULT_MIN_BUFFER_SIZE = 200 * C.DEFAULT_BUFFER_SEGMENT_SIZE;
/** Builder for {@link DefaultLoadControl}. */
public static final class Builder {
private DefaultAllocator allocator;
private int minBufferAudioMs;
private int minBufferVideoMs;
@Nullable private DefaultAllocator allocator;
private int minBufferMs;
private int maxBufferMs;
private int bufferForPlaybackMs;
private int bufferForPlaybackAfterRebufferMs;
@ -99,12 +110,11 @@ public class DefaultLoadControl implements LoadControl {
private boolean prioritizeTimeOverSizeThresholds;
private int backBufferDurationMs;
private boolean retainBackBufferFromKeyframe;
private boolean createDefaultLoadControlCalled;
private boolean buildCalled;
/** Constructs a new instance. */
public Builder() {
minBufferAudioMs = DEFAULT_MIN_BUFFER_MS;
minBufferVideoMs = DEFAULT_MAX_BUFFER_MS;
minBufferMs = DEFAULT_MIN_BUFFER_MS;
maxBufferMs = DEFAULT_MAX_BUFFER_MS;
bufferForPlaybackMs = DEFAULT_BUFFER_FOR_PLAYBACK_MS;
bufferForPlaybackAfterRebufferMs = DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS;
@ -119,10 +129,11 @@ public class DefaultLoadControl implements LoadControl {
*
* @param allocator The {@link DefaultAllocator}.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
@CanIgnoreReturnValue
public Builder setAllocator(DefaultAllocator allocator) {
Assertions.checkState(!createDefaultLoadControlCalled);
checkState(!buildCalled);
this.allocator = allocator;
return this;
}
@ -140,14 +151,15 @@ public class DefaultLoadControl implements LoadControl {
* for playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be
* caused by buffer depletion rather than a user action.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
@CanIgnoreReturnValue
public Builder setBufferDurationsMs(
int minBufferMs,
int maxBufferMs,
int bufferForPlaybackMs,
int bufferForPlaybackAfterRebufferMs) {
Assertions.checkState(!createDefaultLoadControlCalled);
checkState(!buildCalled);
assertGreaterOrEqual(bufferForPlaybackMs, 0, "bufferForPlaybackMs", "0");
assertGreaterOrEqual(
bufferForPlaybackAfterRebufferMs, 0, "bufferForPlaybackAfterRebufferMs", "0");
@ -158,8 +170,7 @@ public class DefaultLoadControl implements LoadControl {
"minBufferMs",
"bufferForPlaybackAfterRebufferMs");
assertGreaterOrEqual(maxBufferMs, minBufferMs, "maxBufferMs", "minBufferMs");
this.minBufferAudioMs = minBufferMs;
this.minBufferVideoMs = minBufferMs;
this.minBufferMs = minBufferMs;
this.maxBufferMs = maxBufferMs;
this.bufferForPlaybackMs = bufferForPlaybackMs;
this.bufferForPlaybackAfterRebufferMs = bufferForPlaybackAfterRebufferMs;
@ -172,10 +183,11 @@ public class DefaultLoadControl implements LoadControl {
*
* @param targetBufferBytes The target buffer size in bytes.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
@CanIgnoreReturnValue
public Builder setTargetBufferBytes(int targetBufferBytes) {
Assertions.checkState(!createDefaultLoadControlCalled);
checkState(!buildCalled);
this.targetBufferBytes = targetBufferBytes;
return this;
}
@ -187,10 +199,11 @@ public class DefaultLoadControl implements LoadControl {
* @param prioritizeTimeOverSizeThresholds Whether the load control prioritizes buffer time
* constraints over buffer size constraints.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
@CanIgnoreReturnValue
public Builder setPrioritizeTimeOverSizeThresholds(boolean prioritizeTimeOverSizeThresholds) {
Assertions.checkState(!createDefaultLoadControlCalled);
checkState(!buildCalled);
this.prioritizeTimeOverSizeThresholds = prioritizeTimeOverSizeThresholds;
return this;
}
@ -203,27 +216,35 @@ public class DefaultLoadControl implements LoadControl {
* @param retainBackBufferFromKeyframe Whether the back buffer is retained from the previous
* keyframe.
* @return This builder, for convenience.
* @throws IllegalStateException If {@link #createDefaultLoadControl()} has already been called.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
@CanIgnoreReturnValue
public Builder setBackBuffer(int backBufferDurationMs, boolean retainBackBufferFromKeyframe) {
Assertions.checkState(!createDefaultLoadControlCalled);
checkState(!buildCalled);
assertGreaterOrEqual(backBufferDurationMs, 0, "backBufferDurationMs", "0");
this.backBufferDurationMs = backBufferDurationMs;
this.retainBackBufferFromKeyframe = retainBackBufferFromKeyframe;
return this;
}
/** Creates a {@link DefaultLoadControl}. */
/**
* @deprecated use {@link #build} instead.
*/
@Deprecated
public DefaultLoadControl createDefaultLoadControl() {
Assertions.checkState(!createDefaultLoadControlCalled);
createDefaultLoadControlCalled = true;
return build();
}
/** Creates a {@link DefaultLoadControl}. */
public DefaultLoadControl build() {
checkState(!buildCalled);
buildCalled = true;
if (allocator == null) {
allocator = new DefaultAllocator(/* trimOnReset= */ true, C.DEFAULT_BUFFER_SEGMENT_SIZE);
}
return new DefaultLoadControl(
allocator,
minBufferAudioMs,
minBufferVideoMs,
minBufferMs,
maxBufferMs,
bufferForPlaybackMs,
bufferForPlaybackAfterRebufferMs,
@ -236,8 +257,7 @@ public class DefaultLoadControl implements LoadControl {
private final DefaultAllocator allocator;
private final long minBufferAudioUs;
private final long minBufferVideoUs;
private final long minBufferUs;
private final long maxBufferUs;
private final long bufferForPlaybackUs;
private final long bufferForPlaybackAfterRebufferUs;
@ -246,23 +266,14 @@ public class DefaultLoadControl implements LoadControl {
private final long backBufferDurationUs;
private final boolean retainBackBufferFromKeyframe;
private int targetBufferSize;
private boolean isBuffering;
private boolean hasVideo;
private int targetBufferBytes;
private boolean isLoading;
/** Constructs a new instance, using the {@code DEFAULT_*} constants defined in this class. */
@SuppressWarnings("deprecation")
public DefaultLoadControl() {
this(new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE));
}
/** @deprecated Use {@link Builder} instead. */
@Deprecated
public DefaultLoadControl(DefaultAllocator allocator) {
this(
allocator,
/* minBufferAudioMs= */ DEFAULT_MIN_BUFFER_MS,
/* minBufferVideoMs= */ DEFAULT_MAX_BUFFER_MS,
new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE),
DEFAULT_MIN_BUFFER_MS,
DEFAULT_MAX_BUFFER_MS,
DEFAULT_BUFFER_FOR_PLAYBACK_MS,
DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS,
@ -272,8 +283,6 @@ public class DefaultLoadControl implements LoadControl {
DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME);
}
/** @deprecated Use {@link Builder} instead. */
@Deprecated
public DefaultLoadControl(
DefaultAllocator allocator,
int minBufferMs,
@ -281,61 +290,33 @@ public class DefaultLoadControl implements LoadControl {
int bufferForPlaybackMs,
int bufferForPlaybackAfterRebufferMs,
int targetBufferBytes,
boolean prioritizeTimeOverSizeThresholds) {
this(
allocator,
/* minBufferAudioMs= */ minBufferMs,
/* minBufferVideoMs= */ minBufferMs,
maxBufferMs,
bufferForPlaybackMs,
bufferForPlaybackAfterRebufferMs,
targetBufferBytes,
prioritizeTimeOverSizeThresholds,
DEFAULT_BACK_BUFFER_DURATION_MS,
DEFAULT_RETAIN_BACK_BUFFER_FROM_KEYFRAME);
}
protected DefaultLoadControl(
DefaultAllocator allocator,
int minBufferAudioMs,
int minBufferVideoMs,
int maxBufferMs,
int bufferForPlaybackMs,
int bufferForPlaybackAfterRebufferMs,
int targetBufferBytes,
boolean prioritizeTimeOverSizeThresholds,
int backBufferDurationMs,
boolean retainBackBufferFromKeyframe) {
assertGreaterOrEqual(bufferForPlaybackMs, 0, "bufferForPlaybackMs", "0");
assertGreaterOrEqual(
bufferForPlaybackAfterRebufferMs, 0, "bufferForPlaybackAfterRebufferMs", "0");
assertGreaterOrEqual(minBufferMs, bufferForPlaybackMs, "minBufferMs", "bufferForPlaybackMs");
assertGreaterOrEqual(
minBufferAudioMs, bufferForPlaybackMs, "minBufferAudioMs", "bufferForPlaybackMs");
assertGreaterOrEqual(
minBufferVideoMs, bufferForPlaybackMs, "minBufferVideoMs", "bufferForPlaybackMs");
assertGreaterOrEqual(
minBufferAudioMs,
minBufferMs,
bufferForPlaybackAfterRebufferMs,
"minBufferAudioMs",
"minBufferMs",
"bufferForPlaybackAfterRebufferMs");
assertGreaterOrEqual(
minBufferVideoMs,
bufferForPlaybackAfterRebufferMs,
"minBufferVideoMs",
"bufferForPlaybackAfterRebufferMs");
assertGreaterOrEqual(maxBufferMs, minBufferAudioMs, "maxBufferMs", "minBufferAudioMs");
assertGreaterOrEqual(maxBufferMs, minBufferVideoMs, "maxBufferMs", "minBufferVideoMs");
assertGreaterOrEqual(maxBufferMs, minBufferMs, "maxBufferMs", "minBufferMs");
assertGreaterOrEqual(backBufferDurationMs, 0, "backBufferDurationMs", "0");
this.allocator = allocator;
this.minBufferAudioUs = C.msToUs(minBufferAudioMs);
this.minBufferVideoUs = C.msToUs(minBufferVideoMs);
this.maxBufferUs = C.msToUs(maxBufferMs);
this.bufferForPlaybackUs = C.msToUs(bufferForPlaybackMs);
this.bufferForPlaybackAfterRebufferUs = C.msToUs(bufferForPlaybackAfterRebufferMs);
this.minBufferUs = Util.msToUs(minBufferMs);
this.maxBufferUs = Util.msToUs(maxBufferMs);
this.bufferForPlaybackUs = Util.msToUs(bufferForPlaybackMs);
this.bufferForPlaybackAfterRebufferUs = Util.msToUs(bufferForPlaybackAfterRebufferMs);
this.targetBufferBytesOverwrite = targetBufferBytes;
this.targetBufferBytes =
targetBufferBytesOverwrite != C.LENGTH_UNSET
? targetBufferBytesOverwrite
: DEFAULT_MIN_BUFFER_SIZE;
this.prioritizeTimeOverSizeThresholds = prioritizeTimeOverSizeThresholds;
this.backBufferDurationUs = C.msToUs(backBufferDurationMs);
this.backBufferDurationUs = Util.msToUs(backBufferDurationMs);
this.retainBackBufferFromKeyframe = retainBackBufferFromKeyframe;
}
@ -345,14 +326,13 @@ public class DefaultLoadControl implements LoadControl {
}
@Override
public void onTracksSelected(Renderer[] renderers, TrackGroupArray trackGroups,
TrackSelectionArray trackSelections) {
hasVideo = hasVideo(renderers, trackSelections);
targetBufferSize =
public void onTracksSelected(
Renderer[] renderers, TrackGroupArray trackGroups, ExoTrackSelection[] trackSelections) {
targetBufferBytes =
targetBufferBytesOverwrite == C.LENGTH_UNSET
? calculateTargetBufferSize(renderers, trackSelections)
? calculateTargetBufferBytes(renderers, trackSelections)
: targetBufferBytesOverwrite;
allocator.setTargetBufferSize(targetBufferSize);
allocator.setTargetBufferSize(targetBufferBytes);
}
@Override
@ -381,33 +361,44 @@ public class DefaultLoadControl implements LoadControl {
}
@Override
public boolean shouldContinueLoading(long bufferedDurationUs, float playbackSpeed) {
boolean targetBufferSizeReached = allocator.getTotalBytesAllocated() >= targetBufferSize;
long minBufferUs = hasVideo ? minBufferVideoUs : minBufferAudioUs;
public boolean shouldContinueLoading(
long playbackPositionUs, long bufferedDurationUs, float playbackSpeed) {
boolean targetBufferSizeReached = allocator.getTotalBytesAllocated() >= targetBufferBytes;
long minBufferUs = this.minBufferUs;
if (playbackSpeed > 1) {
// The playback speed is faster than real time, so scale up the minimum required media
// duration to keep enough media buffered for a playout duration of minBufferUs.
long mediaDurationMinBufferUs =
Util.getMediaDurationForPlayoutDuration(minBufferUs, playbackSpeed);
minBufferUs = Math.min(mediaDurationMinBufferUs, maxBufferUs);
minBufferUs = min(mediaDurationMinBufferUs, maxBufferUs);
}
// Prevent playback from getting stuck if minBufferUs is too small.
minBufferUs = max(minBufferUs, 500_000);
if (bufferedDurationUs < minBufferUs) {
isBuffering = prioritizeTimeOverSizeThresholds || !targetBufferSizeReached;
isLoading = prioritizeTimeOverSizeThresholds || !targetBufferSizeReached;
if (!isLoading && bufferedDurationUs < 500_000) {
Log.w(
"DefaultLoadControl",
"Target buffer size reached with less than 500ms of buffered media data.");
}
} else if (bufferedDurationUs >= maxBufferUs || targetBufferSizeReached) {
isBuffering = false;
} // Else don't change the buffering state
return isBuffering;
isLoading = false;
} // Else don't change the loading state.
return isLoading;
}
@Override
public boolean shouldStartPlayback(
long bufferedDurationUs, float playbackSpeed, boolean rebuffering) {
long bufferedDurationUs, float playbackSpeed, boolean rebuffering, long targetLiveOffsetUs) {
bufferedDurationUs = Util.getPlayoutDurationForMediaDuration(bufferedDurationUs, playbackSpeed);
long minBufferDurationUs = rebuffering ? bufferForPlaybackAfterRebufferUs : bufferForPlaybackUs;
if (targetLiveOffsetUs != C.TIME_UNSET) {
minBufferDurationUs = min(targetLiveOffsetUs / 2, minBufferDurationUs);
}
return minBufferDurationUs <= 0
|| bufferedDurationUs >= minBufferDurationUs
|| (!prioritizeTimeOverSizeThresholds
&& allocator.getTotalBytesAllocated() >= targetBufferSize);
&& allocator.getTotalBytesAllocated() >= targetBufferBytes);
}
/**
@ -418,26 +409,29 @@ public class DefaultLoadControl implements LoadControl {
* @param trackSelectionArray The selected tracks.
* @return The target buffer size in bytes.
*/
protected int calculateTargetBufferSize(
Renderer[] renderers, TrackSelectionArray trackSelectionArray) {
protected int calculateTargetBufferBytes(
Renderer[] renderers, ExoTrackSelection[] trackSelectionArray) {
int targetBufferSize = 0;
for (int i = 0; i < renderers.length; i++) {
if (trackSelectionArray.get(i) != null) {
if (trackSelectionArray[i] != null) {
targetBufferSize += getDefaultBufferSize(renderers[i].getTrackType());
}
}
return targetBufferSize;
return max(DEFAULT_MIN_BUFFER_SIZE, targetBufferSize);
}
private void reset(boolean resetAllocator) {
targetBufferSize = 0;
isBuffering = false;
targetBufferBytes =
targetBufferBytesOverwrite == C.LENGTH_UNSET
? DEFAULT_MIN_BUFFER_SIZE
: targetBufferBytesOverwrite;
isLoading = false;
if (resetAllocator) {
allocator.reset();
}
}
private static int getDefaultBufferSize(int trackType) {
private static int getDefaultBufferSize(@C.TrackType int trackType) {
switch (trackType) {
case C.TRACK_TYPE_DEFAULT:
return DEFAULT_MUXED_BUFFER_SIZE;
@ -451,22 +445,16 @@ public class DefaultLoadControl implements LoadControl {
return DEFAULT_METADATA_BUFFER_SIZE;
case C.TRACK_TYPE_CAMERA_MOTION:
return DEFAULT_CAMERA_MOTION_BUFFER_SIZE;
case C.TRACK_TYPE_IMAGE:
return DEFAULT_IMAGE_BUFFER_SIZE;
case C.TRACK_TYPE_NONE:
return 0;
case C.TRACK_TYPE_UNKNOWN:
default:
throw new IllegalArgumentException();
}
}
private static boolean hasVideo(Renderer[] renderers, TrackSelectionArray trackSelectionArray) {
for (int i = 0; i < renderers.length; i++) {
if (renderers[i].getTrackType() == C.TRACK_TYPE_VIDEO && trackSelectionArray.get(i) != null) {
return true;
}
}
return false;
}
private static void assertGreaterOrEqual(int value1, int value2, String name1, String name2) {
Assertions.checkArgument(value1 >= value2, name1 + " cannot be less than " + name2);
}

View file

@ -16,32 +16,31 @@
package com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.MediaClock;
import com.google.android.exoplayer2.util.StandaloneMediaClock;
/**
* Default {@link MediaClock} which uses a renderer media clock and falls back to a
* {@link StandaloneMediaClock} if necessary.
* Default {@link MediaClock} which uses a renderer media clock and falls back to a {@link
* StandaloneMediaClock} if necessary.
*/
/* package */ final class DefaultMediaClock implements MediaClock {
/**
* Listener interface to be notified of changes to the active playback parameters.
*/
public interface PlaybackParameterListener {
/** Listener interface to be notified of changes to the active playback parameters. */
public interface PlaybackParametersListener {
/**
* Called when the active playback parameters changed. Will not be called for {@link
* #setPlaybackParameters(PlaybackParameters)}.
*
* @param newPlaybackParameters The newly active {@link PlaybackParameters}.
* @param newPlaybackParameters The newly active playback parameters.
*/
void onPlaybackParametersChanged(PlaybackParameters newPlaybackParameters);
}
private final StandaloneMediaClock standaloneClock;
private final PlaybackParameterListener listener;
private final PlaybackParametersListener listener;
@Nullable private Renderer rendererClockSource;
@Nullable private MediaClock rendererClock;
@ -49,30 +48,25 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
private boolean standaloneClockIsStarted;
/**
* Creates a new instance with listener for playback parameter changes and a {@link Clock} to use
* for the standalone clock implementation.
* Creates a new instance with a listener for playback parameters changes and a {@link Clock} to
* use for the standalone clock implementation.
*
* @param listener A {@link PlaybackParameterListener} to listen for playback parameter
* changes.
* @param listener A {@link PlaybackParametersListener} to listen for playback parameters changes.
* @param clock A {@link Clock}.
*/
public DefaultMediaClock(PlaybackParameterListener listener, Clock clock) {
public DefaultMediaClock(PlaybackParametersListener listener, Clock clock) {
this.listener = listener;
this.standaloneClock = new StandaloneMediaClock(clock);
isUsingStandaloneClock = true;
}
/**
* Starts the standalone fallback clock.
*/
/** Starts the standalone fallback clock. */
public void start() {
standaloneClockIsStarted = true;
standaloneClock.start();
}
/**
* Stops the standalone fallback clock.
*/
/** Stops the standalone fallback clock. */
public void stop() {
standaloneClockIsStarted = false;
standaloneClock.stop();
@ -96,7 +90,7 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
* clock is already provided.
*/
public void onRendererEnabled(Renderer renderer) throws ExoPlaybackException {
MediaClock rendererMediaClock = renderer.getMediaClock();
@Nullable MediaClock rendererMediaClock = renderer.getMediaClock();
if (rendererMediaClock != null && rendererMediaClock != rendererClock) {
if (rendererClock != null) {
throw ExoPlaybackException.createForUnexpected(
@ -136,7 +130,9 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
@Override
public long getPositionUs() {
return isUsingStandaloneClock ? standaloneClock.getPositionUs() : rendererClock.getPositionUs();
return isUsingStandaloneClock
? standaloneClock.getPositionUs()
: Assertions.checkNotNull(rendererClock).getPositionUs();
}
@Override
@ -163,6 +159,9 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
}
return;
}
// We are either already using the renderer clock or switching from the standalone to the
// renderer clock, so it must be non-null.
MediaClock rendererClock = Assertions.checkNotNull(this.rendererClock);
long rendererClockPositionUs = rendererClock.getPositionUs();
if (isUsingStandaloneClock) {
// Ensure enabling the renderer clock doesn't jump backwards in time.

View file

@ -15,19 +15,22 @@
*/
package com.google.android.exoplayer2;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.content.Context;
import android.media.MediaCodec;
import android.media.PlaybackParams;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.audio.AudioCapabilities;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.AudioSink;
import com.google.android.exoplayer2.audio.DefaultAudioSink;
import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import com.google.android.exoplayer2.mediacodec.DefaultMediaCodecAdapterFactory;
import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
import com.google.android.exoplayer2.metadata.MetadataOutput;
import com.google.android.exoplayer2.metadata.MetadataRenderer;
@ -38,15 +41,15 @@ import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.video.MediaCodecVideoRenderer;
import com.google.android.exoplayer2.video.VideoRendererEventListener;
import com.google.android.exoplayer2.video.spherical.CameraMotionRenderer;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
/**
* Default {@link RenderersFactory} implementation.
*/
/** Default {@link RenderersFactory} implementation. */
public class DefaultRenderersFactory implements RenderersFactory {
/**
@ -61,114 +64,55 @@ public class DefaultRenderersFactory implements RenderersFactory {
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef({EXTENSION_RENDERER_MODE_OFF, EXTENSION_RENDERER_MODE_ON, EXTENSION_RENDERER_MODE_PREFER})
public @interface ExtensionRendererMode {}
/**
* Do not allow use of extension renderers.
*/
/** Do not allow use of extension renderers. */
public static final int EXTENSION_RENDERER_MODE_OFF = 0;
/**
* Allow use of extension renderers. Extension renderers are indexed after core renderers of the
* same type. A {@link TrackSelector} that prefers the first suitable renderer will therefore
* prefer to use a core renderer to an extension renderer in the case that both are able to play
* a given track.
* prefer to use a core renderer to an extension renderer in the case that both are able to play a
* given track.
*/
public static final int EXTENSION_RENDERER_MODE_ON = 1;
/**
* Allow use of extension renderers. Extension renderers are indexed before core renderers of the
* same type. A {@link TrackSelector} that prefers the first suitable renderer will therefore
* prefer to use an extension renderer to a core renderer in the case that both are able to play
* a given track.
* prefer to use an extension renderer to a core renderer in the case that both are able to play a
* given track.
*/
public static final int EXTENSION_RENDERER_MODE_PREFER = 2;
/**
* The maximum number of frames that can be dropped between invocations of {@link
* VideoRendererEventListener#onDroppedFrames(int, long)}.
*/
public static final int MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY = 50;
private static final String TAG = "DefaultRenderersFactory";
protected static final int MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY = 50;
private final Context context;
@Nullable private DrmSessionManager<FrameworkMediaCrypto> drmSessionManager;
@ExtensionRendererMode private int extensionRendererMode;
private final DefaultMediaCodecAdapterFactory codecAdapterFactory;
private @ExtensionRendererMode int extensionRendererMode;
private long allowedVideoJoiningTimeMs;
private boolean playClearSamplesWithoutKeys;
private boolean enableDecoderFallback;
private MediaCodecSelector mediaCodecSelector;
private boolean enableFloatOutput;
private boolean enableAudioTrackPlaybackParams;
private boolean enableOffload;
/** @param context A {@link Context}. */
/**
* @param context A {@link Context}.
*/
public DefaultRenderersFactory(Context context) {
this.context = context;
codecAdapterFactory = new DefaultMediaCodecAdapterFactory();
extensionRendererMode = EXTENSION_RENDERER_MODE_OFF;
allowedVideoJoiningTimeMs = DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS;
mediaCodecSelector = MediaCodecSelector.DEFAULT;
}
/**
* @deprecated Use {@link #DefaultRenderersFactory(Context)} and pass {@link DrmSessionManager}
* directly to {@link SimpleExoPlayer.Builder}.
*/
@Deprecated
@SuppressWarnings("deprecation")
public DefaultRenderersFactory(
Context context, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
this(context, drmSessionManager, EXTENSION_RENDERER_MODE_OFF);
}
/**
* @deprecated Use {@link #DefaultRenderersFactory(Context)} and {@link
* #setExtensionRendererMode(int)}.
*/
@Deprecated
@SuppressWarnings("deprecation")
public DefaultRenderersFactory(
Context context, @ExtensionRendererMode int extensionRendererMode) {
this(context, extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
}
/**
* @deprecated Use {@link #DefaultRenderersFactory(Context)} and {@link
* #setExtensionRendererMode(int)}, and pass {@link DrmSessionManager} directly to {@link
* SimpleExoPlayer.Builder}.
*/
@Deprecated
@SuppressWarnings("deprecation")
public DefaultRenderersFactory(
Context context,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode) {
this(context, drmSessionManager, extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
}
/**
* @deprecated Use {@link #DefaultRenderersFactory(Context)}, {@link
* #setExtensionRendererMode(int)} and {@link #setAllowedVideoJoiningTimeMs(long)}.
*/
@Deprecated
@SuppressWarnings("deprecation")
public DefaultRenderersFactory(
Context context,
@ExtensionRendererMode int extensionRendererMode,
long allowedVideoJoiningTimeMs) {
this(context, null, extensionRendererMode, allowedVideoJoiningTimeMs);
}
/**
* @deprecated Use {@link #DefaultRenderersFactory(Context)}, {@link
* #setExtensionRendererMode(int)} and {@link #setAllowedVideoJoiningTimeMs(long)}, and pass
* {@link DrmSessionManager} directly to {@link SimpleExoPlayer.Builder}.
*/
@Deprecated
public DefaultRenderersFactory(
Context context,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
@ExtensionRendererMode int extensionRendererMode,
long allowedVideoJoiningTimeMs) {
this.context = context;
this.extensionRendererMode = extensionRendererMode;
this.allowedVideoJoiningTimeMs = allowedVideoJoiningTimeMs;
this.drmSessionManager = drmSessionManager;
mediaCodecSelector = MediaCodecSelector.DEFAULT;
}
/**
* Sets the extension renderer mode, which determines if and how available extension renderers are
* used. Note that extensions must be included in the application build for them to be considered
@ -179,6 +123,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
* @param extensionRendererMode The extension renderer mode.
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory setExtensionRendererMode(
@ExtensionRendererMode int extensionRendererMode) {
this.extensionRendererMode = extensionRendererMode;
@ -186,21 +131,46 @@ public class DefaultRenderersFactory implements RenderersFactory {
}
/**
* Sets whether renderers are permitted to play clear regions of encrypted media prior to having
* obtained the keys necessary to decrypt encrypted regions of the media. For encrypted media that
* starts with a short clear region, this allows playback to begin in parallel with key
* acquisition, which can reduce startup latency.
* Enables {@link com.google.android.exoplayer2.mediacodec.MediaCodecRenderer} instances to
* operate their {@link MediaCodec} in asynchronous mode and perform asynchronous queueing.
*
* <p>The default value is {@code false}.
* <p>This feature can be enabled only on devices with API versions &gt;= 23. For devices with
* older API versions, this method is a no-op.
*
* @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of
* encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of
* the media.
* @return This factory, for convenience.
*/
public DefaultRenderersFactory setPlayClearSamplesWithoutKeys(
boolean playClearSamplesWithoutKeys) {
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
@CanIgnoreReturnValue
public DefaultRenderersFactory forceEnableMediaCodecAsynchronousQueueing() {
codecAdapterFactory.forceEnableAsynchronous();
return this;
}
/**
* Disables {@link com.google.android.exoplayer2.mediacodec.MediaCodecRenderer} instances from
* operating their {@link MediaCodec} in asynchronous mode and perform asynchronous queueing.
* {@link MediaCodec} instances will be operated synchronous mode.
*
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory forceDisableMediaCodecAsynchronousQueueing() {
codecAdapterFactory.forceDisableAsynchronous();
return this;
}
/**
* Enable synchronizing codec interactions with asynchronous buffer queueing.
*
* <p>This method is experimental, and will be renamed or removed in a future release.
*
* @param enabled Whether codec interactions will be synchronized with asynchronous buffer
* queueing.
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory experimentalSetSynchronizeCodecInteractionsWithQueueingEnabled(
boolean enabled) {
codecAdapterFactory.experimentalSetSynchronizeCodecInteractionsWithQueueingEnabled(enabled);
return this;
}
@ -212,6 +182,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
* initialization fails.
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory setEnableDecoderFallback(boolean enableDecoderFallback) {
this.enableDecoderFallback = enableDecoderFallback;
return this;
@ -225,11 +196,77 @@ public class DefaultRenderersFactory implements RenderersFactory {
* @param mediaCodecSelector The {@link MediaCodecSelector}.
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory setMediaCodecSelector(MediaCodecSelector mediaCodecSelector) {
this.mediaCodecSelector = mediaCodecSelector;
return this;
}
/**
* Sets whether floating point audio should be output when possible.
*
* <p>Enabling floating point output disables audio processing, but may allow for higher quality
* audio output.
*
* <p>The default value is {@code false}.
*
* @param enableFloatOutput Whether to enable use of floating point audio output, if available.
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory setEnableAudioFloatOutput(boolean enableFloatOutput) {
this.enableFloatOutput = enableFloatOutput;
return this;
}
/**
* Sets whether audio should be played using the offload path.
*
* <p>Audio offload disables ExoPlayer audio processing, but significantly reduces the energy
* consumption of the playback when {@link
* ExoPlayer#experimentalSetOffloadSchedulingEnabled(boolean) offload scheduling} is enabled.
*
* <p>Most Android devices can only support one offload {@link android.media.AudioTrack} at a time
* and can invalidate it at any time. Thus an app can never be guaranteed that it will be able to
* play in offload.
*
* <p>The default value is {@code false}.
*
* @param enableOffload Whether to enable use of audio offload for supported formats, if
* available.
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory setEnableAudioOffload(boolean enableOffload) {
this.enableOffload = enableOffload;
return this;
}
/**
* Sets whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, which is supported from API level
* 23, rather than using application-level audio speed adjustment. This setting has no effect on
* builds before API level 23 (application-level speed adjustment will be used in all cases).
*
* <p>If enabled and supported, new playback speed settings will take effect more quickly because
* they are applied at the audio mixer, rather than at the point of writing data to the track.
*
* <p>When using this mode, the maximum supported playback speed is limited by the size of the
* audio track's buffer. If the requested speed is not supported the player's event listener will
* be notified twice on setting playback speed, once with the requested speed, then again with the
* old playback speed reflecting the fact that the requested speed was not supported.
*
* @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}.
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory setEnableAudioTrackPlaybackParams(
boolean enableAudioTrackPlaybackParams) {
this.enableAudioTrackPlaybackParams = enableAudioTrackPlaybackParams;
return this;
}
/**
* Sets the maximum duration for which video renderers can attempt to seamlessly join an ongoing
* playback.
@ -240,6 +277,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
* seamlessly join an ongoing playback, in milliseconds.
* @return This factory, for convenience.
*/
@CanIgnoreReturnValue
public DefaultRenderersFactory setAllowedVideoJoiningTimeMs(long allowedVideoJoiningTimeMs) {
this.allowedVideoJoiningTimeMs = allowedVideoJoiningTimeMs;
return this;
@ -251,38 +289,43 @@ public class DefaultRenderersFactory implements RenderersFactory {
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
TextOutput textRendererOutput,
MetadataOutput metadataRendererOutput,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
if (drmSessionManager == null) {
drmSessionManager = this.drmSessionManager;
}
MetadataOutput metadataRendererOutput) {
ArrayList<Renderer> renderersList = new ArrayList<>();
buildVideoRenderers(
context,
extensionRendererMode,
mediaCodecSelector,
drmSessionManager,
playClearSamplesWithoutKeys,
enableDecoderFallback,
eventHandler,
videoRendererEventListener,
allowedVideoJoiningTimeMs,
renderersList);
buildAudioRenderers(
@Nullable
AudioSink audioSink =
buildAudioSink(context, enableFloatOutput, enableAudioTrackPlaybackParams, enableOffload);
if (audioSink != null) {
buildAudioRenderers(
context,
extensionRendererMode,
mediaCodecSelector,
enableDecoderFallback,
audioSink,
eventHandler,
audioRendererEventListener,
renderersList);
}
buildTextRenderers(
context,
textRendererOutput,
eventHandler.getLooper(),
extensionRendererMode,
renderersList);
buildMetadataRenderers(
context,
metadataRendererOutput,
eventHandler.getLooper(),
extensionRendererMode,
mediaCodecSelector,
drmSessionManager,
playClearSamplesWithoutKeys,
enableDecoderFallback,
buildAudioProcessors(),
eventHandler,
audioRendererEventListener,
renderersList);
buildTextRenderers(context, textRendererOutput, eventHandler.getLooper(),
extensionRendererMode, renderersList);
buildMetadataRenderers(context, metadataRendererOutput, eventHandler.getLooper(),
extensionRendererMode, renderersList);
buildCameraMotionRenderers(context, extensionRendererMode, renderersList);
buildMiscellaneousRenderers(context, eventHandler, extensionRendererMode, renderersList);
return renderersList.toArray(new Renderer[0]);
@ -294,11 +337,6 @@ public class DefaultRenderersFactory implements RenderersFactory {
* @param context The {@link Context} associated with the player.
* @param extensionRendererMode The extension renderer mode.
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will
* not be used for DRM protected playbacks.
* @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of
* encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of
* the media.
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
* initialization fails. This may result in using a decoder that is slower/less efficient than
* the primary decoder.
@ -312,24 +350,22 @@ public class DefaultRenderersFactory implements RenderersFactory {
Context context,
@ExtensionRendererMode int extensionRendererMode,
MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys,
boolean enableDecoderFallback,
Handler eventHandler,
VideoRendererEventListener eventListener,
long allowedVideoJoiningTimeMs,
ArrayList<Renderer> out) {
out.add(
MediaCodecVideoRenderer videoRenderer =
new MediaCodecVideoRenderer(
context,
getCodecAdapterFactory(),
mediaCodecSelector,
allowedVideoJoiningTimeMs,
drmSessionManager,
playClearSamplesWithoutKeys,
enableDecoderFallback,
eventHandler,
eventListener,
MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY));
MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
out.add(videoRenderer);
if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
return;
@ -341,7 +377,6 @@ public class DefaultRenderersFactory implements RenderersFactory {
try {
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.vp9.LibvpxVideoRenderer");
Constructor<?> constructor =
clazz.getConstructor(
@ -349,7 +384,6 @@ public class DefaultRenderersFactory implements RenderersFactory {
android.os.Handler.class,
com.google.android.exoplayer2.video.VideoRendererEventListener.class,
int.class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
Renderer renderer =
(Renderer)
constructor.newInstance(
@ -368,7 +402,6 @@ public class DefaultRenderersFactory implements RenderersFactory {
try {
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.av1.Libgav1VideoRenderer");
Constructor<?> constructor =
clazz.getConstructor(
@ -376,7 +409,6 @@ public class DefaultRenderersFactory implements RenderersFactory {
android.os.Handler.class,
com.google.android.exoplayer2.video.VideoRendererEventListener.class,
int.class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
Renderer renderer =
(Renderer)
constructor.newInstance(
@ -400,16 +432,10 @@ public class DefaultRenderersFactory implements RenderersFactory {
* @param context The {@link Context} associated with the player.
* @param extensionRendererMode The extension renderer mode.
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will
* not be used for DRM protected playbacks.
* @param playClearSamplesWithoutKeys Whether renderers are permitted to play clear regions of
* encrypted media prior to having obtained the keys necessary to decrypt encrypted regions of
* the media.
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
* initialization fails. This may result in using a decoder that is slower/less efficient than
* the primary decoder.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio buffers
* before output. May be empty.
* @param audioSink A sink to which the renderers will output.
* @param eventHandler A handler to use when invoking event listeners and outputs.
* @param eventListener An event listener.
* @param out An array to which the built renderers should be appended.
@ -418,23 +444,21 @@ public class DefaultRenderersFactory implements RenderersFactory {
Context context,
@ExtensionRendererMode int extensionRendererMode,
MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys,
boolean enableDecoderFallback,
AudioProcessor[] audioProcessors,
AudioSink audioSink,
Handler eventHandler,
AudioRendererEventListener eventListener,
ArrayList<Renderer> out) {
out.add(
MediaCodecAudioRenderer audioRenderer =
new MediaCodecAudioRenderer(
context,
getCodecAdapterFactory(),
mediaCodecSelector,
drmSessionManager,
playClearSamplesWithoutKeys,
enableDecoderFallback,
eventHandler,
eventListener,
new DefaultAudioSink(AudioCapabilities.getCapabilities(context), audioProcessors)));
audioSink);
out.add(audioRenderer);
if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
return;
@ -444,18 +468,29 @@ public class DefaultRenderersFactory implements RenderersFactory {
extensionRendererIndex--;
}
try {
Class<?> clazz = Class.forName("com.google.android.exoplayer2.decoder.midi.MidiRenderer");
Constructor<?> constructor = clazz.getConstructor();
Renderer renderer = (Renderer) constructor.newInstance();
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded MidiRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
// The extension is present, but instantiation failed.
throw new RuntimeException("Error instantiating MIDI extension", e);
}
try {
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer");
Constructor<?> constructor =
clazz.getConstructor(
android.os.Handler.class,
com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
com.google.android.exoplayer2.audio.AudioProcessor[].class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
com.google.android.exoplayer2.audio.AudioSink.class);
Renderer renderer =
(Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
(Renderer) constructor.newInstance(eventHandler, eventListener, audioSink);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibopusAudioRenderer.");
} catch (ClassNotFoundException e) {
@ -467,16 +502,14 @@ public class DefaultRenderersFactory implements RenderersFactory {
try {
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer");
Constructor<?> constructor =
clazz.getConstructor(
android.os.Handler.class,
com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
com.google.android.exoplayer2.audio.AudioProcessor[].class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
com.google.android.exoplayer2.audio.AudioSink.class);
Renderer renderer =
(Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
(Renderer) constructor.newInstance(eventHandler, eventListener, audioSink);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded LibflacAudioRenderer.");
} catch (ClassNotFoundException e) {
@ -488,17 +521,15 @@ public class DefaultRenderersFactory implements RenderersFactory {
try {
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz =
Class.forName("com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer");
Constructor<?> constructor =
clazz.getConstructor(
android.os.Handler.class,
com.google.android.exoplayer2.audio.AudioRendererEventListener.class,
com.google.android.exoplayer2.audio.AudioProcessor[].class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
com.google.android.exoplayer2.audio.AudioSink.class);
Renderer renderer =
(Renderer) constructor.newInstance(eventHandler, eventListener, audioProcessors);
(Renderer) constructor.newInstance(eventHandler, eventListener, audioSink);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded FfmpegAudioRenderer.");
} catch (ClassNotFoundException e) {
@ -565,16 +596,49 @@ public class DefaultRenderersFactory implements RenderersFactory {
* @param extensionRendererMode The extension renderer mode.
* @param out An array to which the built renderers should be appended.
*/
protected void buildMiscellaneousRenderers(Context context, Handler eventHandler,
@ExtensionRendererMode int extensionRendererMode, ArrayList<Renderer> out) {
protected void buildMiscellaneousRenderers(
Context context,
Handler eventHandler,
@ExtensionRendererMode int extensionRendererMode,
ArrayList<Renderer> out) {
// Do nothing.
}
/**
* Builds an array of {@link AudioProcessor}s that will process PCM audio before output.
* Builds an {@link AudioSink} to which the audio renderers will output.
*
* @param context The {@link Context} associated with the player.
* @param enableFloatOutput Whether to enable use of floating point audio output, if available.
* @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, if supported.
* @param enableOffload Whether to enable use of audio offload for supported formats, if
* available.
* @return The {@link AudioSink} to which the audio renderers will output. May be {@code null} if
* no audio renderers are required. If {@code null} is returned then {@link
* #buildAudioRenderers} will not be called.
*/
protected AudioProcessor[] buildAudioProcessors() {
return new AudioProcessor[0];
@Nullable
protected AudioSink buildAudioSink(
Context context,
boolean enableFloatOutput,
boolean enableAudioTrackPlaybackParams,
boolean enableOffload) {
return new DefaultAudioSink.Builder()
.setAudioCapabilities(AudioCapabilities.getCapabilities(context))
.setEnableFloatOutput(enableFloatOutput)
.setEnableAudioTrackPlaybackParams(enableAudioTrackPlaybackParams)
.setOffloadMode(
enableOffload
? DefaultAudioSink.OFFLOAD_MODE_ENABLED_GAPLESS_REQUIRED
: DefaultAudioSink.OFFLOAD_MODE_DISABLED)
.build();
}
/**
* Returns the {@link MediaCodecAdapter.Factory} that will be used when creating {@link
* com.google.android.exoplayer2.mediacodec.MediaCodecRenderer} instances.
*/
protected MediaCodecAdapter.Factory getCodecAdapterFactory() {
return codecAdapterFactory;
}
}

View file

@ -0,0 +1,111 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.os.Bundle;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/** Information about the playback device. */
public final class DeviceInfo implements Bundleable {
/** Types of playback. One of {@link #PLAYBACK_TYPE_LOCAL} or {@link #PLAYBACK_TYPE_REMOTE}. */
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef({
PLAYBACK_TYPE_LOCAL,
PLAYBACK_TYPE_REMOTE,
})
public @interface PlaybackType {}
/** Playback happens on the local device (e.g. phone). */
public static final int PLAYBACK_TYPE_LOCAL = 0;
/** Playback happens outside of the device (e.g. a cast device). */
public static final int PLAYBACK_TYPE_REMOTE = 1;
/** Unknown DeviceInfo. */
public static final DeviceInfo UNKNOWN =
new DeviceInfo(PLAYBACK_TYPE_LOCAL, /* minVolume= */ 0, /* maxVolume= */ 0);
/** The type of playback. */
public final @PlaybackType int playbackType;
/** The minimum volume that the device supports. */
public final int minVolume;
/** The maximum volume that the device supports. */
public final int maxVolume;
/** Creates device information. */
public DeviceInfo(@PlaybackType int playbackType, int minVolume, int maxVolume) {
this.playbackType = playbackType;
this.minVolume = minVolume;
this.maxVolume = maxVolume;
}
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof DeviceInfo)) {
return false;
}
DeviceInfo other = (DeviceInfo) obj;
return playbackType == other.playbackType
&& minVolume == other.minVolume
&& maxVolume == other.maxVolume;
}
@Override
public int hashCode() {
int result = 17;
result = 31 * result + playbackType;
result = 31 * result + minVolume;
result = 31 * result + maxVolume;
return result;
}
// Bundleable implementation.
private static final String FIELD_PLAYBACK_TYPE = Util.intToStringMaxRadix(0);
private static final String FIELD_MIN_VOLUME = Util.intToStringMaxRadix(1);
private static final String FIELD_MAX_VOLUME = Util.intToStringMaxRadix(2);
@Override
public Bundle toBundle() {
Bundle bundle = new Bundle();
bundle.putInt(FIELD_PLAYBACK_TYPE, playbackType);
bundle.putInt(FIELD_MIN_VOLUME, minVolume);
bundle.putInt(FIELD_MAX_VOLUME, maxVolume);
return bundle;
}
/** Object that can restore {@link DeviceInfo} from a {@link Bundle}. */
public static final Creator<DeviceInfo> CREATOR =
bundle -> {
int playbackType =
bundle.getInt(FIELD_PLAYBACK_TYPE, /* defaultValue= */ PLAYBACK_TYPE_LOCAL);
int minVolume = bundle.getInt(FIELD_MIN_VOLUME, /* defaultValue= */ 0);
int maxVolume = bundle.getInt(FIELD_MAX_VOLUME, /* defaultValue= */ 0);
return new DeviceInfo(playbackType, minVolume, maxVolume);
};
}

View file

@ -15,47 +15,60 @@
*/
package com.google.android.exoplayer2;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.LOCAL_VARIABLE;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.os.Bundle;
import android.os.SystemClock;
import android.text.TextUtils;
import androidx.annotation.CheckResult;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.RendererCapabilities.FormatSupport;
import com.google.android.exoplayer2.C.FormatSupport;
import com.google.android.exoplayer2.source.MediaPeriodId;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Thrown when a non-recoverable playback failure occurs.
*/
public final class ExoPlaybackException extends Exception {
/** Thrown when a non locally recoverable playback failure occurs. */
public final class ExoPlaybackException extends PlaybackException {
/**
* The type of source that produced the error. One of {@link #TYPE_SOURCE}, {@link #TYPE_RENDERER}
* {@link #TYPE_UNEXPECTED}, {@link #TYPE_REMOTE} or {@link #TYPE_OUT_OF_MEMORY}. Note that new
* types may be added in the future and error handling should handle unknown type values.
* {@link #TYPE_UNEXPECTED} or {@link #TYPE_REMOTE}. Note that new types may be added in the
* future and error handling should handle unknown type values.
*/
// @Target list includes both 'default' targets and TYPE_USE, to ensure backwards compatibility
// with Kotlin usages from before TYPE_USE was added.
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({TYPE_SOURCE, TYPE_RENDERER, TYPE_UNEXPECTED, TYPE_REMOTE, TYPE_OUT_OF_MEMORY})
@Target({FIELD, METHOD, PARAMETER, LOCAL_VARIABLE, TYPE_USE})
@IntDef({TYPE_SOURCE, TYPE_RENDERER, TYPE_UNEXPECTED, TYPE_REMOTE})
public @interface Type {}
/**
* The error occurred loading data from a {@link MediaSource}.
* <p>
* Call {@link #getSourceException()} to retrieve the underlying cause.
*
* <p>Call {@link #getSourceException()} to retrieve the underlying cause.
*/
public static final int TYPE_SOURCE = 0;
/**
* The error occurred in a {@link Renderer}.
* <p>
* Call {@link #getRendererException()} to retrieve the underlying cause.
*
* <p>Call {@link #getRendererException()} to retrieve the underlying cause.
*/
public static final int TYPE_RENDERER = 1;
/**
* The error was an unexpected {@link RuntimeException}.
* <p>
* Call {@link #getUnexpectedException()} to retrieve the underlying cause.
*
* <p>Call {@link #getUnexpectedException()} to retrieve the underlying cause.
*/
public static final int TYPE_UNEXPECTED = 2;
/**
@ -64,15 +77,14 @@ public final class ExoPlaybackException extends Exception {
* <p>Call {@link #getMessage()} to retrieve the message associated with the error.
*/
public static final int TYPE_REMOTE = 3;
/** The error was an {@link OutOfMemoryError}. */
public static final int TYPE_OUT_OF_MEMORY = 4;
/** The {@link Type} of the playback failure. */
@Type public final int type;
public final @Type int type;
/**
* If {@link #type} is {@link #TYPE_RENDERER}, this is the index of the renderer.
*/
/** If {@link #type} is {@link #TYPE_RENDERER}, this is the name of the renderer. */
@Nullable public final String rendererName;
/** If {@link #type} is {@link #TYPE_RENDERER}, this is the index of the renderer. */
public final int rendererIndex;
/**
@ -84,23 +96,29 @@ public final class ExoPlaybackException extends Exception {
/**
* If {@link #type} is {@link #TYPE_RENDERER}, this is the level of {@link FormatSupport} of the
* renderer for {@link #rendererFormat}. If {@link #rendererFormat} is null, this is {@link
* RendererCapabilities#FORMAT_HANDLED}.
* C#FORMAT_HANDLED}.
*/
@FormatSupport public final int rendererFormatSupport;
public final @FormatSupport int rendererFormatSupport;
/** The value of {@link SystemClock#elapsedRealtime()} when this exception was created. */
public final long timestampMs;
/** The {@link MediaPeriodId} of the media associated with this error, or null if undetermined. */
@Nullable public final MediaPeriodId mediaPeriodId;
@Nullable private final Throwable cause;
/**
* If {@link #type} is {@link #TYPE_RENDERER}, this field indicates whether the error may be
* recoverable by disabling and re-enabling (but <em>not</em> resetting) the renderers. For other
* {@link Type types} this field will always be {@code false}.
*/
/* package */ final boolean isRecoverable;
/**
* Creates an instance of type {@link #TYPE_SOURCE}.
*
* @param cause The cause of the failure.
* @param errorCode See {@link #errorCode}.
* @return The created instance.
*/
public static ExoPlaybackException createForSource(IOException cause) {
return new ExoPlaybackException(TYPE_SOURCE, cause);
public static ExoPlaybackException createForSource(IOException cause, int errorCode) {
return new ExoPlaybackException(TYPE_SOURCE, cause, errorCode);
}
/**
@ -112,29 +130,50 @@ public final class ExoPlaybackException extends Exception {
* or null if the renderer wasn't using a {@link Format}.
* @param rendererFormatSupport The {@link FormatSupport} of the renderer for {@code
* rendererFormat}. Ignored if {@code rendererFormat} is null.
* @param isRecoverable If the failure can be recovered by disabling and re-enabling the renderer.
* @param errorCode See {@link #errorCode}.
* @return The created instance.
*/
public static ExoPlaybackException createForRenderer(
Exception cause,
Throwable cause,
String rendererName,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport) {
@FormatSupport int rendererFormatSupport,
boolean isRecoverable,
@ErrorCode int errorCode) {
return new ExoPlaybackException(
TYPE_RENDERER,
cause,
/* customMessage= */ null,
errorCode,
rendererName,
rendererIndex,
rendererFormat,
rendererFormat == null ? RendererCapabilities.FORMAT_HANDLED : rendererFormatSupport);
rendererFormat == null ? C.FORMAT_HANDLED : rendererFormatSupport,
isRecoverable);
}
/**
* @deprecated Use {@link #createForUnexpected(RuntimeException, int)
* createForUnexpected(RuntimeException, ERROR_CODE_UNSPECIFIED)} instead.
*/
@Deprecated
public static ExoPlaybackException createForUnexpected(RuntimeException cause) {
return createForUnexpected(cause, ERROR_CODE_UNSPECIFIED);
}
/**
* Creates an instance of type {@link #TYPE_UNEXPECTED}.
*
* @param cause The cause of the failure.
* @param errorCode See {@link #errorCode}.
* @return The created instance.
*/
public static ExoPlaybackException createForUnexpected(RuntimeException cause) {
return new ExoPlaybackException(TYPE_UNEXPECTED, cause);
public static ExoPlaybackException createForUnexpected(
RuntimeException cause, @ErrorCode int errorCode) {
return new ExoPlaybackException(TYPE_UNEXPECTED, cause, errorCode);
}
/**
@ -144,51 +183,97 @@ public final class ExoPlaybackException extends Exception {
* @return The created instance.
*/
public static ExoPlaybackException createForRemote(String message) {
return new ExoPlaybackException(TYPE_REMOTE, message);
return new ExoPlaybackException(
TYPE_REMOTE,
/* cause= */ null,
/* customMessage= */ message,
ERROR_CODE_REMOTE_ERROR,
/* rendererName= */ null,
/* rendererIndex= */ C.INDEX_UNSET,
/* rendererFormat= */ null,
/* rendererFormatSupport= */ C.FORMAT_HANDLED,
/* isRecoverable= */ false);
}
/**
* Creates an instance of type {@link #TYPE_OUT_OF_MEMORY}.
*
* @param cause The cause of the failure.
* @return The created instance.
*/
public static ExoPlaybackException createForOutOfMemoryError(OutOfMemoryError cause) {
return new ExoPlaybackException(TYPE_OUT_OF_MEMORY, cause);
}
private ExoPlaybackException(@Type int type, Throwable cause) {
private ExoPlaybackException(@Type int type, Throwable cause, @ErrorCode int errorCode) {
this(
type,
cause,
/* customMessage= */ null,
errorCode,
/* rendererName= */ null,
/* rendererIndex= */ C.INDEX_UNSET,
/* rendererFormat= */ null,
/* rendererFormatSupport= */ RendererCapabilities.FORMAT_HANDLED);
/* rendererFormatSupport= */ C.FORMAT_HANDLED,
/* isRecoverable= */ false);
}
private ExoPlaybackException(
@Type int type,
Throwable cause,
@Nullable Throwable cause,
@Nullable String customMessage,
@ErrorCode int errorCode,
@Nullable String rendererName,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport) {
super(cause);
@FormatSupport int rendererFormatSupport,
boolean isRecoverable) {
this(
deriveMessage(
type,
customMessage,
rendererName,
rendererIndex,
rendererFormat,
rendererFormatSupport),
cause,
errorCode,
type,
rendererName,
rendererIndex,
rendererFormat,
rendererFormatSupport,
/* mediaPeriodId= */ null,
/* timestampMs= */ SystemClock.elapsedRealtime(),
isRecoverable);
}
private ExoPlaybackException(Bundle bundle) {
super(bundle);
type = bundle.getInt(FIELD_TYPE, /* defaultValue= */ TYPE_UNEXPECTED);
rendererName = bundle.getString(FIELD_RENDERER_NAME);
rendererIndex = bundle.getInt(FIELD_RENDERER_INDEX, /* defaultValue= */ C.INDEX_UNSET);
@Nullable Bundle rendererFormatBundle = bundle.getBundle(FIELD_RENDERER_FORMAT);
rendererFormat =
rendererFormatBundle == null ? null : Format.CREATOR.fromBundle(rendererFormatBundle);
rendererFormatSupport =
bundle.getInt(FIELD_RENDERER_FORMAT_SUPPORT, /* defaultValue= */ C.FORMAT_HANDLED);
isRecoverable = bundle.getBoolean(FIELD_IS_RECOVERABLE, /* defaultValue= */ false);
mediaPeriodId = null;
}
private ExoPlaybackException(
String message,
@Nullable Throwable cause,
@ErrorCode int errorCode,
@Type int type,
@Nullable String rendererName,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport,
@Nullable MediaPeriodId mediaPeriodId,
long timestampMs,
boolean isRecoverable) {
super(message, cause, errorCode, timestampMs);
Assertions.checkArgument(!isRecoverable || type == TYPE_RENDERER);
Assertions.checkArgument(cause != null || type == TYPE_REMOTE);
this.type = type;
this.cause = cause;
this.rendererName = rendererName;
this.rendererIndex = rendererIndex;
this.rendererFormat = rendererFormat;
this.rendererFormatSupport = rendererFormatSupport;
timestampMs = SystemClock.elapsedRealtime();
}
private ExoPlaybackException(@Type int type, String message) {
super(message);
this.type = type;
rendererIndex = C.INDEX_UNSET;
rendererFormat = null;
rendererFormatSupport = RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
cause = null;
timestampMs = SystemClock.elapsedRealtime();
this.mediaPeriodId = mediaPeriodId;
this.isRecoverable = isRecoverable;
}
/**
@ -198,7 +283,7 @@ public final class ExoPlaybackException extends Exception {
*/
public IOException getSourceException() {
  Assertions.checkState(type == TYPE_SOURCE);
  // The cause is stored on the PlaybackException superclass; the private `cause` field no longer
  // exists in this class, so the accessor must be used.
  return (IOException) Assertions.checkNotNull(getCause());
}
/**
@ -208,7 +293,7 @@ public final class ExoPlaybackException extends Exception {
*/
public Exception getRendererException() {
  Assertions.checkState(type == TYPE_RENDERER);
  // The cause is stored on the PlaybackException superclass; the private `cause` field no longer
  // exists in this class, so the accessor must be used.
  return (Exception) Assertions.checkNotNull(getCause());
}
/**
@ -218,16 +303,119 @@ public final class ExoPlaybackException extends Exception {
*/
public RuntimeException getUnexpectedException() {
  Assertions.checkState(type == TYPE_UNEXPECTED);
  // The cause is stored on the PlaybackException superclass; the private `cause` field no longer
  // exists in this class, so the accessor must be used.
  return (RuntimeException) Assertions.checkNotNull(getCause());
}
@Override
public boolean errorInfoEquals(@Nullable PlaybackException that) {
  // Let the superclass compare the shared PlaybackException state first.
  // NOTE(review): this relies on super.errorInfoEquals returning true only when `that` is a
  // non-null instance of the same runtime class — confirm against PlaybackException.
  if (!super.errorInfoEquals(that)) {
    return false;
  }
  // We know that is not null and is an ExoPlaybackException because of the super call returning
  // true.
  ExoPlaybackException other = (ExoPlaybackException) Util.castNonNull(that);
  return type == other.type
      && Util.areEqual(rendererName, other.rendererName)
      && rendererIndex == other.rendererIndex
      && Util.areEqual(rendererFormat, other.rendererFormat)
      && rendererFormatSupport == other.rendererFormatSupport
      && Util.areEqual(mediaPeriodId, other.mediaPeriodId)
      && isRecoverable == other.isRecoverable;
}
/**
* Retrieves the underlying error when {@link #type} is {@link #TYPE_OUT_OF_MEMORY}.
* Returns a copy of this exception with the provided {@link MediaPeriodId}.
*
* @throws IllegalStateException If {@link #type} is not {@link #TYPE_OUT_OF_MEMORY}.
* @param mediaPeriodId The {@link MediaPeriodId}.
* @return The copied exception.
*/
public OutOfMemoryError getOutOfMemoryError() {
Assertions.checkState(type == TYPE_OUT_OF_MEMORY);
return (OutOfMemoryError) Assertions.checkNotNull(cause);
// Copies every field verbatim (message, cause, error code, renderer info, timestamp,
// recoverability); only mediaPeriodId is replaced by the supplied value.
@CheckResult
/* package */ ExoPlaybackException copyWithMediaPeriodId(@Nullable MediaPeriodId mediaPeriodId) {
  return new ExoPlaybackException(
      Util.castNonNull(getMessage()),
      getCause(),
      errorCode,
      type,
      rendererName,
      rendererIndex,
      rendererFormat,
      rendererFormatSupport,
      mediaPeriodId,
      timestampMs,
      isRecoverable);
}
/** Builds the exception message for the given type, appending the custom message if non-empty. */
private static String deriveMessage(
    @Type int type,
    @Nullable String customMessage,
    @Nullable String rendererName,
    int rendererIndex,
    @Nullable Format rendererFormat,
    @FormatSupport int rendererFormatSupport) {
  String base;
  switch (type) {
    case TYPE_SOURCE:
      base = "Source error";
      break;
    case TYPE_RENDERER:
      base =
          rendererName
              + " error"
              + ", index="
              + rendererIndex
              + ", format="
              + rendererFormat
              + ", format_supported="
              + Util.getFormatSupportString(rendererFormatSupport);
      break;
    case TYPE_REMOTE:
      base = "Remote error";
      break;
    case TYPE_UNEXPECTED:
    default:
      base = "Unexpected runtime error";
      break;
  }
  return TextUtils.isEmpty(customMessage) ? base : base + ": " + customMessage;
}
// Bundleable implementation.

/** Object that can restore {@link ExoPlaybackException} from a {@link Bundle}. */
public static final Creator<ExoPlaybackException> CREATOR = ExoPlaybackException::new;

// Bundle keys for this subclass's fields, offset past the ids already claimed by the
// PlaybackException superclass (FIELD_CUSTOM_ID_BASE).
private static final String FIELD_TYPE = Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 1);
private static final String FIELD_RENDERER_NAME =
    Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 2);
private static final String FIELD_RENDERER_INDEX =
    Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 3);
private static final String FIELD_RENDERER_FORMAT =
    Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 4);
private static final String FIELD_RENDERER_FORMAT_SUPPORT =
    Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 5);
private static final String FIELD_IS_RECOVERABLE =
    Util.intToStringMaxRadix(FIELD_CUSTOM_ID_BASE + 6);
/**
 * {@inheritDoc}
 *
 * <p>It omits the {@link #mediaPeriodId} field. The {@link #mediaPeriodId} of an instance
 * restored by {@link #CREATOR} will always be {@code null}.
 */
@Override
public Bundle toBundle() {
  // Superclass serializes the common PlaybackException state first.
  Bundle bundle = super.toBundle();
  bundle.putInt(FIELD_TYPE, type);
  bundle.putString(FIELD_RENDERER_NAME, rendererName);
  bundle.putInt(FIELD_RENDERER_INDEX, rendererIndex);
  // rendererFormat is null for non-renderer errors; only bundle it when present.
  if (rendererFormat != null) {
    bundle.putBundle(FIELD_RENDERER_FORMAT, rendererFormat.toBundle());
  }
  bundle.putInt(FIELD_RENDERER_FORMAT_SUPPORT, rendererFormatSupport);
  bundle.putBoolean(FIELD_IS_RECOVERABLE, isRecoverable);
  return bundle;
}
}

View file

@ -1,350 +0,0 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import android.content.Context;
import android.os.Looper;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.upstream.BandwidthMeter;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Util;
/** @deprecated Use {@link SimpleExoPlayer.Builder} or {@link ExoPlayer.Builder} instead. */
@Deprecated
public final class ExoPlayerFactory {

  // Static factory class: never instantiated.
  private ExoPlayerFactory() {}

  // Each overload below delegates down the chain, filling in defaults, until it reaches the
  // fully-parameterized newSimpleInstance/newInstance at the bottom.

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
      @DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode) {
    RenderersFactory renderersFactory =
        new DefaultRenderersFactory(context).setExtensionRendererMode(extensionRendererMode);
    return newSimpleInstance(
        context, renderersFactory, trackSelector, loadControl, drmSessionManager);
  }

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
      @DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode,
      long allowedVideoJoiningTimeMs) {
    RenderersFactory renderersFactory =
        new DefaultRenderersFactory(context)
            .setExtensionRendererMode(extensionRendererMode)
            .setAllowedVideoJoiningTimeMs(allowedVideoJoiningTimeMs);
    return newSimpleInstance(
        context, renderersFactory, trackSelector, loadControl, drmSessionManager);
  }

  /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(Context context) {
    return newSimpleInstance(context, new DefaultTrackSelector(context));
  }

  /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector) {
    return newSimpleInstance(context, new DefaultRenderersFactory(context), trackSelector);
  }

  /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context, RenderersFactory renderersFactory, TrackSelector trackSelector) {
    return newSimpleInstance(context, renderersFactory, trackSelector, new DefaultLoadControl());
  }

  /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context, TrackSelector trackSelector, LoadControl loadControl) {
    RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
    return newSimpleInstance(context, renderersFactory, trackSelector, loadControl);
  }

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
    RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
    return newSimpleInstance(
        context, renderersFactory, trackSelector, loadControl, drmSessionManager);
  }

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      RenderersFactory renderersFactory,
      TrackSelector trackSelector,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
    return newSimpleInstance(
        context, renderersFactory, trackSelector, new DefaultLoadControl(), drmSessionManager);
  }

  /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      RenderersFactory renderersFactory,
      TrackSelector trackSelector,
      LoadControl loadControl) {
    return newSimpleInstance(
        context,
        renderersFactory,
        trackSelector,
        loadControl,
        /* drmSessionManager= */ null,
        Util.getLooper());
  }

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      RenderersFactory renderersFactory,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
    return newSimpleInstance(
        context, renderersFactory, trackSelector, loadControl, drmSessionManager, Util.getLooper());
  }

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      RenderersFactory renderersFactory,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
      BandwidthMeter bandwidthMeter) {
    return newSimpleInstance(
        context,
        renderersFactory,
        trackSelector,
        loadControl,
        drmSessionManager,
        bandwidthMeter,
        new AnalyticsCollector(Clock.DEFAULT),
        Util.getLooper());
  }

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      RenderersFactory renderersFactory,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
      AnalyticsCollector analyticsCollector) {
    return newSimpleInstance(
        context,
        renderersFactory,
        trackSelector,
        loadControl,
        drmSessionManager,
        analyticsCollector,
        Util.getLooper());
  }

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      RenderersFactory renderersFactory,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
      Looper looper) {
    return newSimpleInstance(
        context,
        renderersFactory,
        trackSelector,
        loadControl,
        drmSessionManager,
        new AnalyticsCollector(Clock.DEFAULT),
        looper);
  }

  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      RenderersFactory renderersFactory,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
      AnalyticsCollector analyticsCollector,
      Looper looper) {
    return newSimpleInstance(
        context,
        renderersFactory,
        trackSelector,
        loadControl,
        drmSessionManager,
        DefaultBandwidthMeter.getSingletonInstance(context),
        analyticsCollector,
        looper);
  }

  // Fully-parameterized SimpleExoPlayer factory: the end of the newSimpleInstance chain.
  /**
   * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
   *     be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
   *     MediaSource} factories.
   */
  @SuppressWarnings("deprecation")
  @Deprecated
  public static SimpleExoPlayer newSimpleInstance(
      Context context,
      RenderersFactory renderersFactory,
      TrackSelector trackSelector,
      LoadControl loadControl,
      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
      BandwidthMeter bandwidthMeter,
      AnalyticsCollector analyticsCollector,
      Looper looper) {
    return new SimpleExoPlayer(
        context,
        renderersFactory,
        trackSelector,
        loadControl,
        drmSessionManager,
        bandwidthMeter,
        analyticsCollector,
        Clock.DEFAULT,
        looper);
  }

  /** @deprecated Use {@link ExoPlayer.Builder} instead. */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static ExoPlayer newInstance(
      Context context, Renderer[] renderers, TrackSelector trackSelector) {
    return newInstance(context, renderers, trackSelector, new DefaultLoadControl());
  }

  /** @deprecated Use {@link ExoPlayer.Builder} instead. */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static ExoPlayer newInstance(
      Context context, Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl) {
    return newInstance(context, renderers, trackSelector, loadControl, Util.getLooper());
  }

  /** @deprecated Use {@link ExoPlayer.Builder} instead. */
  @Deprecated
  @SuppressWarnings("deprecation")
  public static ExoPlayer newInstance(
      Context context,
      Renderer[] renderers,
      TrackSelector trackSelector,
      LoadControl loadControl,
      Looper looper) {
    return newInstance(
        context,
        renderers,
        trackSelector,
        loadControl,
        DefaultBandwidthMeter.getSingletonInstance(context),
        looper);
  }

  // Fully-parameterized ExoPlayer factory: the end of the newInstance chain.
  /** @deprecated Use {@link ExoPlayer.Builder} instead. */
  @Deprecated
  public static ExoPlayer newInstance(
      Context context,
      Renderer[] renderers,
      TrackSelector trackSelector,
      LoadControl loadControl,
      BandwidthMeter bandwidthMeter,
      Looper looper) {
    return new ExoPlayerImpl(
        renderers, trackSelector, loadControl, bandwidthMeter, Clock.DEFAULT, looper);
  }
}

Some files were not shown because too many files have changed in this diff Show more